diff --git a/.github/workflows/pr_linting.yml b/.github/workflows/pr_linting.yml
new file mode 100644
index 00000000..3d2431b6
--- /dev/null
+++ b/.github/workflows/pr_linting.yml
@@ -0,0 +1,24 @@
+name: 📇 Code Linting
+
+on:
+  push:
+    branches: [ develop ]
+  pull_request:
+    branches: [ develop ]
+
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+permissions:
+  contents: read
+  pull-requests: write
+
+jobs:
+  linting:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
diff --git a/.github/workflows/release_trigger.yml b/.github/workflows/release_trigger.yml
new file mode 100644
index 00000000..4f02e572
--- /dev/null
+++ b/.github/workflows/release_trigger.yml
@@ -0,0 +1,13 @@
+name: 🚀 Release Trigger
+
+on:
+  workflow_dispatch:
+
+jobs:
+  call-release-trigger:
+    # TODO reference proper action
+    uses: ynput/ci-testing/.github/workflows/release_basic.yml@develop
+    secrets:
+      token: ${{ secrets.YNPUT_BOT_TOKEN }}
+      email: ${{ secrets.CI_EMAIL }}
+      user: ${{ secrets.CI_USER }}
diff --git a/README.md b/README.md
index c717153c..054be7f4 100644
--- a/README.md
+++ b/README.md
@@ -2,7 +2,7 @@
 This project provides three elements for the AYON pipeline:
  * server - The AYON backend Addon.
- * client - The AYON (currently OpenPype) desktop integration.
+ * client - The AYON desktop integration.
  * services - Standalone dockerized daemons that act based on events (aka `leecher` and `processors`).
 
 There is a common code that can be re-used for `server`, `client` and `services`. Is located inside client code for developer mode `./client/ayon_ftrack/common/`.
@@ -33,6 +33,46 @@ As mentioned there are 2 services `leecher` and `processor`. Both services have 
To run services locally (recommended only for development purposes), there are 2 possible approaches. One is by running docker image, or using prepared service tools. 
+- `leecher` - Service that listens to ftrack events and stores them in the AYON database.
+- `processor` - Service that is processing ftrack events stored in the AYON database. Only one event is processed at a time.
+
+### Processor
+Processor contains multiple event handlers that handle synchronization or basic automation helpers. It also provides a way to add custom event handlers from other addons. The addon must be loaded into the server, and must be available in a bundle based on the variant that the service is running in ("production", "staging" or dev bundle).
+The addon also must have a prepared archive file that can be downloaded from the server.
+
+#### Archive file
+The archive file can be a zip or tar, and must contain a `manifest.json` file that describes the content. The archive file must be uploaded to the server and must be available for download. The addon must implement the `get_custom_ftrack_handlers_endpoint` method that returns the URL to the archive file.
+
+```python
+class SomeAddon(BaseServerAddon):
+    name = "some_addon"
+    version = "1.0.0"
+
+    def get_custom_ftrack_handlers_endpoint(self) -> str:
+        return f"addons/{self.name}/{self.version}/private/ftrack_handlers.tar.gz"
+```
+
+#### Manifest file
+Manifest file is a JSON file that describes the content of the archive file. It is used to load the content of the archive file into the processor. The file must be named `manifest.json` and must be in the root of the archive file.
+
+```json
+{
+    "version": "1.0.0",
+    "handler_subfolders": [
+        "event_handlers"
+    ],
+    "python_path_subfolders": [
+        "common"
+    ]
+}
+```
+Content of manifest may change in the future; to be able to track changes and keep backwards compatibility a `"version"` was added. Current version is `"1.0.0"`.
+
+1.0.0
+- `handler_subfolders` - List of subfolders, relative to manifest.json, where files with event handlers can be found. Processor will go through all of the subfolders and import all python files that are in the subfolder. 
It is recommended to have only one subfolder. +- `python_path_subfolders` - Optional list of subfolders, relative to manifest.json. These paths are added to `sys.path` so content inside can be imported. Can be used for "common" code for the event handlers. It is not recommended to add python modules because of possible conflicts with other addons, but is possible. + + ### Start as docker Both services have prepared scripts to build and run docker images. There are 2 scripts `manage.ps1` for Windows and `Makefile` for Linux. Both scripts are doing the same thing and have same commands. diff --git a/client/ayon_ftrack/__init__.py b/client/ayon_ftrack/__init__.py index b49415bd..415def23 100644 --- a/client/ayon_ftrack/__init__.py +++ b/client/ayon_ftrack/__init__.py @@ -1,3 +1,4 @@ +from .version import __version__ from .ftrack_addon import ( FtrackAddon, FTRACK_ADDON_DIR, @@ -6,6 +7,8 @@ ) __all__ = ( + "__version__", + "FtrackAddon", "FTRACK_ADDON_DIR", diff --git a/client/ayon_ftrack/common/custom_attributes.json b/client/ayon_ftrack/common/custom_attributes.json index d4f23053..b582fef2 100644 --- a/client/ayon_ftrack/common/custom_attributes.json +++ b/client/ayon_ftrack/common/custom_attributes.json @@ -9,7 +9,7 @@ "fps": { "label": "FPS", "type": "number", - "config": {"isdecimal": true} + "config": {"isdecimal": true, "precision": 6} }, "clipIn": { "label": "Clip in", diff --git a/client/ayon_ftrack/common/event_handlers/ftrack_action_handler.py b/client/ayon_ftrack/common/event_handlers/ftrack_action_handler.py index 2be1bded..e6689712 100644 --- a/client/ayon_ftrack/common/event_handlers/ftrack_action_handler.py +++ b/client/ayon_ftrack/common/event_handlers/ftrack_action_handler.py @@ -1,18 +1,11 @@ -import os import functools +from typing import Optional, List, Dict, Any, Union import ftrack_api from .ftrack_base_handler import BaseHandler -def statics_icon(*icon_statics_file_parts): - statics_server = os.environ.get("OPENPYPE_STATICS_SERVER") - if not 
statics_server: - return None - return "/".join((statics_server, *icon_statics_file_parts)) - - class BaseAction(BaseHandler): """Custom Action base class. @@ -36,24 +29,26 @@ class BaseAction(BaseHandler): Args: session (ftrack_api.Session): Connected ftrack session. + """ + __ignore_handler_class = True - label = None - variant = None - identifier = None - description = None - icon = None - handler_type = "Action" - preactions = [] + label: Optional[str] = None + variant: Optional[str] = None + identifier: Optional[str] = None + description: Optional[str] = None + icon: Optional[str] = None + handler_type: str = "Action" + preactions: List[str] = [] - _full_label = None - _discover_identifier = None - _launch_identifier = None + _full_label: Optional[str] = None + _discover_identifier: Optional[str] = None + _launch_identifier: Optional[str] = None - settings_frack_subkey = "user_handlers" - settings_enabled_key = "enabled" + settings_frack_subkey: str = "user_handlers" + settings_enabled_key: str = "enabled" - def __init__(self, session): + def __init__(self, session: ftrack_api.Session): # Validate minimum requirements if not self.label: raise ValueError("Action missing 'label'.") @@ -68,27 +63,27 @@ def setup_launch_wrapper(self): self._launch = self.launch_wrapper(self._launch) @property - def discover_identifier(self): + def discover_identifier(self) -> str: return self.identifier @property - def launch_identifier(self): + def launch_identifier(self) -> str: return self.identifier @property - def handler_label(self): + def handler_label(self) -> str: return self.full_label @property - def full_label(self): + def full_label(self) -> str: """Full label of action. Value of full label is cached. Returns: str: Label created from 'label' and 'variant' attributes. 
- """ + """ if self._full_label is None: if self.variant: label = "{} {}".format(self.label, self.variant) @@ -99,7 +94,6 @@ def full_label(self): def register(self): """Register to ftrack topics to discover and launch action.""" - self.session.event_hub.subscribe( "topic=ftrack.action.discover", self._discover, @@ -111,9 +105,12 @@ def register(self): ).format(self.launch_identifier) self.session.event_hub.subscribe(launch_subscription, self._launch) - def _translate_event(self, event, session=None): + def _translate_event( + self, + event: ftrack_api.event.base.Event, + session: Optional[ftrack_api.Session] = None + ) -> List[ftrack_api.entity.base.Entity]: """Translate event to receive entities based on it's data.""" - if session is None: session = self.session @@ -134,7 +131,9 @@ def _translate_event(self, event, session=None): return _entities - def _discover(self, event): + def _discover( + self, event: ftrack_api.event.base.Event + ) -> Optional[Dict[str, Any]]: """Decide if and how will be action showed to user in ftrack. Args: @@ -145,8 +144,8 @@ def _discover(self, event): Union[None, Dict[str, Any]]: None if action is not returned otherwise returns items to show in UI (structure of items is defined by ftrack and can be found in documentation). - """ + """ entities = self._translate_event(event) if not entities: return None @@ -169,7 +168,12 @@ def _discover(self, event): }] } - def discover(self, session, entities, event): + def discover( + self, + session: ftrack_api.Session, + entities: List[ftrack_api.entity.base.Entity], + event: ftrack_api.event.base.Event, + ) -> bool: """Decide if action is showed to used based on event data. Action should override the method to implement logic to show the @@ -185,11 +189,13 @@ def discover(self, session, entities, event): Returns: bool: True if action should be returned. 
- """ + """ return False - def _handle_preactions(self, session, event): + def _handle_preactions( + self, session: ftrack_api.Session, event: ftrack_api.event.base.Event + ) -> bool: """Launch actions before launching this action. Concept came from Pype and got deprecated (and used) over time. Should @@ -211,8 +217,8 @@ def _handle_preactions(self, session, event): Preactions are marked as deprecated. Server actions should not use preactions and local actions use local identifier which is hard to handle automatically - """ + """ # If preactions are not set if len(self.preactions) == 0: return True @@ -268,7 +274,9 @@ def wrapper_func(*args, **kwargs): return output return wrapper_func - def _launch(self, event): + def _launch( + self, event: ftrack_api.event.base.Event + ) -> Optional[Dict[str, Any]]: entities = self._translate_event(event) if not entities: return @@ -285,7 +293,12 @@ def _launch(self, event): return self._handle_result(response) - def launch(self, session, entities, event): + def launch( + self, + session: ftrack_api.Session, + entities: List[ftrack_api.entity.base.Entity], + event: ftrack_api.event.base.Event + ) -> Optional[Union[bool, Dict[str, Any]]]: """Main part of handling event callback. Args: @@ -294,14 +307,18 @@ def launch(self, session, entities, event): event (ftrack_api.Event): Ftrack event to process. Returns: - Union[None, bool, Dict[str, Any]]: None if nothing should be showed - to user when done, 'True'/'False' if process succeded/failed - or more complex data strucure e.g. to show interface to user. - """ + Union[bool, Dict[str, Any]]: True or false for success or fail, + or more complex data structure e.g. to show interface to user. 
+ """ raise NotImplementedError() - def _interface(self, session, entities, event): + def _interface( + self, + session: ftrack_api.Session, + entities: List[ftrack_api.entity.base.Entity], + event: ftrack_api.event.base.Event + ) -> Optional[Dict[str, Any]]: interface = self.interface(session, entities, event) if not interface: return @@ -327,7 +344,12 @@ def _interface(self, session, entities, event): ) ) - def interface(self, session, entities, event): + def interface( + self, + session: ftrack_api.Session, + entities: List[ftrack_api.entity.base.Entity], + event: ftrack_api.event.base.Event + ) -> Optional[Union[Dict[str, Any], List[Dict[str, Any]]]]: """Show an interface to user befor the action is processed. This is part of launch callback which gives option to return ftrack @@ -346,13 +368,12 @@ def interface(self, session, entities, event): should be showed, list of items to show or dictionary with 'items' key and possibly additional data (e.g. submit button label). - """ + """ return None - def _handle_result(self, result): + def _handle_result(self, result: Any) -> Optional[Dict[str, Any]]: """Validate the returned result from the action callback.""" - if not result: return None @@ -393,7 +414,11 @@ def _handle_result(self, result): return result @staticmethod - def roles_check(settings_roles, user_roles, default=True): + def roles_check( + settings_roles: List[str], + user_roles: List[str], + default: Optional[bool] = True + ) -> bool: """Compare roles from setting and user's roles. Args: @@ -404,8 +429,8 @@ def roles_check(settings_roles, user_roles, default=True): Returns: bool: 'True' if user has at least one role from settings or default if 'settings_roles' is empty. 
- """ + """ if not settings_roles: return default @@ -419,7 +444,11 @@ def roles_check(settings_roles, user_roles, default=True): return False @classmethod - def get_user_entity_from_event(cls, session, event): + def get_user_entity_from_event( + cls, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event + ) -> Optional[ftrack_api.entity.user.User]: """Query user entity from event.""" not_set = object() @@ -444,15 +473,23 @@ def get_user_entity_from_event(cls, session, event): return user_entity @classmethod - def get_user_roles_from_event(cls, session, event, lower=False): + def get_user_roles_from_event( + cls, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event, + lower: Optional[bool] = False + ) -> List[str]: """Get user roles based on data in event. Args: session (ftrack_api.Session): Prepared ftrack session. event (ftrack_api.event.Event): Event which is processed. lower (Optional[bool]): Lower the role names. Default 'False'. - """ + Returns: + List[str]: List of user roles. + + """ not_set = object() user_roles = event["data"].get("user_roles", not_set) @@ -468,8 +505,11 @@ def get_user_roles_from_event(cls, session, event, lower=False): return user_roles def get_project_name_from_event_with_entities( - self, session, event, entities - ): + self, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event, + entities: List[ftrack_api.entity.base.Entity], + ) -> Optional[str]: """Load or query and fill project entity from/to event data. Project data are stored by ftrack id because in most cases it is @@ -479,8 +519,11 @@ def get_project_name_from_event_with_entities( session (ftrack_api.Session): Current session. event (ftrack_api.Event): Processed event by session. entities (List[Any]): Ftrack entities of selection. - """ + Returns: + Optional[str]: Project name from event data. 
+ + """ # Try to get project entity from event project_name = event["data"].get("project_name") if not project_name: @@ -492,7 +535,12 @@ def get_project_name_from_event_with_entities( event["data"]["project_name"] = project_name return project_name - def get_ftrack_settings(self, session, event, entities): + def get_ftrack_settings( + self, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event, + entities: List[ftrack_api.entity.base.Entity], + ) -> Dict[str, Any]: project_name = self.get_project_name_from_event_with_entities( session, event, entities ) @@ -501,12 +549,16 @@ def get_ftrack_settings(self, session, event, entities): ) return project_settings["ftrack"] - def valid_roles(self, session, entities, event): + def valid_roles( + self, + session: ftrack_api.Session, + entities: List[ftrack_api.entity.base.Entity], + event: ftrack_api.event.base.Event, + ) -> bool: """Validate user roles by settings. Method requires to have set `settings_key` attribute. """ - ftrack_settings = self.get_ftrack_settings(session, event, entities) settings = ( ftrack_settings[self.settings_frack_subkey][self.settings_key] @@ -529,10 +581,11 @@ class LocalAction(BaseAction): Handy for actions where matters if is executed on specific machine. """ - _full_launch_identifier = None + __ignore_handler_class: bool = True + _full_launch_identifier: bool = None @property - def discover_identifier(self): + def discover_identifier(self) -> str: if self._discover_identifier is None: self._discover_identifier = "{}.{}".format( self.identifier, self.process_identifier() @@ -540,7 +593,7 @@ def discover_identifier(self): return self._discover_identifier @property - def launch_identifier(self): + def launch_identifier(self) -> str: """Catch all topics with same identifier.""" if self._launch_identifier is None: self._launch_identifier = "{}.*".format(self.identifier) @@ -560,7 +613,6 @@ def register(self): Filter events to this session user. 
""" - # Subscribe to discover topic for user under this session self.session.event_hub.subscribe( "topic=ftrack.action.discover and source.user.username={}".format( @@ -580,7 +632,9 @@ def register(self): self._launch ) - def _discover(self, event): + def _discover( + self, event: ftrack_api.event.base.Event + ) -> Optional[Dict[str, Any]]: entities = self._translate_event(event) if not entities: return @@ -603,7 +657,9 @@ def _discover(self, event): }] } - def _launch(self, event): + def _launch( + self, event: ftrack_api.event.base.Event + ) -> Optional[Dict[str, Any]]: event_identifier = event["data"]["actionIdentifier"] # Check if identifier is same # - show message that acion may not be triggered on this machine @@ -615,7 +671,7 @@ def _launch(self, event): " where this action could be launched." ) } - return super(LocalAction, self)._launch(event) + return super()._launch(event) class ServerAction(BaseAction): @@ -624,5 +680,6 @@ class ServerAction(BaseAction): Unlike the `BaseAction` roles are not checked on register but on discover. For the same reason register is modified to not filter topics by username. """ + __ignore_handler_class: bool = True - settings_frack_subkey = "service_event_handlers" + settings_frack_subkey: str = "service_event_handlers" diff --git a/client/ayon_ftrack/common/event_handlers/ftrack_base_handler.py b/client/ayon_ftrack/common/event_handlers/ftrack_base_handler.py index ecd9d933..d3d1c392 100644 --- a/client/ayon_ftrack/common/event_handlers/ftrack_base_handler.py +++ b/client/ayon_ftrack/common/event_handlers/ftrack_base_handler.py @@ -9,13 +9,14 @@ import time import logging from abc import ABCMeta, abstractmethod +from typing import Optional, Any, Union, Iterable, List, Dict, Tuple import ftrack_api from ayon_api import get_addons_settings, get_project -class BaseHandler(object, metaclass=ABCMeta): +class BaseHandler(metaclass=ABCMeta): """Base class for handling ftrack events. 
Attributes: @@ -26,15 +27,17 @@ class BaseHandler(object, metaclass=ABCMeta): Args: session (ftrack_api.Session): Connected ftrack session. - """ - _log = None - _process_id = None + """ + _log: Optional[logging.Logger] = None + _process_id: Optional[str] = None # Default priority is 100 - enabled = True - priority = 100 - handler_type = "Base" - _handler_label = None + enabled: bool = True + priority: int = 100 + handler_type: str = "Base" + _handler_label: Optional[str] = None + # Mark base classes to be ignored for discovery + __ignore_handler_class: bool = True def __init__(self, session): if not isinstance(session, ftrack_api.session.Session): @@ -46,22 +49,35 @@ def __init__(self, session): self.register = self.register_wrapper(self.register) + @classmethod + def ignore_handler_class(cls) -> bool: + """Check if handler class should be ignored. + + Do not touch implementation of this method, set + '__ignore_handler_class' to 'True' if you want to ignore class. + + """ + cls_name = cls.__name__ + if not cls_name.startswith("_"): + cls_name = f"_{cls_name}" + return getattr(cls, f"{cls_name}__ignore_handler_class", False) + @staticmethod - def join_filter_values(values): + def join_filter_values(values: Iterable[str]) -> str: return ",".join({'"{}"'.format(value) for value in values}) @classmethod - def join_query_keys(cls, keys): + def join_query_keys(cls, keys: Iterable[str]) -> str: return cls.join_filter_values(keys) @property - def log(self): + def log(self) -> logging.Logger: """Quick access to logger. Returns: logging.Logger: Logger that can be used for logging of handler. 
- """ + """ if self._log is None: # TODO better logging mechanism self._log = logging.getLogger(self.__class__.__name__) @@ -69,35 +85,34 @@ def log(self): return self._log @property - def handler_label(self): + def handler_label(self) -> str: if self._handler_label is None: self._handler_label = self.__class__.__name__ return self._handler_label @property - def session(self): + def session(self) -> ftrack_api.Session: """Fast access to session. Returns: session (ftrack_api.Session): Session which is source of events. - """ + """ return self._session def reset_session(self): """Reset session cache.""" - self.session.reset() @staticmethod - def process_identifier(): + def process_identifier() -> str: """Helper property to have unified access to process id. Todos: Use some global approach rather then implementation on 'BaseEntity'. - """ + """ if not BaseHandler._process_id: BaseHandler._process_id = str(uuid.uuid4()) return BaseHandler._process_id @@ -105,7 +120,6 @@ def process_identifier(): @abstractmethod def register(self): """Subscribe to event topics.""" - pass def register_wrapper(self, func): @@ -170,8 +184,8 @@ def _get_entity_type(self, entity, session=None): Todos: Use object id rather. - """ + """ # Get entity type and make sure it is lower cased. Most places except # the component tab in the Sidebar will use lower case notation. entity_type = entity.get("entityType").replace("_", "").lower() @@ -197,15 +211,21 @@ def _get_entity_type(self, entity, session=None): "Unable to translate entity type: {0}.".format(entity_type) ) - def show_message(self, event, message, success=False): + def show_message( + self, + event: ftrack_api.event.base.Event, + message: str, + success: Optional[bool]=False, + ): """Shows message to user who triggered event. Args: - event (ftrack_api.Event): Event used for source of user id. + event (ftrack_api.event.base.Event): Event used for source + of user id. message (str): Message that will be shown to user. 
success (bool): Define type (color) of message. False -> red color. - """ + """ if not isinstance(success, bool): success = False @@ -233,13 +253,13 @@ def show_message(self, event, message, success=False): def show_interface( self, - items, - title="", - user_id=None, - user=None, - event=None, - username=None, - submit_btn_label=None + items: List[Dict[str, Any]], + title: Optional[str] = "", + user_id: Optional[str] = None, + user: Optional[Any] = None, + event: Optional[ftrack_api.event.base.Event] = None, + username: Optional[str] = None, + submit_btn_label: Optional[str] = None, ): """Shows ftrack widgets interface to user. @@ -258,8 +278,8 @@ def show_interface( username (str): Username of user to get it's id. This is slowest way how user id is received. submit_btn_label (str): Label of submit button in ftrack widget. - """ + """ if user_id: pass @@ -304,7 +324,16 @@ def show_interface( on_error="ignore" ) - def show_interface_from_dict(self, messages, *args, **kwargs): + def show_interface_from_dict( + self, + messages: Dict[str, Union[str, List[str]]], + title: Optional[str] = "", + user_id: Optional[str] = None, + user: Optional[Any] = None, + event: Optional[ftrack_api.event.base.Event] = None, + username: Optional[str] = None, + submit_btn_label: Optional[str] = None, + ): # TODO Find out how and where is this used if not messages: self.log.debug("No messages to show! (messages dict is empty)") @@ -315,32 +344,35 @@ def show_interface_from_dict(self, messages, *args, **kwargs): for key, value in messages.items(): if not first: items.append(splitter) - else: - first = False + first = False items.append({"type": "label", "value": "

{}

".format(key)}) - if isinstance(value, list): - for item in value: - message = { - "type": "label", "value": "

{}

".format(item) - } - items.append(message) - else: - message = {"type": "label", "value": "

{}

".format(value)} - items.append(message) - - self.show_interface(items, *args, **kwargs) + if isinstance(value, str): + value = [value] + + for item in value: + items.append({"type": "label", "value": f"

{item}

"}) + + self.show_interface( + items, + title=title, + user_id=user_id, + user=user, + event=event, + username=username, + submit_btn_label=submit_btn_label + ) def trigger_action( self, - action_identifier, - event=None, - session=None, - selection=None, - user_data=None, - topic="ftrack.action.launch", - additional_event_data={}, - on_error="ignore" + action_identifier: str, + event: Optional[ftrack_api.event.base.Event] = None, + session: Optional[ftrack_api.Session] = None, + selection: Optional[List[Dict[str, str]]] = None, + user_data: Optional[Dict[str, Any]] = None, + topic: Optional[str] = "ftrack.action.launch", + additional_event_data: Optional[Dict[str, Any]] = None, + on_error: Optional[str] = "ignore" ): self.log.debug( "Triggering action \"{}\" Begins".format(action_identifier)) @@ -388,12 +420,12 @@ def trigger_action( def trigger_event( self, - topic, - event_data=None, - session=None, - source=None, - event=None, - on_error="ignore" + topic: str, + event_data: Optional[Dict[str, Any]] = None, + session: Optional[ftrack_api.Session] = None, + source: Optional[Dict[str, Any]] = None, + event: Optional[ftrack_api.event.base.Event] = None, + on_error: Optional[str] = "ignore" ): if session is None: session = self.session @@ -415,7 +447,11 @@ def trigger_event( "Publishing event: {}" ).format(str(event.__dict__))) - def get_project_from_entity(self, entity, session=None): + def get_project_from_entity( + self, + entity: ftrack_api.entity.base.Entity, + session: Optional[ftrack_api.Session] = None + ): low_entity_type = entity.entity_type.lower() if low_entity_type == "project": return entity @@ -449,7 +485,12 @@ def get_project_from_entity(self, entity, session=None): "Project where id is {}".format(project_data["id"]) ).one() - def get_project_entity_from_event(self, session, event, project_id): + def get_project_entity_from_event( + self, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event, + project_id: str, + ): """Load or query and 
fill project entity from/to event data. Project data are stored by ftrack id because in most cases it is @@ -463,8 +504,8 @@ def get_project_entity_from_event(self, session, event, project_id): Returns: Union[str, None]: Project name based on entities or None if project cannot be defined. - """ + """ if not project_id: raise ValueError( "Entered `project_id` is not valid. {} ({})".format( @@ -486,7 +527,12 @@ def get_project_entity_from_event(self, session, event, project_id): return project_entity - def get_project_name_from_event(self, session, event, project_id): + def get_project_name_from_event( + self, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event, + project_id: str, + ): """Load or query and fill project entity from/to event data. Project data are stored by ftrack id because in most cases it is @@ -500,8 +546,8 @@ def get_project_name_from_event(self, session, event, project_id): Returns: Union[str, None]: Project name based on entities or None if project cannot be defined. - """ + """ if not project_id: raise ValueError( "Entered `project_id` is not valid. {} ({})".format( @@ -522,7 +568,11 @@ def get_project_name_from_event(self, session, event, project_id): project_id_mapping[project_id] = project_name return project_name - def get_ayon_project_from_event(self, event, project_name): + def get_ayon_project_from_event( + self, + event: ftrack_api.event.base.Event, + project_name: str + ): """Get AYON project from event. Args: @@ -531,8 +581,8 @@ def get_ayon_project_from_event(self, event, project_name): Returns: Union[dict[str, Any], None]: AYON project. 
- """ + """ ayon_projects = event["data"].setdefault("ayon_projects", {}) if project_name in ayon_projects: return ayon_projects[project_name] @@ -543,7 +593,11 @@ def get_ayon_project_from_event(self, event, project_name): ayon_projects[project_name] = project return project - def get_project_settings_from_event(self, event, project_name): + def get_project_settings_from_event( + self, + event: ftrack_api.event.base.Event, + project_name: str + ): """Load or fill AYON's project settings from event data. Project data are stored by ftrack id because in most cases it is @@ -552,8 +606,8 @@ def get_project_settings_from_event(self, event, project_name): Args: event (ftrack_api.Event): Processed event by session. project_name (str): Project name. - """ + """ project_settings_by_name = event["data"].setdefault( "project_settings", {} ) @@ -573,19 +627,21 @@ def get_project_settings_from_event(self, event, project_name): return copy.deepcopy(project_settings) @staticmethod - def get_entity_path(entity): + def get_entity_path(entity: ftrack_api.entity.base.Entity) -> str: """Return full hierarchical path to entity.""" - return "/".join( [ent["name"] for ent in entity["link"]] ) @classmethod def add_traceback_to_job( - cls, job, session, exc_info, - description=None, - component_name=None, - job_status=None + cls, + job: ftrack_api.entity.job.Job, + session: ftrack_api.Session, + exc_info: Tuple, + description: Optional[str] = None, + component_name: Optional[str] = None, + job_status: Optional[str] = None ): """Add traceback file to a job. @@ -602,8 +658,8 @@ def add_traceback_to_job( not specified. job_status (str): Status of job which will be set. By default is set to 'failed'. 
- """ + """ if description: job_data = { "description": description @@ -639,7 +695,12 @@ def add_traceback_to_job( os.remove(temp_filepath) @staticmethod - def add_file_component_to_job(job, session, filepath, basename=None): + def add_file_component_to_job( + job: ftrack_api.entity.job.Job, + session: ftrack_api.Session, + filepath: str, + basename: Optional[str] = None + ): """Add filepath as downloadable component to job. Args: @@ -652,8 +713,8 @@ def add_file_component_to_job(job, session, filepath, basename=None): user's side. Must be without extension otherwise extension will be duplicated in downloaded name. Basename from entered path used when not entered. - """ + """ # Make sure session's locations are configured # - they can be deconfigured e.g. using `rollback` method session._configure_locations() diff --git a/client/ayon_ftrack/common/event_handlers/ftrack_event_handler.py b/client/ayon_ftrack/common/event_handlers/ftrack_event_handler.py index b8a9c726..bd4ae326 100644 --- a/client/ayon_ftrack/common/event_handlers/ftrack_event_handler.py +++ b/client/ayon_ftrack/common/event_handlers/ftrack_event_handler.py @@ -1,3 +1,7 @@ +from typing import Optional + +import ftrack_api + from .ftrack_base_handler import BaseHandler @@ -9,32 +13,34 @@ class BaseEventHandler(BaseHandler): By default is listening to "ftrack.update". To change it override 'register' method of change 'subscription_topic' attribute. """ + __ignore_handler_class: bool = True - subscription_topic = "ftrack.update" - handler_type = "Event" + subscription_topic: str = "ftrack.update" + handler_type: str = "Event" def register(self): """Register to subscription topic.""" - self.session.event_hub.subscribe( "topic={}".format(self.subscription_topic), self._process, priority=self.priority ) - def process(self, event): + def process(self, event: ftrack_api.event.base.Event): """Callback triggered on event with matching topic. 
Args: - session (ftrack_api.Session): Ftrack session which triggered - the event. event (ftrack_api.Event): Ftrack event to process. - """ + """ return self.launch(self.session, event) - def launch(self, session, event): + def launch( + self, + session: ftrack_api.Session, + event: ftrack_api.event.base.Event + ): """Deprecated method used for backwards compatibility. Override 'process' method rather then 'launch'. Method name 'launch' @@ -45,19 +51,18 @@ def launch(self, session, event): session (ftrack_api.Session): Ftrack session which triggered the event. event (ftrack_api.Event): Ftrack event to process. - """ + """ raise NotImplementedError() - def _process(self, event): + def _process(self, event: ftrack_api.event.base.Event): return self._launch(event) - def _launch(self, event): + def _launch(self, event: ftrack_api.event.base.Event): """Callback kept for backwards compatibility. Will be removed when default """ - self.session.rollback() self.session._local_cache.clear() @@ -65,16 +70,20 @@ def _launch(self, event): self.process(event) except Exception as exc: - self.session.rollback() - self.session._configure_locations() self.log.error( "Event \"{}\" Failed: {}".format( self.__class__.__name__, str(exc) ), exc_info=True ) + self.session.rollback() + self.session._configure_locations() - def _translate_event(self, event, session=None): + def _translate_event( + self, + event: ftrack_api.event.base.Event, + session: Optional[ftrack_api.Session] = None + ): """Receive entity objects based on event. Args: @@ -83,8 +92,8 @@ def _translate_event(self, event, session=None): Returns: List[ftrack_api.Entity]: Queried entities based on event data. 
- """ + """ return self._get_entities( event, session, diff --git a/client/ayon_ftrack/common/ftrack_server.py b/client/ayon_ftrack/common/ftrack_server.py index 0e47abac..58c64108 100644 --- a/client/ayon_ftrack/common/ftrack_server.py +++ b/client/ayon_ftrack/common/ftrack_server.py @@ -3,46 +3,127 @@ import logging import traceback import types +import inspect import ftrack_api from .python_module_tools import modules_from_path +from .event_handlers import BaseHandler class FtrackServer: + """Helper wrapper to run ftrack server with event handlers. + + Handlers are discovered based on a list of paths. Each path is scanned for + python files which are imported as modules. Each module is checked for + 'register' function or classes inheriting from 'BaseHandler'. If class + inheriting from 'BaseHandler' is found it is instantiated and 'register' + method is called. If 'register' function is found it is called with + ftrack session as argument and 'BaseHandler' from the file are ignored. + + Function 'register' tells discovery system to skip looking for classes. + + Classes that start with '_' are ignored. It is possible to define + attribute `__ignore_handler_class = True` on class definition to mark + a "base class" that will be ignored on discovery, so you can safely import + custom base classes in the files. + """ def __init__(self, handler_paths=None): - """ - - 'type' is by default set to 'action' - Runs Action server - - enter 'event' for Event server - - EXAMPLE FOR EVENT SERVER: - ... - server = FtrackServer() - server.run_server() - .. 
- """ - # set Ftrack logging to Warning only - OPTIONAL ftrack_log = logging.getLogger("ftrack_api") ftrack_log.setLevel(logging.WARNING) self.log = logging.getLogger(__name__) - self.stopped = True - self.is_running = False + self._stopped = True + self._is_running = False + + if handler_paths is None: + handler_paths = [] - self.handler_paths = handler_paths or [] + self._handler_paths = handler_paths + + self._session = None + self._cached_modules = [] + self._cached_objects = [] def stop_session(self): - self.stopped = True - if self.session.event_hub.connected is True: - self.session.event_hub.disconnect() - self.session.close() - self.session = None + session = self._session + self._session = None + self._stopped = True + if session.event_hub.connected is True: + session.event_hub.disconnect() + session.close() - def set_files(self, paths): - # Iterate all paths + def get_session(self): + return self._session + + def get_handler_paths(self): + return self._handler_paths + + def set_handler_paths(self, paths): + if self._is_running: + raise ValueError( + "Cannot change handler paths when server is running." 
+ ) + self._handler_paths = paths + + session = property(get_session) + handler_paths = property(get_handler_paths, set_handler_paths) + + def run_server(self, session=None): + if self._is_running: + raise ValueError("Server is already running.") + self._stopped = False + self._is_running = True + if not session: + session = ftrack_api.Session(auto_connect_event_hub=True) + + # Wait until session has connected event hub + if session._auto_connect_event_hub_thread: + # Use timeout from session (since ftrack-api 2.1.0) + timeout = getattr(session, "request_timeout", 60) + self.log.info("Waiting for event hub to connect") + started = time.time() + while not session.event_hub.connected: + if (time.time() - started) > timeout: + raise RuntimeError(( + "Connection to Ftrack was not created in {} seconds" + ).format(timeout)) + time.sleep(0.1) + + elif not session.event_hub.connected: + self.log.info("Connecting event hub") + session.event_hub.connect() + + self._session = session + if not self._handler_paths: + self.log.warning(( + "Paths to event handlers are not set." + " Ftrack server won't launch." + )) + self._is_running = False + return + + self._load_handlers() + + msg = "Registration of event handlers has finished!" 
+ self.log.info(len(msg) * "*") + self.log.info(msg) + + # keep event_hub on session running + try: + session.event_hub.wait() + finally: + self._is_running = False + self._cached_modules = [] + + def _load_handlers(self): register_functions = [] + handler_classes = [] + + # Iterate all paths + paths = self._handler_paths for path in paths: # Try to format path with environments try: @@ -58,88 +139,72 @@ def set_files(self, paths): )) for filepath, module in modules: - register_function = None - for name, attr in module.__dict__.items(): + self._cached_modules.append(module) + register_function = getattr(module, "register", None) + if register_function is not None: + if isinstance(register_function, types.FunctionType): + register_functions.append( + (filepath, register_function) + ) + else: + self.log.warning( + f"\"{filepath}\"" + " - Found 'register' but it is not a function." + ) + continue + + for attr_name in dir(module): + if attr_name.startswith("_"): + self.log.debug( + f"Skipping private class '{attr_name}'" + ) + continue + + attr = getattr(module, attr_name, None) if ( - name == "register" - and isinstance(attr, types.FunctionType) + not inspect.isclass(attr) + or not issubclass(attr, BaseHandler) + or attr.ignore_handler_class() ): - register_function = attr - break + continue + + if inspect.isabstract(attr): + self.log.warning( + f"Skipping abstract class '{attr_name}'." + ) + continue + handler_classes.append(attr) - if not register_function: + if not handler_classes: self.log.warning( - "\"{}\" - Missing register method".format(filepath) + f"\"{filepath}\"" + " - No 'register' function" + " or 'BaseHandler' classes found." 
) - continue - - register_functions.append( - (filepath, register_function) - ) - if not register_functions: + if not register_functions and not handler_classes: self.log.warning(( - "There are no events with `register` function" - " in registered paths: \"{}\"" - ).format("| ".join(paths))) + "There are no files with `register` function or 'BaseHandler'" + " classes in registered paths:\n- \"{}\"" + ).format("- \n".join(paths))) for filepath, register_func in register_functions: try: - register_func(self.session) + register_func(self._session) except Exception: self.log.warning( - "\"{}\" - register was not successful".format(filepath), + f"\"{filepath}\" - register was not successful", exc_info=True ) - def set_handler_paths(self, paths): - self.handler_paths = paths - if self.is_running: - self.stop_session() - self.run_server() - - elif not self.stopped: - self.run_server() - - def run_server(self, session=None, load_files=True): - self.stopped = False - self.is_running = True - if not session: - session = ftrack_api.Session(auto_connect_event_hub=True) - - # Wait until session has connected event hub - if session._auto_connect_event_hub_thread: - # Use timeout from session (since ftrack-api 2.1.0) - timeout = getattr(session, "request_timeout", 60) - self.log.info("Waiting for event hub to connect") - started = time.time() - while not session.event_hub.connected: - if (time.time() - started) > timeout: - raise RuntimeError(( - "Connection to Ftrack was not created in {} seconds" - ).format(timeout)) - time.sleep(0.1) - - elif not session.event_hub.connected: - self.log.info("Connecting event hub") - session.event_hub.connect() - - self.session = session - if load_files: - if not self.handler_paths: - self.log.warning(( - "Paths to event handlers are not set." - " Ftrack server won't launch." - )) - self.is_running = False - return - - self.set_files(self.handler_paths) - - msg = "Registration of event handlers has finished!" 
- self.log.info(len(msg) * "*") - self.log.info(msg) + for handler_class in handler_classes: + try: + obj = handler_class(self._session) + obj.register() + self._cached_objects.append(obj) - # keep event_hub on session running - self.session.event_hub.wait() - self.is_running = False + except Exception: + self.log.warning( + f"\"{handler_class}\" - register was not successful", + exc_info=True + ) diff --git a/client/ayon_ftrack/common/utils.py b/client/ayon_ftrack/common/utils.py index a94c8b18..5cce573d 100644 --- a/client/ayon_ftrack/common/utils.py +++ b/client/ayon_ftrack/common/utils.py @@ -84,7 +84,7 @@ def get_datetime_data(datetime_obj=None): """Returns current datetime data as dictionary. Note: - This function is copied from 'openpype.lib'. + This function is copied from 'ayon_core.lib'. Args: datetime_obj (datetime): Specific datetime object diff --git a/client/ayon_ftrack/event_handlers_to_convert/action_delete_old_versions.py b/client/ayon_ftrack/event_handlers_to_convert/action_delete_old_versions.py index 7c8dc9bd..a0e3c84a 100644 --- a/client/ayon_ftrack/event_handlers_to_convert/action_delete_old_versions.py +++ b/client/ayon_ftrack/event_handlers_to_convert/action_delete_old_versions.py @@ -3,20 +3,21 @@ import uuid import clique -from pymongo import UpdateOne -from openpype.client import ( - get_assets, - get_subsets, +from ayon_api import ( + get_folders, + get_products, get_versions, - get_representations + get_representations, ) -from openpype.lib import ( +from ayon_api.operations import OperationsSession + +from ayon_core.lib import ( StringTemplate, TemplateUnsolved, format_file_size, ) -from openpype.pipeline import AvalonMongoDB, Anatomy +from ayon_core.pipeline import Anatomy from ayon_ftrack.common import LocalAction from ayon_ftrack.lib import get_ftrack_icon_url @@ -34,8 +35,6 @@ class DeleteOldVersions(LocalAction): settings_key = "delete_old_versions" - dbcon = AvalonMongoDB() - inteface_title = "Choose your preferences" 
splitter_item = {"type": "label", "value": "---"} sequence_splitter = "__sequence_splitter__" @@ -161,12 +160,10 @@ def launch(self, session, entities, event): " and will keep {1} latest version{2}." ).format(_val1, versions_count, _val3)) - self.dbcon.install() - project = None - avalon_asset_names = [] + folder_paths = [] asset_versions_by_parent_id = collections.defaultdict(list) - subset_names_by_asset_name = collections.defaultdict(list) + product_names_by_folder_path = collections.defaultdict(list) ftrack_assets_by_name = {} for entity in entities: @@ -174,10 +171,13 @@ def launch(self, session, entities, event): parent_ent = ftrack_asset["parent"] parent_ftrack_id = parent_ent["id"] - parent_name = parent_ent["name"] - if parent_name not in avalon_asset_names: - avalon_asset_names.append(parent_name) + path_items = [item["name"] for item in entity["link"]] + path_items[0] = "" + folder_path = "/".join(path_items) + + if folder_path not in folder_paths: + folder_paths.append(folder_path) # Group asset versions by parent entity asset_versions_by_parent_id[parent_ftrack_id].append(entity) @@ -186,101 +186,93 @@ def launch(self, session, entities, event): if project is None: project = parent_ent["project"] - # Collect subset names per asset - subset_name = ftrack_asset["name"] - subset_names_by_asset_name[parent_name].append(subset_name) + # Collect product names per asset + product_name = ftrack_asset["name"] + product_names_by_folder_path[folder_path].append(product_name) - if subset_name not in ftrack_assets_by_name: - ftrack_assets_by_name[subset_name] = ftrack_asset + if product_name not in ftrack_assets_by_name: + ftrack_assets_by_name[product_name] = ftrack_asset # Set Mongo collection project_name = project["full_name"] anatomy = Anatomy(project_name) - self.dbcon.Session["AVALON_PROJECT"] = project_name self.log.debug("Project is set to {}".format(project_name)) - # Get Assets from avalon database - assets = list( - get_assets(project_name, 
asset_names=avalon_asset_names) - ) - asset_id_to_name_map = { - asset["_id"]: asset["name"] for asset in assets + # Fetch folders + folder_path_by_id = { + folder_entity["id"]: folder_entity["path"] + for folder_entity in get_folders( + project_name, folder_paths=folder_paths + ) } - asset_ids = list(asset_id_to_name_map.keys()) + folder_ids = set(folder_path_by_id.keys()) - self.log.debug("Collected assets ({})".format(len(asset_ids))) + self.log.debug("Collected assets ({})".format(len(folder_ids))) - # Get Subsets - subsets = list( - get_subsets(project_name, asset_ids=asset_ids) - ) - subsets_by_id = {} - subset_ids = [] - for subset in subsets: - asset_id = subset["parent"] - asset_name = asset_id_to_name_map[asset_id] - available_subsets = subset_names_by_asset_name[asset_name] - - if subset["name"] not in available_subsets: - continue + # Get product entities + product_entities_by_id = { + product_entity["id"]: product_entity + for product_entity in get_products( + project_name, folder_ids=folder_ids + ) + } + # Filter products by available product names + for product_entity in product_entities_by_id.values(): + folder_id = product_entity["folderId"] + folder_path = folder_path_by_id[folder_id] - subset_ids.append(subset["_id"]) - subsets_by_id[subset["_id"]] = subset + available_products = product_names_by_folder_path[folder_path] + if product_entity["name"] not in available_products: + product_id = product_entity["id"] + product_entities_by_id.pop(product_id) - self.log.debug("Collected subsets ({})".format(len(subset_ids))) + product_ids = set(product_entities_by_id.keys()) + + self.log.debug("Collected products ({})".format(len(product_ids))) # Get Versions - versions = list( - get_versions(project_name, subset_ids=subset_ids) - ) + version_entities_by_id = { + version_entity["id"]: version_entity + for version_entity in get_versions( + project_name, + product_ids=product_ids, + hero=False, + active=None + ) + } + # Store all versions by product id even 
inactive entities versions_by_parent = collections.defaultdict(list) - for ent in versions: - versions_by_parent[ent["parent"]].append(ent) + for version_entity in version_entities_by_id.values(): + product_id = version_entity["productId"] + versions_by_parent[product_id].append(version_entity) def sort_func(ent): - return int(ent["name"]) + return ent["version"] - all_last_versions = [] - for parent_id, _versions in versions_by_parent.items(): - for idx, version in enumerate( - sorted(_versions, key=sort_func, reverse=True) + # Filter latest versions + for parent_id, version_entities in versions_by_parent.items(): + for idx, version_entity in enumerate( + sorted(version_entities, key=sort_func, reverse=True) ): if idx >= versions_count: break - all_last_versions.append(version) + version_entities_by_id.pop(version_entity["id"]) - self.log.debug("Collected versions ({})".format(len(versions))) - - # Filter latest versions - for version in all_last_versions: - versions.remove(version) + self.log.debug( + "Collected versions ({})".format(len(version_entities_by_id)) + ) # Update versions_by_parent without filtered versions versions_by_parent = collections.defaultdict(list) - for ent in versions: - versions_by_parent[ent["parent"]].append(ent) - - # Filter already deleted versions - versions_to_pop = [] - for version in versions: - version_tags = version["data"].get("tags") - if version_tags and "deleted" in version_tags: - versions_to_pop.append(version) - - for version in versions_to_pop: - subset = subsets_by_id[version["parent"]] - asset_id = subset["parent"] - asset_name = asset_id_to_name_map[asset_id] - msg = "Asset: \"{}\" | Subset: \"{}\" | Version: \"{}\"".format( - asset_name, subset["name"], version["name"] - ) - self.log.warning(( - "Skipping version. Already tagged as `deleted`. 
< {} >" - ).format(msg)) - versions.remove(version) + for version_entity in version_entities_by_id.values(): + # Filter already deactivated versions + if not version_entity["active"]: + continue + product_id = version_entity["productId"] + versions_by_parent[product_id].append(version_entity) - version_ids = [ent["_id"] for ent in versions] + version_ids = set(version_entities_by_id.keys()) self.log.debug( "Filtered versions to delete ({})".format(len(version_ids)) @@ -294,22 +286,26 @@ def sort_func(ent): "message": msg } - repres = list( + repre_entities = list( get_representations(project_name, version_ids=version_ids) ) self.log.debug( - "Collected representations to remove ({})".format(len(repres)) + "Collected representations to remove ({})".format( + len(repre_entities) + ) ) dir_paths = {} file_paths_by_dir = collections.defaultdict(list) - for repre in repres: - file_path, seq_path = self.path_from_represenation(repre, anatomy) + for repre_entity in repre_entities: + file_path, seq_path = self.path_from_represenation( + repre_entity, anatomy + ) if file_path is None: self.log.warning(( "Could not format path for represenation \"{}\"" - ).format(str(repre))) + ).format(str(repre_entity))) continue dir_path = os.path.dirname(file_path) @@ -345,8 +341,6 @@ def sort_func(ent): ).format(paths_msg)) # Size of files. 
- size = 0 - if only_calculate: if force_to_remove: size = self.delete_whole_dir_paths( @@ -368,44 +362,33 @@ def sort_func(ent): else: size = self.delete_only_repre_files(dir_paths, file_paths_by_dir) - mongo_changes_bulk = [] - for version in versions: - orig_version_tags = version["data"].get("tags") or [] - version_tags = [tag for tag in orig_version_tags] - if "deleted" not in version_tags: - version_tags.append("deleted") - - if version_tags == orig_version_tags: - continue - - update_query = {"_id": version["_id"]} - update_data = {"$set": {"data.tags": version_tags}} - mongo_changes_bulk.append(UpdateOne(update_query, update_data)) - - if mongo_changes_bulk: - self.dbcon.bulk_write(mongo_changes_bulk) + op_session = OperationsSession() + for version_entity in version_entities_by_id.values(): + op_session.update_entity( + project_name, + "version", + version_entity["id"], + {"active": False} + ) - self.dbcon.uninstall() + op_session.commit() # Set attribute `is_published` to `False` on ftrack AssetVersions - for subset_id, _versions in versions_by_parent.items(): - subset_name = None - for subset in subsets: - if subset["_id"] == subset_id: - subset_name = subset["name"] - break - - if subset_name is None: + for product_id, _versions in versions_by_parent.items(): + product_entity = product_entities_by_id.get(product_id) + if product_entity is None: self.log.warning( - "Subset with ID `{}` was not found.".format(str(subset_id)) + "Product with ID `{}` was not found.".format(str(product_id)) ) continue - ftrack_asset = ftrack_assets_by_name.get(subset_name) + product_name = product_entity["name"] + + ftrack_asset = ftrack_assets_by_name.get(product_name) if not ftrack_asset: self.log.warning(( "Could not find Ftrack asset with name `{}`" - ).format(subset_name)) + ).format(product_name)) continue version_numbers = [int(ver["name"]) for ver in _versions] @@ -576,9 +559,3 @@ def path_from_represenation(self, representation, anatomy): return (None, None) return 
(os.path.normpath(path), sequence_path) - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - - DeleteOldVersions(session).register() diff --git a/client/ayon_ftrack/event_handlers_to_convert/action_djvview.py b/client/ayon_ftrack/event_handlers_to_convert/action_djvview.py deleted file mode 100644 index 3ac14d72..00000000 --- a/client/ayon_ftrack/event_handlers_to_convert/action_djvview.py +++ /dev/null @@ -1,239 +0,0 @@ -import os -import time -import subprocess -from operator import itemgetter -from openpype.lib import ApplicationManager -from ayon_ftrack.common import LocalAction -from ayon_ftrack.lib import statics_icon - - -class DJVViewAction(LocalAction): - """Launch DJVView action.""" - identifier = "djvview-launch-action" - label = "DJV View" - description = "DJV View Launcher" - icon = statics_icon("app_icons", "djvView.png") - - type = "Application" - - allowed_types = [ - "cin", "dpx", "avi", "dv", "gif", "flv", "mkv", "mov", "mpg", "mpeg", - "mp4", "m4v", "mxf", "iff", "z", "ifl", "jpeg", "jpg", "jfif", "lut", - "1dl", "exr", "pic", "png", "ppm", "pnm", "pgm", "pbm", "rla", "rpf", - "sgi", "rgba", "rgb", "bw", "tga", "tiff", "tif", "img" - ] - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - self.application_manager = ApplicationManager() - self._last_check = time.time() - self._check_interval = 10 - - def _get_djv_apps(self): - app_group = self.application_manager.app_groups["djvview"] - - output = [] - for app in app_group: - executable = app.find_executable() - if executable is not None: - output.append(app) - return output - - def get_djv_apps(self): - cur_time = time.time() - if (cur_time - self._last_check) > self._check_interval: - self.application_manager.refresh() - return self._get_djv_apps() - - def discover(self, session, entities, event): - """Return available actions based on *event*. 
""" - selection = event["data"].get("selection", []) - if len(selection) != 1: - return False - - entityType = selection[0].get("entityType", None) - if entityType not in ["assetversion", "task"]: - return False - - if self.get_djv_apps(): - return True - return False - - def interface(self, session, entities, event): - if event["data"].get("values", {}): - return - - entity = entities[0] - versions = [] - - entity_type = entity.entity_type.lower() - if entity_type == "assetversion": - if ( - entity[ - "components" - ][0]["file_type"][1:] in self.allowed_types - ): - versions.append(entity) - else: - master_entity = entity - if entity_type == "task": - master_entity = entity["parent"] - - for asset in master_entity["assets"]: - for version in asset["versions"]: - # Get only AssetVersion of selected task - if ( - entity_type == "task" and - version["task"]["id"] != entity["id"] - ): - continue - # Get only components with allowed type - filetype = version["components"][0]["file_type"] - if filetype[1:] in self.allowed_types: - versions.append(version) - - if len(versions) < 1: - return { - "success": False, - "message": "There are no Asset Versions to open." - } - - # TODO sort them (somehow?) - enum_items = [] - first_value = None - for app in self.get_djv_apps(): - if first_value is None: - first_value = app.full_name - enum_items.append({ - "value": app.full_name, - "label": app.full_label - }) - - if not enum_items: - return { - "success": False, - "message": "Couldn't find DJV executable." 
- } - - items = [ - { - "type": "enumerator", - "label": "DJV version:", - "name": "djv_app_name", - "data": enum_items, - "value": first_value - }, - { - "type": "label", - "value": "---" - } - ] - version_items = [] - base_label = "v{0} - {1} - {2}" - default_component = None - last_available = None - select_value = None - for version in versions: - for component in version["components"]: - label = base_label.format( - str(version["version"]).zfill(3), - version["asset"]["type"]["name"], - component["name"] - ) - - try: - location = component[ - "component_locations" - ][0]["location"] - file_path = location.get_filesystem_path(component) - except Exception: - file_path = component[ - "component_locations" - ][0]["resource_identifier"] - - if os.path.isdir(os.path.dirname(file_path)): - last_available = file_path - if component["name"] == default_component: - select_value = file_path - version_items.append( - {"label": label, "value": file_path} - ) - - if len(version_items) == 0: - return { - "success": False, - "message": ( - "There are no Asset Versions with accessible path." - ) - } - - item = { - "label": "Items to view", - "type": "enumerator", - "name": "path", - "data": sorted( - version_items, - key=itemgetter("label"), - reverse=True - ) - } - if select_value is not None: - item["value"] = select_value - else: - item["value"] = last_available - - items.append(item) - - return {"items": items} - - def launch(self, session, entities, event): - """Callback method for DJVView action.""" - - # Launching application - event_values = event["data"].get("value") - if not event_values: - return - - djv_app_name = event_values["djv_app_name"] - app = self.application_manager.applications.get(djv_app_name) - executable = None - if app is not None: - executable = app.find_executable() - - if not executable: - return { - "success": False, - "message": "Couldn't find DJV executable." 
- } - - filpath = os.path.normpath(event_values["path"]) - - cmd = [ - # DJV path - str(executable), - # PATH TO COMPONENT - filpath - ] - - try: - # Run DJV with these commands - _process = subprocess.Popen(cmd) - # Keep process in memory for some time - time.sleep(0.1) - - except FileNotFoundError: - return { - "success": False, - "message": "File \"{}\" was not found.".format( - os.path.basename(filpath) - ) - } - - return True - - -def register(session): - """Register hooks.""" - - DJVViewAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_to_convert/action_fill_workfile_attr.py b/client/ayon_ftrack/event_handlers_to_convert/action_fill_workfile_attr.py index 1488d160..0e6c892c 100644 --- a/client/ayon_ftrack/event_handlers_to_convert/action_fill_workfile_attr.py +++ b/client/ayon_ftrack/event_handlers_to_convert/action_fill_workfile_attr.py @@ -6,16 +6,17 @@ import datetime import ftrack_api - -from openpype.client import ( +from ayon_api import ( get_project, - get_assets, + get_folders, + get_tasks, ) -from openpype.settings import get_project_settings, get_system_settings -from openpype.lib import StringTemplate -from openpype.pipeline import Anatomy -from openpype.pipeline.template_data import get_template_data -from openpype.pipeline.workfile import get_workfile_template_key + +from ayon_core.settings import get_project_settings +from ayon_core.lib import StringTemplate +from ayon_core.pipeline import Anatomy +from ayon_core.pipeline.template_data import get_template_data +from ayon_core.pipeline.workfile import get_workfile_template_key from ayon_ftrack.common import LocalAction, create_chunks from ayon_ftrack.lib import get_ftrack_icon_url @@ -53,13 +54,13 @@ def discover(self, session, entities, event): def launch(self, session, entities, event): # Separate entities and get project entity - project_entity = None + ft_project_entity = None for entity in entities: - if project_entity is None: - project_entity = 
self.get_project_from_entity(entity) + if ft_project_entity is None: + ft_project_entity = self.get_project_from_entity(entity) break - if not project_entity: + if not ft_project_entity: return { "message": ( "Couldn't find project entity." @@ -70,7 +71,7 @@ def launch(self, session, entities, event): # Get project settings and check if custom attribute where workfile # should be set is defined. - project_name = project_entity["full_name"] + project_name = ft_project_entity["full_name"] project_settings = get_project_settings(project_name) custom_attribute_key = ( project_settings @@ -132,7 +133,7 @@ def launch(self, session, entities, event): session, entities, job_entity, - project_entity, + ft_project_entity, project_settings, attr_conf, report @@ -225,12 +226,12 @@ def in_job_process( session, entities, job_entity, - project_entity, + ft_project_entity, project_settings, attr_conf, report ): - task_entities = [] + ft_task_entities = [] other_entities = [] project_selected = False for entity in entities: @@ -240,31 +241,49 @@ def in_job_process( break elif ent_type_low == "task": - task_entities.append(entity) + ft_task_entities.append(entity) else: other_entities.append(entity) - project_name = project_entity["full_name"] + project_name = ft_project_entity["full_name"] # Find matchin asset documents and map them by ftrack task entities - # - result stored to 'asset_docs_with_task_entities' is list with - # tuple `(asset document, [task entitis, ...])` - # Quety all asset documents - asset_docs = list(get_assets(project_name)) + # - result stored to 'folder_entities_with_ft_task_entities' is list + # with a tuple `(folder entity, [ftrack task entitis, ...])` + # Fetch all folder and task entities + folder_entities = list(get_folders( + project_name, fields={"id", "folderType", "path", "attrib"} + )) + task_entities_by_folder_id = collections.defaultdict(list) + for task_entity in get_tasks( + project_name, fields={"id", "taskType", "name", "folderId"} + ): + 
folder_id = task_entity["folderId"] + task_entities_by_folder_id[folder_id].append(task_entity) + job_entity["data"] = json.dumps({ - "description": "(1/3) Asset documents queried." + "description": "(1/3) Folder & Task entities queried." }) session.commit() # When project is selected then we can query whole project if project_selected: - asset_docs_with_task_entities = self._get_asset_docs_for_project( - session, project_entity, asset_docs, report + folder_entities_with_ft_task_entities = self._get_asset_docs_for_project( + session, + ft_project_entity, + folder_entities, + task_entities_by_folder_id, + report ) else: - asset_docs_with_task_entities = self._get_tasks_for_selection( - session, other_entities, task_entities, asset_docs, report + folder_entities_with_ft_task_entities = self._get_tasks_for_selection( + session, + other_entities, + ft_task_entities, + folder_entities, + task_entities_by_folder_id, + report ) job_entity["data"] = json.dumps({ @@ -275,27 +294,50 @@ def in_job_process( # Keep placeholders in the template unfilled host_name = "{app}" extension = "{ext}" - project_doc = get_project(project_name) + project_entity = get_project(project_name) project_settings = get_project_settings(project_name) - system_settings = get_system_settings() anatomy = Anatomy(project_name) templates_by_key = {} operations = [] - for asset_doc, task_entities in asset_docs_with_task_entities: - for task_entity in task_entities: + for folder_entity, ft_task_entities in folder_entities_with_ft_task_entities: + folder_id = folder_entity["id"] + folder_path = folder_entity["path"] + task_entities_by_name = { + task_entity["name"]: task_entity + for task_entity in task_entities_by_folder_id[folder_id] + } + task_entities_by_low_name = { + name.lower(): task_entity + for name, task_entity in task_entities_by_name.items() + } + for ft_task_entity in ft_task_entities: + task_name = ft_task_entity["name"] + task_entity = task_entities_by_name.get(task_name) + if not 
task_entity: + task_entity = task_entities_by_low_name.get( + task_name.lower() + ) + + if not task_entity: + self.log.warning( + f"Coulnd't find task entity \"{task_name}\"" + f" for folder \"{folder_path}\"" + ) + continue + workfile_data = get_template_data( - project_doc, - asset_doc, - task_entity["name"], + project_entity, + folder_entity, + task_entity, host_name, - system_settings + project_settings ) # Use version 1 for each workfile workfile_data["version"] = 1 workfile_data["ext"] = extension - task_type = workfile_data["task"]["type"] + task_type = task_entity["taskType"] template_key = get_workfile_template_key( task_type, host_name, @@ -317,7 +359,7 @@ def in_job_process( else: table_values = collections.OrderedDict(( ("configuration_id", attr_conf["id"]), - ("entity_id", task_entity["id"]) + ("entity_id", ft_task_entity["id"]) )) operations.append( ftrack_api.operation.UpdateEntityOperation( @@ -348,145 +390,151 @@ def _get_entity_path(self, entity): return "/".join(path_items) def _get_asset_docs_for_project( - self, session, project_entity, asset_docs, report + self, + session, + ft_project_entity, + folder_entities, + task_entities_by_folder_id, + report, ): - asset_docs_task_names = {} - - for asset_doc in asset_docs: - asset_data = asset_doc["data"] - ftrack_id = asset_data.get("ftrackId") + folder_entity_task_names = {} + for folder_entity in folder_entities: + ftrack_id = folder_entity["attrib"].get("ftrackId") if not ftrack_id: - hierarchy = list(asset_data.get("parents") or []) - hierarchy.append(asset_doc["name"]) - path = "/".join(hierarchy) + path = folder_entity["path"] report[NOT_SYNCHRONIZED_TITLE].append(path) continue - asset_tasks = asset_data.get("tasks") or {} - asset_docs_task_names[ftrack_id] = ( - asset_doc, list(asset_tasks.keys()) - ) + folder_id = folder_entity["id"] + task_names = { + task_entity["name"] + for task_entity in task_entities_by_folder_id[folder_id] + } + folder_entity_task_names[ftrack_id] = (folder_entity, 
task_names) - task_entities = session.query(( + ft_task_entities = session.query(( "select id, name, parent_id, link from Task where project_id is {}" - ).format(project_entity["id"])).all() - task_entities_by_parent_id = collections.defaultdict(list) - for task_entity in task_entities: - parent_id = task_entity["parent_id"] - task_entities_by_parent_id[parent_id].append(task_entity) + ).format(ft_project_entity["id"])).all() + ft_task_entities_by_parent_id = collections.defaultdict(list) + for ft_task_entity in ft_task_entities: + parent_id = ft_task_entity["parent_id"] + ft_task_entities_by_parent_id[parent_id].append(ft_task_entity) output = [] - for ftrack_id, item in asset_docs_task_names.items(): - asset_doc, task_names = item - valid_task_entities = [] - for task_entity in task_entities_by_parent_id[ftrack_id]: - if task_entity["name"] in task_names: - valid_task_entities.append(task_entity) + for ftrack_id, item in folder_entity_task_names.items(): + folder_entity, task_names = item + valid_ft_task_entities = [] + for ft_task_entity in ft_task_entities_by_parent_id[ftrack_id]: + if ft_task_entity["name"] in task_names: + valid_ft_task_entities.append(ft_task_entity) else: - path = self._get_entity_path(task_entity) + path = self._get_entity_path(ft_task_entity) report[NOT_SYNCHRONIZED_TITLE].append(path) - if valid_task_entities: - output.append((asset_doc, valid_task_entities)) + if valid_ft_task_entities: + output.append((folder_entity, valid_ft_task_entities)) return output def _get_tasks_for_selection( - self, session, other_entities, task_entities, asset_docs, report + self, + session, + other_entities, + ft_task_entities, + folder_entities, + task_entities_by_folder_id, + report, ): all_tasks = object() - asset_docs_by_ftrack_id = {} - asset_docs_by_parent_id = collections.defaultdict(list) - for asset_doc in asset_docs: - asset_data = asset_doc["data"] - ftrack_id = asset_data.get("ftrackId") - parent_id = asset_data.get("visualParent") - 
asset_docs_by_parent_id[parent_id].append(asset_doc) + folder_entities_by_ftrack_id = {} + for folder_entity in folder_entities: + ftrack_id = folder_entity["attrib"].get("ftrackId") if ftrack_id: - asset_docs_by_ftrack_id[ftrack_id] = asset_doc + folder_entities_by_ftrack_id[ftrack_id] = folder_entity - missing_doc_ftrack_ids = {} + missing_entity_ftrack_ids = {} all_tasks_ids = set() task_names_by_ftrack_id = collections.defaultdict(list) for other_entity in other_entities: ftrack_id = other_entity["id"] - if ftrack_id not in asset_docs_by_ftrack_id: - missing_doc_ftrack_ids[ftrack_id] = None + if ftrack_id not in folder_entities_by_ftrack_id: + missing_entity_ftrack_ids[ftrack_id] = None continue all_tasks_ids.add(ftrack_id) task_names_by_ftrack_id[ftrack_id] = all_tasks - for task_entity in task_entities: - parent_id = task_entity["parent_id"] - if parent_id not in asset_docs_by_ftrack_id: - missing_doc_ftrack_ids[parent_id] = None + for ft_task_entity in ft_task_entities: + parent_id = ft_task_entity["parent_id"] + if parent_id not in folder_entities_by_ftrack_id: + missing_entity_ftrack_ids[parent_id] = None continue if all_tasks_ids not in all_tasks_ids: - task_names_by_ftrack_id[ftrack_id].append(task_entity["name"]) + task_names_by_ftrack_id[ftrack_id].append(ft_task_entity["name"]) ftrack_ids = set() - asset_doc_with_task_names_by_id = {} + folder_entity_with_task_names_by_id = {} for ftrack_id, task_names in task_names_by_ftrack_id.items(): - asset_doc = asset_docs_by_ftrack_id[ftrack_id] - asset_data = asset_doc["data"] - asset_tasks = asset_data.get("tasks") or {} + folder_entity = folder_entities_by_ftrack_id[ftrack_id] + folder_id = folder_entity["id"] + folder_task_names = { + task_entity["name"] + for task_entity in task_entities_by_folder_id[folder_id] + } if task_names is all_tasks: - task_names = list(asset_tasks.keys()) + task_names = list(folder_task_names) else: new_task_names = [] for task_name in task_names: - if task_name in asset_tasks: + 
if task_name in folder_task_names: new_task_names.append(task_name) continue - if ftrack_id not in missing_doc_ftrack_ids: - missing_doc_ftrack_ids[ftrack_id] = [] - if missing_doc_ftrack_ids[ftrack_id] is not None: - missing_doc_ftrack_ids[ftrack_id].append(task_name) + missing_entity_ftrack_ids.setdefault(ftrack_id, []) + if missing_entity_ftrack_ids[ftrack_id] is not None: + missing_entity_ftrack_ids[ftrack_id].append(task_name) task_names = new_task_names if task_names: ftrack_ids.add(ftrack_id) - asset_doc_with_task_names_by_id[ftrack_id] = ( - asset_doc, task_names + folder_entity_with_task_names_by_id[ftrack_id] = ( + folder_entity, task_names ) - task_entities = session.query(( + ft_task_entities = session.query(( "select id, name, parent_id from Task where parent_id in ({})" ).format(self.join_query_keys(ftrack_ids))).all() task_entitiy_by_parent_id = collections.defaultdict(list) - for task_entity in task_entities: - parent_id = task_entity["parent_id"] - task_entitiy_by_parent_id[parent_id].append(task_entity) + for ft_task_entity in ft_task_entities: + parent_id = ft_task_entity["parent_id"] + task_entitiy_by_parent_id[parent_id].append(ft_task_entity) output = [] - for ftrack_id, item in asset_doc_with_task_names_by_id.items(): + for ftrack_id, item in folder_entity_with_task_names_by_id.items(): asset_doc, task_names = item - valid_task_entities = [] - for task_entity in task_entitiy_by_parent_id[ftrack_id]: - if task_entity["name"] in task_names: - valid_task_entities.append(task_entity) + valid_ft_task_entities = [] + for ft_task_entity in task_entitiy_by_parent_id[ftrack_id]: + if ft_task_entity["name"] in task_names: + valid_ft_task_entities.append(ft_task_entity) else: - if ftrack_id not in missing_doc_ftrack_ids: - missing_doc_ftrack_ids[ftrack_id] = [] - if missing_doc_ftrack_ids[ftrack_id] is not None: - missing_doc_ftrack_ids[ftrack_id].append(task_name) - if valid_task_entities: - output.append((asset_doc, valid_task_entities)) + 
missing_entity_ftrack_ids.setdefault(ftrack_id, []) + if missing_entity_ftrack_ids[ftrack_id] is not None: + missing_entity_ftrack_ids[ftrack_id].append(task_name) + if valid_ft_task_entities: + output.append((asset_doc, valid_ft_task_entities)) # Store report information about not synchronized entities - if missing_doc_ftrack_ids: + if missing_entity_ftrack_ids: missing_entities = session.query( "select id, link from TypedContext where id in ({})".format( - self.join_query_keys(missing_doc_ftrack_ids.keys()) + self.join_query_keys(missing_entity_ftrack_ids.keys()) ) ).all() for missing_entity in missing_entities: path = self._get_entity_path(missing_entity) - task_names = missing_doc_ftrack_ids[missing_entity["id"]] + task_names = missing_entity_ftrack_ids[missing_entity["id"]] if task_names is None: report[NOT_SYNCHRONIZED_TITLE].append(path) else: @@ -495,7 +543,3 @@ def _get_tasks_for_selection( report[NOT_SYNCHRONIZED_TITLE].append(task_path) return output - - -def register(session): - FillWorkfileAttributeAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_to_convert/action_prepare_project.py b/client/ayon_ftrack/event_handlers_to_convert/action_prepare_project.py deleted file mode 100644 index 22fae684..00000000 --- a/client/ayon_ftrack/event_handlers_to_convert/action_prepare_project.py +++ /dev/null @@ -1,450 +0,0 @@ -import json -import copy - -from openpype.client import get_project, create_project -from openpype.settings import ProjectSettings, SaveWarningExc - -from ayon_ftrack.common import ( - LocalAction, - get_ayon_attr_configs, - CUST_ATTR_AUTO_SYNC, -) -from ayon_ftrack.lib import get_ftrack_icon_url - - -class PrepareProjectLocal(LocalAction): - """Prepare project attributes in Anatomy.""" - - identifier = "prepare.project.local" - label = "Prepare Project" - description = "Set basic attributes on the project" - icon = get_ftrack_icon_url("PrepareProject.svg") - - role_list = ["Administrator", "Project Manager"] - - 
settings_key = "prepare_project" - - # Key to store info about trigerring create folder structure - create_project_structure_key = "create_folder_structure" - create_project_structure_identifier = "ayon.create.project.structure" - item_splitter = {"type": "label", "value": "---"} - _keys_order = ( - "fps", - "frameStart", - "frameEnd", - "handleStart", - "handleEnd", - "clipIn", - "clipOut", - "resolutionHeight", - "resolutionWidth", - "pixelAspect", - "applications", - "tools_env", - "library_project", - ) - - def discover(self, session, entities, event): - """Show only on project.""" - if ( - len(entities) != 1 - or entities[0].entity_type.lower() != "project" - ): - return False - - return self.valid_roles(session, entities, event) - - def interface(self, session, entities, event): - if event['data'].get('values', {}): - return - - # Inform user that this may take a while - self.show_message(event, "Preparing data... Please wait", True) - self.log.debug("Preparing data which will be shown") - - self.log.debug("Loading custom attributes") - - project_entity = entities[0] - project_name = project_entity["full_name"] - - project_settings = ProjectSettings(project_name) - - project_anatom_settings = project_settings["project_anatomy"] - root_items = self.prepare_root_items(project_anatom_settings) - - ca_items, multiselect_enumerators = ( - self.prepare_custom_attribute_items(project_anatom_settings) - ) - - self.log.debug("Heavy items are ready. Preparing last items group.") - - title = "Prepare Project" - items = [] - - # Add root items - items.extend(root_items) - - items.append(self.item_splitter) - items.append({ - "type": "label", - "value": "

Set basic Attributes:

" - }) - - items.extend(ca_items) - - # Set value of auto synchronization - auto_sync_value = project_entity["custom_attributes"].get( - CUST_ATTR_AUTO_SYNC, False - ) - auto_sync_item = { - "name": CUST_ATTR_AUTO_SYNC, - "type": "boolean", - "value": auto_sync_value, - "label": "AutoSync to Avalon" - } - # Add autosync attribute - items.append(auto_sync_item) - - # This item will be last before enumerators - # Ask if want to trigger Action Create Folder Structure - create_project_structure_checked = ( - project_settings - ["project_settings"] - ["ftrack"] - ["user_handlers"] - ["prepare_project"] - ["create_project_structure_checked"] - ).value - items.append({ - "type": "label", - "value": "

Want to create basic Folder Structure?

" - }) - items.append({ - "name": self.create_project_structure_key, - "type": "boolean", - "value": create_project_structure_checked, - "label": "Check if Yes" - }) - - # Add enumerator items at the end - for item in multiselect_enumerators: - items.append(item) - - return { - "items": items, - "title": title - } - - def prepare_root_items(self, project_anatom_settings): - self.log.debug("Root items preparation begins.") - - root_items = [] - root_items.append({ - "type": "label", - "value": "

Check your Project root settings

" - }) - root_items.append({ - "type": "label", - "value": ( - "

NOTE: Roots are crutial for path filling" - " (and creating folder structure).

" - ) - }) - root_items.append({ - "type": "label", - "value": ( - "

WARNING: Do not change roots on running project," - " that will cause workflow issues.

" - ) - }) - - empty_text = "Enter root path here..." - - roots_entity = project_anatom_settings["roots"] - for root_name, root_entity in roots_entity.items(): - root_items.append(self.item_splitter) - root_items.append({ - "type": "label", - "value": "Root: \"{}\"".format(root_name) - }) - for platform_name, value_entity in root_entity.items(): - root_items.append({ - "label": platform_name, - "name": "__root__{}__{}".format(root_name, platform_name), - "type": "text", - "value": value_entity.value, - "empty_text": empty_text - }) - - root_items.append({ - "type": "hidden", - "name": "__rootnames__", - "value": json.dumps(list(roots_entity.keys())) - }) - - self.log.debug("Root items preparation ended.") - return root_items - - def _attributes_to_set(self, project_anatom_settings): - attributes_to_set = {} - - attribute_values_by_key = {} - for key, entity in project_anatom_settings["attributes"].items(): - attribute_values_by_key[key] = entity.value - - cust_attrs, hier_cust_attrs = get_ayon_attr_configs( - self.session, split_hierarchical=True - ) - - for attr in hier_cust_attrs: - key = attr["key"] - if key.startswith("avalon_"): - continue - attributes_to_set[key] = { - "label": attr["label"], - "object": attr, - "default": attribute_values_by_key.get(key) - } - - for attr in cust_attrs: - if attr["entity_type"].lower() != "show": - continue - key = attr["key"] - if key.startswith("avalon_"): - continue - attributes_to_set[key] = { - "label": attr["label"], - "object": attr, - "default": attribute_values_by_key.get(key) - } - - # Sort by label - attributes_to_set = dict(sorted( - attributes_to_set.items(), - key=lambda x: x[1]["label"] - )) - return attributes_to_set - - def prepare_custom_attribute_items(self, project_anatom_settings): - items = [] - multiselect_enumerators = [] - attributes_to_set = self._attributes_to_set(project_anatom_settings) - - self.log.debug("Preparing interface for keys: \"{}\"".format( - str([key for key in attributes_to_set]) - )) 
- - attribute_keys = set(attributes_to_set.keys()) - keys_order = [] - for key in self._keys_order: - if key in attribute_keys: - keys_order.append(key) - - attribute_keys = attribute_keys - set(keys_order) - for key in sorted(attribute_keys): - keys_order.append(key) - - for key in keys_order: - in_data = attributes_to_set[key] - attr = in_data["object"] - - # initial item definition - item = { - "name": key, - "label": in_data["label"] - } - - # cust attr type - may have different visualization - type_name = attr["type"]["name"].lower() - easy_types = ["text", "boolean", "date", "number"] - - easy_type = False - if type_name in easy_types: - easy_type = True - - elif type_name == "enumerator": - - attr_config = json.loads(attr["config"]) - attr_config_data = json.loads(attr_config["data"]) - - if attr_config["multiSelect"] is True: - multiselect_enumerators.append(self.item_splitter) - multiselect_enumerators.append({ - "type": "label", - "value": "

{}

".format(in_data["label"]) - }) - - default = in_data["default"] - names = [] - for option in sorted( - attr_config_data, key=lambda x: x["menu"] - ): - name = option["value"] - new_name = "__{}__{}".format(key, name) - names.append(new_name) - item = { - "name": new_name, - "type": "boolean", - "label": "- {}".format(option["menu"]) - } - if default: - if isinstance(default, (list, tuple)): - if name in default: - item["value"] = True - else: - if name == default: - item["value"] = True - - multiselect_enumerators.append(item) - - multiselect_enumerators.append({ - "type": "hidden", - "name": "__hidden__{}".format(key), - "value": json.dumps(names) - }) - else: - easy_type = True - item["data"] = attr_config_data - - else: - self.log.warning(( - "Custom attribute \"{}\" has type \"{}\"." - " I don't know how to handle" - ).format(key, type_name)) - items.append({ - "type": "label", - "value": ( - "!!! Can't handle Custom attritubte type \"{}\"" - " (key: \"{}\")" - ).format(type_name, key) - }) - - if easy_type: - item["type"] = type_name - - # default value in interface - default = in_data["default"] - if default is not None: - item["value"] = default - - items.append(item) - - return items, multiselect_enumerators - - def launch(self, session, entities, event): - in_data = event["data"].get("values") - if not in_data: - return - - create_project_structure_checked = in_data.pop( - self.create_project_structure_key - ) - - root_values = {} - root_key = "__root__" - for key in tuple(in_data.keys()): - if key.startswith(root_key): - _key = key[len(root_key):] - root_values[_key] = in_data.pop(key) - - root_names = in_data.pop("__rootnames__", None) - root_data = {} - for root_name in json.loads(root_names): - root_data[root_name] = {} - for key, value in tuple(root_values.items()): - prefix = "{}__".format(root_name) - if not key.startswith(prefix): - continue - - _key = key[len(prefix):] - root_data[root_name][_key] = value - - # Find hidden items for multiselect 
enumerators - keys_to_process = [] - for key in in_data: - if key.startswith("__hidden__"): - keys_to_process.append(key) - - self.log.debug("Preparing data for Multiselect Enumerators") - enumerators = {} - for key in keys_to_process: - new_key = key.replace("__hidden__", "") - enumerator_items = in_data.pop(key) - enumerators[new_key] = json.loads(enumerator_items) - - # find values set for multiselect enumerator - for key, enumerator_items in enumerators.items(): - in_data[key] = [] - - name = "__{}__".format(key) - - for item in enumerator_items: - value = in_data.pop(item) - if value is True: - new_key = item.replace(name, "") - in_data[key].append(new_key) - - self.log.debug("Setting Custom Attribute values") - - project_entity = entities[0] - project_name = project_entity["full_name"] - - # Try to find project document - project_doc = get_project(project_name) - - # Create project if is not available - # - creation is required to be able set project anatomy and attributes - if not project_doc: - project_code = project_entity["name"] - self.log.info("Creating project \"{} [{}]\"".format( - project_name, project_code - )) - create_project(project_name, project_code) - self.trigger_event( - "ayon.project.created", - {"project_name": project_name} - ) - - project_settings = ProjectSettings(project_name) - project_anatomy_settings = project_settings["project_anatomy"] - project_anatomy_settings["roots"] = root_data - - custom_attribute_values = {} - attributes_entity = project_anatomy_settings["attributes"] - for key, value in in_data.items(): - if key not in attributes_entity: - custom_attribute_values[key] = value - else: - attributes_entity[key] = value - - try: - project_settings.save() - except SaveWarningExc as exc: - self.log.info("Few warnings happened during settings save:") - for warning in exc.warnings: - self.log.info(str(warning)) - - # Change custom attributes on project - if custom_attribute_values: - for key, value in 
custom_attribute_values.items(): - project_entity["custom_attributes"][key] = value - self.log.debug("- Key \"{}\" set to \"{}\"".format(key, value)) - session.commit() - - # Trigger create project structure action - if create_project_structure_checked: - trigger_identifier = "{}.{}".format( - self.create_project_structure_identifier, - self.process_identifier() - ) - self.trigger_action(trigger_identifier, event) - - event_data = copy.deepcopy(in_data) - event_data["project_name"] = project_name - self.trigger_event("ayon.project.prepared", event_data) - return True - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - PrepareProjectLocal(session).register() diff --git a/client/ayon_ftrack/event_handlers_to_convert/action_rv.py b/client/ayon_ftrack/event_handlers_to_convert/action_rv.py deleted file mode 100644 index fe8e3abf..00000000 --- a/client/ayon_ftrack/event_handlers_to_convert/action_rv.py +++ /dev/null @@ -1,332 +0,0 @@ -import os -import subprocess -import traceback -import json - -import ftrack_api - -from openpype.client import ( - get_asset_by_name, - get_subset_by_name, - get_version_by_name, - get_representation_by_name -) -from openpype.pipeline import ( - get_representation_path, - AvalonMongoDB, - Anatomy, -) -from ayon_ftrack.common import LocalAction -from ayon_ftrack.lib import statics_icon - - -class RVAction(LocalAction): - """ Launch RV action """ - identifier = "rv.launch.action" - label = "rv" - description = "rv Launcher" - icon = statics_icon("ftrack", "action_icons", "RV.png") - - type = 'Application' - - allowed_types = ["img", "mov", "exr", "mp4"] - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - - # QUESTION load RV application data from AppplicationManager? 
- rv_path = None - - # RV_HOME should be set if properly installed - if os.environ.get('RV_HOME'): - rv_path = os.path.join( - os.environ.get('RV_HOME'), - 'bin', - 'rv' - ) - if not os.path.exists(rv_path): - rv_path = None - - if not rv_path: - self.log.info("RV path was not found.") - self.ignore_me = True - - self.rv_path = rv_path - - def discover(self, session, entities, event): - """Return available actions based on *event*. """ - return True - - def preregister(self): - if self.rv_path is None: - return ( - 'RV is not installed or paths in presets are not set correctly' - ) - return True - - def get_components_from_entity(self, session, entity, components): - """Get components from various entity types. - - The components dictionary is modifid in place, so nothing is returned. - - Args: - entity (Ftrack entity) - components (dict) - """ - - if entity.entity_type.lower() == "assetversion": - for component in entity["components"]: - if component["file_type"][1:] not in self.allowed_types: - continue - - try: - components[entity["asset"]["parent"]["name"]].append( - component - ) - except KeyError: - components[entity["asset"]["parent"]["name"]] = [component] - - return - - if entity.entity_type.lower() == "task": - query = "AssetVersion where task_id is '{0}'".format(entity["id"]) - for assetversion in session.query(query): - self.get_components_from_entity( - session, assetversion, components - ) - - return - - if entity.entity_type.lower() == "shot": - query = "AssetVersion where asset.parent.id is '{0}'".format( - entity["id"] - ) - for assetversion in session.query(query): - self.get_components_from_entity( - session, assetversion, components - ) - - return - - raise NotImplementedError( - "\"{}\" entity type is not implemented yet.".format( - entity.entity_type - ) - ) - - def interface(self, session, entities, event): - if event['data'].get('values', {}): - return - - user = session.query( - "User where username is '{0}'".format( - 
os.environ["FTRACK_API_USER"] - ) - ).one() - job = session.create( - "Job", - { - "user": user, - "status": "running", - "data": json.dumps({ - "description": "RV: Collecting components." - }) - } - ) - # Commit to feedback to user. - session.commit() - - items = [] - try: - items = self.get_interface_items(session, entities) - except Exception: - self.log.error(traceback.format_exc()) - job["status"] = "failed" - else: - job["status"] = "done" - - # Commit to end job. - session.commit() - - return {"items": items} - - def get_interface_items(self, session, entities): - - components = {} - for entity in entities: - self.get_components_from_entity(session, entity, components) - - # Sort by version - for parent_name, entities in components.items(): - version_mapping = {} - for entity in entities: - try: - version_mapping[entity["version"]["version"]].append( - entity - ) - except KeyError: - version_mapping[entity["version"]["version"]] = [entity] - - # Sort same versions by date. - for version, entities in version_mapping.items(): - version_mapping[version] = sorted( - entities, key=lambda x: x["version"]["date"], reverse=True - ) - - components[parent_name] = [] - for version in reversed(sorted(version_mapping.keys())): - components[parent_name].extend(version_mapping[version]) - - # Items to present to user. 
- items = [] - label = "{} - v{} - {}" - for parent_name, entities in components.items(): - data = [] - for entity in entities: - data.append( - { - "label": label.format( - entity["version"]["asset"]["name"], - str(entity["version"]["version"]).zfill(3), - entity["file_type"][1:] - ), - "value": entity["id"] - } - ) - - items.append( - { - "label": parent_name, - "type": "enumerator", - "name": parent_name, - "data": data, - "value": data[0]["value"] - } - ) - - return items - - def launch(self, session, entities, event): - """Callback method for RV action.""" - # Launching application - if "values" not in event["data"]: - return - - user = session.query( - "User where username is '{0}'".format( - os.environ["FTRACK_API_USER"] - ) - ).one() - job = session.create( - "Job", - { - "user": user, - "status": "running", - "data": json.dumps({ - "description": "RV: Collecting file paths." - }) - } - ) - # Commit to feedback to user. - session.commit() - - paths = [] - try: - paths = self.get_file_paths(session, event) - except Exception: - self.log.error(traceback.format_exc()) - job["status"] = "failed" - else: - job["status"] = "done" - - # Commit to end job. 
- session.commit() - - args = [os.path.normpath(self.rv_path)] - - fps = entities[0].get("custom_attributes", {}).get("fps", None) - if fps is not None: - args.extend(["-fps", str(fps)]) - - args.extend(paths) - - self.log.info("Running rv: {}".format(args)) - - subprocess.Popen(args) - - return True - - def get_file_paths(self, session, event): - """Get file paths from selected components.""" - - link = session.get( - "Component", list(event["data"]["values"].values())[0] - )["version"]["asset"]["parent"]["link"][0] - project = session.get(link["type"], link["id"]) - project_name = project["full_name"] - dbcon = AvalonMongoDB() - dbcon.Session["AVALON_PROJECT"] = project_name - anatomy = Anatomy(project_name) - - location = ftrack_api.Session().pick_location() - - paths = [] - for parent_name in sorted(event["data"]["values"].keys()): - component = session.get( - "Component", event["data"]["values"][parent_name] - ) - - # Newer publishes have the source referenced in Ftrack. - online_source = False - for neighbour_component in component["version"]["components"]: - if neighbour_component["name"] != "ftrackreview-mp4_src": - continue - - paths.append( - location.get_filesystem_path(neighbour_component) - ) - online_source = True - - if online_source: - continue - - subset_name = component["version"]["asset"]["name"] - version_name = component["version"]["version"] - representation_name = component["file_type"][1:] - - asset_doc = get_asset_by_name( - project_name, parent_name, fields=["_id"] - ) - subset_doc = get_subset_by_name( - project_name, - subset_name=subset_name, - asset_id=asset_doc["_id"] - ) - version_doc = get_version_by_name( - project_name, - version=version_name, - subset_id=subset_doc["_id"] - ) - repre_doc = get_representation_by_name( - project_name, - version_id=version_doc["_id"], - representation_name=representation_name - ) - if not repre_doc: - repre_doc = get_representation_by_name( - project_name, - version_id=version_doc["_id"], - 
representation_name="preview" - ) - - paths.append(get_representation_path( - repre_doc, root=anatomy.roots, dbcon=dbcon - )) - - return paths - - -def register(session): - """Register hooks.""" - - RVAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_to_convert/action_store_thumbnails_to_avalon.py b/client/ayon_ftrack/event_handlers_to_convert/action_store_thumbnails_to_avalon.py index 1cc1b469..e28d68a6 100644 --- a/client/ayon_ftrack/event_handlers_to_convert/action_store_thumbnails_to_avalon.py +++ b/client/ayon_ftrack/event_handlers_to_convert/action_store_thumbnails_to_avalon.py @@ -18,7 +18,7 @@ get_version_by_name, get_representations ) -from openpype.pipeline import AvalonMongoDB, Anatomy +from ayon_core.pipeline import AvalonMongoDB, Anatomy class StoreThumbnailsToAvalon(LocalAction): @@ -467,7 +467,3 @@ def get_avalon_entities_for_assetversion(self, asset_version, db_con): output["representations"] = repre_ents return output - - -def register(session): - StoreThumbnailsToAvalon(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_applications.py b/client/ayon_ftrack/event_handlers_user/action_applications.py index 3d597a62..b9e3d34f 100644 --- a/client/ayon_ftrack/event_handlers_user/action_applications.py +++ b/client/ayon_ftrack/event_handlers_user/action_applications.py @@ -1,17 +1,14 @@ -import os import time +from ayon_core.addon import AddonsManager from ayon_ftrack.common import ( - CUST_ATTR_KEY_SERVER_PATH, is_ftrack_enabled_in_settings, get_folder_path_for_entities, BaseAction, ) -from openpype.lib.applications import ( - ApplicationManager, +from ayon_applications import ( ApplicationLaunchFailed, - ApplictionExecutableNotFound, - CUSTOM_LAUNCH_APP_GROUPS + ApplicationExecutableNotFound, ) @@ -24,7 +21,6 @@ class AppplicationsAction(BaseAction): identifier = "ayon_app" _launch_identifier_with_id = None - icon_url = os.environ.get("OPENPYPE_STATICS_SERVER") # 30 seconds cache_lifetime = 30 @@ 
-32,8 +28,16 @@ def __init__(self, *args, **kwargs): super(AppplicationsAction, self).__init__(*args, **kwargs) self._applications_manager = None + self._applications_addon = None self._expire_time = 0 + @property + def applications_addon(self): + if self._applications_addon is None: + addons_manager = AddonsManager() + self._applications_addon = addons_manager.get("applications") + return self._applications_addon + @property def applications_manager(self): """ @@ -47,8 +51,10 @@ def applications_manager(self): current_time = time.time() if self._applications_manager is None: - self._applications_manager = ApplicationManager() - self._expire_time = current_time + self._applications_manager = ( + self.applications_addon.get_applications_manager() + ) + self._expire_time = current_time + self.cache_lifetime elif self._expire_time < current_time: self._applications_manager.refresh() @@ -166,23 +172,13 @@ def discover(self, session, entities, event): if not app or not app.enabled: continue - if app.group.name in CUSTOM_LAUNCH_APP_GROUPS: - continue - # Skip applications without valid executables if only_available and not app.find_executable(): continue - app_icon = app.icon - if app_icon and self.icon_url: - try: - app_icon = app_icon.format(self.icon_url) - except Exception: - self.log.warning(( - "Couldn't fill icon path. 
Icon template: \"{}\"" - " --- Icon url: \"{}\"" - ).format(app_icon, self.icon_url)) - app_icon = None + app_icon = self.applications_addon.get_app_icon_url( + app.icon, server=False + ) items.append({ "label": app.group.label, @@ -247,11 +243,11 @@ def launch(self, session, entities, event): self.applications_manager.launch( app_name, project_name=project_name, - asset_name=folder_path, + folder_path=folder_path, task_name=task_name ) - except ApplictionExecutableNotFound as exc: + except ApplicationExecutableNotFound as exc: self.log.warning(exc.exc_msg) return { "success": False, @@ -283,8 +279,3 @@ def launch(self, session, entities, event): def _get_folder_path(self, session, entity): entity_id = entity["id"] return get_folder_path_for_entities(session, [entity])[entity_id] - - -def register(session): - """Register action. Called when used as an event plugin.""" - AppplicationsAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_batch_task_creation.py b/client/ayon_ftrack/event_handlers_user/action_batch_task_creation.py index c956546d..8c691738 100644 --- a/client/ayon_ftrack/event_handlers_user/action_batch_task_creation.py +++ b/client/ayon_ftrack/event_handlers_user/action_batch_task_creation.py @@ -159,9 +159,3 @@ def launch(self, session, entities, event): } ] } - - -def register(session): - '''Register action. Called when used as an event plugin.''' - - BatchTasksAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_clean_hierarchical_attributes.py b/client/ayon_ftrack/event_handlers_user/action_clean_hierarchical_attributes.py index 828a9cc4..44669815 100644 --- a/client/ayon_ftrack/event_handlers_user/action_clean_hierarchical_attributes.py +++ b/client/ayon_ftrack/event_handlers_user/action_clean_hierarchical_attributes.py @@ -101,9 +101,3 @@ def launch(self, session, entities, event): session.commit() return True - - -def register(session): - '''Register plugin. 
Called when used as an plugin.''' - - CleanHierarchicalAttrsAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_client_review_sort.py b/client/ayon_ftrack/event_handlers_user/action_client_review_sort.py index 70ec0529..acf7c473 100644 --- a/client/ayon_ftrack/event_handlers_user/action_client_review_sort.py +++ b/client/ayon_ftrack/event_handlers_user/action_client_review_sort.py @@ -76,7 +76,3 @@ def launch(self, session, entities, event): "success": True, "message": "Client Review sorted!" } - - -def register(session): - ClientReviewSort(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_component_open.py b/client/ayon_ftrack/event_handlers_user/action_component_open.py index 172d3f7e..361c7ed8 100644 --- a/client/ayon_ftrack/event_handlers_user/action_component_open.py +++ b/client/ayon_ftrack/event_handlers_user/action_component_open.py @@ -4,7 +4,7 @@ from ayon_ftrack.common import LocalAction from ayon_ftrack.lib import get_ftrack_icon_url -from openpype.lib import run_detached_process +from ayon_core.lib import run_detached_process class ComponentOpen(LocalAction): @@ -66,7 +66,3 @@ def launch(self, session, entities, event): "success": True, "message": "Component folder Opened" } - - -def register(session): - ComponentOpen(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_create_cust_attrs.py b/client/ayon_ftrack/event_handlers_user/action_create_cust_attrs.py index d780409a..d5a87cbe 100644 --- a/client/ayon_ftrack/event_handlers_user/action_create_cust_attrs.py +++ b/client/ayon_ftrack/event_handlers_user/action_create_cust_attrs.py @@ -104,10 +104,21 @@ } ``` """ - +import os +import sys import json +import traceback +import tempfile +import datetime + import arrow +from ayon_core.settings import get_studio_settings +try: + from ayon_applications import ApplicationManager +except ImportError: + ApplicationManager = None + from ayon_ftrack.common import ( 
LocalAction, @@ -127,14 +138,179 @@ ) from ayon_ftrack.lib import get_ftrack_icon_url -from openpype.settings import get_system_settings -from openpype.lib import ApplicationManager - class CustAttrException(Exception): pass +class CreateUpdateContext: + def __init__(self, session, app_manager): + self.app_manager = app_manager + self._session = session + self._types_per_name = None + self._security_roles = None + self._object_types = None + self._object_types_by_name = None + self._ftrack_settings = None + self._attrs_settings = None + + self._groups = None + + self._generic_error = None + self._failed_attributes = {} + + @property + def session(self): + return self._session + + @property + def attrs_settings(self): + if self._attrs_settings is not None: + return self._attrs_settings + ftrack_settings = self._get_ftrack_settings() + output = {} + attr_settings = ftrack_settings["custom_attributes"] + for entity_type, attr_data in attr_settings.items(): + # Lower entity type + entity_type = entity_type.lower() + # Just store if entity type is not "task" + if entity_type != "task": + output[entity_type] = attr_data + continue + + # Prepare empty dictionary for entity type if not set yet + if entity_type not in output: + output[entity_type] = {} + + # Store presets per lowered object type + for obj_type, _preset in attr_data.items(): + output[entity_type][obj_type.lower()] = _preset + self._attrs_settings = output + return self._attrs_settings + + def get_custom_attribute_type(self, type_name): + if self._types_per_name is None: + session = self._session + self._types_per_name = { + attr_type["name"].lower(): attr_type + for attr_type in session.query("CustomAttributeType").all() + } + return self._types_per_name.get(type_name.lower()) + + def get_security_roles(self, security_roles): + if self._security_roles is None: + self._security_roles = { + role["name"].lower(): role + for role in self._session.query("SecurityRole").all() + } + + security_roles_lowered = [ + 
name.lower() for name in security_roles + ] + if ( + len(security_roles_lowered) == 0 + or "all" in security_roles_lowered + ): + return list(self._security_roles.values()) + + output = [] + if security_roles_lowered[0] == "except": + excepts = set(security_roles_lowered[1:]) + for role_name, role in self._security_roles.items(): + if role_name not in excepts: + output.append(role) + + else: + for role_name in set(security_roles_lowered): + if role_name not in self._security_roles: + raise CustAttrException(( + "Securit role \"{}\" was not found in Ftrack." + ).format(role_name)) + output.append(self._security_roles[role_name]) + return output + + def get_group(self, group_name): + if not group_name: + return None + + if self._groups is None: + self._groups = { + group["name"].lower(): group + for group in self._session.query( + f"CustomAttributeGroup where name is \"{group_name}\"" + ).all() + } + + group_name = group_name.lower() + if group_name in self._groups: + return self._groups[group_name] + + groups = self._session.query( + f"CustomAttributeGroup where name is \"{group_name}\"" + ).all() + + if len(groups) > 1: + raise CustAttrException( + "Found more than one group \"{}\"".format(group_name) + ) + + if len(groups) == 1: + group = next(iter(groups)) + self._groups[group_name] = group + return group + + self.session.create( + "CustomAttributeGroup", + {"name": group_name} + ) + self.session.commit() + self._groups[group_name] = self._session.query( + f"CustomAttributeGroup where name is \"{group_name}\"" + ).first() + + return self._groups[group_name] + + def get_object_type_by_name(self, object_type_name): + if self._object_types_by_name is None: + self._object_types_by_name = { + object_type["name"].lower(): object_type + for object_type in self._get_object_types() + } + object_type_name_low = object_type_name.lower() + return self._object_types_by_name.get(object_type_name_low) + + def _get_object_types(self): + if self._object_types is None: + 
self._object_types = self._session.query("ObjectType").all() + return self._object_types + + def _get_ftrack_settings(self): + if self._ftrack_settings is None: + self._ftrack_settings = get_studio_settings()["ftrack"] + return self._ftrack_settings + + def job_failed(self): + return self._failed_attributes or self._generic_error + + def add_failed_attribute(self, attr_name, message): + self._failed_attributes[attr_name] = message + + def set_generic_error(self, message, traceback_message): + self._generic_error = "\n".join([message, traceback_message]) + + def get_report_text(self): + if not self.job_failed(): + return None + + output_messages = [] + if self._generic_error: + output_messages.append(self._generic_error) + + for attr_name, message in self._failed_attributes.items(): + output_messages.append(f"Attribute \"{attr_name}\": {message}") + return "\n\n".join(output_messages) + + class CustomAttributes(LocalAction): identifier = "ayon.create.update.attributes" label = "AYON Admin" @@ -182,53 +358,65 @@ def launch(self, session, entities, event): session.commit() # TODO how to get custom attributes from different addons? - self.app_manager = ApplicationManager() + app_manager = None + if ApplicationManager is not None: + app_manager = ApplicationManager() + else: + self.log.info("Applications addon is not available.") + context = CreateUpdateContext(session, app_manager) + + generic_message = "Custom attributes creation failed." 
try: - self.prepare_global_data(session) - self.create_ayon_attributes(event) - self.applications_attribute(event) - self.tools_attribute(event) + self.create_ayon_attributes(context, event) + self.applications_attribute(context, event) + self.tools_attribute(context, event) # self.intent_attribute(event) - self.custom_attributes_from_file(event) - - job["status"] = "done" - session.commit() + self.create_default_custom_attributes(context, event) except Exception: - session.rollback() - job["status"] = "failed" - session.commit() - self.log.error( - "Creating custom attributes failed ({})", exc_info=True + traceback_message = "".join( + traceback.format_exception(*sys.exc_info()) ) + context.set_generic_error(generic_message, traceback_message) - return True + finally: + job_status = "done" + output = True + if context.job_failed(): + job_status = "failed" + output = { + "success": False, + "message": generic_message + } + session.rollback() + report_text = context.get_report_text() + self._upload_report(session, job, report_text) - def prepare_global_data(self, session): - self.types_per_name = { - attr_type["name"].lower(): attr_type - for attr_type in session.query("CustomAttributeType").all() - } - self.security_roles = { - role["name"].lower(): role - for role in session.query("SecurityRole").all() - } + job["status"] = job_status - object_types = session.query("ObjectType").all() - self.object_types_per_id = { - object_type["id"]: object_type for object_type in object_types - } - self.object_types_per_name = { - object_type["name"].lower(): object_type - for object_type in object_types - } + session.commit() - self.groups = {} + return output - self.ftrack_settings = get_system_settings()["modules"]["ftrack"] - self.attrs_settings = self.prepare_attribute_settings() + def _upload_report(self, session, job, report_text): + with tempfile.NamedTemporaryFile( + mode="w", prefix="ayon_ftrack_", suffix=".txt", delete=False + ) as temp_obj: + 
temp_obj.write(report_text) + temp_filepath = temp_obj.name + + # Upload file with traceback to ftrack server and add it to job + component_name = "{}_{}".format( + self.__class__.__name__, + datetime.datetime.now().strftime("%y-%m-%d-%H%M") + ) + self.add_file_component_to_job( + job, session, temp_filepath, component_name + ) + # Delete temp file + os.remove(temp_filepath) def prepare_attribute_settings(self): output = {} @@ -251,7 +439,7 @@ def prepare_attribute_settings(self): return output - def create_ayon_attributes(self, event): + def create_ayon_attributes(self, context, event): # Set security roles for attribute for item in [ @@ -279,8 +467,7 @@ def create_ayon_attributes(self, event): "type": "boolean", "default": False, "group": CUST_ATTR_GROUP, - "is_hierarchical": True, - "config": {"markdown": False} + "is_hierarchical": True }, { "key": CUST_ATTR_AUTO_SYNC, @@ -291,10 +478,13 @@ def create_ayon_attributes(self, event): "entity_type": "show" } ]: - self.process_attr_data(item, event) + self.process_attr_data(context, item, event) + + def applications_attribute(self, context, event): + if context.app_manager is None: + return - def applications_attribute(self, event): - apps_data = app_definitions_from_app_manager(self.app_manager) + apps_data = app_definitions_from_app_manager(context.app_manager) applications_custom_attr_data = { "label": "Applications", @@ -307,10 +497,13 @@ def applications_attribute(self, event): "data": apps_data } } - self.process_attr_data(applications_custom_attr_data, event) + self.process_attr_data(context, applications_custom_attr_data, event) + + def tools_attribute(self, context, event): + if context.app_manager is None: + return - def tools_attribute(self, event): - tools_data = tool_definitions_from_app_manager(self.app_manager) + tools_data = tool_definitions_from_app_manager(context.app_manager) tools_custom_attr_data = { "label": "Tools", @@ -323,10 +516,10 @@ def tools_attribute(self, event): "data": tools_data } } 
- self.process_attr_data(tools_custom_attr_data, event) + self.process_attr_data(context, tools_custom_attr_data, event) - def intent_attribute(self, event): - intent_key_values = self.ftrack_settings["intent"]["items"] + def intent_attribute(self, context, event): + intent_key_values = context.ftrack_settings["intent"]["items"] intent_values = [] for key, label in intent_key_values.items(): @@ -353,9 +546,9 @@ def intent_attribute(self, event): "data": intent_values } } - self.process_attr_data(intent_custom_attr_data, event) + self.process_attr_data(context, intent_custom_attr_data, event) - def custom_attributes_from_file(self, event): + def create_default_custom_attributes(self, context, event): # Load json with custom attributes configurations cust_attr_def = default_custom_attributes_definition() attrs_data = [] @@ -388,9 +581,9 @@ def custom_attributes_from_file(self, event): for cust_attr_data in attrs_data: # Add group cust_attr_data["group"] = CUST_ATTR_GROUP - self.process_attr_data(cust_attr_data, event) + self.process_attr_data(context, cust_attr_data, event) - def presets_for_attr_data(self, attr_data): + def presets_for_attr_data(self, context, attr_data): output = {} attr_key = attr_data["key"] @@ -399,7 +592,7 @@ def presets_for_attr_data(self, attr_data): else: entity_key = attr_data["entity_type"] - entity_settings = self.attrs_settings.get(entity_key) or {} + entity_settings = context.attrs_settings.get(entity_key) or {} if entity_key.lower() == "task": object_type = attr_data["object_type"] entity_settings = entity_settings.get(object_type.lower()) or {} @@ -410,38 +603,50 @@ def presets_for_attr_data(self, attr_data): output[key] = value return output - def process_attr_data(self, cust_attr_data, event): - attr_settings = self.presets_for_attr_data(cust_attr_data) + def process_attr_data(self, context, cust_attr_data, event): + attr_settings = self.presets_for_attr_data(context, cust_attr_data) cust_attr_data.update(attr_settings) try: data = 
{} # Get key, label, type - data.update(self.get_required(cust_attr_data)) + data.update(self.get_required(context, cust_attr_data)) # Get hierachical/ entity_type/ object_id - data.update(self.get_entity_type(cust_attr_data)) + data.update(self.get_entity_type(context, cust_attr_data)) # Get group, default, security roles - data.update(self.get_optional(cust_attr_data)) + data.update(self.get_optional(context, cust_attr_data)) # Process data self.process_attribute(data) - except CustAttrException as cae: - cust_attr_name = cust_attr_data.get("label", cust_attr_data["key"]) + except Exception as exc: + traceback_message = None + if not isinstance(exc, CustAttrException): + traceback_message = "".join( + traceback.format_exception(*sys.exc_info()) + ) + + cust_attr_name = cust_attr_data.get( + "label", cust_attr_data["key"] + ) if cust_attr_name: msg = "Custom attribute error \"{}\" - {}".format( - cust_attr_name, str(cae) + cust_attr_name, str(exc) ) else: - msg = "Custom attribute error - {}".format(str(cae)) + msg = "Custom attribute error - {}".format(str(exc)) self.log.warning(msg, exc_info=True) self.show_message(event, msg) + if traceback_message: + msg = "\n".join([msg, traceback_message]) + context.add_failed_attribute(cust_attr_name, msg) def process_attribute(self, data): existing_attrs = self.session.query(( "select is_hierarchical, key, type, entity_type, object_type_id" " from CustomAttributeConfiguration" )).all() + matching = [] is_hierarchical = data.get("is_hierarchical", False) for attr in existing_attrs: @@ -495,7 +700,7 @@ def process_attribute(self, data): "Custom attribute is duplicated. 
Key: \"{}\" Type: \"{}\"" ).format(data["key"], data["type"]["name"])) - def get_required(self, attr): + def get_required(self, context, attr): for key in self.required_keys: if key not in attr: raise CustAttrException( @@ -512,7 +717,7 @@ def get_required(self, attr): output = { "key": attr["key"], "label": attr["label"], - "type": self.types_per_name[type_name_l] + "type": context.get_custom_attribute_type(type_name_l) } config = None @@ -523,17 +728,28 @@ def get_required(self, attr): elif type_name == "enumerator": config = self.get_enumerator_config(attr) - if config is not None: - output["config"] = config + # Fake empty config + if config is None: + config = json.dumps({}) + output["config"] = config return output def get_number_config(self, attr): - is_decimal = attr.get("config", {}).get("isdecimal") + config = attr.get("config", {}) + is_decimal = config.get("isdecimal") if is_decimal is None: is_decimal = False - return json.dumps({"isdecimal": is_decimal}) + config_data = { + "isdecimal": is_decimal, + } + if is_decimal: + precision = config.get("precision") + if precision is not None: + config_data["precision"] = precision + + return json.dumps(config_data) def get_text_config(self, attr): markdown = attr.get("config", {}).get("markdown") @@ -549,12 +765,11 @@ def get_enumerator_config(self, attr): data = [] for item in attr["config"]["data"]: - item_data = {} for key in item: - # TODO key check by regex - item_data["menu"] = item[key] - item_data["value"] = key - data.append(item_data) + data.append({ + "menu": item[key], + "value": key, + }) multi_selection = False for key, value in attr["config"].items(): @@ -569,62 +784,6 @@ def get_enumerator_config(self, attr): "data": json.dumps(data) }) - return config - - def get_group(self, attr): - if isinstance(attr, dict): - group_name = attr["group"].lower() - else: - group_name = attr - if group_name in self.groups: - return self.groups[group_name] - - query = "CustomAttributeGroup where name is 
\"{}\"".format(group_name) - groups = self.session.query(query).all() - - if len(groups) > 1: - raise CustAttrException( - "Found more than one group \"{}\"".format(group_name) - ) - - if len(groups) == 1: - group = next(iter(groups)) - self.groups[group_name] = group - return group - - group = self.session.create( - "CustomAttributeGroup", - {"name": group_name} - ) - self.session.commit() - - return group - - def get_security_roles(self, security_roles): - security_roles_lowered = tuple(name.lower() for name in security_roles) - if ( - len(security_roles_lowered) == 0 - or "all" in security_roles_lowered - ): - return list(self.security_roles.values()) - - output = [] - if security_roles_lowered[0] == "except": - excepts = security_roles_lowered[1:] - for role_name, role in self.security_roles.items(): - if role_name not in excepts: - output.append(role) - - else: - for role_name in security_roles_lowered: - if role_name in self.security_roles: - output.append(self.security_roles[role_name]) - else: - raise CustAttrException(( - "Securit role \"{}\" was not found in Ftrack." 
- ).format(role_name)) - return output - def get_default(self, attr): attr_type = attr["type"] default = attr["default"] @@ -673,25 +832,22 @@ def get_default(self, attr): return default - def get_optional(self, attr): + def get_optional(self, context, attr): output = {} if "group" in attr: - output["group"] = self.get_group(attr) + output["group"] = context.get_group(attr["group"]) if "default" in attr: output["default"] = self.get_default(attr) - roles_read = [] - roles_write = [] - if "read_security_roles" in attr: - roles_read = attr["read_security_roles"] - if "write_security_roles" in attr: - roles_write = attr["write_security_roles"] - - output["read_security_roles"] = self.get_security_roles(roles_read) - output["write_security_roles"] = self.get_security_roles(roles_write) + output["read_security_roles"] = context.get_security_roles( + attr.get("read_security_roles") or [] + ) + output["write_security_roles"] = context.get_security_roles( + attr.get("write_security_roles") or [] + ) return output - def get_entity_type(self, attr): + def get_entity_type(self, context, attr): if attr.get("is_hierarchical", False): return { "is_hierarchical": True, @@ -709,8 +865,7 @@ def get_entity_type(self, attr): if not object_type_name: raise CustAttrException("Missing object_type") - object_type_name_low = object_type_name.lower() - object_type = self.object_types_per_name.get(object_type_name_low) + object_type = context.get_object_type_by_name(object_type_name) if not object_type: raise CustAttrException(( "Object type with name \"{}\" don't exist" @@ -720,7 +875,3 @@ def get_entity_type(self, attr): "entity_type": entity_type, "object_type_id": object_type["id"] } - - -def register(session): - CustomAttributes(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_create_folders.py b/client/ayon_ftrack/event_handlers_user/action_create_folders.py index ae5ffb45..a15e06d8 100644 --- 
a/client/ayon_ftrack/event_handlers_user/action_create_folders.py +++ b/client/ayon_ftrack/event_handlers_user/action_create_folders.py @@ -2,7 +2,9 @@ import collections import copy -from openpype.pipeline import Anatomy +import ayon_api + +from ayon_core.pipeline import Anatomy from ayon_ftrack.common import LocalAction from ayon_ftrack.lib import get_ftrack_icon_url @@ -98,12 +100,17 @@ def launch(self, session, entities, event): project_entity = self.get_project_from_entity(filtered_entities[0]) project_name = project_entity["full_name"] + ayon_project = ayon_api.get_project(project_name) + if not ayon_project: + return { + "success": False, + "message": f"Project '{project_name}' was not found in AYON.", + } + project_code = project_entity["name"] - task_entities = [] - other_entities = [] - self.get_all_entities( - session, entities, task_entities, other_entities + task_entities, other_entities = self.get_all_entities( + session, entities ) hierarchy = self.get_entities_hierarchy( session, task_entities, other_entities @@ -114,17 +121,14 @@ def launch(self, session, entities, event): for task_type in task_types } - anatomy = Anatomy(project_name) - - work_keys = ["work", "folder"] - work_template = anatomy.templates - for key in work_keys: - work_template = work_template[key] + anatomy = Anatomy(project_name, project_entity=ayon_project) - publish_keys = ["publish", "folder"] - publish_template = anatomy.templates - for key in publish_keys: - publish_template = publish_template[key] + work_template = anatomy.get_template_item( + "work", "default", "directory" + ) + publish_template = anatomy.get_template_item( + "publish", "default", "directory" + ) project_data = { "project": { @@ -160,10 +164,10 @@ def launch(self, session, entities, event): if not task_entities: # create path for entity collected_paths.append(self.compute_template( - anatomy, parent_data, work_keys + parent_data, work_template )) collected_paths.append(self.compute_template( - anatomy, 
parent_data, publish_keys + parent_data, publish_template )) continue @@ -178,12 +182,12 @@ def launch(self, session, entities, event): # Template wok collected_paths.append(self.compute_template( - anatomy, task_data, work_keys + task_data, work_template )) # Template publish collected_paths.append(self.compute_template( - anatomy, task_data, publish_keys + task_data, publish_template )) if len(collected_paths) == 0: @@ -204,40 +208,72 @@ def launch(self, session, entities, event): "message": "Successfully created project folders." } - def get_all_entities( - self, session, entities, task_entities, other_entities - ): - if not entities: - return + def get_all_entities(self, session, entities): + """ - no_task_entities = [] - for entity in entities: - if entity.entity_type.lower() == "task": - task_entities.append(entity) - else: - no_task_entities.append(entity) + Args: + session (ftrack_api.session.Session): Ftrack session. + entities (list[ftrack_api.entity.base.Entity]): List of entities. - if not no_task_entities: - return task_entities + Returns: + tuple[list, list]: Tuple where first item is list of task entities + and second item is list of entities that are not task + entities. All are entities that were passed in and + their children. 
+ """ - other_entities.extend(no_task_entities) + task_entities = [] + other_entities = [] - no_task_entity_ids = {entity["id"] for entity in no_task_entities} - next_entities = session.query( - ( - "select id, parent_id" - " from TypedContext where parent_id in ({})" - ).format(self.join_query_keys(no_task_entity_ids)) - ).all() + query_queue = collections.deque() + query_queue.append(entities) + while query_queue: + entities = query_queue.popleft() + if not entities: + continue - self.get_all_entities( - session, next_entities, task_entities, other_entities - ) + no_task_entities = [] + for entity in entities: + if entity.entity_type.lower() == "task": + task_entities.append(entity) + else: + no_task_entities.append(entity) + + if not no_task_entities: + continue + + other_entities.extend(no_task_entities) + + no_task_entity_ids = {entity["id"] for entity in no_task_entities} + next_entities = session.query( + ( + "select id, parent_id" + " from TypedContext where parent_id in ({})" + ).format(self.join_query_keys(no_task_entity_ids)) + ).all() + query_queue.append(next_entities) + return task_entities, other_entities def get_entities_hierarchy(self, session, task_entities, other_entities): + """ + + Args: + session (ftrack_api.session.Session): Ftrack session. + task_entities (list[ftrack_api.entity.base.Entity]): List of task + entities. + other_entities (list[ftrack_api.entity.base.Entity]): List of + entities that are not task entities. + + Returns: + list[tuple[ftrack_api.entity.base.Entity, list]]: List of tuples + where first item is parent entity and second item is list of + task entities that are children of parent entity. 
+ """ + + output = [] task_entity_ids = {entity["id"] for entity in task_entities} if not task_entity_ids: - return [] + return output full_task_entities = session.query( ( @@ -250,21 +286,29 @@ def get_entities_hierarchy(self, session, task_entities, other_entities): parent_id = entity["parent_id"] task_entities_by_parent_id[parent_id].append(entity) - output = [] if not task_entities_by_parent_id: return output - other_ids = {entity["id"] for entity in other_entities} - other_ids |= set(task_entities_by_parent_id.keys()) + parent_ids = set(task_entities_by_parent_id.keys()) - parent_entities = session.query( - ( - "select id, name from TypedContext where id in ({})" - ).format(self.join_query_keys(other_ids)) - ).all() + other_entities_by_id = { + entity["id"]: entity + for entity in other_entities + } + parent_ids -= set(other_entities_by_id.keys()) + + if parent_ids: + parent_entities = session.query( + ( + "select id, name from TypedContext where id in ({})" + ).format(self.join_query_keys(parent_ids)) + ).all() + other_entities_by_id.update({ + entity["id"]: entity + for entity in parent_entities + }) - for parent_entity in parent_entities: - parent_id = parent_entity["id"] + for parent_id, parent_entity in other_entities_by_id.items(): output.append(( parent_entity, task_entities_by_parent_id[parent_id] @@ -272,11 +316,8 @@ def get_entities_hierarchy(self, session, task_entities, other_entities): return output - def compute_template(self, anatomy, data, anatomy_keys): - filled_template = anatomy.format_all(data) - for key in anatomy_keys: - filled_template = filled_template[key] - + def compute_template(self, data, template): + filled_template = template.format(data) if filled_template.solved: return os.path.normpath(filled_template) @@ -286,8 +327,3 @@ def compute_template(self, anatomy, data, anatomy_keys): ) ) return os.path.normpath(filled_template.split("{")[0]) - - -def register(session): - """Register plugin. 
Called when used as an plugin.""" - CreateFolders(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_create_project_structure.py b/client/ayon_ftrack/event_handlers_user/action_create_project_structure.py index 3e065f38..93bae055 100644 --- a/client/ayon_ftrack/event_handlers_user/action_create_project_structure.py +++ b/client/ayon_ftrack/event_handlers_user/action_create_project_structure.py @@ -1,6 +1,8 @@ import re -from openpype.pipeline.project_folders import ( +import ayon_api + +from ayon_core.pipeline.project_folders import ( get_project_basic_paths, create_project_folders, ) @@ -11,9 +13,7 @@ class CreateProjectFolders(LocalAction): """Action create folder structure and may create hierarchy in Ftrack. - Creation of folder structure and hierarchy in Ftrack is based on presets. - These presets are located in: - `~/pype-config/presets/tools/project_folder_structure.json` + Creation of folder structure and hierarchy in Ftrack is based on settings. Example of content: ```json @@ -76,6 +76,13 @@ def launch(self, session, entities, event): # Get project entity project_entity = self.get_project_from_entity(entities[0]) project_name = project_entity["full_name"] + ayon_project = ayon_api.get_project(project_name) + if not ayon_project: + return { + "success": False, + "message": f"Project '{project_name}' was not found in AYON.", + } + try: # Get paths based on presets basic_paths = get_project_basic_paths(project_name) @@ -206,7 +213,3 @@ def create_ftrack_entity(self, name, ent_type, parent): new_ent = self.session.create(ent_type, data) self.session.commit() return new_ent - - -def register(session): - CreateProjectFolders(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_delivery.py b/client/ayon_ftrack/event_handlers_user/action_delivery.py index a1b95056..b7c043c2 100644 --- a/client/ayon_ftrack/event_handlers_user/action_delivery.py +++ b/client/ayon_ftrack/event_handlers_user/action_delivery.py @@ 
-9,6 +9,7 @@ get_folders, get_products, get_versions, + get_representations, ) from ayon_ftrack.common import ( @@ -19,11 +20,10 @@ ) from ayon_ftrack.lib import get_ftrack_icon_url -from openpype.client import get_representations -from openpype.lib.dateutils import get_datetime_data -from openpype.pipeline import Anatomy -from openpype.pipeline.load import get_representation_path_with_anatomy -from openpype.pipeline.delivery import ( +from ayon_core.lib.dateutils import get_datetime_data +from ayon_core.pipeline import Anatomy +from ayon_core.pipeline.load import get_representation_path_with_anatomy +from ayon_core.pipeline.delivery import ( get_format_dict, check_destination_path, deliver_single_file, @@ -322,8 +322,8 @@ def real_launch(self, session, entities, event): datetime_data = get_datetime_data() for repre in repres_to_deliver: - source_path = repre.get("data", {}).get("path") - debug_msg = "Processing representation {}".format(repre["_id"]) + source_path = repre["attrib"]["path"] + debug_msg = "Processing representation {}".format(repre["id"]) if source_path: debug_msg += " with published path {}.".format(source_path) self.log.debug(debug_msg) @@ -351,7 +351,7 @@ def real_launch(self, session, entities, event): anatomy_data["folder"] = folder_value repre_report_items = check_destination_path( - repre["_id"], + repre["id"], anatomy, anatomy_data, datetime_data, @@ -687,10 +687,10 @@ def _get_product_entities( if not products_by_name: continue - subset_name = asset["name"] - subset_doc = products_by_name.get(subset_name) - if subset_doc: - output.append(subset_doc) + product_name = asset["name"] + product_entity = products_by_name.get(product_name) + if product_entity: + output.append(product_entity) return output def _get_version_entities( @@ -769,9 +769,3 @@ def _get_version_entities( filtered_versions.append(version_entity) return filtered_versions - - -def register(session): - """Register plugin. 
Called when used as a plugin.""" - - Delivery(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_job_killer.py b/client/ayon_ftrack/event_handlers_user/action_job_killer.py index 530c989e..9c520255 100644 --- a/client/ayon_ftrack/event_handlers_user/action_job_killer.py +++ b/client/ayon_ftrack/event_handlers_user/action_job_killer.py @@ -127,7 +127,3 @@ def launch(self, session, entities, event): "success": True, "message": "All selected jobs were killed Successfully!" } - - -def register(session): - JobKiller(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_multiple_notes.py b/client/ayon_ftrack/event_handlers_user/action_multiple_notes.py index 39cac0e7..e9fb57a1 100644 --- a/client/ayon_ftrack/event_handlers_user/action_multiple_notes.py +++ b/client/ayon_ftrack/event_handlers_user/action_multiple_notes.py @@ -97,7 +97,3 @@ def launch(self, session, entities, event): entity["notes"].append(new_note) session.commit() return True - - -def register(session): - MultipleNotes(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_test.py b/client/ayon_ftrack/event_handlers_user/action_test.py index 2662ac4c..e11dbf3d 100644 --- a/client/ayon_ftrack/event_handlers_user/action_test.py +++ b/client/ayon_ftrack/event_handlers_user/action_test.py @@ -20,7 +20,3 @@ def launch(self, session, entities, event): self.log.info(event) return True - - -def register(session): - TestAction(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_childern.py b/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_childern.py index 2ff89d09..8313663f 100644 --- a/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_childern.py +++ b/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_childern.py @@ -48,7 +48,3 @@ def launch(self, session, entities, event): "success": True, "message": "Created job for updating thumbnails!" 
} - - -def register(session): - ThumbToChildren(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_parent.py b/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_parent.py index 0fa9aa29..0d25f79b 100644 --- a/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_parent.py +++ b/client/ayon_ftrack/event_handlers_user/action_thumbnail_to_parent.py @@ -73,7 +73,3 @@ def launch(self, session, entities, event): "success": True, "message": "Created job for updating thumbnails!" } - - -def register(session): - ThumbToParent(session).register() diff --git a/client/ayon_ftrack/event_handlers_user/action_where_run_ask.py b/client/ayon_ftrack/event_handlers_user/action_where_run_ask.py index 670d5ae2..3b61441a 100644 --- a/client/ayon_ftrack/event_handlers_user/action_where_run_ask.py +++ b/client/ayon_ftrack/event_handlers_user/action_where_run_ask.py @@ -89,7 +89,3 @@ def _show_info(self, event): items.append(message) self.show_interface(items, title, event=event) - - -def register(session): - ActionWhereIRun(session).register() diff --git a/client/ayon_ftrack/ftrack_addon.py b/client/ayon_ftrack/ftrack_addon.py index 20c6e0de..1d10659f 100644 --- a/client/ayon_ftrack/ftrack_addon.py +++ b/client/ayon_ftrack/ftrack_addon.py @@ -1,13 +1,26 @@ import os +import tempfile +import json -import click +import requests +import ayon_api -from openpype.modules import ( +from ayon_core.addon import ( AYONAddon, - ITrayModule, + ITrayAddon, IPluginPaths, ) -from openpype.lib import Logger +from ayon_core.lib import Logger, run_ayon_launcher_process +from ayon_core.settings import get_project_settings, get_studio_settings +from ayon_core.tools.tray import get_tray_server_url + +from ayon_ftrack.lib.credentials import ( + save_credentials, + get_credentials, + check_credentials, +) + +from .version import __version__ FTRACK_ADDON_DIR = os.path.dirname(os.path.abspath(__file__)) _URL_NOT_SET = object() @@ -15,11 +28,11 @@ class 
FtrackAddon( AYONAddon, - ITrayModule, + ITrayAddon, IPluginPaths, ): name = "ftrack" - enabled = True + version = __version__ def initialize(self, settings): ftrack_settings = settings[self.name] @@ -36,11 +49,14 @@ def initialize(self, settings): # Prepare attribute self.user_event_handlers_paths = user_event_handlers_paths - self.tray_module = None + self._tray_wrapper = None # TimersManager connection self.timers_manager_connector = None - self._timers_manager_module = None + self._timers_manager_addon = None + + def webserver_initialization(self, web_manager): + self._tray_wrapper.webserver_initialization(web_manager) def get_ftrack_url(self): """Resolved ftrack url. @@ -92,36 +108,13 @@ def get_launch_hook_paths(self): return os.path.join(FTRACK_ADDON_DIR, "launch_hooks") - def modify_application_launch_arguments(self, application, env): - if not application.use_python_2: - return - - self.log.info("Adding Ftrack Python 2 packages to PYTHONPATH.") - - # Prepare vendor dir path - python_2_vendor = os.path.join(FTRACK_ADDON_DIR, "python2_vendor") - - # Add Python 2 modules - python_paths = [ - # `python-ftrack-api` - os.path.join(python_2_vendor, "ftrack-python-api", "source"), - ] - - # Load PYTHONPATH from current launch context - python_path = env.get("PYTHONPATH") - if python_path: - python_paths.append(python_path) - - # Set new PYTHONPATH to launch context environments - env["PYTHONPATH"] = os.pathsep.join(python_paths) - - def connect_with_modules(self, enabled_modules): - for module in enabled_modules: - if not hasattr(module, "get_ftrack_event_handler_paths"): + def connect_with_addons(self, enabled_addons): + for addon in enabled_addons: + if not hasattr(addon, "get_ftrack_event_handler_paths"): continue try: - paths_by_type = module.get_ftrack_event_handler_paths() + paths_by_type = addon.get_ftrack_event_handler_paths() except Exception: continue @@ -163,8 +156,7 @@ def create_ftrack_session(self, **session_kwargs): api_user = 
os.environ.get("FTRACK_API_USER") if not api_key or not api_user: - from .lib import credentials - cred = credentials.get_credentials() + cred = get_credentials() api_user = cred.get("username") api_key = cred.get("api_key") @@ -175,18 +167,18 @@ def create_ftrack_session(self, **session_kwargs): def tray_init(self): from .tray import FtrackTrayWrapper - self.tray_module = FtrackTrayWrapper(self) - # Module is it's own connector to TimersManager + self._tray_wrapper = FtrackTrayWrapper(self) + # Addon is it's own connector to TimersManager self.timers_manager_connector = self def tray_menu(self, parent_menu): - return self.tray_module.tray_menu(parent_menu) + return self._tray_wrapper.tray_menu(parent_menu) def tray_start(self): - return self.tray_module.validate() + return self._tray_wrapper.validate() def tray_exit(self): - self.tray_module.tray_exit() + self._tray_wrapper.tray_exit() def set_credentials_to_env(self, username, api_key): os.environ["FTRACK_API_USER"] = username or "" @@ -194,31 +186,105 @@ def set_credentials_to_env(self, username, api_key): # --- TimersManager connection methods --- def start_timer(self, data): - if self.tray_module: - self.tray_module.start_timer_manager(data) + if self._tray_wrapper: + self._tray_wrapper.start_timer_manager(data) def stop_timer(self): - if self.tray_module: - self.tray_module.stop_timer_manager() + if self._tray_wrapper: + self._tray_wrapper.stop_timer_manager() + + def ensure_is_process_ready(self, context): + """Ensure addon is ready for process. + + Args: + context (ProcessContext): Process context. 
+ + """ + # Safe to support older ayon-core without 'ProcessPreparationError' + from ayon_core.addon import ProcessPreparationError + from ayon_ftrack.common import is_ftrack_enabled_in_settings + + # Do not continue if Ftrack is not enabled in settings + if context.project_name: + settings = get_project_settings(context.project_name) + else: + settings = get_studio_settings() + + if not is_ftrack_enabled_in_settings(settings): + return + + # Not sure if this should crash or silently continue? + server_url = self.get_ftrack_url() + if not server_url: + return + + username = os.getenv("FTRACK_API_USER") + api_key = os.getenv("FTRACK_API_KEY") + + if ( + username and api_key + and check_credentials(username, api_key, server_url) + ): + self.set_credentials_to_env(username, api_key) + return + + username, api_key = self.get_credentials() + if ( + username and api_key + and check_credentials(username, api_key, server_url) + ): + self.set_credentials_to_env(username, api_key) + return + + if context.headless: + raise ProcessPreparationError( + "Ftrack login details are missing. Unable to proceed" + " without a user interface." + ) + + username, api_key = self._ask_for_credentials(server_url) + if username and api_key: + self.set_credentials_to_env(username, api_key) + # Send the credentials to the running tray + save_credentials(username, api_key, self.get_ftrack_url()) + tray_url = get_tray_server_url() + if tray_url: + requests.post( + f"{tray_url}/addons/ftrack/credentials", + json={"username": username, "api_key": api_key}, + ) + return - def register_timers_manager(self, timer_manager_module): - self._timers_manager_module = timer_manager_module + raise ProcessPreparationError( + "Unable to connect to Ftrack. The process cannot proceed" + " without this connection." 
+ ) + + def register_timers_manager(self, timers_manager_addon): + self._timers_manager_addon = timers_manager_addon def timer_started(self, data): - if self._timers_manager_module is not None: - self._timers_manager_module.timer_started(self.id, data) + if self._timers_manager_addon is not None: + self._timers_manager_addon.timer_started(self.id, data) def timer_stopped(self): - if self._timers_manager_module is not None: - self._timers_manager_module.timer_stopped(self.id) + if self._timers_manager_addon is not None: + self._timers_manager_addon.timer_stopped(self.id) + + def get_task_time(self, project_name, folder_path, task_name): + folder_entity = ayon_api.get_folder_by_path(project_name, folder_path) + if not folder_entity: + return 0 + ftrack_id = folder_entity["attrib"].get("ftrackId") + if not ftrack_id: + return 0 - def get_task_time(self, project_name, asset_name, task_name): session = self.create_ftrack_session() query = ( - 'Task where name is "{}"' - ' and parent.name is "{}"' + 'select time_logged from Task where name is "{}"' + ' and parent_id is "{}"' ' and project.full_name is "{}"' - ).format(task_name, asset_name, project_name) + ).format(task_name, ftrack_id, project_name) task_entity = session.query(query).first() if not task_entity: return 0 @@ -228,13 +294,32 @@ def get_task_time(self, project_name, asset_name, task_name): def get_credentials(self): # type: () -> tuple """Get local Ftrack credentials.""" - from .lib import credentials - cred = credentials.get_credentials(self.ftrack_url) + cred = get_credentials(self.ftrack_url) return cred.get("username"), cred.get("api_key") - def cli(self, click_group): - click_group.add_command(cli_main) + @staticmethod + def _ask_for_credentials(ftrack_url): + login_script = os.path.join( + FTRACK_ADDON_DIR, "tray", "login_dialog.py" + ) + with tempfile.NamedTemporaryFile( + mode="w", prefix="ay_ftrack", suffix=".json", delete=False + ) as tmp: + json_path = tmp.name + json.dump({"server_url": 
ftrack_url}, tmp.file) + + run_ayon_launcher_process( + "--skip-bootstrap", + login_script, json_path, + add_sys_paths=True, + creationflags=0, + + ) + + with open(json_path, "r") as stream: + data = json.load(stream) + return data.get("username"), data.get("api_key") def _check_ftrack_url(url): @@ -283,8 +368,3 @@ def resolve_ftrack_url(url, logger=None): logger.error("Ftrack server \"{}\" is not accessible!".format(url)) return ftrack_url - - -@click.group(FtrackAddon.name, help="Ftrack module related commands.") -def cli_main(): - pass diff --git a/client/ayon_ftrack/launch_hooks/post_ftrack_changes.py b/client/ayon_ftrack/launch_hooks/post_ftrack_changes.py index 3d55b3ad..cc51c2ad 100644 --- a/client/ayon_ftrack/launch_hooks/post_ftrack_changes.py +++ b/client/ayon_ftrack/launch_hooks/post_ftrack_changes.py @@ -8,25 +8,18 @@ is_ftrack_enabled_in_settings, ) -from openpype.settings import get_project_settings -from openpype.lib.applications import PostLaunchHook -try: - # Backwards compatibility - # TODO remove in next minor version bump (after 0.3.x) - from openpype.lib.applications import LaunchTypes - local_launch_type = LaunchTypes.local -except Exception: - local_launch_type = "local" +from ayon_core.settings import get_project_settings +from ayon_applications import PostLaunchHook, LaunchTypes class PostFtrackHook(PostLaunchHook): order = None - launch_types = {local_launch_type} + launch_types = {LaunchTypes.local} def execute(self): project_name = self.data.get("project_name") project_settings = self.data.get("project_settings") - folder_path = self.data.get("asset_name") + folder_path = self.data.get("folder_path") task_name = self.data.get("task_name") missing_context_keys = [ @@ -114,36 +107,43 @@ def ftrack_status_change(self, session, entity, project_name): ) return - actual_status = entity["status"]["name"].lower() + current_status = entity["status"]["name"].lower() already_tested = set() ent_path = "/".join( [ent["name"] for ent in 
entity["link"]] ) + + statuses = session.query("select id, name from Status").all() + statuses_by_low_name = { + status["name"].lower(): status + for status in statuses + } # TODO refactor while True: next_status_name = None for item in status_mapping: - new_status = item["name"] + new_status = item["name"].lower() if new_status in already_tested: continue - from_statuses = item["value"] - if ( - actual_status in from_statuses - or "__any__" in from_statuses - ): - if new_status != "__ignore__": - next_status_name = new_status - already_tested.add(new_status) - break already_tested.add(new_status) + found_match = False + for from_status in item["value"]: + from_status = from_status.lower() + if from_status in (current_status, "__any__"): + found_match = True + if new_status != "__ignore__": + next_status_name = new_status + break + + if found_match: + break + if next_status_name is None: break - status = session.query( - f"Status where name is \"{next_status_name}\"" - ).first() + status = statuses_by_low_name.get(next_status_name) if status is None: self.log.warning( f"Status '{next_status_name}' not found in ftrack." diff --git a/client/ayon_ftrack/lib/__init__.py b/client/ayon_ftrack/lib/__init__.py index 490d6cdb..ddfbbfc7 100644 --- a/client/ayon_ftrack/lib/__init__.py +++ b/client/ayon_ftrack/lib/__init__.py @@ -1,10 +1,9 @@ from . 
import credentials -from .utils import statics_icon, get_ftrack_icon_url +from .utils import get_ftrack_icon_url __all__ = ( "credentials", - "statics_icon", "get_ftrack_icon_url", ) diff --git a/client/ayon_ftrack/lib/credentials.py b/client/ayon_ftrack/lib/credentials.py index 2eb64254..96a7765d 100644 --- a/client/ayon_ftrack/lib/credentials.py +++ b/client/ayon_ftrack/lib/credentials.py @@ -7,7 +7,7 @@ from urlparse import urlparse -from openpype.lib import OpenPypeSecureRegistry +from ayon_core.lib import AYONSecureRegistry USERNAME_KEY = "username" API_KEY_KEY = "api_key" @@ -43,8 +43,8 @@ def get_credentials(ftrack_server=None): username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY) api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY) - username_registry = OpenPypeSecureRegistry(username_name) - api_key_registry = OpenPypeSecureRegistry(api_key_name) + username_registry = AYONSecureRegistry(username_name) + api_key_registry = AYONSecureRegistry(api_key_name) output[USERNAME_KEY] = username_registry.get_item(USERNAME_KEY, None) output[API_KEY_KEY] = api_key_registry.get_item(API_KEY_KEY, None) @@ -60,8 +60,8 @@ def save_credentials(username, api_key, ftrack_server=None): # Clear credentials clear_credentials(ftrack_server) - username_registry = OpenPypeSecureRegistry(username_name) - api_key_registry = OpenPypeSecureRegistry(api_key_name) + username_registry = AYONSecureRegistry(username_name) + api_key_registry = AYONSecureRegistry(api_key_name) username_registry.set_item(USERNAME_KEY, username) api_key_registry.set_item(API_KEY_KEY, api_key) @@ -72,8 +72,8 @@ def clear_credentials(ftrack_server=None): username_name = _get_ftrack_secure_key(hostname, USERNAME_KEY) api_key_name = _get_ftrack_secure_key(hostname, API_KEY_KEY) - username_registry = OpenPypeSecureRegistry(username_name) - api_key_registry = OpenPypeSecureRegistry(api_key_name) + username_registry = AYONSecureRegistry(username_name) + api_key_registry = 
AYONSecureRegistry(api_key_name) current_username = username_registry.get_item(USERNAME_KEY, None) current_api_key = api_key_registry.get_item(API_KEY_KEY, None) diff --git a/client/ayon_ftrack/lib/utils.py b/client/ayon_ftrack/lib/utils.py index c7eb7526..5f2fa829 100644 --- a/client/ayon_ftrack/lib/utils.py +++ b/client/ayon_ftrack/lib/utils.py @@ -1,14 +1,6 @@ -import os from ayon_ftrack.version import __version__ from ayon_ftrack.common import get_ftrack_icon_url as _get_ftrack_icon_url -def statics_icon(*icon_statics_file_parts): - statics_server = os.environ.get("OPENPYPE_STATICS_SERVER") - if not statics_server: - return None - return "/".join((statics_server, *icon_statics_file_parts)) - - def get_ftrack_icon_url(icon_name): return _get_ftrack_icon_url(icon_name, addon_version=__version__) diff --git a/client/ayon_ftrack/pipeline/plugin.py b/client/ayon_ftrack/pipeline/plugin.py index 2b1897dc..86a6ef55 100644 --- a/client/ayon_ftrack/pipeline/plugin.py +++ b/client/ayon_ftrack/pipeline/plugin.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.pipeline.publish import ( +from ayon_core.pipeline.publish import ( get_plugin_settings, apply_plugin_settings_automatically, ) diff --git a/client/ayon_ftrack/plugins/publish/collect_ftrack_api.py b/client/ayon_ftrack/plugins/publish/collect_ftrack_api.py index ef9736c6..660cdbf8 100644 --- a/client/ayon_ftrack/plugins/publish/collect_ftrack_api.py +++ b/client/ayon_ftrack/plugins/publish/collect_ftrack_api.py @@ -6,10 +6,6 @@ from ayon_ftrack.common import FTRACK_ID_ATTRIB from ayon_ftrack.pipeline import plugin -try: - from openpype.client import get_asset_name_identifier -except ImportError: - get_asset_name_identifier = None class CollectFtrackApi(plugin.FtrackPublishContextPlugin): @@ -33,7 +29,7 @@ def process(self, context): # Collect task project_name = context.data["projectName"] - folder_path = context.data["asset"] + folder_path = context.data["folderPath"] task_name = context.data["task"] # Find project 
entity @@ -56,7 +52,7 @@ def process(self, context): context_ftrack_entity = None if folder_path: - # Find asset entity + # Find folder entity entities_by_path = self.find_ftrack_entities( session, project_entity, [folder_path] ) @@ -67,13 +63,13 @@ def process(self, context): " in Ftrack project \"{}\"." ).format(folder_path, project_name)) - self.log.debug("Asset found: {}".format(context_ftrack_entity)) + self.log.debug("Folder found: {}".format(context_ftrack_entity)) task_entity = None # Find task entity if task is set if not context_ftrack_entity: self.log.warning( - "Asset entity is not set. Skipping query of task entity." + "Folder entity is not set. Skipping query of task entity." ) elif not task_name: self.log.warning("Task name is not set.") @@ -122,7 +118,7 @@ def per_instance_process( self.log.debug( "Checking entities of instance \"{}\"".format(str(instance)) ) - instance_folder_path = instance.data.get("asset") + instance_folder_path = instance.data.get("folderPath") instance_task_name = instance.data.get("task") folder_path = None @@ -140,7 +136,7 @@ def per_instance_process( ): self.log.debug(( "Instance's context is same as in publish context." - " Asset: {} | Task: {}" + " Folder: {} | Task: {}" ).format(context_folder_path, context_task_name)) instance.data["ftrackEntity"] = context_ftrack_entity instance.data["ftrackTask"] = context_task_entity @@ -164,7 +160,7 @@ def per_instance_process( elif instance_folder_path: if instance_folder_path == context_folder_path: self.log.debug(( - "Instance's context asset is same as in publish" + "Instance's context folder is same as in publish" " context. 
Folder: {}" ).format(context_folder_path)) instance.data["ftrackEntity"] = context_ftrack_entity @@ -223,23 +219,8 @@ def per_instance_process( def find_ftrack_entities(self, session, project_entity, folder_paths): output = {path: None for path in folder_paths} folder_paths_s = set(output.keys()) - # Folder paths are not yet used as unique identifier if - # 'get_asset_name_identifier' is 'None' so we can query only by name - if get_asset_name_identifier is None: - folder_paths_s.discard(None) - joined_paths = ",".join([ - '"{}"'.format(p) for p in folder_paths_s - ]) - entities = session.query( - ( - "TypedContext where project_id is \"{}\" and name in ({})" - ).format(project_entity["id"], joined_paths) - ).all() - for entity in entities: - output[entity["name"]] = entity - return output - # We can't use 'assetEntity' and folders must be queried because + # We can't use 'folderEntity' and folders must be queried because # we must be assured that 'ownAttrib' is used to avoid collisions # because of hierarchical values. 
folders = ayon_api.get_folders( diff --git a/client/ayon_ftrack/plugins/publish/collect_ftrack_family.py b/client/ayon_ftrack/plugins/publish/collect_ftrack_family.py index acf0d6fc..e4a589b5 100644 --- a/client/ayon_ftrack/plugins/publish/collect_ftrack_family.py +++ b/client/ayon_ftrack/plugins/publish/collect_ftrack_family.py @@ -7,7 +7,7 @@ """ import pyblish.api -from openpype.lib import filter_profiles +from ayon_core.lib import filter_profiles from ayon_ftrack.pipeline import plugin @@ -36,12 +36,12 @@ def process(self, instance): return host_name = instance.context.data["hostName"] - family = instance.data["family"] + product_type = instance.data["productType"] task_name = instance.data.get("task") filtering_criteria = { "host_names": host_name, - "product_types": family, + "product_types": product_type, "task_names": task_name } profile = filter_profiles( @@ -57,7 +57,7 @@ def process(self, instance): add_ftrack_family = profile["add_ftrack_family"] additional_filters = profile.get("advanced_filtering") if additional_filters: - families_set = set(families) | {family} + families_set = set(families) | {product_type} self.log.info( "'{}' families used for additional filtering".format( families_set)) @@ -74,7 +74,7 @@ def process(self, instance): families.append("ftrack") self.log.debug("{} 'ftrack' family for instance with '{}'".format( - result_str, family + result_str, product_type )) def _get_add_ftrack_f_from_addit_filters( diff --git a/client/ayon_ftrack/plugins/publish/collect_local_ftrack_creds.py b/client/ayon_ftrack/plugins/publish/collect_local_ftrack_creds.py index eeaa9d41..ae7a2d7b 100644 --- a/client/ayon_ftrack/plugins/publish/collect_local_ftrack_creds.py +++ b/client/ayon_ftrack/plugins/publish/collect_local_ftrack_creds.py @@ -20,7 +20,7 @@ def process(self, context): and os.getenv("FTRACK_SERVER") ): return - addon = context.data["openPypeModules"]["ftrack"] + addon = context.data["ayonAddonsManager"].get("ftrack") if addon.enabled: creds = 
addon.get_credentials() username, api_key = creds diff --git a/client/ayon_ftrack/plugins/publish/collect_webpublisher_credentials.py b/client/ayon_ftrack/plugins/publish/collect_webpublisher_credentials.py index faffd579..5e422adf 100644 --- a/client/ayon_ftrack/plugins/publish/collect_webpublisher_credentials.py +++ b/client/ayon_ftrack/plugins/publish/collect_webpublisher_credentials.py @@ -17,7 +17,7 @@ import pyblish.api import ayon_api -from openpype.pipeline import KnownPublishError +from ayon_core.pipeline import KnownPublishError from ayon_ftrack.pipeline import plugin diff --git a/client/ayon_ftrack/plugins/publish/integrate_ftrack_api.py b/client/ayon_ftrack/plugins/publish/integrate_ftrack_api.py index 4daa48b9..3776ce21 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_ftrack_api.py +++ b/client/ayon_ftrack/plugins/publish/integrate_ftrack_api.py @@ -384,6 +384,11 @@ def _ensure_asset_version_exists( session.commit() else: + # Convert '0' version to string `"0"` + # - ftrack handles `0` as empty value + if version == 0: + version = "0" + new_asset_version_data = { "version": version, "asset_id": asset_id diff --git a/client/ayon_ftrack/plugins/publish/integrate_ftrack_description.py b/client/ayon_ftrack/plugins/publish/integrate_ftrack_description.py index d10e382c..1df6738b 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_ftrack_description.py +++ b/client/ayon_ftrack/plugins/publish/integrate_ftrack_description.py @@ -10,7 +10,7 @@ import six import pyblish.api -from openpype.lib import StringTemplate +from ayon_core.lib import StringTemplate from ayon_ftrack.pipeline import plugin @@ -18,7 +18,7 @@ class IntegrateFtrackDescription(plugin.FtrackPublishInstancePlugin): """Add description to AssetVersions in Ftrack.""" - # Must be after integrate asset new + # Must be after IntegrateAsset plugin in ayon_core order = pyblish.api.IntegratorOrder + 0.4999 label = "Integrate Ftrack description" families = ["ftrack"] diff --git 
a/client/ayon_ftrack/plugins/publish/integrate_ftrack_farm_status.py b/client/ayon_ftrack/plugins/publish/integrate_ftrack_farm_status.py index 704400d9..c7fc646f 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_ftrack_farm_status.py +++ b/client/ayon_ftrack/plugins/publish/integrate_ftrack_farm_status.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.lib import filter_profiles +from ayon_core.lib import filter_profiles from ayon_ftrack.pipeline import plugin @@ -37,8 +37,8 @@ def filter_instances(self, context): # Skip disabled instances if instance.data.get("publish") is False: continue - subset_name = instance.data["subset"] - msg_start = "Skipping instance {}.".format(subset_name) + product_name = instance.data["productName"] + msg_start = "Skipping instance {}.".format(product_name) if not instance.data.get("farm"): self.log.debug( "{} Won't be rendered on farm.".format(msg_start) @@ -58,8 +58,8 @@ def filter_instances(self, context): def get_instances_with_statuse_names(self, context, instances): instances_with_status_names = [] for instance in instances: - family = instance.data["family"] - subset_name = instance.data["subset"] + product_type = instance.data["productType"] + product_name = instance.data["productName"] task_entity = instance.data["ftrackTask"] host_name = context.data["hostName"] task_name = task_entity["name"] @@ -70,8 +70,8 @@ def get_instances_with_statuse_names(self, context, instances): "host_names": host_name, "task_types": task_type, "task_names": task_name, - "product_types": family, - "product_names": subset_name, + "product_types": product_type, + "product_names": product_name, }, logger=self.log ) diff --git a/client/ayon_ftrack/plugins/publish/integrate_ftrack_instances.py b/client/ayon_ftrack/plugins/publish/integrate_ftrack_instances.py index 3e2a3c12..3e9e638f 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_ftrack_instances.py +++ b/client/ayon_ftrack/plugins/publish/integrate_ftrack_instances.py @@ -1,18 
+1,24 @@ import os import json import copy + import pyblish.api -from openpype.pipeline.publish import get_publish_repre_path -from openpype.lib.openpype_version import get_openpype_version -from openpype.lib.transcoding import ( +from ayon_core.pipeline.publish import get_publish_repre_path +from ayon_core.lib.ayon_info import get_ayon_launcher_version +from ayon_core.lib.transcoding import ( get_ffprobe_streams, convert_ffprobe_fps_to_float, ) -from openpype.lib.profiles_filtering import filter_profiles -from openpype.lib.transcoding import VIDEO_EXTENSIONS +from ayon_core.lib.profiles_filtering import filter_profiles +from ayon_core.lib.transcoding import VIDEO_EXTENSIONS, IMAGE_EXTENSIONS +from ayon_ftrack import __version__ from ayon_ftrack.pipeline import plugin +from ayon_ftrack.common.constants import ( + CUST_ATTR_KEY_SERVER_ID, + CUST_ATTR_KEY_SERVER_PATH, +) class IntegrateFtrackInstance(plugin.FtrackPublishInstancePlugin): @@ -26,7 +32,8 @@ class IntegrateFtrackInstance(plugin.FtrackPublishInstancePlugin): families = ["ftrack"] metadata_keys_to_label = { - "openpype_version": "OpenPype version", + "ayon_ftrack_version": "AYON ftrack version", + "ayon_launcher_version": "AYON launcher version", "frame_start": "Frame start", "frame_end": "Frame end", "duration": "Duration", @@ -79,17 +86,15 @@ def process(self, instance): version_number = int(instance_version) - family = instance.data["family"] + product_type = instance.data["productType"] # Perform case-insensitive family mapping - family_low = family.lower() + product_type_low = product_type.lower() asset_type = instance.data.get("ftrackFamily") if not asset_type: for item in self.product_type_mapping: - map_family = item["name"] - map_value = item["asset_type"] - if map_family.lower() == family_low: - asset_type = map_value + if item["name"].lower() == product_type_low: + asset_type = item["asset_type"] break if not asset_type: @@ -97,23 +102,37 @@ def process(self, instance): self.log.debug( "Family: 
{}\nMapping: {}".format( - family_low, self.product_type_mapping) + product_type_low, self.product_type_mapping) ) status_name = self._get_asset_version_status_name(instance) # Base of component item data # - create a copy of this object when want to use it + version_entity = instance.data.get("versionEntity") + av_custom_attributes = {} + if version_entity: + version_path = "/".join([ + instance.data["folderPath"], + instance.data["productName"], + "v{:0>3}".format(version_entity["version"]) + ]) + av_custom_attributes.update({ + CUST_ATTR_KEY_SERVER_ID: version_entity["id"], + CUST_ATTR_KEY_SERVER_PATH: version_path, + }) + base_component_item = { "assettype_data": { "short": asset_type, }, "asset_data": { - "name": instance.data["subset"], + "name": instance.data["productName"], }, "assetversion_data": { "version": version_number, "comment": instance.context.data.get("comment") or "", - "status_name": status_name + "status_name": status_name, + "custom_attributes": av_custom_attributes }, "component_overwrite": False, # This can be change optionally @@ -140,11 +159,12 @@ def process(self, instance): self.log.debug("Representation {}".format(repre)) # include only thumbnail representations + repre_path = get_publish_repre_path(instance, repre, False) if repre.get("thumbnail") or "thumbnail" in repre_tags: thumbnail_representations.append(repre) # include only review representations - elif "ftrackreview" in repre_tags: + elif "ftrackreview" in repre_tags and repre_path: review_representations.append(repre) if self._is_repre_video(repre): has_movie_review = True @@ -185,6 +205,7 @@ def process(self, instance): thumbnail_data_items = [] # Create thumbnail components + thumbnail_item = None for repre in thumbnail_representations: # get repre path from representation # and return published_path if available @@ -240,6 +261,13 @@ def process(self, instance): # Add item to component list thumbnail_data_items.append(current_item_data) + # Filter out image reviews if there 
is a movie review + review_representations = [ + repre + for repre in review_representations + if not has_movie_review or self._is_repre_video(repre) + ] + # Create review components # Change asset name of each new component for review multiple_reviewable = len(review_representations) > 1 @@ -250,14 +278,6 @@ def process(self, instance): "Movie repre has priority from {}".format(repre) ) continue - - repre_path = get_publish_repre_path(instance, repre, False) - if not repre_path: - self.log.warning( - "Published path is not set and source was removed." - ) - continue - # Create copy of base comp item and append it review_item = copy.deepcopy(base_component_item) @@ -304,6 +324,7 @@ def process(self, instance): if sync_thumbnail_item_src: component_list.append(copy.deepcopy(sync_thumbnail_item_src)) + repre_path = get_publish_repre_path(instance, repre, False) # add metadata to review component if self._is_repre_video(repre): component_name = "ftrackreview-mp4" @@ -347,6 +368,9 @@ def process(self, instance): ) component_list.append(origin_name_component) + if not review_representations and thumbnail_item: + component_list.append(thumbnail_item) + # Add others representations as component for repre in other_representations: published_path = get_publish_repre_path(instance, repre, True) @@ -514,7 +538,7 @@ def _get_asset_version_status_name(self, instance): anatomy_data = instance.data["anatomyData"] task_type = anatomy_data.get("task", {}).get("type") filtering_criteria = { - "product_types": instance.data["family"], + "product_types": instance.data["productType"], "host_names": instance.context.data["hostName"], "task_types": task_type } @@ -530,19 +554,43 @@ def _get_asset_version_status_name(self, instance): def _prepare_component_metadata( self, instance, repre, component_path, is_review=None ): + """Return representation file metadata, like width, height, fps. 
+ + This will only return any data for file formats matching a known + video or image extension and may pass with only a warning if it + was unable to retrieve the metadata from the image of video file. + + Args: + instance (pyblish.api.Instance): Pyblish instance. + repre (dict[str, Any]): Representation. + component_path (str): Path to a representation file. + is_review (Optional[bool]): Component is a review component. + + Returns: + dict[str, Any]: Component metadata. + """ + if self._is_repre_video(repre): return self._prepare_video_component_metadata( instance, repre, component_path, is_review ) - return self._prepare_image_component_metadata(repre, component_path) + if self._is_repre_image(repre): + return self._prepare_image_component_metadata( + repre, component_path + ) + return {} def _prepare_video_component_metadata( self, instance, repre, component_path, is_review=None ): metadata = {} - if "openpype_version" in self.additional_metadata_keys: - label = self.metadata_keys_to_label["openpype_version"] - metadata[label] = get_openpype_version() + for key, value in ( + ("ayon_ftrack_version", __version__), + ("ayon_launcher_version", get_ayon_launcher_version()), + ): + if key in self.additional_metadata_keys: + label = self.metadata_keys_to_label[key] + metadata[label] = value extension = os.path.splitext(component_path)[-1] streams = [] @@ -559,10 +607,10 @@ def _prepare_video_component_metadata( for stream in streams if stream["codec_type"] == "video" ] - # Skip if there are not video streams + # Skip if there are no video streams # - exr is special case which can have issues with reading through - # ffmpegh but we want to set fps for it - if not video_streams and extension not in [".exr"]: + # ffmpeg, but we want to set fps for it + if not video_streams and extension != ".exr": return metadata stream_width = None @@ -702,3 +750,7 @@ def _prepare_image_component_metadata(self, repre, component_path): def _is_repre_video(self, repre): repre_ext = 
".{}".format(repre["ext"]) return repre_ext in VIDEO_EXTENSIONS + + def _is_repre_image(self, repre): + repre_ext = ".{}".format(repre["ext"]) + return repre_ext in IMAGE_EXTENSIONS diff --git a/client/ayon_ftrack/plugins/publish/integrate_ftrack_note.py b/client/ayon_ftrack/plugins/publish/integrate_ftrack_note.py index 7a1c86a5..7584fc4c 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_ftrack_note.py +++ b/client/ayon_ftrack/plugins/publish/integrate_ftrack_note.py @@ -13,7 +13,7 @@ import six import pyblish.api -from openpype.lib import StringTemplate +from ayon_core.lib import StringTemplate from ayon_ftrack.pipeline import plugin @@ -21,7 +21,7 @@ class IntegrateFtrackNote(plugin.FtrackPublishInstancePlugin): """Create comments in Ftrack.""" - # Must be after integrate asset new + # Must be after IntegrateAsset plugin in ayon_core order = pyblish.api.IntegratorOrder + 0.4999 label = "Integrate Ftrack note" families = ["ftrack"] @@ -31,7 +31,7 @@ class IntegrateFtrackNote(plugin.FtrackPublishInstancePlugin): # - Allows only `intent` and `comment` keys note_template = None # Backwards compatibility - note_with_intent_template = "{intent}: {comment}" + note_with_intent_template = "{comment}" # - note label must exist in Ftrack note_labels = [] diff --git a/client/ayon_ftrack/plugins/publish/integrate_ftrack_status.py b/client/ayon_ftrack/plugins/publish/integrate_ftrack_status.py index 0c72bed6..6c850db9 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_ftrack_status.py +++ b/client/ayon_ftrack/plugins/publish/integrate_ftrack_status.py @@ -1,7 +1,7 @@ import copy import pyblish.api -from openpype.lib import filter_profiles +from ayon_core.lib import filter_profiles from ayon_ftrack.common import create_chunks from ayon_ftrack.pipeline import plugin @@ -117,8 +117,8 @@ def get_status_profiles(self): "host_names": ["nuke"], "task_types": ["Compositing"], "task_names": ["Comp"], - "families": ["render"], - "subset_names": ["renderComp"], + 
"product_types": ["render"], + "product_names": ["renderComp"], "status_name": "Rendering", } @@ -151,8 +151,8 @@ def get_profile_filter_data(self, context, instance): "host_names": context.data["hostName"], "task_types": task_entity["type"]["name"], "task_names": task_entity["name"], - "families": instance.data["family"], - "subset_names": instance.data["subset"], + "product_types": instance.data["productType"], + "product_names": instance.data["productName"], } def is_valid_instance(self, context, instance): @@ -181,8 +181,8 @@ def is_valid_instance(self, context, instance): task_entity = instance.data.get("ftrackTask") if not task_entity: self.log.debug( - "Skipping instance Does not have filled task".format( - instance.data["subset"])) + "Skipping instance {}. Does not have filled task".format( + instance.data["productName"])) return False task_id = task_entity["id"] @@ -247,7 +247,7 @@ class IntegrateFtrackFarmStatus(IntegrateFtrackStatusBase): def is_valid_instance(self, context, instance): if not instance.data.get("farm"): self.log.debug("{} Won't be rendered on farm.".format( - instance.data["subset"] + instance.data["productName"] )) return False return super(IntegrateFtrackFarmStatus, self).is_valid_instance( @@ -256,9 +256,6 @@ def is_valid_instance(self, context, instance): def get_status_profiles(self): if self.status_profiles is None: profiles = copy.deepcopy(self.farm_status_profiles) - for profile in profiles: - profile["host_names"] = profile.pop("hosts") - profile["subset_names"] = profile.pop("subsets") self.status_profiles = profiles return self.status_profiles @@ -289,7 +286,7 @@ class IntegrateFtrackLocalStatus(IntegrateFtrackStatusBase): def is_valid_instance(self, context, instance): if instance.data.get("farm"): self.log.debug("{} Will be rendered on farm.".format( - instance.data["subset"] + instance.data["productName"] )) return False return super(IntegrateFtrackLocalStatus, self).is_valid_instance( diff --git 
a/client/ayon_ftrack/plugins/publish/integrate_hierarchy_ftrack.py b/client/ayon_ftrack/plugins/publish/integrate_hierarchy_ftrack.py index cf77683f..2bb0e5cb 100644 --- a/client/ayon_ftrack/plugins/publish/integrate_hierarchy_ftrack.py +++ b/client/ayon_ftrack/plugins/publish/integrate_hierarchy_ftrack.py @@ -4,18 +4,13 @@ import six import pyblish.api +import ayon_api -from openpype.client import get_asset_by_id -from openpype.lib import filter_profiles -from openpype.pipeline import KnownPublishError +from ayon_core.lib import filter_profiles +from ayon_core.pipeline import KnownPublishError from ayon_ftrack.common import get_ayon_attr_configs from ayon_ftrack.pipeline import plugin -try: - from openpype.client import get_asset_name_identifier -except ImportError: - get_asset_name_identifier = None - class IntegrateHierarchyToFtrack(plugin.FtrackPublishContextPlugin): """ @@ -23,17 +18,20 @@ class IntegrateHierarchyToFtrack(plugin.FtrackPublishContextPlugin): Example of entry data: { "ProjectXS": { - "entity_type": "Project", - "custom_attributes": { - "fps": 24,... + "entity_type": "project", + "attributes": { + "fps": 24, + ... }, "tasks": [ "Compositing", - "Lighting",... *task must exist as task type in project schema* + "Lighting", + ... *task must exist as task type in project schema* ], - "childs": { + "children": { "sq01": { - "entity_type": "Sequence", + "entity_type": "folder", + "folder_type": "Sequence", ... 
} } @@ -63,23 +61,20 @@ def process(self, context): session = context.data["ftrackSession"] project_name = context.data["projectName"] - project = session.query( + ft_project = session.query( 'select id, full_name from Project where full_name is "{}"'.format( project_name ) ).first() - if not project: + if not ft_project: raise KnownPublishError( "Project \"{}\" was not found on ftrack.".format(project_name) ) - self.session = session - self.ft_project = project - self.task_types = self.get_all_task_types(project) - self.task_statuses = self.get_task_statuses(project) - # import ftrack hierarchy - self.import_to_ftrack(context, project_name, hierarchy_context) + self.import_to_ftrack( + session, ft_project, context, project_name, hierarchy_context + ) def query_ftrack_entitites(self, session, ft_project): project_id = ft_project["id"] @@ -145,7 +140,7 @@ def find_matching_ftrack_entities( entity_data["ft_entity"] = entity matching_ftrack_entities.append(entity) - hierarchy_children = entity_data.get("childs") + hierarchy_children = entity_data.get("children") if not hierarchy_children: continue @@ -205,27 +200,32 @@ def query_custom_attribute_values(self, session, entities, hier_attrs): return output - def import_to_ftrack(self, context, project_name, hierarchy_context): + def import_to_ftrack( + self, session, ft_project, context, project_name, hierarchy_context + ): + ft_task_types = self.get_all_task_types(ft_project) + ft_task_statuses = self.get_task_statuses(ft_project) + # Prequery hiearchical custom attributes - hier_attrs = get_ayon_attr_configs(self.session)[1] + hier_attrs = get_ayon_attr_configs(session)[1] hier_attr_by_key = { attr["key"]: attr for attr in hier_attrs } # Query user entity (for comments) - user = self.session.query( - "User where username is \"{}\"".format(self.session.api_user) + user = session.query( + "User where username is \"{}\"".format(session.api_user) ).first() if not user: self.log.warning( "Was not able to query current User 
{}".format( - self.session.api_user + session.api_user ) ) # Query ftrack hierarchy with parenting ftrack_hierarchy = self.query_ftrack_entitites( - self.session, self.ft_project) + session, ft_project) # Fill ftrack entities to hierarchy context # - there is no need to query entities again @@ -233,11 +233,25 @@ def import_to_ftrack(self, context, project_name, hierarchy_context): hierarchy_context, ftrack_hierarchy) # Query custom attribute values of each entity custom_attr_values_by_id = self.query_custom_attribute_values( - self.session, matching_entities, hier_attrs) + session, matching_entities, hier_attrs) # Get ftrack api module (as they are different per python version) ftrack_api = context.data["ftrackPythonModule"] + self.log.debug( + "Available task types in ftrack: %s", + str(ft_task_types) + ) + self.log.debug( + "Available task statuses in ftrack: %s", + str(ft_task_statuses) + ) + + object_types_by_lower_name = { + obj_type["name"].lower(): obj_type + for obj_type in ft_project["project_schema"]["object_types"] + } + # Use queue of hierarchy items to process import_queue = collections.deque() for entity_name, entity_data in hierarchy_context.items(): @@ -258,38 +272,44 @@ def import_to_ftrack(self, context, project_name, hierarchy_context): entity = entity_data.get("ft_entity") if entity is None and entity_type.lower() == "project": - raise AssertionError( + raise KnownPublishError( "Collected items are not in right order!" ) # Create entity if not exists if entity is None: - entity = self.session.create(entity_type, { + # Sanitize against case sensitive folder types. 
+ folder_type_low = entity_data["folder_type"].lower() + object_type = object_types_by_lower_name[folder_type_low] + entity_type = object_type["name"].replace(" ", "") + + entity = session.create(entity_type, { "name": entity_name, "parent": parent }) entity_data["ft_entity"] = entity - if get_asset_name_identifier is None: - entity_path = entity["name"] + if entity_type.lower() == "project": + entity_path = "" else: entity_path = "{}/{}".format(parent_path, entity_name) # CUSTOM ATTRIBUTES - custom_attributes = entity_data.get("custom_attributes", {}) + attributes = entity_data.get("attributes", {}) instances = [] for instance in context: - instance_asset_name = instance.data.get("asset") + instance_folder_path = instance.data.get("folderPath") + if ( - instance_asset_name - and instance_asset_name.lower() == entity_path.lower() + instance_folder_path + and instance_folder_path.lower() == entity_path.lower() ): instances.append(instance) for instance in instances: instance.data["ftrackEntity"] = entity - for key, cust_attr_value in custom_attributes.items(): + for key, cust_attr_value in attributes.items(): if cust_attr_value is None: continue @@ -337,15 +357,15 @@ def import_to_ftrack(self, context, project_name, hierarchy_context): ) if op is not None: - self.session.recorded_operations.push(op) + session.recorded_operations.push(op) - if self.session.recorded_operations: + if session.recorded_operations: try: - self.session.commit() + session.commit() except Exception: tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() + session.rollback() + session._configure_locations() six.reraise(tp, value, tb) # TASKS @@ -376,22 +396,25 @@ def import_to_ftrack(self, context, project_name, hierarchy_context): for task_name, task_type in tasks_to_create: task_entity = self.create_task( + session, task_name, task_type, entity, + ft_task_types, + ft_task_statuses, ftrack_status_by_task_id ) for instance in 
instances_by_task_name[task_name.lower()]: instance.data["ftrackTask"] = task_entity # Incoming links. - self.create_links(project_name, entity_data, entity) + self.create_links(session, project_name, entity_data, entity) try: - self.session.commit() + session.commit() except Exception: tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() + session.rollback() + session._configure_locations() six.reraise(tp, value, tb) # Create notes. @@ -401,15 +424,15 @@ def import_to_ftrack(self, context, project_name, hierarchy_context): entity.create_note(comment, user) try: - self.session.commit() + session.commit() except Exception: tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() + session.rollback() + session._configure_locations() six.reraise(tp, value, tb) # Import children. - children = entity_data.get("childs") + children = entity_data.get("children") if not children: continue @@ -418,34 +441,51 @@ def import_to_ftrack(self, context, project_name, hierarchy_context): (entity_name, entity_data, entity, entity_path) ) - def create_links(self, project_name, entity_data, entity): + def create_links(self, session, project_name, entity_data, entity): + # WARNING Don't know how does this work? + # The logic looks only for 'AssetBuild' entities. Not sure where + # value of 'inputs' on entity data comes from. + # Clear existing links. for link in entity.get("incoming_links", []): - self.session.delete(link) + session.delete(link) try: - self.session.commit() + session.commit() except Exception: tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() + session.rollback() + session._configure_locations() six.reraise(tp, value, tb) # Create new links. 
- for asset_id in entity_data.get("inputs", []): - asset_doc = get_asset_by_id(project_name, asset_id) + input_folder_ids = { + folder_id + for folder_id in entity_data.get("inputs", []) + } + folder_entities = {} + if input_folder_ids: + folder_entities = { + folder_entity["id"]: folder_entity + for folder_entity in ayon_api.get_folders( + project_name, folder_ids=input_folder_ids + ) + } + + for folder_id in input_folder_ids: + folder_entity = folder_entities.get(folder_id) ftrack_id = None - if asset_doc: - ftrack_id = asset_doc["data"].get("ftrackId") + if folder_entity: + ftrack_id = folder_entity["attrib"].get("ftrackId") if not ftrack_id: continue - assetbuild = self.session.get("AssetBuild", ftrack_id) + assetbuild = session.get("AssetBuild", ftrack_id) self.log.debug( "Creating link from {0} to {1}".format( assetbuild["name"], entity["name"] ) ) - self.session.create( + session.create( "TypedContextLink", {"from": assetbuild, "to": entity} ) @@ -468,7 +508,16 @@ def get_task_statuses(self, project_entity): for status in task_workflow_statuses } - def create_task(self, name, task_type, parent, ftrack_status_by_task_id): + def create_task( + self, + session, + name, + task_type, + parent, + ft_task_types, + ft_task_statuses, + ftrack_status_by_task_id + ): filter_data = { "task_names": name, "task_types": task_type @@ -478,11 +527,10 @@ def create_task(self, name, task_type, parent, ftrack_status_by_task_id): filter_data ) status_id = None - status_name = None if profile: status_name = profile["status_name"] status_name_low = status_name.lower() - for _status_id, status in self.task_statuses.items(): + for _status_id, status in ft_task_statuses.items(): if status["name"].lower() == status_name_low: status_id = _status_id status_name = status["name"] @@ -493,23 +541,22 @@ def create_task(self, name, task_type, parent, ftrack_status_by_task_id): "Task status \"{}\" was not found".format(status_name) ) - task = self.session.create("Task", { + task = 
session.create("Task", { "name": name, "parent": parent }) # TODO not secured!!! - check if task_type exists - self.log.info(task_type) - self.log.info(self.task_types) - task["type"] = self.task_types[task_type] + self.log.debug(task_type) + task["type"] = ft_task_types[task_type] if status_id is not None: task["status_id"] = status_id try: - self.session.commit() + session.commit() except Exception: tp, value, tb = sys.exc_info() - self.session.rollback() - self.session._configure_locations() + session.rollback() + session._configure_locations() six.reraise(tp, value, tb) if status_id is not None: @@ -546,7 +593,7 @@ def _get_active_hierarchy(self, context): while hierarchy_queue: (name, item, path, parent_item) = hierarchy_queue.popleft() - children = item.get("childs") + children = item.get("children") if children: for child_name, child_item in children.items(): child_path = "/".join([path, child_name]) diff --git a/client/ayon_ftrack/plugins/publish/validate_custom_ftrack_attributes.py b/client/ayon_ftrack/plugins/publish/validate_custom_ftrack_attributes.py index e19a84bd..da8460b2 100644 --- a/client/ayon_ftrack/plugins/publish/validate_custom_ftrack_attributes.py +++ b/client/ayon_ftrack/plugins/publish/validate_custom_ftrack_attributes.py @@ -1,5 +1,5 @@ import pyblish.api -from openpype.pipeline.publish import ValidateContentsOrder +from ayon_core.pipeline.publish import ValidateContentsOrder from ayon_ftrack.pipeline import plugin @@ -8,10 +8,6 @@ class ValidateFtrackAttributes(plugin.FtrackPublishInstancePlugin): """ This will validate attributes in ftrack against data in scene. 
- Attributes to be validated are specified in: - - `$OPENPYPE_CONFIG/presets//ftrack_attributes.json` - This is array (list) of checks in format: [ [, , ] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/.gitignore b/client/ayon_ftrack/python2_vendor/ftrack-python-api/.gitignore deleted file mode 100644 index be621609..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/.gitignore +++ /dev/null @@ -1,42 +0,0 @@ -# General -*.py[cod] - -# Packages -*.egg -*.egg-info -dist -build -.eggs/ -eggs -parts -bin -var -sdist -develop-eggs -.installed.cfg -lib -lib64 -__pycache__ - -# Installer logs -pip-log.txt - -# Unit test / coverage reports -.coverage -.tox - -# Caches -Thumbs.db - -# Development -.project -.pydevproject -.settings -.idea/ -.history/ -.vscode/ - -# Testing -.cache -test-reports/* -.pytest_cache/* \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/LICENSE.python b/client/ayon_ftrack/python2_vendor/ftrack-python-api/LICENSE.python deleted file mode 100644 index 9dc010d8..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/LICENSE.python +++ /dev/null @@ -1,254 +0,0 @@ -A. HISTORY OF THE SOFTWARE -========================== - -Python was created in the early 1990s by Guido van Rossum at Stichting -Mathematisch Centrum (CWI, see http://www.cwi.nl) in the Netherlands -as a successor of a language called ABC. Guido remains Python's -principal author, although it includes many contributions from others. - -In 1995, Guido continued his work on Python at the Corporation for -National Research Initiatives (CNRI, see http://www.cnri.reston.va.us) -in Reston, Virginia where he released several versions of the -software. - -In May 2000, Guido and the Python core development team moved to -BeOpen.com to form the BeOpen PythonLabs team. In October of the same -year, the PythonLabs team moved to Digital Creations, which became -Zope Corporation. 
In 2001, the Python Software Foundation (PSF, see -https://www.python.org/psf/) was formed, a non-profit organization -created specifically to own Python-related Intellectual Property. -Zope Corporation was a sponsoring member of the PSF. - -All Python releases are Open Source (see http://www.opensource.org for -the Open Source Definition). Historically, most, but not all, Python -releases have also been GPL-compatible; the table below summarizes -the various releases. - - Release Derived Year Owner GPL- - from compatible? (1) - - 0.9.0 thru 1.2 1991-1995 CWI yes - 1.3 thru 1.5.2 1.2 1995-1999 CNRI yes - 1.6 1.5.2 2000 CNRI no - 2.0 1.6 2000 BeOpen.com no - 1.6.1 1.6 2001 CNRI yes (2) - 2.1 2.0+1.6.1 2001 PSF no - 2.0.1 2.0+1.6.1 2001 PSF yes - 2.1.1 2.1+2.0.1 2001 PSF yes - 2.1.2 2.1.1 2002 PSF yes - 2.1.3 2.1.2 2002 PSF yes - 2.2 and above 2.1.1 2001-now PSF yes - -Footnotes: - -(1) GPL-compatible doesn't mean that we're distributing Python under - the GPL. All Python licenses, unlike the GPL, let you distribute - a modified version without making your changes open source. The - GPL-compatible licenses make it possible to combine Python with - other software that is released under the GPL; the others don't. - -(2) According to Richard Stallman, 1.6.1 is not GPL-compatible, - because its license has a choice of law clause. According to - CNRI, however, Stallman's lawyer has told CNRI's lawyer that 1.6.1 - is "not incompatible" with the GPL. - -Thanks to the many outside volunteers who have worked under Guido's -direction to make these releases possible. - - -B. TERMS AND CONDITIONS FOR ACCESSING OR OTHERWISE USING PYTHON -=============================================================== - -PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 --------------------------------------------- - -1. 
This LICENSE AGREEMENT is between the Python Software Foundation -("PSF"), and the Individual or Organization ("Licensee") accessing and -otherwise using this software ("Python") in source or binary form and -its associated documentation. - -2. Subject to the terms and conditions of this License Agreement, PSF hereby -grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, -analyze, test, perform and/or display publicly, prepare derivative works, -distribute, and otherwise use Python alone or in any derivative version, -provided, however, that PSF's License Agreement and PSF's notice of copyright, -i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, -2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019 Python Software Foundation; -All Rights Reserved" are retained in Python alone or in any derivative version -prepared by Licensee. - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python. - -4. PSF is making Python available to Licensee on an "AS IS" -basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. 
Nothing in this License Agreement shall be deemed to create any -relationship of agency, partnership, or joint venture between PSF and -Licensee. This License Agreement does not grant permission to use PSF -trademarks or trade name in a trademark sense to endorse or promote -products or services of Licensee, or any third party. - -8. By copying, installing or otherwise using Python, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -BEOPEN.COM LICENSE AGREEMENT FOR PYTHON 2.0 -------------------------------------------- - -BEOPEN PYTHON OPEN SOURCE LICENSE AGREEMENT VERSION 1 - -1. This LICENSE AGREEMENT is between BeOpen.com ("BeOpen"), having an -office at 160 Saratoga Avenue, Santa Clara, CA 95051, and the -Individual or Organization ("Licensee") accessing and otherwise using -this software in source or binary form and its associated -documentation ("the Software"). - -2. Subject to the terms and conditions of this BeOpen Python License -Agreement, BeOpen hereby grants Licensee a non-exclusive, -royalty-free, world-wide license to reproduce, analyze, test, perform -and/or display publicly, prepare derivative works, distribute, and -otherwise use the Software alone or in any derivative version, -provided, however, that the BeOpen Python License is retained in the -Software, alone or in any derivative version prepared by Licensee. - -3. BeOpen is making the Software available to Licensee on an "AS IS" -basis. BEOPEN MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, BEOPEN MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF THE SOFTWARE WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -4. 
BEOPEN SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF THE -SOFTWARE FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS -AS A RESULT OF USING, MODIFYING OR DISTRIBUTING THE SOFTWARE, OR ANY -DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -5. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -6. This License Agreement shall be governed by and interpreted in all -respects by the law of the State of California, excluding conflict of -law provisions. Nothing in this License Agreement shall be deemed to -create any relationship of agency, partnership, or joint venture -between BeOpen and Licensee. This License Agreement does not grant -permission to use BeOpen trademarks or trade names in a trademark -sense to endorse or promote products or services of Licensee, or any -third party. As an exception, the "BeOpen Python" logos available at -http://www.pythonlabs.com/logos.html may be used according to the -permissions granted on that web page. - -7. By copying, installing or otherwise using the software, Licensee -agrees to be bound by the terms and conditions of this License -Agreement. - - -CNRI LICENSE AGREEMENT FOR PYTHON 1.6.1 ---------------------------------------- - -1. This LICENSE AGREEMENT is between the Corporation for National -Research Initiatives, having an office at 1895 Preston White Drive, -Reston, VA 20191 ("CNRI"), and the Individual or Organization -("Licensee") accessing and otherwise using Python 1.6.1 software in -source or binary form and its associated documentation. - -2. 
Subject to the terms and conditions of this License Agreement, CNRI -hereby grants Licensee a nonexclusive, royalty-free, world-wide -license to reproduce, analyze, test, perform and/or display publicly, -prepare derivative works, distribute, and otherwise use Python 1.6.1 -alone or in any derivative version, provided, however, that CNRI's -License Agreement and CNRI's notice of copyright, i.e., "Copyright (c) -1995-2001 Corporation for National Research Initiatives; All Rights -Reserved" are retained in Python 1.6.1 alone or in any derivative -version prepared by Licensee. Alternately, in lieu of CNRI's License -Agreement, Licensee may substitute the following text (omitting the -quotes): "Python 1.6.1 is made available subject to the terms and -conditions in CNRI's License Agreement. This Agreement together with -Python 1.6.1 may be located on the Internet using the following -unique, persistent identifier (known as a handle): 1895.22/1013. This -Agreement may also be obtained from a proxy server on the Internet -using the following URL: http://hdl.handle.net/1895.22/1013". - -3. In the event Licensee prepares a derivative work that is based on -or incorporates Python 1.6.1 or any part thereof, and wants to make -the derivative work available to others as provided herein, then -Licensee hereby agrees to include in any such work a brief summary of -the changes made to Python 1.6.1. - -4. CNRI is making Python 1.6.1 available to Licensee on an "AS IS" -basis. CNRI MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR -IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, CNRI MAKES NO AND -DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS -FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON 1.6.1 WILL NOT -INFRINGE ANY THIRD PARTY RIGHTS. - -5. 
CNRI SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON -1.6.1 FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS -A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON 1.6.1, -OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. - -6. This License Agreement will automatically terminate upon a material -breach of its terms and conditions. - -7. This License Agreement shall be governed by the federal -intellectual property law of the United States, including without -limitation the federal copyright law, and, to the extent such -U.S. federal law does not apply, by the law of the Commonwealth of -Virginia, excluding Virginia's conflict of law provisions. -Notwithstanding the foregoing, with regard to derivative works based -on Python 1.6.1 that incorporate non-separable material that was -previously distributed under the GNU General Public License (GPL), the -law of the Commonwealth of Virginia shall govern this License -Agreement only as to issues arising under or with respect to -Paragraphs 4, 5, and 7 of this License Agreement. Nothing in this -License Agreement shall be deemed to create any relationship of -agency, partnership, or joint venture between CNRI and Licensee. This -License Agreement does not grant permission to use CNRI trademarks or -trade name in a trademark sense to endorse or promote products or -services of Licensee, or any third party. - -8. By clicking on the "ACCEPT" button where indicated, or by copying, -installing or otherwise using Python 1.6.1, Licensee agrees to be -bound by the terms and conditions of this License Agreement. - - ACCEPT - - -CWI LICENSE AGREEMENT FOR PYTHON 0.9.0 THROUGH 1.2 --------------------------------------------------- - -Copyright (c) 1991 - 1995, Stichting Mathematisch Centrum Amsterdam, -The Netherlands. All rights reserved. 
- -Permission to use, copy, modify, and distribute this software and its -documentation for any purpose and without fee is hereby granted, -provided that the above copyright notice appear in all copies and that -both that copyright notice and this permission notice appear in -supporting documentation, and that the name of Stichting Mathematisch -Centrum or CWI not be used in advertising or publicity pertaining to -distribution of the software without specific, written prior -permission. - -STICHTING MATHEMATISCH CENTRUM DISCLAIMS ALL WARRANTIES WITH REGARD TO -THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND -FITNESS, IN NO EVENT SHALL STICHTING MATHEMATISCH CENTRUM BE LIABLE -FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT -OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/LICENSE.txt b/client/ayon_ftrack/python2_vendor/ftrack-python-api/LICENSE.txt deleted file mode 100644 index d9a10c0d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/LICENSE.txt +++ /dev/null @@ -1,176 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. 
You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. 
Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/MANIFEST.in b/client/ayon_ftrack/python2_vendor/ftrack-python-api/MANIFEST.in deleted file mode 100644 index 3216ee54..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include LICENSE.txt -include README.rst -recursive-include resource *.py -recursive-include doc *.rst *.conf *.py *.png *.css diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/README.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/README.rst deleted file mode 100644 index 074a35f9..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/README.rst +++ /dev/null @@ -1,34 +0,0 @@ -################# -ftrack Python API -################# - -Python API for ftrack. - -.. important:: - - This is the new Python client for the ftrack API. If you are migrating from - the old client then please read the dedicated `migration guide `_. - -************* -Documentation -************* - -Full documentation, including installation and setup guides, can be found at -http://ftrack-python-api.rtd.ftrack.com/en/stable/ - -********************* -Copyright and license -********************* - -Copyright (c) 2014 ftrack - -Licensed under the Apache License, Version 2.0 (the "License"); you may not use -this work except in compliance with the License. You may obtain a copy of the -License in the LICENSE.txt file, or at: - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software distributed -under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR -CONDITIONS OF ANY KIND, either express or implied. See the License for the -specific language governing permissions and limitations under the License. 
\ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml b/client/ayon_ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml deleted file mode 100644 index 355f00f7..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/bitbucket-pipelines.yml +++ /dev/null @@ -1,24 +0,0 @@ -# Test configuration for bitbucket pipelines. -options: - max-time: 20 -definitions: - services: - ftrack: - image: - name: ftrackdocker/test-server:latest - username: $DOCKER_HUB_USERNAME - password: $DOCKER_HUB_PASSWORD - email: $DOCKER_HUB_EMAIL -pipelines: - default: - - parallel: - - step: - name: run tests against python 2.7.x - image: python:2.7 - caches: - - pip - services: - - ftrack - script: - - bash -c 'while [[ "$(curl -s -o /dev/null -w ''%{http_code}'' $FTRACK_SERVER)" != "200" ]]; do sleep 1; done' - - python setup.py test \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css deleted file mode 100644 index 3456b0c3..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/_static/ftrack.css +++ /dev/null @@ -1,16 +0,0 @@ -@import "css/theme.css"; - -.domain-summary li { - float: left; - min-width: 12em; -} - -.domain-summary ul:before, ul:after { - content: ''; - clear: both; - display:block; -} - -.rst-content table.docutils td:last-child { - white-space: normal; -} diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst deleted file mode 100644 index 4e165b01..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
- :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.base -************************ - -.. automodule:: ftrack_api.accessor.base diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst deleted file mode 100644 index f7d9dddf..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/disk.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.disk -************************ - -.. automodule:: ftrack_api.accessor.disk diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst deleted file mode 100644 index 0adc23fe..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************* -ftrack_api.accessor -******************* - -.. automodule:: ftrack_api.accessor - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst deleted file mode 100644 index 62bd7f41..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/accessor/server.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************ -ftrack_api.accessor.server -************************ - -.. 
automodule:: ftrack_api.accessor.server diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst deleted file mode 100644 index 9fd8994e..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/attribute.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.attribute -******************** - -.. automodule:: ftrack_api.attribute diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst deleted file mode 100644 index cbf9128a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/cache.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.cache -**************** - -.. automodule:: ftrack_api.cache diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst deleted file mode 100644 index 607d574c..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/collection.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************* -ftrack_api.collection -********************* - -.. automodule:: ftrack_api.collection diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst deleted file mode 100644 index 0bc4ce35..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/asset_version.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
- :copyright: Copyright (c) 2015 ftrack - -******************************* -ftrack_api.entity.asset_version -******************************* - -.. automodule:: ftrack_api.entity.asset_version diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst deleted file mode 100644 index f4beedc9..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************** -ftrack_api.entity.base -********************** - -.. automodule:: ftrack_api.entity.base diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst deleted file mode 100644 index c9ce0a0c..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/component.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*************************** -ftrack_api.entity.component -*************************** - -.. automodule:: ftrack_api.entity.component diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst deleted file mode 100644 index 483c1664..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/factory.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -************************* -ftrack_api.entity.factory -************************* - -.. 
automodule:: ftrack_api.entity.factory diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst deleted file mode 100644 index fce68c0e..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.entity -***************** - -.. automodule:: ftrack_api.entity - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst deleted file mode 100644 index 9d22a7c3..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/job.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************* -ftrack_api.entity.job -********************* - -.. automodule:: ftrack_api.entity.job diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst deleted file mode 100644 index 60e006a1..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/location.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************** -ftrack_api.entity.location -************************** - -.. 
automodule:: ftrack_api.entity.location diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst deleted file mode 100644 index 3588e48e..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/note.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************** -ftrack_api.entity.note -********************** - -.. automodule:: ftrack_api.entity.note diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst deleted file mode 100644 index 5777ab0b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/project_schema.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************************** -ftrack_api.entity.project_schema -******************************** - -.. automodule:: ftrack_api.entity.project_schema diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst deleted file mode 100644 index 0014498b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/entity/user.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************** -ftrack_api.entity.user -********************** - -.. 
automodule:: ftrack_api.entity.user diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst deleted file mode 100644 index 2b0ca8d3..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -********************* -ftrack_api.event.base -********************* - -.. automodule:: ftrack_api.event.base diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst deleted file mode 100644 index f5827170..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/expression.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -*************************** -ftrack_api.event.expression -*************************** - -.. automodule:: ftrack_api.event.expression diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst deleted file mode 100644 index 36d7a331..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/hub.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.event.hub -******************** - -.. automodule:: ftrack_api.event.hub diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst deleted file mode 100644 index 0986e8e2..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. 
- :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.event -**************** - -.. automodule:: ftrack_api.event - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst deleted file mode 100644 index 974f3758..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscriber.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -*************************** -ftrack_api.event.subscriber -*************************** - -.. automodule:: ftrack_api.event.subscriber diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst deleted file mode 100644 index 94a20e36..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/event/subscription.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************************** -ftrack_api.event.subscription -***************************** - -.. automodule:: ftrack_api.event.subscription diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst deleted file mode 100644 index 64c3a699..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/exception.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.exception -******************** - -.. 
automodule:: ftrack_api.exception diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst deleted file mode 100644 index 9b8154bd..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/formatter.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.formatter -******************** - -.. automodule:: ftrack_api.formatter diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst deleted file mode 100644 index ea3517ca..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _api_reference: - -************* -API Reference -************* - -ftrack_api -========== - -.. automodule:: ftrack_api - -.. toctree:: - :maxdepth: 1 - :glob: - - */index - * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst deleted file mode 100644 index 8223ee72..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/inspection.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -********************* -ftrack_api.inspection -********************* - -.. automodule:: ftrack_api.inspection diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst deleted file mode 100644 index ecb883d3..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/logging.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
- :copyright: Copyright (c) 2016 ftrack - -****************** -ftrack_api.logging -****************** - -.. automodule:: ftrack_api.logging diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst deleted file mode 100644 index b2dff993..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/operation.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -******************** -ftrack_api.operation -******************** - -.. automodule:: ftrack_api.operation diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst deleted file mode 100644 index a4993d94..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/plugin.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.plugin -***************** - -.. automodule:: ftrack_api.plugin diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst deleted file mode 100644 index acbd8d23..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/query.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -**************** -ftrack_api.query -**************** - -.. 
automodule:: ftrack_api.query diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst deleted file mode 100644 index 09cdad86..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/base.rst +++ /dev/null @@ -1,10 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _api_reference/resource_identifier_transformer.base: - -*********************************************** -ftrack_api.resource_identifier_transformer.base -*********************************************** - -.. automodule:: ftrack_api.resource_identifier_transformer.base diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst deleted file mode 100644 index 755f052c..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/resource_identifier_transformer/index.rst +++ /dev/null @@ -1,16 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _api_reference/resource_identifier_transformer: - -****************************************** -ftrack_api.resource_identifier_transformer -****************************************** - -.. automodule:: ftrack_api.resource_identifier_transformer - -.. toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst deleted file mode 100644 index dcce173d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/session.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. 
- :copyright: Copyright (c) 2014 ftrack - -****************** -ftrack_api.session -****************** - -.. automodule:: ftrack_api.session diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst deleted file mode 100644 index 55a1cc75..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/base.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -************************* -ftrack_api.structure.base -************************* - -.. automodule:: ftrack_api.structure.base diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst deleted file mode 100644 index ade2c7ae..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/id.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*********************** -ftrack_api.structure.id -*********************** - -.. automodule:: ftrack_api.structure.id diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst deleted file mode 100644 index cbd4545c..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/index.rst +++ /dev/null @@ -1,14 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -******************** -ftrack_api.structure -******************** - -.. automodule:: ftrack_api.structure - -.. 
toctree:: - :maxdepth: 1 - :glob: - - * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst deleted file mode 100644 index 403173e2..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/origin.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -*************************** -ftrack_api.structure.origin -*************************** - -.. automodule:: ftrack_api.structure.origin diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst deleted file mode 100644 index 5c0d8802..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/structure/standard.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -***************************** -ftrack_api.structure.standard -***************************** - -.. automodule:: ftrack_api.structure.standard diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst deleted file mode 100644 index 55dc0125..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/api_reference/symbol.rst +++ /dev/null @@ -1,8 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -***************** -ftrack_api.symbol -***************** - -.. automodule:: ftrack_api.symbol diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/caching.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/caching.rst deleted file mode 100644 index bfc5cef4..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/caching.rst +++ /dev/null @@ -1,175 +0,0 @@ -.. 
- :copyright: Copyright (c) 2015 ftrack - - -.. _caching: - -******* -Caching -******* - -The API makes use of caching in order to provide more efficient retrieval of -data by reducing the number of calls to the remote server:: - - # First call to retrieve user performs a request to the server. - user = session.get('User', 'some-user-id') - - # A later call in the same session to retrieve the same user just gets - # the existing instance from the cache without a request to the server. - user = session.get('User', 'some-user-id') - -It also seamlessly merges related data together regardless of how it was -retrieved:: - - >>> timelog = user['timelogs'][0] - >>> with session.auto_populating(False): - >>> print timelog['comment'] - NOT_SET - >>> session.query( - ... 'select comment from Timelog where id is "{0}"' - ... .format(timelog['id']) - ... ).all() - >>> with session.auto_populating(False): - >>> print timelog['comment'] - 'Some comment' - -By default, each :class:`~ftrack_api.session.Session` is configured with a -simple :class:`~ftrack_api.cache.MemoryCache()` and the cache is lost as soon as -the session expires. - -Configuring a session cache -=========================== - -It is possible to configure the cache that a session uses. An example would be a -persistent auto-populated cache that survives between sessions:: - - import os - import ftrack_api.cache - - # Specify where the file based cache should be stored. - cache_path = os.path.join(tempfile.gettempdir(), 'ftrack_session_cache.dbm') - - - # Define a cache maker that returns a file based cache. Note that a - # function is used because the file based cache should use the session's - # encode and decode methods to serialise the entity data to a format that - # can be written to disk (JSON). 
- def cache_maker(session): - '''Return cache to use for *session*.''' - return ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - - # Create the session using the cache maker. - session = ftrack_api.Session(cache=cache_maker) - -.. note:: - - There can be a performance penalty when using a more complex cache setup. - For example, serialising data and also writing and reading from disk can be - relatively slow operations. - -Regardless of the cache specified, the session will always construct a -:class:`~ftrack_api.cache.LayeredCache` with a -:class:`~ftrack_api.cache.MemoryCache` at the top level and then your cache at -the second level. This is to ensure consistency of instances returned by the -session. - -You can check (or even modify) at any time what cache configuration a session is -using by accessing the `cache` attribute on a -:class:`~ftrack_api.session.Session`:: - - >>> print session.cache - - -Writing a new cache interface -============================= - -If you have a custom cache backend you should be able to integrate it into the -system by writing a cache interface that matches the one defined by -:class:`ftrack_api.cache.Cache`. This typically involves a subclass and -overriding the :meth:`~ftrack_api.cache.Cache.get`, -:meth:`~ftrack_api.cache.Cache.set` and :meth:`~ftrack_api.cache.Cache.remove` -methods. - - -Managing what gets cached -========================= - -The cache system is quite flexible when it comes to controlling what should be -cached. - -Consider you have a layered cache where the bottom layer cache should be -persisted between sessions. In this setup you probably don't want the persisted -cache to hold non-persisted values, such as modified entity values or newly -created entities not yet committed to the server. However, you might want the -top level memory cache to hold onto these values. - -Here is one way to set this up. 
First define a new proxy cache that is selective -about what it sets:: - - import ftrack_api.inspection - - - class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that won't cache newly created entities.''' - - def set(self, key, value): - '''Set *value* for *key*.''' - if isinstance(value, ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): - return - - super(SelectiveCache, self).set(key, value) - -Now use this custom cache to wrap the serialised cache in the setup above: - -.. code-block:: python - :emphasize-lines: 3, 9 - - def cache_maker(session): - '''Return cache to use for *session*.''' - return SelectiveCache( - ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - ) - -Now to prevent modified attributes also being persisted, tweak the encode -settings for the file cache: - -.. code-block:: python - :emphasize-lines: 1, 9-12 - - import functools - - - def cache_maker(session): - '''Return cache to use for *session*.''' - return SelectiveCache( - ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=functools.partial( - session.encode, - entity_attribute_strategy='persisted_only' - ), - decode=session.decode - ) - ) - -And use the updated cache maker for your session:: - - session = ftrack_api.Session(cache=cache_maker) - -.. note:: - - For some type of attributes that are computed, long term caching is not - recommended and such values will not be encoded with the `persisted_only` - strategy. 
diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/conf.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/conf.py deleted file mode 100644 index 11544721..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/conf.py +++ /dev/null @@ -1,102 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''ftrack Python API documentation build configuration file.''' - -import os -import re - -# -- General ------------------------------------------------------------------ - -# Extensions. -extensions = [ - 'sphinx.ext.autodoc', - 'sphinx.ext.extlinks', - 'sphinx.ext.intersphinx', - 'sphinx.ext.todo', - 'sphinx.ext.viewcode', - 'lowdown' -] - - -# The suffix of source filenames. -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'ftrack Python API' -copyright = u'2014, ftrack' - -# Version -with open( - os.path.join( - os.path.dirname(__file__), '..', 'source', - 'ftrack_api', '_version.py' - ) -) as _version_file: - _version = re.match( - r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL - ).group(1) - -version = _version -release = _version - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_template'] - -# A list of prefixes to ignore for module listings. -modindex_common_prefix = [ - 'ftrack_api.' -] - -# -- HTML output -------------------------------------------------------------- - -if not os.environ.get('READTHEDOCS', None) == 'True': - # Only import and set the theme if building locally. - import sphinx_rtd_theme - html_theme = 'sphinx_rtd_theme' - html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -html_static_path = ['_static'] -html_style = 'ftrack.css' - -# If True, copy source rst files to output for reference. 
-html_copy_source = True - - -# -- Autodoc ------------------------------------------------------------------ - -autodoc_default_flags = ['members', 'undoc-members', 'inherited-members'] -autodoc_member_order = 'bysource' - - -def autodoc_skip(app, what, name, obj, skip, options): - '''Don't skip __init__ method for autodoc.''' - if name == '__init__': - return False - - return skip - - -# -- Intersphinx -------------------------------------------------------------- - -intersphinx_mapping = { - 'python': ('http://docs.python.org/', None), - 'ftrack': ( - 'http://rtd.ftrack.com/docs/ftrack/en/stable/', None - ) -} - - -# -- Todos --------------------------------------------------------------------- - -todo_include_todos = os.environ.get('FTRACK_DOC_INCLUDE_TODOS', False) == 'True' - - -# -- Setup -------------------------------------------------------------------- - -def setup(app): - app.connect('autodoc-skip-member', autodoc_skip) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf deleted file mode 100644 index 3c927cc1..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/docutils.conf +++ /dev/null @@ -1,2 +0,0 @@ -[html4css1 writer] -field-name-limit:0 \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst deleted file mode 100644 index 99019ee4..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/environment_variables.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _environment_variables: - -********************* -Environment variables -********************* - -The following is a consolidated list of environment variables that this API -can reference: - -.. envvar:: FTRACK_SERVER - - The full url of the ftrack server to connect to. 
For example - "https://mycompany.ftrackapp.com" - -.. envvar:: FTRACK_API_USER - - The username of the ftrack user to act on behalf of when performing actions - in the system. - - .. note:: - - When this environment variable is not set, the API will typically also - check other standard operating system variables that hold the username - of the current logged in user. To do this it uses - :func:`getpass.getuser`. - -.. envvar:: FTRACK_API_KEY - - The API key to use when performing actions in the system. The API key is - used to determine the permissions that a script has in the system. - -.. envvar:: FTRACK_APIKEY - - For backwards compatibility. See :envvar:`FTRACK_API_KEY`. - -.. envvar:: FTRACK_EVENT_PLUGIN_PATH - - Paths to search recursively for plugins to load and use in a session. - Multiple paths can be specified by separating with the value of - :attr:`os.pathsep` (e.g. ':' or ';'). - -.. envvar:: FTRACK_API_SCHEMA_CACHE_PATH - - Path to a directory that will be used for storing and retrieving a cache of - the entity schemas fetched from the server. - -.. envvar:: http_proxy / https_proxy - - If you need to use a proxy to connect to ftrack you can use the - "standard" :envvar:`http_proxy` and :envvar:`https_proxy`. Please note that they - are lowercase. - - For example "export https_proxy=http://proxy.mycompany.com:8080" \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst deleted file mode 100644 index 0c44a1b6..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/event_list.rst +++ /dev/null @@ -1,137 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _event_list: - -********** -Event list -********** - -The following is a consolidated list of events published directly by this API. 
- -For some events, a template plugin file is also listed for download -(:guilabel:`Download template plugin`) to help get you started with writing your -own plugin for a particular event. - -.. seealso:: - - * :ref:`handling_events` - * :ref:`ftrack server event list ` - -.. _event_list/ftrack.api.session.construct-entity-type: - -ftrack.api.session.construct-entity-type -======================================== - -:download:`Download template plugin -` - -:ref:`Synchronous `. Published by -the session to retrieve constructed class for specified schema:: - - Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ) - -Expects returned data to be:: - - A Python class. - -.. seealso:: :ref:`working_with_entities/entity_types`. - -.. _event_list/ftrack.api.session.configure-location: - -ftrack.api.session.configure-location -===================================== - -:download:`Download template plugin -` - -:ref:`Synchronous `. Published by -the session to allow configuring of location instances:: - - Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ) - -.. seealso:: :ref:`Configuring locations `. - -.. _event_list/ftrack.location.component-added: - -ftrack.location.component-added -=============================== - -Published whenever a component is added to a location:: - - Event( - topic='ftrack.location.component-added', - data=dict( - component_id='e2dc0524-b576-11d3-9612-080027331d74', - location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' - ) - ) - -.. _event_list/ftrack.location.component-removed: - -ftrack.location.component-removed -================================= - -Published whenever a component is removed from a location:: - - Event( - topic='ftrack.location.component-removed', - data=dict( - component_id='e2dc0524-b576-11d3-9612-080027331d74', - location_id='07b82a97-8cf9-11e3-9383-20c9d081909b' - ) - ) - -.. 
_event_list/ftrack.api.session.ready: - -ftrack.api.session.ready -======================== - -:ref:`Synchronous `. Published after -a :class:`~ftrack_api.session.Session` has been initialized and -is ready to be used:: - - Event( - topic='ftrack.api.session.ready', - data=dict( - session=, - ) - ) - -.. warning:: - - Since the event is synchronous and blocking, avoid doing any unnecessary - work as it will slow down session initialization. - -.. seealso:: - - Also see example usage in :download:`example_plugin_using_session.py - `. - - -.. _event_list/ftrack.api.session.reset: - -ftrack.api.session.reset -======================== - -:ref:`Synchronous `. Published after -a :class:`~ftrack_api.session.Session` has been reset and is ready to be used -again:: - - Event( - topic='ftrack.api.session.reset', - data=dict( - session=, - ) - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst deleted file mode 100644 index 985eb9bb..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/assignments_and_allocations.rst +++ /dev/null @@ -1,82 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/assignments_and_allocations: - -**************************************** -Working with assignments and allocations -**************************************** - -.. currentmodule:: ftrack_api.session - -The API exposes `assignments` and `allocations` relationships on objects in -the project hierarchy. You can use these to retrieve the allocated or assigned -resources, which can be either groups or users. - -Allocations can be used to allocate users or groups to a project team, while -assignments are more explicit and is used to assign users to tasks. Both -assignment and allocations are modelled as `Appointment` objects, with a -`type` attribute indicating the type of the appoinment. 
- -The following example retrieves all users part of the project team:: - - # Retrieve a project - project = session.query('Project').first() - - # Set to hold all users part of the project team - project_team = set() - - # Add all allocated groups and users - for allocation in project['allocations']: - - # Resource may be either a group or a user - resource = allocation['resource'] - - # If the resource is a group, add its members - if isinstance(resource, session.types['Group']): - for membership in resource['memberships']: - user = membership['user'] - project_team.add(user) - - # The resource is a user, add it. - else: - user = resource - project_team.add(user) - -The next example shows how to assign the current user to a task:: - - # Retrieve a task and the current user - task = session.query('Task').first() - current_user = session.query( - u'User where username is {0}'.format(session.api_user) - ).one() - - # Create a new Appointment of type assignment. - session.create('Appointment', { - 'context': task, - 'resource': current_user, - 'type': 'assignment' - }) - - # Finally, persist the new assignment - session.commit() - -To list all users assigned to a task, see the following example:: - - task = session.query('Task').first() - users = session.query( - 'select first_name, last_name from User ' - 'where assignments any (context_id = "{0}")'.format(task['id']) - ) - for user in users: - print user['first_name'], user['last_name'] - -To list the current user's assigned tasks, see the example below:: - - assigned_tasks = session.query( - 'select link from Task ' - 'where assignments any (resource.username = "{0}")'.format(session.api_user) - ) - for task in assigned_tasks: - print u' / '.join(item['name'] for item in task['link']) - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst deleted file mode 100644 index 6a39bb20..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/component.rst +++ /dev/null @@ -1,23 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/component: - -*********************** -Working with components -*********************** - -.. currentmodule:: ftrack_api.session - -Components can be created manually or using the provide helper methods on a -:meth:`session ` or existing -:meth:`asset version -`:: - - component = version.create_component('/path/to/file_or_sequence.jpg') - session.commit() - -When a component is created using the helpers it is automatically added to a -location. - -.. seealso:: :ref:`Locations tutorial ` diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst deleted file mode 100644 index 033942b4..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/custom_attribute.rst +++ /dev/null @@ -1,94 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/custom_attribute: - -*********************** -Using custom attributes -*********************** - -.. currentmodule:: ftrack_api.session - -Custom attributes can be written and read from entities using the -``custom_attributes`` property. - -The ``custom_attributes`` property provides a similar interface to a dictionary. 
- -Keys can be printed using the keys method:: - - >>> task['custom_attributes'].keys() - [u'my_text_field'] - -or access keys and values as items:: - - >>> print task['custom_attributes'].items() - [(u'my_text_field', u'some text')] - -Read existing custom attribute values:: - - >>> print task['custom_attributes']['my_text_field'] - 'some text' - -Updating a custom attributes can also be done similar to a dictionary:: - - task['custom_attributes']['my_text_field'] = 'foo' - -To query for tasks with a custom attribute, ``my_text_field``, you can use the -key from the configuration:: - - for task in session.query( - 'Task where custom_attributes any ' - '(key is "my_text_field" and value is "bar")' - ): - print task['name'] - -Limitations -=========== - -Expression attributes ---------------------- - -Expression attributes are not yet supported and the reported value will -always be the non-evaluated expression. - -Hierarchical attributes ------------------------ - -Hierarchical attributes are not yet fully supported in the API. Hierarchical -attributes support both read and write, but when read they are not calculated -and instead the `raw` value is returned:: - - # The hierarchical attribute `my_attribute` is set on Shot but this will not - # be reflected on the children. Instead the raw value is returned. - print shot['custom_attributes']['my_attribute'] - 'foo' - print task['custom_attributes']['my_attribute'] - None - -To work around this limitation it is possible to use the legacy api for -hierarchical attributes or to manually query the parents for values and use the -first value that is set. - -Validation -========== - -Custom attributes are validated on the ftrack server before persisted. The -validation will check that the type of the data is correct for the custom -attribute. 
- - * number - :py:class:`int` or :py:class:`float` - * text - :py:class:`str` or :py:class:`unicode` - * enumerator - :py:class:`list` - * boolean - :py:class:`bool` - * date - :py:class:`datetime.datetime` or :py:class:`datetime.date` - -If the value set is not valid a :py:exc:`ftrack_api.exception.ServerError` is -raised with debug information:: - - shot['custom_attributes']['fstart'] = 'test' - - Traceback (most recent call last): - ... - ftrack_api.exception.ServerError: Server reported error: - ValidationError(Custom attribute value for "fstart" must be of type number. - Got "test" of type ) \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst deleted file mode 100644 index 2be01ffe..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/encode_media.rst +++ /dev/null @@ -1,53 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/encode_media: - -************** -Encoding media -************** - -Media such as images and video can be encoded by the ftrack server to allow -playing it in the ftrack web interface. Media can be encoded using -:meth:`ftrack_api.session.Session.encode_media` which accepts a path to a file -or an existing component in the ftrack.server location. 
- -Here is an example of how to encode a video and read the output:: - - job = session.encode_media('/PATH/TO/MEDIA') - job_data = json.loads(job['data']) - - print 'Source component id', job_data['source_component_id'] - print 'Keeping original component', job_data['keep_original'] - for output in job_data['output']: - print u'Output component - id: {0}, format: {1}'.format( - output['component_id'], output['format'] - ) - -You can also call the corresponding helper method on an :meth:`asset version -`, to have the -encoded components automatically associated with the version:: - - job = asset_version.encode_media('/PATH/TO/MEDIA') - -It is also possible to get the URL to an encoded component once the job has -finished:: - - job = session.encode_media('/PATH/TO/MEDIA') - - # Wait for job to finish. - - location = session.query('Location where name is "ftrack.server"').one() - for component in job['job_components']: - print location.get_url(component) - -Media can also be an existing component in another location. Before encoding it, -the component needs to be added to the ftrack.server location:: - - location = session.query('Location where name is "ftrack.server"').one() - location.add_component(component) - session.commit() - - job = session.encode_media(component) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst deleted file mode 100644 index 43e31484..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/entity_links.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. _example/entity_links: - -****************** -Using entity links -****************** - -A link can be used to represent a dependency or another relation between -two entities in ftrack. 
- -There are two types of entities that can be linked: - -* Versions can be linked to other asset versions, where the link entity type - is `AssetVersionLink`. -* Objects like Task, Shot or Folder, where the link entity type is - `TypedContextLink`. - -Both `AssetVersion` and `TypedContext` objects have the same relations -`incoming_links` and `outgoing_links`. To list the incoming links to a Shot we -can use the relationship `incoming_links`:: - - for link in shot['incoming_links']: - print link['from'], link['to'] - -In the above example `link['to']` is the shot and `link['from']` could be an -asset build or something else that is linked to the shot. There is an equivalent -`outgoing_links` that can be used to access outgoing links on an object. - -To create a new link between objects or asset versions create a new -`TypedContextLink` or `AssetVersionLink` entity with the from and to properties -set. In this example we will link two asset versions:: - - session.create('AssetVersionLink', { - 'from': from_asset_version, - 'to': to_asset_version - }) - session.commit() - -Using asset version link shortcut -================================= - -Links on asset version can also be created by the use of the `uses_versions` and -`used_in_versions` relations:: - - rig_version['uses_versions'].append(model_version) - session.commit() - -This has the same result as creating the `AssetVersionLink` entity as in the -previous section. - -Which versions are using the model can be listed with:: - - for version in model_version['used_in_versions']: - print '{0} is using {1}'.format(version, model_version) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst deleted file mode 100644 index 4fca37d7..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/index.rst +++ /dev/null @@ -1,52 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. 
currentmodule:: ftrack_api.session - -.. _example: - -************** -Usage examples -************** - -The following examples show how to use the API to accomplish specific tasks -using the default configuration. - -.. note:: - - If you are using a server with a customised configuration you may need to - alter the examples slightly to make them work correctly. - -Most of the examples assume you have the *ftrack_api* package imported and have -already constructed a :class:`Session`:: - - import ftrack_api - - session = ftrack_api.Session() - - -.. toctree:: - - project - component - review_session - metadata - custom_attribute - manage_custom_attribute_configuration - link_attribute - scope - job - note - list - timer - assignments_and_allocations - thumbnail - encode_media - entity_links - web_review - publishing - security_roles - task_template - sync_ldap_users - invite_user - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst deleted file mode 100644 index 342f0ef6..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/invite_user.rst +++ /dev/null @@ -1,31 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. 
_example/invite_user: - -********************* -Invite user -********************* - -Here we create a new user and send them a invitation through mail - - -Create a new user:: - - user_email = 'artist@mail.vfx-company.com' - - new_user = session.create( - 'User', { - 'username':user_email, - 'email':user_email, - 'is_active':True - } - ) - - session.commit() - - -Invite our new user:: - - new_user.send_invite() - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst deleted file mode 100644 index 296a0f5e..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/job.rst +++ /dev/null @@ -1,97 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/job: - -************* -Managing jobs -************* - -.. currentmodule:: ftrack_api.session - -Jobs can be used to display feedback to users in the ftrack web interface when -performing long running tasks in the API. - -To create a job use :meth:`Session.create`:: - - user = # Get a user from ftrack. - - job = session.create('Job', { - 'user': user, - 'status': 'running' - }) - -The created job will appear as running in the :guilabel:`jobs` menu for the -specified user. To set a description on the job, add a dictionary containing -description as the `data` key: - -.. note:: - - In the current version of the API the dictionary needs to be JSON - serialised. - -.. code-block:: python - - import json - - job = session.create('Job', { - 'user': user, - 'status': 'running', - 'data': json.dumps({ - 'description': 'My custom job description.' - }) - }) - -When the long running task has finished simply set the job as completed and -continue with the next task. - -.. code-block:: python - - job['status'] = 'done' - session.commit() - -Attachments -=========== - -Job attachments are files that are attached to a job. 
In the ftrack web -interface these attachments can be downloaded by clicking on a job in the `Jobs` -menu. - -To get a job's attachments through the API you can use the `job_components` -relation and then use the ftrack server location to get the download URL:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - for job_component in job['job_components']: - print 'Download URL: {0}'.format( - server_location.get_url(job_component['component']) - ) - -To add an attachment to a job you have to add it to the ftrack server location -and create a `jobComponent`:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - # Create component and name it "My file". - component = session.create_component( - '/path/to/file', - data={'name': 'My file'}, - location=server_location - ) - - # Attach the component to the job. - session.create( - 'JobComponent', - {'component_id': component['id'], 'job_id': job['id']} - ) - - session.commit() - -.. note:: - - The ftrack web interface does only support downloading one attachment so - attaching more than one will have limited support in the web interface. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst deleted file mode 100644 index 1dcea842..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/link_attribute.rst +++ /dev/null @@ -1,55 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/link_attribute: - -********************* -Using link attributes -********************* - -The `link` attribute can be used to retreive the ids and names of the parents of -an object. It is particularly useful in cases where the path of an object must -be presented in a UI, but can also be used to speedup certain query patterns. 
- -You can use the `link` attribute on any entity inheriting from a -`Context` or `AssetVersion`. Here we use it on the `Task` entity:: - - task = session.query( - 'select link from Task where name is "myTask"' - ).first() - print task['link'] - -It can also be used create a list of parent entities, including the task -itself:: - - entities = [] - for item in task['link']: - entities.append(session.get(item['type'], item['id'])) - -The `link` attribute is an ordered list of dictionaries containting data -of the parents and the item itself. Each dictionary contains the following -entries: - - id - The id of the object and can be used to do a :meth:`Session.get`. - name - The name of the object. - type - The schema id of the object. - -A more advanced use-case is to get the parent names and ids of all timelogs for -a user:: - - for timelog in session.query( - 'select context.link, start, duration from Timelog ' - 'where user.username is "john.doe"' - ): - print timelog['context']['link'], timelog['start'], timelog['duration'] - -The attribute is also available from the `AssetVersion` asset relation:: - - for asset_version in session.query( - 'select link from AssetVersion ' - 'where user.username is "john.doe"' - ): - print asset_version['link'] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst deleted file mode 100644 index 155b25f9..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/list.rst +++ /dev/null @@ -1,46 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/list: - -*********** -Using lists -*********** - -.. currentmodule:: ftrack_api.session - -Lists can be used to create a collection of asset versions or objects such as -tasks. 
It could be a list of items that should be sent to client, be included in -todays review session or items that belong together in way that is different -from the project hierarchy. - -There are two types of lists, one for asset versions and one for other objects -such as tasks. - -To create a list use :meth:`Session.create`:: - - user = # Get a user from ftrack. - project = # Get a project from ftrack. - list_category = # Get a list category from ftrack. - - asset_version_list = session.create('AssetVersionList', { - 'owner': user, - 'project': project, - 'category': list_category - }) - - task_list = session.create('TypedContextList', { - 'owner': user, - 'project': project, - 'category': list_category - }) - -Then add items to the list like this:: - - asset_version_list['items'].append(asset_version) - task_list['items'].append(task) - -And remove items from the list like this:: - - asset_version_list['items'].remove(asset_version) - task_list['items'].remove(task) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst deleted file mode 100644 index e3d7c406..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/manage_custom_attribute_configuration.rst +++ /dev/null @@ -1,320 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/manage_custom_attribute_configuration: - -**************************************** -Managing custom attribute configurations -**************************************** - -From the API it is not only possible to -:ref:`read and update custom attributes for entities `, -but also managing custom attribute configurations. - -Existing custom attribute configurations can be queried as :: - - # Print all existing custom attribute configurations. 
- print session.query('CustomAttributeConfiguration').all() - -Use :meth:`Session.create` to create a new custom attribute configuration:: - - # Get the custom attribute type. - custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - - # Persist it to the ftrack instance. - session.commit() - -.. tip:: - - The example above does not add security roles. This can be done either - from System Settings in the ftrack web application, or by following the - :ref:`example/manage_custom_attribute_configuration/security_roles` example. - -Global or project specific -========================== - -A custom attribute can be global or project specific depending on the -`project_id` attribute:: - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - # Set the `project_id` and the custom attribute will only be available - # on `my_project`. - 'project_id': my_project['id'], - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - session.commit() - -A project specific custom attribute can be changed to a global:: - - custom_attribute_configuration['project_id'] = None - session.commit() - -Changing a global custom attribute configuration to a project specific is not -allowed. - -Entity types -============ - -Custom attribute configuration entity types are using a legacy notation. A -configuration can have one of the following as `entity_type`: - -:task: - Represents TypedContext (Folder, Shot, Sequence, Task, etc.) 
custom - attribute configurations. When setting this as entity_type the - object_type_id must be set as well. - - Creating a text custom attribute for Folder:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - object_type = session.query('ObjectType where name is "Folder"').one() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'task', - 'object_type_id': object_type['id'], - 'type': custom_attribute_type, - 'label': 'Foo', - 'key': 'foo', - 'default': 'bar', - }) - session.commit() - - Can be associated with a `project_id`. - -:show: - Represents Projects custom attribute configurations. - - Can be associated with a `project_id`. - -:assetversion: - Represents AssetVersion custom attribute configurations. - - Can be associated with a `project_id`. - -:user: - Represents User custom attribute configurations. - - Must be `global` and cannot be associated with a `project_id`. - -:list: - Represents List custom attribute configurations. - - Can be associated with a `project_id`. - -:asset: - Represents Asset custom attribute configurations. - - .. note:: - - Asset custom attributes have limited support in the ftrack web - interface. - - Can be associated with a `project_id`. - -It is not possible to change type after a custom attribute configuration has -been created. - -Custom attribute configuration types -==================================== - -Custom attributes can be of different data types depending on what type is set -in the configuration. Some types requires an extra json encoded config to be -set: - -:text: - A sting type custom attribute. - - The `default` value must be either :py:class:`str` or :py:class:`unicode`. - - Can be either presented as raw text or markdown formatted in applicaitons - which support it. This is configured through a markwdown key:: - - # Get the custom attribute type. 
- custom_attribute_type = session.query( - 'CustomAttributeType where name is "text"' - ).one() - - # Create a custom attribute configuration. - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Asset version text attribute', - 'key': 'asset_version_text_attribute', - 'default': 'bar', - 'config': json.dumps({'markdown': False}) - }) - - # Persist it to the ftrack instance. - session.commit() - -:boolean: - - A boolean type custom attribute. - - The `default` value must be a :py:class:`bool`. - - No config is required. - -:date: - A date type custom attribute. - - The `default` value must be an :term:`arrow` date - e.g. - arrow.Arrow(2017, 2, 8). - - No config is required. - -:enumerator: - An enumerator type custom attribute. - - The `default` value must be a list with either :py:class:`str` or - :py:class:`unicode`. - - The enumerator can either be single or multi select. The config must a json - dump of a dictionary containing `multiSelect` and `data`. Where - `multiSelect` is True or False and data is a list of options. Each option - should be a dictionary containing `value` and `menu`, where `menu` is meant - to be used as label in a user interface. - - Create a custom attribute enumerator:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "enumerator"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Enumerator attribute', - 'key': 'enumerator_attribute', - 'default': ['bar'], - 'config': json.dumps({ - 'multiSelect': True, - 'data': json.dumps([ - {'menu': 'Foo', 'value': 'foo'}, - {'menu': 'Bar', 'value': 'bar'} - ]) - }) - }) - session.commit() - -:dynamic enumerator: - - An enumerator type where available options are fetched from remote. Created - in the same way as enumerator but without `data`. 
- -:number: - - A number custom attribute can be either decimal or integer for presentation. - - This can be configured through the `isdecimal` config option:: - - custom_attribute_type = session.query( - 'CustomAttributeType where name is "number"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Number attribute', - 'key': 'number_attribute', - 'default': 42, - 'config': json.dumps({ - 'isdecimal': True - }) - }) - session.commit() - -Changing default -================ - -It is possible to update the `default` value of a custom attribute -configuration. This will not change the value of any existing custom -attributes:: - - # Change the default value of custom attributes. This will only affect - # newly created entities. - custom_attribute_configuration['default'] = 43 - session.commit() - -.. _example/manage_custom_attribute_configuration/security_roles: - -Security roles -============== - -By default new custom attribute configurations and the entity values are not -readable or writable by any security role. - -This can be configured through the `read_security_roles` and `write_security_roles` -attributes:: - - # Pick random security role. - security_role = session.query('SecurityRole').first() - custom_attribute_type = session.query( - 'CustomAttributeType where name is "date"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Date attribute', - 'key': 'date_attribute', - 'default': arrow.Arrow(2017, 2, 8), - 'write_security_roles': [security_role], - 'read_security_roles': [security_role] - }) - session.commit() - -.. note:: - - Setting the correct security role is important and must be changed to - whatever security role is appropriate for your configuration and intended - purpose. 
- -Custom attribute groups -======================= - -A custom attribute configuration can be categorized using a -`CustomAttributeGroup`:: - - group = session.query('CustomAttributeGroup').first() - security_role = session.query('SecurityRole').first() - custom_attribute_type = session.query( - 'CustomAttributeType where name is "enumerator"' - ).first() - session.create('CustomAttributeConfiguration', { - 'entity_type': 'assetversion', - 'type': custom_attribute_type, - 'label': 'Enumerator attribute', - 'key': 'enumerator_attribute', - 'default': ['bar'], - 'config': json.dumps({ - 'multiSelect': True, - 'data': json.dumps([ - {'menu': 'Foo', 'value': 'foo'}, - {'menu': 'Bar', 'value': 'bar'} - ]) - }), - 'group': group, - 'write_security_roles': [security_role], - 'read_security_roles': [security_role] - }) - session.commit() - -.. seealso:: - - :ref:`example/custom_attribute` diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst deleted file mode 100644 index 7b168810..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/metadata.rst +++ /dev/null @@ -1,43 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/metadata: - -************** -Using metadata -************** - -.. currentmodule:: ftrack_api.session - -Key/value metadata can be written to entities using the metadata property -and also used to query entities. 
- -The metadata property has a similar interface as a dictionary and keys can be -printed using the keys method:: - - >>> print new_sequence['metadata'].keys() - ['frame_padding', 'focal_length'] - -or items:: - - >>> print new_sequence['metadata'].items() - [('frame_padding': '4'), ('focal_length': '70')] - -Read existing metadata:: - - >>> print new_sequence['metadata']['frame_padding'] - '4' - -Setting metadata can be done in a few ways where that later one will replace -any existing metadata:: - - new_sequence['metadata']['frame_padding'] = '5' - new_sequence['metadata'] = { - 'frame_padding': '4' - } - -Entities can also be queried using metadata:: - - session.query( - 'Sequence where metadata any (key is "frame_padding" and value is "4")' - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst deleted file mode 100644 index 8f8f1bb5..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/note.rst +++ /dev/null @@ -1,169 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/note: - -*********** -Using notes -*********** - -Notes can be written on almost all levels in ftrack. To retrieve notes on an -entity you can either query them or use the relation called `notes`:: - - task = session.query('Task').first() - - # Retrieve notes using notes property. - notes_on_task = task['notes'] - - # Or query them. - notes_on_task = session.query('Note where parent_id is "{}"'.format( - task['id'] - )) - -.. note:: - - It's currently not possible to use the `parent` property when querying - notes or to use the `parent` property on notes:: - - task = session.query('Task').first() - - # This won't work in the current version of the API. - session.query('Note where parent.id is "{}"'.format( - task['id'] - )) - - # Neither will this. 
- parent_of_note = note['parent'] - -To create new notes you can either use the helper method called -:meth:`~ftrack_api.entity.note.CreateNoteMixin.create_note` on any entity that -can have notes or use :meth:`Session.create` to create them manually:: - - user = session.query('User').first() - - # Create note using the helper method. - note = task.create_note('My new note', author=user) - - # Manually create a note - note = session.create('Note', { - 'content': 'My new note', - 'author': user - }) - - task['notes'].append(note) - -Replying to an existing note can also be done with a helper method or by -using :meth:`Session.create`:: - - # Create using helper method. - first_note_on_task = task['notes'][0] - first_note_on_task.create_reply('My new reply on note', author=user) - - # Create manually - reply = session.create('Note', { - 'content': 'My new note', - 'author': user - }) - - first_note_on_task.replies.append(reply) - -Notes can have labels. Use the label argument to set labels on the -note using the helper method:: - - label = session.query( - 'NoteLabel where name is "External Note"' - ).first() - - note = task.create_note( - 'New note with external category', author=user, labels=[label] - ) - -Or add labels to notes when creating a note manually:: - - label = session.query( - 'NoteLabel where name is "External Note"' - ).first() - - note = session.create('Note', { - 'content': 'New note with external category', - 'author': user - }) - - session.create('NoteLabelLink', { - 'note_id': note['id], - 'label_id': label['id'] - }) - - task['notes'].append(note) - -.. note:: - - Support for labels on notes was added in ftrack server version 4.3. For - older versions of the server, NoteCategory can be used instead. 
- -To specify a category when creating a note simply pass a `NoteCategory` instance -to the helper method:: - - category = session.query( - 'NoteCategory where name is "External Note"' - ).first() - - note = task.create_note( - 'New note with external category', author=user, category=category - ) - -When writing notes you might want to direct the note to someone. This is done -by adding users as recipients. If a user is added as a recipient the user will -receive notifications and the note will be displayed in their inbox. - -To add recipients pass a list of user or group instances to the helper method:: - - john = session.query('User where username is "john"').one() - animation_group = session.query('Group where name is "Animation"').first() - - note = task.create_note( - 'Note with recipients', author=user, recipients=[john, animation_group] - ) - -Attachments -=========== - -Note attachments are files that are attached to a note. In the ftrack web -interface these attachments appears next to the note and can be downloaded by -the user. - -To get a note's attachments through the API you can use the `note_components` -relation and then use the ftrack server location to get the download URL:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - for note_component in note['note_components']: - print 'Download URL: {0}'.format( - server_location.get_url(note_component['component']) - ) - -To add an attachment to a note you have to add it to the ftrack server location -and create a `NoteComponent`:: - - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - # Create component and name it "My file". - component = session.create_component( - '/path/to/file', - data={'name': 'My file'}, - location=server_location - ) - - # Attach the component to the note. 
- session.create( - 'NoteComponent', - {'component_id': component['id'], 'note_id': note['id']} - ) - - session.commit() diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst deleted file mode 100644 index 0b4c0879..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/project.rst +++ /dev/null @@ -1,65 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/project: - -********************* -Working with projects -********************* - -.. currentmodule:: ftrack_api.session - -Creating a project -================== - -A project with sequences, shots and tasks can be created in one single -transaction. Tasks need to have a type and status set on creation based on the -project schema:: - - import uuid - - # Create a unique name for the project. - name = 'projectname_{0}'.format(uuid.uuid1().hex) - - # Naively pick the first project schema. For this example to work the - # schema must contain `Shot` and `Sequence` object types. - project_schema = session.query('ProjectSchema').first() - - # Create the project with the chosen schema. - project = session.create('Project', { - 'name': name, - 'full_name': name + '_full', - 'project_schema': project_schema - }) - - # Retrieve default types. - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - # Create sequences, shots and tasks. 
- for sequence_number in range(1, 5): - sequence = session.create('Sequence', { - 'name': 'seq_{0}'.format(sequence_number), - 'parent': project - }) - - for shot_number in range(1, 5): - shot = session.create('Shot', { - 'name': '{0}0'.format(shot_number).zfill(3), - 'parent': sequence, - 'status': default_shot_status - }) - - for task_number in range(1, 5): - session.create('Task', { - 'name': 'task_{0}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - # Commit all changes to the server. - session.commit() diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst deleted file mode 100644 index bf1da18a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/publishing.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/publishing: - -******************* -Publishing versions -******************* - -To know more about publishing and the concepts around publishing, read the -`ftrack article `_ -about publishing. - -To publish an asset you first need to get the context where the asset should be -published:: - - # Get a task from a given id. - task = session.get('Task', '423ac382-e61d-4802-8914-dce20c92b740') - -And the parent of the task which will be used to publish the asset on:: - - asset_parent = task['parent'] - -Then we create an asset and a version on the asset:: - - asset_type = session.query('AssetType where name is "Geometry"').one() - asset = session.create('Asset', { - 'name': 'My asset', - 'type': asset_type, - 'parent': asset_parent - }) - asset_version = session.create('AssetVersion', { - 'asset': asset, - 'task': task - }) - -.. note:: - - The task is not used as the parent of the asset, instead the task is linked - directly to the AssetVersion. 
- -Then when we have a version where we can create the components:: - - asset_version.create_component( - '/path/to/a/file.mov', location='auto' - ) - asset_version.create_component( - '/path/to/a/another-file.mov', location='auto' - ) - - session.commit() - -This will automatically create a new component and add it to the location which -has been configured as the first in priority. - -Components can also be named and added to a custom location like this:: - - location = session.query('Location where name is "my-location"') - asset_version.create_component( - '/path/to/a/file.mov', - data={ - 'name': 'foobar' - }, - location=location - ) - -.. seealso:: - - * :ref:`example/component` - * :ref:`example/web_review` - * :ref:`example/thumbnail` diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst deleted file mode 100644 index 68f7870d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/review_session.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _example/review_session: - -********************* -Using review sessions -********************* - -.. currentmodule:: ftrack_api.session - -Client review sessions can either be queried manually or by using a project -instance. - -.. code-block:: python - - review_sessions = session.query( - 'ReviewSession where name is "Weekly review"' - ) - - project_review_sessions = project['review_sessions'] - -To create a new review session on a specific project use :meth:`Session.create`. - -.. code-block:: python - - review_session = session.create('ReviewSession', { - 'name': 'Weekly review', - 'description': 'See updates from last week.', - 'project': project - }) - -To add objects to a review session create them using -:meth:`Session.create` and reference a review session and an asset version. - -.. 
code-block:: python - - review_session = session.create('ReviewSessionObject', { - 'name': 'Compositing', - 'description': 'Fixed shadows.', - 'version': 'Version 3', - 'review_session': review_session, - 'asset_version': asset_version - }) - -To list all objects in a review session. - -.. code-block:: python - - review_session_objects = review_session['review_session_objects'] - -Listing and adding collaborators to review session can be done using -:meth:`Session.create` and the `review_session_invitees` relation on a -review session. - -.. code-block:: python - - invitee = session.create('ReviewSessionInvitee', { - 'name': 'John Doe', - 'email': 'john.doe@example.com', - 'review_session': review_session - }) - - session.commit() - - invitees = review_session['review_session_invitees'] - -To remove a collaborator simply delete the object using -:meth:`Session.delete`. - -.. code-block:: python - - session.delete(invitee) - -To send out an invite email to a signle collaborator use -:meth:`Session.send_review_session_invite`. - -.. code-block:: python - - session.send_review_session_invite(invitee) - -Multiple invitees can have emails sent to them in one batch using -:meth:`Session.send_review_session_invites`. - -.. code-block:: python - - session.send_review_session_invites(a_list_of_invitees) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst deleted file mode 100644 index 3be42322..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/scope.rst +++ /dev/null @@ -1,27 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/scope: - -************ -Using scopes -************ - -.. currentmodule:: ftrack_api.session - -Entities can be queried based on their scopes:: - - >>> tasks = session.query( - ... 'Task where scopes.name is "London"' - ... 
) - -Scopes can be read and modified for entities:: - - >>> scope = session.query( - ... 'Scope where name is "London"' - ... )[0] - ... - ... if scope in task['scopes']: - ... task['scopes'].remove(scope) - ... else: - ... task['scopes'].append(scope) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst deleted file mode 100644 index 4219e3d1..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/security_roles.rst +++ /dev/null @@ -1,73 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/security_roles: - -********************************* -Working with user security roles -********************************* - -.. currentmodule:: ftrack_api.session - -The API exposes `SecurityRole` and `UserSecurityRole` that can be used to -specify who should have access to certain data on different projects. - -List all available security roles like this:: - - security_roles = session.query( - 'select name from SecurityRole where type is "PROJECT"' - ) - -.. note:: - - We only query for project roles since those are the ones we can add to a - user for certain projects. Other types include API and ASSIGNED. Type API - can only be added to global API keys, which is currently not supported via - the api and type ASSIGNED only applies to assigned tasks. 
- -To get all security roles from a user we can either use relations like this:: - - for user_security_role in user['user_security_roles']: - if user_security_role['is_all_projects']: - result_string = 'all projects' - else: - result_string = ', '.join( - [project['full_name'] for project in user_security_role['projects']] - ) - - print 'User has security role "{0}" which is valid on {1}.'.format( - user_security_role['security_role']['name'], - result_string - ) - -or query them directly like this:: - - user_security_roles = session.query( - 'UserSecurityRole where user.username is "{0}"'.format(session.api_user) - ).all() - -User security roles can also be added to a user for all projects like this:: - - project_manager_role = session.query( - 'SecurityRole where name is "Project Manager"' - ).one() - - session.create('UserSecurityRole', { - 'is_all_projects': True, - 'user': user, - 'security_role': project_manager_role - }) - session.commit() - -or for certain projects only like this:: - - projects = session.query( - 'Project where full_name is "project1" or full_name is "project2"' - ).all()[:] - - session.create('UserSecurityRole', { - 'user': user, - 'security_role': project_manager_role, - 'projects': projects - }) - session.commit() diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst deleted file mode 100644 index 5ea0e47d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/sync_ldap_users.rst +++ /dev/null @@ -1,30 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _example/sync_with_ldap: - -******************** -Sync users with LDAP -******************** - -.. 
currentmodule:: ftrack_api.session - - -If ftrack is configured to connect to LDAP you may trigger a -synchronization through the api using the -:meth:`ftrack_api.session.Session.call`:: - - result = session.call([ - dict( - action='delayed_job', - job_type='SYNC_USERS_LDAP' - ) - ]) - job = result[0]['data] - -You will get a `ftrack_api.entity.job.Job` instance back which can be used -to check the success of the job:: - - if job.get('status') == 'failed': - # The job failed get the error. - logging.error(job.get('data')) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst deleted file mode 100644 index c6161e83..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/task_template.rst +++ /dev/null @@ -1,56 +0,0 @@ -.. - :copyright: Copyright (c) 2017 ftrack - -.. _example/task_template: - -*************************** -Working with Task Templates -*************************** - -Task templates can help you organize your workflows by building a collection -of tasks to be applied for specific contexts. They can be applied to all `Context` -objects for example Project, Sequences, Shots, etc... 
- -Query task templates -======================= - -Retrive all task templates and there tasks for a project:: - - project = session.query('Project').first() - - for task_template in project['project_schema']['task_templates']: - print('\ntask template: {0}'.format( - task_template['name'] - )) - - for task_type in [t['task_type'] for t in task_template['items']]: - print('\ttask type: {0}'.format( - task_type['name'] - )) - - - -"Apply" a task template -======================= -Create all tasks in a random task template directly under the project:: - - - project = session.query('Project').first() - - task_template = random.choice( - project['project_schema']['task_templates'] - ) - - for task_type in [t['task_type'] for t in task_template['items']]: - session.create( - 'Task', { - 'name': task_type['name'], - 'type': task_type, - 'parent': project - } - ) - - session.commit() - - - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst deleted file mode 100644 index 64199869..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/thumbnail.rst +++ /dev/null @@ -1,71 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. _example/thumbnail: - -*********************** -Working with thumbnails -*********************** - -Components can be used as thumbnails on various entities, including -`Project`, `Task`, `AssetVersion` and `User`. To create and set a thumbnail -you can use the helper method -:meth:`~ftrack_api.entity.component.CreateThumbnailMixin.create_thumbnail` on -any entity that can have a thumbnail:: - - task = session.get('Task', my_task_id) - thumbnail_component = task.create_thumbnail('/path/to/image.jpg') - -It is also possible to set an entity thumbnail by setting its `thumbnail` -relation or `thumbnail_id` attribute to a component you would -like to use as a thumbnail. 
For a component to be usable as a thumbnail, -it should - - 1. Be a FileComponent. - 2. Exist in the *ftrack.server* :term:`location`. - 3. Be of an appropriate resolution and valid file type. - -The following example creates a new component in the server location, and -uses that as a thumbnail for a task:: - - task = session.get('Task', my_task_id) - server_location = session.query( - 'Location where name is "ftrack.server"' - ).one() - - thumbnail_component = session.create_component( - '/path/to/image.jpg', - dict(name='thumbnail'), - location=server_location - ) - task['thumbnail'] = thumbnail_component - session.commit() - -The next example reuses a version's thumbnail for the asset parent thumbnail:: - - asset_version = session.get('AssetVersion', my_asset_version_id) - asset_parent = asset_version['asset']['parent'] - asset_parent['thumbnail_id'] = asset_version['thumbnail_id'] - session.commit() - -.. _example/thumbnail/url: - -Retrieving thumbnail URL -======================== - -To get an URL to a thumbnail, `thumbnail_component`, which can be used used -to download or display the image in an interface, use the following:: - - import ftrack_api.symbol - server_location = session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) - thumbnail_url = server_location.get_thumbnail_url(thumbnail_component) - thumbnail_url_tiny = server_location.get_thumbnail_url( - thumbnail_component, size=100 - ) - thumbnail_url_large = server_location.get_thumbnail_url( - thumbnail_component, size=500 - ) - -.. seealso:: - - :ref:`example/component` diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst deleted file mode 100644 index eb86e2f8..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/timer.rst +++ /dev/null @@ -1,37 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. 
_example/timer: - -************ -Using timers -************ - -.. currentmodule:: ftrack_api.session - -Timers can be used to track how much time has been spend working on something. - -To start a timer for a user:: - - user = # Get a user from ftrack. - task = # Get a task from ftrack. - - user.start_timer(task) - -A timer has now been created for that user and should show up in the ftrack web -UI. - -To stop the currently running timer for a user and create a timelog from it:: - - user = # Get a user from ftrack. - - timelog = user.stop_timer() - -.. note:: - - Starting a timer when a timer is already running will raise in an exception. - Use the force parameter to automatically stop the running timer first. - - .. code-block:: python - - user.start_timer(task, force=True) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst deleted file mode 100644 index f1dede57..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/example/web_review.rst +++ /dev/null @@ -1,78 +0,0 @@ -.. - :copyright: Copyright (c) 2016 ftrack - -.. currentmodule:: ftrack_api.session - -.. _example/web_review: - -************************* -Publishing for web review -************************* - -Follow the :ref:`example/encode_media` example if you want to -upload and encode media using ftrack. - -If you already have a file encoded in the correct format and want to bypass -the built-in encoding in ftrack, you can create the component manually -and add it to the `ftrack.server` location:: - - # Retrieve or create version. 
- version = session.query('AssetVersion', 'SOME-ID') - - server_location = session.query('Location where name is "ftrack.server"').one() - filepath = '/path/to/local/file.mp4' - - component = version.create_component( - path=filepath, - data={ - 'name': 'ftrackreview-mp4' - }, - location=server_location - ) - - # Meta data needs to contain *frameIn*, *frameOut* and *frameRate*. - component['metadata']['ftr_meta'] = json.dumps({ - 'frameIn': 0, - 'frameOut': 150, - 'frameRate': 25 - }) - - component.session.commit() - -To publish an image for review the steps are similar:: - - # Retrieve or create version. - version = session.query('AssetVersion', 'SOME-ID') - - server_location = session.query('Location where name is "ftrack.server"').one() - filepath = '/path/to/image.jpg' - - component = version.create_component( - path=filepath, - data={ - 'name': 'ftrackreview-image' - }, - location=server_location - ) - - # Meta data needs to contain *format*. - component['metadata']['ftr_meta'] = json.dumps({ - 'format': 'image' - }) - - component.session.commit() - -Here is a list of components names and how they should be used: - -================== ===================================== -Component name Use -================== ===================================== -ftrackreview-image Images reviewable in the browser -ftrackreview-mp4 H.264/mp4 video reviewable in browser -ftrackreview-webm WebM video reviewable in browser -================== ===================================== - -.. note:: - - Make sure to use the pre-defined component names and set the `ftr_meta` on - the components or review will not work. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst deleted file mode 100644 index aa5cc779..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/glossary.rst +++ /dev/null @@ -1,76 +0,0 @@ -.. 
- :copyright: Copyright (c) 2014 ftrack - -******** -Glossary -******** - -.. glossary:: - - accessor - An implementation (typically a :term:`Python` plugin) for accessing - a particular type of storage using a specific protocol. - - .. seealso:: :ref:`locations/overview/accessors` - - action - Actions in ftrack provide a standardised way to integrate other tools, - either off-the-shelf or custom built, directly into your ftrack - workflow. - - .. seealso:: :ref:`ftrack:using/actions` - - api - Application programming interface. - - arrow - A Python library that offers a sensible, human-friendly approach to - creating, manipulating, formatting and converting dates, times, and - timestamps. Read more at http://crsmithdev.com/arrow/ - - asset - A container for :term:`asset versions `, typically - representing the output from an artist. For example, 'geometry' - from a modeling artist. Has an :term:`asset type` that categorises the - asset. - - asset type - Category for a particular asset. - - asset version - A specific version of data for an :term:`asset`. Can contain multiple - :term:`components `. - - component - A container to hold any type of data (such as a file or file sequence). - An :term:`asset version` can have any number of components, each with - a specific name. For example, a published version of geometry might - have two components containing the high and low resolution files, with - the component names as 'hires' and 'lowres' respectively. - - PEP-8 - Style guide for :term:`Python` code. Read the guide at - https://www.python.org/dev/peps/pep-0008/ - - plugin - :term:`Python` plugins are used by the API to extend it with new - functionality, such as :term:`locations ` or :term:`actions `. - - .. seealso:: :ref:`understanding_sessions/plugins` - - python - A programming language that lets you work more quickly and integrate - your systems more effectively. Often used in creative industries. 
Visit - the language website at http://www.python.org - - PyPi - :term:`Python` package index. The Python Package Index or PyPI is the - official third-party software repository for the Python programming - language. Visit the website at https://pypi.python.org/pypi - - resource identifier - A string that is stored in ftrack as a reference to a resource (such as - a file) in a specific location. Used by :term:`accessors ` to - determine how to access data. - - .. seealso:: :ref:`locations/overview/resource_identifiers` diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst deleted file mode 100644 index 1d378473..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/handling_events.rst +++ /dev/null @@ -1,315 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _handling_events: - -*************** -Handling events -*************** - -.. currentmodule:: ftrack_api.event - -Events are generated in ftrack when things happen such as a task being updated -or a new version being published. Each :class:`~ftrack_api.session.Session` -automatically connects to the event server and can be used to subscribe to -specific events and perform an action as a result. That action could be updating -another related entity based on a status change or generating folders when a new -shot is created for example. - -The :class:`~hub.EventHub` for each :class:`~ftrack_api.session.Session` is -accessible via :attr:`Session.event_hub -<~ftrack_api.session.Session.event_hub>`. - -.. _handling_events/subscribing: - -Subscribing to events -===================== - -To listen to events, you register a function against a subscription using -:meth:`Session.event_hub.subscribe `. The subscription -uses the :ref:`expression ` syntax and will filter -against each :class:`~base.Event` instance to determine if the registered -function should receive that event. 
If the subscription matches, the registered -function will be called with the :class:`~base.Event` instance as its sole -argument. The :class:`~base.Event` instance is a mapping like structure and can -be used like a normal dictionary. - -The following example subscribes a function to receive all 'ftrack.update' -events and then print out the entities that were updated:: - - import ftrack_api - - - def my_callback(event): - '''Event callback printing all new or updated entities.''' - for entity in event['data'].get('entities', []): - - # Print data for the entity. - print(entity) - - - # Subscribe to events with the update topic. - session = ftrack_api.Session() - session.event_hub.subscribe('topic=ftrack.update', my_callback) - -At this point, if you run this, your code would exit almost immediately. This -is because the event hub listens for events in a background thread. Typically, -you only want to stay connected whilst using the session, but in some cases you -will want to block and listen for events solely - a dedicated event processor. -To do this, use the :meth:`EventHub.wait ` method:: - - # Wait for events to be received and handled. - session.event_hub.wait() - -You cancel waiting for events by using a system interrupt (:kbd:`Ctrl-C`). -Alternatively, you can specify a *duration* to process events for:: - - # Only wait and process events for 5 seconds. - session.event_hub.wait(duration=5) - -.. note:: - - Events are continually received and queued for processing in the background - as soon as the connection to the server is established. As a result you may - see a flurry of activity as soon as you call - :meth:`~hub.EventHub.wait` for the first time. - -.. _handling_events/subscribing/subscriber_information: - -Subscriber information ----------------------- - -When subscribing, you can also specify additional information about your -subscriber. This contextual information can be useful when routing events, -particularly when :ref:`targeting events -`. 
By default, the -:class:`~hub.EventHub` will set some default information, but it can be -useful to enhance this. To do so, simply pass in *subscriber* as a dictionary of -data to the :meth:`~hub.EventHub.subscribe` method:: - - session.event_hub.subscribe( - 'topic=ftrack.update', - my_callback, - subscriber={ - 'id': 'my-unique-subscriber-id', - 'applicationId': 'maya' - } - ) - -.. _handling_events/subscribing/sending_replies: - -Sending replies ---------------- - -When handling an event it is sometimes useful to be able to send information -back to the source of the event. For example, -:ref:`ftrack:developing/events/list/ftrack.location.request-resolve` would -expect a resolved path to be sent back. - -You can craft a custom reply event if you want, but an easier way is just to -return the appropriate data from your handler. Any non *None* value will be -automatically sent as a reply:: - - def on_event(event): - # Send following data in automatic reply. - return {'success': True, 'message': 'Cool!'} - - session.event_hub.subscribe('topic=test-reply', on_event) - -.. seealso:: - - :ref:`handling_events/publishing/handling_replies` - -.. note:: - - Some events are published :ref:`synchronously - `. In this case, any returned data - is passed back to the publisher directly. - -.. _handling_events/subscribing/stopping_events: - -Stopping events ---------------- - -The *event* instance passed to each event handler also provides a method for -stopping the event, :meth:`Event.stop `. - -Once an event has been stopped, no further handlers for that specific event -will be called **locally**. Other handlers in other processes may still be -called. - -Combining this with setting appropriate priorities when subscribing to a topic -allows handlers to prevent lower priority handlers running when desired. - - >>> import ftrack_api - >>> import ftrack_api.event.base - >>> - >>> def callback_a(event): - ... '''Stop the event!''' - ... print('Callback A') - ... 
event.stop() - >>> - >>> def callback_b(event): - ... '''Never run.''' - ... print('Callback B') - >>> - >>> session = ftrack_api.Session() - >>> session.event_hub.subscribe( - ... 'topic=test-stop-event', callback_a, priority=10 - ... ) - >>> session.event_hub.subscribe( - ... 'topic=test-stop-event', callback_b, priority=20 - ... ) - >>> session.event_hub.publish( - ... ftrack_api.event.base.Event(topic='test-stop-event') - ... ) - >>> session.event_hub.wait(duration=5) - Callback A called. - -.. _handling_events/publishing: - -Publishing events -================= - -So far we have looked at listening to events coming from ftrack. However, you -are also free to publish your own events (or even publish relevant ftrack -events). - -To do this, simply construct an instance of :class:`ftrack_api.event.base.Event` -and pass it to :meth:`EventHub.publish ` via the session:: - - import ftrack_api.event.base - - event = ftrack_api.event.base.Event( - topic='my-company.some-topic', - data={'key': 'value'} - ) - session.event_hub.publish(event) - -The event hub will automatically add some information to your event before it -gets published, including the *source* of the event. By default the event source -is just the event hub, but you can customise this to provide more relevant -information if you want. For example, if you were publishing from within Maya:: - - session.event_hub.publish(ftrack_api.event.base.Event( - topic='my-company.some-topic', - data={'key': 'value'}, - source={ - 'applicationId': 'maya' - } - )) - -Remember that all supplied information can be used by subscribers to filter -events so the more accurate the information the better. - -.. _handling_events/publishing/synchronously: - -Publish synchronously ---------------------- - -It is also possible to call :meth:`~hub.EventHub.publish` synchronously by -passing `synchronous=True`. In synchronous mode, only local handlers will be -called. 
The result from each called handler is collected and all the results -returned together in a list:: - - >>> import ftrack_api - >>> import ftrack_api.event.base - >>> - >>> def callback_a(event): - ... return 'A' - >>> - >>> def callback_b(event): - ... return 'B' - >>> - >>> session = ftrack_api.Session() - >>> session.event_hub.subscribe( - ... 'topic=test-synchronous', callback_a, priority=10 - ... ) - >>> session.event_hub.subscribe( - ... 'topic=test-synchronous', callback_b, priority=20 - ... ) - >>> results = session.event_hub.publish( - ... ftrack_api.event.base.Event(topic='test-synchronous'), - ... synchronous=True - ... ) - >>> print results - ['A', 'B'] - -.. _handling_events/publishing/handling_replies: - -Handling replies ----------------- - -When publishing an event it is also possible to pass a callable that will be -called with any :ref:`reply event ` -received in response to the published event. - -To do so, simply pass in a callable as the *on_reply* parameter:: - - def handle_reply(event): - print 'Got reply', event - - session.event_hub.publish( - ftrack_api.event.base.Event(topic='test-reply'), - on_reply=handle_reply - ) - -.. _handling_events/publishing/targeting: - -Targeting events ----------------- - -In addition to subscribers filtering events to receive, it is also possible to -give an event a specific target to help route it to the right subscriber. - -To do this, set the *target* value on the event to an :ref:`expression -`. The expression will filter against registered -:ref:`subscriber information -`. - -For example, if you have many subscribers listening for a event, but only want -one of those subscribers to get the event, you can target the event to the -subscriber using its registered subscriber id:: - - session.event_hub.publish( - ftrack_api.event.base.Event( - topic='my-company.topic', - data={'key': 'value'}, - target='id=my-custom-subscriber-id' - ) - ) - -.. 
_handling_events/expressions: - -Expressions -=========== - -An expression is used to filter against a data structure, returning whether the -structure fulfils the expression requirements. Expressions are currently used -for subscriptions when :ref:`subscribing to events -` and for targets when :ref:`publishing targeted -events `. - -The form of the expression is loosely groupings of 'key=value' with conjunctions -to join them. - -For example, a common expression for subscriptions is to filter against an event -topic:: - - 'topic=ftrack.location.component-added' - -However, you can also perform more complex filtering, including accessing -nested parameters:: - - 'topic=ftrack.location.component-added and data.locationId=london' - -.. note:: - - If the structure being tested does not have any value for the specified - key reference then it is treated as *not* matching. - -You can also use a single wildcard '*' at the end of any value for matching -multiple values. For example, the following would match all events that have a -topic starting with 'ftrack.':: - - 'topic=ftrack.*' diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png deleted file mode 100644 index 7438cb52..00000000 Binary files a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/image/configuring_plugins_directory.png and /dev/null differ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/index.rst deleted file mode 100644 index e55b27d0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/index.rst +++ /dev/null @@ -1,42 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -################# -ftrack Python API -################# - -Welcome to the ftrack :term:`Python` :term:`API` documentation. - -.. 
important:: - - This is the new :term:`Python` client for the ftrack :term:`API`. If you are - migrating from the old client then please read the dedicated - :ref:`migration guide `. - -.. toctree:: - :maxdepth: 1 - - introduction - installing - tutorial - understanding_sessions - working_with_entities - querying - handling_events - caching - locations/index - example/index - api_reference/index - event_list - environment_variables - security_and_authentication - release/index - glossary - -****************** -Indices and tables -****************** - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/installing.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/installing.rst deleted file mode 100644 index 5e42621b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/installing.rst +++ /dev/null @@ -1,77 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _installing: - -********** -Installing -********** - -.. highlight:: bash - -Installation is simple with `pip `_:: - - pip install ftrack-python-api - -Building from source -==================== - -You can also build manually from the source for more control. 
First obtain a -copy of the source by either downloading the -`zipball `_ or -cloning the public repository:: - - git clone git@bitbucket.org:ftrack/ftrack-python-api.git - -Then you can build and install the package into your current Python -site-packages folder:: - - python setup.py install - -Alternatively, just build locally and manage yourself:: - - python setup.py build - -Building documentation from source ----------------------------------- - -To build the documentation from source:: - - python setup.py build_sphinx - -Then view in your browser:: - - file:///path/to/ftrack-python-api/build/doc/html/index.html - -Running tests against the source --------------------------------- - -With a copy of the source it is also possible to run the unit tests:: - - python setup.py test - -Dependencies -============ - -* `ftrack server `_ >= 3.3.11 -* `Python `_ >= 2.7, < 3 -* `Requests `_ >= 2, <3, -* `Arrow `_ >= 0.4.4, < 1, -* `termcolor `_ >= 1.1.0, < 2, -* `pyparsing `_ >= 2.0, < 3, -* `Clique `_ >= 1.2.0, < 2, -* `websocket-client `_ >= 0.40.0, < 1 - -Additional For building ------------------------ - -* `Sphinx `_ >= 1.2.2, < 2 -* `sphinx_rtd_theme `_ >= 0.1.6, < 1 -* `Lowdown `_ >= 0.1.0, < 2 - -Additional For testing ----------------------- - -* `Pytest `_ >= 2.3.5, < 3 -* `pytest-mock `_ >= 0.4, < 1, -* `pytest-catchlog `_ >= 1, <=2 \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst deleted file mode 100644 index 63fe9807..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/introduction.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _introduction: - -************ -Introduction -************ - -This API allows developers to write :term:`Python` scripts that talk directly -with an ftrack server. 
The scripts can perform operations against that server -depending on granted permissions. - -With any API it is important to find the right balance between flexibility and -usefulness. If an API is too low level then everyone ends up writing boilerplate -code for common problems and usually in an non-uniform way making it harder to -share scripts with others. It's also harder to get started with such an API. -Conversely, an API that attempts to be too smart can often become restrictive -when trying to do more advanced functionality or optimise for performance. - -With this API we have tried to strike the right balance between these two, -providing an API that should be simple to use out-of-the-box, but also expose -more flexibility and power when needed. - -Nothing is perfect though, so please do provide feedback on ways that we can -continue to improve this API for your specific needs. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst deleted file mode 100644 index 97483221..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/configuring.rst +++ /dev/null @@ -1,87 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/configuring: - -********************* -Configuring locations -********************* - -To allow management of data by a location or retrieval of filesystem paths where -supported, a location instance needs to be configured in a session with an -:term:`accessor` and :term:`structure`. - -.. note:: - - The standard builtin locations require no further setup or configuration - and it is not necessary to read the rest of this section to use them. - -Before continuing, make sure that you are familiar with the general concepts -of locations by reading the :ref:`locations/overview`. - -.. 
_locations/configuring/manually: - -Configuring manually -==================== - -Locations can be configured manually when using a session by retrieving the -location and setting the appropriate attributes:: - - location = session.query('Location where name is "my.location"').one() - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 50 - -.. _locations/configuring/automatically: - -Configuring automatically -========================= - -Often the configuration of locations should be determined by developers -looking after the core pipeline and so ftrack provides a way for a plugin to -be registered to configure the necessary locations for each session. This can -then be managed centrally if desired. - -The configuration is handled through the standard events system via a topic -*ftrack.api.session.configure-location*. Set up an :ref:`event listener plugin -` as normal with a register function that -accepts a :class:`~ftrack_api.session.Session` instance. Then register a -callback against the relevant topic to configure locations at the appropriate -time:: - - import ftrack_api - import ftrack_api.entity.location - import ftrack_api.accessor.disk - import ftrack_api.structure.id - - - def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - location = session.query('Location where name is "my.location"').one() - ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 50 - - - def register(session): - '''Register plugin with *session*.''' - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) - -.. 
note:: - - If you expect the plugin to also be evaluated by the legacy API, remember - to :ref:`validate the arguments `. - -So long as the directory containing the plugin exists on your -:envvar:`FTRACK_EVENT_PLUGIN_PATH`, the plugin will run for each session -created and any configured locations will then remain configured for the -duration of that related session. - -Be aware that you can configure many locations in one plugin or have separate -plugins for different locations - the choice is entirely up to you! diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst deleted file mode 100644 index ac1eaba6..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _developing/locations: - -********* -Locations -********* - -Learn how to access locations using the API and configure your own location -plugins. - -.. toctree:: - :maxdepth: 1 - - overview - tutorial - configuring diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst deleted file mode 100644 index 0a6ec171..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/overview.rst +++ /dev/null @@ -1,143 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/overview: - -******** -Overview -******** - -Locations provides a way to easily track and manage data (files, image sequences -etc.) using ftrack. - -With locations it is possible to see where published data is in the world and -also to transfer data automatically between different locations, even different -storage mechanisms, by defining a few simple :term:`Python` plugins. By keeping -track of the size of the data it also helps manage storage capacity better. 
In -addition, the intrinsic links to production information makes assigning work to -others and transferring only the relevant data much simpler as well as greatly -reducing the burden on those responsible for archiving finished work. - -Concepts -======== - -The system is implemented in layers using a few key concepts in order to provide -a balance between out of the box functionality and custom configuration. - -.. _locations/overview/locations: - -Locations ---------- - -Data locations can be varied in scope and meaning - a facility, a laptop, a -specific drive. As such, rather than place a hard limit on what can be -considered a location, ftrack simply requires that a location be identifiable by -a string and that string be unique to that location. - -A global company with facilities in many different parts of the world might -follow a location naming convention similar to the following: - - * 'ftrack.london.server01' - * 'ftrack.london.server02' - * 'ftrack.nyc.server01' - * 'ftrack.amsterdam.server01' - * '..' - -Whereas, for a looser setup, the following might suit better: - - * 'bjorns-workstation' - * 'fredriks-mobile' - * 'martins-laptop' - * 'cloud-backup' - -Availability ------------- - -When tracking data across several locations it is important to be able to -quickly find out where data is available and where it is not. As such, ftrack -provides simple mechanisms for retrieving information on the availability of a -:term:`component` in each location. - -For a single file, the availability with be either 0% or 100%. For containers, -such as file sequences, each file is tracked separately and the availability of -the container calculated as an overall percentage (e.g. 47%). - -.. _locations/overview/accessors: - -Accessors ---------- - -Due to the flexibility of what can be considered a location, the system must be -able to cope with locations that represent different ways of storing data. 
For -example, data might be stored on a local hard drive, a cloud service or even in -a database. - -In addition, the method of accessing that storage can change depending on -perspective - local filesystem, FTP, S3 API etc. - -To handle this, ftrack introduces the idea of an :term:`accessor` that provides -access to the data in a standard way. An accessor is implemented in -:term:`Python` following a set interface and can be configured at runtime to -provide relevant access to a location. - -With an accessor configured for a location, it becomes possible to not only -track data, but also manage it through ftrack by using the accessor to add and -remove data from the location. - -At present, ftrack includes a :py:class:`disk accessor -` for local filesystem access. More will be -added over time and developers are encouraged to contribute their own. - -.. _locations/overview/structure: - -Structure ---------- - -Another important consideration for locations is how data should be structured -in the location (folder structure and naming conventions). For example, -different facilities may want to use different folder structures, or different -storage mechanisms may use different paths for the data. - -For this, ftrack supports the use of a :term:`Python` structure plugin. This -plugin is called when adding a :term:`component` to a location in order to -determine the correct structure to use. - -.. note:: - - A structure plugin accepts an ftrack entity as its input and so can be - reused for generating general structures as well. For example, an action - callback could be implemented to create the base folder structure for some - selected shots by reusing a structure plugin. - -.. _locations/overview/resource_identifiers: - -Resource identifiers --------------------- - -When a :term:`component` can be linked to multiple locations it becomes -necessary to store information about the relationship on the link rather than -directly on the :term:`component` itself. 
The most important information is the -path to the data in that location. - -However, as seen above, not all locations may be filesystem based or accessed -using standard filesystem protocols. For this reason, and to help avoid -confusion, this *path* is referred to as a :term:`resource identifier` and no -limitations are placed on the format. Keep in mind though that accessors use -this information (retrieved from the database) in order to work out how to -access the data, so the format used must be compatible with all the accessors -used for any one location. For this reason, most -:term:`resource identifiers ` should ideally look like -relative filesystem paths. - -.. _locations/overview/resource_identifiers/transformer: - -Transformer -^^^^^^^^^^^ - -To further support custom formats for -:term:`resource identifiers `, it is also possible to -configure a resource identifier transformer plugin which will convert -the identifiers before they are stored centrally and after they are retrieved. - -A possible use case of this might be to store JSON encoded metadata about a path -in the database and convert this to an actual filesystem path on retrieval. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst deleted file mode 100644 index 4c5a6c0f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/locations/tutorial.rst +++ /dev/null @@ -1,193 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _locations/tutorial: - -******** -Tutorial -******** - -This tutorial is a walkthrough on how you interact with Locations using the -ftrack :term:`API`. Before you read this tutorial, make sure you familiarize -yourself with the location concepts by reading the :ref:`locations/overview`. - -All examples assume you are using Python 2.x, have the :mod:`ftrack_api` -module imported and a :class:`session ` created. - -.. 
code-block:: python - - import ftrack_api - session = ftrack_api.Session() - -.. _locations/creating-locations: - -Creating locations -================== - -Locations can be created just like any other entity using -:meth:`Session.create `:: - - location = session.create('Location', dict(name='my.location')) - session.commit() - -.. note:: - Location names beginning with ``ftrack.`` are reserved for internal use. Do - not use this prefix for your location names. - -To create a location only if it doesn't already exist use the convenience -method :meth:`Session.ensure `. This will return -either an existing matching location or a newly created one. - -Retrieving locations -==================== - -You can retrieve existing locations using the standard session -:meth:`~ftrack_api.session.Session.get` and -:meth:`~ftrack_api.session.Session.query` methods:: - - # Retrieve location by unique id. - location_by_id = session.get('Location', 'unique-id') - - # Retrieve location by name. - location_by_name = session.query( - 'Location where name is "my.location"' - ).one() - -To retrieve all existing locations use a standard query:: - - all_locations = session.query('Location').all() - for existing_location in all_locations: - print existing_location['name'] - -Configuring locations -===================== - -At this point you have created a custom location "my.location" in the database -and have an instance to reflect that. However, the location cannot be used in -this session to manage data unless it has been configured. To configure a -location for the session, set the appropriate attributes for accessor and -structure:: - - import tempfile - import ftrack_api.accessor.disk - import ftrack_api.structure.id - - # Assign a disk accessor with *temporary* storage - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=tempfile.mkdtemp() - ) - - # Assign using ID structure. 
- location.structure = ftrack_api.structure.id.IdStructure() - - # Set a priority which will be used when automatically picking locations. - # Lower number is higher priority. - location.priority = 30 - -To learn more about how to configure locations automatically in a session, see -:ref:`locations/configuring`. - -.. note:: - - If a location is not configured in a session it can still be used as a - standard entity and to find out availability of components - -Using components with locations -=============================== - -The Locations :term:`API` tries to use sane defaults to stay out of your way. -When creating :term:`components `, a location is automatically picked -using :meth:`Session.pick_location `:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_a = session.create_component(path=component_path) - -To override, specify a location explicitly:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_b = session.create_component( - path=component_path, location=location - ) - -If you set the location to ``None``, the component will only be present in the -special origin location for the duration of the session:: - - (_, component_path) = tempfile.mkstemp(suffix='.txt') - component_c = session.create_component(path=component_path, location=None) - -After creating a :term:`component` in a location, it can be added to another -location by calling :meth:`Location.add_component -` and passing the location to -use as the *source* location:: - - origin_location = session.query( - 'Location where name is "ftrack.origin"' - ).one() - location.add_component(component_c, origin_location) - -To remove a component from a location use :meth:`Location.remove_component -`:: - - location.remove_component(component_b) - -Each location specifies whether to automatically manage data when adding or -removing components. 
To ensure that a location does not manage data, mixin the -relevant location mixin class before use:: - - import ftrack_api - import ftrack_api.entity.location - - ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - -Accessing paths -=============== - -The locations system is designed to help avoid having to deal with filesystem -paths directly. This is particularly important when you consider that a number -of locations won't provide any direct filesystem access (such as cloud storage). - -However, it is useful to still be able to get a filesystem path from locations -that support them (typically those configured with a -:class:`~ftrack_api.accessor.disk.DiskAccessor`). For example, you might need to -pass a filesystem path to another application or perform a copy using a faster -protocol. - -To retrieve the path if available, use :meth:`Location.get_filesystem_path -`:: - - print location.get_filesystem_path(component_c) - -Obtaining component availability -================================ - -Components in locations have a notion of availability. For regular components, -consisting of a single file, the availability would be either 0 if the -component is unavailable or 100 percent if the component is available in the -location. Composite components, like image sequences, have an availability -which is proportional to the amount of child components that have been added to -the location. - -For example, an image sequence might currently be in a state of being -transferred to :data:`test.location`. If half of the images are transferred, it -might be possible to start working with the sequence. 
To check availability use -the helper :meth:`Session.get_component_availability -` method:: - - print session.get_component_availability(component_c) - -There are also convenience methods on both :meth:`components -` and :meth:`locations -` for -retrieving availability as well:: - - print component_c.get_availability() - print location.get_component_availability(component_c) - -Location events -=============== - -If you want to receive event notifications when components are added to or -removed from locations, you can subscribe to the topics published, -:data:`ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC` or -:data:`ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC` and the callback -you want to be run. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/querying.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/querying.rst deleted file mode 100644 index 7a200529..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/querying.rst +++ /dev/null @@ -1,263 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _querying: - -******** -Querying -******** - -.. currentmodule:: ftrack_api.session - -The API provides a simple, but powerful query language in addition to iterating -directly over entity attributes. Using queries can often substantially speed -up your code as well as reduce the amount of code written. - -A query is issued using :meth:`Session.query` and returns a list of matching -entities. The query always has a single *target* entity type that the query -is built against. This means that you cannot currently retrieve back a list of -different entity types in one query, though using :ref:`projections -` does allow retrieving related entities of a different -type in one go. - -The syntax for a query is: - -.. code-block:: none - - select from where - -However, both the selection of projections and criteria are optional. 
This means -the most basic query is just to fetch all entities of a particular type, such as -all projects in the system:: - - projects = session.query('Project') - -A query always returns a :class:`~ftrack_api.query.QueryResult` instance that -acts like a list with some special behaviour. The main special behaviour is that -the actual query to the server is not issued until you iterate or index into the -query results:: - - for project in projects: - print project['name'] - -You can also explicitly call :meth:`~ftrack_api.query.QueryResult.all` on the -result set:: - - projects = session.query('Project').all() - -.. note:: - - This behaviour exists in order to make way for efficient *paging* and other - optimisations in future. - -.. _querying/criteria: - -Using criteria to narrow results -================================ - -Often you will have some idea of the entities you want to retrieve. In this -case you can optimise your code by not fetching more data than you need. To do -this, add criteria to your query:: - - projects = session.query('Project where status is active') - -Each criteria follows the form: - -.. code-block:: none - - - -You can inspect the entity type or instance to find out which :ref:`attributes -` are available to filter on for a particular -entity type. The list of :ref:`operators ` that can -be applied and the types of values they expect is listed later on. - -.. _querying/criteria/combining: - -Combining criteria ------------------- - -Multiple criteria can be applied in a single expression by joining them with -either ``and`` or ``or``:: - - projects = session.query( - 'Project where status is active and name like "%thrones"' - ) - -You can use parenthesis to control the precedence when compound criteria are -used (by default ``and`` takes precedence):: - - projects = session.query( - 'Project where status is active and ' - '(name like "%thrones" or full_name like "%thrones")' - ) - -.. 
_querying/criteria/relationships: - -Filtering on relationships --------------------------- - -Filtering on relationships is also intuitively supported. Simply follow the -relationship using a dotted notation:: - - tasks_in_project = session.query( - 'Task where project.id is "{0}"'.format(project['id']) - ) - -This works even for multiple strides across relationships (though do note that -excessive strides can affect performance):: - - tasks_completed_in_project = session.query( - 'Task where project.id is "{0}" and ' - 'status.type.name is "Done"' - .format(project['id']) - ) - -The same works for collections (where each entity in the collection is compared -against the subsequent condition):: - - import arrow - - tasks_with_time_logged_today = session.query( - 'Task where timelogs.start >= "{0}"'.format(arrow.now().floor('day')) - ) - -In the above query, each *Task* that has at least one *Timelog* with a *start* -time greater than the start of today is returned. - -When filtering on relationships, the conjunctions ``has`` and ``any`` can be -used to specify how the criteria should be applied. This becomes important when -querying using multiple conditions on collection relationships. The relationship -condition can be written against the following form:: - - () - -For optimal performance ``has`` should be used for scalar relationships when -multiple conditions are involved. For example, to find notes by a specific -author when only name is known:: - - notes_written_by_jane_doe = session.query( - 'Note where author has (first_name is "Jane" and last_name is "Doe")' - ) - -This query could be written without ``has``, giving the same results:: - - notes_written_by_jane_doe = session.query( - 'Note where author.first_name is "Jane" and author.last_name is "Doe"' - ) - -``any`` should be used for collection relationships. 
For example, to find all -projects that have at least one metadata instance that has `key=some_key` -and `value=some_value` the query would be:: - - projects_where_some_key_is_some_value = session.query( - 'Project where metadata any (key=some_key and value=some_value)' - ) - -If the query was written without ``any``, projects with one metadata matching -*key* and another matching the *value* would be returned. - -``any`` can also be used to query for empty relationship collections:: - - users_without_timelogs = session.query( - 'User where not timelogs any ()' - ) - -.. _querying/criteria/operators: - -Supported operators -------------------- - -This is the list of currently supported operators: - -+--------------+----------------+----------------------------------------------+ -| Operators | Description | Example | -+==============+================+==============================================+ -| = | Exactly equal. | name is "martin" | -| is | | | -+--------------+----------------+----------------------------------------------+ -| != | Not exactly | name is_not "martin" | -| is_not | equal. | | -+--------------+----------------+----------------------------------------------+ -| > | Greater than | start after "2015-06-01" | -| after | exclusive. | | -| greater_than | | | -+--------------+----------------+----------------------------------------------+ -| < | Less than | end before "2015-06-01" | -| before | exclusive. | | -| less_than | | | -+--------------+----------------+----------------------------------------------+ -| >= | Greater than | bid >= 10 | -| | inclusive. | | -+--------------+----------------+----------------------------------------------+ -| <= | Less than | bid <= 10 | -| | inclusive. | | -+--------------+----------------+----------------------------------------------+ -| in | One of. | status.type.name in ("In Progress", "Done") | -+--------------+----------------+----------------------------------------------+ -| not_in | Not one of. 
| status.name not_in ("Omitted", "On Hold") | -+--------------+----------------+----------------------------------------------+ -| like | Matches | name like "%thrones" | -| | pattern. | | -+--------------+----------------+----------------------------------------------+ -| not_like | Does not match | name not_like "%thrones" | -| | pattern. | | -+--------------+----------------+----------------------------------------------+ -| has | Test scalar | author has (first_name is "Jane" and | -| | relationship. | last_name is "Doe") | -+--------------+----------------+----------------------------------------------+ -| any | Test collection| metadata any (key=some_key and | -| | relationship. | value=some_value) | -+--------------+----------------+----------------------------------------------+ - -.. _querying/projections: - -Optimising using projections -============================ - -In :ref:`understanding_sessions` we mentioned :ref:`auto-population -` of attribute values on access. This -meant that when iterating over a lot of entities and attributes a large number -of queries were being sent to the server. Ultimately, this can cause your code -to run slowly:: - - >>> projects = session.query('Project') - >>> for project in projects: - ... print( - ... # Multiple queries issued here for each attribute accessed for - ... # each project in the loop! - ... '{project[full_name]} - {project[status][name]})' - ... .format(project=project) - ... ) - - -Fortunately, there is an easy way to optimise. If you know what attributes you -are interested in ahead of time you can include them in your query string as -*projections* in order to fetch them in one go:: - - >>> projects = session.query( - ... 'select full_name, status.name from Project' - ... ) - >>> for project in projects: - ... print( - ... # No additional queries issued here as the values were already - ... # loaded by the above query! - ... '{project[full_name]} - {project[status][name]})' - ... 
.format(project=project) - ... ) - -Notice how this works for related entities as well. In the example above, we -also fetched the name of each *Status* entity attached to a project in the same -query, which meant that no further queries had to be issued when accessing those -nested attributes. - -.. note:: - - There are no arbitrary limits to the number (or depth) of projections, but - do be aware that excessive projections can ultimately result in poor - performance also. As always, it is about choosing the right tool for the - job. - -You can also customise the -:ref:`working_with_entities/entity_types/default_projections` to use for each -entity type when none are specified in the query string. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst deleted file mode 100644 index 0eef0b74..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/index.rst +++ /dev/null @@ -1,18 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _release: - -*************************** -Release and migration notes -*************************** - -Find out information about what has changed between versions and any important -migration notes to be aware of when switching to a new version. - -.. toctree:: - :maxdepth: 1 - - release_notes - migration - migrating_from_old_api diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst deleted file mode 100644 index 699ccf22..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/migrating_from_old_api.rst +++ /dev/null @@ -1,613 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _release/migrating_from_old_api: - -********************** -Migrating from old API -********************** - -.. 
currentmodule:: ftrack_api.session - -Why a new API? -============== - -With the introduction of Workflows, ftrack is capable of supporting a greater -diversity of industries. We're enabling teams to closely align the system with -their existing practices and naming conventions, resulting in a tool that feels -more natural and intuitive. The old API was locked to specific workflows, making -it impractical to support this new feature naturally. - -We also wanted this new flexibility to extend to developers, so we set about -redesigning the API to fully leverage the power in the system. And while we had -the wrenches out, we figured why not go that extra mile and build in some of the -features that we see developers having to continually implement in-house across -different companies - features such as caching and support for custom pipeline -extensions. In essence, we decided to build the API that, as pipeline -developers, we had always wanted from our production tracking and asset -management systems. We think we succeeded, and we hope you agree. - -Installing -========== - -Before, you used to download the API package from your ftrack instance. With -each release of the new API we make it available on :term:`PyPi`, and -installing is super simple: - -.. code-block:: none - - pip install ftrack-python-api - -Before installing, it is always good to check the latest -:ref:`release/release_notes` to see which version of the ftrack server is -required. - -.. seealso:: :ref:`installing` - -Overview -======== - -An API needs to be approachable, so we built the new API to feel -intuitive and familiar. We bundle all the core functionality into one place – a -session – with consistent methods for interacting with entities in the system:: - - import ftrack_api - session = ftrack_api.Session() - -The session is responsible for loading plugins and communicating with the ftrack -server and allows you to use multiple simultaneous sessions. 
You will no longer -need to explicitly call :meth:`ftrack.setup` to load plugins. - -The core methods are straightforward: - -Session.create - create a new entity, like a new version. -Session.query - fetch entities from the server using a powerful query language. -Session.delete - delete existing entities. -Session.commit - commit all changes in one efficient call. - -.. note:: - - The new API batches create, update and delete operations by default for - efficiency. To synchronise local changes with the server you need to call - :meth:`Session.commit`. - -In addition all entities in the API now act like simple Python dictionaries, -with some additional helper methods where appropriate. If you know a little -Python (or even if you don't) getting up to speed should be a breeze:: - - >>> print user.keys() - ['first_name', 'last_name', 'email', ...] - >>> print user['email'] - 'old@example.com' - >>> user['email'] = 'new@example.com' - -And of course, relationships between entities are reflected in a natural way as -well:: - - new_timelog = session.create('Timelog', {...}) - task['timelogs'].append(new_timelog) - -.. seealso :: :ref:`tutorial` - -The new API also makes use of caching in order to provide more efficient -retrieval of data by reducing the number of calls to the remote server. - -.. seealso:: :ref:`caching` - -Open source and standard code style -=================================== - -The new API is open source software and developed in public at -`Bitbucket `_. We welcome you -to join us in the development and create pull requests there. - -In the new API, we also follow the standard code style for Python, -:term:`PEP-8`. This means that you will now find that methods and variables are -written using ``snake_case`` instead of ``camelCase``, amongst other things. - -Package name -============ - -The new package is named :mod:`ftrack_api`. By using a new package name, we -enable you to use the old API and the new side-by-side in the same process. 
- -Old API:: - - import ftrack - -New API:: - - import ftrack_api - -Specifying your credentials -=========================== - -The old API used three environment variables to authenticate with your ftrack -instance. While these continue to work as before, you now also have -the option to specify them when initializing the session:: - - >>> import ftrack_api - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -In the examples below, will assume that you have imported the package and -created a session. - -.. seealso:: - - * :ref:`environment_variables` - * :ref:`tutorial` - - -Querying objects -================ - -The old API relied on predefined methods for querying objects and constructors -which enabled you to get an entity by it's id or name. - -Old API:: - - project = ftrack.getProject('dev_tutorial') - task = ftrack.Task('8923b7b3-4bf0-11e5-8811-3c0754289fd3') - user = ftrack.User('jane') - -New API:: - - project = session.query('Project where name is "dev_tutorial"').one() - task = session.get('Task', '8923b7b3-4bf0-11e5-8811-3c0754289fd3') - user = session.query('User where username is "jane"').one() - -While the new API can be a bit more verbose for simple queries, it is much more -powerful and allows you to filter on any field and preload related data:: - - tasks = session.query( - 'select name, parent.name from Task ' - 'where project.full_name is "My Project" ' - 'and status.type.short is "DONE" ' - 'and not timelogs any ()' - ).all() - -The above fetches all tasks for “My Project” that are done but have no timelogs. -It also pre-fetches related information about the tasks parent – all in one -efficient query. - -.. 
seealso:: :ref:`querying` - -Creating objects -================ - -In the old API, you create objects using specialized methods, such as -:meth:`ftrack.createProject`, :meth:`Project.createSequence` and -:meth:`Task.createShot`. - -In the new API, you can create any object using :meth:`Session.create`. In -addition, there are a few helper methods to reduce the amount of boilerplate -necessary to create certain objects. Don't forget to call :meth:`Session.commit` -once you have issued your create statements to commit your changes. - -As an example, let's look at populating a project with a few entities. - -Old API:: - - project = ftrack.getProject('migration_test') - - # Get default task type and status from project schema - taskType = project.getTaskTypes()[0] - taskStatus = project.getTaskStatuses(taskType)[0] - - sequence = project.createSequence('001') - - # Create five shots with one task each - for shot_number in xrange(10, 60, 10): - shot = sequence.createShot( - '{0:03d}'.format(shot_number) - ) - shot.createTask( - 'Task name', - taskType, - taskStatus - ) - - -New API:: - - project = session.query('Project where name is "migration_test"').one() - - # Get default task type and status from project schema - project_schema = project['project_schema'] - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - # Create sequence - sequence = session.create('Sequence', { - 'name': '001', - 'parent': project - }) - - # Create five shots with one task each - for shot_number in xrange(10, 60, 10): - shot = session.create('Shot', { - 'name': '{0:03d}'.format(shot_number), - 'parent': sequence, - 'status': default_shot_status - }) - session.create('Task', { - 'name': 'Task name', - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - # Commit all changes to the server. 
- session.commit() - -If you test the example above, one thing you might notice is that the new API -is much more efficient. Thanks to the transaction-based architecture in the new -API only a single call to the server is required to create all the objects. - -.. seealso:: :ref:`working_with_entities/creating` - -Updating objects -================ - -Updating objects in the new API works in a similar way to the old API. Instead -of using the :meth:`set` method on objects, you simply set the key of the -entity to the new value, and call :meth:`Session.commit` to persist the -changes to the database. - -The following example adjusts the duration and comment of a timelog for a -user using the old and new API, respectively. - -Old API:: - - import ftrack - - user = ftrack.User('john') - user.set('email', 'john@example.com') - -New API:: - - import ftrack_api - session = ftrack_api.Session() - - user = session.query('User where username is "john"').one() - user['email'] = 'john@example.com' - session.commit() - -.. seealso:: :ref:`working_with_entities/updating` - - -Date and datetime attributes -============================ - -In the old API, date and datetime attributes where represented using a standard -:mod:`datetime` object. In the new API we have opted to use the :term:`arrow` -library instead. Datetime attributes are represented in the server timezone, -but with the timezone information stripped. - -Old API:: - - >>> import datetime - - >>> task_old_api = ftrack.Task(task_id) - >>> task_old_api.get('startdate') - datetime.datetime(2015, 9, 2, 0, 0) - - >>> # Updating a datetime attribute - >>> task_old_api.set('startdate', datetime.date.today()) - -New API:: - - >>> import arrow - - >>> task_new_api = session.get('Task', task_id) - >>> task_new_api['start_date'] - - - >>> # In the new API, utilize the arrow library when updating a datetime. 
- >>> task_new_api['start_date'] = arrow.utcnow().floor('day') - >>> session.commit() - -Custom attributes -================= - -In the old API, custom attributes could be retrieved from an entity by using -the methods :meth:`get` and :meth:`set`, like standard attributes. In the new -API, custom attributes can be written and read from entities using the -``custom_attributes`` property, which provides a dictionary-like interface. - -Old API:: - - >>> task_old_api = ftrack.Task(task_id) - >>> task_old_api.get('my_custom_attribute') - - >>> task_old_api.set('my_custom_attribute', 'My new value') - - -New API:: - - >>> task_new_api = session.get('Task', task_id) - >>> task_new_api['custom_attributes']['my_custom_attribute'] - - - >>> task_new_api['custom_attributes']['my_custom_attribute'] = 'My new value' - -For more information on working with custom attributes and existing -limitations, please see: - -.. seealso:: - - :ref:`example/custom_attribute` - - -Using both APIs side-by-side -============================ - -With so many powerful new features and the necessary support for more flexible -workflows, we chose early on to not limit the new API design by necessitating -backwards compatibility. However, we also didn't want to force teams using the -existing API to make a costly all-or-nothing switchover. As such, we have made -the new API capable of coexisting in the same process as the old API:: - - import ftrack - import ftrack_api - -In addition, the old API will continue to be supported for some time, but do -note that it will not support the new `Workflows -`_ and will not have new features back ported -to it. - -In the first example, we obtain a task reference using the old API and -then use the new API to assign a user to it:: - - import ftrack - import ftrack_api - - # Create session for new API, authenticating using envvars. 
- session = ftrack_api.Session() - - # Obtain task id using old API - shot = ftrack.getShot(['migration_test', '001', '010']) - task = shot.getTasks()[0] - task_id = task.getId() - - user = session.query( - 'User where username is "{0}"'.format(session.api_user) - ).one() - session.create('Appointment', { - 'resource': user, - 'context_id': task_id, - 'type': 'assignment' - }) - -The second example fetches a version using the new API and uploads and sets a -thumbnail using the old API:: - - import arrow - import ftrack - - # fetch a version published today - version = session.query( - 'AssetVersion where date >= "{0}"'.format( - arrow.now().floor('day') - ) - ).first() - - # Create a thumbnail using the old api. - thumbnail_path = '/path/to/thumbnail.jpg' - version_old_api = ftrack.AssetVersion(version['id']) - thumbnail = version_old_api.createThumbnail(thumbnail_path) - - # Also set the same thumbnail on the task linked to the version. - task_old_api = ftrack.Task(version['task_id']) - task_old_api.setThumbnail(thumbnail) - -.. note:: - - It is now possible to set thumbnails using the new API as well, for more - info see :ref:`example/thumbnail`. - -Plugin registration -------------------- - -To make event and location plugin register functions work with both old and new -API the function should be updated to validate the input arguments. For old -plugins the register method should validate that the first input is of type -``ftrack.Registry``, and for the new API it should be of type -:class:`ftrack_api.session.Session`. - -If the input parameter is not validated, a plugin might be mistakenly -registered twice, since both the new and old API will look for plugins the -same directories. - -.. seealso:: - - :ref:`ftrack:release/migration/3.0.29/developer_notes/register_function` - - -Example: publishing a new version -================================= - -In the following example, we look at migrating a script which publishes a new -version with two components. 
- -Old API:: - - # Query a shot and a task to create the asset against. - shot = ftrack.getShot(['dev_tutorial', '001', '010']) - task = shot.getTasks()[0] - - # Create new asset. - asset = shot.createAsset(name='forest', assetType='geo') - - # Create a new version for the asset. - version = asset.createVersion( - comment='Added more leaves.', - taskid=task.getId() - ) - - # Get the calculated version number. - print version.getVersion() - - # Add some components. - previewPath = '/path/to/forest_preview.mov' - previewComponent = version.createComponent(path=previewPath) - - modelPath = '/path/to/forest_mode.ma' - modelComponent = version.createComponent(name='model', path=modelPath) - - # Publish. - asset.publish() - - # Add thumbnail to version. - thumbnail = version.createThumbnail('/path/to/forest_thumbnail.jpg') - - # Set thumbnail on other objects without duplicating it. - task.setThumbnail(thumbnail) - -New API:: - - # Query a shot and a task to create the asset against. - shot = session.query( - 'Shot where project.name is "dev_tutorial" ' - 'and parent.name is "001" and name is "010"' - ).one() - task = shot['children'][0] - - # Create new asset. - asset_type = session.query('AssetType where short is "geo"').first() - asset = session.create('Asset', { - 'parent': shot, - 'name': 'forest', - 'type': asset_type - }) - - # Create a new version for the asset. - status = session.query('Status where name is "Pending"').one() - version = session.create('AssetVersion', { - 'asset': asset, - 'status': status, - 'comment': 'Added more leaves.', - 'task': task - }) - - # In the new API, the version number is not set until we persist the changes - print 'Version number before commit: {0}'.format(version['version']) - session.commit() - print 'Version number after commit: {0}'.format(version['version']) - - # Add some components. 
- preview_path = '/path/to/forest_preview.mov' - preview_component = version.create_component(preview_path, location='auto') - - model_path = '/path/to/forest_mode.ma' - model_component = version.create_component(model_path, { - 'name': 'model' - }, location='auto') - - # Publish. Newly created version defaults to being published in the new api, - # but if set to false you can update it by setting the key on the version. - version['is_published'] = True - - # Persist the changes - session.commit() - - # Add thumbnail to version. - thumbnail = version.create_thumbnail( - '/path/to/forest_thumbnail.jpg' - ) - - # Set thumbnail on other objects without duplicating it. - task['thumbnail'] = thumbnail - session.commit() - - -Workarounds for missing convenience methods -=========================================== - -Query object by path --------------------- - -In the old API, there existed a convenience methods to get an object by -referencing the path (i.e object and parent names). - -Old API:: - - shot = ftrack.getShot(['dev_tutorial', '001', '010']) - -New API:: - - shot = session.query( - 'Shot where project.name is "dev_tutorial" ' - 'and parent.name is "001" and name is "010"' - ) - - -Retrieving an object's parents ------------------------------- - -To retrieve a list of an object's parents, you could call the method -:meth:`getParents` in the old API. Currently, it is not possible to fetch this -in a single call using the new API, so you will have to traverse the ancestors -one-by-one and fetch each object's parent. - -Old API:: - - parents = task.getParents() - -New API:: - - parents = [] - for item in task['link'][:-1]: - parents.append(session.get(item['type'], item['id'])) - -Note that link includes the task itself so `[:-1]` is used to only retreive the -parents. To learn more about the `link` attribute, see -:ref:`Using link attributes example`. 
- -Limitations in the current version of the API -============================================= - -The new API is still quite young and in active development and there are a few -limitations currently to keep in mind when using it. - -Missing schemas ---------------- - -The following entities are as of the time of writing not currently available -in the new API. Let us know if you depend on any of them. - - * Booking - * Calendar and Calendar Type - * Dependency - * Manager and Manager Type - * Phase - * Role - * Task template - * Temp data - -Action base class ------------------ -There is currently no helper class for creating actions using the new API. We -will add one in the near future. - -In the meantime, it is still possible to create actions without the base class -by listening and responding to the -:ref:`ftrack:developing/events/list/ftrack.action.discover` and -:ref:`ftrack:developing/events/list/ftrack.action.launch` events. - -Legacy location ---------------- - -The ftrack legacy disk locations utilizing the -:class:`InternalResourceIdentifierTransformer` has been deprecated. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst deleted file mode 100644 index 1df2211f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/migration.rst +++ /dev/null @@ -1,98 +0,0 @@ -.. - :copyright: Copyright (c) 2015 ftrack - -.. _release/migration: - -*************** -Migration notes -*************** - -.. note:: - - Migrating from the old ftrack API? Read the dedicated :ref:`guide - `. - -Migrate to upcoming 2.0.0 -========================= - -.. 
_release/migration/2.0.0/event_hub: - -Default behavior for connecting to event hub --------------------------------------------- - -The default behavior for the `ftrack_api.Session` class will change -for the argument `auto_connect_event_hub`, the default value will -switch from True to False. In order for code relying on the event hub -to continue functioning as expected you must modify your code -to explicitly set the argument to True or that you manually call -`session.event_hub.connect()`. - -.. note:: - If you rely on the `ftrack.location.component-added` or - `ftrack.location.component-removed` events to further process created - or deleted components remember that your session must be connected - to the event hub for the events to be published. - - -Migrate to 1.0.3 -================ - -.. _release/migration/1.0.3/mutating_dictionary: - -Mutating custom attribute dictionary ------------------------------------- - -Custom attributes can no longer be set by mutating entire dictionary:: - - # This will result in an error. - task['custom_attributes'] = dict(foo='baz', bar=2) - session.commit() - -Instead the individual values should be changed:: - - # This works better. - task['custom_attributes']['foo'] = 'baz' - task['custom_attributes']['bar'] = 2 - session.commit() - -Migrate to 1.0.0 -================ - -.. _release/migration/1.0.0/chunked_transfer: - -Chunked accessor transfers --------------------------- - -Data transfers between accessors is now buffered using smaller chunks instead of -all data at the same time. Included accessor file representations such as -:class:`ftrack_api.data.File` and :class:`ftrack_api.accessor.server.ServerFile` -are built to handle that. If you have written your own accessor and file -representation you may have to update it to support multiple reads using the -limit parameter and multiple writes. - -Migrate to 0.2.0 -================ - -.. 
_release/migration/0.2.0/new_api_name: - -New API name ------------- - -In this release the API has been renamed from `ftrack` to `ftrack_api`. This is -to allow both the old and new API to co-exist in the same environment without -confusion. - -As such, any scripts using this new API need to be updated to import -`ftrack_api` instead of `ftrack`. For example: - -**Previously**:: - - import ftrack - import ftrack.formatter - ... - -**Now**:: - - import ftrack_api - import ftrack_api.formatter - ... diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst deleted file mode 100644 index d7978ac0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/release/release_notes.rst +++ /dev/null @@ -1,1478 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _release/release_notes: - -************* -Release Notes -************* - -.. currentmodule:: ftrack_api.session - -.. release:: 1.8.2 - :date: 2020-01-14 - - .. change:: fixed - :tag: Test - - test_ensure_entity_with_non_string_data_types test fails due to missing parents. - - .. change:: changed - :tags: session - - Use WeakMethod when registering atexit handler to prevent memory leak. - -.. release:: 1.8.1 - :date: 2019-10-30 - - .. change:: changed - :tags: Location - - Increase chunk size for file operations to 1 Megabyte. - This value can now also be set from the environment variable: - - :envvar:`FTRACK_API_FILE_CHUNK_SIZE` - - .. change:: new - :tag: setup - - Add check for correct python version when installing with pip. - - .. change:: new - :tags: Notes - - Add support for note labels in create_note helper method. - - .. change:: changed - :tags: session - - Ensure errors from server are fully reported with stack trace. - -.. release:: 1.8.0 - :date: 2019-02-21 - - .. 
change:: fixed - :tags: documentation - - Event description component-removed report component-added event signature. - - .. change:: new - :tags: session, attribute - - Add new scalar type `object` to factory. - - .. change:: new - :tags: session, attribute - - Add support for list of `computed` attributes as part of schema - definition. A computed attribute is derived on the server side, and can - be time dependentant and differ between users. As such a computed - attribute is not suitable for long term encoding and will not be encoded - with the `persisted_only` stragey. - - .. change:: changed - - The `delayed_job` method has been deprecated in favour of a direct - `Session.call`. See :ref:`example/sync_with_ldap` for example - usage. - - .. change:: changed - - Private method :meth:`Session._call` has been converted to - a public method, :meth:`Session.call`. - - The private method will continue to work, but a pending deprecation - warning will be issued when used. The private method will be removed - entirely in version 2.0. - - .. change:: changed - :tags: session, events - - Event server connection error is too generic, - the actual error is now reported to users. - -.. release:: 1.7.1 - :date: 2018-11-13 - - .. change:: fixed - :tags: session, events - - Meta events for event hub connect and disconnect does not include - source. - - .. change:: fixed - :tags: session, location - - Missing context argument to - :meth:`ResourceIdentifierTransformer.decode` - in :meth:`Location.get_resource_identifier`. - -.. release:: 1.7.0 - :date: 2018-07-27 - - .. change:: new - :tags: session, events - - Added new events :ref:`event_list/ftrack.api.session.ready` and - :ref:`event_list/ftrack.api.session.reset` which can be used to perform - operations after the session is ready or has been reset, respectively. - - .. change:: changed - - Private method :meth:`Session._entity_reference` has been converted to - a public method, :meth:`Session.entity_reference`. 
- - The private method will continue to work, but a pending deprecation - warning will be issued when used. The private method will be removed - entirely in version 2.0. - - .. change:: fixed - :tags: session, events - - :meth:`Session.close` raises an exception if event hub was explicitly - connected after session initialization. - -.. release:: 1.6.0 - :date: 2018-05-17 - - .. change:: new - :tags: depreciation, events - - In version 2.0.0 of the `ftrack-python-api` the default behavior for - the :class:`Session` class will change for the argument - *auto_connect_event_hub*, the default value will switch from *True* to - *False*. - - A warning will now be emitted if async events are published or - subscribed to without *auto_connect_event_hub* has not explicitly been - set to *True*. - - .. seealso:: :ref:`release/migration/2.0.0/event_hub`. - - .. change:: fixed - :tags: documentation - - Event payload not same as what is being emitted for - :ref:`event_list/ftrack.location.component-added` and - :ref:`event_list/ftrack.location.component-removed`. - - .. change:: fixed - :tags: events - - Pyparsing is causing random errors in a threaded environment. - -.. release:: 1.5.0 - :date: 2018-04-19 - - .. change:: fixed - :tags: session, cache - - Cached entities not updated correctly when fetched in a nested - query. - -.. release:: 1.4.0 - :date: 2018-02-05 - - .. change:: fixed - :tags: session, cache - - Collection attributes not merged correctly when fetched from - server. - - .. change:: new - :tags: session, user, api key - - New function :meth:`ftrack_api.session.Session.reset_remote` allows - resetting of attributes to their default value. A convenience method - for resetting a users api key utalizing this was also added - :meth:`ftrack_api.entity.user.User.reset_api_key`. - - .. seealso:: :ref:`working_with_entities/resetting` - - .. change:: new - - Add support for sending out invitation emails to users. - See :ref:`example/invite_user` for example usage. 
- - .. change:: changed - :tags: cache, performance - - Entities fetched from cache are now lazily merged. Improved - performance when dealing with highly populated caches. - -.. release:: 1.3.3 - :date: 2017-11-16 - - - .. change:: new - :tags: users, ldap - - Add support for triggering a synchronization of - users between ldap and ftrack. See :ref:`example/sync_with_ldap` - for example usage. - - .. note:: - - This requires that you run ftrack 3.5.10 or later. - - .. change:: fixed - :tags: metadata - - Not possible to set metadata on creation. - -.. release:: 1.3.2 - :date: 2017-09-18 - - - .. change:: new - :tags: task template - - Added example for managing task templates through the API. See - :ref:`example/task_template` for example usage. - - .. change:: fixed - :tags: custom attributes - - Not possible to set hierarchical custom attributes on an entity that - has not been committed. - - .. change:: fixed - :tags: custom attributes - - Not possible to set custom attributes on an `Asset` that has not been - committed. - - .. change:: fixed - :tags: metadata - - Not possible to set metadata on creation. - -.. release:: 1.3.1 - :date: 2017-07-21 - - .. change:: fixed - :tags: session, events - - Calling disconnect on the event hub is slow. - -.. release:: 1.3.0 - :date: 2017-07-17 - - .. change:: new - :tags: session - - Support using a :class:`Session` as a context manager to aid closing of - session after use:: - - with ftrack_api.Session() as session: - # Perform operations with session. - - .. change:: new - :tags: session - - :meth:`Session.close` automatically called on Python exit if session not - already closed. - - .. change:: new - :tags: session - - Added :meth:`Session.close` to properly close a session's connections to - the server(s) as well as ensure event listeners are properly - unsubscribed. - - .. 
change:: new - - Added :exc:`ftrack_api.exception.ConnectionClosedError` to represent - error caused when trying to access servers over closed connection. - -.. release:: 1.2.0 - :date: 2017-06-16 - - .. change:: changed - :tags: events - - Updated the websocket-client dependency to version >= 0.40.0 to allow - for http proxies. - - .. change:: fixed - :tags: documentation - - The :ref:`example/publishing` example incorrectly stated that a - location would be automatically picked if the *location* keyword - argument was omitted. - -.. release:: 1.1.1 - :date: 2017-04-27 - - .. change:: fixed - :tags: custom attributes - - Cannot use custom attributes for `Asset` in ftrack versions prior to - `3.5.0`. - - .. change:: fixed - :tags: documentation - - The :ref:`example ` - section for managing `text` custom attributes is not correct. - -.. release:: 1.1.0 - :date: 2017-03-08 - - .. change:: new - :tags: server location, thumbnail - - Added method :meth:`get_thumbnail_url() ` - to server location, which can be used to retrieve a thumbnail URL. - See :ref:`example/thumbnail/url` for example usage. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on how to manage entity - links from the API. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on - how to manage custom attribute configurations from the API. - - .. change:: new - :tags: documentation - - Added :ref:`example ` on how to use - `SecurityRole` and `UserSecurityRole` to manage security roles for - users. - - .. change:: new - :tags: documentation - - Added :ref:`examples ` to show how - to list a user's assigned tasks and all users assigned to a task. - - .. change:: changed - :tags: session, plugins - - Added *plugin_arguments* to :class:`Session` to allow passing of - optional keyword arguments to discovered plugin register functions. 
Only - arguments defined in a plugin register function signature are passed so - existing plugin register functions do not need updating if the new - functionality is not desired. - - .. change:: fixed - :tags: documentation - - The :ref:`example/project` example can be confusing since the project - schema may not contain the necessary object types. - - .. change:: fixed - :tags: documentation - - Query tutorial article gives misleading information about the ``has`` - operator. - - .. change:: fixed - :tags: session - - Size is not set on sequence components when using - :meth:`Session.create_component`. - -.. release:: 1.0.4 - :date: 2017-01-13 - - .. change:: fixed - :tags: custom attributes - - Custom attribute values cannot be set on entities that are not - persisted. - - .. change:: fixed - :tags: events - - `username` in published event's source data is set to the operating - system user and not the API user. - -.. release:: 1.0.3 - :date: 2017-01-04 - - .. change:: changed - :tags: session, custom attributes - - Increased performance of custom attributes and better support for - filtering when using a version of ftrack that supports non-sparse - attribute values. - - .. change:: changed - :tags: session, custom attributes - - Custom attributes can no longer be set by mutating entire dictionary. - - .. seealso:: :ref:`release/migration/1.0.3/mutating_dictionary`. - -.. release:: 1.0.2 - :date: 2016-11-17 - - .. change:: changed - :tags: session - - Removed version restriction for higher server versions. - -.. release:: 1.0.1 - :date: 2016-11-11 - - .. change:: fixed - - :meth:`EventHub.publish ` - *on_reply* callback only called for first received reply. It should be - called for all relevant replies received. - -.. release:: 1.0.0 - :date: 2016-10-28 - - .. change:: new - :tags: session - - :meth:`Session.get_upload_metadata` has been added. - - .. 
change:: changed - :tags: locations, backwards-incompatible - - Data transfer between locations using accessors is now chunked to avoid - reading large files into memory. - - .. seealso:: :ref:`release/migration/1.0.0/chunked_transfer`. - - .. change:: changed - :tags: server accessor - - :class:`ftrack_api.accessor.server.ServerFile` has been refactored to - work with large files more efficiently. - - .. change:: changed - :tags: server accessor - - :class:`ftrack_api.accessor.server.ServerFile` has been updated to use - the get_upload_metadata API endpoint instead of - /component/getPutMetadata. - - .. change:: changed - :tags: locations - - :class:`ftrack_api.data.String` is now using a temporary file instead of - StringIO to avoid reading large files into memory. - - .. change:: fixed - :tags: session, locations - - `ftrack.centralized-storage` does not properly validate location - selection during user configuration. - -.. release:: 0.16.0 - :date: 2016-10-18 - - .. change:: new - :tags: session, encode media - - :meth:`Session.encode_media` can now automatically associate the output - with a version by specifying a *version_id* keyword argument. A new - helper method on versions, :meth:`AssetVersion.encode_media - `, can be - used to make versions playable in a browser. A server version of 3.3.32 - or higher is required for it to function properly. - - .. seealso:: :ref:`example/encode_media`. - - .. change:: changed - :tags: session, encode media - - You can now decide if :meth:`Session.encode_media` should keep or - delete the original component by specifying the *keep_original* - keyword argument. - - .. change:: changed - :tags: backwards-incompatible, collection - - Collection mutation now stores collection instance in operations rather - than underlying data structure. - - .. change:: changed - :tags: performance - - Improve performance of commit operations by optimising encoding and - reducing payload sent to server. - - .. 
change:: fixed - :tags: documentation - - Asset parent variable is declared but never used in - :ref:`example/publishing`. - - .. change:: fixed - :tags: documentation - - Documentation of hierarchical attributes and their limitations are - misleading. See :ref:`example/custom_attribute`. - -.. release:: 0.15.5 - :date: 2016-08-12 - - .. change:: new - :tags: documentation - - Added two new examples for :ref:`example/publishing` and - :ref:`example/web_review`. - - .. change:: fixed - :tags: session, availability - - :meth:`Session.get_component_availabilities` ignores passed locations - shortlist and includes all locations in returned availability mapping. - - .. change:: fixed - :tags: documentation - - Source distribution of ftrack-python-api does not include ftrack.css - in the documentation. - -.. release:: 0.15.4 - :date: 2016-07-12 - - .. change:: fixed - :tags: querying - - Custom offset not respected by - :meth:`QueryResult.first `. - - .. change:: changed - :tags: querying - - Using a custom offset with :meth:`QueryResult.one - ` helper method now raises an - exception as an offset is inappropriate when expecting to select a - single item. - - .. change:: fixed - :tags: caching - - :meth:`LayeredCache.remove ` - incorrectly raises :exc:`~exceptions.KeyError` if key only exists in - sub-layer cache. - -.. release:: 0.15.3 - :date: 2016-06-30 - - .. change:: fixed - :tags: session, caching - - A newly created entity now has the correct - :attr:`ftrack_api.symbol.CREATED` state when checked in caching layer. - Previously the state was :attr:`ftrack_api.symbol.NOT_SET`. Note that - this fix causes a change in logic and the stored - :class:`ftrack_api.operation.CreateEntityOperation` might hold data that - has not been fully :meth:`merged `. - - .. change:: fixed - :tags: documentation - - The second example in the assignments article is not working. - - .. 
change:: changed - :tags: session, caching - - A callable cache maker can now return ``None`` to indicate that it could - not create a suitable cache, but :class:`Session` instantiation can - continue safely. - -.. release:: 0.15.2 - :date: 2016-06-02 - - .. change:: new - :tags: documentation - - Added an example on how to work with assignments and allocations - :ref:`example/assignments_and_allocations`. - - .. change:: new - :tags: documentation - - Added :ref:`example/entity_links` article with - examples of how to manage asset version dependencies. - - .. change:: fixed - :tags: performance - - Improve performance of large collection management. - - .. change:: fixed - - Entities are not hashable because - :meth:`ftrack_api.entity.base.Entity.__hash__` raises `TypeError`. - -.. release:: 0.15.1 - :date: 2016-05-02 - - .. change:: fixed - :tags: collection, attribute, performance - - Custom attribute configurations does not cache necessary keys, leading - to performance issues. - - .. change:: fixed - :tags: locations, structure - - Standard structure does not work if version relation is not set on - the `Component`. - -.. release:: 0.15.0 - :date: 2016-04-04 - - .. change:: new - :tags: session, locations - - `ftrack.centralized-storage` not working properly on Windows. - -.. release:: 0.14.0 - :date: 2016-03-14 - - .. change:: changed - :tags: session, locations - - The `ftrack.centralized-storage` configurator now validates that name, - label and description for new locations are filled in. - - .. change:: new - :tags: session, client review - - Added :meth:`Session.send_review_session_invite` and - :meth:`Session.send_review_session_invites` that can be used to inform - review session invitees about a review session. - - .. seealso:: :ref:`Usage guide `. - - .. change:: new - :tags: session, locations - - Added `ftrack.centralized-storage` configurator as a private module. 
It - implements a wizard like interface used to configure a centralised - storage scenario. - - .. change:: new - :tags: session, locations - - `ftrack.centralized-storage` storage scenario is automatically - configured based on information passed from the server with the - `query_server_information` action. - - .. change:: new - :tags: structure - - Added :class:`ftrack_api.structure.standard.StandardStructure` with - hierarchy based resource identifier generation. - - .. change:: new - :tags: documentation - - Added more information to the :ref:`understanding_sessions/plugins` - article. - - .. change:: fixed - - :meth:`~ftrack_api.entity.user.User.start_timer` arguments *comment* - and *name* are ignored. - - .. change:: fixed - - :meth:`~ftrack_api.entity.user.User.stop_timer` calculates the wrong - duration when the server is not running in UTC. - - For the duration to be calculated correctly ftrack server version - >= 3.3.15 is required. - -.. release:: 0.13.0 - :date: 2016-02-10 - - .. change:: new - :tags: component, thumbnail - - Added improved support for handling thumbnails. - - .. seealso:: :ref:`example/thumbnail`. - - .. change:: new - :tags: session, encode media - - Added :meth:`Session.encode_media` that can be used to encode - media to make it playable in a browser. - - .. seealso:: :ref:`example/encode_media`. - - .. change:: fixed - - :meth:`Session.commit` fails when setting a custom attribute on an asset - version that has been created and committed in the same session. - - .. change:: new - :tags: locations - - Added :meth:`ftrack_api.entity.location.Location.get_url` to retrieve a - URL to a component in a location if supported by the - :class:`ftrack_api.accessor.base.Accessor`. - - .. change:: new - :tags: documentation - - Updated :ref:`example/note` and :ref:`example/job` articles with - examples of how to use note and job components. - - .. 
change:: changed - :tags: logging, performance - - Logged messages now evaluated lazily using - :class:`ftrack_api.logging.LazyLogMessage` as optimisation. - - .. change:: changed - :tags: session, events - - Auto connection of event hub for :class:`Session` now takes place in - background to improve session startup time. - - .. change:: changed - :tags: session, events - - Event hub connection timeout is now 60 seconds instead of 10. - - .. change:: changed - :tags: server version - - ftrack server version >= 3.3.11, < 3.4 required. - - .. change:: changed - :tags: querying, performance - - :class:`ftrack_api.query.QueryResult` now pages internally using a - specified page size in order to optimise record retrieval for large - query results. :meth:`Session.query` has also been updated to allow - passing a custom page size at runtime if desired. - - .. change:: changed - :tags: querying, performance - - Increased performance of :meth:`~ftrack_api.query.QueryResult.first` and - :meth:`~ftrack_api.query.QueryResult.one` by using new `limit` syntax. - -.. release:: 0.12.0 - :date: 2015-12-17 - - .. change:: new - :tags: session, widget url - - Added :meth:`ftrack_api.session.Session.get_widget_url` to retrieve an - authenticated URL to info or tasks widgets. - -.. release:: 0.11.0 - :date: 2015-12-04 - - .. change:: new - :tags: documentation - - Updated :ref:`release/migrating_from_old_api` with new link attribute - and added a :ref:`usage example `. - - .. change:: new - :tags: caching, schemas, performance - - Caching of schemas for increased performance. - :meth:`ftrack_api.session.Session` now accepts `schema_cache_path` - argument to specify location of schema cache. If not set it will use a - temporary folder. - -.. release:: 0.10.0 - :date: 2015-11-24 - - .. change:: changed - :tags: tests - - Updated session test to use mocked schemas for encoding tests. - - .. 
change:: fixed - - Documentation specifies Python 2.6 instead of Python 2.7 as minimum - interpreter version. - - .. change:: fixed - - Documentation does not reflect current dependencies. - - .. change:: changed - :tags: session, component, locations, performance - - Improved performance of - :meth:`ftrack_api.entity.location.Location.add_components` by batching - database operations. - - As a result it is no longer possible to determine progress of transfer - for container components in realtime as events will be emitted in batch - at end of operation. - - In addition, it is now the callers responsibility to clean up any - transferred data should an error occur during either data transfer or - database registration. - - .. change:: changed - :tags: exception, locations - - :exc:`ftrack_api.exception.ComponentInLocationError` now accepts either - a single component or multiple components and makes them available as - *components* in its *details* parameter. - - .. change:: changed - :tags: tests - - Updated session test to not fail on the new private link attribute. - - .. change:: changed - :tags: session - - Internal method :py:meth:`_fetch_schemas` has beed renamed to - :py:meth:`Session._load_schemas` and now requires a `schema_cache_path` - argument. - -.. release:: 0.9.0 - :date: 2015-10-30 - - .. change:: new - :tags: caching - - Added :meth:`ftrack_api.cache.Cache.values` as helper for retrieving - all values in cache. - - .. change:: fixed - :tags: session, caching - - :meth:`Session.merge` redundantly attempts to expand entity references - that have already been expanded causing performance degradation. - - .. change:: new - :tags: session - - :meth:`Session.rollback` has been added to support cleanly reverting - session state to last good state following a failed commit. - - .. change:: changed - :tags: events - - Event hub will no longer allow unverified SSL connections. - - .. seealso:: :ref:`security_and_authentication`. - - .. 
change:: changed - :tags: session - - :meth:`Session.reset` no longer resets the connection. It also clears - all local state and re-configures certain aspects that are cache - dependant, such as location plugins. - - .. change:: fixed - :tags: factory - - Debug logging messages using incorrect index for formatting leading to - misleading exception. - -.. release:: 0.8.4 - :date: 2015-10-08 - - .. change:: new - - Added initial support for custom attributes. - - .. seealso:: :ref:`example/custom_attribute`. - - .. change:: new - :tags: collection, attribute - - Added :class:`ftrack_api.collection.CustomAttributeCollectionProxy` and - :class:`ftrack_api.attribute.CustomAttributeCollectionAttribute` to - handle custom attributes. - - .. change:: changed - :tags: collection, attribute - - ``ftrack_api.attribute.MappedCollectionAttribute`` renamed to - :class:`ftrack_api.attribute.KeyValueMappedCollectionAttribute` to more - closely reflect purpose. - - .. change:: changed - :tags: collection - - :class:`ftrack_api.collection.MappedCollectionProxy` has been refactored - as a generic base class with key, value specialisation handled in new - dedicated class - :class:`ftrack_api.collection.KeyValueMappedCollectionProxy`. This is - done to avoid confusion following introduction of new - :class:`ftrack_api.collection.CustomAttributeCollectionProxy` class. - - .. change:: fixed - :tags: events - - The event hub does not always reconnect after computer has come back - from sleep. - -.. release:: 0.8.3 - :date: 2015-09-28 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2.1, < 3.4 required. - - .. change:: changed - - Updated *ftrack.server* location implementation. A server version of 3.3 - or higher is required for it to function properly. - - .. change:: fixed - - :meth:`ftrack_api.entity.factory.StandardFactory.create` not respecting - *bases* argument. - -.. release:: 0.8.2 - :date: 2015-09-16 - - .. 
change:: fixed - :tags: session - - Wrong file type set on component when publishing image sequence using - :meth:`Session.create_component`. - -.. release:: 0.8.1 - :date: 2015-09-08 - - .. change:: fixed - :tags: session - - :meth:`Session.ensure` not implemented. - -.. release:: 0.8.0 - :date: 2015-08-28 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2.1, < 3.3 required. - - .. change:: new - - Added lists example. - - .. seealso:: :ref:`example/list`. - - .. change:: new - - Added convenience methods for handling timers - :class:`~ftrack_api.entity.user.User.start_timer` and - :class:`~ftrack_api.entity.user.User.stop_timer`. - - .. change:: changed - - The dynamic API classes Type, Status, Priority and - StatusType have been renamed to Type, Status, Priority and State. - - .. change:: changed - - :meth:`Session.reset` now also clears the top most level cache (by - default a :class:`~ftrack_api.cache.MemoryCache`). - - .. change:: fixed - - Some invalid server url formats not detected. - - .. change:: fixed - - Reply events not encoded correctly causing them to be misinterpreted by - the server. - -.. release:: 0.7.0 - :date: 2015-08-24 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.2, < 3.3 required. - - .. change:: changed - - Removed automatic set of default statusid, priorityid and typeid on - objects as that is now either not mandatory or handled on server. - - .. change:: changed - - Updated :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_statuses` - and :meth:`~ftrack_api.entity.project_schema.ProjectSchema.get_types` to - handle custom objects. - -.. release:: 0.6.0 - :date: 2015-08-19 - - .. change:: changed - :tags: server version - - ftrack server version >= 3.1.8, < 3.2 required. - - .. change:: changed - :tags: querying, documentation - - Updated documentation with details on new operators ``has`` and ``any`` - for querying relationships. - - .. 
seealso:: :ref:`querying/criteria/operators` - -.. release:: 0.5.2 - :date: 2015-07-29 - - .. change:: changed - :tags: server version - - ftrack server version 3.1.5 or greater required. - - .. change:: changed - - Server reported errors are now more readable and are no longer sometimes - presented as an HTML page. - -.. release:: 0.5.1 - :date: 2015-07-06 - - .. change:: changed - - Defaults computed by :class:`~ftrack_api.entity.factory.StandardFactory` - are now memoised per session to improve performance. - - .. change:: changed - - :class:`~ftrack_api.cache.Memoiser` now supports a *return_copies* - parameter to control whether deep copies should be returned when a value - was retrieved from the cache. - -.. release:: 0.5.0 - :date: 2015-07-02 - - .. change:: changed - - Now checks for server compatibility and requires an ftrack server - version of 3.1 or greater. - - .. change:: new - - Added convenience methods to :class:`~ftrack_api.query.QueryResult` to - fetch :meth:`~ftrack_api.query.QueryResult.first` or exactly - :meth:`~ftrack_api.query.QueryResult.one` result. - - .. change:: new - :tags: notes - - Added support for handling notes. - - .. seealso:: :ref:`example/note`. - - .. change:: changed - - Collection attributes generate empty collection on first access when no - remote value available. This allows interacting with a collection on a - newly created entity before committing. - - .. change:: fixed - :tags: session - - Ambiguous error raised when :class:`Session` is started with an invalid - user or key. - - .. change:: fixed - :tags: caching, session - - :meth:`Session.merge` fails against - :class:`~ftrack_api.cache.SerialisedCache` when circular reference - encountered due to entity identity not being prioritised in merge. - -.. release:: 0.4.3 - :date: 2015-06-29 - - .. change:: fixed - :tags: plugins, session, entity types - - Entity types not constructed following standard install. 
- - This is because the discovery of the default plugins is unreliable - across Python installation processes (pip, wheel etc). Instead, the - default plugins have been added as templates to the :ref:`event_list` - documentation and the - :class:`~ftrack_api.entity.factory.StandardFactory` used to create any - missing classes on :class:`Session` startup. - -.. release:: 0.4.2 - :date: 2015-06-26 - - .. change:: fixed - :tags: metadata - - Setting exact same metadata twice can cause - :exc:`~ftrack_api.exception.ImmutableAttributeError` to be incorrectly - raised. - - .. change:: fixed - :tags: session - - Calling :meth:`Session.commit` does not clear locally set attribute - values leading to immutability checks being bypassed in certain cases. - -.. release:: 0.4.1 - :date: 2015-06-25 - - .. change:: fixed - :tags: metadata - - Setting metadata twice in one session causes `KeyError`. - -.. release:: 0.4.0 - :date: 2015-06-22 - - .. change:: changed - :tags: documentation - - Documentation extensively updated. - - .. change:: new - :tags: Client review - - Added support for handling review sessions. - - .. seealso:: :ref:`Usage guide `. - - .. change:: fixed - - Metadata property not working in line with rest of system, particularly - the caching framework. - - .. change:: new - :tags: collection - - Added :class:`ftrack_api.collection.MappedCollectionProxy` class for - providing a dictionary interface to a standard - :class:`ftrack_api.collection.Collection`. - - .. change:: new - :tags: collection, attribute - - Added :class:`ftrack_api.attribute.MappedCollectionAttribute` class for - describing an attribute that should use the - :class:`ftrack_api.collection.MappedCollectionProxy`. - - .. change:: new - - Entities that use composite primary keys are now fully supported in the - session, including for :meth:`Session.get` and :meth:`Session.populate`. - - .. 
change:: change - - Base :class:`ftrack_api.entity.factory.Factory` refactored to separate - out attribute instantiation into dedicated methods to make extending - simpler. - - .. change:: change - :tags: collection, attribute - - :class:`ftrack_api.attribute.DictionaryAttribute` and - :class:`ftrack_api.attribute.DictionaryAttributeCollection` removed. - They have been replaced by the new - :class:`ftrack_api.attribute.MappedCollectionAttribute` and - :class:`ftrack_api.collection.MappedCollectionProxy` respectively. - - .. change:: new - :tags: events - - :class:`Session` now supports an *auto_connect_event_hub* argument to - control whether the built in event hub should connect to the server on - session initialisation. This is useful for when only local events should - be supported or when the connection should be manually controlled. - -.. release:: 0.3.0 - :date: 2015-06-14 - - .. change:: fixed - - Session operations may be applied server side in invalid order resulting - in unexpected error. - - .. change:: fixed - - Creating and deleting an entity in single commit causes error as create - operation never persisted to server. - - Now all operations for the entity are ignored on commit when this case - is detected. - - .. change:: changed - - Internally moved from differential state to operation tracking for - determining session changes when persisting. - - .. change:: new - - ``Session.recorded_operations`` attribute for examining current - pending operations on a :class:`Session`. - - .. change:: new - - :meth:`Session.operation_recording` context manager for suspending - recording operations temporarily. Can also manually control - ``Session.record_operations`` boolean. - - .. change:: new - - Operation classes to track individual operations occurring in session. - - .. change:: new - - Public :meth:`Session.merge` method for merging arbitrary values into - the session manually. - - .. 
change:: changed - - An entity's state is now computed from the operations performed on it - and is no longer manually settable. - - .. change:: changed - - ``Entity.state`` attribute removed. Instead use the new inspection - :func:`ftrack_api.inspection.state`. - - Previously:: - - print entity.state - - Now:: - - import ftrack_api.inspection - print ftrack_api.inspection.state(entity) - - There is also an optimised inspection, - :func:`ftrack_api.inspection.states`. for determining state of many - entities at once. - - .. change:: changed - - Shallow copying a :class:`ftrack_api.symbol.Symbol` instance now - returns same instance. - -.. release:: 0.2.0 - :date: 2015-06-04 - - .. change:: changed - - Changed name of API from `ftrack` to `ftrack_api`. - - .. seealso:: :ref:`release/migration/0.2.0/new_api_name`. - - .. change:: new - :tags: caching - - Configurable caching support in :class:`Session`, including the ability - to use an external persisted cache and new cache implementations. - - .. seealso:: :ref:`caching`. - - .. change:: new - :tags: caching - - :meth:`Session.get` now tries to retrieve matching entity from - configured cache first. - - .. change:: new - :tags: serialisation, caching - - :meth:`Session.encode` supports a new mode *persisted_only* that will - only encode persisted attribute values. - - .. change:: changed - - Session.merge method is now private (:meth:`Session._merge`) until it is - qualified for general usage. - - .. change:: changed - :tags: entity state - - :class:`~ftrack_api.entity.base.Entity` state now managed on the entity - directly rather than stored separately in the :class:`Session`. - - Previously:: - - session.set_state(entity, state) - print session.get_state(entity) - - Now:: - - entity.state = state - print entity.state - - .. change:: changed - :tags: entity state - - Entity states are now :class:`ftrack_api.symbol.Symbol` instances rather - than strings. 
- - Previously:: - - entity.state = 'created' - - Now:: - - entity.state = ftrack_api.symbol.CREATED - - .. change:: fixed - :tags: entity state - - It is now valid to transition from most entity states to an - :attr:`ftrack_api.symbol.NOT_SET` state. - - .. change:: changed - :tags: caching - - :class:`~ftrack_api.cache.EntityKeyMaker` removed and replaced by - :class:`~ftrack_api.cache.StringKeyMaker`. Entity identity now - computed separately and passed to key maker to allow key maker to work - with non entity instances. - - .. change:: fixed - :tags: entity - - Internal data keys ignored when re/constructing entities reducing - distracting and irrelevant warnings in logs. - - .. change:: fixed - :tags: entity - - :class:`~ftrack_api.entity.base.Entity` equality test raises error when - other is not an entity instance. - - .. change:: changed - :tags: entity, caching - - :meth:`~ftrack_api.entity.base.Entity.merge` now also merges state and - local attributes. In addition, it ensures values being merged have also - been merged into the session and outputs more log messages. - - .. change:: fixed - :tags: inspection - - :func:`ftrack_api.inspection.identity` returns different result for same - entity depending on whether entity type is unicode or string. - - .. change:: fixed - - :func:`ftrack_api.mixin` causes method resolution failure when same - class mixed in multiple times. - - .. change:: changed - - Representations of objects now show plain id rather than converting to - hex. - - .. change:: fixed - :tags: events - - Event hub raises TypeError when listening to ftrack.update events. - - .. change:: fixed - :tags: events - - :meth:`ftrack_api.event.hub.EventHub.subscribe` fails when subscription - argument contains special characters such as `@` or `+`. - - .. change:: fixed - :tags: collection - - :meth:`ftrack_api.collection.Collection` incorrectly modifies entity - state on initialisation. - -.. release:: 0.1.0 - :date: 2015-03-25 - - .. 
change:: changed - - Moved standardised construct entity type logic to core package (as part - of the :class:`~ftrack_api.entity.factory.StandardFactory`) for easier - reuse and extension. - -.. release:: 0.1.0-beta.2 - :date: 2015-03-17 - - .. change:: new - :tags: locations - - Support for ftrack.server location. The corresponding server build is - required for it to function properly. - - .. change:: new - :tags: locations - - Support for managing components in locations has been added. Check out - the :ref:`dedicated tutorial `. - - .. change:: new - - A new inspection API (:mod:`ftrack_api.inspection`) has been added for - extracting useful information from objects in the system, such as the - identity of an entity. - - .. change:: changed - - ``Entity.primary_key`` and ``Entity.identity`` have been removed. - Instead, use the new :func:`ftrack_api.inspection.primary_key` and - :func:`ftrack_api.inspection.identity` functions. This was done to make it - clearer the the extracted information is determined from the current - entity state and modifying the returned object will have no effect on - the entity instance itself. - - .. change:: changed - - :func:`ftrack_api.inspection.primary_key` now returns a mapping of the - attribute names and values that make up the primary key, rather than - the previous behaviour of returning a tuple of just the values. To - emulate previous behaviour do:: - - ftrack_api.inspection.primary_key(entity).values() - - .. change:: changed - - :meth:`Session.encode` now supports different strategies for encoding - entities via the entity_attribute_strategy* keyword argument. This makes - it possible to use this method for general serialisation of entity - instances. - - .. change:: changed - - Encoded referenced entities are now a mapping containing - *__entity_type__* and then each key, value pair that makes up the - entity's primary key. 
For example:: - - { - '__entity_type__': 'User', - 'id': '8b90a444-4e65-11e1-a500-f23c91df25eb' - } - - .. change:: changed - - :meth:`Session.decode` no longer automatically adds decoded entities to - the :class:`Session` cache making it possible to use decode - independently. - - .. change:: new - - Added :meth:`Session.merge` for merging entities recursively into the - session cache. - - .. change:: fixed - - Replacing an entity in a :class:`ftrack_api.collection.Collection` with an - identical entity no longer raises - :exc:`ftrack_api.exception.DuplicateItemInCollectionError`. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py deleted file mode 100644 index 5fda0195..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -import logging - -import ftrack_api.session - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. - if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - # Perform your logic here, such as subscribe to an event. 
- pass - - logger.debug('Plugin registered') diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_safe.py deleted file mode 100644 index e69de29b..00000000 diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py deleted file mode 100644 index dd11136d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/resource/example_plugin_using_session.py +++ /dev/null @@ -1,37 +0,0 @@ -# :coding: utf-8 -import logging - -import ftrack_api.session - - -def register_with_session_ready(event): - '''Called when session is ready to be used.''' - logger = logging.getLogger('com.example.example-plugin') - logger.debug('Session ready.') - session = event['data']['session'] - - # Session is now ready and can be used to e.g. query objects. - task = session.query('Task').first() - print task['name'] - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.example.example-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. 
- if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.ready', - register_with_session_ready - ) - - logger.debug('Plugin registered') diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst deleted file mode 100644 index 724afa81..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/security_and_authentication.rst +++ /dev/null @@ -1,38 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _security_and_authentication: - -*************************** -Security and authentication -*************************** - -Self signed SSL certificate -=========================== - -When using a self signed SSL certificate the API may fail to connect if it -cannot verify the SSL certificate. Under the hood the -`requests `_ library is used and it -must be specified where the trusted certificate authority can be found using the -environment variable ``REQUESTS_CA_BUNDLE``. - -.. seealso:: `SSL Cert Verification `_ - -InsecurePlatformWarning -======================= - -When using this API you may sometimes see a warning:: - - InsecurePlatformWarning: A true SSLContext object is not available. This - prevents urllib3 from configuring SSL appropriately and may cause certain - SSL connections to fail. - -If you encounter this warning, its recommended you upgrade to Python 2.7.9, or -use pyOpenSSL. To use pyOpenSSL simply:: - - pip install pyopenssl ndg-httpsclient pyasn1 - -and the `requests `_ library used by -this API will use pyOpenSSL instead. - -.. 
seealso:: `InsecurePlatformWarning `_ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst deleted file mode 100644 index 73b352eb..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/tutorial.rst +++ /dev/null @@ -1,156 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _tutorial: - -******** -Tutorial -******** - -.. currentmodule:: ftrack_api.session - -This tutorial provides a quick dive into using the API and the broad stroke -concepts involved. - -First make sure the ftrack Python API is :ref:`installed `. - -Then start a Python session and import the ftrack API:: - - >>> import ftrack_api - -The API uses :ref:`sessions ` to manage communication -with an ftrack server. Create a session that connects to your ftrack server -(changing the passed values as appropriate):: - - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -.. note:: - - A session can use :ref:`environment variables - ` to configure itself. - -Now print a list of the available entity types retrieved from the server:: - - >>> print session.types.keys() - [u'TypedContext', u'ObjectType', u'Priority', u'Project', u'Sequence', - u'Shot', u'Task', u'Status', u'Type', u'Timelog', u'User'] - -Now the list of possible entity types is known, :ref:`query ` the -server to retrieve entities of a particular type by using the -:meth:`Session.query` method:: - - >>> projects = session.query('Project') - -Each project retrieved will be an :ref:`entity ` instance -that behaves much like a standard Python dictionary. 
For example, to find out -the available keys for an entity, call the -:meth:`~ftrack_api.entity.Entity.keys` method:: - - >>> print projects[0].keys() - [u'status', u'is_global', u'name', u'end_date', u'context_type', - u'id', u'full_name', u'root', u'start_date'] - -Now, iterate over the retrieved entities and print each ones name:: - - >>> for project in projects: - ... print project['name'] - test - client_review - tdb - man_test - ftrack - bunny - -.. note:: - - Many attributes for retrieved entities are loaded on demand when the - attribute is first accessed. Doing this lots of times in a script can be - inefficient, so it is worth using :ref:`projections ` - in queries or :ref:`pre-populating ` - entities where appropriate. You can also :ref:`customise default projections - ` to help others - pre-load common attributes. - -To narrow a search, add :ref:`criteria ` to the query:: - - >>> active_projects = session.query('Project where status is active') - -Combine criteria for more powerful queries:: - - >>> import arrow - >>> - >>> active_projects_ending_before_next_week = session.query( - ... 'Project where status is active and end_date before "{0}"' - ... .format(arrow.now().replace(weeks=+1)) - ... ) - -Some attributes on an entity will refer to another entity or collection of -entities, such as *children* on a *Project* being a collection of *Context* -entities that have the project as their parent:: - - >>> project = session.query('Project').first() - >>> print project['children'] - - -And on each *Context* there is a corresponding *parent* attribute which is a -link back to the parent:: - - >>> child = project['children'][0] - >>> print child['parent'] is project - True - -These relationships can also be used in the criteria for a query:: - - >>> results = session.query( - ... 'Context where parent.name like "te%"' - ... ) - -To create new entities in the system use :meth:`Session.create`:: - - >>> new_sequence = session.create('Sequence', { - ... 
'name': 'Starlord Reveal' - ... }) - -The created entity is not yet persisted to the server, but it is still possible -to modify it. - - >>> new_sequence['description'] = 'First hero character reveal.' - -The sequence also needs a parent. This can be done in one of two ways: - -* Set the parent attribute on the sequence:: - - >>> new_sequence['parent'] = project - -* Add the sequence to a parent's children attribute:: - - >>> project['children'].append(new_sequence) - -When ready, persist to the server using :meth:`Session.commit`:: - - >>> session.commit() - -When finished with a :class:`Session`, it is important to :meth:`~Session.close` -it in order to release resources and properly unsubscribe any registered event -listeners. It is also possible to use the session as a context manager in order -to have it closed automatically after use:: - - >>> with ftrack_api.Session() as session: - ... print session.query('User').first() - - >>> print session.closed - True - -Once a :class:`Session` is closed, any operations that attempt to use the closed -connection to the ftrack server will fail:: - - >>> session.query('Project').first() - ConnectionClosedError: Connection closed. - -Continue to the next section to start learning more about the API in greater -depth or jump over to the :ref:`usage examples ` if you prefer to learn -by example. diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst deleted file mode 100644 index e3602c4f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/understanding_sessions.rst +++ /dev/null @@ -1,281 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _understanding_sessions: - -********************** -Understanding sessions -********************** - -.. currentmodule:: ftrack_api.session - -All communication with an ftrack server takes place through a :class:`Session`. 
-This allows more opportunity for configuring the connection, plugins etc. and -also makes it possible to connect to multiple ftrack servers from within the -same Python process. - -.. _understanding_sessions/connection: - -Connection -========== - -A session can be manually configured at runtime to connect to a server with -certain credentials:: - - >>> session = ftrack_api.Session( - ... server_url='https://mycompany.ftrackapp.com', - ... api_key='7545384e-a653-11e1-a82c-f22c11dd25eq', - ... api_user='martin' - ... ) - -Alternatively, a session can use the following environment variables to -configure itself: - - * :envvar:`FTRACK_SERVER` - * :envvar:`FTRACK_API_USER` - * :envvar:`FTRACK_API_KEY` - -When using environment variables, no server connection arguments need to be -passed manually:: - - >>> session = ftrack_api.Session() - -.. _understanding_sessions/unit_of_work: - -Unit of work -============ - -Each session follows the unit of work pattern. This means that many of the -operations performed using a session will happen locally and only be persisted -to the server at certain times, notably when calling :meth:`Session.commit`. -This approach helps optimise calls to the server and also group related logic -together in a transaction:: - - user = session.create('User', {}) - user['username'] = 'martin' - other_user = session.create('User', {'username': 'bjorn'}) - other_user['email'] = 'bjorn@example.com' - -Behind the scenes a series of :class:`operations -` are recorded reflecting the changes made. You -can take a peek at these operations if desired by examining the -``Session.recorded_operations`` property:: - - >>> for operation in session.recorded_operations: - ... print operation - - - - - -Calling :meth:`Session.commit` persists all recorded operations to the server -and clears the operation log:: - - session.commit() - -.. note:: - - The commit call will optimise operations to be as efficient as possible - without breaking logical ordering. 
For example, a create followed by updates - on the same entity will be compressed into a single create. - -Queries are special and always issued on demand. As a result, a query may return -unexpected results if the relevant local changes have not yet been sent to the -server:: - - >>> user = session.create('User', {'username': 'some_unique_username'}) - >>> query = 'User where username is "{0}"'.format(user['username']) - >>> print len(session.query(query)) - 0 - >>> session.commit() - >>> print len(session.query(query)) - 1 - -Where possible, query results are merged in with existing data transparently -with any local changes preserved:: - - >>> user = session.query('User').first() - >>> user['email'] = 'me@example.com' # Not yet committed to server. - >>> retrieved = session.query( - ... 'User where id is "{0}"'.format(user['id']) - ... ).one() - >>> print retrieved['email'] # Displays locally set value. - 'me@example.com' - >>> print retrieved is user - True - -This is possible due to the smart :ref:`caching` layer in the session. - -.. _understanding_sessions/auto_population: - -Auto-population -=============== - -Another important concept in a session is that of auto-population. By default a -session is configured to auto-populate missing attribute values on access. This -means that the first time you access an attribute on an entity instance a query -will be sent to the server to fetch the value:: - - user = session.query('User').first() - # The next command will issue a request to the server to fetch the - # 'username' value on demand at this is the first time it is accessed. - print user['username'] - -Once a value has been retrieved it is :ref:`cached ` locally in the -session and accessing it again will not issue more server calls:: - - # On second access no server call is made. 
- print user['username'] - -You can control the auto population behaviour of a session by either changing -the ``Session.auto_populate`` attribute on a session or using the provided -context helper :meth:`Session.auto_populating` to temporarily change the -setting. When turned off you may see a special -:attr:`~ftrack_api.symbol.NOT_SET` symbol that represents a value has not yet -been fetched:: - - >>> with session.auto_populating(False): - ... print user['email'] - NOT_SET - -Whilst convenient for simple scripts, making many requests to the server for -each attribute can slow execution of a script. To support optimisation the API -includes methods for batch fetching attributes. Read about them in -:ref:`querying/projections` and :ref:`working_with_entities/populating`. - -.. _understanding_sessions/entity_types: - -Entity types -============ - -When a session has successfully connected to the server it will automatically -download schema information and :ref:`create appropriate classes -` for use. This is important as different -servers can support different entity types and configurations. - -This information is readily available and useful if you need to check that the -entity types you expect are present. Here's how to print a list of all entity -types registered for use in the current API session:: - - >>> print session.types.keys() - [u'Task', u'Shot', u'TypedContext', u'Sequence', u'Priority', - u'Status', u'Project', u'User', u'Type', u'ObjectType'] - -Each entity type is backed by a :ref:`customisable class -` that further describes the entity type and -the attributes that are available. - -.. hint:: - - If you need to use an :func:`isinstance` check, always go through the - session as the classes are built dynamically:: - - >>> isinstance(entity, session.types['Project']) - -.. 
_understanding_sessions/plugins: - -Configuring plugins -=================== - -Plugins are used by the API to extend it with new functionality, such as -:term:`locations ` or adding convenience methods to -:ref:`understanding_sessions/entity_types`. In addition to new API -functionality, event plugins may also be used for event processing by listening -to :ref:`ftrack update events ` or adding custom functionality to ftrack by registering -:term:`actions `. - - -When starting a new :class:`Session` either pass the *plugins_paths* to search -explicitly or rely on the environment variable -:envvar:`FTRACK_EVENT_PLUGIN_PATH`. As each session is independent of others, -you can configure plugins per session. - -The paths will be searched for :term:`plugins `, python files -which expose a `register` function. These functions will be evaluated and can -be used extend the API with new functionality, such as locations or actions. - -If you do not specify any override then the session will attempt to discover and -use the default plugins. - -Plugins are discovered using :func:`ftrack_api.plugin.discover` with the -session instance passed as the sole positional argument. Most plugins should -take the form of a mount function that then subscribes to specific :ref:`events -` on the session:: - - def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - # Find location(s) and customise instances. - - def register(session): - '''Register plugin with *session*.''' - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) - -Additional keyword arguments can be passed as *plugin_arguments* to the -:class:`Session` on instantiation. These are passed to the plugin register -function if its signature supports them:: - - # a_plugin.py - def register(session, reticulate_splines=False): - '''Register plugin with *session*.''' - ... 
- - # main.py - session = ftrack_api.Session( - plugin_arguments={ - 'reticulate_splines': True, - 'some_other_argument': 42 - } - ) - -.. seealso:: - - Lists of events which you can subscribe to in your plugins are available - both for :ref:`synchronous event published by the python API ` - and :ref:`asynchronous events published by the server ` - - -Quick setup ------------ - -1. Create a directory where plugins will be stored. Place any plugins you want -loaded automatically in an API *session* here. - -.. image:: /image/configuring_plugins_directory.png - -2. Configure the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. - - -Detailed setup --------------- - -Start out by creating a directory on your machine where you will store your -plugins. Download :download:`example_plugin.py ` -and place it in the directory. - -Open up a terminal window, and ensure that plugin is picked up when -instantiating the session and manually setting the *plugin_paths*:: - - >>> # Set up basic logging - >>> import logging - >>> logging.basicConfig() - >>> plugin_logger = logging.getLogger('com.example.example-plugin') - >>> plugin_logger.setLevel(logging.DEBUG) - >>> - >>> # Configure the API, loading plugins in the specified paths. - >>> import ftrack_api - >>> plugin_paths = ['/path/to/plugins'] - >>> session = ftrack_api.Session(plugin_paths=plugin_paths) - -If everything is working as expected, you should see the following in the -output:: - - DEBUG:com.example.example-plugin:Plugin registered - -Instead of specifying the plugin paths when instantiating the session, you can -also specify the :envvar:`FTRACK_EVENT_PLUGIN_PATH` to point to the directory. -To specify multiple directories, use the path separator for your operating -system. 
\ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst b/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst deleted file mode 100644 index 2d9d26f9..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/doc/working_with_entities.rst +++ /dev/null @@ -1,434 +0,0 @@ -.. - :copyright: Copyright (c) 2014 ftrack - -.. _working_with_entities: - -********************* -Working with entities -********************* - -.. currentmodule:: ftrack_api.session - -:class:`Entity ` instances are Python dict-like -objects whose keys correspond to attributes for that type in the system. They -may also provide helper methods to perform common operations such as replying to -a note:: - - note = session.query('Note').first() - print note.keys() - print note['content'] - note['content'] = 'A different message!' - reply = note.create_reply(...) - -.. _working_with_entities/attributes: - -Attributes -========== - -Each entity instance is typed according to its underlying entity type on the -server and configured with appropriate attributes. For example, a *task* will be -represented by a *Task* class and have corresponding attributes. You can -:ref:`customise entity classes ` to alter -attribute access or provide your own helper methods. - -To see the available attribute names on an entity use the -:meth:`~ftrack_api.entity.base.Entity.keys` method on the instance:: - - >>> task = session.query('Task').first() - >>> print task.keys() - ['id', 'name', ...] - -If you need more information about the type of attribute, examine the -``attributes`` property on the corresponding class:: - - >>> for attribute in type(task).attributes: - ... print attribute - - - - - - ... - -Notice that there are different types of attribute such as -:class:`~ftrack_api.attribute.ScalarAttribute` for plain values or -:class:`~ftrack_api.attribute.ReferenceAttribute` for relationships. 
These -different types are reflected in the behaviour on the entity instance when -accessing a particular attribute by key: - - >>> # Scalar - >>> print task['name'] - 'model' - >>> task['name'] = 'comp' - - >>> # Single reference - >>> print task['status'] - - >>> new_status = session.query('Status').first() - >>> task['status'] = new_status - - >>> # Collection - >>> print task['timelogs'] - - >>> print task['timelogs'][:] - [, ...] - >>> new_timelog = session.create('Timelog', {...}) - >>> task['timelogs'].append(new_timelog) - -.. _working_with_entities/attributes/bidirectional: - -Bi-directional relationships ----------------------------- - -Some attributes refer to different sides of a bi-directional relationship. In -the current version of the API bi-directional updates are not propagated -automatically to the other side of the relationship. For example, setting a -*parent* will not update the parent entity's *children* collection locally. -There are plans to support this behaviour better in the future. For now, after -commit, :ref:`populate ` the reverse side -attribute manually. - -.. _working_with_entities/creating: - -Creating entities -================= - -In order to create a new instance of an entity call :meth:`Session.create` -passing in the entity type to create and any initial attribute values:: - - new_user = session.create('User', {'username': 'martin'}) - -If there are any default values that can be set client side then they will be -applied at this point. Typically this will be the unique entity key:: - - >>> print new_user['id'] - 170f02a4-6656-4f15-a5cb-c4dd77ce0540 - -At this point no information has been sent to the server. However, you are free -to continue :ref:`updating ` this object -locally until you are ready to persist the changes by calling -:meth:`Session.commit`. 
- -If you are wondering about what would happen if you accessed an unset attribute -on a newly created entity, go ahead and give it a go:: - - >>> print new_user['first_name'] - NOT_SET - -The session knows that it is a newly created entity that has not yet been -persisted so it doesn't try to fetch any attributes on access even when -``session.auto_populate`` is turned on. - -.. _working_with_entities/updating: - -Updating entities -================= - -Updating an entity is as simple as modifying the values for specific keys on -the dict-like instance and calling :meth:`Session.commit` when ready. The entity -to update can either be a new entity or a retrieved entity:: - - task = session.query('Task').first() - task['bid'] = 8 - -Remember that, for existing entities, accessing an attribute will load it from -the server automatically. If you are interested in just setting values without -first fetching them from the server, turn :ref:`auto-population -` off temporarily:: - - >>> with session.auto_populating(False): - ... task = session.query('Task').first() - ... task['bid'] = 8 - - -.. _working_with_entities/resetting: - -Server side reset of entity attributes or settings. -=========================== - -Some entities support resetting of attributes, for example -to reset a users api key:: - - - session.reset_remote( - 'api_key', entity=session.query('User where username is "test_user"').one() - ) - -.. note:: - Currently the only attribute possible to reset is 'api_key' on - the user entity type. - - -.. _working_with_entities/deleting: - -Deleting entities -================= - -To delete an entity you need an instance of the entity in your session (either -from having created one or retrieving one). Then call :meth:`Session.delete` on -the entity and :meth:`Session.commit` when ready:: - - task_to_delete = session.query('Task').first() - session.delete(task_to_delete) - ... - session.commit() - -.. 
note:: - - Even though the entity is deleted, you will still have access to the local - instance and any local data stored on that instance whilst that instance - remains in memory. - -Keep in mind that some deletions, when propagated to the server, will cause -other entities to be deleted also, so you don't have to worry about deleting an -entire hierarchy manually. For example, deleting a *Task* will also delete all -*Notes* on that task. - -.. _working_with_entities/populating: - -Populating entities -=================== - -When an entity is retrieved via :meth:`Session.query` or :meth:`Session.get` it -will have some attributes prepopulated. The rest are dynamically loaded when -they are accessed. If you need to access many attributes it can be more -efficient to request all those attributes be loaded in one go. One way to do -this is to use a :ref:`projections ` in queries. - -However, if you have entities that have been passed to you from elsewhere you -don't have control over the query that was issued to get those entities. In this -case you can you can populate those entities in one go using -:meth:`Session.populate` which works exactly like :ref:`projections -` in queries do, but operating against known entities:: - - >>> users = session.query('User') - >>> session.populate(users, 'first_name, last_name') - >>> with session.auto_populating(False): # Turn off for example purpose. - ... for user in users: - ... print 'Name: {0}'.format(user['first_name']) - ... print 'Email: {0}'.format(user['email']) - Name: Martin - Email: NOT_SET - ... - -.. note:: - - You can populate a single or many entities in one call so long as they are - all the same entity type. - -.. _working_with_entities/entity_states: - -Entity states -============= - -Operations on entities are :ref:`recorded in the session -` as they happen. At any time you can -inspect an entity to determine its current state from those pending operations. 
- -To do this, use :func:`ftrack_api.inspection.state`:: - - >>> import ftrack_api.inspection - >>> new_user = session.create('User', {}) - >>> print ftrack_api.inspection.state(new_user) - CREATED - >>> existing_user = session.query('User').first() - >>> print ftrack_api.inspection.state(existing_user) - NOT_SET - >>> existing_user['email'] = 'martin@example.com' - >>> print ftrack_api.inspection.state(existing_user) - MODIFIED - >>> session.delete(new_user) - >>> print ftrack_api.inspection.state(new_user) - DELETED - -.. _working_with_entities/entity_types: - -Customising entity types -======================== - -Each type of entity in the system is represented in the Python client by a -dedicated class. However, because the types of entities can vary these classes -are built on demand using schema information retrieved from the server. - -Many of the default classes provide additional helper methods which are mixed -into the generated class at runtime when a session is started. - -In some cases it can be useful to tailor the custom classes to your own pipeline -workflows. Perhaps you want to add more helper functions, change attribute -access rules or even providing a layer of backwards compatibility for existing -code. The Python client was built with this in mind and makes such -customisations as easy as possible. - -When a :class:`Session` is constructed it fetches schema details from the -connected server and then calls an :class:`Entity factory -` to create classes from those schemas. It -does this by emitting a synchronous event, -*ftrack.api.session.construct-entity-type*, for each schema and expecting a -*class* object to be returned. - -In the default setup, a :download:`construct_entity_type.py -<../resource/plugin/construct_entity_type.py>` plugin is placed on the -:envvar:`FTRACK_EVENT_PLUGIN_PATH`. 
This plugin will register a trivial subclass -of :class:`ftrack_api.entity.factory.StandardFactory` to create the classes in -response to the construct event. The simplest way to get started is to edit this -default plugin as required. - -.. seealso:: :ref:`understanding_sessions/plugins` - -.. _working_with_entities/entity_types/default_projections: - -Default projections -------------------- - -When a :ref:`query ` is issued without any :ref:`projections -`, the session will automatically add default projections -according to the type of the entity. - -For example, the following shows that for a *User*, only *id* is fetched by -default when no projections added to the query:: - - >>> user = session.query('User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', Symbol(NOT_SET)), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -.. note:: - - These default projections are also used when you access a relationship - attribute using the dictionary key syntax. - -If you want to default to fetching *username* for a *Task* as well then you can -change the default_projections* in your class factory plugin:: - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.default_projections = ['id', 'username'] - - return cls - -Now a projection-less query will also query *username* by default: - -.. note:: - - You will need to start a new session to pick up the change you made:: - - session = ftrack_api.Session() - -.. code-block:: python - - >>> user = session.query('User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... 
print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', u'martin'), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -Note that if any specific projections are applied in a query, those override -the default projections entirely. This allows you to also *reduce* the data -loaded on demand:: - - >>> session = ftrack_api.Session() # Start new session to avoid cache. - >>> user = session.query('select id from User').first() - >>> with session.auto_populating(False): # For demonstration purpose only. - ... print user.items() - [ - (u'id', u'59f0963a-15e2-11e1-a5f1-0019bb4983d8') - (u'username', Symbol(NOT_SET)), - (u'first_name', Symbol(NOT_SET)), - ... - ] - -.. _working_with_entities/entity_types/helper_methods: - -Helper methods --------------- - -If you want to add additional helper methods to the constructed classes to -better support your pipeline logic, then you can simply patch the created -classes in your factory, much like with changing the default projections:: - - def get_full_name(self): - '''Return full name for user.''' - return '{0} {1}'.format(self['first_name'], self['last_name']).strip() - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.get_full_name = get_full_name - - return cls - -Now you have a new helper method *get_full_name* on your *User* entities:: - - >>> session = ftrack_api.Session() # New session to pick up changes. - >>> user = session.query('User').first() - >>> print user.get_full_name() - Martin Pengelly-Phillips - -If you'd rather not patch the existing classes, or perhaps have a lot of helpers -to mixin, you can instead inject your own class as the base class. 
The only -requirement is that it has the base :class:`~ftrack_api.entity.base.Entity` -class in its ancestor classes:: - - import ftrack_api.entity.base - - - class CustomUser(ftrack_api.entity.base.Entity): - '''Represent user.''' - - def get_full_name(self): - '''Return full name for user.''' - return '{0} {1}'.format(self['first_name'], self['last_name']).strip() - - - class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Alter base class for constructed class. - if bases is None: - bases = [ftrack_api.entity.base.Entity] - - if schema['id'] == 'User': - bases = [CustomUser] - - cls = super(Factory, self).create(schema, bases=bases) - return cls - -The resulting effect is the same:: - - >>> session = ftrack_api.Session() # New session to pick up changes. - >>> user = session.query('User').first() - >>> print user.get_full_name() - Martin Pengelly-Phillips - -.. note:: - - Your custom class is not the leaf class which will still be a dynamically - generated class. Instead your custom class becomes the base for the leaf - class:: - - >>> print type(user).__mro__ - (, , ...) 
diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/pytest.ini b/client/ayon_ftrack/python2_vendor/ftrack-python-api/pytest.ini deleted file mode 100644 index b1f515ee..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/pytest.ini +++ /dev/null @@ -1,7 +0,0 @@ -[pytest] -minversion = 2.4.2 -addopts = -v -k-slow --junitxml=test-reports/junit.xml --cache-clear -norecursedirs = .* _* -python_files = test_*.py -python_functions = test_* -mock_use_standalone_module = true \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py deleted file mode 100644 index 0682a5ee..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/resource/plugin/configure_locations.py +++ /dev/null @@ -1,39 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api -import ftrack_api.entity.location -import ftrack_api.accessor.disk - - -def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - # - # location = session.query('Location where name is "my.location"').one() - # ftrack_api.mixin(location, ftrack_api.entity.location.UnmanagedLocationMixin) - # location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:configure_locations.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py deleted file mode 100644 index 45f78416..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/resource/plugin/construct_entity_type.py +++ /dev/null @@ -1,46 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api.entity.factory - - -class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Optionally change bases for class to be generated. - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - - return cls - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - factory = Factory() - - def construct_entity_type(event): - '''Return class to represent entity type specified by *event*.''' - schema = event['data']['schema'] - return factory.create(schema) - - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/setup.cfg b/client/ayon_ftrack/python2_vendor/ftrack-python-api/setup.cfg deleted file mode 100644 index b2ad8fd0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/setup.cfg +++ /dev/null @@ -1,6 +0,0 @@ -[build_sphinx] -config-dir = doc -source-dir = doc -build-dir = build/doc -builder = html -all_files = 1 diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/setup.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/setup.py deleted file mode 100644 index da99a572..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/setup.py +++ /dev/null @@ -1,81 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os -import re - -from setuptools import setup, find_packages -from setuptools.command.test import test as TestCommand - - -ROOT_PATH = os.path.dirname(os.path.realpath(__file__)) -RESOURCE_PATH = os.path.join(ROOT_PATH, 'resource') -SOURCE_PATH = os.path.join(ROOT_PATH, 'source') -README_PATH = os.path.join(ROOT_PATH, 'README.rst') - - -# Read version from source. -with open( - os.path.join(SOURCE_PATH, 'ftrack_api', '_version.py') -) as _version_file: - VERSION = re.match( - r'.*__version__ = \'(.*?)\'', _version_file.read(), re.DOTALL - ).group(1) - - -# Custom commands. 
-class PyTest(TestCommand): - '''Pytest command.''' - - def finalize_options(self): - '''Finalize options to be used.''' - TestCommand.finalize_options(self) - self.test_args = [] - self.test_suite = True - - def run_tests(self): - '''Import pytest and run.''' - import pytest - raise SystemExit(pytest.main(self.test_args)) - - -# Call main setup. -setup( - name='ftrack-python-api', - version=VERSION, - description='Python API for ftrack.', - long_description=open(README_PATH).read(), - keywords='ftrack, python, api', - url='https://bitbucket.org/ftrack/ftrack-python-api', - author='ftrack', - author_email='support@ftrack.com', - license='Apache License (2.0)', - packages=find_packages(SOURCE_PATH), - package_dir={ - '': 'source' - }, - setup_requires=[ - 'sphinx >= 1.2.2, < 2', - 'sphinx_rtd_theme >= 0.1.6, < 1', - 'lowdown >= 0.1.0, < 2' - ], - install_requires=[ - 'requests >= 2, <3', - 'arrow >= 0.4.4, < 1', - 'termcolor >= 1.1.0, < 2', - 'pyparsing >= 2.0, < 3', - 'clique >= 1.2.0, < 2', - 'websocket-client >= 0.40.0, < 1' - ], - tests_require=[ - 'pytest >= 2.7, < 3', - 'pytest-mock >= 0.4, < 1', - 'pytest-catchlog >= 1, <=2' - ], - cmdclass={ - 'test': PyTest - }, - zip_safe=False, - python_requires=">=2.7.9, <3.0" - -) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/__init__.py deleted file mode 100644 index 34833aa0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from ftrack_api import * diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py deleted file mode 100644 index d8ee30bd..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/__init__.py +++ /dev/null @@ -1,32 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from 
._version import __version__ -from .session import Session - - -def mixin(instance, mixin_class, name=None): - '''Mixin *mixin_class* to *instance*. - - *name* can be used to specify new class name. If not specified then one will - be generated. - - ''' - if name is None: - name = '{0}{1}'.format( - instance.__class__.__name__, mixin_class.__name__ - ) - - # Check mixin class not already present in mro in order to avoid consistent - # method resolution failure. - if mixin_class in instance.__class__.mro(): - return - - instance.__class__ = type( - name, - ( - mixin_class, - instance.__class__ - ), - {} - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py deleted file mode 100644 index fbe14f32..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_centralized_storage_scenario.py +++ /dev/null @@ -1,656 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -from __future__ import absolute_import - -import logging -import json -import sys -import os - -import ftrack_api -import ftrack_api.structure.standard as _standard -from ftrack_api.logging import LazyLogMessage as L - - -scenario_name = 'ftrack.centralized-storage' - - -class ConfigureCentralizedStorageScenario(object): - '''Configure a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - @property - def storage_scenario(self): - '''Return storage scenario setting.''' - return self.session.query( - 'select value from Setting ' - 'where name is "storage_scenario" and group is "STORAGE"' - ).one() - - @property - def existing_centralized_storage_configuration(self): - '''Return existing centralized storage configuration.''' - storage_scenario = self.storage_scenario - - try: - configuration = json.loads(storage_scenario['value']) - except (ValueError, TypeError): - return None - - if not isinstance(configuration, dict): - return None - - if configuration.get('scenario') != scenario_name: - return None - - return configuration.get('data', {}) - - def _get_confirmation_text(self, configuration): - '''Return confirmation text from *configuration*.''' - configure_location = configuration.get('configure_location') - select_location = configuration.get('select_location') - select_mount_point = configuration.get('select_mount_point') - - if configure_location: - location_text = unicode( - 'A new location will be created:\n\n' - '* Label: {location_label}\n' - '* Name: {location_name}\n' - '* Description: {location_description}\n' - ).format(**configure_location) - else: - location = self.session.get( - 'Location', select_location['location_id'] - ) - location_text = ( - u'You have choosen to use an existing location: {0}'.format( - location['label'] - ) - ) - - mount_points_text = unicode( - '* Linux: {linux}\n' - '* OS X: {osx}\n' - '* Windows: {windows}\n\n' - ).format( - linux=select_mount_point.get('linux_mount_point') or '*Not set*', - osx=select_mount_point.get('osx_mount_point') or '*Not set*', - windows=select_mount_point.get('windows_mount_point') or '*Not set*' - ) - - mount_points_not_set = [] - - if not select_mount_point.get('linux_mount_point'): - mount_points_not_set.append('Linux') - - if not select_mount_point.get('osx_mount_point'): - mount_points_not_set.append('OS X') - - if not 
select_mount_point.get('windows_mount_point'): - mount_points_not_set.append('Windows') - - if mount_points_not_set: - mount_points_text += unicode( - 'Please be aware that this location will not be working on ' - '{missing} because the mount points are not set up.' - ).format( - missing=' and '.join(mount_points_not_set) - ) - - text = unicode( - '#Confirm storage setup#\n\n' - 'Almost there! Please take a moment to verify the settings you ' - 'are about to save. You can always come back later and update the ' - 'configuration.\n' - '##Location##\n\n' - '{location}\n' - '##Mount points##\n\n' - '{mount_points}' - ).format( - location=location_text, - mount_points=mount_points_text - ) - - return text - - def configure_scenario(self, event): - '''Configure scenario based on *event* and return form items.''' - steps = ( - 'select_scenario', - 'select_location', - 'configure_location', - 'select_structure', - 'select_mount_point', - 'confirm_summary', - 'save_configuration' - ) - - warning_message = '' - values = event['data'].get('values', {}) - - # Calculate previous step and the next. - previous_step = values.get('step', 'select_scenario') - next_step = steps[steps.index(previous_step) + 1] - state = 'configuring' - - self.logger.info(L( - u'Configuring scenario, previous step: {0}, next step: {1}. ' - u'Values {2!r}.', - previous_step, next_step, values - )) - - if 'configuration' in values: - configuration = values.pop('configuration') - else: - configuration = {} - - if values: - # Update configuration with values from the previous step. - configuration[previous_step] = values - - if previous_step == 'select_location': - values = configuration['select_location'] - if values.get('location_id') != 'create_new_location': - location_exists = self.session.query( - 'Location where id is "{0}"'.format( - values.get('location_id') - ) - ).first() - if not location_exists: - next_step = 'select_location' - warning_message = ( - '**The selected location does not exist. 
Please choose ' - 'one from the dropdown or create a new one.**' - ) - - if next_step == 'select_location': - try: - location_id = ( - self.existing_centralized_storage_configuration['location_id'] - ) - except (KeyError, TypeError): - location_id = None - - options = [{ - 'label': 'Create new location', - 'value': 'create_new_location' - }] - for location in self.session.query( - 'select name, label, description from Location' - ): - if location['name'] not in ( - 'ftrack.origin', 'ftrack.unmanaged', 'ftrack.connect', - 'ftrack.server', 'ftrack.review' - ): - options.append({ - 'label': u'{label} ({name})'.format( - label=location['label'], name=location['name'] - ), - 'description': location['description'], - 'value': location['id'] - }) - - warning = '' - if location_id is not None: - # If there is already a location configured we must make the - # user aware that changing the location may be problematic. - warning = ( - '\n\n**Be careful if you switch to another location ' - 'for an existing storage scenario. Components that have ' - 'already been published to the previous location will be ' - 'made unavailable for common use.**' - ) - default_value = location_id - elif location_id is None and len(options) == 1: - # No location configured and no existing locations to use. - default_value = 'create_new_location' - else: - # There are existing locations to choose from but non of them - # are currently active in the centralized storage scenario. - default_value = None - - items = [{ - 'type': 'label', - 'value': ( - '#Select location#\n' - 'Choose an already existing location or create a new one ' - 'to represent your centralized storage. 
{0}'.format( - warning - ) - ) - }, { - 'type': 'enumerator', - 'label': 'Location', - 'name': 'location_id', - 'value': default_value, - 'data': options - }] - - default_location_name = 'studio.central-storage-location' - default_location_label = 'Studio location' - default_location_description = ( - 'The studio central location where all components are ' - 'stored.' - ) - - if previous_step == 'configure_location': - configure_location = configuration.get( - 'configure_location' - ) - - if configure_location: - try: - existing_location = self.session.query( - u'Location where name is "{0}"'.format( - configure_location.get('location_name') - ) - ).first() - except UnicodeEncodeError: - next_step = 'configure_location' - warning_message += ( - '**The location name contains non-ascii characters. ' - 'Please change the name and try again.**' - ) - values = configuration['select_location'] - else: - if existing_location: - next_step = 'configure_location' - warning_message += ( - u'**There is already a location named {0}. ' - u'Please change the name and try again.**'.format( - configure_location.get('location_name') - ) - ) - values = configuration['select_location'] - - if ( - not configure_location.get('location_name') or - not configure_location.get('location_label') or - not configure_location.get('location_description') - ): - next_step = 'configure_location' - warning_message += ( - '**Location name, label and description cannot ' - 'be empty.**' - ) - values = configuration['select_location'] - - if next_step == 'configure_location': - # Populate form with previous configuration. - default_location_label = configure_location['location_label'] - default_location_name = configure_location['location_name'] - default_location_description = ( - configure_location['location_description'] - ) - - if next_step == 'configure_location': - - if values.get('location_id') == 'create_new_location': - # Add options to create a new location. 
- items = [{ - 'type': 'label', - 'value': ( - '#Create location#\n' - 'Here you will create a new location to be used ' - 'with your new Storage scenario. For your ' - 'convenience we have already filled in some default ' - 'values. If this is the first time you are configuring ' - 'a storage scenario in ftrack we recommend that you ' - 'stick with these settings.' - ) - }, { - 'label': 'Label', - 'name': 'location_label', - 'value': default_location_label, - 'type': 'text' - }, { - 'label': 'Name', - 'name': 'location_name', - 'value': default_location_name, - 'type': 'text' - }, { - 'label': 'Description', - 'name': 'location_description', - 'value': default_location_description, - 'type': 'text' - }] - - else: - # The user selected an existing location. Move on to next - # step. - next_step = 'select_mount_point' - - if next_step == 'select_structure': - # There is only one structure to choose from, go to next step. - next_step = 'select_mount_point' - # items = [ - # { - # 'type': 'label', - # 'value': ( - # '#Select structure#\n' - # 'Select which structure to use with your location. ' - # 'The structure is used to generate the filesystem ' - # 'path for components that are added to this location.' - # ) - # }, - # { - # 'type': 'enumerator', - # 'label': 'Structure', - # 'name': 'structure_id', - # 'value': 'standard', - # 'data': [{ - # 'label': 'Standard', - # 'value': 'standard', - # 'description': ( - # 'The Standard structure uses the names in your ' - # 'project structure to determine the path.' - # ) - # }] - # } - # ] - - if next_step == 'select_mount_point': - try: - mount_points = ( - self.existing_centralized_storage_configuration['accessor']['mount_points'] - ) - except (KeyError, TypeError): - mount_points = dict() - - items = [ - { - 'value': ( - '#Mount points#\n' - 'Set mount points for your centralized storage ' - 'location. 
For the location to work as expected each ' - 'platform that you intend to use must have the ' - 'corresponding mount point set and the storage must ' - 'be accessible. If not set correctly files will not be ' - 'saved or read.' - ), - 'type': 'label' - }, { - 'type': 'text', - 'label': 'Linux', - 'name': 'linux_mount_point', - 'empty_text': 'E.g. /usr/mnt/MyStorage ...', - 'value': mount_points.get('linux', '') - }, { - 'type': 'text', - 'label': 'OS X', - 'name': 'osx_mount_point', - 'empty_text': 'E.g. /Volumes/MyStorage ...', - 'value': mount_points.get('osx', '') - }, { - 'type': 'text', - 'label': 'Windows', - 'name': 'windows_mount_point', - 'empty_text': 'E.g. \\\\MyStorage ...', - 'value': mount_points.get('windows', '') - } - ] - - if next_step == 'confirm_summary': - items = [{ - 'type': 'label', - 'value': self._get_confirmation_text(configuration) - }] - state = 'confirm' - - if next_step == 'save_configuration': - mount_points = configuration['select_mount_point'] - select_location = configuration['select_location'] - - if select_location['location_id'] == 'create_new_location': - configure_location = configuration['configure_location'] - location = self.session.create( - 'Location', - { - 'name': configure_location['location_name'], - 'label': configure_location['location_label'], - 'description': ( - configure_location['location_description'] - ) - } - ) - - else: - location = self.session.query( - 'Location where id is "{0}"'.format( - select_location['location_id'] - ) - ).one() - - setting_value = json.dumps({ - 'scenario': scenario_name, - 'data': { - 'location_id': location['id'], - 'location_name': location['name'], - 'accessor': { - 'mount_points': { - 'linux': mount_points['linux_mount_point'], - 'osx': mount_points['osx_mount_point'], - 'windows': mount_points['windows_mount_point'] - } - } - } - }) - - self.storage_scenario['value'] = setting_value - self.session.commit() - - # Broadcast an event that storage scenario has been configured. 
- event = ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.configure-done' - ) - self.session.event_hub.publish(event) - - items = [{ - 'type': 'label', - 'value': ( - '#Done!#\n' - 'Your storage scenario is now configured and ready ' - 'to use. **Note that you may have to restart Connect and ' - 'other applications to start using it.**' - ) - }] - state = 'done' - - if warning_message: - items.insert(0, { - 'type': 'label', - 'value': warning_message - }) - - items.append({ - 'type': 'hidden', - 'value': configuration, - 'name': 'configuration' - }) - items.append({ - 'type': 'hidden', - 'value': next_step, - 'name': 'step' - }) - - return { - 'items': items, - 'state': state - } - - def discover_centralized_scenario(self, event): - '''Return action discover dictionary for *event*.''' - return { - 'id': scenario_name, - 'name': 'Centralized storage scenario', - 'description': ( - '(Recommended) centralized storage scenario where all files ' - 'are kept on a storage that is mounted and available to ' - 'everyone in the studio.' - ) - } - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - #: TODO: Move these to a separate function. - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.discover ' - 'and source.user.username="{0}"' - ).format( - session.api_user - ), - self.discover_centralized_scenario - ) - session.event_hub.subscribe( - unicode( - 'topic=ftrack.storage-scenario.configure ' - 'and data.scenario_id="{0}" ' - 'and source.user.username="{1}"' - ).format( - scenario_name, - session.api_user - ), - self.configure_scenario - ) - - -class ActivateCentralizedStorageScenario(object): - '''Activate a centralized storage scenario.''' - - def __init__(self): - '''Instansiate centralized storage scenario.''' - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - - def activate(self, event): - '''Activate scenario in *event*.''' - storage_scenario = event['data']['storage_scenario'] - - try: - location_data = storage_scenario['data'] - location_name = location_data['location_name'] - location_id = location_data['location_id'] - mount_points = location_data['accessor']['mount_points'] - - except KeyError: - error_message = ( - 'Unable to read storage scenario data.' - ) - self.logger.error(L(error_message)) - raise ftrack_api.exception.LocationError( - 'Unable to configure location based on scenario.' - ) - - else: - location = self.session.create( - 'Location', - data=dict( - name=location_name, - id=location_id - ), - reconstructing=True - ) - - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - else: - raise ftrack_api.exception.LocationError( - ( - 'Unable to find accessor prefix for platform {0}.' - ).format(sys.platform) - ) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=prefix - ) - location.structure = _standard.StandardStructure() - location.priority = 1 - self.logger.info(L( - u'Storage scenario activated. Configured {0!r} from ' - u'{1!r}', - location, storage_scenario - )) - - def _verify_startup(self, event): - '''Verify the storage scenario configuration.''' - storage_scenario = event['data']['storage_scenario'] - location_data = storage_scenario['data'] - mount_points = location_data['accessor']['mount_points'] - - prefix = None - if sys.platform == 'darwin': - prefix = mount_points['osx'] - elif sys.platform == 'linux2': - prefix = mount_points['linux'] - elif sys.platform == 'win32': - prefix = mount_points['windows'] - - if not prefix: - return ( - u'The storage scenario has not been configured for your ' - u'operating system. ftrack may not be able to ' - u'store and track files correctly.' 
- ) - - if not os.path.isdir(prefix): - return ( - unicode( - 'The path {0} does not exist. ftrack may not be able to ' - 'store and track files correctly. \n\nIf the storage is ' - 'newly setup you may want to create necessary folder ' - 'structures. If the storage is a network drive you should ' - 'make sure that it is mounted correctly.' - ).format(prefix) - ) - - def register(self, session): - '''Subscribe to events on *session*.''' - self.session = session - - session.event_hub.subscribe( - ( - 'topic=ftrack.storage-scenario.activate ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self.activate - ) - - # Listen to verify startup event from ftrack connect to allow responding - # with a message if something is not working correctly with this - # scenario that the user should be notified about. - self.session.event_hub.subscribe( - ( - 'topic=ftrack.connect.verify-startup ' - 'and data.storage_scenario.scenario="{0}"'.format( - scenario_name - ) - ), - self._verify_startup - ) - -def register(session): - '''Register storage scenario.''' - scenario = ActivateCentralizedStorageScenario() - scenario.register(session) - - -def register_configuration(session): - '''Register storage scenario.''' - scenario = ConfigureCentralizedStorageScenario() - scenario.register(session) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py deleted file mode 100644 index 9f79a185..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_python_ntpath.py +++ /dev/null @@ -1,534 +0,0 @@ -# pragma: no cover -# Module 'ntpath' -- common operations on WinNT/Win95 pathnames -"""Common pathname manipulations, WindowsNT/95 version. - -Instead of importing this module directly, import os and refer to this -module as os.path. 
-""" - -import os -import sys -import stat -import genericpath -import warnings - -from genericpath import * - -__all__ = ["normcase","isabs","join","splitdrive","split","splitext", - "basename","dirname","commonprefix","getsize","getmtime", - "getatime","getctime", "islink","exists","lexists","isdir","isfile", - "ismount","walk","expanduser","expandvars","normpath","abspath", - "splitunc","curdir","pardir","sep","pathsep","defpath","altsep", - "extsep","devnull","realpath","supports_unicode_filenames","relpath"] - -# strings representing various path-related bits and pieces -curdir = '.' -pardir = '..' -extsep = '.' -sep = '\\' -pathsep = ';' -altsep = '/' -defpath = '.;C:\\bin' -if 'ce' in sys.builtin_module_names: - defpath = '\\Windows' -elif 'os2' in sys.builtin_module_names: - # OS/2 w/ VACPP - altsep = '/' -devnull = 'nul' - -# Normalize the case of a pathname and map slashes to backslashes. -# Other normalizations (such as optimizing '../' away) are not done -# (this is done by normpath). - -def normcase(s): - """Normalize case of pathname. - - Makes all characters lowercase and all slashes into backslashes.""" - return s.replace("/", "\\").lower() - - -# Return whether a path is absolute. -# Trivial in Posix, harder on the Mac or MS-DOS. -# For DOS it is absolute if it starts with a slash or backslash (current -# volume), or if a pathname after the volume letter and colon / UNC resource -# starts with a slash or backslash. - -def isabs(s): - """Test whether a path is absolute""" - s = splitdrive(s)[1] - return s != '' and s[:1] in '/\\' - - -# Join two (or more) paths. - -def join(a, *p): - """Join two or more pathname components, inserting "\\" as needed. - If any component is an absolute path, all previous path components - will be discarded.""" - path = a - for b in p: - b_wins = 0 # set to 1 iff b makes path irrelevant - if path == "": - b_wins = 1 - - elif isabs(b): - # This probably wipes out path so far. 
However, it's more - # complicated if path begins with a drive letter: - # 1. join('c:', '/a') == 'c:/a' - # 2. join('c:/', '/a') == 'c:/a' - # But - # 3. join('c:/a', '/b') == '/b' - # 4. join('c:', 'd:/') = 'd:/' - # 5. join('c:/', 'd:/') = 'd:/' - if path[1:2] != ":" or b[1:2] == ":": - # Path doesn't start with a drive letter, or cases 4 and 5. - b_wins = 1 - - # Else path has a drive letter, and b doesn't but is absolute. - elif len(path) > 3 or (len(path) == 3 and - path[-1] not in "/\\"): - # case 3 - b_wins = 1 - - if b_wins: - path = b - else: - # Join, and ensure there's a separator. - assert len(path) > 0 - if path[-1] in "/\\": - if b and b[0] in "/\\": - path += b[1:] - else: - path += b - elif path[-1] == ":": - path += b - elif b: - if b[0] in "/\\": - path += b - else: - path += "\\" + b - else: - # path is not empty and does not end with a backslash, - # but b is empty; since, e.g., split('a/') produces - # ('a', ''), it's best if join() adds a backslash in - # this case. - path += '\\' - - return path - - -# Split a path in a drive specification (a drive letter followed by a -# colon) and the path specification. -# It is always true that drivespec + pathspec == p -def splitdrive(p): - """Split a pathname into drive and path specifiers. Returns a 2-tuple -"(drive,path)"; either part may be empty""" - if p[1:2] == ':': - return p[0:2], p[2:] - return '', p - - -# Parse UNC paths -def splitunc(p): - """Split a pathname into UNC mount point and relative path specifiers. - - Return a 2-tuple (unc, rest); either part may be empty. - If unc is not empty, it has the form '//host/mount' (or similar - using backslashes). unc+rest is always the input path. - Paths containing drive letters never have an UNC part. - """ - if p[1:2] == ':': - return '', p # Drive letter present - firstTwo = p[0:2] - if firstTwo == '//' or firstTwo == '\\\\': - # is a UNC path: - # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter - # \\machine\mountpoint\directories... 
- # directory ^^^^^^^^^^^^^^^ - normp = normcase(p) - index = normp.find('\\', 2) - if index == -1: - ##raise RuntimeError, 'illegal UNC path: "' + p + '"' - return ("", p) - index = normp.find('\\', index + 1) - if index == -1: - index = len(p) - return p[:index], p[index:] - return '', p - - -# Split a path in head (everything up to the last '/') and tail (the -# rest). After the trailing '/' is stripped, the invariant -# join(head, tail) == p holds. -# The resulting head won't end in '/' unless it is the root. - -def split(p): - """Split a pathname. - - Return tuple (head, tail) where tail is everything after the final slash. - Either part may be empty.""" - - d, p = splitdrive(p) - # set i to index beyond p's last slash - i = len(p) - while i and p[i-1] not in '/\\': - i = i - 1 - head, tail = p[:i], p[i:] # now tail has no slashes - # remove trailing slashes from head, unless it's all slashes - head2 = head - while head2 and head2[-1] in '/\\': - head2 = head2[:-1] - head = head2 or head - return d + head, tail - - -# Split a path in root and extension. -# The extension is everything starting at the last dot in the last -# pathname component; the root is everything before that. -# It is always true that root + ext == p. - -def splitext(p): - return genericpath._splitext(p, sep, altsep, extsep) -splitext.__doc__ = genericpath._splitext.__doc__ - - -# Return the tail (basename) part of a path. - -def basename(p): - """Returns the final component of a pathname""" - return split(p)[1] - - -# Return the head (dirname) part of a path. - -def dirname(p): - """Returns the directory component of a pathname""" - return split(p)[0] - -# Is a path a symbolic link? -# This will always return false on systems where posix.lstat doesn't exist. - -def islink(path): - """Test for symbolic link. - On WindowsNT/95 and OS/2 always returns false - """ - return False - -# alias exists to lexists -lexists = exists - -# Is a path a mount point? 
Either a root (with or without drive letter) -# or an UNC path with at most a / or \ after the mount point. - -def ismount(path): - """Test whether a path is a mount point (defined as root of drive)""" - unc, rest = splitunc(path) - if unc: - return rest in ("", "/", "\\") - p = splitdrive(path)[1] - return len(p) == 1 and p[0] in '/\\' - - -# Directory tree walk. -# For each directory under top (including top itself, but excluding -# '.' and '..'), func(arg, dirname, filenames) is called, where -# dirname is the name of the directory and filenames is the list -# of files (and subdirectories etc.) in the directory. -# The func may modify the filenames list, to implement a filter, -# or to impose a different order of visiting. - -def walk(top, func, arg): - """Directory tree walk with callback function. - - For each directory in the directory tree rooted at top (including top - itself, but excluding '.' and '..'), call func(arg, dirname, fnames). - dirname is the name of the directory, and fnames a list of the names of - the files and subdirectories in dirname (excluding '.' and '..'). func - may modify the fnames list in-place (e.g. via del or slice assignment), - and walk will only recurse into the subdirectories whose names remain in - fnames; this can be used to implement a filter, or to impose a specific - order of visiting. No semantics are defined for, or required of, arg, - beyond that arg is always passed to func. It can be used, e.g., to pass - a filename pattern, or a mutable object designed to accumulate - statistics. Passing None for arg is common.""" - warnings.warnpy3k("In 3.x, os.path.walk is removed in favor of os.walk.", - stacklevel=2) - try: - names = os.listdir(top) - except os.error: - return - func(arg, top, names) - for name in names: - name = join(top, name) - if isdir(name): - walk(name, func, arg) - - -# Expand paths beginning with '~' or '~user'. -# '~' means $HOME; '~user' means that user's home directory. 
-# If the path doesn't begin with '~', or if the user or $HOME is unknown, -# the path is returned unchanged (leaving error reporting to whatever -# function is called with the expanded path as argument). -# See also module 'glob' for expansion of *, ? and [...] in pathnames. -# (A function should also be defined to do full *sh-style environment -# variable expansion.) - -def expanduser(path): - """Expand ~ and ~user constructs. - - If user or $HOME is unknown, do nothing.""" - if path[:1] != '~': - return path - i, n = 1, len(path) - while i < n and path[i] not in '/\\': - i = i + 1 - - if 'HOME' in os.environ: - userhome = os.environ['HOME'] - elif 'USERPROFILE' in os.environ: - userhome = os.environ['USERPROFILE'] - elif not 'HOMEPATH' in os.environ: - return path - else: - try: - drive = os.environ['HOMEDRIVE'] - except KeyError: - drive = '' - userhome = join(drive, os.environ['HOMEPATH']) - - if i != 1: #~user - userhome = join(dirname(userhome), path[1:i]) - - return userhome + path[i:] - - -# Expand paths containing shell variable substitutions. -# The following rules apply: -# - no expansion within single quotes -# - '$$' is translated into '$' -# - '%%' is translated into '%' if '%%' are not seen in %var1%%var2% -# - ${varname} is accepted. -# - $varname is accepted. -# - %varname% is accepted. -# - varnames can be made out of letters, digits and the characters '_-' -# (though is not verified in the ${varname} and %varname% cases) -# XXX With COMMAND.COM you can use any characters in a variable name, -# XXX except '^|<>='. - -def expandvars(path): - """Expand shell variables of the forms $var, ${var} and %var%. 
- - Unknown variables are left unchanged.""" - if '$' not in path and '%' not in path: - return path - import string - varchars = string.ascii_letters + string.digits + '_-' - res = '' - index = 0 - pathlen = len(path) - while index < pathlen: - c = path[index] - if c == '\'': # no expansion within single quotes - path = path[index + 1:] - pathlen = len(path) - try: - index = path.index('\'') - res = res + '\'' + path[:index + 1] - except ValueError: - res = res + path - index = pathlen - 1 - elif c == '%': # variable or '%' - if path[index + 1:index + 2] == '%': - res = res + c - index = index + 1 - else: - path = path[index+1:] - pathlen = len(path) - try: - index = path.index('%') - except ValueError: - res = res + '%' + path - index = pathlen - 1 - else: - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '%' + var + '%' - elif c == '$': # variable or '$$' - if path[index + 1:index + 2] == '$': - res = res + c - index = index + 1 - elif path[index + 1:index + 2] == '{': - path = path[index+2:] - pathlen = len(path) - try: - index = path.index('}') - var = path[:index] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '${' + var + '}' - except ValueError: - res = res + '${' + path - index = pathlen - 1 - else: - var = '' - index = index + 1 - c = path[index:index + 1] - while c != '' and c in varchars: - var = var + c - index = index + 1 - c = path[index:index + 1] - if var in os.environ: - res = res + os.environ[var] - else: - res = res + '$' + var - if c != '': - index = index - 1 - else: - res = res + c - index = index + 1 - return res - - -# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B. -# Previously, this function also truncated pathnames to 8+3 format, -# but as this module is called "ntpath", that's obviously wrong! 
- -def normpath(path): - """Normalize path, eliminating double slashes, etc.""" - # Preserve unicode (if path is unicode) - backslash, dot = (u'\\', u'.') if isinstance(path, unicode) else ('\\', '.') - if path.startswith(('\\\\.\\', '\\\\?\\')): - # in the case of paths with these prefixes: - # \\.\ -> device names - # \\?\ -> literal paths - # do not do any normalization, but return the path unchanged - return path - path = path.replace("/", "\\") - prefix, path = splitdrive(path) - # We need to be careful here. If the prefix is empty, and the path starts - # with a backslash, it could either be an absolute path on the current - # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It - # is therefore imperative NOT to collapse multiple backslashes blindly in - # that case. - # The code below preserves multiple backslashes when there is no drive - # letter. This means that the invalid filename \\\a\b is preserved - # unchanged, where a\\\b is normalised to a\b. It's not clear that there - # is any better behaviour for such edge cases. - if prefix == '': - # No drive letter - preserve initial backslashes - while path[:1] == "\\": - prefix = prefix + backslash - path = path[1:] - else: - # We have a drive letter - collapse initial backslashes - if path.startswith("\\"): - prefix = prefix + backslash - path = path.lstrip("\\") - comps = path.split("\\") - i = 0 - while i < len(comps): - if comps[i] in ('.', ''): - del comps[i] - elif comps[i] == '..': - if i > 0 and comps[i-1] != '..': - del comps[i-1:i+1] - i -= 1 - elif i == 0 and prefix.endswith("\\"): - del comps[i] - else: - i += 1 - else: - i += 1 - # If the path is now empty, substitute '.' - if not prefix and not comps: - comps.append(dot) - return prefix + backslash.join(comps) - - -# Return an absolute path. 
-try: - from nt import _getfullpathname - -except ImportError: # not running on Windows - mock up something sensible - def abspath(path): - """Return the absolute version of a path.""" - if not isabs(path): - if isinstance(path, unicode): - cwd = os.getcwdu() - else: - cwd = os.getcwd() - path = join(cwd, path) - return normpath(path) - -else: # use native Windows method on Windows - def abspath(path): - """Return the absolute version of a path.""" - - if path: # Empty path must return current working directory. - try: - path = _getfullpathname(path) - except WindowsError: - pass # Bad path - return unchanged. - elif isinstance(path, unicode): - path = os.getcwdu() - else: - path = os.getcwd() - return normpath(path) - -# realpath is a no-op on systems without islink support -realpath = abspath -# Win9x family and earlier have no Unicode filename support. -supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and - sys.getwindowsversion()[3] >= 2) - -def _abspath_split(path): - abs = abspath(normpath(path)) - prefix, rest = splitunc(abs) - is_unc = bool(prefix) - if not is_unc: - prefix, rest = splitdrive(abs) - return is_unc, prefix, [x for x in rest.split(sep) if x] - -def relpath(path, start=curdir): - """Return a relative version of a path""" - - if not path: - raise ValueError("no path specified") - - start_is_unc, start_prefix, start_list = _abspath_split(start) - path_is_unc, path_prefix, path_list = _abspath_split(path) - - if path_is_unc ^ start_is_unc: - raise ValueError("Cannot mix UNC and non-UNC paths (%s and %s)" - % (path, start)) - if path_prefix.lower() != start_prefix.lower(): - if path_is_unc: - raise ValueError("path is on UNC root %s, start on UNC root %s" - % (path_prefix, start_prefix)) - else: - raise ValueError("path is on drive %s, start on drive %s" - % (path_prefix, start_prefix)) - # Work out how much of the filepath is shared by start and path. 
- i = 0 - for e1, e2 in zip(start_list, path_list): - if e1.lower() != e2.lower(): - break - i += 1 - - rel_list = [pardir] * (len(start_list)-i) + path_list[i:] - if not rel_list: - return curdir - return join(*rel_list) - -try: - # The genericpath.isdir implementation uses os.stat and checks the mode - # attribute to tell whether or not the path is a directory. - # This is overkill on Windows - just pass the path to GetFileAttributes - # and check the attribute from there. - from nt import _isdir as isdir -except ImportError: - # Use genericpath.isdir as imported above. - pass diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py deleted file mode 100644 index aa1a8c4a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '1.8.2' diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py deleted file mode 100644 index 69cc6f4b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/_weakref.py +++ /dev/null @@ -1,66 +0,0 @@ -""" -Yet another backport of WeakMethod for Python 2.7. -Changes include removing exception chaining and adding args to super() calls. - -Copyright (c) 2001-2019 Python Software Foundation.All rights reserved. - -Full license available in LICENSE.python. -""" -from weakref import ref - - -class WeakMethod(ref): - """ - A custom `weakref.ref` subclass which simulates a weak reference to - a bound method, working around the lifetime problem of bound methods. 
- """ - - __slots__ = "_func_ref", "_meth_type", "_alive", "__weakref__" - - def __new__(cls, meth, callback=None): - try: - obj = meth.__self__ - func = meth.__func__ - except AttributeError: - raise TypeError( - "argument should be a bound method, not {}".format(type(meth)) - ) - - def _cb(arg): - # The self-weakref trick is needed to avoid creating a reference - # cycle. - self = self_wr() - if self._alive: - self._alive = False - if callback is not None: - callback(self) - - self = ref.__new__(cls, obj, _cb) - self._func_ref = ref(func, _cb) - self._meth_type = type(meth) - self._alive = True - self_wr = ref(self) - return self - - def __call__(self): - obj = super(WeakMethod, self).__call__() - func = self._func_ref() - if obj is None or func is None: - return None - return self._meth_type(func, obj) - - def __eq__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is other - return ref.__eq__(self, other) and self._func_ref == other._func_ref - return NotImplemented - - def __ne__(self, other): - if isinstance(other, WeakMethod): - if not self._alive or not other._alive: - return self is not other - return ref.__ne__(self, other) or self._func_ref != other._func_ref - return NotImplemented - - __hash__ = ref.__hash__ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py deleted file mode 100644 index 1aab07ed..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py deleted file mode 100644 index 6aa9cf02..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/base.py +++ /dev/null @@ -1,124 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import abc - -import ftrack_api.exception - - -class Accessor(object): - '''Provide data access to a location. - - A location represents a specific storage, but access to that storage may - vary. For example, both local filesystem and FTP access may be possible for - the same storage. An accessor implements these different ways of accessing - the same data location. - - As different accessors may access the same location, only part of a data - path that is commonly understood may be stored in the database. The format - of this path should be a contract between the accessors that require access - to the same location and is left as an implementation detail. As such, this - system provides no guarantee that two different accessors can provide access - to the same location, though this is a clear goal. The path stored centrally - is referred to as the **resource identifier** and should be used when - calling any of the accessor methods that accept a *resource_identifier* - argument. - - ''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise location accessor.''' - super(Accessor, self).__init__() - - @abc.abstractmethod - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. 
- - ''' - - @abc.abstractmethod - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - - @abc.abstractmethod - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - - @abc.abstractmethod - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - - @abc.abstractmethod - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - - @abc.abstractmethod - def open(self, resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.data.Data` for *resource_identifier*.''' - - @abc.abstractmethod - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - - @abc.abstractmethod - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - Should silently ignore existing containers and not recreate them. - - ''' - - @abc.abstractmethod - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` - if container of *resource_identifier* could not be determined. - - ''' - - def remove_container(self, resource_identifier): # pragma: no cover - '''Remove container at *resource_identifier*.''' - return self.remove(resource_identifier) - - def get_filesystem_path(self, resource_identifier): # pragma: no cover - '''Return filesystem path for *resource_identifier*. 
- - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - filesystem path could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving filesystem paths is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_filesystem_path', resource_identifier=resource_identifier - ) - - def get_url(self, resource_identifier): - '''Return URL for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by this accessor. - - ''' - raise ftrack_api.exception.AccessorUnsupportedOperationError( - 'get_url', resource_identifier=resource_identifier - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py deleted file mode 100644 index 65769603..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/disk.py +++ /dev/null @@ -1,250 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -import sys -import errno -import contextlib - -import ftrack_api._python_ntpath as ntpath -import ftrack_api.accessor.base -import ftrack_api.data -from ftrack_api.exception import ( - AccessorFilesystemPathError, - AccessorUnsupportedOperationError, - AccessorResourceNotFoundError, - AccessorOperationFailedError, - AccessorPermissionDeniedError, - AccessorResourceInvalidError, - AccessorContainerNotEmptyError, - AccessorParentResourceNotFoundError -) - - -class DiskAccessor(ftrack_api.accessor.base.Accessor): - '''Provide disk access to a location. - - Expect resource identifiers to refer to relative filesystem paths. 
- - ''' - - def __init__(self, prefix, **kw): - '''Initialise location accessor. - - *prefix* specifies the base folder for the disk based structure and - will be prepended to any path. It should be specified in the syntax of - the current OS. - - ''' - if prefix: - prefix = os.path.expanduser(os.path.expandvars(prefix)) - prefix = os.path.abspath(prefix) - self.prefix = prefix - - super(DiskAccessor, self).__init__(**kw) - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container. - - Each entry in the returned list should be a valid resource identifier. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist or - :exc:`~ftrack_api.exception.AccessorResourceInvalidError` if - *resource_identifier* is not a container. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='list', resource_identifier=resource_identifier - ): - listing = [] - for entry in os.listdir(filesystem_path): - listing.append(os.path.join(resource_identifier, entry)) - - return listing - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.exists(filesystem_path) - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isfile(filesystem_path) - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - return os.path.isdir(filesystem_path) - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise AccessorUnsupportedOperationError(operation='is_sequence') - - def open(self, 
resource_identifier, mode='rb'): - '''Return :class:`~ftrack_api.Data` for *resource_identifier*.''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='open', resource_identifier=resource_identifier - ): - data = ftrack_api.data.File(filesystem_path, mode) - - return data - - def remove(self, resource_identifier): - '''Remove *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorResourceNotFoundError` if - *resource_identifier* does not exist. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - if self.is_file(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.remove(filesystem_path) - - elif self.is_container(resource_identifier): - with error_handler( - operation='remove', resource_identifier=resource_identifier - ): - os.rmdir(filesystem_path) - - else: - raise AccessorResourceNotFoundError( - resource_identifier=resource_identifier - ) - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*. - - If *recursive* is True, also make any intermediate containers. - - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - with error_handler( - operation='makeContainer', resource_identifier=resource_identifier - ): - try: - if recursive: - os.makedirs(filesystem_path) - else: - try: - os.mkdir(filesystem_path) - except OSError as error: - if error.errno == errno.ENOENT: - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier - ) - else: - raise - - except OSError, error: - if error.errno != errno.EEXIST: - raise - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*. - - Raise :exc:`~ftrack_api.exception.AccessorParentResourceNotFoundError` if - container of *resource_identifier* could not be determined. 
- - ''' - filesystem_path = self.get_filesystem_path(resource_identifier) - - container = os.path.dirname(filesystem_path) - - if self.prefix: - if not container.startswith(self.prefix): - raise AccessorParentResourceNotFoundError( - resource_identifier=resource_identifier, - message='Could not determine container for ' - '{resource_identifier} as container falls outside ' - 'of configured prefix.' - ) - - # Convert container filesystem path into resource identifier. - container = container[len(self.prefix):] - if ntpath.isabs(container): - # Ensure that resulting path is relative by stripping any - # leftover prefixed slashes from string. - # E.g. If prefix was '/tmp' and path was '/tmp/foo/bar' the - # result will be 'foo/bar'. - container = container.lstrip('\\/') - - return container - - def get_filesystem_path(self, resource_identifier): - '''Return filesystem path for *resource_identifier*. - - For example:: - - >>> accessor = DiskAccessor('my.location', '/mountpoint') - >>> print accessor.get_filesystem_path('test.txt') - /mountpoint/test.txt - >>> print accessor.get_filesystem_path('/mountpoint/test.txt') - /mountpoint/test.txt - - Raise :exc:`ftrack_api.exception.AccessorFilesystemPathError` if filesystem - path could not be determined from *resource_identifier*. - - ''' - filesystem_path = resource_identifier - if filesystem_path: - filesystem_path = os.path.normpath(filesystem_path) - - if self.prefix: - if not os.path.isabs(filesystem_path): - filesystem_path = os.path.normpath( - os.path.join(self.prefix, filesystem_path) - ) - - if not filesystem_path.startswith(self.prefix): - raise AccessorFilesystemPathError( - resource_identifier=resource_identifier, - message='Could not determine access path for ' - 'resource_identifier outside of configured prefix: ' - '{resource_identifier}.' 
- ) - - return filesystem_path - - -@contextlib.contextmanager -def error_handler(**kw): - '''Conform raised OSError/IOError exception to appropriate FTrack error.''' - try: - yield - - except (OSError, IOError) as error: - (exception_type, exception_value, traceback) = sys.exc_info() - kw.setdefault('error', error) - - error_code = getattr(error, 'errno') - if not error_code: - raise AccessorOperationFailedError(**kw), None, traceback - - if error_code == errno.ENOENT: - raise AccessorResourceNotFoundError(**kw), None, traceback - - elif error_code == errno.EPERM: - raise AccessorPermissionDeniedError(**kw), None, traceback - - elif error_code == errno.ENOTEMPTY: - raise AccessorContainerNotEmptyError(**kw), None, traceback - - elif error_code in (errno.ENOTDIR, errno.EISDIR, errno.EINVAL): - raise AccessorResourceInvalidError(**kw), None, traceback - - else: - raise AccessorOperationFailedError(**kw), None, traceback - - except Exception: - raise diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py deleted file mode 100644 index 9c735084..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/accessor/server.py +++ /dev/null @@ -1,240 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import hashlib -import base64 -import json - -import requests - -from .base import Accessor -from ..data import String -import ftrack_api.exception -import ftrack_api.symbol - - -class ServerFile(String): - '''Representation of a server file.''' - - def __init__(self, resource_identifier, session, mode='rb'): - '''Initialise file.''' - self.mode = mode - self.resource_identifier = resource_identifier - self._session = session - self._has_read = False - - super(ServerFile, self).__init__() - - def flush(self): - '''Flush all changes.''' - super(ServerFile, self).flush() - - if self.mode 
== 'wb': - self._write() - - def read(self, limit=None): - '''Read file.''' - if not self._has_read: - self._read() - self._has_read = True - - return super(ServerFile, self).read(limit) - - def _read(self): - '''Read all remote content from key into wrapped_file.''' - position = self.tell() - self.seek(0) - - response = requests.get( - '{0}/component/get'.format(self._session.server_url), - params={ - 'id': self.resource_identifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - }, - stream=True - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to read data: {0}.'.format(error) - ) - - for block in response.iter_content(ftrack_api.symbol.CHUNK_SIZE): - self.wrapped_file.write(block) - - self.flush() - self.seek(position) - - def _write(self): - '''Write current data to remote key.''' - position = self.tell() - self.seek(0) - - # Retrieve component from cache to construct a filename. - component = self._session.get('FileComponent', self.resource_identifier) - if not component: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Unable to retrieve component with id: {0}.'.format( - self.resource_identifier - ) - ) - - # Construct a name from component name and file_type. - name = component['name'] - if component['file_type']: - name = u'{0}.{1}'.format( - name, - component['file_type'].lstrip('.') - ) - - try: - metadata = self._session.get_upload_metadata( - component_id=self.resource_identifier, - file_name=name, - file_size=self._get_size(), - checksum=self._compute_checksum() - ) - except Exception as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to get put metadata: {0}.'.format(error) - ) - - # Ensure at beginning of file before put. - self.seek(0) - - # Put the file based on the metadata. 
- response = requests.put( - metadata['url'], - data=self.wrapped_file, - headers=metadata['headers'] - ) - - try: - response.raise_for_status() - except requests.exceptions.HTTPError as error: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to put file to server: {0}.'.format(error) - ) - - self.seek(position) - - def _get_size(self): - '''Return size of file in bytes.''' - position = self.tell() - self.seek(0, os.SEEK_END) - length = self.tell() - self.seek(position) - return length - - def _compute_checksum(self): - '''Return checksum for file.''' - fp = self.wrapped_file - buf_size = ftrack_api.symbol.CHUNK_SIZE - hash_obj = hashlib.md5() - spos = fp.tell() - - s = fp.read(buf_size) - while s: - hash_obj.update(s) - s = fp.read(buf_size) - - base64_digest = base64.encodestring(hash_obj.digest()) - if base64_digest[-1] == '\n': - base64_digest = base64_digest[0:-1] - - fp.seek(spos) - return base64_digest - - -class _ServerAccessor(Accessor): - '''Provide server location access.''' - - def __init__(self, session, **kw): - '''Initialise location accessor.''' - super(_ServerAccessor, self).__init__(**kw) - - self._session = session - - def open(self, resource_identifier, mode='rb'): - '''Return :py:class:`~ftrack_api.Data` for *resource_identifier*.''' - return ServerFile(resource_identifier, session=self._session, mode=mode) - - def remove(self, resourceIdentifier): - '''Remove *resourceIdentifier*.''' - response = requests.get( - '{0}/component/remove'.format(self._session.server_url), - params={ - 'id': resourceIdentifier, - 'username': self._session.api_user, - 'apiKey': self._session.api_key - } - ) - if response.status_code != 200: - raise ftrack_api.exception.AccessorOperationFailedError( - 'Failed to remove file.' 
- ) - - def get_container(self, resource_identifier): - '''Return resource_identifier of container for *resource_identifier*.''' - return None - - def make_container(self, resource_identifier, recursive=True): - '''Make a container at *resource_identifier*.''' - - def list(self, resource_identifier): - '''Return list of entries in *resource_identifier* container.''' - raise NotImplementedError() - - def exists(self, resource_identifier): - '''Return if *resource_identifier* is valid and exists in location.''' - return False - - def is_file(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file.''' - raise NotImplementedError() - - def is_container(self, resource_identifier): - '''Return whether *resource_identifier* refers to a container.''' - raise NotImplementedError() - - def is_sequence(self, resource_identifier): - '''Return whether *resource_identifier* refers to a file sequence.''' - raise NotImplementedError() - - def get_url(self, resource_identifier): - '''Return url for *resource_identifier*.''' - url_string = ( - u'{url}/component/get?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - return url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - - def get_thumbnail_url(self, resource_identifier, size=None): - '''Return thumbnail url for *resource_identifier*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. 
- ''' - url_string = ( - u'{url}/component/thumbnail?id={id}&username={username}' - u'&apiKey={apiKey}' - ) - url = url_string.format( - url=self._session.server_url, - id=resource_identifier, - username=self._session.api_user, - apiKey=self._session.api_key - ) - if size: - url += u'&size={0}'.format(size) - - return url diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py deleted file mode 100644 index 719b612f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/attribute.py +++ /dev/null @@ -1,707 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import collections -import copy -import logging -import functools - -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.collection -import ftrack_api.inspection -import ftrack_api.operation - -logger = logging.getLogger( - __name__ -) - - -def merge_references(function): - '''Decorator to handle merging of references / collections.''' - - @functools.wraps(function) - def get_value(attribute, entity): - '''Merge the attribute with the local cache.''' - - if attribute.name not in entity._inflated: - # Only merge on first access to avoid - # inflating them multiple times. - - logger.debug( - 'Merging potential new data into attached ' - 'entity for attribute {0}.'.format( - attribute.name - ) - ) - - # Local attributes. 
- local_value = attribute.get_local_value(entity) - if isinstance( - local_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging local value for attribute {0}.'.format(attribute) - ) - - merged_local_value = entity.session._merge( - local_value, merged=dict() - ) - - if merged_local_value is not local_value: - with entity.session.operation_recording(False): - attribute.set_local_value(entity, merged_local_value) - - # Remote attributes. - remote_value = attribute.get_remote_value(entity) - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - merged_remote_value = entity.session._merge( - remote_value, merged=dict() - ) - - if merged_remote_value is not remote_value: - attribute.set_remote_value(entity, merged_remote_value) - - entity._inflated.add( - attribute.name - ) - - return function( - attribute, entity - ) - - return get_value - - -class Attributes(object): - '''Collection of properties accessible by name.''' - - def __init__(self, attributes=None): - super(Attributes, self).__init__() - self._data = dict() - if attributes is not None: - for attribute in attributes: - self.add(attribute) - - def add(self, attribute): - '''Add *attribute*.''' - existing = self._data.get(attribute.name, None) - if existing: - raise ftrack_api.exception.NotUniqueError( - 'Attribute with name {0} already added as {1}' - .format(attribute.name, existing) - ) - - self._data[attribute.name] = attribute - - def remove(self, attribute): - '''Remove attribute.''' - self._data.pop(attribute.name) - - def get(self, name): - '''Return attribute by *name*. - - If no attribute matches *name* then return None. 
- - ''' - return self._data.get(name, None) - - def keys(self): - '''Return list of attribute names.''' - return self._data.keys() - - def __contains__(self, item): - '''Return whether *item* present.''' - if not isinstance(item, Attribute): - return False - - return item.name in self._data - - def __iter__(self): - '''Return iterator over attributes.''' - return self._data.itervalues() - - def __len__(self): - '''Return count of attributes.''' - return len(self._data) - - -class Attribute(object): - '''A name and value pair persisted remotely.''' - - def __init__( - self, name, default_value=ftrack_api.symbol.NOT_SET, mutable=True, - computed=False - ): - '''Initialise attribute with *name*. - - *default_value* represents the default value for the attribute. It may - be a callable. It is not used within the attribute when providing - values, but instead exists for other parts of the system to reference. - - If *mutable* is set to False then the local value of the attribute on an - entity can only be set when both the existing local and remote values - are :attr:`ftrack_api.symbol.NOT_SET`. The exception to this is when the - target value is also :attr:`ftrack_api.symbol.NOT_SET`. - - If *computed* is set to True the value is a remote side computed value - and should not be long-term cached. 
- - ''' - super(Attribute, self).__init__() - self._name = name - self._mutable = mutable - self._computed = computed - self.default_value = default_value - - self._local_key = 'local' - self._remote_key = 'remote' - - def __repr__(self): - '''Return representation of entity.''' - return '<{0}.{1}({2}) object at {3}>'.format( - self.__module__, - self.__class__.__name__, - self.name, - id(self) - ) - - def get_entity_storage(self, entity): - '''Return attribute storage on *entity* creating if missing.''' - storage_key = '_ftrack_attribute_storage' - storage = getattr(entity, storage_key, None) - if storage is None: - storage = collections.defaultdict( - lambda: - { - self._local_key: ftrack_api.symbol.NOT_SET, - self._remote_key: ftrack_api.symbol.NOT_SET - } - ) - setattr(entity, storage_key, storage) - - return storage - - @property - def name(self): - '''Return name.''' - return self._name - - @property - def mutable(self): - '''Return whether attribute is mutable.''' - return self._mutable - - @property - def computed(self): - '''Return whether attribute is computed.''' - return self._computed - - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - ''' - value = self.get_local_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - value = self.get_remote_value(entity) - if value is not ftrack_api.symbol.NOT_SET: - return value - - if not entity.session.auto_populate: - return value - - self.populate_remote_value(entity) - return self.get_remote_value(entity) - - def get_local_value(self, entity): - '''Return locally set value for *entity*.''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._local_key] - - def get_remote_value(self, entity): - '''Return remote value for *entity*. - - .. 
note:: - - Only return locally stored remote value, do not fetch from remote. - - ''' - storage = self.get_entity_storage(entity) - return storage[self.name][self._remote_key] - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if ( - not self.mutable - and self.is_set(entity) - and value is not ftrack_api.symbol.NOT_SET - ): - raise ftrack_api.exception.ImmutableAttributeError(self) - - old_value = self.get_local_value(entity) - - storage = self.get_entity_storage(entity) - storage[self.name][self._local_key] = value - - # Record operation. - if entity.session.record_operations: - entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity), - self.name, - old_value, - value - ) - ) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - storage = self.get_entity_storage(entity) - storage[self.name][self._remote_key] = value - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*.''' - entity.session.populate([entity], self.name) - - def is_modified(self, entity): - '''Return whether local value set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - return ( - local_value is not ftrack_api.symbol.NOT_SET - and local_value != remote_value - ) - - def is_set(self, entity): - '''Return whether a value is set for *entity*.''' - return any([ - self.get_local_value(entity) is not ftrack_api.symbol.NOT_SET, - self.get_remote_value(entity) is not ftrack_api.symbol.NOT_SET - ]) - - -class ScalarAttribute(Attribute): - '''Represent a scalar value.''' - - def __init__(self, name, data_type, **kw): - '''Initialise property.''' - super(ScalarAttribute, self).__init__(name, **kw) - self.data_type = data_type - - -class ReferenceAttribute(Attribute): - '''Reference another entity.''' - - def __init__(self, name, entity_type, **kw): - '''Initialise property.''' - super(ReferenceAttribute, self).__init__(name, **kw) - self.entity_type = entity_type - - def populate_remote_value(self, entity): - '''Populate remote value for *entity*. - - As attribute references another entity, use that entity's configured - default projections to auto populate useful attributes when loading. - - ''' - reference_entity_type = entity.session.types[self.entity_type] - default_projections = reference_entity_type.default_projections - - projections = [] - if default_projections: - for projection in default_projections: - projections.append('{0}.{1}'.format(self.name, projection)) - else: - projections.append(self.name) - - entity.session.populate([entity], ', '.join(projections)) - - def is_modified(self, entity): - '''Return whether a local value has been set and differs from remote. - - .. note:: - - Will not fetch remote value so may report True even when values - are the same on the remote. 
- - ''' - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - - if local_value is ftrack_api.symbol.NOT_SET: - return False - - if remote_value is ftrack_api.symbol.NOT_SET: - return True - - if ( - ftrack_api.inspection.identity(local_value) - != ftrack_api.inspection.identity(remote_value) - ): - return True - - return False - - - @merge_references - def get_value(self, entity): - return super(ReferenceAttribute, self).get_value( - entity - ) - -class AbstractCollectionAttribute(Attribute): - '''Base class for collection attributes.''' - - #: Collection class used by attribute. - collection_class = None - - @merge_references - def get_value(self, entity): - '''Return current value for *entity*. - - If a value was set locally then return it, otherwise return last known - remote value. If no remote value yet retrieved, make a request for it - via the session and block until available. - - .. note:: - - As value is a collection that is mutable, will transfer a remote - value into the local value on access if no local value currently - set. - - ''' - super(AbstractCollectionAttribute, self).get_value(entity) - - # Conditionally, copy remote value into local value so that it can be - # mutated without side effects. - local_value = self.get_local_value(entity) - remote_value = self.get_remote_value(entity) - if ( - local_value is ftrack_api.symbol.NOT_SET - and isinstance(remote_value, self.collection_class) - ): - try: - with entity.session.operation_recording(False): - self.set_local_value(entity, copy.copy(remote_value)) - except ftrack_api.exception.ImmutableAttributeError: - pass - - value = self.get_local_value(entity) - - # If the local value is still not set then attempt to set it with a - # suitable placeholder collection so that the caller can interact with - # the collection using its normal interface. This is required for a - # newly created entity for example. 
It *could* be done as a simple - # default value, but that would incur cost for every collection even - # when they are not modified before commit. - if value is ftrack_api.symbol.NOT_SET: - try: - with entity.session.operation_recording(False): - self.set_local_value( - entity, - # None should be treated as empty collection. - None - ) - except ftrack_api.exception.ImmutableAttributeError: - pass - - return self.get_local_value(entity) - - def set_local_value(self, entity, value): - '''Set local *value* for *entity*.''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = self.mutable - - super(AbstractCollectionAttribute, self).set_local_value(entity, value) - - def set_remote_value(self, entity, value): - '''Set remote *value*. - - .. note:: - - Only set locally stored remote value, do not persist to remote. - - ''' - if value is not ftrack_api.symbol.NOT_SET: - value = self._adapt_to_collection(entity, value) - value.mutable = False - - super(AbstractCollectionAttribute, self).set_remote_value(entity, value) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to appropriate collection instance for *entity*. - - .. note:: - - If *value* is None then return a suitable empty collection. - - ''' - raise NotImplementedError() - - -class CollectionAttribute(AbstractCollectionAttribute): - '''Represent a collection of other entities.''' - - #: Collection class used by attribute. 
- collection_class = ftrack_api.collection.Collection - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to a Collection instance on *entity*.''' - - if not isinstance(value, ftrack_api.collection.Collection): - - if value is None: - value = ftrack_api.collection.Collection(entity, self) - - elif isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute' - ) - - return value - - -class KeyValueMappedCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped key, value collection of entities.''' - - #: Collection class used by attribute. - collection_class = ftrack_api.collection.KeyValueMappedCollectionProxy - - def __init__( - self, name, creator, key_attribute, value_attribute, **kw - ): - '''Initialise attribute with *name*. - - *creator* should be a function that accepts a dictionary of data and - is used by the referenced collection to create new entities in the - collection. - - *key_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'key' of the dictionary. - - *value_attribute* should be the name of the attribute on an entity in - the collection that represents the value for 'value' of the dictionary. 
- - ''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - - super(KeyValueMappedCollectionAttribute, self).__init__(name, **kw) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.KeyValueMappedCollectionProxy - ): - - if value is None: - value = ftrack_api.collection.KeyValueMappedCollectionProxy( - ftrack_api.collection.Collection(entity, self), - self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, (list, ftrack_api.collection.Collection)): - - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.KeyValueMappedCollectionProxy( - value, self.creator, self.key_attribute, - self.value_attribute - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.KeyValueMappedCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a KeyValueMappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). 
- collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.KeyValueMappedCollectionProxy( - collection, self.creator, - self.key_attribute, self.value_attribute - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value - - -class CustomAttributeCollectionAttribute(AbstractCollectionAttribute): - '''Represent a mapped custom attribute collection of entities.''' - - #: Collection class used by attribute. - collection_class = ( - ftrack_api.collection.CustomAttributeCollectionProxy - ) - - def _adapt_to_collection(self, entity, value): - '''Adapt *value* to an *entity*.''' - if not isinstance( - value, ftrack_api.collection.CustomAttributeCollectionProxy - ): - - if value is None: - value = ftrack_api.collection.CustomAttributeCollectionProxy( - ftrack_api.collection.Collection(entity, self) - ) - - elif isinstance(value, (list, ftrack_api.collection.Collection)): - - # Why are we creating a new if it is a list? This will cause - # any merge to create a new proxy and collection. - if isinstance(value, list): - value = ftrack_api.collection.Collection( - entity, self, data=value - ) - - value = ftrack_api.collection.CustomAttributeCollectionProxy( - value - ) - - elif isinstance(value, collections.Mapping): - # Convert mapping. - # TODO: When backend model improves, revisit this logic. - # First get existing value and delete all references. 
This is - # needed because otherwise they will not be automatically - # removed server side. - # The following should not cause recursion as the internal - # values should be mapped collections already. - current_value = self.get_value(entity) - if not isinstance( - current_value, - ftrack_api.collection.CustomAttributeCollectionProxy - ): - raise NotImplementedError( - 'Cannot adapt mapping to collection as current value ' - 'type is not a MappedCollectionProxy.' - ) - - # Create the new collection using the existing collection as - # basis. Then update through proxy interface to ensure all - # internal operations called consistently (such as entity - # deletion for key removal). - collection = ftrack_api.collection.Collection( - entity, self, data=current_value.collection[:] - ) - collection_proxy = ( - ftrack_api.collection.CustomAttributeCollectionProxy( - collection - ) - ) - - # Remove expired keys from collection. - expired_keys = set(current_value.keys()) - set(value.keys()) - for key in expired_keys: - del collection_proxy[key] - - # Set new values for existing keys / add new keys. - for key, value in value.items(): - collection_proxy[key] = value - - value = collection_proxy - - else: - raise NotImplementedError( - 'Cannot convert {0!r} to collection.'.format(value) - ) - else: - if value.attribute is not self: - raise ftrack_api.exception.AttributeError( - 'Collection already bound to a different attribute.' - ) - - return value diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py deleted file mode 100644 index 49456dc2..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/cache.py +++ /dev/null @@ -1,579 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -'''Caching framework. - -Defines a standardised :class:`Cache` interface for storing data against -specific keys. 
Key generation is also standardised using a :class:`KeyMaker` -interface. - -Combining a Cache and KeyMaker allows for memoisation of function calls with -respect to the arguments used by using a :class:`Memoiser`. - -As a convenience a simple :func:`memoise` decorator is included for quick -memoisation of function using a global cache and standard key maker. - -''' - -import collections -import functools -import abc -import copy -import inspect -import re -import anydbm -import contextlib -try: - import cPickle as pickle -except ImportError: # pragma: no cover - import pickle - -import ftrack_api.inspection -import ftrack_api.symbol - - -class Cache(object): - '''Cache interface. - - Derive from this to define concrete cache implementations. A cache is - centered around the concept of key:value pairings where the key is unique - across the cache. - - ''' - - __metaclass__ = abc.ABCMeta - - @abc.abstractmethod - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - @abc.abstractmethod - def set(self, key, value): - '''Set *value* for *key*.''' - - @abc.abstractmethod - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - raise NotImplementedError() # pragma: no cover - - def values(self): - '''Return values for current keys.''' - values = [] - for key in self.keys(): - try: - value = self.get(key) - except KeyError: - continue - else: - values.append(value) - - return values - - def clear(self, pattern=None): - '''Remove all keys matching *pattern*. - - *pattern* should be a regular expression string. - - If *pattern* is None then all keys will be removed. 
- - ''' - if pattern is not None: - pattern = re.compile(pattern) - - for key in self.keys(): - if pattern is not None: - if not pattern.search(key): - continue - - try: - self.remove(key) - except KeyError: - pass - - -class ProxyCache(Cache): - '''Proxy another cache.''' - - def __init__(self, proxied): - '''Initialise cache with *proxied* cache instance.''' - self.proxied = proxied - super(ProxyCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.get(key) - - def set(self, key, value): - '''Set *value* for *key*.''' - return self.proxied.set(key, value) - - def remove(self, key): - '''Remove *key* and return stored value. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self.proxied.remove(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self.proxied.keys() - - -class LayeredCache(Cache): - '''Layered cache.''' - - def __init__(self, caches): - '''Initialise cache with *caches*.''' - super(LayeredCache, self).__init__() - self.caches = caches - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - Attempt to retrieve from cache layers in turn, starting with shallowest. - If value retrieved, then also set the value in each higher level cache - up from where retrieved. - - ''' - target_caches = [] - value = ftrack_api.symbol.NOT_SET - - for cache in self.caches: - try: - value = cache.get(key) - except KeyError: - target_caches.append(cache) - continue - else: - break - - if value is ftrack_api.symbol.NOT_SET: - raise KeyError(key) - - # Set value on all higher level caches. 
- for cache in target_caches: - cache.set(key, value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - for cache in self.caches: - cache.set(key, value) - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found in any layer. - - ''' - removed = False - for cache in self.caches: - try: - cache.remove(key) - except KeyError: - pass - else: - removed = True - - if not removed: - raise KeyError(key) - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - keys = [] - for cache in self.caches: - keys.extend(cache.keys()) - - return list(set(keys)) - - -class MemoryCache(Cache): - '''Memory based cache.''' - - def __init__(self): - '''Initialise cache.''' - self._cache = {} - super(MemoryCache, self).__init__() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - return self._cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - self._cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - del self._cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - return self._cache.keys() - - -class FileCache(Cache): - '''File based cache that uses :mod:`anydbm` module. - - .. note:: - - No locking of the underlying file is performed. - - ''' - - def __init__(self, path): - '''Initialise cache at *path*.''' - self.path = path - - # Initialise cache. 
- cache = anydbm.open(self.path, 'c') - cache.close() - - super(FileCache, self).__init__() - - @contextlib.contextmanager - def _database(self): - '''Yield opened database file.''' - cache = anydbm.open(self.path, 'w') - try: - yield cache - finally: - cache.close() - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - return cache[key] - - def set(self, key, value): - '''Set *value* for *key*.''' - with self._database() as cache: - cache[key] = value - - def remove(self, key): - '''Remove *key*. - - Raise :exc:`KeyError` if *key* not found. - - ''' - with self._database() as cache: - del cache[key] - - def keys(self): - '''Return list of keys at this current time. - - .. warning:: - - Actual keys may differ from those returned due to timing of access. - - ''' - with self._database() as cache: - return cache.keys() - - -class SerialisedCache(ProxyCache): - '''Proxied cache that stores values as serialised data.''' - - def __init__(self, proxied, encode=None, decode=None): - '''Initialise cache with *encode* and *decode* callables. - - *proxied* is the underlying cache to use for storage. - - ''' - self.encode = encode - self.decode = decode - super(SerialisedCache, self).__init__(proxied) - - def get(self, key): - '''Return value for *key*. - - Raise :exc:`KeyError` if *key* not found. 
- - ''' - value = super(SerialisedCache, self).get(key) - if self.decode: - value = self.decode(value) - - return value - - def set(self, key, value): - '''Set *value* for *key*.''' - if self.encode: - value = self.encode(value) - - super(SerialisedCache, self).set(key, value) - - -class KeyMaker(object): - '''Generate unique keys.''' - - __metaclass__ = abc.ABCMeta - - def __init__(self): - '''Initialise key maker.''' - super(KeyMaker, self).__init__() - self.item_separator = '' - - def key(self, *items): - '''Return key for *items*.''' - keys = [] - for item in items: - keys.append(self._key(item)) - - return self.item_separator.join(keys) - - @abc.abstractmethod - def _key(self, obj): - '''Return key for *obj*.''' - - -class StringKeyMaker(KeyMaker): - '''Generate string key.''' - - def _key(self, obj): - '''Return key for *obj*.''' - return str(obj) - - -class ObjectKeyMaker(KeyMaker): - '''Generate unique keys for objects.''' - - def __init__(self): - '''Initialise key maker.''' - super(ObjectKeyMaker, self).__init__() - self.item_separator = '\0' - self.mapping_identifier = '\1' - self.mapping_pair_separator = '\2' - self.iterable_identifier = '\3' - self.name_identifier = '\4' - - def _key(self, item): - '''Return key for *item*. - - Returned key will be a pickle like string representing the *item*. This - allows for typically non-hashable objects to be used in key generation - (such as dictionaries). - - If *item* is iterable then each item in it shall also be passed to this - method to ensure correct key generation. - - Special markers are used to distinguish handling of specific cases in - order to ensure uniqueness of key corresponds directly to *item*. - - Example:: - - >>> key_maker = ObjectKeyMaker() - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... return x + y - ... 
- >>> key_maker.key(add, (1, 2)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x02.\x03' - >>> key_maker.key(add, (1, 3)) - '\x04add\x00__main__\x00\x03\x80\x02K\x01.\x00\x80\x02K\x03.\x03' - - ''' - # TODO: Consider using a more robust and comprehensive solution such as - # dill (https://github.com/uqfoundation/dill). - if isinstance(item, collections.Iterable): - if isinstance(item, basestring): - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - if isinstance(item, collections.Mapping): - contents = self.item_separator.join([ - ( - self._key(key) + - self.mapping_pair_separator + - self._key(value) - ) - for key, value in sorted(item.items()) - ]) - return ( - self.mapping_identifier + - contents + - self.mapping_identifier - ) - - else: - contents = self.item_separator.join([ - self._key(item) for item in item - ]) - return ( - self.iterable_identifier + - contents + - self.iterable_identifier - ) - - elif inspect.ismethod(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.im_class.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isfunction(item) or inspect.isclass(item): - return ''.join(( - self.name_identifier, - item.__name__, - self.item_separator, - item.__module__ - )) - - elif inspect.isbuiltin(item): - return self.name_identifier + item.__name__ - - else: - return pickle.dumps(item, pickle.HIGHEST_PROTOCOL) - - -class Memoiser(object): - '''Memoise function calls using a :class:`KeyMaker` and :class:`Cache`. - - Example:: - - >>> memoiser = Memoiser(MemoryCache(), ObjectKeyMaker()) - >>> def add(x, y): - ... "Return sum of *x* and *y*." - ... print 'Called' - ... return x + y - ... - >>> memoiser.call(add, (1, 2), {}) - Called - >>> memoiser.call(add, (1, 2), {}) - >>> memoiser.call(add, (1, 3), {}) - Called - - ''' - - def __init__(self, cache=None, key_maker=None, return_copies=True): - '''Initialise with *cache* and *key_maker* to use. 
- - If *cache* is not specified a default :class:`MemoryCache` will be - used. Similarly, if *key_maker* is not specified a default - :class:`ObjectKeyMaker` will be used. - - If *return_copies* is True then all results returned from the cache will - be deep copies to avoid indirect mutation of cached values. - - ''' - self.cache = cache - if self.cache is None: - self.cache = MemoryCache() - - self.key_maker = key_maker - if self.key_maker is None: - self.key_maker = ObjectKeyMaker() - - self.return_copies = return_copies - super(Memoiser, self).__init__() - - def call(self, function, args=None, kw=None): - '''Call *function* with *args* and *kw* and return result. - - If *function* was previously called with exactly the same arguments - then return cached result if available. - - Store result for call in cache. - - ''' - if args is None: - args = () - - if kw is None: - kw = {} - - # Support arguments being passed as positionals or keywords. - arguments = inspect.getcallargs(function, *args, **kw) - - key = self.key_maker.key(function, arguments) - try: - value = self.cache.get(key) - - except KeyError: - value = function(*args, **kw) - self.cache.set(key, value) - - # If requested, deep copy value to return in order to avoid cached value - # being inadvertently altered by the caller. - if self.return_copies: - value = copy.deepcopy(value) - - return value - - -def memoise_decorator(memoiser): - '''Decorator to memoise function calls using *memoiser*.''' - def outer(function): - - @functools.wraps(function) - def inner(*args, **kw): - return memoiser.call(function, args, kw) - - return inner - - return outer - - -#: Default memoiser. -memoiser = Memoiser() - -#: Default memoise decorator using standard cache and key maker. 
-memoise = memoise_decorator(memoiser) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py deleted file mode 100644 index 91655a7b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/collection.py +++ /dev/null @@ -1,507 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging - -import collections -import copy - -import ftrack_api.exception -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.operation -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Collection(collections.MutableSequence): - '''A collection of entities.''' - - def __init__(self, entity, attribute, mutable=True, data=None): - '''Initialise collection.''' - self.entity = entity - self.attribute = attribute - self._data = [] - self._identities = set() - - # Set initial dataset. - # Note: For initialisation, immutability is deferred till after initial - # population as otherwise there would be no public way to initialise an - # immutable collection. The reason self._data is not just set directly - # is to ensure other logic can be applied without special handling. - self.mutable = True - try: - if data is None: - data = [] - - with self.entity.session.operation_recording(False): - self.extend(data) - finally: - self.mutable = mutable - - def _identity_key(self, entity): - '''Return identity key for *entity*.''' - return str(ftrack_api.inspection.identity(entity)) - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying data store. 
- - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance._data = copy.copy(self._data) - copied_instance._identities = copy.copy(self._identities) - - return copied_instance - - def _notify(self, old_value): - '''Notify about modification.''' - # Record operation. - if self.entity.session.record_operations: - self.entity.session.recorded_operations.push( - ftrack_api.operation.UpdateEntityOperation( - self.entity.entity_type, - ftrack_api.inspection.primary_key(self.entity), - self.attribute.name, - old_value, - self - ) - ) - - def insert(self, index, item): - '''Insert *item* at *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - if item in self: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - self._data.insert(index, item) - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __contains__(self, value): - '''Return whether *value* present in collection.''' - return self._identity_key(value) in self._identities - - def __getitem__(self, index): - '''Return item at *index*.''' - return self._data[index] - - def __setitem__(self, index, item): - '''Set *item* against *index*.''' - if not self.mutable: - raise ftrack_api.exception.ImmutableCollectionError(self) - - try: - existing_index = self.index(item) - except ValueError: - pass - else: - if index != existing_index: - raise ftrack_api.exception.DuplicateItemInCollectionError( - item, self - ) - - old_value = copy.copy(self) - try: - existing_item = self._data[index] - except IndexError: - pass - else: - self._identities.remove(self._identity_key(existing_item)) - - self._data[index] = item - self._identities.add(self._identity_key(item)) - self._notify(old_value) - - def __delitem__(self, index): - '''Remove item at *index*.''' - if not self.mutable: - raise 
ftrack_api.exception.ImmutableCollectionError(self) - - old_value = copy.copy(self) - item = self._data[index] - del self._data[index] - self._identities.remove(self._identity_key(item)) - self._notify(old_value) - - def __len__(self): - '''Return count of items.''' - return len(self._data) - - def __eq__(self, other): - '''Return whether this collection is equal to *other*.''' - if not isinstance(other, Collection): - return False - - return sorted(self._identities) == sorted(other._identities) - - def __ne__(self, other): - '''Return whether this collection is not equal to *other*.''' - return not self == other - - -class MappedCollectionProxy(collections.MutableMapping): - '''Common base class for mapped collection of entities.''' - - def __init__(self, collection): - '''Initialise proxy for *collection*.''' - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.collection = collection - super(MappedCollectionProxy, self).__init__() - - def __copy__(self): - '''Return shallow copy. - - .. note:: - - To maintain expectations on usage, the shallow copy will include a - shallow copy of the underlying collection. - - ''' - cls = self.__class__ - copied_instance = cls.__new__(cls) - copied_instance.__dict__.update(self.__dict__) - copied_instance.collection = copy.copy(self.collection) - - return copied_instance - - @property - def mutable(self): - '''Return whether collection is mutable.''' - return self.collection.mutable - - @mutable.setter - def mutable(self, value): - '''Set whether collection is mutable to *value*.''' - self.collection.mutable = value - - @property - def attribute(self): - '''Return attribute bound to.''' - return self.collection.attribute - - @attribute.setter - def attribute(self, value): - '''Set bound attribute to *value*.''' - self.collection.attribute = value - - -class KeyValueMappedCollectionProxy(MappedCollectionProxy): - '''A mapped collection of key, value entities. 
- - Proxy a standard :class:`Collection` as a mapping where certain attributes - from the entities in the collection are mapped to key, value pairs. - - For example:: - - >>> collection = [Metadata(key='foo', value='bar'), ...] - >>> mapped = KeyValueMappedCollectionProxy( - ... collection, create_metadata, - ... key_attribute='key', value_attribute='value' - ... ) - >>> print mapped['foo'] - 'bar' - >>> mapped['bam'] = 'biz' - >>> print mapped.collection[-1] - Metadata(key='bam', value='biz') - - ''' - - def __init__( - self, collection, creator, key_attribute, value_attribute - ): - '''Initialise collection.''' - self.creator = creator - self.key_attribute = key_attribute - self.value_attribute = value_attribute - super(KeyValueMappedCollectionProxy, self).__init__(collection) - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - for entity in self.collection: - if entity[self.key_attribute] == key: - return entity - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - return entity[self.value_attribute] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - try: - entity = self._get_entity_by_key(key) - except KeyError: - data = { - self.key_attribute: key, - self.value_attribute: value - } - entity = self.creator(self, data) - - if ( - ftrack_api.inspection.state(entity) is - ftrack_api.symbol.CREATED - ): - # Persisting this entity will be handled here, record the - # operation. - self.collection.append(entity) - - else: - # The entity is created and persisted separately by the - # creator. Do not record this operation. - with self.collection.entity.session.operation_recording(False): - # Do not record this operation since it will trigger - # redudant and potentially failing operations. 
- self.collection.append(entity) - - else: - entity[self.value_attribute] = value - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. - - ''' - for index, entity in enumerate(self.collection): - if entity[self.key_attribute] == key: - break - else: - raise KeyError(key) - - del self.collection[index] - entity.session.delete(entity) - - def __iter__(self): - '''Iterate over all keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = set() - for entity in self.collection: - keys.add(entity[self.key_attribute]) - - return len(keys) - - -class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for session.''' - - def _key(self, obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - session = obj.get('session') - if session is not None: - # Key by session only. - return str(id(session)) - - return str(obj) - - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. 
- - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type from ' - 'CustomAttributeConfiguration' - ).all() - - -class CustomAttributeCollectionProxy(MappedCollectionProxy): - '''A mapped collection of custom attribute value entities.''' - - def __init__( - self, collection - ): - '''Initialise collection.''' - self.key_attribute = 'configuration_id' - self.value_attribute = 'value' - super(CustomAttributeCollectionProxy, self).__init__(collection) - - def _get_entity_configurations(self): - '''Return all configurations for current collection entity.''' - entity = self.collection.entity - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Asset': - project_id = entity['parent']['project_id'] - entity_type = 'asset' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. 
- return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - def _get_keys(self): - '''Return a list of all keys.''' - keys = [] - for configuration in self._get_entity_configurations(): - keys.append(configuration['key']) - - return keys - - def _get_entity_by_key(self, key): - '''Return entity instance with matching *key* from collection.''' - configuration_id = self.get_configuration_id_from_key(key) - for entity in self.collection: - if entity[self.key_attribute] == configuration_id: - return entity - - return None - - def get_configuration_id_from_key(self, key): - '''Return id of configuration with matching *key*. - - Raise :exc:`KeyError` if no configuration with matching *key* found. - - ''' - for configuration in self._get_entity_configurations(): - if key == configuration['key']: - return configuration['id'] - - raise KeyError(key) - - def __getitem__(self, key): - '''Return value for *key*.''' - entity = self._get_entity_by_key(key) - - if entity: - return entity[self.value_attribute] - - for configuration in self._get_entity_configurations(): - if configuration['key'] == key: - return configuration['default'] - - raise KeyError(key) - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - custom_attribute_value[self.value_attribute] = value - else: - entity = self.collection.entity - session = entity.session - data = { - self.key_attribute: self.get_configuration_id_from_key(key), - self.value_attribute: value, - 'entity_id': entity['id'] - } - - # Make sure to use the currently active collection. This is - # necessary since a merge might have replaced the current one. - self.collection.entity['custom_attributes'].collection.append( - session.create('CustomAttributeValue', data) - ) - - def __delitem__(self, key): - '''Remove and delete *key*. - - .. note:: - - The associated entity will be deleted as well. 
- - ''' - custom_attribute_value = self._get_entity_by_key(key) - - if custom_attribute_value: - index = self.collection.index(custom_attribute_value) - del self.collection[index] - - custom_attribute_value.session.delete(custom_attribute_value) - else: - self.logger.warning(L( - 'Cannot delete {0!r} on {1!r}, no custom attribute value set.', - key, self.collection.entity - )) - - def __eq__(self, collection): - '''Return True if *collection* equals proxy collection.''' - if collection is ftrack_api.symbol.NOT_SET: - return False - - return collection.collection == self.collection - - def __iter__(self): - '''Iterate over all keys.''' - keys = self._get_keys() - return iter(keys) - - def __len__(self): - '''Return count of keys.''' - keys = self._get_keys() - return len(keys) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py deleted file mode 100644 index 1802e380..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/data.py +++ /dev/null @@ -1,119 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -import os -from abc import ABCMeta, abstractmethod -import tempfile - - -class Data(object): - '''File-like object for manipulating data.''' - - __metaclass__ = ABCMeta - - def __init__(self): - '''Initialise data access.''' - self.closed = False - - @abstractmethod - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - @abstractmethod - def write(self, content): - '''Write content at current position.''' - - def flush(self): - '''Flush buffers ensuring data written.''' - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*. 
- - The *whence* argument is optional and defaults to os.SEEK_SET or 0 - (absolute file positioning); other values are os.SEEK_CUR or 1 - (seek relative to the current position) and os.SEEK_END or 2 - (seek relative to the file's end). - - ''' - raise NotImplementedError('Seek not supported.') - - def tell(self): - '''Return current position of internal pointer.''' - raise NotImplementedError('Tell not supported.') - - def close(self): - '''Flush buffers and prevent further access.''' - self.flush() - self.closed = True - - -class FileWrapper(Data): - '''Data wrapper for Python file objects.''' - - def __init__(self, wrapped_file): - '''Initialise access to *wrapped_file*.''' - self.wrapped_file = wrapped_file - self._read_since_last_write = False - super(FileWrapper, self).__init__() - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - self._read_since_last_write = True - - if limit is None: - limit = -1 - - return self.wrapped_file.read(limit) - - def write(self, content): - '''Write content at current position.''' - if self._read_since_last_write: - # Windows requires a seek before switching from read to write. 
- self.seek(self.tell()) - - self.wrapped_file.write(content) - self._read_since_last_write = False - - def flush(self): - '''Flush buffers ensuring data written.''' - super(FileWrapper, self).flush() - if hasattr(self.wrapped_file, 'flush'): - self.wrapped_file.flush() - - def seek(self, offset, whence=os.SEEK_SET): - '''Move internal pointer by *offset*.''' - self.wrapped_file.seek(offset, whence) - - def tell(self): - '''Return current position of internal pointer.''' - return self.wrapped_file.tell() - - def close(self): - '''Flush buffers and prevent further access.''' - if not self.closed: - super(FileWrapper, self).close() - if hasattr(self.wrapped_file, 'close'): - self.wrapped_file.close() - - -class File(FileWrapper): - '''Data wrapper accepting filepath.''' - - def __init__(self, path, mode='rb'): - '''Open file at *path* with *mode*.''' - file_object = open(path, mode) - super(File, self).__init__(file_object) - - -class String(FileWrapper): - '''Data wrapper using TemporaryFile instance.''' - - def __init__(self, content=None): - '''Initialise data with *content*.''' - super(String, self).__init__( - tempfile.TemporaryFile() - ) - - if content is not None: - self.wrapped_file.write(content) - self.wrapped_file.seek(0) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py deleted file mode 100644 index 1d452f28..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py deleted file mode 100644 index 859d94e4..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/asset_version.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class AssetVersion(ftrack_api.entity.base.Entity): - '''Represent asset version.''' - - def create_component( - self, path, data=None, location=None - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). This version is - automatically set as the component's version. - - If *location* is specified then automatically add component to that - location. - - ''' - if data is None: - data = {} - - data.pop('version_id', None) - data['version'] = self - - return self.session.create_component(path, data=data, location=location) - - def encode_media(self, media, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible, and will be - set as the version's thumbnail. - - The new components will automatically be associated with the version. - A server version of 3.3.32 or higher is required for this to function - properly. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. 
- ''' - return self.session.encode_media( - media, version_id=self['id'], keep_original=keep_original - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py deleted file mode 100644 index f5a1a3ce..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/base.py +++ /dev/null @@ -1,402 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import abc -import collections -import logging - -import ftrack_api.symbol -import ftrack_api.attribute -import ftrack_api.inspection -import ftrack_api.exception -import ftrack_api.operation -from ftrack_api.logging import LazyLogMessage as L - - -class DynamicEntityTypeMetaclass(abc.ABCMeta): - '''Custom metaclass to customise representation of dynamic classes. - - .. note:: - - Derive from same metaclass as derived bases to avoid conflicts. - - ''' - def __repr__(self): - '''Return representation of class.''' - return ''.format(self.__name__) - - -class Entity(collections.MutableMapping): - '''Base class for all entities.''' - - __metaclass__ = DynamicEntityTypeMetaclass - - entity_type = 'Entity' - attributes = None - primary_key_attributes = None - default_projections = None - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - super(Entity, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.session = session - self._inflated = set() - - if data is None: - data = {} - - self.logger.debug(L( - '{0} entity from {1!r}.', - ('Reconstructing' if reconstructing else 'Constructing'), data - )) - - self._ignore_data_keys = ['__entity_type__'] - if not reconstructing: - self._construct(data) - else: - self._reconstruct(data) - - def _construct(self, data): - '''Construct from *data*.''' - # Suspend operation recording so that all modifications can be applied - # in single create operation. In addition, recording a modification - # operation requires a primary key which may not be available yet. - - relational_attributes = dict() - - with self.session.operation_recording(False): - # Set defaults for any unset local attributes. - for attribute in self.__class__.attributes: - if attribute.name not in data: - default_value = attribute.default_value - if callable(default_value): - default_value = default_value(self) - - attribute.set_local_value(self, default_value) - - - # Data represents locally set values. - for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such ' - 'attribute found on entity {1!r}.', key, self - )) - continue - - if not isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - relational_attributes.setdefault( - attribute, value - ) - - else: - attribute.set_local_value(self, value) - - # Record create operation. - # Note: As this operation is recorded *before* any Session.merge takes - # place there is the possibility that the operation will hold references - # to outdated data in entity_data. However, this would be unusual in - # that it would mean the same new entity was created twice and only one - # altered. 
Conversely, if this operation were recorded *after* - # Session.merge took place, any cache would not be able to determine - # the status of the entity, which could be important if the cache should - # not store newly created entities that have not yet been persisted. Out - # of these two 'evils' this approach is deemed the lesser at this time. - # A third, more involved, approach to satisfy both might be to record - # the operation with a PENDING entity_data value and then update with - # merged values post merge. - if self.session.record_operations: - entity_data = {} - - # Lower level API used here to avoid including any empty - # collections that are automatically generated on access. - for attribute in self.attributes: - value = attribute.get_local_value(self) - if value is not ftrack_api.symbol.NOT_SET: - entity_data[attribute.name] = value - - self.session.recorded_operations.push( - ftrack_api.operation.CreateEntityOperation( - self.entity_type, - ftrack_api.inspection.primary_key(self), - entity_data - ) - ) - - for attribute, value in relational_attributes.items(): - # Finally we set values for "relational" attributes, we need - # to do this at the end in order to get the create operations - # in the correct order as the newly created attributes might - # contain references to the newly created entity. - - attribute.set_local_value( - self, value - ) - - def _reconstruct(self, data): - '''Reconstruct from *data*.''' - # Data represents remote values. 
- for key, value in data.items(): - if key in self._ignore_data_keys: - continue - - attribute = self.__class__.attributes.get(key) - if attribute is None: - self.logger.debug(L( - 'Cannot populate {0!r} attribute as no such attribute ' - 'found on entity {1!r}.', key, self - )) - continue - - attribute.set_remote_value(self, value) - - def __repr__(self): - '''Return representation of instance.''' - return ''.format( - self.__class__.__name__, id(self) - ) - - def __str__(self): - '''Return string representation of instance.''' - with self.session.auto_populating(False): - primary_key = ['Unknown'] - try: - primary_key = ftrack_api.inspection.primary_key(self).values() - except KeyError: - pass - - return '<{0}({1})>'.format( - self.__class__.__name__, ', '.join(primary_key) - ) - - def __hash__(self): - '''Return hash representing instance.''' - return hash(str(ftrack_api.inspection.identity(self))) - - def __eq__(self, other): - '''Return whether *other* is equal to this instance. - - .. note:: - - Equality is determined by both instances having the same identity. - Values of attributes are not considered. - - ''' - try: - return ( - ftrack_api.inspection.identity(other) - == ftrack_api.inspection.identity(self) - ) - except (AttributeError, KeyError): - return False - - def __getitem__(self, key): - '''Return attribute value for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - return attribute.get_value(self) - - def __setitem__(self, key, value): - '''Set attribute *value* for *key*.''' - attribute = self.__class__.attributes.get(key) - if attribute is None: - raise KeyError(key) - - attribute.set_local_value(self, value) - - def __delitem__(self, key): - '''Clear attribute value for *key*. - - .. note:: - - Will not remove the attribute, but instead clear any local value - and revert to the last known server value. 
- - ''' - attribute = self.__class__.attributes.get(key) - attribute.set_local_value(self, ftrack_api.symbol.NOT_SET) - - def __iter__(self): - '''Iterate over all attributes keys.''' - for attribute in self.__class__.attributes: - yield attribute.name - - def __len__(self): - '''Return count of attributes.''' - return len(self.__class__.attributes) - - def values(self): - '''Return list of values.''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).values() - - def items(self): - '''Return list of tuples of (key, value) pairs. - - .. note:: - - Will fetch all values from the server if not already fetched or set - locally. - - ''' - if self.session.auto_populate: - self._populate_unset_scalar_attributes() - - return super(Entity, self).items() - - def clear(self): - '''Reset all locally modified attribute values.''' - for attribute in self: - del self[attribute] - - def merge(self, entity, merged=None): - '''Merge *entity* attribute values and other data into this entity. - - Only merge values from *entity* that are not - :attr:`ftrack_api.symbol.NOT_SET`. - - Return a list of changes made with each change being a mapping with - the keys: - - * type - Either 'remote_attribute', 'local_attribute' or 'property'. - * name - The name of the attribute / property modified. - * old_value - The previous value. - * new_value - The new merged value. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - log_message = 'Merged {type} "{name}": {old_value!r} -> {new_value!r}' - changes = [] - - # Attributes. - - # Prioritise by type so that scalar values are set first. This should - # guarantee that the attributes making up the identity of the entity - # are merged before merging any collections that may have references to - # this entity. 
- attributes = collections.deque() - for attribute in entity.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - attributes.appendleft(attribute) - else: - attributes.append(attribute) - - for other_attribute in attributes: - attribute = self.attributes.get(other_attribute.name) - - # Local attributes. - other_local_value = other_attribute.get_local_value(entity) - if other_local_value is not ftrack_api.symbol.NOT_SET: - local_value = attribute.get_local_value(self) - if local_value != other_local_value: - merged_local_value = self.session.merge( - other_local_value, merged=merged - ) - - attribute.set_local_value(self, merged_local_value) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_local_value - }) - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # Remote attributes. - other_remote_value = other_attribute.get_remote_value(entity) - if other_remote_value is not ftrack_api.symbol.NOT_SET: - remote_value = attribute.get_remote_value(self) - if remote_value != other_remote_value: - merged_remote_value = self.session.merge( - other_remote_value, merged=merged - ) - - attribute.set_remote_value( - self, merged_remote_value - ) - - changes.append({ - 'type': 'remote_attribute', - 'name': attribute.name, - 'old_value': remote_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - # We need to handle collections separately since - # they may store a local copy of the remote attribute - # even though it may not be modified. - if not isinstance( - attribute, ftrack_api.attribute.AbstractCollectionAttribute - ): - continue - - local_value = attribute.get_local_value( - self - ) - - # Populated but not modified, update it. 
- if ( - local_value is not ftrack_api.symbol.NOT_SET and - local_value == remote_value - ): - attribute.set_local_value( - self, merged_remote_value - ) - changes.append({ - 'type': 'local_attribute', - 'name': attribute.name, - 'old_value': local_value, - 'new_value': merged_remote_value - }) - - log_debug and self.logger.debug( - log_message.format(**changes[-1]) - ) - - return changes - - def _populate_unset_scalar_attributes(self): - '''Populate all unset scalar attributes in one query.''' - projections = [] - for attribute in self.attributes: - if isinstance(attribute, ftrack_api.attribute.ScalarAttribute): - if attribute.get_remote_value(self) is ftrack_api.symbol.NOT_SET: - projections.append(attribute.name) - - if projections: - self.session.populate([self], ', '.join(projections)) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py deleted file mode 100644 index 9d59c4c0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/component.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Component(ftrack_api.entity.base.Entity): - '''Represent a component.''' - - def get_availability(self, locations=None): - '''Return availability in *locations*. - - If *locations* is None, all known locations will be checked. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.session.get_component_availability( - self, locations=locations - ) - - -class CreateThumbnailMixin(object): - '''Mixin to add create_thumbnail method on entity class.''' - - def create_thumbnail(self, path, data=None): - '''Set entity thumbnail from *path*. 
- - Creates a thumbnail component using in the ftrack.server location - :meth:`Session.create_component - ` The thumbnail component - will be created using *data* if specified. If no component name is - given, `thumbnail` will be used. - - The file is expected to be of an appropriate size and valid file - type. - - .. note:: - - A :meth:`Session.commit` will be - automatically issued. - - ''' - if data is None: - data = {} - if not data.get('name'): - data['name'] = 'thumbnail' - - thumbnail_component = self.session.create_component( - path, data, location=None - ) - - origin_location = self.session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - server_location = self.session.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - server_location.add_component(thumbnail_component, [origin_location]) - - # TODO: This commit can be avoided by reordering the operations in - # this method so that the component is transferred to ftrack.server - # after the thumbnail has been set. - # - # There is currently a bug in the API backend, causing the operations - # to *some* times be ordered wrongly, where the update occurs before - # the component has been created, causing an integrity error. - # - # Once this issue has been resolved, this commit can be removed and - # and the update placed between component creation and registration. 
- self['thumbnail_id'] = thumbnail_component['id'] - self.session.commit() - - return thumbnail_component diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py deleted file mode 100644 index e925b70f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/factory.py +++ /dev/null @@ -1,435 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import uuid -import functools - -import ftrack_api.attribute -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.entity.component -import ftrack_api.entity.asset_version -import ftrack_api.entity.project_schema -import ftrack_api.entity.note -import ftrack_api.entity.job -import ftrack_api.entity.user -import ftrack_api.symbol -import ftrack_api.cache -from ftrack_api.logging import LazyLogMessage as L - - -class Factory(object): - '''Entity class factory.''' - - def __init__(self): - '''Initialise factory.''' - super(Factory, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*. - - *bases* should be a list of bases to give the constructed class. If not - specified, default to :class:`ftrack_api.entity.base.Entity`. - - ''' - entity_type = schema['id'] - class_name = entity_type - - class_bases = bases - if class_bases is None: - class_bases = [ftrack_api.entity.base.Entity] - - class_namespace = dict() - - # Build attributes for class. 
- attributes = ftrack_api.attribute.Attributes() - immutable_properties = schema.get('immutable', []) - computed_properties = schema.get('computed', []) - for name, fragment in schema.get('properties', {}).items(): - mutable = name not in immutable_properties - computed = name in computed_properties - - default = fragment.get('default', ftrack_api.symbol.NOT_SET) - if default == '{uid}': - default = lambda instance: str(uuid.uuid4()) - - data_type = fragment.get('type', ftrack_api.symbol.NOT_SET) - - if data_type is not ftrack_api.symbol.NOT_SET: - - if data_type in ( - 'string', 'boolean', 'integer', 'number', 'variable', - 'object' - ): - # Basic scalar attribute. - if data_type == 'number': - data_type = 'float' - - if data_type == 'string': - data_format = fragment.get('format') - if data_format == 'date-time': - data_type = 'datetime' - - attribute = self.create_scalar_attribute( - class_name, name, mutable, computed, default, data_type - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'array': - attribute = self.create_collection_attribute( - class_name, name, mutable - ) - if attribute: - attributes.add(attribute) - - elif data_type == 'mapped_array': - reference = fragment.get('items', {}).get('$ref') - if not reference: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_mapped_collection_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - else: - self.logger.debug(L( - 'Skipping {0}.{1} attribute with unrecognised data ' - 'type {2}', class_name, name, data_type - )) - else: - # Reference attribute. 
- reference = fragment.get('$ref', ftrack_api.symbol.NOT_SET) - if reference is ftrack_api.symbol.NOT_SET: - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that does ' - 'not define a schema reference.', class_name, name - )) - continue - - attribute = self.create_reference_attribute( - class_name, name, mutable, reference - ) - if attribute: - attributes.add(attribute) - - default_projections = schema.get('default_projections', []) - - # Construct class. - class_namespace['entity_type'] = entity_type - class_namespace['attributes'] = attributes - class_namespace['primary_key_attributes'] = schema['primary_key'][:] - class_namespace['default_projections'] = default_projections - - cls = type( - str(class_name), # type doesn't accept unicode. - tuple(class_bases), - class_namespace - ) - - return cls - - def create_scalar_attribute( - self, class_name, name, mutable, computed, default, data_type - ): - '''Return appropriate scalar attribute instance.''' - return ftrack_api.attribute.ScalarAttribute( - name, data_type=data_type, default_value=default, mutable=mutable, - computed=computed - ) - - def create_reference_attribute(self, class_name, name, mutable, reference): - '''Return appropriate reference attribute instance.''' - return ftrack_api.attribute.ReferenceAttribute( - name, reference, mutable=mutable - ) - - def create_collection_attribute(self, class_name, name, mutable): - '''Return appropriate collection attribute instance.''' - return ftrack_api.attribute.CollectionAttribute( - name, mutable=mutable - ) - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has ' - 'no implementation defined for reference {2}.', - class_name, name, reference - )) - - -class PerSessionDefaultKeyMaker(ftrack_api.cache.KeyMaker): - '''Generate key for defaults.''' - - def _key(self, 
obj): - '''Return key for *obj*.''' - if isinstance(obj, dict): - entity = obj.get('entity') - if entity is not None: - # Key by session only. - return str(id(entity.session)) - - return str(obj) - - -#: Memoiser for use with default callables that should only be called once per -# session. -memoise_defaults = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - -#: Memoiser for use with callables that should be called once per session. -memoise_session = ftrack_api.cache.memoise_decorator( - ftrack_api.cache.Memoiser( - key_maker=PerSessionDefaultKeyMaker(), return_copies=False - ) -) - - -@memoise_session -def _get_custom_attribute_configurations(session): - '''Return list of custom attribute configurations. - - The configuration objects will have key, project_id, id and object_type_id - populated. - - ''' - return session.query( - 'select key, project_id, id, object_type_id, entity_type, ' - 'is_hierarchical from CustomAttributeConfiguration' - ).all() - - -def _get_entity_configurations(entity): - '''Return all configurations for current collection entity.''' - entity_type = None - project_id = None - object_type_id = None - - if 'object_type_id' in entity.keys(): - project_id = entity['project_id'] - entity_type = 'task' - object_type_id = entity['object_type_id'] - - if entity.entity_type == 'AssetVersion': - project_id = entity['asset']['parent']['project_id'] - entity_type = 'assetversion' - - if entity.entity_type == 'Project': - project_id = entity['id'] - entity_type = 'show' - - if entity.entity_type == 'User': - entity_type = 'user' - - if entity.entity_type == 'Asset': - entity_type = 'asset' - - if entity.entity_type in ('TypedContextList', 'AssetVersionList'): - entity_type = 'list' - - if entity_type is None: - raise ValueError( - 'Entity {!r} not supported.'.format(entity) - ) - - configurations = [] - for configuration in _get_custom_attribute_configurations( - 
entity.session - ): - if ( - configuration['entity_type'] == entity_type and - configuration['project_id'] in (project_id, None) and - configuration['object_type_id'] == object_type_id - ): - # The custom attribute configuration is for the target entity type. - configurations.append(configuration) - elif ( - entity_type in ('asset', 'assetversion', 'show', 'task') and - configuration['project_id'] in (project_id, None) and - configuration['is_hierarchical'] - ): - # The target entity type allows hierarchical attributes. - configurations.append(configuration) - - # Return with global configurations at the end of the list. This is done - # so that global conigurations are shadowed by project specific if the - # configurations list is looped when looking for a matching `key`. - return sorted( - configurations, key=lambda item: item['project_id'] is None - ) - - -class StandardFactory(Factory): - '''Standard entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - if not bases: - bases = [] - - extra_bases = [] - # Customise classes. - if schema['id'] == 'ProjectSchema': - extra_bases = [ftrack_api.entity.project_schema.ProjectSchema] - - elif schema['id'] == 'Location': - extra_bases = [ftrack_api.entity.location.Location] - - elif schema['id'] == 'AssetVersion': - extra_bases = [ftrack_api.entity.asset_version.AssetVersion] - - elif schema['id'].endswith('Component'): - extra_bases = [ftrack_api.entity.component.Component] - - elif schema['id'] == 'Note': - extra_bases = [ftrack_api.entity.note.Note] - - elif schema['id'] == 'Job': - extra_bases = [ftrack_api.entity.job.Job] - - elif schema['id'] == 'User': - extra_bases = [ftrack_api.entity.user.User] - - bases = extra_bases + bases - - # If bases does not contain any items, add the base entity class. - if not bases: - bases = [ftrack_api.entity.base.Entity] - - # Add mixins. 
- if 'notes' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.note.CreateNoteMixin - ) - - if 'thumbnail_id' in schema.get('properties', {}): - bases.append( - ftrack_api.entity.component.CreateThumbnailMixin - ) - - cls = super(StandardFactory, self).create(schema, bases=bases) - - return cls - - def create_mapped_collection_attribute( - self, class_name, name, mutable, reference - ): - '''Return appropriate mapped collection attribute instance.''' - if reference == 'Metadata': - - def create_metadata(proxy, data, reference): - '''Return metadata for *data*.''' - entity = proxy.collection.entity - session = entity.session - data.update({ - 'parent_id': entity['id'], - 'parent_type': entity.entity_type - }) - return session.create(reference, data) - - creator = functools.partial( - create_metadata, reference=reference - ) - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - elif reference == 'CustomAttributeValue': - return ( - ftrack_api.attribute.CustomAttributeCollectionAttribute( - name, mutable=mutable - ) - ) - - elif reference.endswith('CustomAttributeValue'): - def creator(proxy, data): - '''Create a custom attribute based on *proxy* and *data*. - - Raise :py:exc:`KeyError` if related entity is already presisted - to the server. The proxy represents dense custom attribute - values and should never create new custom attribute values - through the proxy if entity exists on the remote. - - If the entity is not persisted the ususal - CustomAttributeValue items cannot be updated as - the related entity does not exist on remote and values not in - the proxy. Instead a CustomAttributeValue will - be reconstructed and an update operation will be recorded. 
- - ''' - entity = proxy.collection.entity - if ( - ftrack_api.inspection.state(entity) is not - ftrack_api.symbol.CREATED - ): - raise KeyError( - 'Custom attributes must be created explicitly for the ' - 'given entity type before being set.' - ) - - configuration = None - for candidate in _get_entity_configurations(entity): - if candidate['key'] == data['key']: - configuration = candidate - break - - if configuration is None: - raise ValueError( - u'No valid custom attribute for data {0!r} was found.' - .format(data) - ) - - create_data = dict(data.items()) - create_data['configuration_id'] = configuration['id'] - create_data['entity_id'] = entity['id'] - - session = entity.session - - # Create custom attribute by reconstructing it and update the - # value. This will prevent a create operation to be sent to the - # remote, as create operations for this entity type is not - # allowed. Instead an update operation will be recorded. - value = create_data.pop('value') - item = session.create( - reference, - create_data, - reconstructing=True - ) - - # Record update operation. 
- item['value'] = value - - return item - - key_attribute = 'key' - value_attribute = 'value' - - return ftrack_api.attribute.KeyValueMappedCollectionAttribute( - name, creator, key_attribute, value_attribute, mutable=mutable - ) - - self.logger.debug(L( - 'Skipping {0}.{1} mapped_array attribute that has no configuration ' - 'for reference {2}.', class_name, name, reference - )) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py deleted file mode 100644 index ae37922c..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/job.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class Job(ftrack_api.entity.base.Entity): - '''Represent job.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - To set a job `description` visible in the web interface, *data* can - contain a key called `data` which should be a JSON serialised - dictionary containing description:: - - data = { - 'status': 'running', - 'data': json.dumps(dict(description='My job description.')), - ... - } - - Will raise a :py:exc:`ValueError` if *data* contains `type` and `type` - is set to something not equal to "api_job". - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. - - ''' - - if not reconstructing: - if data.get('type') not in ('api_job', None): - raise ValueError( - 'Invalid job type "{0}". 
Must be "api_job"'.format( - data.get('type') - ) - ) - - super(Job, self).__init__( - session, data=data, reconstructing=reconstructing - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py deleted file mode 100644 index 707f4fa6..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/location.py +++ /dev/null @@ -1,733 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections -import functools - -import ftrack_api.entity.base -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.symbol -import ftrack_api.inspection -from ftrack_api.logging import LazyLogMessage as L - - -class Location(ftrack_api.entity.base.Entity): - '''Represent storage for components.''' - - def __init__(self, session, data=None, reconstructing=False): - '''Initialise entity. - - *session* is an instance of :class:`ftrack_api.session.Session` that - this entity instance is bound to. - - *data* is a mapping of key, value pairs to apply as initial attribute - values. - - *reconstructing* indicates whether this entity is being reconstructed, - such as from a query, and therefore should not have any special creation - logic applied, such as initialising defaults for missing data. 
- - ''' - self.accessor = ftrack_api.symbol.NOT_SET - self.structure = ftrack_api.symbol.NOT_SET - self.resource_identifier_transformer = ftrack_api.symbol.NOT_SET - self.priority = 95 - super(Location, self).__init__( - session, data=data, reconstructing=reconstructing - ) - - def __str__(self): - '''Return string representation of instance.''' - representation = super(Location, self).__str__() - - with self.session.auto_populating(False): - name = self['name'] - if name is not ftrack_api.symbol.NOT_SET: - representation = representation.replace( - '(', '("{0}", '.format(name) - ) - - return representation - - def add_component(self, component, source, recursive=True): - '''Add *component* to location. - - *component* should be a single component instance. - - *source* should be an instance of another location that acts as the - source. - - Raise :exc:`ftrack_api.ComponentInLocationError` if the *component* - already exists in this location. - - Raise :exc:`ftrack_api.LocationError` if managing data and the generated - target structure for the component already exists according to the - accessor. This helps prevent potential data loss by avoiding overwriting - existing data. Note that there is a race condition between the check and - the write so if another process creates data at the same target during - that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component registration. - - ''' - return self.add_components( - [component], sources=source, recursive=recursive - ) - - def add_components(self, components, sources, recursive=True, _depth=0): - '''Add *components* to location. - - *components* should be a list of component instances. - - *sources* may be either a single source or a list of sources. If a list - then each corresponding index in *sources* will be used for each - *component*. A source should be an instance of another location. 
- - Raise :exc:`ftrack_api.exception.ComponentInLocationError` if any - component in *components* already exists in this location. In this case, - no changes will be made and no data transferred. - - Raise :exc:`ftrack_api.exception.LocationError` if managing data and the - generated target structure for the component already exists according to - the accessor. This helps prevent potential data loss by avoiding - overwriting existing data. Note that there is a race condition between - the check and the write so if another process creates data at the same - target during that period it will be overwritten. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration. - - .. important:: - - If this location manages data then the *components* data is first - transferred to the target prescribed by the structure plugin, using - the configured accessor. If any component fails to transfer then - :exc:`ftrack_api.exception.LocationError` is raised and none of the - components are registered with the database. In this case it is left - up to the caller to decide and act on manually cleaning up any - transferred data using the 'transferred' detail in the raised error. - - Likewise, after transfer, all components are registered with the - database in a batch call. If any component causes an error then all - components will remain unregistered and - :exc:`ftrack_api.exception.LocationError` will be raised detailing - issues and any transferred data under the 'transferred' detail key. - - ''' - if ( - isinstance(sources, basestring) - or not isinstance(sources, collections.Sequence) - ): - sources = [sources] - - sources_count = len(sources) - if sources_count not in (1, len(components)): - raise ValueError( - 'sources must be either a single source or a sequence of ' - 'sources with indexes corresponding to passed components.' 
- ) - - if not self.structure: - raise ftrack_api.exception.LocationError( - 'No structure defined for location {location}.', - details=dict(location=self) - ) - - if not components: - # Optimisation: Return early when no components to process, such as - # when called recursively on an empty sequence component. - return - - indent = ' ' * (_depth + 1) - - # Check that components not already added to location. - existing_components = [] - try: - self.get_resource_identifiers(components) - - except ftrack_api.exception.ComponentNotInLocationError as error: - missing_component_ids = [ - missing_component['id'] - for missing_component in error.details['components'] - ] - for component in components: - if component['id'] not in missing_component_ids: - existing_components.append(component) - - else: - existing_components.extend(components) - - if existing_components: - # Some of the components already present in location. - raise ftrack_api.exception.ComponentInLocationError( - existing_components, self - ) - - # Attempt to transfer each component's data to this location. - transferred = [] - - for index, component in enumerate(components): - try: - # Determine appropriate source. - if sources_count == 1: - source = sources[0] - else: - source = sources[index] - - # Add members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.add_components( - component['members'], source, recursive=recursive, - _depth=(_depth + 1) - ) - - # Add component to this location. - context = self._get_context(component, source) - resource_identifier = self.structure.get_resource_identifier( - component, context - ) - - # Manage data transfer. 
- self._add_data(component, resource_identifier, source) - - except Exception as error: - raise ftrack_api.exception.LocationError( - 'Failed to transfer component {component} data to location ' - '{location} due to error:\n{indent}{error}\n{indent}' - 'Transferred component data that may require cleanup: ' - '{transferred}', - details=dict( - indent=indent, - component=component, - location=self, - error=error, - transferred=transferred - ) - ) - - else: - transferred.append((component, resource_identifier)) - - # Register all successfully transferred components. - components_to_register = [] - component_resource_identifiers = [] - - try: - for component, resource_identifier in transferred: - if self.resource_identifier_transformer: - # Optionally encode resource identifier before storing. - resource_identifier = ( - self.resource_identifier_transformer.encode( - resource_identifier, - context={'component': component} - ) - ) - - components_to_register.append(component) - component_resource_identifiers.append(resource_identifier) - - # Store component in location information. - self._register_components_in_location( - components, component_resource_identifiers - ) - - except Exception as error: - raise ftrack_api.exception.LocationError( - 'Failed to register components with location {location} due to ' - 'error:\n{indent}{error}\n{indent}Transferred component data ' - 'that may require cleanup: {transferred}', - details=dict( - indent=indent, - location=self, - error=error, - transferred=transferred - ) - ) - - # Publish events. 
- for component in components_to_register: - - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - - self.session.event_hub.publish( - ftrack_api.event.base.Event( - topic=ftrack_api.symbol.COMPONENT_ADDED_TO_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ), - ), - on_error='ignore' - ) - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - try: - source_resource_identifier = source.get_resource_identifier( - component - ) - except ftrack_api.exception.ComponentNotInLocationError: - pass - else: - context.update(dict( - source_resource_identifier=source_resource_identifier - )) - - return context - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - ''' - self.logger.debug(L( - 'Adding data for component {0!r} from source {1!r} to location ' - '{2!r} using resource identifier {3!r}.', - component, resource_identifier, source, self - )) - - # Read data from source and write to this location. - if not source.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for source location {location}.', - details=dict(location=source) - ) - - if not self.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for target location {location}.', - details=dict(location=self) - ) - - is_container = 'members' in component.keys() - if is_container: - # TODO: Improve this check. Possibly introduce an inspection - # such as ftrack_api.inspection.is_sequence_component. - if component.entity_type != 'SequenceComponent': - self.accessor.make_container(resource_identifier) - - else: - # Try to make container of component. 
- try: - container = self.accessor.get_container( - resource_identifier - ) - - except ftrack_api.exception.AccessorParentResourceNotFoundError: - # Container could not be retrieved from - # resource_identifier. Assume that there is no need to - # make the container. - pass - - else: - # No need for existence check as make_container does not - # recreate existing containers. - self.accessor.make_container(container) - - if self.accessor.exists(resource_identifier): - # Note: There is a race condition here in that the - # data may be added externally between the check for - # existence and the actual write which would still - # result in potential data loss. However, there is no - # good cross platform, cross accessor solution for this - # at present. - raise ftrack_api.exception.LocationError( - 'Cannot add component as data already exists and ' - 'overwriting could result in data loss. Computed ' - 'target resource identifier was: {0}' - .format(resource_identifier) - ) - - # Read and write data. - source_data = source.accessor.open( - source.get_resource_identifier(component), 'rb' - ) - target_data = self.accessor.open(resource_identifier, 'wb') - - # Read/write data in chunks to avoid reading all into memory at the - # same time. - chunked_read = functools.partial( - source_data.read, ftrack_api.symbol.CHUNK_SIZE - ) - for chunk in iter(chunked_read, ''): - target_data.write(chunk) - - target_data.close() - source_data.close() - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location against *resource_identifier*.''' - return self._register_components_in_location( - [component], [resource_identifier] - ) - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. 
- - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self.session.create( - 'ComponentLocation', data=dict( - component=component, - location=self, - resource_identifier=resource_identifier - ) - ) - - self.session.commit() - - def remove_component(self, component, recursive=True): - '''Remove *component* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the component deregistration. - - ''' - return self.remove_components([component], recursive=recursive) - - def remove_components(self, components, recursive=True): - '''Remove *components* from location. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components deregistration. - - ''' - for component in components: - # Check component is in this location - self.get_resource_identifier(component) - - # Remove members first for container components. - is_container = 'members' in component.keys() - if is_container and recursive: - self.remove_components( - component['members'], recursive=recursive - ) - - # Remove data. - self._remove_data(component) - - # Remove metadata. - self._deregister_component_in_location(component) - - # Emit event. 
- component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - self.session.event_hub.publish( - ftrack_api.event.base.Event( - topic=ftrack_api.symbol.COMPONENT_REMOVED_FROM_LOCATION_TOPIC, - data=dict( - component_id=component_id, - location_id=location_id - ) - ), - on_error='ignore' - ) - - def _remove_data(self, component): - '''Remove data associated with *component*.''' - if not self.accessor: - raise ftrack_api.exception.LocationError( - 'No accessor defined for location {location}.', - details=dict(location=self) - ) - - try: - self.accessor.remove( - self.get_resource_identifier(component) - ) - except ftrack_api.exception.AccessorResourceNotFoundError: - # If accessor does not support detecting sequence paths then an - # AccessorResourceNotFoundError is raised. For now, if the - # component type is 'SequenceComponent' assume success. - if not component.entity_type == 'SequenceComponent': - raise - - def _deregister_component_in_location(self, component): - '''Deregister *component* from location.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - location_id = ftrack_api.inspection.primary_key(self).values()[0] - - # TODO: Use session.get for optimisation. - component_location = self.session.query( - 'ComponentLocation where component_id is {0} and location_id is ' - '{1}'.format(component_id, location_id) - )[0] - - self.session.delete(component_location) - - # TODO: Should auto-commit here be optional? - self.session.commit() - - def get_component_availability(self, component): - '''Return availability of *component* in this location as a float.''' - return self.session.get_component_availability( - component, locations=[self] - )[self['id']] - - def get_component_availabilities(self, components): - '''Return availabilities of *components* in this location. - - Return list of float values corresponding to each component. 
- - ''' - return [ - availability[self['id']] for availability in - self.session.get_component_availabilities( - components, locations=[self] - ) - ] - - def get_resource_identifier(self, component): - '''Return resource identifier for *component*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if the - component is not present in this location. - - ''' - return self.get_resource_identifiers([component])[0] - - def get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the components are not present in this location. - - ''' - resource_identifiers = self._get_resource_identifiers(components) - - # Optionally decode resource identifier. - if self.resource_identifier_transformer: - for index, resource_identifier in enumerate(resource_identifiers): - resource_identifiers[index] = ( - self.resource_identifier_transformer.decode( - resource_identifier, - context={'component': components[index]} - ) - ) - - return resource_identifiers - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the components are not present in this location. 
- - ''' - component_ids_mapping = collections.OrderedDict() - for component in components: - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - component_ids_mapping[component_id] = component - - component_locations = self.session.query( - 'select component_id, resource_identifier from ComponentLocation ' - 'where location_id is {0} and component_id in ({1})' - .format( - ftrack_api.inspection.primary_key(self).values()[0], - ', '.join(component_ids_mapping.keys()) - ) - ) - - resource_identifiers_map = {} - for component_location in component_locations: - resource_identifiers_map[component_location['component_id']] = ( - component_location['resource_identifier'] - ) - - resource_identifiers = [] - missing = [] - for component_id, component in component_ids_mapping.items(): - if component_id not in resource_identifiers_map: - missing.append(component) - else: - resource_identifiers.append( - resource_identifiers_map[component_id] - ) - - if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - def get_filesystem_path(self, component): - '''Return filesystem path for *component*.''' - return self.get_filesystem_paths([component])[0] - - def get_filesystem_paths(self, components): - '''Return filesystem paths for *components*.''' - resource_identifiers = self.get_resource_identifiers(components) - - filesystem_paths = [] - for resource_identifier in resource_identifiers: - filesystem_paths.append( - self.accessor.get_filesystem_path(resource_identifier) - ) - - return filesystem_paths - - def get_url(self, component): - '''Return url for *component*. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *component* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. 
- ''' - resource_identifier = self.get_resource_identifier(component) - - return self.accessor.get_url(resource_identifier) - - -class MemoryLocationMixin(object): - '''Represent storage for components. - - Unlike a standard location, only store metadata for components in this - location in memory rather than persisting to the database. - - ''' - - @property - def _cache(self): - '''Return cache.''' - try: - cache = self.__cache - except AttributeError: - cache = self.__cache = {} - - return cache - - def _register_component_in_location(self, component, resource_identifier): - '''Register *component* in location with *resource_identifier*.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - self._cache[component_id] = resource_identifier - - def _register_components_in_location( - self, components, resource_identifiers - ): - '''Register *components* in location against *resource_identifiers*. - - Indices of *components* and *resource_identifiers* should align. - - ''' - for component, resource_identifier in zip( - components, resource_identifiers - ): - self._register_component_in_location(component, resource_identifier) - - def _deregister_component_in_location(self, component): - '''Deregister *component* in location.''' - component_id = ftrack_api.inspection.primary_key(component).values()[0] - self._cache.pop(component_id) - - def _get_resource_identifiers(self, components): - '''Return resource identifiers for *components*. - - Raise :exc:`ftrack_api.exception.ComponentNotInLocationError` if any - of the referenced components are not present in this location. 
- - ''' - resource_identifiers = [] - missing = [] - for component in components: - component_id = ftrack_api.inspection.primary_key( - component - ).values()[0] - resource_identifier = self._cache.get(component_id) - if resource_identifier is None: - missing.append(component) - else: - resource_identifiers.append(resource_identifier) - - if missing: - raise ftrack_api.exception.ComponentNotInLocationError( - missing, self - ) - - return resource_identifiers - - -class UnmanagedLocationMixin(object): - '''Location that does not manage data.''' - - def _add_data(self, component, resource_identifier, source): - '''Manage transfer of *component* data from *source*. - - *resource_identifier* specifies the identifier to use with this - locations accessor. - - Overridden to have no effect. - - ''' - return - - def _remove_data(self, component): - '''Remove data associated with *component*. - - Overridden to have no effect. - - ''' - return - - -class OriginLocationMixin(MemoryLocationMixin, UnmanagedLocationMixin): - '''Special origin location that expects sources as filepaths.''' - - def _get_context(self, component, source): - '''Return context for *component* and *source*.''' - context = {} - if source: - context.update(dict( - source_resource_identifier=source - )) - - return context - - -class ServerLocationMixin(object): - '''Location representing ftrack server. - - Adds convenience methods to location, specific to ftrack server. - ''' - def get_thumbnail_url(self, component, size=None): - '''Return thumbnail url for *component*. - - Optionally, specify *size* to constrain the downscaled image to size - x size pixels. - - Raise :exc:`~ftrack_api.exception.AccessorFilesystemPathError` if - URL could not be determined from *resource_identifier* or - :exc:`~ftrack_api.exception.AccessorUnsupportedOperationError` if - retrieving URL is not supported by the location's accessor. 
- ''' - resource_identifier = self.get_resource_identifier(component) - return self.accessor.get_thumbnail_url(resource_identifier, size) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py deleted file mode 100644 index f5a94037..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/note.py +++ /dev/null @@ -1,105 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import warnings - -import ftrack_api.entity.base - - -class Note(ftrack_api.entity.base.Entity): - '''Represent a note.''' - - def create_reply( - self, content, author - ): - '''Create a reply with *content* and *author*. - - .. note:: - - This is a helper method. To create replies manually use the - standard :meth:`Session.create` method. - - ''' - reply = self.session.create( - 'Note', { - 'author': author, - 'content': content - } - ) - - self['replies'].append(reply) - - return reply - - -class CreateNoteMixin(object): - '''Mixin to add create_note method on entity class.''' - - def create_note( - self, content, author, recipients=None, category=None, labels=None - ): - '''Create note with *content*, *author*. - - NoteLabels can be set by including *labels*. - - Note category can be set by including *category*. - - *recipients* can be specified as a list of user or group instances. - - ''' - note_label_support = 'NoteLabel' in self.session.types - - if not labels: - labels = [] - - if labels and not note_label_support: - raise ValueError( - 'NoteLabel is not supported by the current server version.' - ) - - if category and labels: - raise ValueError( - 'Both category and labels cannot be set at the same time.' 
- ) - - if not recipients: - recipients = [] - - data = { - 'content': content, - 'author': author - } - - if category: - if note_label_support: - labels = [category] - warnings.warn( - 'category argument will be removed in an upcoming version, ' - 'please use labels instead.', - PendingDeprecationWarning - ) - else: - data['category_id'] = category['id'] - - note = self.session.create('Note', data) - - self['notes'].append(note) - - for resource in recipients: - recipient = self.session.create('Recipient', { - 'note_id': note['id'], - 'resource_id': resource['id'] - }) - - note['recipients'].append(recipient) - - for label in labels: - self.session.create( - 'NoteLabelLink', - { - 'label_id': label['id'], - 'note_id': note['id'] - } - ) - - return note diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py deleted file mode 100644 index ec6db7c0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/project_schema.py +++ /dev/null @@ -1,94 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.base - - -class ProjectSchema(ftrack_api.entity.base.Entity): - '''Class representing ProjectSchema.''' - - def get_statuses(self, schema, type_id=None): - '''Return statuses for *schema* and optional *type_id*. - - *type_id* is the id of the Type for a TypedContext and can be used to - get statuses where the workflow has been overridden. - - ''' - # Task has overrides and need to be handled separately. 
- if schema == 'Task': - if type_id is not None: - overrides = self['_overrides'] - for override in overrides: - if override['type_id'] == type_id: - return override['workflow_schema']['statuses'][:] - - return self['_task_workflow']['statuses'][:] - - elif schema == 'AssetVersion': - return self['_version_workflow']['statuses'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have statuses.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_status from SchemaStatus ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [ - schema_type['task_status'] for schema_type in result - ] - - raise ValueError( - 'No valid statuses were found for schema {0}.'.format(schema) - ) - - def get_types(self, schema): - '''Return types for *schema*.''' - # Task need to be handled separately. 
- if schema == 'Task': - return self['_task_type_schema']['types'][:] - - else: - try: - EntityTypeClass = self.session.types[schema] - except KeyError: - raise ValueError('Schema {0} does not exist.'.format(schema)) - - object_type_id_attribute = EntityTypeClass.attributes.get( - 'object_type_id' - ) - - try: - object_type_id = object_type_id_attribute.default_value - except AttributeError: - raise ValueError( - 'Schema {0} does not have types.'.format(schema) - ) - - for _schema in self['_schemas']: - if _schema['type_id'] == object_type_id: - result = self.session.query( - 'select task_type from SchemaType ' - 'where schema_id is {0}'.format(_schema['id']) - ) - return [schema_type['task_type'] for schema_type in result] - - raise ValueError( - 'No valid types were found for schema {0}.'.format(schema) - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py deleted file mode 100644 index 511ad4ba..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/entity/user.py +++ /dev/null @@ -1,123 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import arrow - -import ftrack_api.entity.base -import ftrack_api.exception - - -class User(ftrack_api.entity.base.Entity): - '''Represent a user.''' - - def start_timer(self, context=None, comment='', name=None, force=False): - '''Start a timer for *context* and return it. - - *force* can be used to automatically stop an existing timer and create a - timelog for it. If you need to get access to the created timelog, use - :func:`stop_timer` instead. - - *comment* and *name* are optional but will be set on the timer. - - .. note:: - - This method will automatically commit the changes and if *force* is - False then it will fail with a - :class:`ftrack_api.exception.NotUniqueError` exception if a - timer is already running. 
- - ''' - if force: - try: - self.stop_timer() - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Failed to stop existing timer.') - - timer = self.session.create('Timer', { - 'user': self, - 'context': context, - 'name': name, - 'comment': comment - }) - - # Commit the new timer and try to catch any error that indicate another - # timelog already exists and inform the user about it. - try: - self.session.commit() - except ftrack_api.exception.ServerError as error: - if 'IntegrityError' in str(error): - raise ftrack_api.exception.NotUniqueError( - ('Failed to start a timelog for user with id: {0}, it is ' - 'likely that a timer is already running. Either use ' - 'force=True or stop the timer first.').format(self['id']) - ) - else: - # Reraise the error as it might be something unrelated. - raise - - return timer - - def stop_timer(self): - '''Stop the current timer and return a timelog created from it. - - If a timer is not running, a - :exc:`ftrack_api.exception.NoResultFoundError` exception will be - raised. - - .. note:: - - This method will automatically commit the changes. - - ''' - timer = self.session.query( - 'Timer where user_id = "{0}"'.format(self['id']) - ).one() - - # If the server is running in the same timezone as the local - # timezone, we remove the TZ offset to get the correct duration. - is_timezone_support_enabled = self.session.server_information.get( - 'is_timezone_support_enabled', None - ) - if is_timezone_support_enabled is None: - self.logger.warning( - 'Could not identify if server has timezone support enabled. ' - 'Will assume server is running in UTC.' 
- ) - is_timezone_support_enabled = True - - if is_timezone_support_enabled: - now = arrow.now() - else: - now = arrow.now().replace(tzinfo='utc') - - delta = now - timer['start'] - duration = delta.days * 24 * 60 * 60 + delta.seconds - - timelog = self.session.create('Timelog', { - 'user_id': timer['user_id'], - 'context_id': timer['context_id'], - 'comment': timer['comment'], - 'start': timer['start'], - 'duration': duration, - 'name': timer['name'] - }) - - self.session.delete(timer) - self.session.commit() - - return timelog - - def send_invite(self): - '''Send a invation email to the user''' - - self.session.send_user_invite( - self - ) - def reset_api_key(self): - '''Reset the users api key.''' - - response = self.session.reset_remote( - 'api_key', entity=self - ) - - return response['api_key'] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py deleted file mode 100644 index 1aab07ed..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py deleted file mode 100644 index b5fd57da..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/base.py +++ /dev/null @@ -1,85 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import uuid -import collections - - -class Event(collections.MutableMapping): - '''Represent a single event.''' - - def __init__(self, topic, id=None, data=None, sent=None, - source=None, target='', in_reply_to_event=None): - '''Initialise event. - - *topic* is the required topic for the event. 
It can use a dotted - notation to demarcate groupings. For example, 'ftrack.update'. - - *id* is the unique id for this event instance. It is primarily used when - replying to an event. If not supplied a default uuid based value will - be used. - - *data* refers to event specific data. It should be a mapping structure - and defaults to an empty dictionary if not supplied. - - *sent* is the timestamp the event is sent. It will be set automatically - as send time unless specified here. - - *source* is information about where the event originated. It should be - a mapping and include at least a unique id value under an 'id' key. If - not specified, senders usually populate the value automatically at - publish time. - - *target* can be an expression that targets this event. For example, - a reply event would target the event to the sender of the source event. - The expression will be tested against subscriber information only. - - *in_reply_to_event* is used when replying to an event and should contain - the unique id of the event being replied to. 
- - ''' - super(Event, self).__init__() - self._data = dict( - id=id or uuid.uuid4().hex, - data=data or {}, - topic=topic, - sent=sent, - source=source or {}, - target=target, - in_reply_to_event=in_reply_to_event - ) - self._stopped = False - - def stop(self): - '''Stop further processing of this event.''' - self._stopped = True - - def is_stopped(self): - '''Return whether event has been stopped.''' - return self._stopped - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py deleted file mode 100644 index 0535e4fd..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/expression.py +++ /dev/null @@ -1,282 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from operator import eq, ne, ge, le, gt, lt - -from pyparsing import (Group, Word, CaselessKeyword, Forward, - FollowedBy, Suppress, oneOf, OneOrMore, Optional, - alphanums, quotedString, removeQuotes) - -import ftrack_api.exception - -# Do not enable packrat since it is not thread-safe and will result in parsing -# exceptions in a multi threaded environment. 
-# ParserElement.enablePackrat() - - -class Parser(object): - '''Parse string based expression into :class:`Expression` instance.''' - - def __init__(self): - '''Initialise parser.''' - self._operators = { - '=': eq, - '!=': ne, - '>=': ge, - '<=': le, - '>': gt, - '<': lt - } - self._parser = self._construct_parser() - super(Parser, self).__init__() - - def _construct_parser(self): - '''Construct and return parser.''' - field = Word(alphanums + '_.') - operator = oneOf(self._operators.keys()) - value = Word(alphanums + '-_,./*@+') - quoted_value = quotedString('quoted_value').setParseAction(removeQuotes) - - condition = Group( - field + operator + (quoted_value | value) - )('condition') - - not_ = Optional(Suppress(CaselessKeyword('not')))('not') - and_ = Suppress(CaselessKeyword('and'))('and') - or_ = Suppress(CaselessKeyword('or'))('or') - - expression = Forward() - parenthesis = Suppress('(') + expression + Suppress(')') - previous = condition | parenthesis - - for conjunction in (not_, and_, or_): - current = Forward() - - if conjunction in (and_, or_): - conjunction_expression = ( - FollowedBy(previous + conjunction + previous) - + Group( - previous + OneOrMore(conjunction + previous) - )(conjunction.resultsName) - ) - - elif conjunction in (not_, ): - conjunction_expression = ( - FollowedBy(conjunction.expr + current) - + Group(conjunction + current)(conjunction.resultsName) - ) - - else: # pragma: no cover - raise ValueError('Unrecognised conjunction.') - - current <<= (conjunction_expression | previous) - previous = current - - expression <<= previous - return expression('expression') - - def parse(self, expression): - '''Parse string *expression* into :class:`Expression`. - - Raise :exc:`ftrack_api.exception.ParseError` if *expression* could - not be parsed. 
- - ''' - result = None - expression = expression.strip() - if expression: - try: - result = self._parser.parseString( - expression, parseAll=True - ) - except Exception as error: - raise ftrack_api.exception.ParseError( - 'Failed to parse: {0}. {1}'.format(expression, error) - ) - - return self._process(result) - - def _process(self, result): - '''Process *result* using appropriate method. - - Method called is determined by the name of the result. - - ''' - method_name = '_process_{0}'.format(result.getName()) - method = getattr(self, method_name) - return method(result) - - def _process_expression(self, result): - '''Process *result* as expression.''' - return self._process(result[0]) - - def _process_not(self, result): - '''Process *result* as NOT operation.''' - return Not(self._process(result[0])) - - def _process_and(self, result): - '''Process *result* as AND operation.''' - return All([self._process(entry) for entry in result]) - - def _process_or(self, result): - '''Process *result* as OR operation.''' - return Any([self._process(entry) for entry in result]) - - def _process_condition(self, result): - '''Process *result* as condition.''' - key, operator, value = result - return Condition(key, self._operators[operator], value) - - def _process_quoted_value(self, result): - '''Process *result* as quoted value.''' - return result - - -class Expression(object): - '''Represent a structured expression to test candidates against.''' - - def __str__(self): - '''Return string representation.''' - return '<{0}>'.format(self.__class__.__name__) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return True - - -class All(Expression): - '''Match candidate that matches all of the specified expressions. - - .. note:: - - If no expressions are supplied then will always match. 
- - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(All, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return all([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Any(Expression): - '''Match candidate that matches any of the specified expressions. - - .. note:: - - If no expressions are supplied then will never match. - - ''' - - def __init__(self, expressions=None): - '''Initialise with list of *expressions* to match against.''' - self._expressions = expressions or [] - super(Any, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} [{1}]>'.format( - self.__class__.__name__, - ' '.join(map(str, self._expressions)) - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return any([ - expression.match(candidate) for expression in self._expressions - ]) - - -class Not(Expression): - '''Negate expression.''' - - def __init__(self, expression): - '''Initialise with *expression* to negate.''' - self._expression = expression - super(Not, self).__init__() - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, - self._expression - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - return not self._expression.match(candidate) - - -class Condition(Expression): - '''Represent condition.''' - - def __init__(self, key, operator, value): - '''Initialise condition. - - *key* is the key to check on the data when matching. It can be a nested - key represented by dots. 
For example, 'data.eventType' would attempt to - match candidate['data']['eventType']. If the candidate is missing any - of the requested keys then the match fails immediately. - - *operator* is the operator function to use to perform the match between - the retrieved candidate value and the conditional *value*. - - If *value* is a string, it can use a wildcard '*' at the end to denote - that any values matching the substring portion are valid when matching - equality only. - - ''' - self._key = key - self._operator = operator - self._value = value - self._wildcard = '*' - self._operatorMapping = { - eq: '=', - ne: '!=', - ge: '>=', - le: '<=', - gt: '>', - lt: '<' - } - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}{2}{3}>'.format( - self.__class__.__name__, - self._key, - self._operatorMapping.get(self._operator, self._operator), - self._value - ) - - def match(self, candidate): - '''Return whether *candidate* satisfies this expression.''' - key_parts = self._key.split('.') - - try: - value = candidate - for keyPart in key_parts: - value = value[keyPart] - except (KeyError, TypeError): - return False - - if ( - self._operator is eq - and isinstance(self._value, basestring) - and self._value[-1] == self._wildcard - ): - return self._value[:-1] in value - else: - return self._operator(value, self._value) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py deleted file mode 100644 index 9f4ba80c..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/hub.py +++ /dev/null @@ -1,1091 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2013 ftrack - -from __future__ import absolute_import - -import collections -import urlparse -import threading -import Queue as queue -import logging -import time -import uuid -import operator -import functools -import json -import socket 
-import warnings - -import requests -import requests.exceptions -import websocket - -import ftrack_api.exception -import ftrack_api.event.base -import ftrack_api.event.subscriber -import ftrack_api.event.expression -from ftrack_api.logging import LazyLogMessage as L - - -SocketIoSession = collections.namedtuple('SocketIoSession', [ - 'id', - 'heartbeatTimeout', - 'supportedTransports', -]) - - -ServerDetails = collections.namedtuple('ServerDetails', [ - 'scheme', - 'hostname', - 'port', -]) - - - - -class EventHub(object): - '''Manage routing of events.''' - - _future_signature_warning = ( - 'When constructing your Session object you did not explicitly define ' - 'auto_connect_event_hub as True even though you appear to be publishing ' - 'and / or subscribing to asynchronous events. In version version 2.0 of ' - 'the ftrack-python-api the default behavior will change from True ' - 'to False. Please make sure to update your tools. You can read more at ' - 'http://ftrack-python-api.rtd.ftrack.com/en/stable/release/migration.html' - ) - - def __init__(self, server_url, api_user, api_key): - '''Initialise hub, connecting to ftrack *server_url*. - - *api_user* is the user to authenticate as and *api_key* is the API key - to authenticate with. - - ''' - super(EventHub, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self.id = uuid.uuid4().hex - self._connection = None - - self._unique_packet_id = 0 - self._packet_callbacks = {} - self._lock = threading.RLock() - - self._wait_timeout = 4 - - self._subscribers = [] - self._reply_callbacks = {} - self._intentional_disconnect = False - - self._event_queue = queue.Queue() - self._event_namespace = 'ftrack.event' - self._expression_parser = ftrack_api.event.expression.Parser() - - # Default values for auto reconnection timeout on unintentional - # disconnection. Equates to 5 minutes. 
- self._auto_reconnect_attempts = 30 - self._auto_reconnect_delay = 10 - - self._deprecation_warning_auto_connect = False - - # Mapping of Socket.IO codes to meaning. - self._code_name_mapping = { - '0': 'disconnect', - '1': 'connect', - '2': 'heartbeat', - '3': 'message', - '4': 'json', - '5': 'event', - '6': 'acknowledge', - '7': 'error' - } - self._code_name_mapping.update( - dict((name, code) for code, name in self._code_name_mapping.items()) - ) - - self._server_url = server_url - self._api_user = api_user - self._api_key = api_key - - # Parse server URL and store server details. - url_parse_result = urlparse.urlparse(self._server_url) - if not url_parse_result.scheme: - raise ValueError('Could not determine scheme from server url.') - - if not url_parse_result.hostname: - raise ValueError('Could not determine hostname from server url.') - - self.server = ServerDetails( - url_parse_result.scheme, - url_parse_result.hostname, - url_parse_result.port - ) - - def get_server_url(self): - '''Return URL to server.''' - return '{0}://{1}'.format( - self.server.scheme, self.get_network_location() - ) - - def get_network_location(self): - '''Return network location part of url (hostname with optional port).''' - if self.server.port: - return '{0}:{1}'.format(self.server.hostname, self.server.port) - else: - return self.server.hostname - - @property - def secure(self): - '''Return whether secure connection used.''' - return self.server.scheme == 'https' - - def connect(self): - '''Initialise connection to server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if already - connected or connection fails. - - ''' - - self._deprecation_warning_auto_connect = False - - if self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Already connected.' - ) - - # Reset flag tracking whether disconnection was intentional. - self._intentional_disconnect = False - - try: - # Connect to socket.io server using websocket transport. 
- session = self._get_socket_io_session() - - if 'websocket' not in session.supportedTransports: - raise ValueError( - 'Server does not support websocket sessions.' - ) - - scheme = 'wss' if self.secure else 'ws' - url = '{0}://{1}/socket.io/1/websocket/{2}'.format( - scheme, self.get_network_location(), session.id - ) - - # timeout is set to 60 seconds to avoid the issue where the socket - # ends up in a bad state where it is reported as connected but the - # connection has been closed. The issue happens often when connected - # to a secure socket and the computer goes to sleep. - # More information on how the timeout works can be found here: - # https://docs.python.org/2/library/socket.html#socket.socket.setblocking - self._connection = websocket.create_connection(url, timeout=60) - - except Exception as error: - error_message = ( - 'Failed to connect to event server at {server_url} with ' - 'error: "{error}".' - ) - - error_details = { - 'error': unicode(error), - 'server_url': self.get_server_url() - } - - self.logger.debug( - L( - error_message, **error_details - ), - exc_info=1 - ) - raise ftrack_api.exception.EventHubConnectionError( - error_message, - details=error_details - ) - - # Start background processing thread. - self._processor_thread = _ProcessorThread(self) - self._processor_thread.start() - - # Subscribe to reply events if not already. Note: Only adding the - # subscriber locally as the following block will notify server of all - # existing subscribers, which would cause the server to report a - # duplicate subscriber error if EventHub.subscribe was called here. - try: - self._add_subscriber( - 'topic=ftrack.meta.reply', - self._handle_reply, - subscriber=dict( - id=self.id - ) - ) - except ftrack_api.exception.NotUniqueError: - pass - - # Now resubscribe any existing stored subscribers. This can happen when - # reconnecting automatically for example. 
- for subscriber in self._subscribers[:]: - self._notify_server_about_subscriber(subscriber) - - @property - def connected(self): - '''Return if connected.''' - return self._connection is not None and self._connection.connected - - def disconnect(self, unsubscribe=True): - '''Disconnect from server. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - If *unsubscribe* is True then unsubscribe all current subscribers - automatically before disconnecting. - - ''' - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Not currently connected.' - ) - - else: - # Set flag to indicate disconnection was intentional. - self._intentional_disconnect = True - - # Set blocking to true on socket to make sure unsubscribe events - # are emitted before closing the connection. - self._connection.sock.setblocking(1) - - # Unsubscribe all subscribers. - if unsubscribe: - for subscriber in self._subscribers[:]: - self.unsubscribe(subscriber.metadata['id']) - - # Now disconnect. - self._connection.close() - self._connection = None - - # Shutdown background processing thread. - self._processor_thread.cancel() - - # Join to it if it is not current thread to help ensure a clean - # shutdown. - if threading.current_thread() != self._processor_thread: - self._processor_thread.join(self._wait_timeout) - - def reconnect(self, attempts=10, delay=5): - '''Reconnect to server. - - Make *attempts* number of attempts with *delay* in seconds between each - attempt. - - .. note:: - - All current subscribers will be automatically resubscribed after - successful reconnection. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if fail to - reconnect. 
- - ''' - try: - self.disconnect(unsubscribe=False) - except ftrack_api.exception.EventHubConnectionError: - pass - - for attempt in range(attempts): - self.logger.debug(L( - 'Reconnect attempt {0} of {1}', attempt, attempts - )) - - # Silence logging temporarily to avoid lots of failed connection - # related information. - try: - logging.disable(logging.CRITICAL) - - try: - self.connect() - except ftrack_api.exception.EventHubConnectionError: - time.sleep(delay) - else: - break - - finally: - logging.disable(logging.NOTSET) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to reconnect to event server at {0} after {1} attempts.' - .format(self.get_server_url(), attempts) - ) - - def wait(self, duration=None): - '''Wait for events and handle as they arrive. - - If *duration* is specified, then only process events until duration is - reached. *duration* is in seconds though float values can be used for - smaller values. - - ''' - started = time.time() - - while True: - try: - event = self._event_queue.get(timeout=0.1) - except queue.Empty: - pass - else: - self._handle(event) - - # Additional special processing of events. - if event['topic'] == 'ftrack.meta.disconnected': - break - - if duration is not None: - if (time.time() - started) > duration: - break - - def get_subscriber_by_identifier(self, identifier): - '''Return subscriber with matching *identifier*. - - Return None if no subscriber with *identifier* found. - - ''' - for subscriber in self._subscribers[:]: - if subscriber.metadata.get('id') == identifier: - return subscriber - - return None - - def subscribe(self, subscription, callback, subscriber=None, priority=100): - '''Register *callback* for *subscription*. - - A *subscription* is a string that can specify in detail which events the - callback should receive. The filtering is applied against each event - object. Nested references are supported using '.' separators. 
- For example, 'topic=foo and data.eventType=Shot' would match the - following event:: - - - - The *callback* should accept an instance of - :class:`ftrack_api.event.base.Event` as its sole argument. - - Callbacks are called in order of *priority*. The lower the priority - number the sooner it will be called, with 0 being the first. The - default priority is 100. Note that priority only applies against other - callbacks registered with this hub and not as a global priority. - - An earlier callback can prevent processing of subsequent callbacks by - calling :meth:`Event.stop` on the passed `event` before - returning. - - .. warning:: - - Handlers block processing of other received events. For long - running callbacks it is advisable to delegate the main work to - another process or thread. - - A *callback* can be attached to *subscriber* information that details - the subscriber context. A subscriber context will be generated - automatically if not supplied. - - .. note:: - - The subscription will be stored locally, but until the server - receives notification of the subscription it is possible the - callback will not be called. - - Return subscriber identifier. - - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - # Add subscriber locally. - subscriber = self._add_subscriber( - subscription, callback, subscriber, priority - ) - - # Notify server now if possible. - try: - self._notify_server_about_subscriber(subscriber) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server about new subscriber {0} ' - 'as server not currently reachable.', subscriber.metadata['id'] - )) - - return subscriber.metadata['id'] - - def _add_subscriber( - self, subscription, callback, subscriber=None, priority=100 - ): - '''Add subscriber locally. - - See :meth:`subscribe` for argument descriptions. - - Return :class:`ftrack_api.event.subscriber.Subscriber` instance. 
- - Raise :exc:`ftrack_api.exception.NotUniqueError` if a subscriber with - the same identifier already exists. - - ''' - if subscriber is None: - subscriber = {} - - subscriber.setdefault('id', uuid.uuid4().hex) - - # Check subscriber not already subscribed. - existing_subscriber = self.get_subscriber_by_identifier( - subscriber['id'] - ) - - if existing_subscriber is not None: - raise ftrack_api.exception.NotUniqueError( - 'Subscriber with identifier {0} already exists.' - .format(subscriber['id']) - ) - - subscriber = ftrack_api.event.subscriber.Subscriber( - subscription=subscription, - callback=callback, - metadata=subscriber, - priority=priority - ) - - self._subscribers.append(subscriber) - - return subscriber - - def _notify_server_about_subscriber(self, subscriber): - '''Notify server of new *subscriber*.''' - subscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.subscribe', - data=dict( - subscriber=subscriber.metadata, - subscription=str(subscriber.subscription) - ) - ) - - self._publish( - subscribe_event, - callback=functools.partial(self._on_subscribed, subscriber) - ) - - def _on_subscribed(self, subscriber, response): - '''Handle acknowledgement of subscription.''' - if response.get('success') is False: - self.logger.warning(L( - 'Server failed to subscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def unsubscribe(self, subscriber_identifier): - '''Unsubscribe subscriber with *subscriber_identifier*. - - .. note:: - - If the server is not reachable then it won't be notified of the - unsubscription. However, the subscriber will be removed locally - regardless. 
- - ''' - subscriber = self.get_subscriber_by_identifier(subscriber_identifier) - - if subscriber is None: - raise ftrack_api.exception.NotFoundError( - 'Cannot unsubscribe missing subscriber with identifier {0}' - .format(subscriber_identifier) - ) - - self._subscribers.pop(self._subscribers.index(subscriber)) - - # Notify the server if possible. - unsubscribe_event = ftrack_api.event.base.Event( - topic='ftrack.meta.unsubscribe', - data=dict(subscriber=subscriber.metadata) - ) - - try: - self._publish( - unsubscribe_event, - callback=functools.partial(self._on_unsubscribed, subscriber) - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug(L( - 'Failed to notify server to unsubscribe subscriber {0} as ' - 'server not currently reachable.', subscriber.metadata['id'] - )) - - def _on_unsubscribed(self, subscriber, response): - '''Handle acknowledgement of unsubscribing *subscriber*.''' - if response.get('success') is not True: - self.logger.warning(L( - 'Server failed to unsubscribe subscriber {0}: {1}', - subscriber.metadata['id'], response.get('message') - )) - - def _prepare_event(self, event): - '''Prepare *event* for sending.''' - event['source'].setdefault('id', self.id) - event['source'].setdefault('user', { - 'username': self._api_user - }) - - def _prepare_reply_event(self, event, source_event, source=None): - '''Prepare *event* as a reply to another *source_event*. - - Modify *event*, setting appropriate values to target event correctly as - a reply. - - ''' - event['target'] = 'id={0}'.format(source_event['source']['id']) - event['in_reply_to_event'] = source_event['id'] - if source is not None: - event['source'] = source - - def publish( - self, event, synchronous=False, on_reply=None, on_error='raise' - ): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. 
note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. - - .. note:: - - Will not be called when *synchronous* is True. - - If *on_error* is set to 'ignore' then errors raised during publish of - event will be caught by this method and ignored. - - ''' - if self._deprecation_warning_auto_connect and not synchronous: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - try: - return self._publish( - event, synchronous=synchronous, on_reply=on_reply - ) - except Exception: - if on_error == 'ignore': - pass - else: - raise - - def publish_reply(self, source_event, data, source=None): - '''Publish a reply event to *source_event* with supplied *data*. - - If *source* is specified it will be used for the source value of the - sent event. - - ''' - reply_event = ftrack_api.event.base.Event( - 'ftrack.meta.reply', - data=data - ) - self._prepare_reply_event(reply_event, source_event, source=source) - self.publish(reply_event) - - def _publish(self, event, synchronous=False, callback=None, on_reply=None): - '''Publish *event*. - - If *synchronous* is specified as True then this method will wait and - return a list of results from any called callbacks. - - .. note:: - - Currently, if synchronous is True then only locally registered - callbacks will be called and no event will be sent to the server. - This may change in future. - - A *callback* can also be specified. This callback will be called once - the server acknowledges receipt of the sent event. A default callback - that checks for errors from the server will be used if not specified. - - *on_reply* is an optional callable to call with any reply event that is - received in response to the published *event*. 
Note that there is no - guarantee that a reply will be sent. - - Raise :exc:`ftrack_api.exception.EventHubConnectionError` if not - currently connected. - - ''' - # Prepare event adding any relevant additional information. - self._prepare_event(event) - - if synchronous: - # Bypass emitting event to server and instead call locally - # registered handlers directly, collecting and returning results. - return self._handle(event, synchronous=synchronous) - - if not self.connected: - raise ftrack_api.exception.EventHubConnectionError( - 'Cannot publish event asynchronously as not connected to ' - 'server.' - ) - - # Use standard callback if none specified. - if callback is None: - callback = functools.partial(self._on_published, event) - - # Emit event to central server for asynchronous processing. - try: - # Register on reply callback if specified. - if on_reply is not None: - # TODO: Add cleanup process that runs after a set duration to - # garbage collect old reply callbacks and prevent dictionary - # growing too large. - self._reply_callbacks[event['id']] = on_reply - - try: - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except ftrack_api.exception.EventHubConnectionError: - # Connection may have dropped temporarily. Wait a few moments to - # see if background thread reconnects automatically. - time.sleep(15) - - self._emit_event_packet( - self._event_namespace, event, callback=callback - ) - except: - raise - - except Exception: - # Failure to send event should not cause caller to fail. - # TODO: This behaviour is inconsistent with the failing earlier on - # lack of connection and also with the error handling parameter of - # EventHub.publish. Consider refactoring. 
- self.logger.exception(L('Error sending event {0}.', event)) - - def _on_published(self, event, response): - '''Handle acknowledgement of published event.''' - if response.get('success', False) is False: - self.logger.error(L( - 'Server responded with error while publishing event {0}. ' - 'Error was: {1}', event, response.get('message') - )) - - def _handle(self, event, synchronous=False): - '''Handle *event*. - - If *synchronous* is True, do not send any automatic reply events. - - ''' - # Sort by priority, lower is higher. - # TODO: Use a sorted list to avoid sorting each time in order to improve - # performance. - subscribers = sorted( - self._subscribers, key=operator.attrgetter('priority') - ) - - results = [] - - target = event.get('target', None) - target_expression = None - if target: - try: - target_expression = self._expression_parser.parse(target) - except Exception: - self.logger.exception(L( - 'Cannot handle event as failed to parse event target ' - 'information: {0}', event - )) - return - - for subscriber in subscribers: - # Check if event is targeted to the subscriber. - if ( - target_expression is not None - and not target_expression.match(subscriber.metadata) - ): - continue - - # Check if subscriber interested in the event. - if not subscriber.interested_in(event): - continue - - response = None - - try: - response = subscriber.callback(event) - results.append(response) - except Exception: - self.logger.exception(L( - 'Error calling subscriber {0} for event {1}.', - subscriber, event - )) - - # Automatically publish a non None response as a reply when not in - # synchronous mode. 
- if not synchronous: - if self._deprecation_warning_auto_connect: - warnings.warn( - self._future_signature_warning, FutureWarning - ) - - if response is not None: - try: - self.publish_reply( - event, data=response, source=subscriber.metadata - ) - - except Exception: - self.logger.exception(L( - 'Error publishing response {0} from subscriber {1} ' - 'for event {2}.', response, subscriber, event - )) - - # Check whether to continue processing topic event. - if event.is_stopped(): - self.logger.debug(L( - 'Subscriber {0} stopped event {1}. Will not process ' - 'subsequent subscriber callbacks for this event.', - subscriber, event - )) - break - - return results - - def _handle_reply(self, event): - '''Handle reply *event*, passing it to any registered callback.''' - callback = self._reply_callbacks.get(event['in_reply_to_event'], None) - if callback is not None: - callback(event) - - def subscription(self, subscription, callback, subscriber=None, - priority=100): - '''Return context manager with *callback* subscribed to *subscription*. - - The subscribed callback will be automatically unsubscribed on exit - of the context manager. - - ''' - return _SubscriptionContext( - self, subscription, callback, subscriber=subscriber, - priority=priority, - ) - - # Socket.IO interface. - # - - def _get_socket_io_session(self): - '''Connect to server and retrieve session information.''' - socket_io_url = ( - '{0}://{1}/socket.io/1/?api_user={2}&api_key={3}' - ).format( - self.server.scheme, - self.get_network_location(), - self._api_user, - self._api_key - ) - try: - response = requests.get( - socket_io_url, - timeout=60 # 60 seconds timeout to recieve errors faster. 
- ) - except requests.exceptions.Timeout as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Timed out connecting to server: {0}.'.format(error) - ) - except requests.exceptions.SSLError as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to negotiate SSL with server: {0}.'.format(error) - ) - except requests.exceptions.ConnectionError as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to connect to server: {0}.'.format(error) - ) - else: - status = response.status_code - if status != 200: - raise ftrack_api.exception.EventHubConnectionError( - 'Received unexpected status code {0}.'.format(status) - ) - - # Parse result and return session information. - parts = response.text.split(':') - return SocketIoSession( - parts[0], - parts[1], - parts[3].split(',') - ) - - def _add_packet_callback(self, callback): - '''Store callback against a new unique packet ID. - - Return the unique packet ID. - - ''' - with self._lock: - self._unique_packet_id += 1 - unique_identifier = self._unique_packet_id - - self._packet_callbacks[unique_identifier] = callback - - return '{0}+'.format(unique_identifier) - - def _pop_packet_callback(self, packet_identifier): - '''Pop and return callback for *packet_identifier*.''' - return self._packet_callbacks.pop(packet_identifier) - - def _emit_event_packet(self, namespace, event, callback): - '''Send *event* packet under *namespace*.''' - data = self._encode( - dict(name=namespace, args=[event]) - ) - self._send_packet( - self._code_name_mapping['event'], data=data, callback=callback - ) - - def _acknowledge_packet(self, packet_identifier, *args): - '''Send acknowledgement of packet with *packet_identifier*.''' - packet_identifier = packet_identifier.rstrip('+') - data = str(packet_identifier) - if args: - data += '+{1}'.format(self._encode(args)) - - self._send_packet(self._code_name_mapping['acknowledge'], data=data) - - def _send_packet(self, code, data='', callback=None): - 
'''Send packet via connection.''' - path = '' - packet_identifier = ( - self._add_packet_callback(callback) if callback else '' - ) - packet_parts = (str(code), packet_identifier, path, data) - packet = ':'.join(packet_parts) - - try: - self._connection.send(packet) - self.logger.debug(L(u'Sent packet: {0}', packet)) - except socket.error as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Failed to send packet: {0}'.format(error) - ) - - def _receive_packet(self): - '''Receive and return packet via connection.''' - try: - packet = self._connection.recv() - except Exception as error: - raise ftrack_api.exception.EventHubConnectionError( - 'Error receiving packet: {0}'.format(error) - ) - - try: - parts = packet.split(':', 3) - except AttributeError: - raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) - ) - - code, packet_identifier, path, data = None, None, None, None - - count = len(parts) - if count == 4: - code, packet_identifier, path, data = parts - elif count == 3: - code, packet_identifier, path = parts - elif count == 1: - code = parts[0] - else: - raise ftrack_api.exception.EventHubPacketError( - 'Received invalid packet {0}'.format(packet) - ) - - self.logger.debug(L('Received packet: {0}', packet)) - return code, packet_identifier, path, data - - def _handle_packet(self, code, packet_identifier, path, data): - '''Handle packet received from server.''' - code_name = self._code_name_mapping[code] - - if code_name == 'connect': - self.logger.debug('Connected to event server.') - event = ftrack_api.event.base.Event('ftrack.meta.connected') - self._prepare_event(event) - self._event_queue.put(event) - - elif code_name == 'disconnect': - self.logger.debug('Disconnected from event server.') - if not self._intentional_disconnect: - self.logger.debug( - 'Disconnected unexpectedly. Attempting to reconnect.' 
- ) - try: - self.reconnect( - attempts=self._auto_reconnect_attempts, - delay=self._auto_reconnect_delay - ) - except ftrack_api.exception.EventHubConnectionError: - self.logger.debug('Failed to reconnect automatically.') - else: - self.logger.debug('Reconnected successfully.') - - if not self.connected: - event = ftrack_api.event.base.Event('ftrack.meta.disconnected') - self._prepare_event(event) - self._event_queue.put(event) - - elif code_name == 'heartbeat': - # Reply with heartbeat. - self._send_packet(self._code_name_mapping['heartbeat']) - - elif code_name == 'message': - self.logger.debug(L('Message received: {0}', data)) - - elif code_name == 'event': - payload = self._decode(data) - args = payload.get('args', []) - - if len(args) == 1: - event_payload = args[0] - if isinstance(event_payload, collections.Mapping): - try: - event = ftrack_api.event.base.Event(**event_payload) - except Exception: - self.logger.exception(L( - 'Failed to convert payload into event: {0}', - event_payload - )) - return - - self._event_queue.put(event) - - elif code_name == 'acknowledge': - parts = data.split('+', 1) - acknowledged_packet_identifier = int(parts[0]) - args = [] - if len(parts) == 2: - args = self._decode(parts[1]) - - try: - callback = self._pop_packet_callback( - acknowledged_packet_identifier - ) - except KeyError: - pass - else: - callback(*args) - - elif code_name == 'error': - self.logger.error(L('Event server reported error: {0}.', data)) - - else: - self.logger.debug(L('{0}: {1}', code_name, data)) - - def _encode(self, data): - '''Return *data* encoded as JSON formatted string.''' - return json.dumps( - data, - default=self._encode_object_hook, - ensure_ascii=False - ) - - def _encode_object_hook(self, item): - '''Return *item* transformed for encoding.''' - if isinstance(item, ftrack_api.event.base.Event): - # Convert to dictionary for encoding. - item = dict(**item) - - if 'in_reply_to_event' in item: - # Convert keys to server convention. 
- item['inReplyToEvent'] = item.pop('in_reply_to_event') - - return item - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def _decode(self, string): - '''Return decoded JSON *string* as Python object.''' - return json.loads(string, object_hook=self._decode_object_hook) - - def _decode_object_hook(self, item): - '''Return *item* transformed.''' - if isinstance(item, collections.Mapping): - if 'inReplyToEvent' in item: - item['in_reply_to_event'] = item.pop('inReplyToEvent') - - return item - - -class _SubscriptionContext(object): - '''Context manager for a one-off subscription.''' - - def __init__(self, hub, subscription, callback, subscriber, priority): - '''Initialise context.''' - self._hub = hub - self._subscription = subscription - self._callback = callback - self._subscriber = subscriber - self._priority = priority - self._subscriberIdentifier = None - - def __enter__(self): - '''Enter context subscribing callback to topic.''' - self._subscriberIdentifier = self._hub.subscribe( - self._subscription, self._callback, subscriber=self._subscriber, - priority=self._priority - ) - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context unsubscribing callback from topic.''' - self._hub.unsubscribe(self._subscriberIdentifier) - - -class _ProcessorThread(threading.Thread): - '''Process messages from server.''' - - daemon = True - - def __init__(self, client): - '''Initialise thread with Socket.IO *client* instance.''' - super(_ProcessorThread, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' 
+ self.__class__.__name__ - ) - self.client = client - self.done = threading.Event() - - def run(self): - '''Perform work in thread.''' - while not self.done.is_set(): - try: - code, packet_identifier, path, data = self.client._receive_packet() - self.client._handle_packet(code, packet_identifier, path, data) - - except ftrack_api.exception.EventHubPacketError as error: - self.logger.debug(L('Ignoring invalid packet: {0}', error)) - continue - - except ftrack_api.exception.EventHubConnectionError: - self.cancel() - - # Fake a disconnection event in order to trigger reconnection - # when necessary. - self.client._handle_packet('0', '', '', '') - - break - - except Exception as error: - self.logger.debug(L('Aborting processor thread: {0}', error)) - self.cancel() - break - - def cancel(self): - '''Cancel work as soon as possible.''' - self.done.set() diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py deleted file mode 100644 index 0d38463a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscriber.py +++ /dev/null @@ -1,27 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.event.subscription - - -class Subscriber(object): - '''Represent event subscriber.''' - - def __init__(self, subscription, callback, metadata, priority): - '''Initialise subscriber.''' - self.subscription = ftrack_api.event.subscription.Subscription( - subscription - ) - self.callback = callback - self.metadata = metadata - self.priority = priority - - def __str__(self): - '''Return string representation.''' - return '<{0} metadata={1} subscription="{2}">'.format( - self.__class__.__name__, self.metadata, self.subscription - ) - - def interested_in(self, event): - '''Return whether subscriber interested in *event*.''' - return self.subscription.includes(event) diff --git 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py deleted file mode 100644 index 0b208d99..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/event/subscription.py +++ /dev/null @@ -1,23 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.event.expression - - -class Subscription(object): - '''Represent a subscription.''' - - parser = ftrack_api.event.expression.Parser() - - def __init__(self, subscription): - '''Initialise with *subscription*.''' - self._subscription = subscription - self._expression = self.parser.parse(subscription) - - def __str__(self): - '''Return string representation.''' - return self._subscription - - def includes(self, event): - '''Return whether subscription includes *event*.''' - return self._expression.match(event) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py deleted file mode 100644 index 8a2eb9bc..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/exception.py +++ /dev/null @@ -1,392 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import traceback - -import ftrack_api.entity.base - - -class Error(Exception): - '''ftrack specific error.''' - - default_message = 'Unspecified error occurred.' - - def __init__(self, message=None, details=None): - '''Initialise exception with *message*. - - If *message* is None, the class 'default_message' will be used. - - *details* should be a mapping of extra information that can be used in - the message and also to provide more context. 
- - ''' - if message is None: - message = self.default_message - - self.message = message - self.details = details - if self.details is None: - self.details = {} - - self.traceback = traceback.format_exc() - - def __str__(self): - '''Return string representation.''' - keys = {} - for key, value in self.details.iteritems(): - if isinstance(value, unicode): - value = value.encode(sys.getfilesystemencoding()) - keys[key] = value - - return str(self.message.format(**keys)) - - -class AuthenticationError(Error): - '''Raise when an authentication error occurs.''' - - default_message = 'Authentication error.' - - -class ServerError(Error): - '''Raise when the server reports an error.''' - - default_message = 'Server reported error processing request.' - - -class ServerCompatibilityError(ServerError): - '''Raise when server appears incompatible.''' - - default_message = 'Server incompatible.' - - -class NotFoundError(Error): - '''Raise when something that should exist is not found.''' - - default_message = 'Not found.' - - -class NotUniqueError(Error): - '''Raise when unique value required and duplicate detected.''' - - default_message = 'Non-unique value detected.' - - -class IncorrectResultError(Error): - '''Raise when a result is incorrect.''' - - default_message = 'Incorrect result detected.' - - -class NoResultFoundError(IncorrectResultError): - '''Raise when a result was expected but no result was found.''' - - default_message = 'Expected result, but no result was found.' - - -class MultipleResultsFoundError(IncorrectResultError): - '''Raise when a single result expected, but multiple results found.''' - - default_message = 'Expected single result, but received multiple results.' - - -class EntityTypeError(Error): - '''Raise when an entity type error occurs.''' - - default_message = 'Entity type error.' 
- - -class UnrecognisedEntityTypeError(EntityTypeError): - '''Raise when an unrecognised entity type detected.''' - - default_message = 'Entity type "{entity_type}" not recognised.' - - def __init__(self, entity_type, **kw): - '''Initialise with *entity_type* that is unrecognised.''' - kw.setdefault('details', {}).update(dict( - entity_type=entity_type - )) - super(UnrecognisedEntityTypeError, self).__init__(**kw) - - -class OperationError(Error): - '''Raise when an operation error occurs.''' - - default_message = 'Operation error.' - - -class InvalidStateError(Error): - '''Raise when an invalid state detected.''' - - default_message = 'Invalid state.' - - -class InvalidStateTransitionError(InvalidStateError): - '''Raise when an invalid state transition detected.''' - - default_message = ( - 'Invalid transition from {current_state!r} to {target_state!r} state ' - 'for entity {entity!r}' - ) - - def __init__(self, current_state, target_state, entity, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - current_state=current_state, - target_state=target_state, - entity=entity - )) - super(InvalidStateTransitionError, self).__init__(**kw) - - -class AttributeError(Error): - '''Raise when an error related to an attribute occurs.''' - - default_message = 'Attribute error.' - - -class ImmutableAttributeError(AttributeError): - '''Raise when modification of immutable attribute attempted.''' - - default_message = ( - 'Cannot modify value of immutable {attribute.name!r} attribute.' - ) - - def __init__(self, attribute, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - attribute=attribute - )) - super(ImmutableAttributeError, self).__init__(**kw) - - -class CollectionError(Error): - '''Raise when an error related to collections occurs.''' - - default_message = 'Collection error.' 
- - def __init__(self, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - collection=collection - )) - super(CollectionError, self).__init__(**kw) - - -class ImmutableCollectionError(CollectionError): - '''Raise when modification of immutable collection attempted.''' - - default_message = ( - 'Cannot modify value of immutable collection {collection!r}.' - ) - - -class DuplicateItemInCollectionError(CollectionError): - '''Raise when duplicate item in collection detected.''' - - default_message = ( - 'Item {item!r} already exists in collection {collection!r}.' - ) - - def __init__(self, item, collection, **kw): - '''Initialise error.''' - kw.setdefault('details', {}).update(dict( - item=item - )) - super(DuplicateItemInCollectionError, self).__init__(collection, **kw) - - -class ParseError(Error): - '''Raise when a parsing error occurs.''' - - default_message = 'Failed to parse.' - - -class EventHubError(Error): - '''Raise when issues related to event hub occur.''' - - default_message = 'Event hub error occurred.' - - -class EventHubConnectionError(EventHubError): - '''Raise when event hub encounters connection problem.''' - - default_message = 'Event hub is not connected.' - - -class EventHubPacketError(EventHubError): - '''Raise when event hub encounters an issue with a packet.''' - - default_message = 'Invalid packet.' - - -class PermissionDeniedError(Error): - '''Raise when permission is denied.''' - - default_message = 'Permission denied.' - - -class LocationError(Error): - '''Base for errors associated with locations.''' - - default_message = 'Unspecified location error' - - -class ComponentNotInAnyLocationError(LocationError): - '''Raise when component not available in any location.''' - - default_message = 'Component not available in any location.' 
- - -class ComponentNotInLocationError(LocationError): - '''Raise when component(s) not in location.''' - - default_message = ( - 'Component(s) {formatted_components} not found in location {location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentNotInLocationError, self).__init__(**kw) - - -class ComponentInLocationError(LocationError): - '''Raise when component(s) already exists in location.''' - - default_message = ( - 'Component(s) {formatted_components} already exist in location ' - '{location}.' - ) - - def __init__(self, components, location, **kw): - '''Initialise with *components* and *location*.''' - if isinstance(components, ftrack_api.entity.base.Entity): - components = [components] - - kw.setdefault('details', {}).update(dict( - components=components, - formatted_components=', '.join( - [str(component) for component in components] - ), - location=location - )) - - super(ComponentInLocationError, self).__init__(**kw) - - -class AccessorError(Error): - '''Base for errors associated with accessors.''' - - default_message = 'Unspecified accessor error' - - -class AccessorOperationFailedError(AccessorError): - '''Base for failed operations on accessors.''' - - default_message = 'Operation {operation} failed: {error}' - - def __init__( - self, operation='', resource_identifier=None, error=None, **kw - ): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier, - error=error - )) - super(AccessorOperationFailedError, self).__init__(**kw) - - -class AccessorUnsupportedOperationError(AccessorOperationFailedError): - '''Raise when operation is unsupported.''' - - 
default_message = 'Operation {operation} unsupported.' - - -class AccessorPermissionDeniedError(AccessorOperationFailedError): - '''Raise when permission denied.''' - - default_message = ( - 'Cannot {operation} {resource_identifier}. Permission denied.' - ) - - -class AccessorResourceIdentifierError(AccessorError): - '''Raise when a error related to a resource_identifier occurs.''' - - default_message = 'Resource identifier is invalid: {resource_identifier}.' - - def __init__(self, resource_identifier, **kw): - kw.setdefault('details', {}).update(dict( - resource_identifier=resource_identifier - )) - super(AccessorResourceIdentifierError, self).__init__(**kw) - - -class AccessorFilesystemPathError(AccessorResourceIdentifierError): - '''Raise when a error related to an accessor filesystem path occurs.''' - - default_message = ( - 'Could not determine filesystem path from resource identifier: ' - '{resource_identifier}.' - ) - - -class AccessorResourceError(AccessorError): - '''Base for errors associated with specific resource.''' - - default_message = 'Unspecified resource error: {resource_identifier}' - - def __init__(self, operation='', resource_identifier=None, error=None, - **kw): - kw.setdefault('details', {}).update(dict( - operation=operation, - resource_identifier=resource_identifier - )) - super(AccessorResourceError, self).__init__(**kw) - - -class AccessorResourceNotFoundError(AccessorResourceError): - '''Raise when a required resource is not found.''' - - default_message = 'Resource not found: {resource_identifier}' - - -class AccessorParentResourceNotFoundError(AccessorResourceError): - '''Raise when a parent resource (such as directory) is not found.''' - - default_message = 'Parent resource is missing: {resource_identifier}' - - -class AccessorResourceInvalidError(AccessorResourceError): - '''Raise when a resource is not the right type.''' - - default_message = 'Resource invalid: {resource_identifier}' - - -class 
AccessorContainerNotEmptyError(AccessorResourceError): - '''Raise when container is not empty.''' - - default_message = 'Container is not empty: {resource_identifier}' - - -class StructureError(Error): - '''Base for errors associated with structures.''' - - default_message = 'Unspecified structure error' - - -class ConnectionClosedError(Error): - '''Raise when attempt to use closed connection detected.''' - - default_message = "Connection closed." diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py deleted file mode 100644 index c282fcc8..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/formatter.py +++ /dev/null @@ -1,131 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import termcolor - -import ftrack_api.entity.base -import ftrack_api.collection -import ftrack_api.symbol -import ftrack_api.inspection - - -#: Useful filters to pass to :func:`format`.` -FILTER = { - 'ignore_unset': ( - lambda entity, name, value: value is not ftrack_api.symbol.NOT_SET - ) -} - - -def format( - entity, formatters=None, attribute_filter=None, recursive=False, - indent=0, indent_first_line=True, _seen=None -): - '''Return formatted string representing *entity*. - - *formatters* can be used to customise formatting of elements. It should be a - mapping with one or more of the following keys: - - * header - Used to format entity type. - * label - Used to format attribute names. - - Specify an *attribute_filter* to control which attributes to include. By - default all attributes are included. The *attribute_filter* should be a - callable that accepts `(entity, attribute_name, attribute_value)` and - returns True if the attribute should be included in the output. 
For example, - to filter out all unset values:: - - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] - - If *recursive* is True then recurse into Collections and format each entity - present. - - *indent* specifies the overall indentation in spaces of the formatted text, - whilst *indent_first_line* determines whether to apply that indent to the - first generated line. - - .. warning:: - - Iterates over all *entity* attributes which may cause multiple queries - to the server. Turn off auto populating in the session to prevent this. - - ''' - # Initialise default formatters. - if formatters is None: - formatters = dict() - - formatters.setdefault( - 'header', lambda text: termcolor.colored( - text, 'white', 'on_blue', attrs=['bold'] - ) - ) - formatters.setdefault( - 'label', lambda text: termcolor.colored( - text, 'blue', attrs=['bold'] - ) - ) - - # Determine indents. - spacer = ' ' * indent - if indent_first_line: - first_line_spacer = spacer - else: - first_line_spacer = '' - - # Avoid infinite recursion on circular references. 
- if _seen is None: - _seen = set() - - identifier = str(ftrack_api.inspection.identity(entity)) - if identifier in _seen: - return ( - first_line_spacer + - formatters['header'](entity.entity_type) + '{...}' - ) - - _seen.add(identifier) - information = list() - - information.append( - first_line_spacer + formatters['header'](entity.entity_type) - ) - for key, value in sorted(entity.items()): - if attribute_filter is not None: - if not attribute_filter(entity, key, value): - continue - - child_indent = indent + len(key) + 3 - - if isinstance(value, ftrack_api.entity.base.Entity): - value = format( - value, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=False, - _seen=_seen.copy() - ) - - if isinstance(value, ftrack_api.collection.Collection): - if recursive: - child_values = [] - for index, child in enumerate(value): - child_value = format( - child, - formatters=formatters, - attribute_filter=attribute_filter, - recursive=recursive, - indent=child_indent, - indent_first_line=index != 0, - _seen=_seen.copy() - ) - child_values.append(child_value) - - value = '\n'.join(child_values) - - information.append( - spacer + u' {0}: {1}'.format(formatters['label'](key), value) - ) - - return '\n'.join(information) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py deleted file mode 100644 index d8b81520..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/inspection.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import collections - -import ftrack_api.symbol -import ftrack_api.operation - - -def identity(entity): - '''Return unique identity of *entity*.''' - return ( - str(entity.entity_type), - primary_key(entity).values() - ) - - -def primary_key(entity): - '''Return primary key of 
*entity* as an ordered mapping of {field: value}. - - To get just the primary key values:: - - primary_key(entity).values() - - ''' - primary_key = collections.OrderedDict() - for name in entity.primary_key_attributes: - value = entity[name] - if value is ftrack_api.symbol.NOT_SET: - raise KeyError( - 'Missing required value for primary key attribute "{0}" on ' - 'entity {1!r}.'.format(name, entity) - ) - - primary_key[str(name)] = str(value) - - return primary_key - - -def _state(operation, state): - '''Return state following *operation* against current *state*.''' - if ( - isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.CREATED - - elif ( - isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ) - and state is ftrack_api.symbol.NOT_SET - ): - state = ftrack_api.symbol.MODIFIED - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - state = ftrack_api.symbol.DELETED - - return state - - -def state(entity): - '''Return current *entity* state. - - .. seealso:: :func:`ftrack_api.inspection.states`. - - ''' - value = ftrack_api.symbol.NOT_SET - - for operation in entity.session.recorded_operations: - # Determine if operation refers to an entity and whether that entity - # is *entity*. - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - and operation.entity_type == entity.entity_type - and operation.entity_key == primary_key(entity) - ): - value = _state(operation, value) - - return value - - -def states(entities): - '''Return current states of *entities*. - - An optimised function for determining states of multiple entities in one - go. - - .. note:: - - All *entities* should belong to the same session. - - .. seealso:: :func:`ftrack_api.inspection.state`. 
- - ''' - if not entities: - return [] - - session = entities[0].session - - entities_by_identity = collections.OrderedDict() - for entity in entities: - key = (entity.entity_type, str(primary_key(entity).values())) - entities_by_identity[key] = ftrack_api.symbol.NOT_SET - - for operation in session.recorded_operations: - if ( - isinstance( - operation, - ( - ftrack_api.operation.CreateEntityOperation, - ftrack_api.operation.UpdateEntityOperation, - ftrack_api.operation.DeleteEntityOperation - ) - ) - ): - key = (operation.entity_type, str(operation.entity_key.values())) - if key not in entities_by_identity: - continue - - value = _state(operation, entities_by_identity[key]) - entities_by_identity[key] = value - - return entities_by_identity.values() diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py deleted file mode 100644 index 41969c5b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/logging.py +++ /dev/null @@ -1,43 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import functools -import warnings - - -def deprecation_warning(message): - def decorator(function): - @functools.wraps(function) - def wrapper(*args, **kwargs): - warnings.warn( - message, - PendingDeprecationWarning - ) - return function(*args, **kwargs) - return wrapper - - return decorator - - -class LazyLogMessage(object): - '''A log message that can be evaluated lazily for improved performance. - - Example:: - - # Formatting of string will not occur unless debug logging enabled. 
- logger.debug(LazyLogMessage( - 'Hello {0}', 'world' - )) - - ''' - - def __init__(self, message, *args, **kwargs): - '''Initialise with *message* format string and arguments.''' - self.message = message - self.args = args - self.kwargs = kwargs - - def __str__(self): - '''Return string representation.''' - return self.message.format(*self.args, **self.kwargs) - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py deleted file mode 100644 index bb3bb4ee..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/operation.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy - - -class Operations(object): - '''Stack of operations.''' - - def __init__(self): - '''Initialise stack.''' - self._stack = [] - super(Operations, self).__init__() - - def clear(self): - '''Clear all operations.''' - del self._stack[:] - - def push(self, operation): - '''Push *operation* onto stack.''' - self._stack.append(operation) - - def pop(self): - '''Pop and return most recent operation from stack.''' - return self._stack.pop() - - def __len__(self): - '''Return count of operations.''' - return len(self._stack) - - def __iter__(self): - '''Return iterator over operations.''' - return iter(self._stack) - - -class Operation(object): - '''Represent an operation.''' - - -class CreateEntityOperation(Operation): - '''Represent create entity operation.''' - - def __init__(self, entity_type, entity_key, entity_data): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. 
- - *entity_data* should be a mapping of the initial data to populate the - entity with when creating. - - .. note:: - - Shallow copies will be made of each value in *entity_data*. - - ''' - super(CreateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.entity_data = {} - for key, value in entity_data.items(): - self.entity_data[key] = copy.copy(value) - - -class UpdateEntityOperation(Operation): - '''Represent update entity operation.''' - - def __init__( - self, entity_type, entity_key, attribute_name, old_value, new_value - ): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. - - *attribute_name* should be the string name of the attribute being - modified and *old_value* and *new_value* should reflect the change in - value. - - .. note:: - - Shallow copies will be made of both *old_value* and *new_value*. - - ''' - super(UpdateEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - self.attribute_name = attribute_name - self.old_value = copy.copy(old_value) - self.new_value = copy.copy(new_value) - - -class DeleteEntityOperation(Operation): - '''Represent delete entity operation.''' - - def __init__(self, entity_type, entity_key): - '''Initialise operation. - - *entity_type* should be the type of entity in string form (as returned - from :attr:`ftrack_api.entity.base.Entity.entity_type`). - - *entity_key* should be the unique key for the entity and should follow - the form returned from :func:`ftrack_api.inspection.primary_key`. 
- - ''' - super(DeleteEntityOperation, self).__init__() - self.entity_type = entity_type - self.entity_key = entity_key - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py deleted file mode 100644 index 2c7a9a45..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/plugin.py +++ /dev/null @@ -1,121 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import logging -import os -import uuid -import imp -import inspect - - -def discover(paths, positional_arguments=None, keyword_arguments=None): - '''Find and load plugins in search *paths*. - - Each discovered module should implement a register function that accepts - *positional_arguments* and *keyword_arguments* as \*args and \*\*kwargs - respectively. - - If a register function does not accept variable arguments, then attempt to - only pass accepted arguments to the function by inspecting its signature. - - ''' - logger = logging.getLogger(__name__ + '.discover') - - if positional_arguments is None: - positional_arguments = [] - - if keyword_arguments is None: - keyword_arguments = {} - - for path in paths: - # Ignore empty paths that could resolve to current directory. 
- path = path.strip() - if not path: - continue - - for base, directories, filenames in os.walk(path): - for filename in filenames: - name, extension = os.path.splitext(filename) - if extension != '.py': - continue - - module_path = os.path.join(base, filename) - unique_name = uuid.uuid4().hex - - try: - module = imp.load_source(unique_name, module_path) - except Exception as error: - logger.warning( - 'Failed to load plugin from "{0}": {1}' - .format(module_path, error) - ) - continue - - try: - module.register - except AttributeError: - logger.warning( - 'Failed to load plugin that did not define a ' - '"register" function at the module level: {0}' - .format(module_path) - ) - else: - # Attempt to only pass arguments that are accepted by the - # register function. - specification = inspect.getargspec(module.register) - - selected_positional_arguments = positional_arguments - selected_keyword_arguments = keyword_arguments - - if ( - not specification.varargs and - len(positional_arguments) > len(specification.args) - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' - ) - - selected_positional_arguments = positional_arguments[ - len(specification.args): - ] - selected_keyword_arguments = {} - - elif not specification.keywords: - # Remove arguments that have been passed as positionals. - remainder = specification.args[ - len(positional_arguments): - ] - - # Determine remaining available keyword arguments. - defined_keyword_arguments = [] - if specification.defaults: - defined_keyword_arguments = specification.args[ - -len(specification.defaults): - ] - - remaining_keyword_arguments = set([ - keyword_argument for keyword_argument - in defined_keyword_arguments - if keyword_argument in remainder - ]) - - if not set(keyword_arguments.keys()).issubset( - remaining_keyword_arguments - ): - logger.warning( - 'Culling passed arguments to match register ' - 'function signature.' 
- ) - selected_keyword_arguments = { - key: value - for key, value in keyword_arguments.items() - if key in remaining_keyword_arguments - } - - module.register( - *selected_positional_arguments, - **selected_keyword_arguments - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py deleted file mode 100644 index ea101a29..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/query.py +++ /dev/null @@ -1,202 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import re -import collections - -import ftrack_api.exception - - -class QueryResult(collections.Sequence): - '''Results from a query.''' - - OFFSET_EXPRESSION = re.compile('(?Poffset (?P\d+))') - LIMIT_EXPRESSION = re.compile('(?Plimit (?P\d+))') - - def __init__(self, session, expression, page_size=500): - '''Initialise result set. - - *session* should be an instance of :class:`ftrack_api.session.Session` - that will be used for executing the query *expression*. - - *page_size* should be an integer specifying the maximum number of - records to fetch in one request allowing the results to be fetched - incrementally in a transparent manner for optimal performance. Any - offset or limit specified in *expression* are honoured for final result - set, but intermediate queries may be issued with different offsets and - limits in order to fetch pages. When an embedded limit is smaller than - the given *page_size* it will be used instead and no paging will take - place. - - .. warning:: - - Setting *page_size* to a very large amount may negatively impact - performance of not only the caller, but the server in general. 
- - ''' - super(QueryResult, self).__init__() - self._session = session - self._results = [] - - ( - self._expression, - self._offset, - self._limit - ) = self._extract_offset_and_limit(expression) - - self._page_size = page_size - if self._limit is not None and self._limit < self._page_size: - # Optimise case where embedded limit is less than fetching a - # single page. - self._page_size = self._limit - - self._next_offset = self._offset - if self._next_offset is None: - # Initialise with zero offset. - self._next_offset = 0 - - def _extract_offset_and_limit(self, expression): - '''Process *expression* extracting offset and limit. - - Return (expression, offset, limit). - - ''' - offset = None - match = self.OFFSET_EXPRESSION.search(expression) - if match: - offset = int(match.group('value')) - expression = ( - expression[:match.start('offset')] + - expression[match.end('offset'):] - ) - - limit = None - match = self.LIMIT_EXPRESSION.search(expression) - if match: - limit = int(match.group('value')) - expression = ( - expression[:match.start('limit')] + - expression[match.end('limit'):] - ) - - return expression.strip(), offset, limit - - def __getitem__(self, index): - '''Return value at *index*.''' - while self._can_fetch_more() and index >= len(self._results): - self._fetch_more() - - return self._results[index] - - def __len__(self): - '''Return number of items.''' - while self._can_fetch_more(): - self._fetch_more() - - return len(self._results) - - def _can_fetch_more(self): - '''Return whether more results are available to fetch.''' - return self._next_offset is not None - - def _fetch_more(self): - '''Fetch next page of results if available.''' - if not self._can_fetch_more(): - return - - expression = '{0} offset {1} limit {2}'.format( - self._expression, self._next_offset, self._page_size - ) - records, metadata = self._session._query(expression) - self._results.extend(records) - - if self._limit is not None and (len(self._results) >= self._limit): - # 
Original limit reached. - self._next_offset = None - del self._results[self._limit:] - else: - # Retrieve next page offset from returned metadata. - self._next_offset = metadata.get('next', {}).get('offset', None) - - def all(self): - '''Fetch and return all data.''' - return list(self) - - def one(self): - '''Return exactly one single result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - Raise :exc:`ValueError` if an existing offset is already present in the - expression as offset is inappropriate when expecting a single item. - - Raise :exc:`~ftrack_api.exception.MultipleResultsFoundError` if more - than one result was available or - :exc:`~ftrack_api.exception.NoResultFoundError` if no results were - available. - - .. note:: - - Both errors subclass - :exc:`~ftrack_api.exception.IncorrectResultError` if you want to - catch only one error type. - - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - if self._offset is not None: - raise ValueError( - 'Expression contains an offset clause which does not make ' - 'sense when selecting a single item.' - ) - - # Apply custom limit as optimisation. A limit of 2 is used rather than - # 1 so that it is possible to test for multiple matching entries - # case. - expression += ' limit 2' - - results, metadata = self._session._query(expression) - - if not results: - raise ftrack_api.exception.NoResultFoundError() - - if len(results) != 1: - raise ftrack_api.exception.MultipleResultsFoundError() - - return results[0] - - def first(self): - '''Return first matching result from query by applying a limit. - - Raise :exc:`ValueError` if an existing limit is already present in the - expression. - - If no matching result available return None. 
- - ''' - expression = self._expression - - if self._limit is not None: - raise ValueError( - 'Expression already contains a limit clause.' - ) - - # Apply custom offset if present. - if self._offset is not None: - expression += ' offset {0}'.format(self._offset) - - # Apply custom limit as optimisation. - expression += ' limit 1' - - results, metadata = self._session._query(expression) - - if results: - return results[0] - - return None diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py deleted file mode 100644 index 1aab07ed..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py deleted file mode 100644 index ee069b57..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/resource_identifier_transformer/base.py +++ /dev/null @@ -1,50 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - - -class ResourceIdentifierTransformer(object): - '''Transform resource identifiers. - - Provide ability to modify resource identifier before it is stored centrally - (:meth:`encode`), or after it has been retrieved, but before it is used - locally (:meth:`decode`). - - For example, you might want to decompose paths into a set of key, value - pairs to store centrally and then compose a path from those values when - reading back. - - .. 
note:: - - This is separate from any transformations an - :class:`ftrack_api.accessor.base.Accessor` may perform and is targeted - towards common transformations. - - ''' - - def __init__(self, session): - '''Initialise resource identifier transformer. - - *session* should be the :class:`ftrack_api.session.Session` instance - to use for communication with the server. - - ''' - self.session = session - super(ResourceIdentifierTransformer, self).__init__() - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return resource_identifier - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return resource_identifier diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py deleted file mode 100644 index 78f9d135..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/session.py +++ /dev/null @@ -1,2513 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from __future__ import absolute_import - -import json -import logging -import collections -import datetime -import os -import getpass -import functools -import itertools -import distutils.version -import hashlib -import appdirs -import threading -import atexit - -import requests -import requests.auth -import arrow -import clique - -import ftrack_api -import ftrack_api.exception -import ftrack_api.entity.factory -import ftrack_api.entity.base -import ftrack_api.entity.location -import ftrack_api.cache -import ftrack_api.symbol -import ftrack_api.query -import ftrack_api.attribute -import ftrack_api.collection -import ftrack_api.event.hub -import 
ftrack_api.event.base -import ftrack_api.plugin -import ftrack_api.inspection -import ftrack_api.operation -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.entity_id -import ftrack_api.accessor.server -import ftrack_api._centralized_storage_scenario -import ftrack_api.logging -from ftrack_api.logging import LazyLogMessage as L - -try: - from weakref import WeakMethod -except ImportError: - from ftrack_api._weakref import WeakMethod - - -class SessionAuthentication(requests.auth.AuthBase): - '''Attach ftrack session authentication information to requests.''' - - def __init__(self, api_key, api_user): - '''Initialise with *api_key* and *api_user*.''' - self.api_key = api_key - self.api_user = api_user - super(SessionAuthentication, self).__init__() - - def __call__(self, request): - '''Modify *request* to have appropriate headers.''' - request.headers.update({ - 'ftrack-api-key': self.api_key, - 'ftrack-user': self.api_user - }) - return request - - -class Session(object): - '''An isolated session for interaction with an ftrack server.''' - - def __init__( - self, server_url=None, api_key=None, api_user=None, auto_populate=True, - plugin_paths=None, cache=None, cache_key_maker=None, - auto_connect_event_hub=None, schema_cache_path=None, - plugin_arguments=None - ): - '''Initialise session. - - *server_url* should be the URL of the ftrack server to connect to - including any port number. If not specified attempt to look up from - :envvar:`FTRACK_SERVER`. - - *api_key* should be the API key to use for authentication whilst - *api_user* should be the username of the user in ftrack to record - operations against. If not specified, *api_key* should be retrieved - from :envvar:`FTRACK_API_KEY` and *api_user* from - :envvar:`FTRACK_API_USER`. - - If *auto_populate* is True (the default), then accessing entity - attributes will cause them to be automatically fetched from the server - if they are not already. 
This flag can be changed on the session - directly at any time. - - *plugin_paths* should be a list of paths to search for plugins. If not - specified, default to looking up :envvar:`FTRACK_EVENT_PLUGIN_PATH`. - - *cache* should be an instance of a cache that fulfils the - :class:`ftrack_api.cache.Cache` interface and will be used as the cache - for the session. It can also be a callable that will be called with the - session instance as sole argument. The callable should return ``None`` - if a suitable cache could not be configured, but session instantiation - can continue safely. - - .. note:: - - The session will add the specified cache to a pre-configured layered - cache that specifies the top level cache as a - :class:`ftrack_api.cache.MemoryCache`. Therefore, it is unnecessary - to construct a separate memory cache for typical behaviour. Working - around this behaviour or removing the memory cache can lead to - unexpected behaviour. - - *cache_key_maker* should be an instance of a key maker that fulfils the - :class:`ftrack_api.cache.KeyMaker` interface and will be used to - generate keys for objects being stored in the *cache*. If not specified, - a :class:`~ftrack_api.cache.StringKeyMaker` will be used. - - If *auto_connect_event_hub* is True then embedded event hub will be - automatically connected to the event server and allow for publishing and - subscribing to **non-local** events. If False, then only publishing and - subscribing to **local** events will be possible until the hub is - manually connected using :meth:`EventHub.connect - `. - - .. note:: - - The event hub connection is performed in a background thread to - improve session startup time. If a registered plugin requires a - connected event hub then it should check the event hub connection - status explicitly. Subscribing to events does *not* require a - connected event hub. - - Enable schema caching by setting *schema_cache_path* to a folder path. 
- If not set, :envvar:`FTRACK_API_SCHEMA_CACHE_PATH` will be used to - determine the path to store cache in. If the environment variable is - also not specified then a temporary directory will be used. Set to - `False` to disable schema caching entirely. - - *plugin_arguments* should be an optional mapping (dict) of keyword - arguments to pass to plugin register functions upon discovery. If a - discovered plugin has a signature that is incompatible with the passed - arguments, the discovery mechanism will attempt to reduce the passed - arguments to only those that the plugin accepts. Note that a warning - will be logged in this case. - - ''' - super(Session, self).__init__() - self.logger = logging.getLogger( - __name__ + '.' + self.__class__.__name__ - ) - self._closed = False - - if server_url is None: - server_url = os.environ.get('FTRACK_SERVER') - - if not server_url: - raise TypeError( - 'Required "server_url" not specified. Pass as argument or set ' - 'in environment variable FTRACK_SERVER.' - ) - - self._server_url = server_url - - if api_key is None: - api_key = os.environ.get( - 'FTRACK_API_KEY', - # Backwards compatibility - os.environ.get('FTRACK_APIKEY') - ) - - if not api_key: - raise TypeError( - 'Required "api_key" not specified. Pass as argument or set in ' - 'environment variable FTRACK_API_KEY.' - ) - - self._api_key = api_key - - if api_user is None: - api_user = os.environ.get('FTRACK_API_USER') - if not api_user: - try: - api_user = getpass.getuser() - except Exception: - pass - - if not api_user: - raise TypeError( - 'Required "api_user" not specified. Pass as argument, set in ' - 'environment variable FTRACK_API_USER or one of the standard ' - 'environment variables used by Python\'s getpass module.' - ) - - self._api_user = api_user - - # Currently pending operations. 
- self.recorded_operations = ftrack_api.operation.Operations() - self.record_operations = True - - self.cache_key_maker = cache_key_maker - if self.cache_key_maker is None: - self.cache_key_maker = ftrack_api.cache.StringKeyMaker() - - # Enforce always having a memory cache at top level so that the same - # in-memory instance is returned from session. - self.cache = ftrack_api.cache.LayeredCache([ - ftrack_api.cache.MemoryCache() - ]) - - if cache is not None: - if callable(cache): - cache = cache(self) - - if cache is not None: - self.cache.caches.append(cache) - - self._managed_request = None - self._request = requests.Session() - self._request.auth = SessionAuthentication( - self._api_key, self._api_user - ) - - self.auto_populate = auto_populate - - # Fetch server information and in doing so also check credentials. - self._server_information = self._fetch_server_information() - - # Now check compatibility of server based on retrieved information. - self.check_server_compatibility() - - # Construct event hub and load plugins. - self._event_hub = ftrack_api.event.hub.EventHub( - self._server_url, - self._api_user, - self._api_key, - ) - - self._auto_connect_event_hub_thread = None - if auto_connect_event_hub is True: - # Connect to event hub in background thread so as not to block main - # session usage waiting for event hub connection. - self._auto_connect_event_hub_thread = threading.Thread( - target=self._event_hub.connect - ) - self._auto_connect_event_hub_thread.daemon = True - self._auto_connect_event_hub_thread.start() - - # To help with migration from auto_connect_event_hub default changing - # from True to False. - self._event_hub._deprecation_warning_auto_connect = False - - # Register to auto-close session on exit. 
- atexit.register(WeakMethod(self.close)) - - self._plugin_paths = plugin_paths - if self._plugin_paths is None: - self._plugin_paths = os.environ.get( - 'FTRACK_EVENT_PLUGIN_PATH', '' - ).split(os.pathsep) - - self._discover_plugins(plugin_arguments=plugin_arguments) - - # TODO: Make schemas read-only and non-mutable (or at least without - # rebuilding types)? - if schema_cache_path is not False: - if schema_cache_path is None: - schema_cache_path = appdirs.user_cache_dir() - schema_cache_path = os.environ.get( - 'FTRACK_API_SCHEMA_CACHE_PATH', schema_cache_path - ) - - schema_cache_path = os.path.join( - schema_cache_path, 'ftrack_api_schema_cache.json' - ) - - self.schemas = self._load_schemas(schema_cache_path) - self.types = self._build_entity_type_classes(self.schemas) - - ftrack_api._centralized_storage_scenario.register(self) - - self._configure_locations() - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.ready', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def __enter__(self): - '''Return session as context manager.''' - return self - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit session context, closing session in process.''' - self.close() - - @property - def _request(self): - '''Return request session. - - Raise :exc:`ftrack_api.exception.ConnectionClosedError` if session has - been closed and connection unavailable. 
- - ''' - if self._managed_request is None: - raise ftrack_api.exception.ConnectionClosedError() - - return self._managed_request - - @_request.setter - def _request(self, value): - '''Set request session to *value*.''' - self._managed_request = value - - @property - def closed(self): - '''Return whether session has been closed.''' - return self._closed - - @property - def server_information(self): - '''Return server information such as server version.''' - return self._server_information.copy() - - @property - def server_url(self): - '''Return server ulr used for session.''' - return self._server_url - - @property - def api_user(self): - '''Return username used for session.''' - return self._api_user - - @property - def api_key(self): - '''Return API key used for session.''' - return self._api_key - - @property - def event_hub(self): - '''Return event hub.''' - return self._event_hub - - @property - def _local_cache(self): - '''Return top level memory cache.''' - return self.cache.caches[0] - - def check_server_compatibility(self): - '''Check compatibility with connected server.''' - server_version = self.server_information.get('version') - if server_version is None: - raise ftrack_api.exception.ServerCompatibilityError( - 'Could not determine server version.' - ) - - # Perform basic version check. - if server_version != 'dev': - min_server_version = '3.3.11' - if ( - distutils.version.LooseVersion(min_server_version) - > distutils.version.LooseVersion(server_version) - ): - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0} incompatible with this version of the ' - 'API which requires a server version >= {1}'.format( - server_version, - min_server_version - ) - ) - - def close(self): - '''Close session. - - Close connections to server. Clear any pending operations and local - cache. - - Use this to ensure that session is cleaned up properly after use. 
- - ''' - if self.closed: - self.logger.debug('Session already closed.') - return - - self._closed = True - - self.logger.debug('Closing session.') - if self.recorded_operations: - self.logger.warning( - 'Closing session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Close connections. - self._request.close() - self._request = None - - try: - self.event_hub.disconnect() - if self._auto_connect_event_hub_thread: - self._auto_connect_event_hub_thread.join() - except ftrack_api.exception.EventHubConnectionError: - pass - - self.logger.debug('Session closed.') - - def reset(self): - '''Reset session clearing local state. - - Clear all pending operations and expunge all entities from session. - - Also clear the local cache. If the cache used by the session is a - :class:`~ftrack_api.cache.LayeredCache` then only clear top level cache. - Otherwise, clear the entire cache. - - Plugins are not rediscovered or reinitialised, but certain plugin events - are re-emitted to properly configure session aspects that are dependant - on cache (such as location plugins). - - .. warning:: - - Previously attached entities are not reset in memory and will retain - their state, but should not be used. Doing so will cause errors. - - ''' - if self.recorded_operations: - self.logger.warning( - 'Resetting session with pending operations not persisted.' - ) - - # Clear pending operations. - self.recorded_operations.clear() - - # Clear top level cache (expected to be enforced memory cache). - self._local_cache.clear() - - # Re-configure certain session aspects that may be dependant on cache. 
- self._configure_locations() - - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.reset', - data=dict( - session=self - ) - ), - synchronous=True - ) - - def auto_populating(self, auto_populate): - '''Temporarily set auto populate to *auto_populate*. - - The current setting will be restored automatically when done. - - Example:: - - with session.auto_populating(False): - print entity['name'] - - ''' - return AutoPopulatingContext(self, auto_populate) - - def operation_recording(self, record_operations): - '''Temporarily set operation recording to *record_operations*. - - The current setting will be restored automatically when done. - - Example:: - - with session.operation_recording(False): - entity['name'] = 'change_not_recorded' - - ''' - return OperationRecordingContext(self, record_operations) - - @property - def created(self): - '''Return list of newly created entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.CREATED - ] - - @property - def modified(self): - '''Return list of locally modified entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.MODIFIED - ] - - @property - def deleted(self): - '''Return list of deleted entities.''' - entities = self._local_cache.values() - states = ftrack_api.inspection.states(entities) - - return [ - entity for (entity, state) in itertools.izip(entities, states) - if state is ftrack_api.symbol.DELETED - ] - - def reset_remote(self, reset_type, entity=None): - '''Perform a server side reset. - - *reset_type* is a server side supported reset type, - passing the optional *entity* to perform the option upon. 
- - Please refer to ftrack documentation for a complete list of - supported server side reset types. - ''' - - payload = { - 'action': 'reset_remote', - 'reset_type': reset_type - } - - if entity is not None: - payload.update({ - 'entity_type': entity.entity_type, - 'entity_key': entity.get('id') - }) - - result = self.call( - [payload] - ) - - return result[0]['data'] - - def create(self, entity_type, data=None, reconstructing=False): - '''Create and return an entity of *entity_type* with initial *data*. - - If specified, *data* should be a dictionary of key, value pairs that - should be used to populate attributes on the entity. - - If *reconstructing* is False then create a new entity setting - appropriate defaults for missing data. If True then reconstruct an - existing entity. - - Constructed entity will be automatically :meth:`merged ` - into the session. - - ''' - entity = self._create(entity_type, data, reconstructing=reconstructing) - entity = self.merge(entity) - return entity - - def _create(self, entity_type, data, reconstructing): - '''Create and return an entity of *entity_type* with initial *data*.''' - try: - EntityTypeClass = self.types[entity_type] - except KeyError: - raise ftrack_api.exception.UnrecognisedEntityTypeError(entity_type) - - return EntityTypeClass(self, data=data, reconstructing=reconstructing) - - def ensure(self, entity_type, data, identifying_keys=None): - '''Retrieve entity of *entity_type* with *data*, creating if necessary. - - *data* should be a dictionary of the same form passed to :meth:`create`. - - By default, check for an entity that has matching *data*. If - *identifying_keys* is specified as a list of keys then only consider the - values from *data* for those keys when searching for existing entity. If - *data* is missing an identifying key then raise :exc:`KeyError`. - - If no *identifying_keys* specified then use all of the keys from the - passed *data*. 
Raise :exc:`ValueError` if no *identifying_keys* can be - determined. - - Each key should be a string. - - .. note:: - - Currently only top level scalars supported. To ensure an entity by - looking at relationships, manually issue the :meth:`query` and - :meth:`create` calls. - - If more than one entity matches the determined filter criteria then - raise :exc:`~ftrack_api.exception.MultipleResultsFoundError`. - - If no matching entity found then create entity using supplied *data*. - - If a matching entity is found, then update it if necessary with *data*. - - .. note:: - - If entity created or updated then a :meth:`commit` will be issued - automatically. If this behaviour is undesired, perform the - :meth:`query` and :meth:`create` calls manually. - - Return retrieved or created entity. - - Example:: - - # First time, a new entity with `username=martin` is created. - entity = session.ensure('User', {'username': 'martin'}) - - # After that, the existing entity is retrieved. - entity = session.ensure('User', {'username': 'martin'}) - - # When existing entity retrieved, entity may also be updated to - # match supplied data. - entity = session.ensure( - 'User', {'username': 'martin', 'email': 'martin@example.com'} - ) - - ''' - if not identifying_keys: - identifying_keys = data.keys() - - self.logger.debug(L( - 'Ensuring entity {0!r} with data {1!r} using identifying keys ' - '{2!r}', entity_type, data, identifying_keys - )) - - if not identifying_keys: - raise ValueError( - 'Could not determine any identifying data to check against ' - 'when ensuring {0!r} with data {1!r}. 
Identifying keys: {2!r}' - .format(entity_type, data, identifying_keys) - ) - - expression = '{0} where'.format(entity_type) - criteria = [] - for identifying_key in identifying_keys: - value = data[identifying_key] - - if isinstance(value, basestring): - value = '"{0}"'.format(value) - - elif isinstance( - value, (arrow.Arrow, datetime.datetime, datetime.date) - ): - # Server does not store microsecond or timezone currently so - # need to strip from query. - # TODO: When datetime handling improved, update this logic. - value = ( - arrow.get(value).naive.replace(microsecond=0).isoformat() - ) - value = '"{0}"'.format(value) - - criteria.append('{0} is {1}'.format(identifying_key, value)) - - expression = '{0} {1}'.format( - expression, ' and '.join(criteria) - ) - - try: - entity = self.query(expression).one() - - except ftrack_api.exception.NoResultFoundError: - self.logger.debug('Creating entity as did not already exist.') - - # Create entity. - entity = self.create(entity_type, data) - self.commit() - - else: - self.logger.debug('Retrieved matching existing entity.') - - # Update entity if required. - updated = False - for key, target_value in data.items(): - if entity[key] != target_value: - entity[key] = target_value - updated = True - - if updated: - self.logger.debug('Updating existing entity to match new data.') - self.commit() - - return entity - - def delete(self, entity): - '''Mark *entity* for deletion.''' - if self.record_operations: - self.recorded_operations.push( - ftrack_api.operation.DeleteEntityOperation( - entity.entity_type, - ftrack_api.inspection.primary_key(entity) - ) - ) - - def get(self, entity_type, entity_key): - '''Return entity of *entity_type* with unique *entity_key*. - - First check for an existing entry in the configured cache, otherwise - issue a query to the server. - - If no matching entity found, return None. 
- - ''' - self.logger.debug(L('Get {0} with key {1}', entity_type, entity_key)) - - primary_key_definition = self.types[entity_type].primary_key_attributes - if isinstance(entity_key, basestring): - entity_key = [entity_key] - - if len(entity_key) != len(primary_key_definition): - raise ValueError( - 'Incompatible entity_key {0!r} supplied. Entity type {1} ' - 'expects a primary key composed of {2} values ({3}).' - .format( - entity_key, entity_type, len(primary_key_definition), - ', '.join(primary_key_definition) - ) - ) - - entity = None - try: - entity = self._get(entity_type, entity_key) - - - except KeyError: - - # Query for matching entity. - self.logger.debug( - 'Entity not present in cache. Issuing new query.' - ) - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - expression = '{0} where ({1})'.format( - entity_type, ' and '.join(condition) - ) - - results = self.query(expression).all() - if results: - entity = results[0] - - return entity - - def _get(self, entity_type, entity_key): - '''Return cached entity of *entity_type* with unique *entity_key*. - - Raise :exc:`KeyError` if no such entity in the cache. - - ''' - # Check cache for existing entity emulating - # ftrack_api.inspection.identity result object to pass to key maker. - cache_key = self.cache_key_maker.key( - (str(entity_type), map(str, entity_key)) - ) - self.logger.debug(L( - 'Checking cache for entity with key {0}', cache_key - )) - entity = self.cache.get(cache_key) - self.logger.debug(L( - 'Retrieved existing entity from cache: {0} at {1}', - entity, id(entity) - )) - - return entity - - def query(self, expression, page_size=500): - '''Query against remote data according to *expression*. - - *expression* is not executed directly. Instead return an - :class:`ftrack_api.query.QueryResult` instance that will execute remote - call on access. 
- - *page_size* specifies the maximum page size that the returned query - result object should be configured with. - - .. seealso:: :ref:`querying` - - ''' - self.logger.debug(L('Query {0!r}', expression)) - - # Add in sensible projections if none specified. Note that this is - # done here rather than on the server to allow local modification of the - # schema setting to include commonly used custom attributes for example. - # TODO: Use a proper parser perhaps? - if not expression.startswith('select'): - entity_type = expression.split(' ', 1)[0] - EntityTypeClass = self.types[entity_type] - projections = EntityTypeClass.default_projections - - expression = 'select {0} from {1}'.format( - ', '.join(projections), - expression - ) - - query_result = ftrack_api.query.QueryResult( - self, expression, page_size=page_size - ) - return query_result - - def _query(self, expression): - '''Execute *query* and return (records, metadata). - - Records will be a list of entities retrieved via the query and metadata - a dictionary of accompanying information about the result set. - - ''' - # TODO: Actually support batching several queries together. - # TODO: Should batches have unique ids to match them up later. - batch = [{ - 'action': 'query', - 'expression': expression - }] - - # TODO: When should this execute? How to handle background=True? - results = self.call(batch) - - # Merge entities into local cache and return merged entities. - data = [] - merged = dict() - for entity in results[0]['data']: - data.append(self._merge_recursive(entity, merged)) - - return data, results[0]['metadata'] - - def merge(self, value, merged=None): - '''Merge *value* into session and return merged value. - - *merged* should be a mapping to record merges during run and should be - used to avoid infinite recursion. If not set will default to a - dictionary. 
- - ''' - if merged is None: - merged = {} - - with self.operation_recording(False): - return self._merge(value, merged) - - def _merge(self, value, merged): - '''Return merged *value*.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if isinstance(value, ftrack_api.entity.base.Entity): - log_debug and self.logger.debug( - 'Merging entity into session: {0} at {1}' - .format(value, id(value)) - ) - - return self._merge_entity(value, merged=merged) - - elif isinstance(value, ftrack_api.collection.Collection): - log_debug and self.logger.debug( - 'Merging collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - elif isinstance(value, ftrack_api.collection.MappedCollectionProxy): - log_debug and self.logger.debug( - 'Merging mapped collection into session: {0!r} at {1}' - .format(value, id(value)) - ) - - merged_collection = [] - for entry in value.collection: - merged_collection.append( - self._merge(entry, merged=merged) - ) - - return merged_collection - - else: - return value - - def _merge_recursive(self, entity, merged=None): - '''Merge *entity* and all its attributes recursivly.''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - attached = self.merge(entity, merged) - - for attribute in entity.attributes: - # Remote attributes. 
- remote_value = attribute.get_remote_value(entity) - - if isinstance( - remote_value, - ( - ftrack_api.entity.base.Entity, - ftrack_api.collection.Collection, - ftrack_api.collection.MappedCollectionProxy - ) - ): - log_debug and self.logger.debug( - 'Merging remote value for attribute {0}.'.format(attribute) - ) - - if isinstance(remote_value, ftrack_api.entity.base.Entity): - self._merge_recursive(remote_value, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.Collection - ): - for entry in remote_value: - self._merge_recursive(entry, merged=merged) - - elif isinstance( - remote_value, ftrack_api.collection.MappedCollectionProxy - ): - for entry in remote_value.collection: - self._merge_recursive(entry, merged=merged) - - return attached - - def _merge_entity(self, entity, merged=None): - '''Merge *entity* into session returning merged entity. - - Merge is recursive so any references to other entities will also be - merged. - - *entity* will never be modified in place. Ensure that the returned - merged entity instance is used. - - ''' - log_debug = self.logger.isEnabledFor(logging.DEBUG) - - if merged is None: - merged = {} - - with self.auto_populating(False): - entity_key = self.cache_key_maker.key( - ftrack_api.inspection.identity(entity) - ) - - # Check whether this entity has already been processed. - attached_entity = merged.get(entity_key) - if attached_entity is not None: - log_debug and self.logger.debug( - 'Entity already processed for key {0} as {1} at {2}' - .format(entity_key, attached_entity, id(attached_entity)) - ) - - return attached_entity - else: - log_debug and self.logger.debug( - 'Entity not already processed for key {0}.' - .format(entity_key) - ) - - # Check for existing instance of entity in cache. 
- log_debug and self.logger.debug( - 'Checking for entity in cache with key {0}'.format(entity_key) - ) - try: - attached_entity = self.cache.get(entity_key) - - log_debug and self.logger.debug( - 'Retrieved existing entity from cache: {0} at {1}' - .format(attached_entity, id(attached_entity)) - ) - - except KeyError: - # Construct new minimal instance to store in cache. - attached_entity = self._create( - entity.entity_type, {}, reconstructing=True - ) - - log_debug and self.logger.debug( - 'Entity not present in cache. Constructed new instance: ' - '{0} at {1}'.format(attached_entity, id(attached_entity)) - ) - - # Mark entity as seen to avoid infinite loops. - merged[entity_key] = attached_entity - - changes = attached_entity.merge(entity, merged=merged) - if changes: - self.cache.set(entity_key, attached_entity) - self.logger.debug('Cache updated with merged entity.') - - else: - self.logger.debug( - 'Cache not updated with merged entity as no differences ' - 'detected.' - ) - - return attached_entity - - def populate(self, entities, projections): - '''Populate *entities* with attributes specified by *projections*. - - Any locally set values included in the *projections* will not be - overwritten with the retrieved remote value. If this 'synchronise' - behaviour is required, first clear the relevant values on the entity by - setting them to :attr:`ftrack_api.symbol.NOT_SET`. Deleting the key will - have the same effect:: - - >>> print(user['username']) - martin - >>> del user['username'] - >>> print(user['username']) - Symbol(NOT_SET) - - .. note:: - - Entities that have been created and not yet persisted will be - skipped as they have no remote values to fetch. 
- - ''' - self.logger.debug(L( - 'Populate {0!r} projections for {1}.', projections, entities - )) - - if not isinstance( - entities, (list, tuple, ftrack_api.query.QueryResult) - ): - entities = [entities] - - # TODO: How to handle a mixed collection of different entity types - # Should probably fail, but need to consider handling hierarchies such - # as User and Group both deriving from Resource. Actually, could just - # proceed and ignore projections that are not present in entity type. - - entities_to_process = [] - - for entity in entities: - if ftrack_api.inspection.state(entity) is ftrack_api.symbol.CREATED: - # Created entities that are not yet persisted have no remote - # values. Don't raise an error here as it is reasonable to - # iterate over an entities properties and see that some of them - # are NOT_SET. - self.logger.debug(L( - 'Skipping newly created entity {0!r} for population as no ' - 'data will exist in the remote for this entity yet.', entity - )) - continue - - entities_to_process.append(entity) - - if entities_to_process: - reference_entity = entities_to_process[0] - entity_type = reference_entity.entity_type - query = 'select {0} from {1}'.format(projections, entity_type) - - primary_key_definition = reference_entity.primary_key_attributes - entity_keys = [ - ftrack_api.inspection.primary_key(entity).values() - for entity in entities_to_process - ] - - if len(primary_key_definition) > 1: - # Composite keys require full OR syntax unfortunately. 
- conditions = [] - for entity_key in entity_keys: - condition = [] - for key, value in zip(primary_key_definition, entity_key): - condition.append('{0} is "{1}"'.format(key, value)) - - conditions.append('({0})'.format('and '.join(condition))) - - query = '{0} where {1}'.format(query, ' or '.join(conditions)) - - else: - primary_key = primary_key_definition[0] - - if len(entity_keys) > 1: - query = '{0} where {1} in ({2})'.format( - query, primary_key, - ','.join([ - str(entity_key[0]) for entity_key in entity_keys - ]) - ) - else: - query = '{0} where {1} is {2}'.format( - query, primary_key, str(entity_keys[0][0]) - ) - - result = self.query(query) - - # Fetch all results now. Doing so will cause them to populate the - # relevant entities in the cache. - result.all() - - # TODO: Should we check that all requested attributes were - # actually populated? If some weren't would we mark that to avoid - # repeated calls or perhaps raise an error? - - # TODO: Make atomic. - def commit(self): - '''Commit all local changes to the server.''' - batch = [] - - with self.auto_populating(False): - for operation in self.recorded_operations: - - # Convert operation to payload. - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - # At present, data payload requires duplicating entity - # type in data and also ensuring primary key added. - entity_data = { - '__entity_type__': operation.entity_type, - } - entity_data.update(operation.entity_key) - entity_data.update(operation.entity_data) - - payload = OperationPayload({ - 'action': 'create', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.UpdateEntityOperation - ): - entity_data = { - # At present, data payload requires duplicating entity - # type. 
- '__entity_type__': operation.entity_type, - operation.attribute_name: operation.new_value - } - - payload = OperationPayload({ - 'action': 'update', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values(), - 'entity_data': entity_data - }) - - elif isinstance( - operation, ftrack_api.operation.DeleteEntityOperation - ): - payload = OperationPayload({ - 'action': 'delete', - 'entity_type': operation.entity_type, - 'entity_key': operation.entity_key.values() - }) - - else: - raise ValueError( - 'Cannot commit. Unrecognised operation type {0} ' - 'detected.'.format(type(operation)) - ) - - batch.append(payload) - - # Optimise batch. - # TODO: Might be better to perform these on the operations list instead - # so all operation contextual information available. - - # If entity was created and deleted in one batch then remove all - # payloads for that entity. - created = set() - deleted = set() - - for payload in batch: - if payload['action'] == 'create': - created.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - elif payload['action'] == 'delete': - deleted.add( - (payload['entity_type'], str(payload['entity_key'])) - ) - - created_then_deleted = deleted.intersection(created) - if created_then_deleted: - optimised_batch = [] - for payload in batch: - entity_type = payload.get('entity_type') - entity_key = str(payload.get('entity_key')) - - if (entity_type, entity_key) in created_then_deleted: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Remove early update operations so that only last operation on - # attribute is applied server side. 
- updates_map = set() - for payload in reversed(batch): - if payload['action'] in ('update', ): - for key, value in payload['entity_data'].items(): - if key == '__entity_type__': - continue - - identity = ( - payload['entity_type'], str(payload['entity_key']), key - ) - if identity in updates_map: - del payload['entity_data'][key] - else: - updates_map.add(identity) - - # Remove NOT_SET values from entity_data. - for payload in batch: - entity_data = payload.get('entity_data', {}) - for key, value in entity_data.items(): - if value is ftrack_api.symbol.NOT_SET: - del entity_data[key] - - # Remove payloads with redundant entity_data. - optimised_batch = [] - for payload in batch: - entity_data = payload.get('entity_data') - if entity_data is not None: - keys = entity_data.keys() - if not keys or keys == ['__entity_type__']: - continue - - optimised_batch.append(payload) - - batch = optimised_batch - - # Collapse updates that are consecutive into one payload. Also, collapse - # updates that occur immediately after creation into the create payload. - optimised_batch = [] - previous_payload = None - - for payload in batch: - if ( - previous_payload is not None - and payload['action'] == 'update' - and previous_payload['action'] in ('create', 'update') - and previous_payload['entity_type'] == payload['entity_type'] - and previous_payload['entity_key'] == payload['entity_key'] - ): - previous_payload['entity_data'].update(payload['entity_data']) - continue - - else: - optimised_batch.append(payload) - previous_payload = payload - - batch = optimised_batch - - # Process batch. - if batch: - result = self.call(batch) - - # Clear recorded operations. - self.recorded_operations.clear() - - # As optimisation, clear local values which are not primary keys to - # avoid redundant merges when merging references. Note: primary keys - # remain as needed for cache retrieval on new entities. 
- with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - for attribute in entity: - if attribute not in entity.primary_key_attributes: - del entity[attribute] - - # Process results merging into cache relevant data. - for entry in result: - - if entry['action'] in ('create', 'update'): - # Merge returned entities into local cache. - self.merge(entry['data']) - - elif entry['action'] == 'delete': - # TODO: Detach entity - need identity returned? - # TODO: Expunge entity from cache. - pass - # Clear remaining local state, including local values for primary - # keys on entities that were merged. - with self.auto_populating(False): - with self.operation_recording(False): - for entity in self._local_cache.values(): - entity.clear() - - def rollback(self): - '''Clear all recorded operations and local state. - - Typically this would be used following a failed :meth:`commit` in order - to revert the session to a known good state. - - Newly created entities not yet persisted will be detached from the - session / purged from cache and no longer contribute, but the actual - objects are not deleted from memory. They should no longer be used and - doing so could cause errors. - - ''' - with self.auto_populating(False): - with self.operation_recording(False): - - # Detach all newly created entities and remove from cache. This - # is done because simply clearing the local values of newly - # created entities would result in entities with no identity as - # primary key was local while not persisted. In addition, it - # makes no sense for failed created entities to exist in session - # or cache. 
- for operation in self.recorded_operations: - if isinstance( - operation, ftrack_api.operation.CreateEntityOperation - ): - entity_key = str(( - str(operation.entity_type), - operation.entity_key.values() - )) - try: - self.cache.remove(entity_key) - except KeyError: - pass - - # Clear locally stored modifications on remaining entities. - for entity in self._local_cache.values(): - entity.clear() - - self.recorded_operations.clear() - - def _fetch_server_information(self): - '''Return server information.''' - result = self.call([{'action': 'query_server_information'}]) - return result[0] - - def _discover_plugins(self, plugin_arguments=None): - '''Find and load plugins in search paths. - - Each discovered module should implement a register function that - accepts this session as first argument. Typically the function should - register appropriate event listeners against the session's event hub. - - def register(session): - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) - - *plugin_arguments* should be an optional mapping of keyword arguments - and values to pass to plugin register functions upon discovery. - - ''' - plugin_arguments = plugin_arguments or {} - ftrack_api.plugin.discover( - self._plugin_paths, [self], plugin_arguments - ) - - def _read_schemas_from_cache(self, schema_cache_path): - '''Return schemas and schema hash from *schema_cache_path*. - - *schema_cache_path* should be the path to the file containing the - schemas in JSON format. 
- - ''' - self.logger.debug(L( - 'Reading schemas from cache {0!r}', schema_cache_path - )) - - if not os.path.exists(schema_cache_path): - self.logger.info(L( - 'Cache file not found at {0!r}.', schema_cache_path - )) - - return [], None - - with open(schema_cache_path, 'r') as schema_file: - schemas = json.load(schema_file) - hash_ = hashlib.md5( - json.dumps(schemas, sort_keys=True) - ).hexdigest() - - return schemas, hash_ - - def _write_schemas_to_cache(self, schemas, schema_cache_path): - '''Write *schemas* to *schema_cache_path*. - - *schema_cache_path* should be a path to a file that the schemas can be - written to in JSON format. - - ''' - self.logger.debug(L( - 'Updating schema cache {0!r} with new schemas.', schema_cache_path - )) - - with open(schema_cache_path, 'w') as local_cache_file: - json.dump(schemas, local_cache_file, indent=4) - - def _load_schemas(self, schema_cache_path): - '''Load schemas. - - First try to load schemas from cache at *schema_cache_path*. If the - cache is not available or the cache appears outdated then load schemas - from server and store fresh copy in cache. - - If *schema_cache_path* is set to `False`, always load schemas from - server bypassing cache. - - ''' - local_schema_hash = None - schemas = [] - - if schema_cache_path: - try: - schemas, local_schema_hash = self._read_schemas_from_cache( - schema_cache_path - ) - except (IOError, TypeError, AttributeError, ValueError): - # Catch any known exceptions when trying to read the local - # schema cache to prevent API from being unusable. - self.logger.exception(L( - 'Schema cache could not be loaded from {0!r}', - schema_cache_path - )) - - # Use `dictionary.get` to retrieve hash to support older version of - # ftrack server not returning a schema hash. - server_hash = self._server_information.get( - 'schema_hash', False - ) - if local_schema_hash != server_hash: - self.logger.debug(L( - 'Loading schemas from server due to hash not matching.' 
- 'Local: {0!r} != Server: {1!r}', local_schema_hash, server_hash - )) - schemas = self.call([{'action': 'query_schemas'}])[0] - - if schema_cache_path: - try: - self._write_schemas_to_cache(schemas, schema_cache_path) - except (IOError, TypeError): - self.logger.exception(L( - 'Failed to update schema cache {0!r}.', - schema_cache_path - )) - - else: - self.logger.debug(L( - 'Using cached schemas from {0!r}', schema_cache_path - )) - - return schemas - - def _build_entity_type_classes(self, schemas): - '''Build default entity type classes.''' - fallback_factory = ftrack_api.entity.factory.StandardFactory() - classes = {} - - for schema in schemas: - results = self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.construct-entity-type', - data=dict( - schema=schema, - schemas=schemas - ) - ), - synchronous=True - ) - - results = [result for result in results if result is not None] - - if not results: - self.logger.debug(L( - 'Using default StandardFactory to construct entity type ' - 'class for "{0}"', schema['id'] - )) - entity_type_class = fallback_factory.create(schema) - - elif len(results) > 1: - raise ValueError( - 'Expected single entity type to represent schema "{0}" but ' - 'received {1} entity types instead.' - .format(schema['id'], len(results)) - ) - - else: - entity_type_class = results[0] - - classes[entity_type_class.entity_type] = entity_type_class - - return classes - - def _configure_locations(self): - '''Configure locations.''' - # First configure builtin locations, by injecting them into local cache. - - # Origin. 
- location = self.create( - 'Location', - data=dict( - name='ftrack.origin', - id=ftrack_api.symbol.ORIGIN_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.OriginLocationMixin, - name='OriginLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 100 - - # Unmanaged. - location = self.create( - 'Location', - data=dict( - name='ftrack.unmanaged', - id=ftrack_api.symbol.UNMANAGED_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - # location.resource_identifier_transformer = ( - # ftrack_api.resource_identifier_transformer.internal.InternalResourceIdentifierTransformer(session) - # ) - location.priority = 90 - - # Review. - location = self.create( - 'Location', - data=dict( - name='ftrack.review', - id=ftrack_api.symbol.REVIEW_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 110 - - # Server. - location = self.create( - 'Location', - data=dict( - name='ftrack.server', - id=ftrack_api.symbol.SERVER_LOCATION_ID - ), - reconstructing=True - ) - ftrack_api.mixin( - location, ftrack_api.entity.location.ServerLocationMixin, - name='ServerLocation' - ) - location.accessor = ftrack_api.accessor.server._ServerAccessor( - session=self - ) - location.structure = ftrack_api.structure.entity_id.EntityIdStructure() - location.priority = 150 - - # Master location based on server scenario. 
- storage_scenario = self.server_information.get('storage_scenario') - - if ( - storage_scenario and - storage_scenario.get('scenario') - ): - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.storage-scenario.activate', - data=dict( - storage_scenario=storage_scenario - ) - ), - synchronous=True - ) - - # Next, allow further configuration of locations via events. - self.event_hub.publish( - ftrack_api.event.base.Event( - topic='ftrack.api.session.configure-location', - data=dict( - session=self - ) - ), - synchronous=True - ) - - @ftrack_api.logging.deprecation_warning( - 'Session._call is now available as public method Session.call. The ' - 'private method will be removed in version 2.0.' - ) - def _call(self, data): - '''Make request to server with *data* batch describing the actions. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - return self.call(data) - - def call(self, data): - '''Make request to server with *data* batch describing the actions.''' - url = self._server_url + '/api' - headers = { - 'content-type': 'application/json', - 'accept': 'application/json' - } - data = self.encode(data, entity_attribute_strategy='modified_only') - - self.logger.debug(L('Calling server {0} with {1!r}', url, data)) - - response = self._request.post( - url, - headers=headers, - data=data - ) - - self.logger.debug(L('Call took: {0}', response.elapsed.total_seconds())) - - self.logger.debug(L('Response: {0!r}', response.text)) - try: - result = self.decode(response.text) - - except Exception: - error_message = ( - 'Server reported error in unexpected format. Raw error was: {0}' - .format(response.text) - ) - self.logger.exception(error_message) - raise ftrack_api.exception.ServerError(error_message) - - else: - if 'exception' in result: - # Handle exceptions. 
- error_message = 'Server reported error: {0}({1})'.format( - result['exception'], result['content'] - ) - self.logger.exception(error_message) - raise ftrack_api.exception.ServerError(error_message) - - return result - - def encode(self, data, entity_attribute_strategy='set_only'): - '''Return *data* encoded as JSON formatted string. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. The following strategies are available: - - * *all* - Encode all attributes, loading any that are currently NOT_SET. - * *set_only* - Encode only attributes that are currently set without - loading any from the remote. - * *modified_only* - Encode only attributes that have been modified - locally. - * *persisted_only* - Encode only remote (persisted) attribute values. - - ''' - entity_attribute_strategies = ( - 'all', 'set_only', 'modified_only', 'persisted_only' - ) - if entity_attribute_strategy not in entity_attribute_strategies: - raise ValueError( - 'Unsupported entity_attribute_strategy "{0}". Must be one of ' - '{1}'.format( - entity_attribute_strategy, - ', '.join(entity_attribute_strategies) - ) - ) - - return json.dumps( - data, - sort_keys=True, - default=functools.partial( - self._encode, - entity_attribute_strategy=entity_attribute_strategy - ) - ) - - def _encode(self, item, entity_attribute_strategy='set_only'): - '''Return JSON encodable version of *item*. - - *entity_attribute_strategy* specifies how entity attributes should be - handled. See :meth:`Session.encode` for available strategies. 
- - ''' - if isinstance(item, (arrow.Arrow, datetime.datetime, datetime.date)): - return { - '__type__': 'datetime', - 'value': item.isoformat() - } - - if isinstance(item, OperationPayload): - data = dict(item.items()) - if "entity_data" in data: - for key, value in data["entity_data"].items(): - if isinstance(value, ftrack_api.entity.base.Entity): - data["entity_data"][key] = self.entity_reference(value) - - return data - - if isinstance(item, ftrack_api.entity.base.Entity): - data = self.entity_reference(item) - - with self.auto_populating(True): - - for attribute in item.attributes: - value = ftrack_api.symbol.NOT_SET - - if entity_attribute_strategy == 'all': - value = attribute.get_value(item) - - elif entity_attribute_strategy == 'set_only': - if attribute.is_set(item): - value = attribute.get_local_value(item) - if value is ftrack_api.symbol.NOT_SET: - value = attribute.get_remote_value(item) - - elif entity_attribute_strategy == 'modified_only': - if attribute.is_modified(item): - value = attribute.get_local_value(item) - - elif entity_attribute_strategy == 'persisted_only': - if not attribute.computed: - value = attribute.get_remote_value(item) - - if value is not ftrack_api.symbol.NOT_SET: - if isinstance( - attribute, ftrack_api.attribute.ReferenceAttribute - ): - if isinstance(value, ftrack_api.entity.base.Entity): - value = self.entity_reference(value) - - data[attribute.name] = value - - return data - - if isinstance( - item, ftrack_api.collection.MappedCollectionProxy - ): - # Use proxied collection for serialisation. - item = item.collection - - if isinstance(item, ftrack_api.collection.Collection): - data = [] - for entity in item: - data.append(self.entity_reference(entity)) - - return data - - raise TypeError('{0!r} is not JSON serializable'.format(item)) - - def entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. 
- - Return a mapping containing the __entity_type__ of the entity along with - the key, value pairs that make up it's primary key. - - ''' - reference = { - '__entity_type__': entity.entity_type - } - with self.auto_populating(False): - reference.update(ftrack_api.inspection.primary_key(entity)) - - return reference - - @ftrack_api.logging.deprecation_warning( - 'Session._entity_reference is now available as public method ' - 'Session.entity_reference. The private method will be removed ' - 'in version 2.0.' - ) - def _entity_reference(self, entity): - '''Return entity reference that uniquely identifies *entity*. - - Return a mapping containing the __entity_type__ of the entity along - with the key, value pairs that make up it's primary key. - - .. note:: - - This private method is now available as public method - :meth:`entity_reference`. This alias remains for backwards - compatibility, but will be removed in version 2.0. - - ''' - return self.entity_reference(entity) - - def decode(self, string): - '''Return decoded JSON *string* as Python object.''' - with self.operation_recording(False): - return json.loads(string, object_hook=self._decode) - - def _decode(self, item): - '''Return *item* transformed into appropriate representation.''' - if isinstance(item, collections.Mapping): - if '__type__' in item: - if item['__type__'] == 'datetime': - item = arrow.get(item['value']) - - elif '__entity_type__' in item: - item = self._create( - item['__entity_type__'], item, reconstructing=True - ) - - return item - - def _get_locations(self, filter_inaccessible=True): - '''Helper to returns locations ordered by priority. - - If *filter_inaccessible* is True then only accessible locations will be - included in result. - - ''' - # Optimise this call. - locations = self.query('Location') - - # Filter. - if filter_inaccessible: - locations = filter( - lambda location: location.accessor, - locations - ) - - # Sort by priority. 
- locations = sorted( - locations, key=lambda location: location.priority - ) - - return locations - - def pick_location(self, component=None): - '''Return suitable location to use. - - If no *component* specified then return highest priority accessible - location. Otherwise, return highest priority accessible location that - *component* is available in. - - Return None if no suitable location could be picked. - - ''' - if component: - return self.pick_locations([component])[0] - - else: - locations = self._get_locations() - if locations: - return locations[0] - else: - return None - - def pick_locations(self, components): - '''Return suitable locations for *components*. - - Return list of locations corresponding to *components* where each - picked location is the highest priority accessible location for that - component. If a component has no location available then its - corresponding entry will be None. - - ''' - candidate_locations = self._get_locations() - availabilities = self.get_component_availabilities( - components, locations=candidate_locations - ) - - locations = [] - for component, availability in zip(components, availabilities): - location = None - - for candidate_location in candidate_locations: - if availability.get(candidate_location['id']) > 0.0: - location = candidate_location - break - - locations.append(location) - - return locations - - def create_component( - self, path, data=None, location='auto' - ): - '''Create a new component from *path* with additional *data* - - .. note:: - - This is a helper method. To create components manually use the - standard :meth:`Session.create` method. - - *path* can be a string representing a filesystem path to the data to - use for the component. The *path* can also be specified as a sequence - string, in which case a sequence component with child components for - each item in the sequence will be created automatically. The accepted - format for a sequence is '{head}{padding}{tail} [{ranges}]'. 
For - example:: - - '/path/to/file.%04d.ext [1-5, 7, 8, 10-20]' - - .. seealso:: - - `Clique documentation `_ - - *data* should be a dictionary of any additional data to construct the - component with (as passed to :meth:`Session.create`). - - If *location* is specified then automatically add component to that - location. The default of 'auto' will automatically pick a suitable - location to add the component to if one is available. To not add to any - location specifiy locations as None. - - .. note:: - - A :meth:`Session.commit` may be - automatically issued as part of the components registration in the - location. - ''' - if data is None: - data = {} - - if location == 'auto': - # Check if the component name matches one of the ftrackreview - # specific names. Add the component to the ftrack.review location if - # so. This is used to not break backwards compatibility. - if data.get('name') in ( - 'ftrackreview-mp4', 'ftrackreview-webm', 'ftrackreview-image' - ): - location = self.get( - 'Location', ftrack_api.symbol.REVIEW_LOCATION_ID - ) - - else: - location = self.pick_location() - - try: - collection = clique.parse(path) - - except ValueError: - # Assume is a single file. - if 'size' not in data: - data['size'] = self._get_filesystem_size(path) - - data.setdefault('file_type', os.path.splitext(path)[-1]) - - return self._create_component( - 'FileComponent', path, data, location - ) - - else: - # Calculate size of container and members. 
- member_sizes = {} - container_size = data.get('size') - - if container_size is not None: - if len(collection.indexes) > 0: - member_size = int( - round(container_size / len(collection.indexes)) - ) - for item in collection: - member_sizes[item] = member_size - - else: - container_size = 0 - for item in collection: - member_sizes[item] = self._get_filesystem_size(item) - container_size += member_sizes[item] - - # Create sequence component - container_path = collection.format('{head}{padding}{tail}') - data.setdefault('padding', collection.padding) - data.setdefault('file_type', os.path.splitext(container_path)[-1]) - data.setdefault('size', container_size) - - container = self._create_component( - 'SequenceComponent', container_path, data, location=None - ) - - # Create member components for sequence. - for member_path in collection: - member_data = { - 'name': collection.match(member_path).group('index'), - 'container': container, - 'size': member_sizes[member_path], - 'file_type': os.path.splitext(member_path)[-1] - } - - component = self._create_component( - 'FileComponent', member_path, member_data, location=None - ) - container['members'].append(component) - - if location: - origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - location.add_component( - container, origin_location, recursive=True - ) - - return container - - def _create_component(self, entity_type, path, data, location): - '''Create and return component. - - See public function :py:func:`createComponent` for argument details. - - ''' - component = self.create(entity_type, data) - - # Add to special origin location so that it is possible to add to other - # locations. 
- origin_location = self.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component(component, path, recursive=False) - - if location: - location.add_component(component, origin_location, recursive=False) - - return component - - def _get_filesystem_size(self, path): - '''Return size from *path*''' - try: - size = os.path.getsize(path) - except OSError: - size = 0 - - return size - - def get_component_availability(self, component, locations=None): - '''Return availability of *component*. - - If *locations* is set then limit result to availability of *component* - in those *locations*. - - Return a dictionary of {location_id:percentage_availability} - - ''' - return self.get_component_availabilities( - [component], locations=locations - )[0] - - def get_component_availabilities(self, components, locations=None): - '''Return availabilities of *components*. - - If *locations* is set then limit result to availabilities of - *components* in those *locations*. - - Return a list of dictionaries of {location_id:percentage_availability}. - The list indexes correspond to those of *components*. - - ''' - availabilities = [] - - if locations is None: - locations = self.query('Location') - - # Separate components into two lists, those that are containers and - # those that are not, so that queries can be optimised. - standard_components = [] - container_components = [] - - for component in components: - if 'members' in component.keys(): - container_components.append(component) - else: - standard_components.append(component) - - # Perform queries. 
- if standard_components: - self.populate( - standard_components, 'component_locations.location_id' - ) - - if container_components: - self.populate( - container_components, - 'members, component_locations.location_id' - ) - - base_availability = {} - for location in locations: - base_availability[location['id']] = 0.0 - - for component in components: - availability = base_availability.copy() - availabilities.append(availability) - - is_container = 'members' in component.keys() - if is_container and len(component['members']): - member_availabilities = self.get_component_availabilities( - component['members'], locations=locations - ) - multiplier = 1.0 / len(component['members']) - for member, member_availability in zip( - component['members'], member_availabilities - ): - for location_id, ratio in member_availability.items(): - availability[location_id] += ( - ratio * multiplier - ) - else: - for component_location in component['component_locations']: - location_id = component_location['location_id'] - if location_id in availability: - availability[location_id] = 100.0 - - for location_id, percentage in availability.items(): - # Avoid quantization error by rounding percentage and clamping - # to range 0-100. - adjusted_percentage = round(percentage, 9) - adjusted_percentage = max(0.0, min(adjusted_percentage, 100.0)) - availability[location_id] = adjusted_percentage - - return availabilities - - @ftrack_api.logging.deprecation_warning( - 'Session.delayed_job has been deprecated in favour of session.call. ' - 'Please refer to the release notes for more information.' - ) - def delayed_job(self, job_type): - '''Execute a delayed job on the server, a `ftrack.entity.job.Job` is returned. - - *job_type* should be one of the allowed job types. There is currently - only one remote job type "SYNC_USERS_LDAP". 
- ''' - if job_type not in (ftrack_api.symbol.JOB_SYNC_USERS_LDAP, ): - raise ValueError( - u'Invalid Job type: {0}.'.format(job_type) - ) - - operation = { - 'action': 'delayed_job', - 'job_type': job_type.name - } - - try: - result = self.call( - [operation] - )[0] - - except ftrack_api.exception.ServerError as error: - raise - - return result['data'] - - def get_widget_url(self, name, entity=None, theme=None): - '''Return an authenticated URL for widget with *name* and given options. - - The returned URL will be authenticated using a token which will expire - after 6 minutes. - - *name* should be the name of the widget to return and should be one of - 'info', 'tasks' or 'tasks_browser'. - - Certain widgets require an entity to be specified. If so, specify it by - setting *entity* to a valid entity instance. - - *theme* sets the theme of the widget and can be either 'light' or 'dark' - (defaulting to 'dark' if an invalid option given). - - ''' - operation = { - 'action': 'get_widget_url', - 'name': name, - 'theme': theme - } - if entity: - operation['entity_type'] = entity.entity_type - operation['entity_key'] = ( - ftrack_api.inspection.primary_key(entity).values() - ) - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'get_widget_url\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "get_widget_url", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - else: - return result[0]['widget_url'] - - def encode_media(self, media, version_id=None, keep_original='auto'): - '''Return a new Job that encode *media* to make it playable in browsers. - - *media* can be a path to a file or a FileComponent in the ftrack.server - location. 
- - The job will encode *media* based on the file type and job data contains - information about encoding in the following format:: - - { - 'output': [{ - 'format': 'video/mp4', - 'component_id': 'e2dc0524-b576-11d3-9612-080027331d74' - }, { - 'format': 'image/jpeg', - 'component_id': '07b82a97-8cf9-11e3-9383-20c9d081909b' - }], - 'source_component_id': 'e3791a09-7e11-4792-a398-3d9d4eefc294', - 'keep_original': True - } - - The output components are associated with the job via the job_components - relation. - - An image component will always be generated if possible that can be used - as a thumbnail. - - If *media* is a file path, a new source component will be created and - added to the ftrack server location and a call to :meth:`commit` will be - issued. If *media* is a FileComponent, it will be assumed to be in - available in the ftrack.server location. - - If *version_id* is specified, the new components will automatically be - associated with the AssetVersion. Otherwise, the components will not - be associated to a version even if the supplied *media* belongs to one. - A server version of 3.3.32 or higher is required for the version_id - argument to function properly. - - If *keep_original* is not set, the original media will be kept if it - is a FileComponent, and deleted if it is a file path. You can specify - True or False to change this behavior. - ''' - if isinstance(media, basestring): - # Media is a path to a file. - server_location = self.get( - 'Location', ftrack_api.symbol.SERVER_LOCATION_ID - ) - if keep_original == 'auto': - keep_original = False - - component_data = None - if keep_original: - component_data = dict(version_id=version_id) - - component = self.create_component( - path=media, - data=component_data, - location=server_location - ) - - # Auto commit to ensure component exists when sent to server. - self.commit() - - elif ( - hasattr(media, 'entity_type') and - media.entity_type in ('FileComponent',) - ): - # Existing file component. 
- component = media - if keep_original == 'auto': - keep_original = True - - else: - raise ValueError( - 'Unable to encode media of type: {0}'.format(type(media)) - ) - - operation = { - 'action': 'encode_media', - 'component_id': component['id'], - 'version_id': version_id, - 'keep_original': keep_original - } - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'encode_media\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support "encode_media", ' - 'please update server and try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return self.get('Job', result[0]['job_id']) - - def get_upload_metadata( - self, component_id, file_name, file_size, checksum=None - ): - '''Return URL and headers used to upload data for *component_id*. - - *file_name* and *file_size* should match the components details. - - The returned URL should be requested using HTTP PUT with the specified - headers. - - The *checksum* is used as the Content-MD5 header and should contain - the base64-encoded 128-bit MD5 digest of the message (without the - headers) according to RFC 1864. This can be used as a message integrity - check to verify that the data is the same data that was originally sent. - ''' - operation = { - 'action': 'get_upload_metadata', - 'component_id': component_id, - 'file_name': file_name, - 'file_size': file_size, - 'checksum': checksum - } - - try: - result = self.call([operation]) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. 
- if 'Invalid action u\'get_upload_metadata\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"get_upload_metadata", please update server and try ' - 'again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - return result[0] - - def send_user_invite(self, user): - '''Send a invitation to the provided *user*. - - *user* is a User instance - - ''' - - self.send_user_invites( - [user] - ) - - def send_user_invites(self, users): - '''Send a invitation to the provided *user*. - - *users* is a list of User instances - - ''' - - operations = [] - - for user in users: - operations.append( - { - 'action':'send_user_invite', - 'user_id': user['id'] - } - ) - - try: - self.call(operations) - - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_user_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_user_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - def send_review_session_invite(self, invitee): - '''Send an invite to a review session to *invitee*. - - *invitee* is a instance of ReviewSessionInvitee. - - .. note:: - - The *invitee* must be committed. - - ''' - self.send_review_session_invites([invitee]) - - def send_review_session_invites(self, invitees): - '''Send an invite to a review session to a list of *invitees*. - - *invitee* is a list of ReviewSessionInvitee objects. - - .. note:: - - All *invitees* must be committed. 
- - ''' - operations = [] - - for invitee in invitees: - operations.append( - { - 'action': 'send_review_session_invite', - 'review_session_invitee_id': invitee['id'] - } - ) - - try: - self.call(operations) - except ftrack_api.exception.ServerError as error: - # Raise informative error if the action is not supported. - if 'Invalid action u\'send_review_session_invite\'' in error.message: - raise ftrack_api.exception.ServerCompatibilityError( - 'Server version {0!r} does not support ' - '"send_review_session_invite", please update server and ' - 'try again.'.format( - self.server_information.get('version') - ) - ) - else: - raise - - -class AutoPopulatingContext(object): - '''Context manager for temporary change of session auto_populate value.''' - - def __init__(self, session, auto_populate): - '''Initialise context.''' - super(AutoPopulatingContext, self).__init__() - self._session = session - self._auto_populate = auto_populate - self._current_auto_populate = None - - def __enter__(self): - '''Enter context switching to desired auto populate setting.''' - self._current_auto_populate = self._session.auto_populate - self._session.auto_populate = self._auto_populate - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit context resetting auto populate to original setting.''' - self._session.auto_populate = self._current_auto_populate - - -class OperationRecordingContext(object): - '''Context manager for temporary change of session record_operations.''' - - def __init__(self, session, record_operations): - '''Initialise context.''' - super(OperationRecordingContext, self).__init__() - self._session = session - self._record_operations = record_operations - self._current_record_operations = None - - def __enter__(self): - '''Enter context.''' - self._current_record_operations = self._session.record_operations - self._session.record_operations = self._record_operations - - def __exit__(self, exception_type, exception_value, traceback): - '''Exit 
context.''' - self._session.record_operations = self._current_record_operations - - -class OperationPayload(collections.MutableMapping): - '''Represent operation payload.''' - - def __init__(self, *args, **kwargs): - '''Initialise payload.''' - super(OperationPayload, self).__init__() - self._data = dict() - self.update(dict(*args, **kwargs)) - - def __str__(self): - '''Return string representation.''' - return '<{0} {1}>'.format( - self.__class__.__name__, str(self._data) - ) - - def __getitem__(self, key): - '''Return value for *key*.''' - return self._data[key] - - def __setitem__(self, key, value): - '''Set *value* for *key*.''' - self._data[key] = value - - def __delitem__(self, key): - '''Remove *key*.''' - del self._data[key] - - def __iter__(self): - '''Iterate over all keys.''' - return iter(self._data) - - def __len__(self): - '''Return count of keys.''' - return len(self._data) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py deleted file mode 100644 index 1aab07ed..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py deleted file mode 100644 index eae3784d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/base.py +++ /dev/null @@ -1,38 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from abc import ABCMeta, abstractmethod - - -class Structure(object): - '''Structure plugin interface. - - A structure plugin should compute appropriate paths for data. 
- - ''' - - __metaclass__ = ABCMeta - - def __init__(self, prefix=''): - '''Initialise structure.''' - self.prefix = prefix - self.path_separator = '/' - super(Structure, self).__init__() - - @abstractmethod - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - - def _get_sequence_expression(self, sequence): - '''Return a sequence expression for *sequence* component.''' - padding = sequence['padding'] - if padding: - expression = '%0{0}d'.format(padding) - else: - expression = '%d' - - return expression diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py deleted file mode 100644 index ae466bf6..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/entity_id.py +++ /dev/null @@ -1,12 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.structure.base - - -class EntityIdStructure(ftrack_api.structure.base.Structure): - '''Entity id pass-through structure.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a *resourceIdentifier* for supplied *entity*.''' - return entity['id'] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py deleted file mode 100644 index acc3e21b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/id.py +++ /dev/null @@ -1,91 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class IdStructure(ftrack_api.structure.base.Structure): - '''Id based structure supporting Components only. 
- - A components unique id will be used to form a path to store the data at. - To avoid millions of entries in one directory each id is chunked into four - prefix directories with the remainder used to name the file:: - - /prefix/1/2/3/4/56789 - - If the component has a defined filetype it will be added to the path:: - - /prefix/1/2/3/4/56789.exr - - Components that are children of container components will be placed inside - the id structure of their parent:: - - /prefix/1/2/3/4/56789/355827648d.exr - /prefix/1/2/3/4/56789/ajf24215b5.exr - - However, sequence children will be named using their label as an index and - a common prefix of 'file.':: - - /prefix/1/2/3/4/56789/file.0001.exr - /prefix/1/2/3/4/56789/file.0002.exr - - ''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - if entity.entity_type in ('FileComponent',): - # When in a container, place the file inside a directory named - # after the container. - container = entity['container'] - if container and container is not ftrack_api.symbol.NOT_SET: - path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Label doubles as index for now. - name = 'file.{0}{1}'.format( - entity['name'], entity['file_type'] - ) - parts = [os.path.dirname(path), name] - - else: - # Just place uniquely identified file into directory - name = entity['id'] + entity['file_type'] - parts = [path, name] - - else: - name = entity['id'][4:] + entity['file_type'] - parts = ([self.prefix] + list(entity['id'][:4]) + [name]) - - elif entity.entity_type in ('SequenceComponent',): - name = 'file' - - # Add a sequence identifier. 
- sequence_expression = self._get_sequence_expression(entity) - name += '.{0}'.format(sequence_expression) - - if ( - entity['file_type'] and - entity['file_type'] is not ftrack_api.symbol.NOT_SET - ): - name += entity['file_type'] - - parts = ([self.prefix] + list(entity['id'][:4]) - + [entity['id'][4:]] + [name]) - - elif entity.entity_type in ('ContainerComponent',): - # Just an id directory - parts = ([self.prefix] + - list(entity['id'][:4]) + [entity['id'][4:]]) - - else: - raise NotImplementedError('Cannot generate path for unsupported ' - 'entity {0}'.format(entity)) - - return self.path_separator.join(parts).strip('/') diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py deleted file mode 100644 index 0d4d3a57..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/origin.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -from .base import Structure - - -class OriginStructure(Structure): - '''Origin structure that passes through existing resource identifier.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* should be a mapping that includes at least a - 'source_resource_identifier' key that refers to the resource identifier - to pass through. - - ''' - if context is None: - context = {} - - resource_identifier = context.get('source_resource_identifier') - if resource_identifier is None: - raise ValueError( - 'Could not generate resource identifier as no source resource ' - 'identifier found in passed context.' 
- ) - - return resource_identifier diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py deleted file mode 100644 index 0b0602df..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/structure/standard.py +++ /dev/null @@ -1,217 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import re -import unicodedata - -import ftrack_api.symbol -import ftrack_api.structure.base - - -class StandardStructure(ftrack_api.structure.base.Structure): - '''Project hierarchy based structure that only supports Components. - - The resource identifier is generated from the project code, the name - of objects in the project structure, asset name and version number:: - - my_project/folder_a/folder_b/asset_name/v003 - - If the component is a `FileComponent` then the name of the component and the - file type are used as filename in the resource_identifier:: - - my_project/folder_a/folder_b/asset_name/v003/foo.jpg - - If the component is a `SequenceComponent` then a sequence expression, - `%04d`, is used. E.g. a component with the name `foo` yields:: - - my_project/folder_a/folder_b/asset_name/v003/foo.%04d.jpg - - For the member components their index in the sequence is used:: - - my_project/folder_a/folder_b/asset_name/v003/foo.0042.jpg - - The name of the component is added to the resource identifier if the - component is a `ContainerComponent`. E.g. a container component with the - name `bar` yields:: - - my_project/folder_a/folder_b/asset_name/v003/bar - - For a member of that container the file name is based on the component name - and file type:: - - my_project/folder_a/folder_b/asset_name/v003/bar/baz.pdf - - ''' - - def __init__( - self, project_versions_prefix=None, illegal_character_substitute='_' - ): - '''Initialise structure. 
- - If *project_versions_prefix* is defined, insert after the project code - for versions published directly under the project:: - - my_project//v001/foo.jpg - - Replace illegal characters with *illegal_character_substitute* if - defined. - - .. note:: - - Nested component containers/sequences are not supported. - - ''' - super(StandardStructure, self).__init__() - self.project_versions_prefix = project_versions_prefix - self.illegal_character_substitute = illegal_character_substitute - - def _get_parts(self, entity): - '''Return resource identifier parts from *entity*.''' - session = entity.session - - version = entity['version'] - - if version is ftrack_api.symbol.NOT_SET and entity['version_id']: - version = session.get('AssetVersion', entity['version_id']) - - error_message = ( - 'Component {0!r} must be attached to a committed ' - 'version and a committed asset with a parent context.'.format( - entity - ) - ) - - if ( - version is ftrack_api.symbol.NOT_SET or - version in session.created - ): - raise ftrack_api.exception.StructureError(error_message) - - link = version['link'] - - if not link: - raise ftrack_api.exception.StructureError(error_message) - - structure_names = [ - item['name'] - for item in link[1:-1] - ] - - project_id = link[0]['id'] - project = session.get('Project', project_id) - asset = version['asset'] - - version_number = self._format_version(version['version']) - - parts = [] - parts.append(project['name']) - - if structure_names: - parts.extend(structure_names) - elif self.project_versions_prefix: - # Add *project_versions_prefix* if configured and the version is - # published directly under the project. 
- parts.append(self.project_versions_prefix) - - parts.append(asset['name']) - parts.append(version_number) - - return [self.sanitise_for_filesystem(part) for part in parts] - - def _format_version(self, number): - '''Return a formatted string representing version *number*.''' - return 'v{0:03d}'.format(number) - - def sanitise_for_filesystem(self, value): - '''Return *value* with illegal filesystem characters replaced. - - An illegal character is one that is not typically valid for filesystem - usage, such as non ascii characters, or can be awkward to use in a - filesystem, such as spaces. Replace these characters with - the character specified by *illegal_character_substitute* on - initialisation. If no character was specified as substitute then return - *value* unmodified. - - ''' - if self.illegal_character_substitute is None: - return value - - if isinstance(value, str): - value = value.decode('utf-8') - - value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore') - value = re.sub('[^\w\.-]', self.illegal_character_substitute, value) - return unicode(value.strip().lower()) - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information, but - is unused in this implementation. - - - Raise a :py:exc:`ftrack_api.exeption.StructureError` if *entity* is not - attached to a committed version and a committed asset with a parent - context. - - ''' - if entity.entity_type in ('FileComponent',): - container = entity['container'] - - if container: - # Get resource identifier for container. - container_path = self.get_resource_identifier(container) - - if container.entity_type in ('SequenceComponent',): - # Strip the sequence component expression from the parent - # container and back the correct filename, i.e. - # /sequence/component/sequence_component_name.0012.exr. 
- name = '{0}.{1}{2}'.format( - container['name'], entity['name'], entity['file_type'] - ) - parts = [ - os.path.dirname(container_path), - self.sanitise_for_filesystem(name) - ] - - else: - # Container is not a sequence component so add it as a - # normal component inside the container. - name = entity['name'] + entity['file_type'] - parts = [ - container_path, self.sanitise_for_filesystem(name) - ] - - else: - # File component does not have a container, construct name from - # component name and file type. - parts = self._get_parts(entity) - name = entity['name'] + entity['file_type'] - parts.append(self.sanitise_for_filesystem(name)) - - elif entity.entity_type in ('SequenceComponent',): - # Create sequence expression for the sequence component and add it - # to the parts. - parts = self._get_parts(entity) - sequence_expression = self._get_sequence_expression(entity) - parts.append( - '{0}.{1}{2}'.format( - self.sanitise_for_filesystem(entity['name']), - sequence_expression, - self.sanitise_for_filesystem(entity['file_type']) - ) - ) - - elif entity.entity_type in ('ContainerComponent',): - # Add the name of the container to the resource identifier parts. 
- parts = self._get_parts(entity) - parts.append(self.sanitise_for_filesystem(entity['name'])) - - else: - raise NotImplementedError( - 'Cannot generate resource identifier for unsupported ' - 'entity {0!r}'.format(entity) - ) - - return self.path_separator.join(parts) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py deleted file mode 100644 index f46760f6..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/source/ftrack_api/symbol.py +++ /dev/null @@ -1,77 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import os - - -class Symbol(object): - '''A constant symbol.''' - - def __init__(self, name, value=True): - '''Initialise symbol with unique *name* and *value*. - - *value* is used for nonzero testing. - - ''' - self.name = name - self.value = value - - def __str__(self): - '''Return string representation.''' - return self.name - - def __repr__(self): - '''Return representation.''' - return '{0}({1})'.format(self.__class__.__name__, self.name) - - def __nonzero__(self): - '''Return whether symbol represents non-zero value.''' - return bool(self.value) - - def __copy__(self): - '''Return shallow copy. - - Overridden to always return same instance. - - ''' - return self - - -#: Symbol representing that no value has been set or loaded. -NOT_SET = Symbol('NOT_SET', False) - -#: Symbol representing created state. -CREATED = Symbol('CREATED') - -#: Symbol representing modified state. -MODIFIED = Symbol('MODIFIED') - -#: Symbol representing deleted state. -DELETED = Symbol('DELETED') - -#: Topic published when component added to a location. -COMPONENT_ADDED_TO_LOCATION_TOPIC = 'ftrack.location.component-added' - -#: Topic published when component removed from a location. -COMPONENT_REMOVED_FROM_LOCATION_TOPIC = 'ftrack.location.component-removed' - -#: Identifier of builtin origin location. 
-ORIGIN_LOCATION_ID = 'ce9b348f-8809-11e3-821c-20c9d081909b' - -#: Identifier of builtin unmanaged location. -UNMANAGED_LOCATION_ID = 'cb268ecc-8809-11e3-a7e2-20c9d081909b' - -#: Identifier of builtin review location. -REVIEW_LOCATION_ID = 'cd41be70-8809-11e3-b98a-20c9d081909b' - -#: Identifier of builtin connect location. -CONNECT_LOCATION_ID = '07b82a97-8cf9-11e3-9383-20c9d081909b' - -#: Identifier of builtin server location. -SERVER_LOCATION_ID = '3a372bde-05bc-11e4-8908-20c9d081909b' - -#: Chunk size used when working with data, default to 1Mb. -CHUNK_SIZE = int(os.getenv('FTRACK_API_FILE_CHUNK_SIZE', 0)) or 1024*1024 - -#: Symbol representing syncing users with ldap -JOB_SYNC_USERS_LDAP = Symbol('SYNC_USERS_LDAP') diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov deleted file mode 100644 index db34709c..00000000 Binary files a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/colour_wheel.mov and /dev/null differ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png deleted file mode 100644 index da6ec772..00000000 Binary files a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image-resized-10.png and /dev/null differ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png deleted file mode 100644 index cd4d0045..00000000 Binary files a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/media/image.png and /dev/null differ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py 
b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py deleted file mode 100644 index 5fcf034b..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/configure_locations.py +++ /dev/null @@ -1,40 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api -import ftrack_api.entity.location -import ftrack_api.accessor.disk - - -def configure_locations(event): - '''Configure locations for session.''' - session = event['data']['session'] - - # Find location(s) and customise instances. - location = session.ensure('Location', {'name': 'test.location'}) - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:configure_locations.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session.event_hub.subscribe( - 'topic=ftrack.api.session.configure-location', - configure_locations - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py deleted file mode 100644 index bb2f8c42..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/construct_entity_type.py +++ /dev/null @@ -1,52 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import logging - -import ftrack_api.entity.factory - - -def stub(self): - '''A stub method for testing only.''' - - -class Factory(ftrack_api.entity.factory.StandardFactory): - '''Entity class factory.''' - - def create(self, schema, bases=None): - '''Create and return entity class from *schema*.''' - # Optionally change bases for class to be generated. - cls = super(Factory, self).create(schema, bases=bases) - - # Further customise cls before returning. - if schema['id'] == 'User': - cls.stub = stub - - return cls - - -def register(session): - '''Register plugin with *session*.''' - logger = logging.getLogger('ftrack_plugin:construct_entity_type.register') - - # Validate that session is an instance of ftrack_api.Session. If not, assume - # that register is being called from an old or incompatible API and return - # without doing anything. 
- if not isinstance(session, ftrack_api.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - factory = Factory() - - def construct_entity_type(event): - '''Return class to represent entity type specified by *event*.''' - schema = event['data']['schema'] - return factory.create(schema) - - session.event_hub.subscribe( - 'topic=ftrack.api.session.construct-entity-type', - construct_entity_type - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py deleted file mode 100644 index 37938ae8..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/fixture/plugin/count_session_event.py +++ /dev/null @@ -1,41 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2018 ftrack -import logging -import collections - -import ftrack_api.session - - -def count_session_event(event): - '''Called when session is ready to be used.''' - logger = logging.getLogger('com.ftrack.test-session-event-plugin') - event_topic = event['topic'] - logger.debug(u'Event received: {}'.format(event_topic)) - session = event['data']['session'] - session._test_called_events[event_topic] += 1 - - -def register(session, **kw): - '''Register plugin. Called when used as an plugin.''' - logger = logging.getLogger('com.ftrack.test-session-event-plugin') - - # Validate that session is an instance of ftrack_api.Session. If not, - # assume that register is being called from an old or incompatible API and - # return without doing anything. 
- if not isinstance(session, ftrack_api.session.Session): - logger.debug( - 'Not subscribing plugin as passed argument {0!r} is not an ' - 'ftrack_api.Session instance.'.format(session) - ) - return - - session._test_called_events = collections.defaultdict(int) - session.event_hub.subscribe( - 'topic=ftrack.api.session.ready', - count_session_event - ) - session.event_hub.subscribe( - 'topic=ftrack.api.session.reset', - count_session_event - ) - logger.debug('Plugin registered') diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py deleted file mode 100644 index 1aab07ed..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py deleted file mode 100644 index bc98f15d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py deleted file mode 100644 index 82895d92..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_disk.py +++ /dev/null @@ -1,267 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile - -import pytest - -import ftrack_api -import ftrack_api.exception -import ftrack_api.accessor.disk -import ftrack_api.data - - -def test_get_filesystem_path(temporary_path): - '''Convert paths to filesystem paths.''' - accessor = 
ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - # Absolute paths outside of configured prefix fail. - with pytest.raises(ftrack_api.exception.AccessorFilesystemPathError): - accessor.get_filesystem_path(os.path.join('/', 'test', 'foo.txt')) - - # Absolute root path. - assert accessor.get_filesystem_path(temporary_path) == temporary_path - - # Absolute path within prefix. - assert ( - accessor.get_filesystem_path( - os.path.join(temporary_path, 'test.txt') - ) == - os.path.join(temporary_path, 'test.txt') - ) - - # Relative root path - assert accessor.get_filesystem_path('') == temporary_path - - # Relative path for file at root - assert (accessor.get_filesystem_path('test.txt') == - os.path.join(temporary_path, 'test.txt')) - - # Relative path for file in subdirectory - assert (accessor.get_filesystem_path('test/foo.txt') == - os.path.join(temporary_path, 'test', 'foo.txt')) - - # Relative path non-collapsed - assert (accessor.get_filesystem_path('test/../foo.txt') == - os.path.join(temporary_path, 'foo.txt')) - - # Relative directory path without trailing slash - assert (accessor.get_filesystem_path('test') == - os.path.join(temporary_path, 'test')) - - # Relative directory path with trailing slash - assert (accessor.get_filesystem_path('test/') == - os.path.join(temporary_path, 'test')) - - -def test_list(temporary_path): - '''List entries.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - # File in root directory - assert accessor.list('') == [] - data = accessor.open('test.txt', 'w+') - data.close() - assert accessor.list('') == ['test.txt'] - - # File in subdirectory - accessor.make_container('test_dir') - assert accessor.list('test_dir') == [] - data = accessor.open('test_dir/test.txt', 'w+') - data.close() - - listing = accessor.list('test_dir') - assert listing == [os.path.join('test_dir', 'test.txt')] - - # Is a valid resource - assert accessor.exists(listing[0]) is True - - -def test_exists(temporary_path): - '''Valid 
path exists.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - _, temporary_file = tempfile.mkstemp(dir=temporary_path) - assert accessor.exists(temporary_file) is True - - -def test_missing_does_not_exist(temporary_path): - '''Missing path does not exist.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - assert accessor.exists('non-existant.txt') is False - - -def test_is_file(temporary_path): - '''Valid file is considered a file.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - _, temporary_file = tempfile.mkstemp(dir=temporary_path) - assert accessor.is_file(temporary_file) is True - - -def test_missing_is_not_file(temporary_path): - '''Missing path is not considered a file.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - assert accessor.is_file('non_existant.txt') is False - - -def test_container_is_not_file(temporary_path): - '''Valid container is not considered a file.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - temporary_directory = tempfile.mkdtemp(dir=temporary_path) - assert accessor.is_file(temporary_directory) is False - - -def test_is_container(temporary_path): - '''Valid container is considered a container.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - temporary_directory = tempfile.mkdtemp(dir=temporary_path) - assert accessor.is_container(temporary_directory) is True - - -def test_missing_is_not_container(temporary_path): - '''Missing path is not considered a container.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - assert accessor.is_container('non_existant') is False - - -def test_file_is_not_container(temporary_path): - '''Valid file is not considered a container.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - _, temporary_file = tempfile.mkstemp(dir=temporary_path) - assert accessor.is_container(temporary_file) is False - - -def 
test_is_sequence(temporary_path): - '''Sequence detection unsupported.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - with pytest.raises( - ftrack_api.exception.AccessorUnsupportedOperationError - ): - accessor.is_sequence('foo.%04d.exr') - - -def test_open(temporary_path): - '''Open file.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - with pytest.raises(ftrack_api.exception.AccessorResourceNotFoundError): - accessor.open('test.txt', 'r') - - data = accessor.open('test.txt', 'w+') - assert isinstance(data, ftrack_api.data.Data) is True - assert data.read() == '' - data.write('test data') - data.close() - - data = accessor.open('test.txt', 'r') - assert (data.read() == 'test data') - data.close() - - -def test_remove_file(temporary_path): - '''Delete file at path.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - file_handle, temporary_file = tempfile.mkstemp(dir=temporary_path) - os.close(file_handle) - accessor.remove(temporary_file) - assert os.path.exists(temporary_file) is False - - -def test_remove_container(temporary_path): - '''Delete container at path.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - temporary_directory = tempfile.mkdtemp(dir=temporary_path) - accessor.remove(temporary_directory) - assert os.path.exists(temporary_directory) is False - - -def test_remove_missing(temporary_path): - '''Fail to remove path that does not exist.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - with pytest.raises(ftrack_api.exception.AccessorResourceNotFoundError): - accessor.remove('non_existant') - - -def test_make_container(temporary_path): - '''Create container.''' - accessor = ftrack_api.accessor.disk.DiskAccessor(temporary_path) - - accessor.make_container('test') - assert os.path.isdir(os.path.join(temporary_path, 'test')) is True - - # Recursive - accessor.make_container('test/a/b/c') - assert ( - os.path.isdir( - 
os.path.join(temporary_path, 'test', 'a', 'b', 'c') - ) is - True - ) - - # Non-recursive fail - with pytest.raises( - ftrack_api.exception.AccessorParentResourceNotFoundError - ): - accessor.make_container('test/d/e/f', recursive=False) - - # Existing succeeds - accessor.make_container('test/a/b/c') - - -def test_get_container(temporary_path): - '''Get container from resource_identifier.''' - # With prefix. - accessor = ftrack_api.accessor.disk.DiskAccessor(prefix=temporary_path) - - assert ( - accessor.get_container(os.path.join('test', 'a')) == - 'test' - ) - - assert ( - accessor.get_container(os.path.join('test', 'a/')) == - 'test' - ) - - assert ( - accessor.get_container('test') == - '' - ) - - with pytest.raises( - ftrack_api.exception.AccessorParentResourceNotFoundError - ): - accessor.get_container('') - - with pytest.raises( - ftrack_api.exception.AccessorParentResourceNotFoundError - ): - accessor.get_container(temporary_path) - - # Without prefix. - accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - - assert ( - accessor.get_container(os.path.join(temporary_path, 'test', 'a')) == - os.path.join(temporary_path, 'test') - ) - - assert ( - accessor.get_container( - os.path.join(temporary_path, 'test', 'a/') - ) == - os.path.join(temporary_path, 'test') - ) - - assert ( - accessor.get_container(os.path.join(temporary_path, 'test')) == - temporary_path - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py deleted file mode 100644 index bb32426a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/accessor/test_server.py +++ /dev/null @@ -1,41 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api -import ftrack_api.exception -import ftrack_api.accessor.server -import ftrack_api.data - - -def 
test_read_and_write(new_component, session): - '''Read and write data from server accessor.''' - random_data = uuid.uuid1().hex - - accessor = ftrack_api.accessor.server._ServerAccessor(session) - http_file = accessor.open(new_component['id'], mode='wb') - http_file.write(random_data) - http_file.close() - - data = accessor.open(new_component['id'], 'r') - assert data.read() == random_data, 'Read data is the same as written.' - data.close() - - -def test_remove_data(new_component, session): - '''Remove data using server accessor.''' - random_data = uuid.uuid1().hex - - accessor = ftrack_api.accessor.server._ServerAccessor(session) - http_file = accessor.open(new_component['id'], mode='wb') - http_file.write(random_data) - http_file.close() - - accessor.remove(new_component['id']) - - data = accessor.open(new_component['id'], 'r') - with pytest.raises(ftrack_api.exception.AccessorOperationFailedError): - data.read() diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py deleted file mode 100644 index cf4fe3cc..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/conftest.py +++ /dev/null @@ -1,539 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid -import tempfile -import shutil -import os - -import pytest -import clique - -import ftrack_api -import ftrack_api.symbol - - -def pytest_generate_tests(metafunc): - '''Parametrize tests dynamically. - - If a test function has a corresponding parametrize function then call it - passing along the *metafunc*. For example, for a "test_foo" function, look - for and call "parametrize_test_foo" if it exists. - - This is useful when more complex dynamic parametrization is needed than the - standard pytest.mark.parametrize decorator can provide. 
- - ''' - generator_name = 'parametrize_{}'.format(metafunc.function.__name__) - generator = getattr(metafunc.module, generator_name, None) - if callable(generator): - generator(metafunc) - - -def _temporary_file(request, **kwargs): - '''Return temporary file.''' - file_handle, path = tempfile.mkstemp(**kwargs) - os.close(file_handle) - - def cleanup(): - '''Remove temporary file.''' - try: - os.remove(path) - except OSError: - pass - - request.addfinalizer(cleanup) - return path - - -@pytest.fixture() -def temporary_file(request): - '''Return temporary file.''' - return _temporary_file(request) - - -@pytest.fixture() -def temporary_image(request): - '''Return temporary file.''' - return _temporary_file(request, suffix='.jpg') - - -@pytest.fixture() -def temporary_directory(request): - '''Return temporary directory.''' - path = tempfile.mkdtemp() - - def cleanup(): - '''Remove temporary directory.''' - shutil.rmtree(path) - - request.addfinalizer(cleanup) - - return path - - -@pytest.fixture() -def temporary_sequence(temporary_directory): - '''Return temporary sequence of three files. 
- - Return the path using the `clique - `_ format, for example:: - - /tmp/asfjsfjoj3/%04d.jpg [1-3] - - ''' - items = [] - for index in range(3): - item_path = os.path.join( - temporary_directory, '{0:04d}.jpg'.format(index) - ) - with open(item_path, 'w') as file_descriptor: - file_descriptor.write(uuid.uuid4().hex) - file_descriptor.close() - - items.append(item_path) - - collections, _ = clique.assemble(items) - sequence_path = collections[0].format() - - return sequence_path - - -@pytest.fixture() -def video_path(): - '''Return a path to a video file.''' - video = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - 'fixture', - 'media', - 'colour_wheel.mov' - ) - ) - - return video - - -@pytest.fixture() -def session(): - '''Return session instance.''' - return ftrack_api.Session() - - -@pytest.fixture() -def session_no_autoconnect_hub(): - '''Return session instance not auto connected to hub.''' - return ftrack_api.Session(auto_connect_event_hub=False) - - -@pytest.fixture() -def unique_name(): - '''Return a unique name.''' - return 'test-{0}'.format(uuid.uuid4()) - - -@pytest.fixture() -def temporary_path(request): - '''Return temporary path.''' - path = tempfile.mkdtemp() - - def cleanup(): - '''Remove created path.''' - try: - shutil.rmtree(path) - except OSError: - pass - - request.addfinalizer(cleanup) - - return path - - -@pytest.fixture() -def new_user(request, session, unique_name): - '''Return a newly created unique user.''' - entity = session.create('User', {'username': unique_name}) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(entity) - session.commit() - - request.addfinalizer(cleanup) - - return entity - - -@pytest.fixture() -def user(session): - '''Return the same user entity for entire session.''' - # Jenkins user - entity = session.get('User', 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def project_schema(session): - 
'''Return project schema.''' - # VFX Scheme - entity = session.get( - 'ProjectSchema', '69cb7f92-4dbf-11e1-9902-f23c91df25eb' - ) - assert entity is not None - return entity - - -@pytest.fixture() -def new_project_tree(request, session, user): - '''Return new project with basic tree.''' - project_schema = session.query('ProjectSchema').first() - default_shot_status = project_schema.get_statuses('Shot')[0] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - for sequence_number in range(1): - sequence = session.create('Sequence', { - 'name': 'sequence_{0:03d}'.format(sequence_number), - 'parent': project - }) - - for shot_number in range(1): - shot = session.create('Shot', { - 'name': 'shot_{0:03d}'.format(shot_number * 10), - 'parent': sequence, - 'status': default_shot_status - }) - - for task_number in range(1): - task = session.create('Task', { - 'name': 'task_{0:03d}'.format(task_number), - 'parent': shot, - 'status': default_task_status, - 'type': default_task_type - }) - - session.create('Appointment', { - 'type': 'assignment', - 'context': task, - 'resource': user - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -@pytest.fixture() -def new_project(request, session, user): - '''Return new empty project.''' - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - session.commit() - - def cleanup(): - '''Remove created 
entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -@pytest.fixture() -def project(session): - '''Return same project for entire session.''' - # Test project. - entity = session.get('Project', '5671dcb0-66de-11e1-8e6e-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def new_task(request, session, unique_name): - '''Return a new task.''' - project = session.query( - 'Project where id is 5671dcb0-66de-11e1-8e6e-f23c91df25eb' - ).one() - project_schema = project['project_schema'] - default_task_type = project_schema.get_types('Task')[0] - default_task_status = project_schema.get_statuses( - 'Task', default_task_type['id'] - )[0] - - task = session.create('Task', { - 'name': unique_name, - 'parent': project, - 'status': default_task_status, - 'type': default_task_type - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(task) - session.commit() - - request.addfinalizer(cleanup) - - return task - - -@pytest.fixture() -def task(session): - '''Return same task for entire session.''' - # Tests/python_api/tasks/t1 - entity = session.get('Task', 'adb4ad6c-7679-11e2-8df2-f23c91df25eb') - assert entity is not None - - return entity - - -@pytest.fixture() -def new_scope(request, session, unique_name): - '''Return a new scope.''' - scope = session.create('Scope', { - 'name': unique_name - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(scope) - session.commit() - - request.addfinalizer(cleanup) - - return scope - - -@pytest.fixture() -def new_job(request, session, unique_name, user): - '''Return a new scope.''' - job = session.create('Job', { - 'type': 'api_job', - 'user': user - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(job) - session.commit() - - request.addfinalizer(cleanup) - - return job - - -@pytest.fixture() -def new_note(request, session, 
unique_name, new_task, user): - '''Return a new note attached to a task.''' - note = new_task.create_note(unique_name, user) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(note) - session.commit() - - request.addfinalizer(cleanup) - - return note - - -@pytest.fixture() -def new_asset_version(request, session): - '''Return a new asset version.''' - asset_version = session.create('AssetVersion', { - 'asset_id': 'dd9a7e2e-c5eb-11e1-9885-f23c91df25eb' - }) - session.commit() - - # Do not cleanup the version as that will sometimes result in a deadlock - # database error. - - return asset_version - - -@pytest.fixture() -def new_component(request, session, temporary_file): - '''Return a new component not in any location except origin.''' - component = session.create_component(temporary_file, location=None) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture() -def new_container_component(request, session, temporary_directory): - '''Return a new container component not in any location except origin.''' - component = session.create('ContainerComponent') - - # Add to special origin location so that it is possible to add to other - # locations. 
- origin_location = session.get( - 'Location', ftrack_api.symbol.ORIGIN_LOCATION_ID - ) - origin_location.add_component( - component, temporary_directory, recursive=False - ) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture() -def new_sequence_component(request, session, temporary_sequence): - '''Return a new sequence component not in any location except origin.''' - component = session.create_component(temporary_sequence, location=None) - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(component) - session.commit() - - request.addfinalizer(cleanup) - - return component - - -@pytest.fixture -def mocked_schemas(): - '''Return a list of mocked schemas.''' - return [{ - 'id': 'Foo', - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - }, - 'string': { - 'type': 'string' - }, - 'integer': { - 'type': 'integer' - }, - 'number': { - 'type': 'number' - }, - 'boolean': { - 'type': 'boolean' - }, - 'bars': { - 'type': 'array', - 'items': { - 'ref': '$Bar' - } - }, - 'date': { - 'type': 'string', - 'format': 'date-time' - } - }, - 'immutable': [ - 'id' - ], - 'primary_key': [ - 'id' - ], - 'required': [ - 'id' - ], - 'default_projections': [ - 'id' - ] - }, { - 'id': 'Bar', - 'type': 'object', - 'properties': { - 'id': { - 'type': 'string' - }, - 'name': { - 'type': 'string' - }, - 'computed_value': { - 'type': 'string', - } - }, - 'computed': [ - 'computed_value' - ], - 'immutable': [ - 'id' - ], - 'primary_key': [ - 'id' - ], - 'required': [ - 'id' - ], - 'default_projections': [ - 'id' - ] - }] - - -@pytest.yield_fixture -def mocked_schema_session(mocker, mocked_schemas): - '''Return a session instance with mocked schemas.''' - with mocker.patch.object( - ftrack_api.Session, - '_load_schemas', - return_value=mocked_schemas - ): - # Mock _configure_locations since it will fail if 
no location schemas - # exist. - with mocker.patch.object( - ftrack_api.Session, - '_configure_locations' - ): - patched_session = ftrack_api.Session() - yield patched_session diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py deleted file mode 100644 index bc98f15d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py deleted file mode 100644 index 78d61a62..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_asset_version.py +++ /dev/null @@ -1,54 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack -import json - - -def test_create_component(new_asset_version, temporary_file): - '''Create component on asset version.''' - session = new_asset_version.session - component = new_asset_version.create_component( - temporary_file, location=None - ) - assert component['version'] is new_asset_version - - # Have to delete component before can delete asset version. - session.delete(component) - - -def test_create_component_specifying_different_version( - new_asset_version, temporary_file -): - '''Create component on asset version ignoring specified version.''' - session = new_asset_version.session - component = new_asset_version.create_component( - temporary_file, location=None, - data=dict( - version_id='this-value-should-be-ignored', - version='this-value-should-be-overridden' - ) - ) - assert component['version'] is new_asset_version - - # Have to delete component before can delete asset version. 
- session.delete(component) - - -def test_encode_media(new_asset_version, video_path): - '''Encode media based on a file path - - Encoded components should be associated with the version. - ''' - session = new_asset_version.session - job = new_asset_version.encode_media(video_path) - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'output' in job_data - assert len(job_data['output']) - assert 'component_id' in job_data['output'][0] - - component_id = job_data['output'][0]['component_id'] - component = session.get('FileComponent', component_id) - - # Component should be associated with the version. - assert component['version_id'] == new_asset_version['id'] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py deleted file mode 100644 index aff456e2..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_base.py +++ /dev/null @@ -1,14 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - -import pytest - - -def test_hash(project, task, user): - '''Entities can be hashed.''' - test_set = set() - test_set.add(project) - test_set.add(task) - test_set.add(user) - - assert test_set == set((project, task, user)) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py deleted file mode 100644 index 347c74a5..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_component.py +++ /dev/null @@ -1,70 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack -import os - -import pytest - - -def test_get_availability(new_component): - '''Retrieve availability in locations.''' - session = new_component.session - availability = new_component.get_availability() - - # Note: Currently the origin location is 
also 0.0 as the link is not - # persisted to the server. This may change in future and this test would - # need updating as a result. - assert set(availability.values()) == set([0.0]) - - # Add to a location. - source_location = session.query( - 'Location where name is "ftrack.origin"' - ).one() - - target_location = session.query( - 'Location where name is "ftrack.unmanaged"' - ).one() - - target_location.add_component(new_component, source_location) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. - del new_component['component_locations'] - - availability = new_component.get_availability() - target_availability = availability.pop(target_location['id']) - assert target_availability == 100.0 - - # All other locations should still be 0. - assert set(availability.values()) == set([0.0]) - -@pytest.fixture() -def image_path(): - '''Return a path to an image file.''' - image_path = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image.png' - ) - ) - - return image_path - -def test_create_task_thumbnail(task, image_path): - '''Successfully create thumbnail component and set as task thumbnail.''' - component = task.create_thumbnail(image_path) - component.session.commit() - assert component['id'] == task['thumbnail_id'] - - -def test_create_thumbnail_with_data(task, image_path, unique_name): - '''Successfully create thumbnail component with custom data.''' - data = {'name': unique_name} - component = task.create_thumbnail(image_path, data=data) - component.session.commit() - assert component['name'] == unique_name diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py deleted file mode 100644 index 5d5a0baa..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_factory.py +++ /dev/null @@ -1,25 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.entity.factory - - -class CustomUser(ftrack_api.entity.base.Entity): - '''Represent custom user.''' - - -def test_extend_standard_factory_with_bases(session): - '''Successfully add extra bases to standard factory.''' - standard_factory = ftrack_api.entity.factory.StandardFactory() - - schemas = session._load_schemas(False) - user_schema = [ - schema for schema in schemas if schema['id'] == 'User' - ].pop() - - user_class = standard_factory.create(user_schema, bases=[CustomUser]) - session.types[user_class.entity_type] = user_class - - user = session.query('User').first() - - assert CustomUser in type(user).__mro__ diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py deleted file mode 100644 index 52ddbda0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_job.py +++ /dev/null @@ -1,42 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - - -def test_create_job(session, user): - '''Create job.''' - job = session.create('Job', { - 'user': user - }) - - assert job - session.commit() - assert job['type'] == 'api_job' - - session.delete(job) - session.commit() - - -def test_create_job_with_valid_type(session, user): - '''Create job explicitly specifying valid type.''' - job = session.create('Job', { - 'user': user, - 'type': 'api_job' - }) - - assert job - session.commit() - assert job['type'] == 'api_job' - - session.delete(job) - session.commit() - - -def test_create_job_using_faulty_type(session, user): - '''Fail to create job with faulty type.''' - with pytest.raises(ValueError): - session.create('Job', { - 'user': user, - 'type': 'not-allowed-type' - }) diff --git 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py deleted file mode 100644 index 5bb90e45..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_location.py +++ /dev/null @@ -1,516 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import base64 -import filecmp - -import pytest -import requests - -import ftrack_api.exception -import ftrack_api.accessor.disk -import ftrack_api.structure.origin -import ftrack_api.structure.id -import ftrack_api.entity.location -import ftrack_api.resource_identifier_transformer.base as _transformer -import ftrack_api.symbol - - -class Base64ResourceIdentifierTransformer( - _transformer.ResourceIdentifierTransformer -): - '''Resource identifier transformer for test purposes. - - Store resource identifier as base 64 encoded string. - - ''' - - def encode(self, resource_identifier, context=None): - '''Return encoded *resource_identifier* for storing centrally. - - A mapping of *context* values may be supplied to guide the - transformation. - - ''' - return base64.encodestring(resource_identifier) - - def decode(self, resource_identifier, context=None): - '''Return decoded *resource_identifier* for use locally. - - A mapping of *context* values may be supplied to guide the - transformation. 
- - ''' - return base64.decodestring(resource_identifier) - - -@pytest.fixture() -def new_location(request, session, unique_name, temporary_directory): - '''Return new managed location.''' - location = session.create('Location', { - 'name': 'test-location-{}'.format(unique_name) - }) - - location.accessor = ftrack_api.accessor.disk.DiskAccessor( - prefix=os.path.join(temporary_directory, 'location') - ) - location.structure = ftrack_api.structure.id.IdStructure() - location.priority = 10 - - session.commit() - - def cleanup(): - '''Remove created entity.''' - # First auto-remove all components in location. - for location_component in location['location_components']: - session.delete(location_component) - - # At present, need this intermediate commit otherwise server errors - # complaining that location still has components in it. - session.commit() - - session.delete(location) - session.commit() - - request.addfinalizer(cleanup) - - return location - - -@pytest.fixture() -def new_unmanaged_location(request, session, unique_name): - '''Return new unmanaged location.''' - location = session.create('Location', { - 'name': 'test-location-{}'.format(unique_name) - }) - - # TODO: Change to managed and use a temporary directory cleaned up after. - ftrack_api.mixin( - location, ftrack_api.entity.location.UnmanagedLocationMixin, - name='UnmanagedTestLocation' - ) - location.accessor = ftrack_api.accessor.disk.DiskAccessor(prefix='') - location.structure = ftrack_api.structure.origin.OriginStructure() - location.priority = 10 - - session.commit() - - def cleanup(): - '''Remove created entity.''' - # First auto-remove all components in location. - for location_component in location['location_components']: - session.delete(location_component) - - # At present, need this intermediate commit otherwise server errors - # complaining that location still has components in it. 
- session.commit() - - session.delete(location) - session.commit() - - request.addfinalizer(cleanup) - - return location - - -@pytest.fixture() -def origin_location(session): - '''Return origin location.''' - return session.query('Location where name is "ftrack.origin"').one() - -@pytest.fixture() -def server_location(session): - '''Return server location.''' - return session.get('Location', ftrack_api.symbol.SERVER_LOCATION_ID) - - -@pytest.fixture() -def server_image_component(request, session, server_location): - image_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image.png' - ) - ) - component = session.create_component( - image_file, location=server_location - ) - - def cleanup(): - server_location.remove_component(component) - request.addfinalizer(cleanup) - - return component - - -@pytest.mark.parametrize('name', [ - 'named', - None -], ids=[ - 'named', - 'unnamed' -]) -def test_string_representation(session, name): - '''Return string representation.''' - location = session.create('Location', {'id': '1'}) - if name: - location['name'] = name - assert str(location) == '' - else: - assert str(location) == '' - - -def test_add_components(new_location, origin_location, session, temporary_file): - '''Add components.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - new_location.add_components( - [component_a, component_b], [origin_location, origin_location] - ) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. 
- del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - -def test_add_components_from_single_location( - new_location, origin_location, session, temporary_file -): - '''Add components from single location.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - new_location.add_components([component_a, component_b], origin_location) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. - del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - -def test_add_components_with_mismatching_sources(new_location, new_component): - '''Fail to add components when sources mismatched.''' - with pytest.raises(ValueError): - new_location.add_components([new_component], []) - - -def test_add_components_with_undefined_structure(new_location, mocker): - '''Fail to add components when location structure undefined.''' - mocker.patch.object(new_location, 'structure', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_components([], []) - - -def test_add_components_already_in_location( - session, temporary_file, new_location, new_component, origin_location -): - '''Fail to add components already in location.''' - new_location.add_component(new_component, origin_location) - - another_new_component = session.create_component( - temporary_file, location=None - ) - - with pytest.raises(ftrack_api.exception.ComponentInLocationError): - 
new_location.add_components( - [another_new_component, new_component], origin_location - ) - - -def test_add_component_when_data_already_exists( - new_location, new_component, origin_location -): - '''Fail to add component when data already exists.''' - # Inject pre-existing data on disk. - resource_identifier = new_location.structure.get_resource_identifier( - new_component - ) - container = new_location.accessor.get_container(resource_identifier) - new_location.accessor.make_container(container) - data = new_location.accessor.open(resource_identifier, 'w') - data.close() - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_component_missing_source_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to add component when source is missing accessor.''' - mocker.patch.object(origin_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_component_missing_target_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to add component when target is missing accessor.''' - mocker.patch.object(new_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.add_component(new_component, origin_location) - - -def test_add_container_component( - new_container_component, new_location, origin_location -): - '''Add container component.''' - new_location.add_component(new_container_component, origin_location) - - assert ( - new_location.get_component_availability(new_container_component) - == 100.0 - ) - - -def test_add_sequence_component_recursively( - new_sequence_component, new_location, origin_location -): - '''Add sequence component recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=True - ) - - assert ( - 
new_location.get_component_availability(new_sequence_component) - == 100.0 - ) - - -def test_add_sequence_component_non_recursively( - new_sequence_component, new_location, origin_location -): - '''Add sequence component non recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=False - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_components( - session, new_location, origin_location, temporary_file -): - '''Remove components.''' - component_a = session.create_component( - temporary_file, location=None - ) - component_b = session.create_component( - temporary_file, location=None - ) - - new_location.add_components([component_a, component_b], origin_location) - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [100.0, 100.0] - ) - - new_location.remove_components([ - component_a, component_b - ]) - - # Recalculate availability. - - # Currently have to manually expire the related attribute. This should be - # solved in future by bi-directional relationship updating. 
- del component_a['component_locations'] - del component_b['component_locations'] - - assert ( - new_location.get_component_availabilities([component_a, component_b]) - == [0.0, 0.0] - ) - - -def test_remove_sequence_component_recursively( - new_sequence_component, new_location, origin_location -): - '''Remove sequence component recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=True - ) - - new_location.remove_component( - new_sequence_component, recursive=True - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_sequence_component_non_recursively( - new_sequence_component, new_location, origin_location -): - '''Remove sequence component non recursively.''' - new_location.add_component( - new_sequence_component, origin_location, recursive=False - ) - - new_location.remove_component( - new_sequence_component, recursive=False - ) - - assert ( - new_location.get_component_availability(new_sequence_component) - == 0.0 - ) - - -def test_remove_component_missing_accessor( - new_location, new_component, origin_location, mocker -): - '''Fail to remove component when location is missing accessor.''' - new_location.add_component(new_component, origin_location) - mocker.patch.object(new_location, 'accessor', None) - - with pytest.raises(ftrack_api.exception.LocationError): - new_location.remove_component(new_component) - - -def test_resource_identifier_transformer( - new_component, new_unmanaged_location, origin_location, mocker -): - '''Transform resource identifier.''' - session = new_unmanaged_location.session - - transformer = Base64ResourceIdentifierTransformer(session) - mocker.patch.object( - new_unmanaged_location, 'resource_identifier_transformer', transformer - ) - - new_unmanaged_location.add_component(new_component, origin_location) - - original_resource_identifier = origin_location.get_resource_identifier( - new_component - ) - assert ( - 
new_component['component_locations'][0]['resource_identifier'] - == base64.encodestring(original_resource_identifier) - ) - - assert ( - new_unmanaged_location.get_resource_identifier(new_component) - == original_resource_identifier - ) - - -def test_get_filesystem_path(new_component, new_location, origin_location): - '''Retrieve filesystem path.''' - new_location.add_component(new_component, origin_location) - resource_identifier = new_location.structure.get_resource_identifier( - new_component - ) - expected = os.path.normpath( - os.path.join(new_location.accessor.prefix, resource_identifier) - ) - assert new_location.get_filesystem_path(new_component) == expected - - -def test_get_context(new_component, new_location, origin_location): - '''Retrieve context for component.''' - resource_identifier = origin_location.get_resource_identifier( - new_component - ) - context = new_location._get_context(new_component, origin_location) - assert context == { - 'source_resource_identifier': resource_identifier - } - - -def test_get_context_for_component_not_in_source(new_component, new_location): - '''Retrieve context for component not in source location.''' - context = new_location._get_context(new_component, new_location) - assert context == {} - - -def test_data_transfer(session, new_location, origin_location): - '''Transfer a real file and make sure it is identical.''' - video_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'colour_wheel.mov' - ) - ) - component = session.create_component( - video_file, location=new_location - ) - new_video_file = new_location.get_filesystem_path(component) - - assert filecmp.cmp(video_file, new_video_file) - - -def test_get_thumbnail_url(server_location, server_image_component): - '''Test download a thumbnail image from server location''' - thumbnail_url = server_location.get_thumbnail_url( - server_image_component, - size=10 - ) - assert thumbnail_url - - response = 
requests.get(thumbnail_url) - response.raise_for_status() - - image_file = os.path.abspath( - os.path.join( - os.path.dirname(__file__), - '..', - '..', - 'fixture', - 'media', - 'image-resized-10.png' - ) - ) - expected_image_contents = open(image_file).read() - assert response.content == expected_image_contents diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py deleted file mode 100644 index 3a81fdbe..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_metadata.py +++ /dev/null @@ -1,135 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import ftrack_api - - -def test_query_metadata(new_project): - '''Query metadata.''' - session = new_project.session - - metadata_key = uuid.uuid1().hex - metadata_value = uuid.uuid1().hex - new_project['metadata'][metadata_key] = metadata_value - session.commit() - - results = session.query( - 'Project where metadata.key is {0}'.format(metadata_key) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - results = session.query( - 'Project where metadata.value is {0}'.format(metadata_value) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - results = session.query( - 'Project where metadata.key is {0} and ' - 'metadata.value is {1}'.format(metadata_key, metadata_value) - ) - - assert len(results) == 1 - assert new_project['id'] == results[0]['id'] - - -def test_set_get_metadata_from_different_sessions(new_project): - '''Get and set metadata using different sessions.''' - session = new_project.session - - metadata_key = uuid.uuid1().hex - metadata_value = uuid.uuid1().hex - new_project['metadata'][metadata_key] = metadata_value - session.commit() - - new_session = ftrack_api.Session() - project = new_session.query( - 'Project where id is 
{0}'.format(new_project['id']) - )[0] - - assert project['metadata'][metadata_key] == metadata_value - - project['metadata'][metadata_key] = uuid.uuid1().hex - - new_session.commit() - - new_session = ftrack_api.Session() - project = new_session.query( - 'Project where id is {0}'.format(project['id']) - )[0] - - assert project['metadata'][metadata_key] != metadata_value - - -def test_get_set_multiple_metadata(new_project): - '''Get and set multiple metadata.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1', - 'key2': 'value2' - } - session.commit() - - assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) - - new_session = ftrack_api.Session() - retrieved = new_session.query( - 'Project where id is {0}'.format(new_project['id']) - )[0] - - assert set(retrieved['metadata'].keys()) == set(['key1', 'key2']) - - -def test_metadata_parent_type_remains_in_schema_id_format(session, new_project): - '''Metadata parent_type remains in schema id format post commit.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - assert entity['parent_type'] == new_project.entity_type - - -def test_set_metadata_twice(new_project): - '''Set metadata twice in a row.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1', - 'key2': 'value2' - } - session.commit() - - assert set(new_project['metadata'].keys()) == set(['key1', 'key2']) - - new_project['metadata'] = { - 'key3': 'value3', - 'key4': 'value4' - } - session.commit() - - -def test_set_same_metadata_on_retrieved_entity(new_project): - '''Set same metadata on retrieved entity.''' - session = new_project.session - - new_project['metadata'] = { - 'key1': 'value1' - } - session.commit() - - project = session.get('Project', new_project['id']) - - project['metadata'] = { - 'key1': 'value1' - } - session.commit() diff --git 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py deleted file mode 100644 index 5d854eae..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_note.py +++ /dev/null @@ -1,67 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api -import ftrack_api.inspection - - -def test_create_reply(session, new_note, user, unique_name): - '''Create reply to a note.''' - reply_text = 'My reply on note' - new_note.create_reply(reply_text, user) - - session.commit() - - assert len(new_note['replies']) == 1 - - assert reply_text == new_note['replies'][0]['content'] - - -def test_create_note_on_entity(session, new_task, user, unique_name): - '''Create note attached to an entity.''' - note = new_task.create_note(unique_name, user) - session.commit() - - session.reset() - retrieved_task = session.get(*ftrack_api.inspection.identity(new_task)) - assert len(retrieved_task['notes']) == 1 - assert ( - ftrack_api.inspection.identity(retrieved_task['notes'][0]) - == ftrack_api.inspection.identity(note) - ) - - -def test_create_note_on_entity_specifying_recipients( - session, new_task, user, unique_name, new_user -): - '''Create note with specified recipients attached to an entity.''' - recipient = new_user - note = new_task.create_note(unique_name, user, recipients=[recipient]) - session.commit() - - session.reset() - retrieved_note = session.get(*ftrack_api.inspection.identity(note)) - - # Note: The calling user is automatically added server side so there will be - # 2 recipients. 
- assert len(retrieved_note['recipients']) == 2 - specified_recipient_present = False - for entry in retrieved_note['recipients']: - if entry['resource_id'] == recipient['id']: - specified_recipient_present = True - break - - assert specified_recipient_present - - -def test_create_note_on_entity_specifying_category( - session, new_task, user, unique_name -): - '''Create note with specified category attached to an entity.''' - category = session.query('NoteCategory').first() - note = new_task.create_note(unique_name, user, category=category) - session.commit() - - session.reset() - retrieved_note = session.get(*ftrack_api.inspection.identity(note)) - assert retrieved_note['category']['id'] == category['id'] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py deleted file mode 100644 index 10ef485a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_project_schema.py +++ /dev/null @@ -1,64 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest - - -@pytest.mark.parametrize('schema, expected', [ - ('Task', [ - 'Not started', 'In progress', 'Awaiting approval', 'Approved' - ]), - ('Shot', [ - 'Normal', 'Omitted', 'On Hold' - ]), - ('AssetVersion', [ - 'Approved', 'Pending' - ]), - ('AssetBuild', [ - 'Normal', 'Omitted', 'On Hold' - ]), - ('Invalid', ValueError) -], ids=[ - 'task', - 'shot', - 'asset version', - 'asset build', - 'invalid' -]) -def test_get_statuses(project_schema, schema, expected): - '''Retrieve statuses for schema and optional type.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - project_schema.get_statuses(schema) - - else: - statuses = project_schema.get_statuses(schema) - status_names = [status['name'] for status in statuses] - assert sorted(status_names) == 
sorted(expected) - - -@pytest.mark.parametrize('schema, expected', [ - ('Task', [ - 'Generic', 'Animation', 'Modeling', 'Previz', 'Lookdev', 'Hair', - 'Cloth', 'FX', 'Lighting', 'Compositing', 'Tracking', 'Rigging', - 'test 1', 'test type 2' - ]), - ('AssetBuild', ['Character', 'Prop', 'Environment', 'Matte Painting']), - ('Invalid', ValueError) -], ids=[ - 'task', - 'asset build', - 'invalid' -]) -def test_get_types(project_schema, schema, expected): - '''Retrieve types for schema.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - project_schema.get_types(schema) - - else: - types = project_schema.get_types(schema) - type_names = [type_['name'] for type_ in types] - assert sorted(type_names) == sorted(expected) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py deleted file mode 100644 index 1a5afe70..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_scopes.py +++ /dev/null @@ -1,24 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - - -def test_add_remove_and_query_scopes_for_tasks(session, new_task, new_scope): - '''Add, remove and query scopes for task.''' - query_string = 'Task where scopes.name is {0}'.format(new_scope['name']) - tasks = session.query(query_string) - - assert len(tasks) == 0 - - new_task['scopes'].append(new_scope) - session.commit() - - tasks = session.query(query_string) - - assert len(tasks) == 1 and tasks[0] == new_task - - new_task['scopes'].remove(new_scope) - session.commit() - - tasks = session.query(query_string) - - assert len(tasks) == 0 diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py deleted file mode 100644 index 4d7e4550..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/entity/test_user.py +++ /dev/null @@ -1,49 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2016 ftrack - - -def test_force_start_timer(new_user, task): - '''Successfully force starting a timer when another timer is running.''' - first_timer = new_user.start_timer(context=task) - second_timer = new_user.start_timer(context=task, force=True) - - assert first_timer['id'] - assert second_timer['id'] - assert first_timer['id'] != second_timer['id'] - - -def test_timer_creates_timelog(new_user, task, unique_name): - '''Successfully create time log when stopping timer. - - A timer which was immediately stopped should have a duration less than - a minute. - - ''' - comment = 'comment' + unique_name - timer = new_user.start_timer( - context=task, - name=unique_name, - comment=comment - ) - timer_start = timer['start'] - timelog = new_user.stop_timer() - - assert timelog['user_id'] == new_user['id'] - assert timelog['context_id']== task['id'] - assert timelog['name'] == unique_name - assert timelog['comment'] == comment - assert timelog['start'] == timer_start - assert isinstance(timelog['duration'], (int, long, float)) - assert timelog['duration'] < 60 - - -def test_reset_user_api_key(new_user): - '''Test resetting of api keys.''' - - api_keys = list() - for i in range(0, 10): - api_keys.append(new_user.reset_api_key()) - - # make sure all api keys are unique - assert len(set(api_keys)) == 10 - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py deleted file mode 100644 index bc98f15d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py 
b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py deleted file mode 100644 index 09b270a0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/event_hub_server_heartbeat.py +++ /dev/null @@ -1,92 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import sys -import time -import logging -import argparse - -import ftrack_api -from ftrack_api.event.base import Event - - -TOPIC = 'test_event_hub_server_heartbeat' -RECEIVED = [] - - -def callback(event): - '''Track received messages.''' - counter = event['data']['counter'] - RECEIVED.append(counter) - print('Received message {0} ({1} in total)'.format(counter, len(RECEIVED))) - - -def main(arguments=None): - '''Publish and receive heartbeat test.''' - parser = argparse.ArgumentParser() - parser.add_argument('mode', choices=['publish', 'subscribe']) - - namespace = parser.parse_args(arguments) - logging.basicConfig(level=logging.INFO) - - session = ftrack_api.Session() - - message_count = 100 - sleep_time_per_message = 1 - - if namespace.mode == 'publish': - max_atempts = 100 - retry_interval = 0.1 - atempt = 0 - while not session.event_hub.connected: - print ( - 'Session is not yet connected to event hub, sleeping for 0.1s' - ) - time.sleep(retry_interval) - - atempt = atempt + 1 - if atempt > max_atempts: - raise Exception( - 'Unable to connect to server within {0} seconds'.format( - max_atempts * retry_interval - ) - ) - - print('Sending {0} messages...'.format(message_count)) - - for counter in range(1, message_count + 1): - session.event_hub.publish( - Event(topic=TOPIC, data=dict(counter=counter)) - ) - print('Sent message {0}'.format(counter)) - - if counter < message_count: - time.sleep(sleep_time_per_message) - - elif namespace.mode == 'subscribe': - session.event_hub.subscribe('topic={0}'.format(TOPIC), callback) - session.event_hub.wait( - duration=( - ((message_count - 1) * sleep_time_per_message) + 15 - ) - ) 
- - if len(RECEIVED) != message_count: - print( - '>> Failed to receive all messages. Dropped {0} <<' - .format(message_count - len(RECEIVED)) - ) - return False - - # Give time to flush all buffers. - time.sleep(5) - - return True - - -if __name__ == '__main__': - result = main(sys.argv[1:]) - if not result: - raise SystemExit(1) - else: - raise SystemExit(0) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py deleted file mode 100644 index d9496fe0..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_base.py +++ /dev/null @@ -1,36 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.event.base - - -def test_string_representation(): - '''String representation.''' - event = ftrack_api.event.base.Event('test', id='some-id') - assert str(event) == ( - "" - ) - - -def test_stop(): - '''Set stopped flag on event.''' - event = ftrack_api.event.base.Event('test', id='some-id') - - assert event.is_stopped() is False - - event.stop() - assert event.is_stopped() is True - - -def test_is_stopped(): - '''Report stopped status of event.''' - event = ftrack_api.event.base.Event('test', id='some-id') - - assert event.is_stopped() is False - - event.stop() - assert event.is_stopped() is True - - event.stop() - assert event.is_stopped() is True diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py deleted file mode 100644 index 4cf68b58..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_expression.py +++ /dev/null @@ -1,174 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import operator -import inspect - -import pytest - -from ftrack_api.event.expression import ( - Expression, All, Any, Not, 
Condition, Parser -) -from ftrack_api.exception import ParseError - - -@pytest.fixture() -def candidate(): - '''Return common candidate to test expressions against.''' - return { - 'id': 10, - 'name': 'value', - 'change': { - 'name': 'value', - 'new_value': 10 - } - } - - -@pytest.mark.parametrize('expression, expected', [ - pytest.mark.xfail(('', Expression())), - ('invalid', ParseError), - ('key=value nor other=value', ParseError), - ('key=value', Condition('key', operator.eq, 'value')), - ('key="value"', Condition('key', operator.eq, 'value')), - ( - 'a=b and ((c=d or e!=f) and not g.h > 10)', - All([ - Condition('a', operator.eq, 'b'), - All([ - Any([ - Condition('c', operator.eq, 'd'), - Condition('e', operator.ne, 'f') - ]), - Not( - Condition('g.h', operator.gt, 10) - ) - ]) - ]) - ) -], ids=[ - 'empty expression', - 'invalid expression', - 'invalid conjunction', - 'basic condition', - 'basic quoted condition', - 'complex condition' -]) -def test_parser_parse(expression, expected): - '''Parse expression into Expression instances.''' - parser = Parser() - - if inspect.isclass(expected)and issubclass(expected, Exception): - with pytest.raises(expected): - parser.parse(expression) - else: - assert str(parser.parse(expression)) == str(expected) - - -@pytest.mark.parametrize('expression, expected', [ - (Expression(), ''), - (All([Expression(), Expression()]), ' ]>'), - (Any([Expression(), Expression()]), ' ]>'), - (Not(Expression()), '>'), - (Condition('key', '=', 'value'), '') -], ids=[ - 'Expression', - 'All', - 'Any', - 'Not', - 'Condition' -]) -def test_string_representation(expression, expected): - '''String representation of expression.''' - assert str(expression) == expected - - -@pytest.mark.parametrize('expression, expected', [ - # Expression - (Expression(), True), - - # All - (All(), True), - (All([Expression(), Expression()]), True), - (All([Expression(), Condition('test', operator.eq, 'value')]), False), - - # Any - (Any(), False), - 
(Any([Expression(), Condition('test', operator.eq, 'value')]), True), - (Any([ - Condition('test', operator.eq, 'value'), - Condition('other', operator.eq, 'value') - ]), False), - - # Not - (Not(Expression()), False), - (Not(Not(Expression())), True) -], ids=[ - 'Expression-always matches', - - 'All-no expressions always matches', - 'All-all match', - 'All-not all match', - - 'Any-no expressions never matches', - 'Any-some match', - 'Any-none match', - - 'Not-invert positive match', - 'Not-double negative is positive match' -]) -def test_match(expression, candidate, expected): - '''Determine if candidate matches expression.''' - assert expression.match(candidate) is expected - - -def parametrize_test_condition_match(metafunc): - '''Parametrize condition_match tests.''' - identifiers = [] - data = [] - - matrix = { - # Operator, match, no match - operator.eq: { - 'match': 10, 'no-match': 20, - 'wildcard-match': 'valu*', 'wildcard-no-match': 'values*' - }, - operator.ne: {'match': 20, 'no-match': 10}, - operator.ge: {'match': 10, 'no-match': 20}, - operator.le: {'match': 10, 'no-match': 0}, - operator.gt: {'match': 0, 'no-match': 10}, - operator.lt: {'match': 20, 'no-match': 10} - } - - for operator_function, values in matrix.items(): - for value_label, value in values.items(): - if value_label.startswith('wildcard'): - key_options = { - 'plain': 'name', - 'nested': 'change.name' - } - else: - key_options = { - 'plain': 'id', - 'nested': 'change.new_value' - } - - for key_label, key in key_options.items(): - identifiers.append('{} operator {} key {}'.format( - operator_function.__name__, key_label, value_label - )) - - data.append(( - key, operator_function, value, - 'no-match' not in value_label - )) - - metafunc.parametrize( - 'key, operator, value, expected', data, ids=identifiers - ) - - -def test_condition_match(key, operator, value, candidate, expected): - '''Determine if candidate matches condition expression.''' - condition = Condition(key, operator, value) 
- assert condition.match(candidate) is expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py deleted file mode 100644 index 6f1920dd..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_hub.py +++ /dev/null @@ -1,701 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect -import json -import os -import time -import subprocess -import sys - -import pytest - -import ftrack_api.event.hub -import ftrack_api.event.subscriber -from ftrack_api.event.base import Event -import ftrack_api.exception - - -class MockClass(object): - '''Mock class for testing.''' - - def method(self): - '''Mock method for testing.''' - - -def mockFunction(): - '''Mock function for testing.''' - - -class MockConnection(object): - '''Mock connection for testing.''' - - @property - def connected(self): - '''Return whether connected.''' - return True - - def close(self): - '''Close mock connection.''' - pass - - -def assert_callbacks(hub, callbacks): - '''Assert hub has exactly *callbacks* subscribed.''' - # Subscribers always starts with internal handle_reply subscriber. - subscribers = hub._subscribers[:] - subscribers.pop(0) - - if len(subscribers) != len(callbacks): - raise AssertionError( - 'Number of subscribers ({0}) != number of callbacks ({1})' - .format(len(subscribers), len(callbacks)) - ) - - for index, subscriber in enumerate(subscribers): - if subscriber.callback != callbacks[index]: - raise AssertionError( - 'Callback at {0} != subscriber callback at same index.' - .format(index) - ) - - -@pytest.fixture() -def event_hub(request, session): - '''Return event hub to test against. - - Hub is automatically connected at start of test and disconnected at end. 
- - ''' - hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - hub.connect() - - def cleanup(): - '''Cleanup.''' - if hub.connected: - hub.disconnect() - - request.addfinalizer(cleanup) - - return hub - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000') -], ids=[ - 'with port', - 'without port' -]) -def test_get_server_url(server_url, expected): - '''Return server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_server_url() == expected - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'test.ftrackapp.com'), - ('https://test.ftrackapp.com:9000', 'test.ftrackapp.com:9000') -], ids=[ - 'with port', - 'without port' -]) -def test_get_network_location(server_url, expected): - '''Return network location of server url.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_network_location() == expected - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', True), - ('http://test.ftrackapp.com', False) -], ids=[ - 'secure', - 'not secure' -]) -def test_secure_property(server_url, expected, mocker): - '''Return whether secure connection used.''' - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.secure is expected - - -def test_connected_property(session): - '''Return connected state.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - assert event_hub.connected is False - - event_hub.connect() - assert event_hub.connected is True - - event_hub.disconnect() - assert event_hub.connected is False - - -@pytest.mark.parametrize('server_url, expected', [ - ('https://test.ftrackapp.com', 'https://test.ftrackapp.com'), 
- ('https://test.ftrackapp.com:9000', 'https://test.ftrackapp.com:9000'), - ('test.ftrackapp.com', ValueError), - ('https://:9000', ValueError), -], ids=[ - 'with port', - 'without port', - 'missing scheme', - 'missing hostname' -]) -def test_initialise_against_server_url(server_url, expected): - '''Initialise against server url.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - else: - event_hub = ftrack_api.event.hub.EventHub( - server_url, 'user', 'key' - ) - assert event_hub.get_server_url() == expected - - -def test_connect(session): - '''Connect.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.connect() - - assert event_hub.connected is True - event_hub.disconnect() - - -def test_connect_when_already_connected(event_hub): - '''Fail to connect when already connected''' - assert event_hub.connected is True - - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.connect() - - assert 'Already connected' in str(error) - - -def test_connect_failure(session, mocker): - '''Fail to connect to server.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - def force_fail(*args, **kwargs): - '''Force connection failure.''' - raise Exception('Forced fail.') - - mocker.patch('websocket.create_connection', force_fail) - with pytest.raises(ftrack_api.exception.EventHubConnectionError): - event_hub.connect() - - -def test_connect_missing_required_transport(session, mocker, caplog): - '''Fail to connect to server that does not provide correct transport.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - original_get_socket_io_session = event_hub._get_socket_io_session - - def _get_socket_io_session(): - '''Patched to return no 
transports.''' - session = original_get_socket_io_session() - return ftrack_api.event.hub.SocketIoSession( - session[0], session[1], [] - ) - - mocker.patch.object( - event_hub, '_get_socket_io_session', _get_socket_io_session - ) - - with pytest.raises(ftrack_api.exception.EventHubConnectionError): - event_hub.connect() - - logs = caplog.records() - assert ( - 'Server does not support websocket sessions.' in str(logs[-1].exc_info) - ) - - -def test_disconnect(event_hub): - '''Disconnect and unsubscribe all subscribers.''' - event_hub.disconnect() - assert len(event_hub._subscribers) == 0 - assert event_hub.connected is False - - -def test_disconnect_without_unsubscribing(event_hub): - '''Disconnect without unsubscribing all subscribers.''' - event_hub.disconnect(unsubscribe=False) - assert len(event_hub._subscribers) > 0 - assert event_hub.connected is False - - -def test_close_connection_from_manually_connected_hub(session_no_autoconnect_hub): - '''Close connection from manually connected hub.''' - session_no_autoconnect_hub.event_hub.connect() - session_no_autoconnect_hub.close() - assert session_no_autoconnect_hub.event_hub.connected is False - - -def test_disconnect_when_not_connected(session): - '''Fail to disconnect when not connected''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.disconnect() - - assert 'Not currently connected' in str(error) - - -def test_reconnect(event_hub): - '''Reconnect successfully.''' - assert event_hub.connected is True - event_hub.reconnect() - assert event_hub.connected is True - - -def test_reconnect_when_not_connected(session): - '''Reconnect successfully even if not already connected.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - assert event_hub.connected is False - - event_hub.reconnect() - assert 
event_hub.connected is True - - event_hub.disconnect() - - -def test_fail_to_reconnect(session, mocker): - '''Fail to reconnect.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.connect() - assert event_hub.connected is True - - def force_fail(*args, **kwargs): - '''Force connection failure.''' - raise Exception('Forced fail.') - - mocker.patch('websocket.create_connection', force_fail) - - attempts = 2 - with pytest.raises(ftrack_api.exception.EventHubConnectionError) as error: - event_hub.reconnect(attempts=attempts, delay=0.5) - - assert 'Failed to reconnect to event server' in str(error) - assert 'after {} attempts'.format(attempts) in str(error) - - -def test_wait(event_hub): - '''Wait for event and handle as they arrive.''' - called = {'callback': False} - - def callback(event): - called['callback'] = True - - event_hub.subscribe('topic=test-subscribe', callback) - - event_hub.publish(Event(topic='test-subscribe')) - - # Until wait, the event should not have been processed even if received. - time.sleep(1) - assert called == {'callback': False} - - event_hub.wait(2) - assert called == {'callback': True} - - -def test_wait_interrupted_by_disconnect(event_hub): - '''Interrupt wait loop with disconnect event.''' - wait_time = 5 - start = time.time() - - # Inject event directly for test purposes. 
- event = Event(topic='ftrack.meta.disconnected') - event_hub._event_queue.put(event) - - event_hub.wait(wait_time) - - assert time.time() - start < wait_time - - -@pytest.mark.parametrize('identifier, registered', [ - ('registered-test-subscriber', True), - ('unregistered-test-subscriber', False) -], ids=[ - 'registered', - 'missing' -]) -def test_get_subscriber_by_identifier(event_hub, identifier, registered): - '''Return subscriber by identifier.''' - def callback(event): - pass - - subscriber = { - 'id': 'registered-test-subscriber' - } - - event_hub.subscribe('topic=test-subscribe', callback, subscriber) - retrieved = event_hub.get_subscriber_by_identifier(identifier) - - if registered: - assert isinstance(retrieved, ftrack_api.event.subscriber.Subscriber) - assert retrieved.metadata.get('id') == subscriber['id'] - else: - assert retrieved is None - - -def test_subscribe(event_hub): - '''Subscribe to topics.''' - called = {'a': False, 'b': False} - - def callback_a(event): - called['a'] = True - - def callback_b(event): - called['b'] = True - - event_hub.subscribe('topic=test-subscribe', callback_a) - event_hub.subscribe('topic=test-subscribe-other', callback_b) - - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - - assert called == {'a': True, 'b': False} - - -def test_subscribe_before_connected(session): - '''Subscribe to topic before connected.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - called = {'callback': False} - - def callback(event): - called['callback'] = True - - identifier = 'test-subscriber' - event_hub.subscribe( - 'topic=test-subscribe', callback, subscriber={'id': identifier} - ) - assert event_hub.get_subscriber_by_identifier(identifier) is not None - - event_hub.connect() - - try: - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - finally: - event_hub.disconnect() - - assert called == {'callback': True} - - -def 
test_duplicate_subscriber(event_hub): - '''Fail to subscribe same subscriber more than once.''' - subscriber = {'id': 'test-subscriber'} - event_hub.subscribe('topic=test', None, subscriber=subscriber) - - with pytest.raises(ftrack_api.exception.NotUniqueError) as error: - event_hub.subscribe('topic=test', None, subscriber=subscriber) - - assert '{0} already exists'.format(subscriber['id']) in str(error) - - -def test_unsubscribe(event_hub): - '''Unsubscribe a specific callback.''' - def callback_a(event): - pass - - def callback_b(event): - pass - - identifier_a = event_hub.subscribe('topic=test', callback_a) - identifier_b = event_hub.subscribe('topic=test', callback_b) - - assert_callbacks(event_hub, [callback_a, callback_b]) - - event_hub.unsubscribe(identifier_a) - - # Unsubscribe requires confirmation event so wait here to give event a - # chance to process. - time.sleep(5) - - assert_callbacks(event_hub, [callback_b]) - - -def test_unsubscribe_whilst_disconnected(event_hub): - '''Unsubscribe whilst disconnected.''' - identifier = event_hub.subscribe('topic=test', None) - event_hub.disconnect(unsubscribe=False) - - event_hub.unsubscribe(identifier) - assert_callbacks(event_hub, []) - - -def test_unsubscribe_missing_subscriber(event_hub): - '''Fail to unsubscribe a non-subscribed subscriber.''' - identifier = 'non-subscribed-subscriber' - with pytest.raises(ftrack_api.exception.NotFoundError) as error: - event_hub.unsubscribe(identifier) - - assert ( - 'missing subscriber with identifier {}'.format(identifier) - in str(error) - ) - - -@pytest.mark.parametrize('event_data', [ - dict(source=dict(id='1', user=dict(username='auto'))), - dict(source=dict(user=dict(username='auto'))), - dict(source=dict(id='1')), - dict() -], ids=[ - 'pre-prepared', - 'missing id', - 'missing user', - 'no source' -]) -def test_prepare_event(session, event_data): - '''Prepare event.''' - # Replace username `auto` in event data with API user. 
- try: - if event_data['source']['user']['username'] == 'auto': - event_data['source']['user']['username'] = session.api_user - except KeyError: - pass - - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - event_hub.id = '1' - - event = Event('test', id='event-id', **event_data) - expected = Event( - 'test', id='event-id', source=dict(id='1', user=dict(username=session.api_user)) - ) - event_hub._prepare_event(event) - assert event == expected - - -def test_prepare_reply_event(session): - '''Prepare reply event.''' - event_hub = ftrack_api.event.hub.EventHub( - session.server_url, session.api_user, session.api_key - ) - - source_event = Event('source', source=dict(id='source-id')) - reply_event = Event('reply') - - event_hub._prepare_reply_event(reply_event, source_event) - assert source_event['source']['id'] in reply_event['target'] - assert reply_event['in_reply_to_event'] == source_event['id'] - - event_hub._prepare_reply_event(reply_event, source_event, {'id': 'source'}) - assert reply_event['source'] == {'id': 'source'} - - -def test_publish(event_hub): - '''Publish asynchronous event.''' - called = {'callback': False} - - def callback(event): - called['callback'] = True - - event_hub.subscribe('topic=test-subscribe', callback) - - event_hub.publish(Event(topic='test-subscribe')) - event_hub.wait(2) - - assert called == {'callback': True} - - -def test_publish_raising_error(event_hub): - '''Raise error, when configured, on failed publish.''' - # Note that the event hub currently only fails publish when not connected. - # All other errors are inconsistently swallowed. 
- event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - - with pytest.raises(Exception): - event_hub.publish(event, on_error='raise') - - -def test_publish_ignoring_error(event_hub): - '''Ignore error, when configured, on failed publish.''' - # Note that the event hub currently only fails publish when not connected. - # All other errors are inconsistently swallowed. - event_hub.disconnect() - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event, on_error='ignore') - - -def test_publish_logs_other_errors(event_hub, caplog, mocker): - '''Log publish errors other than connection error.''' - # Mock connection to force error. - mocker.patch.object(event_hub, '_connection', MockConnection()) - - event = Event(topic='a-topic', data=dict(status='fail')) - event_hub.publish(event) - - expected = 'Error sending event {0}.'.format(event) - messages = [record.getMessage().strip() for record in caplog.records()] - assert expected in messages, 'Expected log message missing in output.' 
- - -def test_synchronous_publish(event_hub): - '''Publish event synchronously and collect results.''' - def callback_a(event): - return 'A' - - def callback_b(event): - return 'B' - - def callback_c(event): - return 'C' - - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) - - results = event_hub.publish(Event(topic='test'), synchronous=True) - assert results == ['A', 'B', 'C'] - - -def test_publish_with_reply(event_hub): - '''Publish asynchronous event with on reply handler.''' - - def replier(event): - '''Replier.''' - return 'Replied' - - event_hub.subscribe('topic=test', replier) - - called = {'callback': None} - - def on_reply(event): - called['callback'] = event['data'] - - event_hub.publish(Event(topic='test'), on_reply=on_reply) - event_hub.wait(2) - - assert called['callback'] == 'Replied' - - -def test_publish_with_multiple_replies(event_hub): - '''Publish asynchronous event and retrieve multiple replies.''' - - def replier_one(event): - '''Replier.''' - return 'One' - - def replier_two(event): - '''Replier.''' - return 'Two' - - event_hub.subscribe('topic=test', replier_one) - event_hub.subscribe('topic=test', replier_two) - - called = {'callback': []} - - def on_reply(event): - called['callback'].append(event['data']) - - event_hub.publish(Event(topic='test'), on_reply=on_reply) - event_hub.wait(2) - - assert sorted(called['callback']) == ['One', 'Two'] - - -@pytest.mark.slow -def test_server_heartbeat_response(): - '''Maintain connection by responding to server heartbeat request.''' - test_script = os.path.join( - os.path.dirname(__file__), 'event_hub_server_heartbeat.py' - ) - - # Start subscriber that will listen for all three messages. - subscriber = subprocess.Popen([sys.executable, test_script, 'subscribe']) - - # Give subscriber time to connect to server. 
- time.sleep(10) - - # Start publisher to publish three messages. - publisher = subprocess.Popen([sys.executable, test_script, 'publish']) - - publisher.wait() - subscriber.wait() - - assert subscriber.returncode == 0 - - -def test_stop_event(event_hub): - '''Stop processing of subsequent local handlers when stop flag set.''' - called = { - 'a': False, - 'b': False, - 'c': False - } - - def callback_a(event): - called['a'] = True - - def callback_b(event): - called['b'] = True - event.stop() - - def callback_c(event): - called['c'] = True - - event_hub.subscribe('topic=test', callback_a, priority=50) - event_hub.subscribe('topic=test', callback_b, priority=60) - event_hub.subscribe('topic=test', callback_c, priority=70) - - event_hub.publish(Event(topic='test')) - event_hub.wait(2) - - assert called == { - 'a': True, - 'b': True, - 'c': False - } - - -def test_encode(session): - '''Encode event data.''' - encoded = session.event_hub._encode( - dict(name='ftrack.event', args=[Event('test')]) - ) - assert 'inReplyToEvent' in encoded - assert 'in_reply_to_event' not in encoded - - -def test_decode(session): - '''Decode event data.''' - decoded = session.event_hub._decode( - json.dumps({ - 'inReplyToEvent': 'id' - }) - ) - - assert 'in_reply_to_event' in decoded - assert 'inReplyToEvent' not in decoded diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py deleted file mode 100644 index dc8ac69f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscriber.py +++ /dev/null @@ -1,33 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.event.subscriber -from ftrack_api.event.base import Event - - -def test_string_representation(): - '''String representation.''' - subscriber = ftrack_api.event.subscriber.Subscriber( - 'topic=test', lambda x: None, 
{'meta': 'info'}, 100 - ) - - assert str(subscriber) == ( - '' - ) - - -@pytest.mark.parametrize('expression, event, expected', [ - ('topic=test', Event(topic='test'), True), - ('topic=test', Event(topic='other-test'), False) -], ids=[ - 'interested', - 'not interested' -]) -def test_interested_in(expression, event, expected): - '''Determine if subscriber interested in event.''' - subscriber = ftrack_api.event.subscriber.Subscriber( - expression, lambda x: None, {'meta': 'info'}, 100 - ) - assert subscriber.interested_in(event) is expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py deleted file mode 100644 index 1535309f..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/event/test_subscription.py +++ /dev/null @@ -1,28 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.event.subscription -from ftrack_api.event.base import Event - - -def test_string_representation(): - '''String representation is subscription expression.''' - expression = 'topic=some-topic' - subscription = ftrack_api.event.subscription.Subscription(expression) - - assert str(subscription) == expression - - -@pytest.mark.parametrize('expression, event, expected', [ - ('topic=test', Event(topic='test'), True), - ('topic=test', Event(topic='other-test'), False) -], ids=[ - 'match', - 'no match' -]) -def test_includes(expression, event, expected): - '''Subscription includes event.''' - subscription = ftrack_api.event.subscription.Subscription(expression) - assert subscription.includes(event) is expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py deleted file mode 100644 index bc98f15d..00000000 
--- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py deleted file mode 100644 index 51c896f9..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/resource_identifier_transformer/test_base.py +++ /dev/null @@ -1,36 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.resource_identifier_transformer.base as _transformer - - -@pytest.fixture() -def transformer(session): - '''Return instance of ResourceIdentifierTransformer.''' - return _transformer.ResourceIdentifierTransformer(session) - - -@pytest.mark.parametrize('resource_identifier, context, expected', [ - ('identifier', None, 'identifier'), - ('identifier', {'user': {'username': 'user'}}, 'identifier') -], ids=[ - 'no context', - 'basic context' -]) -def test_encode(transformer, resource_identifier, context, expected): - '''Encode resource identifier.''' - assert transformer.encode(resource_identifier, context) == expected - - -@pytest.mark.parametrize('resource_identifier, context, expected', [ - ('identifier', None, 'identifier'), - ('identifier', {'user': {'username': 'user'}}, 'identifier') -], ids=[ - 'no context', - 'basic context' -]) -def test_decode(transformer, resource_identifier, context, expected): - '''Encode resource identifier.''' - assert transformer.decode(resource_identifier, context) == expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py deleted file mode 100644 index bc98f15d..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py deleted file mode 100644 index dbf91ead..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_base.py +++ /dev/null @@ -1,31 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.structure.base - - -class Concrete(ftrack_api.structure.base.Structure): - '''Concrete implementation to allow testing non-abstract methods.''' - - def get_resource_identifier(self, entity, context=None): - '''Return a resource identifier for supplied *entity*. - - *context* can be a mapping that supplies additional information. - - ''' - return 'resource_identifier' - - -@pytest.mark.parametrize('sequence, expected', [ - ({'padding': None}, '%d'), - ({'padding': 4}, '%04d') -], ids=[ - 'no padding', - 'padded' -]) -def test_get_sequence_expression(sequence, expected): - '''Get sequence expression from sequence.''' - structure = Concrete() - assert structure._get_sequence_expression(sequence) == expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py deleted file mode 100644 index 01ccb35a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_entity_id.py +++ /dev/null @@ -1,49 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest -import mock - -import ftrack_api -import ftrack_api.structure.entity_id - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return 
ftrack_api.structure.entity_id.EntityIdStructure() - - -# Note: When it is possible to use indirect=True on just a few arguments, the -# called functions here can change to standard fixtures. -# https://github.com/pytest-dev/pytest/issues/579 - -def valid_entity(): - '''Return valid entity.''' - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': 'file_component', - 'file_type': '.png' - }) - - return entity - - -@pytest.mark.parametrize('entity, context, expected', [ - (valid_entity(), {}, 'f6cd40cb-d1c0-469f-a2d5-10369be8a724'), - (mock.Mock(), {}, Exception) -], ids=[ - 'valid-entity', - 'non-entity' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py deleted file mode 100644 index ef81da2d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_id.py +++ /dev/null @@ -1,115 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest - -import ftrack_api -import ftrack_api.structure.id - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.id.IdStructure(prefix='path') - - -# Note: When it is possible to use indirect=True on just a few arguments, the -# called functions here can change to standard fixtures. 
-# https://github.com/pytest-dev/pytest/issues/579 - -def file_component(container=None): - '''Return file component.''' - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'id': 'f6cd40cb-d1c0-469f-a2d5-10369be8a724', - 'name': '0001', - 'file_type': '.png', - 'container': container - }) - - return entity - - -def sequence_component(padding=0): - '''Return sequence component with *padding*.''' - session = ftrack_api.Session() - - entity = session.create('SequenceComponent', { - 'id': 'ff17edad-2129-483b-8b59-d1a654c8497b', - 'name': 'sequence_component', - 'file_type': '.png', - 'padding': padding - }) - - return entity - - -def container_component(): - '''Return container component.''' - session = ftrack_api.Session() - - entity = session.create('ContainerComponent', { - 'id': '03ab9967-f86c-4b55-8252-cd187d0c244a', - 'name': 'container_component' - }) - - return entity - - -def unsupported_entity(): - '''Return an unsupported entity.''' - session = ftrack_api.Session() - - entity = session.create('User', { - 'username': 'martin' - }) - - return entity - - -@pytest.mark.parametrize('entity, context, expected', [ - ( - file_component(), {}, - 'path/f/6/c/d/40cb-d1c0-469f-a2d5-10369be8a724.png' - ), - ( - file_component(container_component()), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a/' - 'f6cd40cb-d1c0-469f-a2d5-10369be8a724.png' - ), - ( - file_component(sequence_component()), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.0001.png' - ), - ( - sequence_component(padding=0), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%d.png' - ), - ( - sequence_component(padding=4), {}, - 'path/f/f/1/7/edad-2129-483b-8b59-d1a654c8497b/file.%04d.png' - ), - ( - container_component(), {}, - 'path/0/3/a/b/9967-f86c-4b55-8252-cd187d0c244a' - ), - (unsupported_entity(), {}, NotImplementedError) -], ids=[ - 'file-component', - 'file-component-in-container', - 'file-component-in-sequence', - 'unpadded-sequence-component', 
- 'padded-sequence-component', - 'container-component', - 'unsupported-entity' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py deleted file mode 100644 index e294e04a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_origin.py +++ /dev/null @@ -1,33 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import inspect - -import pytest -import mock - -import ftrack_api.structure.origin - - -@pytest.fixture(scope='session') -def structure(): - '''Return structure.''' - return ftrack_api.structure.origin.OriginStructure() - - -@pytest.mark.parametrize('entity, context, expected', [ - (mock.Mock(), {'source_resource_identifier': 'identifier'}, 'identifier'), - (mock.Mock(), {}, ValueError), - (mock.Mock(), None, ValueError) -], ids=[ - 'valid-context', - 'invalid-context', - 'unspecified-context' -]) -def test_get_resource_identifier(structure, entity, context, expected): - '''Get resource identifier.''' - if inspect.isclass(expected) and issubclass(expected, Exception): - with pytest.raises(expected): - structure.get_resource_identifier(entity, context) - else: - assert structure.get_resource_identifier(entity, context) == expected diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py deleted file mode 100644 index dd72f8ec..00000000 --- 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/structure/test_standard.py +++ /dev/null @@ -1,309 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api -import ftrack_api.structure.standard - - -@pytest.fixture(scope='session') -def new_project(request): - '''Return new empty project.''' - session = ftrack_api.Session() - - project_schema = session.query('ProjectSchema').first() - project_name = 'python_api_test_{0}'.format(uuid.uuid1().hex) - project = session.create('Project', { - 'name': project_name, - 'full_name': project_name + '_full', - 'project_schema': project_schema - }) - - session.commit() - - def cleanup(): - '''Remove created entity.''' - session.delete(project) - session.commit() - - request.addfinalizer(cleanup) - - return project - - -def new_container_component(): - '''Return container component.''' - session = ftrack_api.Session() - - entity = session.create('ContainerComponent', { - 'name': 'container_component' - }) - - return entity - - -def new_sequence_component(): - '''Return sequence component.''' - session = ftrack_api.Session() - - entity = session.create_component( - '/tmp/foo/%04d.jpg [1-10]', location=None, data={'name': 'baz'} - ) - - return entity - - -def new_file_component(name='foo', container=None): - '''Return file component with *name* and *container*.''' - if container: - session = container.session - else: - session = ftrack_api.Session() - - entity = session.create('FileComponent', { - 'name': name, - 'file_type': '.png', - 'container': container - }) - - return entity - - -# Reusable fixtures. -file_component = new_file_component() -container_component = new_container_component() -sequence_component = new_sequence_component() - - -# Note: to improve test performance the same project is reused throughout the -# tests. This means that all hierarchical names must be unique, otherwise an -# IntegrityError will be raised on the server. 
- -@pytest.mark.parametrize( - 'component, hierarchy, expected, structure, asset_name', - [ - ( - file_component, - [], - '{project_name}/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [], - '{project_name}/foobar/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - project_versions_prefix='foobar' - ), - 'my_new_asset' - ), - ( - file_component, - ['baz1', 'bar'], - '{project_name}/baz1/bar/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - sequence_component, - ['baz2', 'bar'], - '{project_name}/baz2/bar/my_new_asset/v001/baz.%04d.jpg', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - sequence_component['members'][3], - ['baz3', 'bar'], - '{project_name}/baz3/bar/my_new_asset/v001/baz.0004.jpg', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - container_component, - ['baz4', 'bar'], - '{project_name}/baz4/bar/my_new_asset/v001/container_component', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(container=container_component), - ['baz5', 'bar'], - ( - '{project_name}/baz5/bar/my_new_asset/v001/container_component/' - 'foo.png' - ), - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [u'björn'], - '{project_name}/bjorn/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [u'björn!'], - '{project_name}/bjorn_/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(name=u'fää'), - [], - '{project_name}/my_new_asset/v001/faa.png', - ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - new_file_component(name=u'fo/o'), - [], - '{project_name}/my_new_asset/v001/fo_o.png', - 
ftrack_api.structure.standard.StandardStructure(), - 'my_new_asset' - ), - ( - file_component, - [], - '{project_name}/aao/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - u'åäö' - ), - ( - file_component, - [], - '{project_name}/my_ne____w_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure(), - u'my_ne!!!!w_asset' - ), - ( - file_component, - [u'björn2'], - u'{project_name}/björn2/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute=None - ), - 'my_new_asset' - ), - ( - file_component, - [u'bj!rn'], - '{project_name}/bj^rn/my_new_asset/v001/foo.png', - ftrack_api.structure.standard.StandardStructure( - illegal_character_substitute='^' - ), - 'my_new_asset' - ) - ], ids=[ - 'file_component_on_project', - 'file_component_on_project_with_prefix', - 'file_component_with_hierarchy', - 'sequence_component', - 'sequence_component_member', - 'container_component', - 'container_component_member', - 'slugify_non_ascii_hierarchy', - 'slugify_illegal_hierarchy', - 'slugify_non_ascii_component_name', - 'slugify_illegal_component_name', - 'slugify_non_ascii_asset_name', - 'slugify_illegal_asset_name', - 'slugify_none', - 'slugify_other_character' - ] -) -def test_get_resource_identifier( - component, hierarchy, expected, structure, asset_name, new_project -): - '''Get resource identifier.''' - session = component.session - - # Create structure, asset and version. - context_id = new_project['id'] - for name in hierarchy: - context_id = session.create('Folder', { - 'name': name, - 'project_id': new_project['id'], - 'parent_id': context_id - })['id'] - - asset = session.create( - 'Asset', {'name': asset_name, 'context_id': context_id} - ) - version = session.create('AssetVersion', {'asset': asset}) - - # Update component with version. 
- if component['container']: - component['container']['version'] = version - else: - component['version'] = version - - session.commit() - - assert structure.get_resource_identifier(component) == expected.format( - project_name=new_project['name'] - ) - - -def test_unsupported_entity(user): - '''Fail to get resource identifier for unsupported entity.''' - structure = ftrack_api.structure.standard.StandardStructure() - with pytest.raises(NotImplementedError): - structure.get_resource_identifier(user) - - -def test_component_without_version_relation(new_project): - '''Get an identifer for component without a version relation.''' - session = new_project.session - - asset = session.create( - 'Asset', {'name': 'foo', 'context_id': new_project['id']} - ) - version = session.create('AssetVersion', {'asset': asset}) - - session.commit() - - file_component = new_file_component() - file_component['version_id'] = version['id'] - - structure = ftrack_api.structure.standard.StandardStructure() - structure.get_resource_identifier(file_component) - - -def test_component_without_committed_version_relation(): - '''Fail to get an identifer for component without a committed version.''' - file_component = new_file_component() - session = file_component.session - version = session.create('AssetVersion', {}) - - file_component['version'] = version - - structure = ftrack_api.structure.standard.StandardStructure() - - with pytest.raises(ftrack_api.exception.StructureError): - structure.get_resource_identifier(file_component) - - -@pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' 
-) -def test_component_without_committed_asset_relation(): - '''Fail to get an identifer for component without a committed asset.''' - file_component = new_file_component() - session = file_component.session - version = session.create('AssetVersion', {}) - - file_component['version'] = version - - session.commit() - - structure = ftrack_api.structure.standard.StandardStructure() - - with pytest.raises(ftrack_api.exception.StructureError): - structure.get_resource_identifier(file_component) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py deleted file mode 100644 index 555adb2d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_attribute.py +++ /dev/null @@ -1,146 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest - -import ftrack_api.attribute -import ftrack_api.exception - - -@pytest.mark.parametrize('attributes', [ - [], - [ftrack_api.attribute.Attribute('test')] -], ids=[ - 'no initial attributes', - 'with initial attributes' -]) -def test_initialise_attributes_collection(attributes): - '''Initialise attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert sorted(list(attribute_collection)) == sorted(attributes) - - -def test_add_attribute_to_attributes_collection(): - '''Add valid attribute to attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - assert attribute_collection.keys() == [] - attribute_collection.add(attribute) - assert attribute_collection.keys() == ['test'] - - -def test_add_duplicate_attribute_to_attributes_collection(): - '''Fail to add attribute with duplicate name to attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - 
attribute_collection.add(attribute) - with pytest.raises(ftrack_api.exception.NotUniqueError): - attribute_collection.add(attribute) - - -def test_remove_attribute_from_attributes_collection(): - '''Remove attribute from attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - attribute_collection.add(attribute) - assert len(attribute_collection) == 1 - - attribute_collection.remove(attribute) - assert len(attribute_collection) == 0 - - -def test_remove_missing_attribute_from_attributes_collection(): - '''Fail to remove attribute not present in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - - with pytest.raises(KeyError): - attribute_collection.remove(attribute) - - -def test_get_attribute_from_attributes_collection(): - '''Get attribute from attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - attribute = ftrack_api.attribute.Attribute('test') - attribute_collection.add(attribute) - - retrieved_attribute = attribute_collection.get('test') - - assert retrieved_attribute is attribute - - -def test_get_missing_attribute_from_attributes_collection(): - '''Get attribute not present in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes() - assert attribute_collection.get('test') is None - - -@pytest.mark.parametrize('attributes, expected', [ - ([], []), - ([ftrack_api.attribute.Attribute('test')], ['test']) -], ids=[ - 'no initial attributes', - 'with initial attributes' -]) -def test_attribute_collection_keys(attributes, expected): - '''Retrieve keys for attribute collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert sorted(attribute_collection.keys()) == sorted(expected) - - -@pytest.mark.parametrize('attribute, expected', [ - (None, False), - (ftrack_api.attribute.Attribute('b'), True), - 
(ftrack_api.attribute.Attribute('c'), False) -], ids=[ - 'none attribute', - 'present attribute', - 'missing attribute' -]) -def test_attributes_collection_contains(attribute, expected): - '''Check presence in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes([ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ]) - - assert (attribute in attribute_collection) is expected - - -@pytest.mark.parametrize('attributes, expected', [ - ([], 0), - ([ftrack_api.attribute.Attribute('test')], 1), - ( - [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ], - 2 - ) -], ids=[ - 'no attributes', - 'single attribute', - 'multiple attributes' -]) -def test_attributes_collection_count(attributes, expected): - '''Count attributes in attributes collection.''' - attribute_collection = ftrack_api.attribute.Attributes(attributes) - assert len(attribute_collection) == expected - - -def test_iterate_over_attributes_collection(): - '''Iterate over attributes collection.''' - attributes = [ - ftrack_api.attribute.Attribute('a'), - ftrack_api.attribute.Attribute('b') - ] - - attribute_collection = ftrack_api.attribute.Attributes(attributes) - for attribute in attribute_collection: - attributes.remove(attribute) - - assert len(attributes) == 0 - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py deleted file mode 100644 index 79157372..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_cache.py +++ /dev/null @@ -1,416 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import uuid -import tempfile - -import pytest - -import ftrack_api.cache - - -@pytest.fixture(params=['proxy', 'layered', 'memory', 'file', 'serialised']) -def cache(request): - '''Return cache.''' - if request.param == 'proxy': - cache = ftrack_api.cache.ProxyCache( 
- ftrack_api.cache.MemoryCache() - ) - - elif request.param == 'layered': - cache = ftrack_api.cache.LayeredCache( - [ftrack_api.cache.MemoryCache()] - ) - - elif request.param == 'memory': - cache = ftrack_api.cache.MemoryCache() - - elif request.param == 'file': - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - cache = ftrack_api.cache.FileCache(cache_path) - - def cleanup(): - '''Cleanup.''' - try: - os.remove(cache_path) - except OSError: - # BSD DB (Mac OSX) implementation of the interface will append - # a .db extension. - os.remove(cache_path + '.db') - - request.addfinalizer(cleanup) - - elif request.param == 'serialised': - cache = ftrack_api.cache.SerialisedCache( - ftrack_api.cache.MemoryCache(), - encode=lambda value: value, - decode=lambda value: value - ) - - else: - raise ValueError( - 'Unrecognised cache fixture type {0!r}'.format(request.param) - ) - - return cache - - - -class Class(object): - '''Class for testing.''' - - def method(self, key): - '''Method for testing.''' - - -def function(mutable, x, y=2): - '''Function for testing.''' - mutable['called'] = True - return {'result': x + y} - - -def assert_memoised_call( - memoiser, function, expected, args=None, kw=None, memoised=True -): - '''Assert *function* call via *memoiser* was *memoised*.''' - mapping = {'called': False} - if args is not None: - args = (mapping,) + args - else: - args = (mapping,) - - result = memoiser.call(function, args, kw) - - assert result == expected - assert mapping['called'] is not memoised - - -def test_get(cache): - '''Retrieve item from cache.''' - cache.set('key', 'value') - assert cache.get('key') == 'value' - - -def test_get_missing_key(cache): - '''Fail to retrieve missing item from cache.''' - with pytest.raises(KeyError): - cache.get('key') - - -def test_set(cache): - '''Set item in cache.''' - with pytest.raises(KeyError): - cache.get('key') - - cache.set('key', 'value') - assert cache.get('key') == 'value' - 
- -def test_remove(cache): - '''Remove item from cache.''' - cache.set('key', 'value') - cache.remove('key') - - with pytest.raises(KeyError): - cache.get('key') - - -def test_remove_missing_key(cache): - '''Fail to remove missing key.''' - with pytest.raises(KeyError): - cache.remove('key') - - -def test_keys(cache): - '''Retrieve keys of items in cache.''' - assert cache.keys() == [] - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - assert sorted(cache.keys()) == sorted(['a', 'b', 'c']) - - -def test_clear(cache): - '''Remove items from cache.''' - cache.set('a', 'a_value') - cache.set('b', 'b_value') - cache.set('c', 'c_value') - - assert cache.keys() - cache.clear() - - assert not cache.keys() - - -def test_clear_using_pattern(cache): - '''Remove items that match pattern from cache.''' - cache.set('matching_key', 'value') - cache.set('another_matching_key', 'value') - cache.set('key_not_matching', 'value') - - assert cache.keys() - cache.clear(pattern='.*matching_key$') - - assert cache.keys() == ['key_not_matching'] - - -def test_clear_encountering_missing_key(cache, mocker): - '''Clear missing key.''' - # Force reporting keys that are not actually valid for test purposes. - mocker.patch.object(cache, 'keys', lambda: ['missing']) - assert cache.keys() == ['missing'] - - # Should not error even though key not valid. - cache.clear() - - # The key was not successfully removed so should still be present. - assert cache.keys() == ['missing'] - - -def test_layered_cache_propagates_value_on_get(): - '''Layered cache propagates value on get.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] - - cache = ftrack_api.cache.LayeredCache(caches) - - # Set item on second level cache only. - caches[1].set('key', 'value') - - # Retrieving key via layered cache should propagate it automatically to - # higher level caches only. 
- assert cache.get('key') == 'value' - assert caches[0].get('key') == 'value' - - with pytest.raises(KeyError): - caches[2].get('key') - - -def test_layered_cache_remove_at_depth(): - '''Remove key that only exists at depth in LayeredCache.''' - caches = [ - ftrack_api.cache.MemoryCache(), - ftrack_api.cache.MemoryCache() - ] - - cache = ftrack_api.cache.LayeredCache(caches) - - # Set item on second level cache only. - caches[1].set('key', 'value') - - # Removing key that only exists at depth should not raise key error. - cache.remove('key') - - # Ensure key was removed. - assert not cache.keys() - - -def test_expand_references(): - '''Test that references are expanded from serialized cache.''' - - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - def make_cache(session, cache_path): - '''Create a serialised file cache.''' - serialized_file_cache = ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=session.encode, - decode=session.decode - ) - - return serialized_file_cache - - # Populate the serialized file cache. - session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - expanded_results = dict() - - query_string = 'select asset.parent from AssetVersion where asset is_not None limit 10' - - for sequence in session.query(query_string): - asset = sequence.get('asset') - - expanded_results.setdefault( - asset.get('id'), asset.get('parent') - ) - - # Fetch the data from cache. - new_session = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - - new_session_two = ftrack_api.Session( - cache=lambda session, cache_path=cache_path:make_cache( - session, cache_path - ) - ) - - - # Make sure references are merged. 
- for sequence in new_session.query(query_string): - asset = sequence.get('asset') - - assert ( - asset.get('parent') == expanded_results[asset.get('id')] - ) - - # Use for fetching directly using get. - assert ( - new_session_two.get(asset.entity_type, asset.get('id')).get('parent') == - expanded_results[asset.get('id')] - ) - - - -@pytest.mark.parametrize('items, key', [ - (({},), '{}'), - (({}, {}), '{}{}') -], ids=[ - 'single object', - 'multiple objects' -]) -def test_string_key_maker_key(items, key): - '''Generate key using string key maker.''' - key_maker = ftrack_api.cache.StringKeyMaker() - assert key_maker.key(*items) == key - - -@pytest.mark.parametrize('items, key', [ - ( - ({},), - '\x01\x01' - ), - ( - ({'a': 'b'}, [1, 2]), - '\x01' - '\x80\x02U\x01a.' '\x02' '\x80\x02U\x01b.' - '\x01' - '\x00' - '\x03' - '\x80\x02K\x01.' '\x00' '\x80\x02K\x02.' - '\x03' - ), - ( - (function,), - '\x04function\x00unit.test_cache' - ), - ( - (Class,), - '\x04Class\x00unit.test_cache' - ), - ( - (Class.method,), - '\x04method\x00Class\x00unit.test_cache' - ), - ( - (callable,), - '\x04callable' - ) -], ids=[ - 'single mapping', - 'multiple objects', - 'function', - 'class', - 'method', - 'builtin' -]) -def test_object_key_maker_key(items, key): - '''Generate key using string key maker.''' - key_maker = ftrack_api.cache.ObjectKeyMaker() - assert key_maker.key(*items) == key - - -def test_memoised_call(): - '''Call memoised function.''' - memoiser = ftrack_api.cache.Memoiser() - - # Initial call should not be memoised so function is executed. - assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=False - ) - - # Identical call should be memoised so function is not executed again. - assert_memoised_call( - memoiser, function, args=(1,), expected={'result': 3}, memoised=True - ) - - # Differing call is not memoised so function is executed. 
- assert_memoised_call( - memoiser, function, args=(3,), expected={'result': 5}, memoised=False - ) - - -def test_memoised_call_variations(): - '''Call memoised function with identical arguments using variable format.''' - memoiser = ftrack_api.cache.Memoiser() - expected = {'result': 3} - - # Call function once to ensure is memoised. - assert_memoised_call( - memoiser, function, args=(1,), expected=expected, memoised=False - ) - - # Each of the following calls should equate to the same key and make - # use of the memoised value. - for args, kw in [ - ((), {'x': 1}), - ((), {'x': 1, 'y': 2}), - ((1,), {'y': 2}), - ((1,), {}) - ]: - assert_memoised_call( - memoiser, function, args=args, kw=kw, expected=expected - ) - - # The following calls should all be treated as new variations and so - # not use any memoised value. - assert_memoised_call( - memoiser, function, kw={'x': 2}, expected={'result': 4}, memoised=False - ) - assert_memoised_call( - memoiser, function, kw={'x': 3, 'y': 2}, expected={'result': 5}, - memoised=False - ) - assert_memoised_call( - memoiser, function, args=(4, ), kw={'y': 2}, expected={'result': 6}, - memoised=False - ) - assert_memoised_call( - memoiser, function, args=(5, ), expected={'result': 7}, memoised=False - ) - - -def test_memoised_mutable_return_value(): - '''Avoid side effects for returned mutable arguments when memoising.''' - memoiser = ftrack_api.cache.Memoiser() - arguments = ({'called': False}, 1) - - result_a = memoiser.call(function, arguments) - assert result_a == {'result': 3} - assert arguments[0]['called'] - - # Modify mutable externally and check that stored memoised value is - # unchanged. 
- del result_a['result'] - - arguments[0]['called'] = False - result_b = memoiser.call(function, arguments) - - assert result_b == {'result': 3} - assert not arguments[0]['called'] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py deleted file mode 100644 index 15c3e5cf..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_collection.py +++ /dev/null @@ -1,574 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import copy -import uuid - -import mock -import pytest - -import ftrack_api.collection -import ftrack_api.symbol -import ftrack_api.inspection -import ftrack_api.exception -import ftrack_api.operation - - -def create_mock_entity(session): - '''Return new mock entity for *session*.''' - entity = mock.MagicMock() - entity.session = session - entity.primary_key_attributes = ['id'] - entity['id'] = str(uuid.uuid4()) - return entity - - -@pytest.fixture -def mock_entity(session): - '''Return mock entity.''' - return create_mock_entity(session) - - -@pytest.fixture -def mock_entities(session): - '''Return list of two mock entities.''' - return [ - create_mock_entity(session), - create_mock_entity(session) - ] - - -@pytest.fixture -def mock_attribute(): - '''Return mock attribute.''' - attribute = mock.MagicMock() - attribute.name = 'test' - return attribute - - -def test_collection_initialisation_does_not_modify_entity_state( - mock_entity, mock_attribute, mock_entities -): - '''Initialising collection does not modify entity state.''' - ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - assert ftrack_api.inspection.state(mock_entity) is ftrack_api.symbol.NOT_SET - - -def test_immutable_collection_initialisation( - mock_entity, mock_attribute, mock_entities -): - '''Initialise immutable collection.''' - collection = ftrack_api.collection.Collection( - 
mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - assert list(collection) == mock_entities - assert collection.mutable is False - - -def test_collection_shallow_copy( - mock_entity, mock_attribute, mock_entities, session -): - '''Shallow copying collection should avoid indirect mutation.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with mock_entity.session.operation_recording(False): - collection_copy = copy.copy(collection) - new_entity = create_mock_entity(session) - collection_copy.append(new_entity) - - assert list(collection) == mock_entities - assert list(collection_copy) == mock_entities + [new_entity] - - -def test_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Insert a value into collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - new_entity = create_mock_entity(session) - collection.insert(0, new_entity) - assert list(collection) == [new_entity] + mock_entities - - -def test_collection_insert_duplicate( - mock_entity, mock_attribute, mock_entities -): - '''Fail to insert a duplicate value into collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): - collection.insert(0, mock_entities[1]) - - -def test_immutable_collection_insert( - mock_entity, mock_attribute, mock_entities, session -): - '''Fail to insert a value into immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - collection.insert(0, create_mock_entity(session)) - - -def test_collection_set_item( - mock_entity, mock_attribute, mock_entities, session -): - '''Set item at index in collection.''' - collection = 
ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - new_entity = create_mock_entity(session) - collection[0] = new_entity - assert list(collection) == [new_entity, mock_entities[1]] - - -def test_collection_re_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Re-set value at exact same index in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - collection[0] = mock_entities[0] - assert list(collection) == mock_entities - - -def test_collection_set_duplicate_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set a duplicate value into collection at different index.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(ftrack_api.exception.DuplicateItemInCollectionError): - collection[0] = mock_entities[1] - - -def test_immutable_collection_set_item( - mock_entity, mock_attribute, mock_entities -): - '''Fail to set item at index in immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - collection[0] = mock_entities[0] - - -def test_collection_delete_item( - mock_entity, mock_attribute, mock_entities -): - '''Remove item at index from collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - del collection[0] - assert list(collection) == [mock_entities[1]] - - -def test_collection_delete_item_at_invalid_index( - mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at missing index from immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - - with pytest.raises(IndexError): - del collection[4] - - -def test_immutable_collection_delete_item( - 
mock_entity, mock_attribute, mock_entities -): - '''Fail to remove item at index from immutable collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities, mutable=False - ) - - with pytest.raises(ftrack_api.exception.ImmutableCollectionError): - del collection[0] - - -def test_collection_count( - mock_entity, mock_attribute, mock_entities, session -): - '''Count items in collection.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - assert len(collection) == 2 - - collection.append(create_mock_entity(session)) - assert len(collection) == 3 - - del collection[0] - assert len(collection) == 2 - - -@pytest.mark.parametrize('other, expected', [ - ([], False), - ([1, 2], True), - ([1, 2, 3], False), - ([1], False) -], ids=[ - 'empty', - 'same', - 'additional', - 'missing' -]) -def test_collection_equal(mocker, mock_entity, mock_attribute, other, expected): - '''Determine collection equality against another collection.''' - # Temporarily override determination of entity identity so that it works - # against simple scalar values for purpose of test. - mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) - - collection_a = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=[1, 2] - ) - - collection_b = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=other - ) - assert (collection_a == collection_b) is expected - - -def test_collection_not_equal_to_non_collection( - mocker, mock_entity, mock_attribute -): - '''Collection not equal to a non-collection.''' - # Temporarily override determination of entity identity so that it works - # against simple scalar values for purpose of test. 
- mocker.patch.object( - ftrack_api.inspection, 'identity', lambda entity: str(entity) - ) - - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=[1, 2] - ) - - assert (collection != {}) is True - - -def test_collection_notify_on_modification( - mock_entity, mock_attribute, mock_entities, session -): - '''Record UpdateEntityOperation on collection modification.''' - collection = ftrack_api.collection.Collection( - mock_entity, mock_attribute, data=mock_entities - ) - assert len(session.recorded_operations) == 0 - - collection.append(create_mock_entity(session)) - assert len(session.recorded_operations) == 1 - operation = session.recorded_operations.pop() - assert isinstance(operation, ftrack_api.operation.UpdateEntityOperation) - assert operation.new_value == collection - - -def test_mapped_collection_proxy_shallow_copy(new_project, unique_name): - '''Shallow copying mapped collection proxy avoids indirect mutation.''' - metadata = new_project['metadata'] - - with new_project.session.operation_recording(False): - metadata_copy = copy.copy(metadata) - metadata_copy[unique_name] = True - - assert unique_name not in metadata - assert unique_name in metadata_copy - - -def test_mapped_collection_proxy_mutable_property(new_project): - '''Mapped collection mutable property maps to underlying collection.''' - metadata = new_project['metadata'] - - assert metadata.mutable is True - assert metadata.collection.mutable is True - - metadata.mutable = False - assert metadata.collection.mutable is False - - -def test_mapped_collection_proxy_attribute_property( - new_project, mock_attribute -): - '''Mapped collection attribute property maps to underlying collection.''' - metadata = new_project['metadata'] - - assert metadata.attribute is metadata.collection.attribute - - metadata.attribute = mock_attribute - assert metadata.collection.attribute is mock_attribute - - -def test_mapped_collection_proxy_get_item(new_project, unique_name): - '''Retrieve 
item in mapped collection proxy.''' - session = new_project.session - - # Prepare data. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Check in clean session retrieval of value. - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value - - -def test_mapped_collection_proxy_set_item(new_project, unique_name): - '''Set new item in mapped collection proxy.''' - session = new_project.session - - metadata = new_project['metadata'] - assert unique_name not in metadata - - value = 'value' - metadata[unique_name] = value - assert metadata[unique_name] == value - - # Check change persisted correctly. - session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == value - - -def test_mapped_collection_proxy_update_item(new_project, unique_name): - '''Update existing item in mapped collection proxy.''' - session = new_project.session - - # Prepare a pre-existing value. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Set new value. - new_value = 'new_value' - metadata[unique_name] = new_value - - # Confirm change persisted correctly. 
- session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [unique_name] - assert retrieved['metadata'][unique_name] == new_value - - -def test_mapped_collection_proxy_delete_item(new_project, unique_name): - '''Remove existing item from mapped collection proxy.''' - session = new_project.session - - # Prepare a pre-existing value to remove. - metadata = new_project['metadata'] - value = 'value' - metadata[unique_name] = value - session.commit() - - # Now remove value. - del new_project['metadata'][unique_name] - assert unique_name not in new_project['metadata'] - - # Confirm change persisted correctly. - session.commit() - session.reset() - retrieved = session.get(*ftrack_api.inspection.identity(new_project)) - - assert retrieved is not new_project - assert retrieved['metadata'].keys() == [] - assert unique_name not in retrieved['metadata'] - - -def test_mapped_collection_proxy_delete_missing_item(new_project, unique_name): - '''Fail to remove item for missing key from mapped collection proxy.''' - metadata = new_project['metadata'] - assert unique_name not in metadata - with pytest.raises(KeyError): - del metadata[unique_name] - - -def test_mapped_collection_proxy_iterate_keys(new_project, unique_name): - '''Iterate over keys in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) - - # Commit here as otherwise cleanup operation will fail because transaction - # will include updating metadata to refer to a deleted entity. 
- new_project.session.commit() - - iterated = set() - for key in metadata: - iterated.add(key) - - assert iterated == set(['a', 'b', 'c']) - - -def test_mapped_collection_proxy_count(new_project, unique_name): - '''Count items in mapped collection proxy.''' - metadata = new_project['metadata'] - metadata.update({ - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - }) - - # Commit here as otherwise cleanup operation will fail because transaction - # will include updating metadata to refer to a deleted entity. - new_project.session.commit() - - assert len(metadata) == 3 - - -def test_mapped_collection_on_create(session, unique_name, project): - '''Test that it is possible to set relational attributes on create''' - metadata = { - 'a': 'value-a', - 'b': 'value-b', - 'c': 'value-c' - } - - task_id = session.create( - 'Task', { - 'name': unique_name, - 'parent': project, - 'metadata': metadata, - - } - ).get('id') - - session.commit() - - # Reset the session and check that we have the expected - # values. - session.reset() - - task = session.get( - 'Task', task_id - ) - - for key, value in metadata.items(): - assert value == task['metadata'][key] - - -def test_collection_refresh(new_asset_version, new_component): - '''Test collection reload.''' - session_two = ftrack_api.Session(auto_connect_event_hub=False) - - query_string = 'select components from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() - - # Modify our asset version - new_asset_version.get('components').append( - new_component - ) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure we get the newly - # populated data. 
- session_two.query( - query_string - ).all() - - assert ( - new_asset_version.get('components') == new_asset_version_two.get('components') - ) - - # Make a local change to our asset version - new_asset_version_two.get('components').pop() - - # Query the same asset version again and make sure our local changes - # are not overwritten. - - session_two.query( - query_string - ).all() - - assert len(new_asset_version_two.get('components')) == 0 - - -def test_mapped_collection_reload(new_asset_version): - '''Test mapped collection reload.''' - session_two = ftrack_api.Session(auto_connect_event_hub=False) - - query_string = 'select metadata from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - # Fetch the new asset version in a new session. - new_asset_version_two = session_two.query( - query_string - ).one() - - # Modify our asset version - new_asset_version['metadata']['test'] = str(uuid.uuid4()) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure we get the newly - # populated data. - session_two.query( - query_string - ).all() - - assert ( - new_asset_version['metadata']['test'] == new_asset_version_two['metadata']['test'] - ) - - local_data = str(uuid.uuid4()) - - new_asset_version_two['metadata']['test'] = local_data - - # Modify our asset version again - new_asset_version['metadata']['test'] = str(uuid.uuid4()) - - new_asset_version.session.commit() - - # Query the same asset version again and make sure our local changes - # are not overwritten. 
- session_two.query( - query_string - ).all() - - assert ( - new_asset_version_two['metadata']['test'] == local_data - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py deleted file mode 100644 index 7a9b0fad..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_custom_attribute.py +++ /dev/null @@ -1,251 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import uuid - -import pytest - -import ftrack_api - -@pytest.fixture( - params=[ - 'AssetVersion', 'Shot', 'AssetVersionList', 'TypedContextList', 'User', - 'Asset' - ] -) -def new_entity_and_custom_attribute(request, session): - '''Return tuple with new entity, custom attribute name and value.''' - if request.param == 'AssetVersion': - entity = session.create( - request.param, { - 'asset': session.query('Asset').first() - } - ) - return (entity, 'versiontest', 123) - - elif request.param == 'Shot': - sequence = session.query('Sequence').first() - entity = session.create( - request.param, { - 'parent_id': sequence['id'], - 'project_id': sequence['project_id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'fstart', 1005) - - elif request.param == 'Asset': - shot = session.query('Shot').first() - entity = session.create( - request.param, { - 'context_id': shot['project_id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'htest', 1005) - - elif request.param in ('AssetVersionList', 'TypedContextList'): - entity = session.create( - request.param, { - 'project_id': session.query('Project').first()['id'], - 'category_id': session.query('ListCategory').first()['id'], - 'name': str(uuid.uuid1()) - } - ) - return (entity, 'listbool', True) - - elif request.param == 'User': - entity = session.create( - request.param, { - 'first_name': 'Custom attribute test', - 'last_name': 'Custom attribute test', - 'username': 
str(uuid.uuid1()) - } - ) - return (entity, 'teststring', 'foo') - - -@pytest.mark.parametrize( - 'entity_type, entity_model_name, custom_attribute_name', - [ - ('Task', 'task', 'customNumber'), - ('AssetVersion', 'assetversion', 'NumberField') - ], - ids=[ - 'task', - 'asset_version' - ] -) -def test_read_set_custom_attribute( - session, entity_type, entity_model_name, custom_attribute_name -): - '''Retrieve custom attribute value set on instance.''' - custom_attribute_value = session.query( - 'CustomAttributeValue where configuration.key is ' - '{custom_attribute_name}' - .format( - custom_attribute_name=custom_attribute_name - ) - ).first() - - entity = session.query( - 'select custom_attributes from {entity_type} where id is ' - '{entity_id}'.format( - entity_type=entity_type, - entity_id=custom_attribute_value['entity_id'], - ) - ).first() - - assert custom_attribute_value - - assert entity['id'] == entity['custom_attributes'].collection.entity['id'] - assert entity is entity['custom_attributes'].collection.entity - assert ( - entity['custom_attributes'][custom_attribute_name] == - custom_attribute_value['value'] - ) - - assert custom_attribute_name in entity['custom_attributes'].keys() - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'customNumber'), - ('Shot', 'fstart'), - ( - 'AssetVersion', 'NumberField' - ) - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_write_set_custom_attribute_value( - session, entity_type, custom_attribute_name -): - '''Overwrite existing instance level custom attribute value.''' - entity = session.query( - 'select custom_attributes from {entity_type} where ' - 'custom_attributes.configuration.key is {custom_attribute_name}'.format( - entity_type=entity_type, - custom_attribute_name=custom_attribute_name - ) - ).first() - - entity['custom_attributes'][custom_attribute_name] = 42 - - assert entity['custom_attributes'][custom_attribute_name] == 42 - - session.commit() - - 
-@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'fstart'), - ('Shot', 'Not existing'), - ('AssetVersion', 'fstart') - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_read_custom_attribute_that_does_not_exist( - session, entity_type, custom_attribute_name -): - '''Fail to read value from a custom attribute that does not exist.''' - entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) - ).first() - - with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] - - -@pytest.mark.parametrize( - 'entity_type, custom_attribute_name', - [ - ('Task', 'fstart'), - ('Shot', 'Not existing'), - ('AssetVersion', 'fstart') - ], - ids=[ - 'task', - 'shot', - 'asset_version' - ] -) -def test_write_custom_attribute_that_does_not_exist( - session, entity_type, custom_attribute_name -): - '''Fail to write a value to a custom attribute that does not exist.''' - entity = session.query( - 'select custom_attributes from {entity_type}'.format( - entity_type=entity_type - ) - ).first() - - with pytest.raises(KeyError): - entity['custom_attributes'][custom_attribute_name] = 42 - - -def test_set_custom_attribute_on_new_but_persisted_version( - session, new_asset_version -): - '''Set custom attribute on new persisted version.''' - new_asset_version['custom_attributes']['versiontest'] = 5 - session.commit() - - -@pytest.mark.xfail( - raises=ftrack_api.exception.ServerError, - reason='Due to user permission errors.' 
-) -def test_batch_create_entity_and_custom_attributes( - new_entity_and_custom_attribute -): - '''Write custom attribute value and entity in the same batch.''' - entity, name, value = new_entity_and_custom_attribute - session = entity.session - entity['custom_attributes'][name] = value - - assert entity['custom_attributes'][name] == value - session.commit() - - assert entity['custom_attributes'][name] == value - - -def test_refresh_custom_attribute(new_asset_version): - '''Test custom attribute refresh.''' - session_two = ftrack_api.Session() - - query_string = 'select custom_attributes from AssetVersion where id is "{0}"'.format( - new_asset_version.get('id') - ) - - asset_version_two = session_two.query( - query_string - ).first() - - new_asset_version['custom_attributes']['versiontest'] = 42 - - new_asset_version.session.commit() - - asset_version_two = session_two.query( - query_string - ).first() - - assert ( - new_asset_version['custom_attributes']['versiontest'] == - asset_version_two['custom_attributes']['versiontest'] - ) - - - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py deleted file mode 100644 index c53dda96..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_data.py +++ /dev/null @@ -1,129 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile - -import pytest - -import ftrack_api.data - - -@pytest.fixture() -def content(): - '''Return initial content.''' - return 'test data' - - -@pytest.fixture(params=['file', 'file_wrapper', 'string']) -def data(request, content): - '''Return cache.''' - - if request.param == 'string': - data_object = ftrack_api.data.String(content) - - elif request.param == 'file': - file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') - file_object.write(content) - file_object.flush() - file_object.close() - - 
data_object = ftrack_api.data.File(path, 'r+') - - def cleanup(): - '''Cleanup.''' - data_object.close() - os.remove(path) - - request.addfinalizer(cleanup) - - elif request.param == 'file_wrapper': - file_handle, path = tempfile.mkstemp() - file_object = os.fdopen(file_handle, 'r+') - file_object.write(content) - file_object.seek(0) - - data_object = ftrack_api.data.FileWrapper(file_object) - - def cleanup(): - '''Cleanup.''' - data_object.close() - os.remove(path) - - request.addfinalizer(cleanup) - - else: - raise ValueError('Unrecognised parameter: {0}'.format(request.param)) - - return data_object - - -def test_read(data, content): - '''Return content from current position up to *limit*.''' - assert data.read(5) == content[:5] - assert data.read() == content[5:] - - -def test_write(data, content): - '''Write content at current position.''' - assert data.read() == content - data.write('more test data') - data.seek(0) - assert data.read() == content + 'more test data' - - -def test_flush(data): - '''Flush buffers ensuring data written.''' - # TODO: Implement better test than just calling function. 
- data.flush() - - -def test_seek(data, content): - '''Move internal pointer to *position*.''' - data.seek(5) - assert data.read() == content[5:] - - -def test_tell(data): - '''Return current position of internal pointer.''' - assert data.tell() == 0 - data.seek(5) - assert data.tell() == 5 - - -def test_close(data): - '''Flush buffers and prevent further access.''' - data.close() - with pytest.raises(ValueError) as error: - data.read() - - assert 'I/O operation on closed file' in str(error.value) - - -class Dummy(ftrack_api.data.Data): - '''Dummy string.''' - - def read(self, limit=None): - '''Return content from current position up to *limit*.''' - - def write(self, content): - '''Write content at current position.''' - - -def test_unsupported_tell(): - '''Fail when tell unsupported.''' - data = Dummy() - with pytest.raises(NotImplementedError) as error: - data.tell() - - assert 'Tell not supported' in str(error.value) - - -def test_unsupported_seek(): - '''Fail when seek unsupported.''' - data = Dummy() - with pytest.raises(NotImplementedError) as error: - data.seek(5) - - assert 'Seek not supported' in str(error.value) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py deleted file mode 100644 index ae565cb3..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_formatter.py +++ /dev/null @@ -1,70 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import termcolor - -import ftrack_api.formatter - - -def colored(text, *args, **kwargs): - '''Pass through so there are no escape sequences in output.''' - return text - - -def test_format(user, mocker): - '''Return formatted representation of entity.''' - mocker.patch.object(termcolor, 'colored', colored) - - result = ftrack_api.formatter.format(user) - - # Cannot test entire string as too variable so check for key text. 
- assert result.startswith('User\n') - assert ' username: jenkins' in result - assert ' email: ' in result - - -def test_format_using_custom_formatters(user): - '''Return formatted representation of entity using custom formatters.''' - result = ftrack_api.formatter.format( - user, formatters={ - 'header': lambda text: '*{0}*'.format(text), - 'label': lambda text: '-{0}'.format(text) - } - ) - - # Cannot test entire string as too variable so check for key text. - assert result.startswith('*User*\n') - assert ' -username: jenkins' in result - assert ' -email: ' in result - - -def test_format_filtering(new_user, mocker): - '''Return formatted representation using custom filter.''' - mocker.patch.object(termcolor, 'colored', colored) - - with new_user.session.auto_populating(False): - result = ftrack_api.formatter.format( - new_user, - attribute_filter=ftrack_api.formatter.FILTER['ignore_unset'] - ) - - # Cannot test entire string as too variable so check for key text. - assert result.startswith('User\n') - assert ' username: {0}'.format(new_user['username']) in result - assert ' email: ' not in result - - -def test_format_recursive(user, mocker): - '''Return formatted recursive representation.''' - mocker.patch.object(termcolor, 'colored', colored) - - user.session.populate(user, 'timelogs.user') - - with user.session.auto_populating(False): - result = ftrack_api.formatter.format(user, recursive=True) - - # Cannot test entire string as too variable so check for key text. 
- assert result.startswith('User\n') - assert ' username: jenkins' - assert ' timelogs: Timelog' in result - assert ' user: User{...}' in result diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py deleted file mode 100644 index 57b44613..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_inspection.py +++ /dev/null @@ -1,101 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2014 ftrack - -import ftrack_api.inspection -import ftrack_api.symbol - - -def test_identity(user): - '''Retrieve identity of *user*.''' - identity = ftrack_api.inspection.identity(user) - assert identity[0] == 'User' - assert identity[1] == ['d07ae5d0-66e1-11e1-b5e9-f23c91df25eb'] - - -def test_primary_key(user): - '''Retrieve primary key of *user*.''' - primary_key = ftrack_api.inspection.primary_key(user) - assert primary_key == { - 'id': 'd07ae5d0-66e1-11e1-b5e9-f23c91df25eb' - } - - -def test_created_entity_state(session, unique_name): - '''Created entity has CREATED state.''' - new_user = session.create('User', {'username': unique_name}) - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - # Even after a modification the state should remain as CREATED. 
- new_user['username'] = 'changed' - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - -def test_retrieved_entity_state(user): - '''Retrieved entity has NOT_SET state.''' - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.NOT_SET - - -def test_modified_entity_state(user): - '''Modified entity has MODIFIED state.''' - user['username'] = 'changed' - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.MODIFIED - - -def test_deleted_entity_state(session, user): - '''Deleted entity has DELETED state.''' - session.delete(user) - assert ftrack_api.inspection.state(user) is ftrack_api.symbol.DELETED - - -def test_post_commit_entity_state(session, unique_name): - '''Entity has NOT_SET state post commit.''' - new_user = session.create('User', {'username': unique_name}) - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.CREATED - - session.commit() - - assert ftrack_api.inspection.state(new_user) is ftrack_api.symbol.NOT_SET - - -def test_states(session, unique_name, user): - '''Determine correct states for multiple entities.''' - # NOT_SET - user_a = session.create('User', {'username': unique_name}) - session.commit() - - # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' - - # MODIFIED - user_c = user - user_c['username'] = 'changed' - - # DELETED - user_d = session.create('User', {'username': unique_name}) - session.delete(user_d) - - # Assert states. 
- states = ftrack_api.inspection.states([user_a, user_b, user_c, user_d]) - - assert states == [ - ftrack_api.symbol.NOT_SET, - ftrack_api.symbol.CREATED, - ftrack_api.symbol.MODIFIED, - ftrack_api.symbol.DELETED - ] - - -def test_states_for_no_entities(): - '''Return empty list of states when no entities passed.''' - states = ftrack_api.inspection.states([]) - assert states == [] - - -def test_skip_operations_for_non_inspected_entities(session, unique_name): - '''Skip operations for non inspected entities.''' - user_a = session.create('User', {'username': unique_name + '-1'}) - user_b = session.create('User', {'username': unique_name + '-2'}) - - states = ftrack_api.inspection.states([user_a]) - assert states == [ftrack_api.symbol.CREATED] diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py deleted file mode 100644 index 702bfae3..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_operation.py +++ /dev/null @@ -1,79 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api.operation - - -def test_operations_initialise(): - '''Initialise empty operations stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - -def test_operations_push(): - '''Push new operation onto stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operation = ftrack_api.operation.Operation() - operations.push(operation) - assert list(operations)[-1] is operation - - -def test_operations_pop(): - '''Pop and return operation from stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - operation = ftrack_api.operation.Operation() - operations.push(operation) - - assert len(operations) == 3 - popped = 
operations.pop() - assert popped is operation - assert len(operations) == 2 - - -def test_operations_count(): - '''Count operations in stack.''' - operations = ftrack_api.operation.Operations() - assert len(operations) == 0 - - operations.push(ftrack_api.operation.Operation()) - assert len(operations) == 1 - - operations.pop() - assert len(operations) == 0 - - -def test_operations_clear(): - '''Clear operations stack.''' - operations = ftrack_api.operation.Operations() - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - operations.push(ftrack_api.operation.Operation()) - assert len(operations) == 3 - - operations.clear() - assert len(operations) == 0 - - -def test_operations_iter(): - '''Iterate over operations stack.''' - operations = ftrack_api.operation.Operations() - operation_a = ftrack_api.operation.Operation() - operation_b = ftrack_api.operation.Operation() - operation_c = ftrack_api.operation.Operation() - - operations.push(operation_a) - operations.push(operation_b) - operations.push(operation_c) - - assert len(operations) == 3 - for operation, expected in zip( - operations, [operation_a, operation_b, operation_c] - ): - assert operation is expected - diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py deleted file mode 100644 index 247b496d..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_package.py +++ /dev/null @@ -1,48 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import ftrack_api - - -class Class(object): - '''Class.''' - - -class Mixin(object): - '''Mixin.''' - - def method(self): - '''Method.''' - return True - - -def test_mixin(): - '''Mixin class to instance.''' - instance_a = Class() - instance_b = Class() - - assert not hasattr(instance_a, 'method') - assert not hasattr(instance_b, 'method') - - 
ftrack_api.mixin(instance_a, Mixin) - - assert hasattr(instance_a, 'method') - assert instance_a.method() is True - assert not hasattr(instance_b, 'method') - - -def test_mixin_same_class_multiple_times(): - '''Mixin class to instance multiple times.''' - instance = Class() - assert not hasattr(instance, 'method') - assert len(instance.__class__.mro()) == 2 - - ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') - assert instance.method() is True - assert len(instance.__class__.mro()) == 4 - - ftrack_api.mixin(instance, Mixin) - assert hasattr(instance, 'method') - assert instance.method() is True - assert len(instance.__class__.mro()) == 4 diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py deleted file mode 100644 index 252c813a..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_plugin.py +++ /dev/null @@ -1,192 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import textwrap -import logging -import re - -import pytest - -import ftrack_api.plugin - - -@pytest.fixture() -def valid_plugin(temporary_path): - '''Return path to directory containing a valid plugin.''' - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(*args, **kw): - print "Registered", args, kw - ''')) - - return temporary_path - - -@pytest.fixture() -def python_non_plugin(temporary_path): - '''Return path to directory containing Python file that is non plugin.''' - with open(os.path.join(temporary_path, 'non.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - print "Not a plugin" - - def not_called(): - print "Not called" - ''')) - - return temporary_path - - -@pytest.fixture() -def non_plugin(temporary_path): - '''Return path to directory containing file that is non plugin.''' - with 
open(os.path.join(temporary_path, 'non.txt'), 'w') as file_object: - file_object.write('Never seen') - - return temporary_path - - -@pytest.fixture() -def broken_plugin(temporary_path): - '''Return path to directory containing broken plugin.''' - with open(os.path.join(temporary_path, 'broken.py'), 'w') as file_object: - file_object.write('syntax error') - - return temporary_path - - -@pytest.fixture() -def plugin(request, temporary_path): - '''Return path containing a plugin with requested specification.''' - specification = request.param - output = re.sub('(\w+)=\w+', '"\g<1>={}".format(\g<1>)', specification) - output = re.sub('\*args', 'args', output) - output = re.sub('\*\*kwargs', 'sorted(kwargs.items())', output) - - with open(os.path.join(temporary_path, 'plugin.py'), 'w') as file_object: - content = textwrap.dedent(''' - def register({}): - print {} - '''.format(specification, output)) - file_object.write(content) - - return temporary_path - - -def test_discover_empty_paths(capsys): - '''Discover no plugins when paths are empty.''' - ftrack_api.plugin.discover([' ']) - output, error = capsys.readouterr() - assert not output - assert not error - - -def test_discover_valid_plugin(valid_plugin, capsys): - '''Discover valid plugin.''' - ftrack_api.plugin.discover([valid_plugin], (1, 2), {'3': 4}) - output, error = capsys.readouterr() - assert 'Registered (1, 2) {\'3\': 4}' in output - - -def test_discover_python_non_plugin(python_non_plugin, capsys): - '''Discover Python non plugin.''' - ftrack_api.plugin.discover([python_non_plugin]) - output, error = capsys.readouterr() - assert 'Not a plugin' in output - assert 'Not called' not in output - - -def test_discover_non_plugin(non_plugin, capsys): - '''Discover non plugin.''' - ftrack_api.plugin.discover([non_plugin]) - output, error = capsys.readouterr() - assert not output - assert not error - - -def test_discover_broken_plugin(broken_plugin, caplog): - '''Discover broken plugin.''' - 
ftrack_api.plugin.discover([broken_plugin]) - - records = caplog.records() - assert len(records) == 1 - assert records[0].levelno is logging.WARNING - assert 'Failed to load plugin' in records[0].message - - -@pytest.mark.parametrize( - 'plugin, positional, keyword, expected', - [ - ( - 'a, b=False, c=False, d=False', - (1, 2), {'c': True, 'd': True, 'e': True}, - '1 b=2 c=True d=True' - ), - ( - '*args', - (1, 2), {'b': True, 'c': False}, - '(1, 2)' - ), - ( - '**kwargs', - tuple(), {'b': True, 'c': False}, - '[(\'b\', True), (\'c\', False)]' - ), - ( - 'a=False, b=False', - (True,), {'b': True}, - 'a=True b=True' - ), - ( - 'a, c=False, *args', - (1, 2, 3, 4), {}, - '1 c=2 (3, 4)' - ), - ( - 'a, c=False, **kwargs', - tuple(), {'a': 1, 'b': 2, 'c': 3, 'd': 4}, - '1 c=3 [(\'b\', 2), (\'d\', 4)]' - ), - ], - indirect=['plugin'], - ids=[ - 'mixed-explicit', - 'variable-args-only', - 'variable-kwargs-only', - 'keyword-from-positional', - 'trailing-variable-args', - 'trailing-keyword-args' - ] -) -def test_discover_plugin_with_specific_signature( - plugin, positional, keyword, expected, capsys -): - '''Discover plugin passing only supported arguments.''' - ftrack_api.plugin.discover( - [plugin], positional, keyword - ) - output, error = capsys.readouterr() - assert expected in output - - -def test_discover_plugin_varying_signatures(temporary_path, capsys): - '''Discover multiple plugins with varying signatures.''' - with open(os.path.join(temporary_path, 'plugin_a.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(a): - print (a,) - ''')) - - with open(os.path.join(temporary_path, 'plugin_b.py'), 'w') as file_object: - file_object.write(textwrap.dedent(''' - def register(a, b=False): - print (a,), {'b': b} - ''')) - - ftrack_api.plugin.discover( - [temporary_path], (True,), {'b': True} - ) - - output, error = capsys.readouterr() - assert '(True,)'in output - assert '(True,) {\'b\': True}' in output diff --git 
a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py deleted file mode 100644 index f8e3f9de..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_query.py +++ /dev/null @@ -1,164 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import math - -import pytest - -import ftrack_api -import ftrack_api.query -import ftrack_api.exception - - -def test_index(session): - '''Index into query result.''' - results = session.query('User') - assert isinstance(results[2], session.types['User']) - - -def test_len(session): - '''Return count of results using len.''' - results = session.query('User where username is jenkins') - assert len(results) == 1 - - -def test_all(session): - '''Return all results using convenience method.''' - results = session.query('User').all() - assert isinstance(results, list) - assert len(results) - - -def test_implicit_iteration(session): - '''Implicitly iterate through query result.''' - results = session.query('User') - assert isinstance(results, ftrack_api.query.QueryResult) - - records = [] - for record in results: - records.append(record) - - assert len(records) == len(results) - - -def test_one(session): - '''Return single result using convenience method.''' - user = session.query('User where username is jenkins').one() - assert user['username'] == 'jenkins' - - -def test_one_fails_for_no_results(session): - '''Fail to fetch single result when no results available.''' - with pytest.raises(ftrack_api.exception.NoResultFoundError): - session.query('User where username is does_not_exist').one() - - -def test_one_fails_for_multiple_results(session): - '''Fail to fetch single result when multiple results available.''' - with pytest.raises(ftrack_api.exception.MultipleResultsFoundError): - session.query('User').one() - - -def test_one_with_existing_limit(session): - '''Fail to return single result when 
existing limit in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').one() - - -def test_one_with_existing_offset(session): - '''Fail to return single result when existing offset in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins offset 2').one() - - -def test_one_with_prefetched_data(session): - '''Return single result ignoring prefetched data.''' - query = session.query('User where username is jenkins') - query.all() - - user = query.one() - assert user['username'] == 'jenkins' - - -def test_first(session): - '''Return first result using convenience method.''' - users = session.query('User').all() - - user = session.query('User').first() - assert user == users[0] - - -def test_first_returns_none_when_no_results(session): - '''Return None when no results available.''' - user = session.query('User where username is does_not_exist').first() - assert user is None - - -def test_first_with_existing_limit(session): - '''Fail to return first result when existing limit in expression.''' - with pytest.raises(ValueError): - session.query('User where username is jenkins limit 0').first() - - -def test_first_with_existing_offset(session): - '''Return first result whilst respecting custom offset.''' - users = session.query('User').all() - - user = session.query('User offset 2').first() - assert user == users[2] - - -def test_first_with_prefetched_data(session): - '''Return first result ignoring prefetched data.''' - query = session.query('User where username is jenkins') - query.all() - - user = query.first() - assert user['username'] == 'jenkins' - - -def test_paging(session, mocker): - '''Page through results.''' - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 5 - query = session.query('User limit 50', page_size=page_size) - records = query.all() - - assert session.call.call_count == ( - math.ceil(len(records) / float(page_size)) - ) - - -def 
test_paging_respects_offset_and_limit(session, mocker): - '''Page through results respecting offset and limit.''' - users = session.query('User').all() - - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 6 - query = session.query('User offset 2 limit 8', page_size=page_size) - records = query.all() - - assert session.call.call_count == 2 - assert len(records) == 8 - assert records == users[2:10] - - -def test_paging_respects_limit_smaller_than_page_size(session, mocker): - '''Use initial limit when less than page size.''' - mocker.patch.object(session, 'call', wraps=session.call) - - page_size = 100 - query = session.query('User limit 10', page_size=page_size) - records = query.all() - - assert session.call.call_count == 1 - session.call.assert_called_once_with( - [{ - 'action': 'query', - 'expression': 'select id from User offset 0 limit 10' - }] - ) - - assert len(records) == 10 \ No newline at end of file diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py deleted file mode 100644 index 5087efcc..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_session.py +++ /dev/null @@ -1,1519 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import os -import tempfile -import functools -import uuid -import textwrap -import datetime -import json -import random - -import pytest -import mock -import arrow -import requests - -import ftrack_api -import ftrack_api.cache -import ftrack_api.inspection -import ftrack_api.symbol -import ftrack_api.exception -import ftrack_api.session -import ftrack_api.collection - - -@pytest.fixture(params=['memory', 'persisted']) -def cache(request): - '''Return cache.''' - if request.param == 'memory': - cache = None # There is already a default Memory cache present. 
- elif request.param == 'persisted': - cache_path = os.path.join( - tempfile.gettempdir(), '{0}.dbm'.format(uuid.uuid4().hex) - ) - - cache = lambda session: ftrack_api.cache.SerialisedCache( - ftrack_api.cache.FileCache(cache_path), - encode=functools.partial( - session.encode, entity_attribute_strategy='persisted_only' - ), - decode=session.decode - ) - - def cleanup(): - '''Cleanup.''' - try: - os.remove(cache_path) - except OSError: - # BSD DB (Mac OSX) implementation of the interface will append - # a .db extension. - os.remove(cache_path + '.db') - - request.addfinalizer(cleanup) - - return cache - - -@pytest.fixture() -def temporary_invalid_schema_cache(request): - '''Return schema cache path to invalid schema cache file.''' - schema_cache_path = os.path.join( - tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) - ) - - with open(schema_cache_path, 'w') as file_: - file_.write('${invalid json}') - - def cleanup(): - '''Cleanup.''' - os.remove(schema_cache_path) - - request.addfinalizer(cleanup) - - return schema_cache_path - - -@pytest.fixture() -def temporary_valid_schema_cache(request, mocked_schemas): - '''Return schema cache path to valid schema cache file.''' - schema_cache_path = os.path.join( - tempfile.gettempdir(), - 'ftrack_api_schema_cache_test_{0}.json'.format(uuid.uuid4().hex) - ) - - with open(schema_cache_path, 'w') as file_: - json.dump(mocked_schemas, file_, indent=4) - - def cleanup(): - '''Cleanup.''' - os.remove(schema_cache_path) - - request.addfinalizer(cleanup) - - return schema_cache_path - - -class SelectiveCache(ftrack_api.cache.ProxyCache): - '''Proxy cache that should not cache newly created entities.''' - - def set(self, key, value): - '''Set *value* for *key*.''' - if isinstance(value, ftrack_api.entity.base.Entity): - if ( - ftrack_api.inspection.state(value) - is ftrack_api.symbol.CREATED - ): - return - - super(SelectiveCache, self).set(key, value) - - -def test_get_entity(session, 
user): - '''Retrieve an entity by type and id.''' - matching = session.get(*ftrack_api.inspection.identity(user)) - assert matching == user - - -def test_get_non_existant_entity(session): - '''Retrieve a non-existant entity by type and id.''' - matching = session.get('User', 'non-existant-id') - assert matching is None - - -def test_get_entity_of_invalid_type(session): - '''Fail to retrieve an entity using an invalid type.''' - with pytest.raises(KeyError): - session.get('InvalidType', 'id') - - -def test_create(session): - '''Create entity.''' - user = session.create('User', {'username': 'martin'}) - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] == 'martin' - assert user['email'] is ftrack_api.symbol.NOT_SET - - -def test_create_using_only_defaults(session): - '''Create entity using defaults only.''' - user = session.create('User') - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET - - -def test_create_using_server_side_defaults(session): - '''Create entity using server side defaults.''' - user = session.create('User') - with session.auto_populating(False): - assert user['id'] is not ftrack_api.symbol.NOT_SET - assert user['username'] is ftrack_api.symbol.NOT_SET - - session.commit() - assert user['username'] is not ftrack_api.symbol.NOT_SET - - -def test_create_overriding_defaults(session): - '''Create entity overriding defaults.''' - uid = str(uuid.uuid4()) - user = session.create('User', {'id': uid}) - with session.auto_populating(False): - assert user['id'] == uid - - -def test_create_with_reference(session): - '''Create entity with a reference to another.''' - status = session.query('Status')[0] - task = session.create('Task', {'status': status}) - assert task['status'] is status - - -def test_ensure_new_entity(session, unique_name): - '''Ensure entity, creating first.''' - entity = 
session.ensure('User', {'username': unique_name}) - assert entity['username'] == unique_name - - -def test_ensure_entity_with_non_string_data_types(session): - '''Ensure entity against non-string data types, creating first.''' - datetime = arrow.get() - - task = session.query('Task').first() - user = session.query( - 'User where username is {}'.format(session.api_user) - ).first() - - first = session.ensure( - 'Timelog', - { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } - ) - - with mock.patch.object(session, 'create') as mocked: - session.ensure( - 'Timelog', - { - 'start': datetime, - 'duration': 10, - 'user_id': user['id'], - 'context_id': task['id'] - } - ) - assert not mocked.called - - assert first['start'] == datetime - assert first['duration'] == 10 - - -def test_ensure_entity_with_identifying_keys(session, unique_name): - '''Ensure entity, checking using keys subset and then creating.''' - entity = session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['username'] - ) - assert entity['username'] == unique_name - - -def test_ensure_entity_with_invalid_identifying_keys(session, unique_name): - '''Fail to ensure entity when identifying key missing from data.''' - with pytest.raises(KeyError): - session.ensure( - 'User', {'username': unique_name, 'email': 'test@example.com'}, - identifying_keys=['invalid'] - ) - - -def test_ensure_entity_with_missing_identifying_keys(session): - '''Fail to ensure entity when no identifying keys determined.''' - with pytest.raises(ValueError): - session.ensure('User', {}) - - -def test_ensure_existing_entity(session, unique_name): - '''Ensure existing entity.''' - entity = session.ensure('User', {'first_name': unique_name}) - - # Second call should not commit any new entity, just retrieve the existing. 
- with mock.patch.object(session, 'create') as mocked: - retrieved = session.ensure('User', {'first_name': unique_name}) - assert not mocked.called - assert retrieved == entity - - -def test_ensure_update_existing_entity(session, unique_name): - '''Ensure and update existing entity.''' - entity = session.ensure( - 'User', {'first_name': unique_name, 'email': 'anon@example.com'} - ) - assert entity['email'] == 'anon@example.com' - - # Second call should commit updates. - retrieved = session.ensure( - 'User', {'first_name': unique_name, 'email': 'test@example.com'}, - identifying_keys=['first_name'] - ) - assert retrieved == entity - assert retrieved['email'] == 'test@example.com' - - -def test_reconstruct_entity(session): - '''Reconstruct entity.''' - uid = str(uuid.uuid4()) - data = { - 'id': uid, - 'username': 'martin', - 'email': 'martin@example.com' - } - user = session.create('User', data, reconstructing=True) - - for attribute in user.attributes: - # No local attributes should be set. - assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET - - # Only remote attributes that had explicit values should be set. - value = attribute.get_remote_value(user) - if attribute.name in data: - assert value == data[attribute.name] - else: - assert value is ftrack_api.symbol.NOT_SET - - -def test_reconstruct_entity_does_not_apply_defaults(session): - '''Reconstruct entity does not apply defaults.''' - # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) - with session.auto_populating(False): - assert user['id'] is ftrack_api.symbol.NOT_SET - - -def test_reconstruct_empty_entity(session): - '''Reconstruct empty entity.''' - # Note: Use private method to avoid merge which requires id be set. - user = session._create('User', {}, reconstructing=True) - - for attribute in user.attributes: - # No local attributes should be set. 
- assert attribute.get_local_value(user) is ftrack_api.symbol.NOT_SET - - # No remote attributes should be set. - assert attribute.get_remote_value(user) is ftrack_api.symbol.NOT_SET - - -def test_delete_operation_ordering(session, unique_name): - '''Delete entities in valid order.''' - # Construct entities. - project_schema = session.query('ProjectSchema').first() - project = session.create('Project', { - 'name': unique_name, - 'full_name': unique_name, - 'project_schema': project_schema - }) - - sequence = session.create('Sequence', { - 'name': unique_name, - 'parent': project - }) - - session.commit() - - # Delete in order that should succeed. - session.delete(sequence) - session.delete(project) - - session.commit() - - -def test_create_then_delete_operation_ordering(session, unique_name): - '''Create and delete entity in one transaction.''' - entity = session.create('User', {'username': unique_name}) - session.delete(entity) - session.commit() - - -def test_create_and_modify_to_have_required_attribute(session, unique_name): - '''Create and modify entity to have required attribute in transaction.''' - entity = session.create('Scope', {}) - other = session.create('Scope', {'name': unique_name}) - entity['name'] = '{0}2'.format(unique_name) - session.commit() - - -def test_ignore_in_create_entity_payload_values_set_to_not_set( - mocker, unique_name, session -): - '''Ignore in commit, created entity data set to NOT_SET''' - mocked = mocker.patch.object(session, 'call') - - # Should ignore 'email' attribute in payload. 
- new_user = session.create( - 'User', {'username': unique_name, 'email': 'test'} - ) - new_user['email'] = ftrack_api.symbol.NOT_SET - session.commit() - payloads = mocked.call_args[0][0] - assert len(payloads) == 1 - - -def test_ignore_operation_that_modifies_attribute_to_not_set( - mocker, session, user -): - '''Ignore in commit, operation that sets attribute value to NOT_SET''' - mocked = mocker.patch.object(session, 'call') - - # Should result in no call to server. - user['email'] = ftrack_api.symbol.NOT_SET - session.commit() - - assert not mocked.called - - -def test_operation_optimisation_on_commit(session, mocker): - '''Optimise operations on commit.''' - mocked = mocker.patch.object(session, 'call') - - user_a = session.create('User', {'username': 'bob'}) - user_a['username'] = 'foo' - user_a['email'] = 'bob@example.com' - - user_b = session.create('User', {'username': 'martin'}) - user_b['email'] = 'martin@ftrack.com' - - user_a['email'] = 'bob@example.com' - user_a['first_name'] = 'Bob' - - user_c = session.create('User', {'username': 'neverexist'}) - user_c['email'] = 'ignore@example.com' - session.delete(user_c) - - user_a_entity_key = ftrack_api.inspection.primary_key(user_a).values() - user_b_entity_key = ftrack_api.inspection.primary_key(user_b).values() - - session.commit() - - # The above operations should have translated into three payloads to call - # (two creates and one update). 
- payloads = mocked.call_args[0][0] - assert len(payloads) == 3 - - assert payloads[0]['action'] == 'create' - assert payloads[0]['entity_key'] == user_a_entity_key - assert set(payloads[0]['entity_data'].keys()) == set([ - '__entity_type__', 'id', 'resource_type', 'username' - ]) - - assert payloads[1]['action'] == 'create' - assert payloads[1]['entity_key'] == user_b_entity_key - assert set(payloads[1]['entity_data'].keys()) == set([ - '__entity_type__', 'id', 'resource_type', 'username', 'email' - ]) - - assert payloads[2]['action'] == 'update' - assert payloads[2]['entity_key'] == user_a_entity_key - assert set(payloads[2]['entity_data'].keys()) == set([ - '__entity_type__', 'email', 'first_name' - ]) - - -def test_state_collection(session, unique_name, user): - '''Session state collection holds correct entities.''' - # NOT_SET - user_a = session.create('User', {'username': unique_name}) - session.commit() - - # CREATED - user_b = session.create('User', {'username': unique_name}) - user_b['username'] = 'changed' - - # MODIFIED - user_c = user - user_c['username'] = 'changed' - - # DELETED - user_d = session.create('User', {'username': unique_name}) - session.delete(user_d) - - assert session.created == [user_b] - assert session.modified == [user_c] - assert session.deleted == [user_d] - - -def test_get_entity_with_composite_primary_key(session, new_project): - '''Retrieve entity that uses a composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. 
- new_session = ftrack_api.Session() - retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() - ) - - assert retrieved_entity == entity - - -def test_get_entity_with_incomplete_composite_primary_key(session, new_project): - '''Fail to retrieve entity using incomplete composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. - new_session = ftrack_api.Session() - with pytest.raises(ValueError): - new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values()[0] - ) - - -def test_populate_entity(session, new_user): - '''Populate entity that uses single primary key.''' - with session.auto_populating(False): - assert new_user['email'] is ftrack_api.symbol.NOT_SET - - session.populate(new_user, 'email') - assert new_user['email'] is not ftrack_api.symbol.NOT_SET - - -def test_populate_entities(session, unique_name): - '''Populate multiple entities that use single primary key.''' - users = [] - for index in range(3): - users.append( - session.create( - 'User', {'username': '{0}-{1}'.format(unique_name, index)} - ) - ) - - session.commit() - - with session.auto_populating(False): - for user in users: - assert user['email'] is ftrack_api.symbol.NOT_SET - - session.populate(users, 'email') - - for user in users: - assert user['email'] is not ftrack_api.symbol.NOT_SET - - -def test_populate_entity_with_composite_primary_key(session, new_project): - '''Populate entity that uses a composite primary key.''' - entity = session.create('Metadata', { - 'key': 'key', 'value': 'value', - 'parent_type': new_project.entity_type, - 'parent_id': new_project['id'] - }) - - session.commit() - - # Avoid cache. 
- new_session = ftrack_api.Session() - retrieved_entity = new_session.get( - 'Metadata', ftrack_api.inspection.primary_key(entity).values() - ) - - # Manually change already populated remote value so can test it gets reset - # on populate call. - retrieved_entity.attributes.get('value').set_remote_value( - retrieved_entity, 'changed' - ) - - new_session.populate(retrieved_entity, 'value') - assert retrieved_entity['value'] == 'value' - - -@pytest.mark.parametrize('server_information, compatible', [ - ({}, False), - ({'version': '3.3.11'}, True), - ({'version': '3.3.12'}, True), - ({'version': '3.4'}, True), - ({'version': '3.4.1'}, True), - ({'version': '3.5.16'}, True), - ({'version': '3.3.10'}, False) -], ids=[ - 'No information', - 'Valid current version', - 'Valid higher version', - 'Valid higher version', - 'Valid higher version', - 'Valid higher version', - 'Invalid lower version' -]) -def test_check_server_compatibility( - server_information, compatible, session -): - '''Check server compatibility.''' - with mock.patch.dict( - session._server_information, server_information, clear=True - ): - if compatible: - session.check_server_compatibility() - else: - with pytest.raises(ftrack_api.exception.ServerCompatibilityError): - session.check_server_compatibility() - - -def test_encode_entity_using_all_attributes_strategy(mocked_schema_session): - '''Encode entity using "all" entity_attribute_strategy.''' - new_bar = mocked_schema_session.create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id' - } - ) - - new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42, - 'number': 12345678.9, - 'boolean': False, - 'date': arrow.get('2015-11-18 15:24:09'), - 'bars': [new_bar] - } - ) - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='all' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "bars": [{"__entity_type__": "Bar", "id": "bar_unique_id"}], - 
"boolean": false, - "date": {"__type__": "datetime", "value": "2015-11-18T15:24:09+00:00"}, - "id": "a_unique_id", - "integer": 42, - "number": 12345678.9, - "string": "abc"} - ''').replace('\n', '') - - -def test_encode_entity_using_only_set_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "set_only" entity_attribute_strategy.''' - new_foo = mocked_schema_session.create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - } - ) - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='set_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "id": "a_unique_id", - "integer": 42, - "string": "abc"} - ''').replace('\n', '') - - -def test_encode_computed_attribute_using_persisted_only_attributes_strategy( - mocked_schema_session -): - '''Encode computed attribute, "persisted_only" entity_attribute_strategy.''' - new_bar = mocked_schema_session._create( - 'Bar', - { - 'name': 'myBar', - 'id': 'bar_unique_id', - 'computed_value': 'FOO' - }, - reconstructing=True - ) - - encoded = mocked_schema_session.encode( - new_bar, entity_attribute_strategy='persisted_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Bar", - "id": "bar_unique_id", - "name": "myBar"} - ''').replace('\n', '') - - -def test_encode_entity_using_only_modified_attributes_strategy( - mocked_schema_session -): - '''Encode entity using "modified_only" entity_attribute_strategy.''' - new_foo = mocked_schema_session._create( - 'Foo', - { - 'id': 'a_unique_id', - 'string': 'abc', - 'integer': 42 - }, - reconstructing=True - ) - - new_foo['string'] = 'Modified' - - encoded = mocked_schema_session.encode( - new_foo, entity_attribute_strategy='modified_only' - ) - - assert encoded == textwrap.dedent(''' - {"__entity_type__": "Foo", - "id": "a_unique_id", - "string": "Modified"} - ''').replace('\n', '') - - -def test_encode_entity_using_invalid_strategy(session, new_task): - '''Fail to 
encode entity using invalid strategy.''' - with pytest.raises(ValueError): - session.encode(new_task, entity_attribute_strategy='invalid') - - -def test_encode_operation_payload(session): - '''Encode operation payload.''' - sequence_component = session.create_component( - "/path/to/sequence.%d.jpg [1]", location=None - ) - file_component = sequence_component["members"][0] - - encoded = session.encode([ - ftrack_api.session.OperationPayload({ - 'action': 'create', - 'entity_data': { - '__entity_type__': u'FileComponent', - u'container': sequence_component, - 'id': file_component['id'] - }, - 'entity_key': [file_component['id']], - 'entity_type': u'FileComponent' - }), - ftrack_api.session.OperationPayload({ - 'action': 'update', - 'entity_data': { - '__entity_type__': u'SequenceComponent', - u'members': ftrack_api.collection.Collection( - sequence_component, - sequence_component.attributes.get('members'), - data=[file_component] - ) - }, - 'entity_key': [sequence_component['id']], - 'entity_type': u'SequenceComponent' - }) - ]) - - expected = textwrap.dedent(''' - [{{"action": "create", - "entity_data": {{"__entity_type__": "FileComponent", - "container": {{"__entity_type__": "SequenceComponent", - "id": "{0[id]}"}}, - "id": "{1[id]}"}}, - "entity_key": ["{1[id]}"], - "entity_type": "FileComponent"}}, - {{"action": "update", - "entity_data": {{"__entity_type__": "SequenceComponent", - "members": [{{"__entity_type__": "FileComponent", "id": "{1[id]}"}}]}}, - "entity_key": ["{0[id]}"], - "entity_type": "SequenceComponent"}}] - '''.format(sequence_component, file_component)).replace('\n', '') - - assert encoded == expected - - -def test_decode_partial_entity( - session, new_task -): - '''Decode partially encoded entity.''' - encoded = session.encode( - new_task, entity_attribute_strategy='set_only' - ) - - entity = session.decode(encoded) - - assert entity == new_task - assert entity is not new_task - - -def test_reset(mocker): - '''Reset session.''' - plugin_path = 
os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') - ) - session = ftrack_api.Session(plugin_paths=[plugin_path]) - - assert hasattr(session.types.get('User'), 'stub') - location = session.query('Location where name is "test.location"').one() - assert location.accessor is not ftrack_api.symbol.NOT_SET - - mocked_close = mocker.patch.object(session._request, 'close') - mocked_fetch = mocker.patch.object(session, '_load_schemas') - - session.reset() - - # Assert custom entity type maintained. - assert hasattr(session.types.get('User'), 'stub') - - # Assert location plugin re-configured. - location = session.query('Location where name is "test.location"').one() - assert location.accessor is not ftrack_api.symbol.NOT_SET - - # Assert connection not closed and no schema fetch issued. - assert not mocked_close.called - assert not mocked_fetch.called - - -def test_rollback_scalar_attribute_change(session, new_user): - '''Rollback scalar attribute change via session.''' - assert not session.recorded_operations - current_first_name = new_user['first_name'] - - new_user['first_name'] = 'NewName' - assert new_user['first_name'] == 'NewName' - assert session.recorded_operations - - session.rollback() - - assert not session.recorded_operations - assert new_user['first_name'] == current_first_name - - -def test_rollback_collection_attribute_change(session, new_user): - '''Rollback collection attribute change via session.''' - assert not session.recorded_operations - current_timelogs = new_user['timelogs'] - assert list(current_timelogs) == [] - - timelog = session.create('Timelog', {}) - new_user['timelogs'].append(timelog) - assert list(new_user['timelogs']) == [timelog] - assert session.recorded_operations - - session.rollback() - - assert not session.recorded_operations - assert list(new_user['timelogs']) == [] - - -def test_rollback_entity_creation(session): - '''Rollback entity creation via session.''' - assert not 
session.recorded_operations - - new_user = session.create('User') - assert session.recorded_operations - assert new_user in session.created - - session.rollback() - - assert not session.recorded_operations - assert new_user not in session.created - assert new_user not in session._local_cache.values() - - -def test_rollback_entity_deletion(session, new_user): - '''Rollback entity deletion via session.''' - assert not session.recorded_operations - - session.delete(new_user) - assert session.recorded_operations - assert new_user in session.deleted - - session.rollback() - assert not session.recorded_operations - assert new_user not in session.deleted - assert new_user in session._local_cache.values() - - -# Caching -# ------------------------------------------------------------------------------ - - -def test_get_entity_bypassing_cache(session, user, mocker): - '''Retrieve an entity by type and id bypassing cache.''' - mocker.patch.object(session, 'call', wraps=session.call) - - session.cache.remove( - session.cache_key_maker.key(ftrack_api.inspection.identity(user)) - ) - - matching = session.get(*ftrack_api.inspection.identity(user)) - - # Check a different instance returned. - assert matching is not user - - # Check instances have the same identity. - assert matching == user - - # Check cache was bypassed and server was called. - assert session.call.called - - -def test_get_entity_from_cache(cache, task, mocker): - '''Retrieve an entity by type and id from cache.''' - session = ftrack_api.Session(cache=cache) - - # Prepare cache. - session.merge(task) - - # Disable server calls. - mocker.patch.object(session, 'call') - - # Retrieve entity from cache. - entity = session.get(*ftrack_api.inspection.identity(task)) - - assert entity is not None, 'Failed to retrieve entity from cache.' - assert entity == task - assert entity is not task - - # Check that no call was made to server. 
- assert not session.call.called - - -def test_get_entity_tree_from_cache(cache, new_project_tree, mocker): - '''Retrieve an entity tree from cache.''' - session = ftrack_api.Session(cache=cache) - - # Prepare cache. - # TODO: Maybe cache should be prepopulated for a better check here. - session.query( - 'select children, children.children, children.children.children, ' - 'children.children.children.assignments, ' - 'children.children.children.assignments.resource ' - 'from Project where id is "{0}"' - .format(new_project_tree['id']) - ).one() - - # Disable server calls. - mocker.patch.object(session, 'call') - - # Retrieve entity from cache. - entity = session.get(*ftrack_api.inspection.identity(new_project_tree)) - - assert entity is not None, 'Failed to retrieve entity from cache.' - assert entity == new_project_tree - assert entity is not new_project_tree - - # Check tree. - with session.auto_populating(False): - for sequence in entity['children']: - for shot in sequence['children']: - for task in shot['children']: - assignments = task['assignments'] - for assignment in assignments: - resource = assignment['resource'] - - assert resource is not ftrack_api.symbol.NOT_SET - - # Check that no call was made to server. - assert not session.call.called - - -def test_get_metadata_from_cache(session, mocker, cache, new_task): - '''Retrieve an entity along with its metadata from cache.''' - new_task['metadata']['key'] = 'value' - session.commit() - - fresh_session = ftrack_api.Session(cache=cache) - - # Prepare cache. - fresh_session.query( - 'select metadata.key, metadata.value from ' - 'Task where id is "{0}"' - .format(new_task['id']) - ).all() - - # Disable server calls. - mocker.patch.object(fresh_session, 'call') - - # Retrieve entity from cache. - entity = fresh_session.get(*ftrack_api.inspection.identity(new_task)) - - assert entity is not None, 'Failed to retrieve entity from cache.' 
- assert entity == new_task - assert entity is not new_task - - # Check metadata cached correctly. - with fresh_session.auto_populating(False): - metadata = entity['metadata'] - assert metadata['key'] == 'value' - - assert not fresh_session.call.called - - -def test_merge_circular_reference(cache, temporary_file): - '''Merge circular reference into cache.''' - session = ftrack_api.Session(cache=cache) - # The following will test the condition as a FileComponent will be created - # with corresponding ComponentLocation. The server will return the file - # component data with the component location embedded. The component - # location will in turn have an embedded reference to the file component. - # If the merge does not prioritise the primary keys of the instance then - # any cache that relies on using the identity of the file component will - # fail. - component = session.create_component(path=temporary_file) - assert component - - -def test_create_with_selective_cache(session): - '''Create entity does not store entity in selective cache.''' - cache = ftrack_api.cache.MemoryCache() - session.cache.caches.append(SelectiveCache(cache)) - try: - user = session.create('User', {'username': 'martin'}) - cache_key = session.cache_key_maker.key( - ftrack_api.inspection.identity(user) - ) - - with pytest.raises(KeyError): - cache.get(cache_key) - - finally: - session.cache.caches.pop() - - -def test_correct_file_type_on_sequence_component(session): - '''Create sequence component with correct file type.''' - path = '/path/to/image/sequence.%04d.dpx [1-10]' - sequence_component = session.create_component(path) - - assert sequence_component['file_type'] == '.dpx' - - -def test_read_schemas_from_cache( - session, temporary_valid_schema_cache -): - '''Read valid content from schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' - - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) - - assert expected_hash == hash_ - - -def 
test_fail_to_read_schemas_from_invalid_cache( - session, temporary_invalid_schema_cache -): - '''Fail to read invalid content from schema cache.''' - with pytest.raises(ValueError): - session._read_schemas_from_cache( - temporary_invalid_schema_cache - ) - - -def test_write_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Write valid content to schema cache.''' - expected_hash = 'a98d0627b5e33966e43e1cb89b082db7' - schemas, _ = session._read_schemas_from_cache(temporary_valid_schema_cache) - - session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - - schemas, hash_ = session._read_schemas_from_cache( - temporary_valid_schema_cache - ) - - assert expected_hash == hash_ - - -def test_fail_to_write_invalid_schemas_to_cache( - session, temporary_valid_schema_cache -): - '''Fail to write invalid content to schema cache.''' - # Datetime not serialisable by default. - invalid_content = datetime.datetime.now() - - with pytest.raises(TypeError): - session._write_schemas_to_cache( - invalid_content, temporary_valid_schema_cache - ) - - -def test_load_schemas_from_valid_cache( - mocker, session, temporary_valid_schema_cache, mocked_schemas -): - '''Load schemas from cache.''' - expected_schemas = session._load_schemas(temporary_valid_schema_cache) - - mocked = mocker.patch.object(session, 'call') - schemas = session._load_schemas(temporary_valid_schema_cache) - - assert schemas == expected_schemas - assert not mocked.called - - -def test_load_schemas_from_server_when_cache_invalid( - mocker, session, temporary_invalid_schema_cache -): - '''Load schemas from server when cache invalid.''' - mocked = mocker.patch.object(session, 'call', wraps=session.call) - - session._load_schemas(temporary_invalid_schema_cache) - assert mocked.called - - -def test_load_schemas_from_server_when_cache_outdated( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas from server when cache outdated.''' - schemas, _ = 
session._read_schemas_from_cache(temporary_valid_schema_cache) - schemas.append({ - 'id': 'NewTest' - }) - session._write_schemas_to_cache(schemas, temporary_valid_schema_cache) - - mocked = mocker.patch.object(session, 'call', wraps=session.call) - session._load_schemas(temporary_valid_schema_cache) - - assert mocked.called - - -def test_load_schemas_from_server_not_reporting_schema_hash( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas from server when server does not report schema hash.''' - mocked_write = mocker.patch.object( - session, '_write_schemas_to_cache', - wraps=session._write_schemas_to_cache - ) - - server_information = session._server_information.copy() - server_information.pop('schema_hash') - mocker.patch.object( - session, '_server_information', new=server_information - ) - - session._load_schemas(temporary_valid_schema_cache) - - # Cache still written even if hash not reported. - assert mocked_write.called - - mocked = mocker.patch.object(session, 'call', wraps=session.call) - session._load_schemas(temporary_valid_schema_cache) - - # No hash reported by server so cache should have been bypassed. 
- assert mocked.called - - -def test_load_schemas_bypassing_cache( - mocker, session, temporary_valid_schema_cache -): - '''Load schemas bypassing cache when set to False.''' - with mocker.patch.object(session, 'call', wraps=session.call): - - session._load_schemas(temporary_valid_schema_cache) - assert session.call.call_count == 1 - - session._load_schemas(False) - assert session.call.call_count == 2 - - -def test_get_tasks_widget_url(session): - '''Tasks widget URL returns valid HTTP status.''' - url = session.get_widget_url('tasks') - response = requests.get(url) - response.raise_for_status() - - -def test_get_info_widget_url(session, task): - '''Info widget URL for *task* returns valid HTTP status.''' - url = session.get_widget_url('info', entity=task, theme='light') - response = requests.get(url) - response.raise_for_status() - - -def test_encode_media_from_path(session, video_path): - '''Encode media based on a file path.''' - job = session.encode_media(video_path) - - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'output' in job_data - assert 'source_component_id' in job_data - assert 'keep_original' in job_data and job_data['keep_original'] is False - assert len(job_data['output']) - assert 'component_id' in job_data['output'][0] - assert 'format' in job_data['output'][0] - - -def test_encode_media_from_component(session, video_path): - '''Encode media based on a component.''' - location = session.query('Location where name is "ftrack.server"').one() - component = session.create_component( - video_path, - location=location - ) - session.commit() - - job = session.encode_media(component) - - assert job.entity_type == 'Job' - - job_data = json.loads(job['data']) - assert 'keep_original' in job_data and job_data['keep_original'] is True - - -def test_create_sequence_component_with_size(session, temporary_sequence): - '''Create a sequence component and verify that is has a size.''' - location = session.query('Location where 
name is "ftrack.server"').one() - component = session.create_component( - temporary_sequence - ) - - assert component['size'] > 0 - - -def test_plugin_arguments(mocker): - '''Pass plugin arguments to plugin discovery mechanism.''' - mock = mocker.patch( - 'ftrack_api.plugin.discover' - ) - session = ftrack_api.Session( - plugin_paths=[], plugin_arguments={"test": "value"} - ) - assert mock.called - mock.assert_called_once_with([], [session], {"test": "value"}) - -def test_remote_reset(session, new_user): - '''Reset user api key.''' - key_1 = session.reset_remote( - 'api_key', entity=new_user - ) - - key_2 = session.reset_remote( - 'api_key', entity=new_user - ) - - - assert key_1 != key_2 - - -@pytest.mark.parametrize('attribute', [ - ('id',), - ('email',) - -], ids=[ - 'Fail resetting primary key', - 'Fail resetting attribute without default value', -]) -def test_fail_remote_reset(session, user, attribute): - '''Fail trying to rest invalid attributes.''' - - with pytest.raises(ftrack_api.exception.ServerError): - session.reset_remote( - attribute, user - ) - - -def test_close(session): - '''Close session.''' - assert session.closed is False - session.close() - assert session.closed is True - - -def test_close_already_closed_session(session): - '''Close session that is already closed.''' - session.close() - assert session.closed is True - session.close() - assert session.closed is True - - -def test_server_call_after_close(session): - '''Fail to issue calls to server after session closed.''' - session.close() - assert session.closed is True - - with pytest.raises(ftrack_api.exception.ConnectionClosedError): - session.query('User').first() - - -def test_context_manager(session): - '''Use session as context manager.''' - with session: - assert session.closed is False - - assert session.closed is True - - -def test_delayed_job(session): - '''Test the delayed_job action''' - - with pytest.raises(ValueError): - session.delayed_job( - 'DUMMY_JOB' - ) - - 
-@pytest.mark.skip(reason='No configured ldap server.') -def test_delayed_job_ldap_sync(session): - '''Test the a delayed_job ldap sync action''' - result = session.delayed_job( - ftrack_api.symbol.JOB_SYNC_USERS_LDAP - ) - - assert isinstance( - result, ftrack_api.entity.job.Job - ) - - -def test_query_nested_custom_attributes(session, new_asset_version): - '''Query custom attributes nested and update a value and query again. - - This test will query custom attributes via 2 relations, then update the - value in one API session and read it back in another to verify that it gets - the new value. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - # Read the version via a relation in both sessions. - def get_versions(sessions): - versions = [] - for _session in sessions: - asset = _session.query( - 'select versions.custom_attributes from Asset where id is "{0}"'.format( - new_asset_version.get('asset_id') - ) - ).first() - - for version in asset['versions']: - if version.get('id') == new_asset_version.get('id'): - versions.append(version) - - return versions - - # Get version from both sessions. - versions = get_versions((session_one, session_two)) - - # Read attribute for both sessions. - for version in versions: - version['custom_attributes']['versiontest'] - - # Set attribute on session_one. - versions[0]['custom_attributes']['versiontest'] = random.randint( - 0, 99999 - ) - - session.commit() - - # Read version from server for session_two. - session_two_version = get_versions((session_two, ))[0] - - # Verify that value in session 2 is the same as set and committed in - # session 1. - assert ( - session_two_version['custom_attributes']['versiontest'] == - versions[0]['custom_attributes']['versiontest'] - ) - - -def test_query_nested(session): - '''Query components nested and update a value and query again. 
- - This test will query components via 2 relations, then update the - value in one API session and read it back in another to verify that it gets - the new value. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - query = ( - 'select versions.components.name from Asset where id is ' - '"12939d0c-6766-11e1-8104-f23c91df25eb"' - ) - - def get_version(session): - '''Return the test version from *session*.''' - asset = session.query(query).first() - asset_version = None - for version in asset['versions']: - if version['version'] == 8: - asset_version = version - break - - return asset_version - - asset_version = get_version(session_one) - asset_version2 = get_version(session_two) - - # This assert is not needed, but reading the collections are to ensure they - # are inflated. - assert ( - asset_version2['components'][0]['name'] == - asset_version['components'][0]['name'] - ) - - asset_version['components'][0]['name'] = str(uuid.uuid4()) - - session.commit() - - asset_version2 = get_version(session_two) - - assert ( - asset_version['components'][0]['name'] == - asset_version2['components'][0]['name'] - ) - - -def test_merge_iterations(session, mocker, project): - '''Ensure merge does not happen to many times when querying.''' - mocker.spy(session, '_merge') - - session.query( - 'select status from Task where project_id is {} limit 10'.format( - project['id'] - ) - ).all() - - assert session._merge.call_count < 75 - - -@pytest.mark.parametrize( - 'get_versions', - [ - lambda component, asset_version, asset: component['version']['asset']['versions'], - lambda component, asset_version, asset: asset_version['asset']['versions'], - lambda component, asset_version, asset: asset['versions'], - ], - ids=[ - 'from_component', - 'from_asset_version', - 'from_asset', - ] -) -def test_query_nested2(session, get_versions): - '''Query version.asset.versions from component and then add new version. 
- - This test will query versions via multiple relations and ensure a new - version appears when added to a different session and then is queried - again. - - ''' - session_one = session - session_two = ftrack_api.Session( - auto_connect_event_hub=False - ) - - # Get a random component that is linked to a version and asset. - component_id = session_two.query( - 'FileComponent where version.asset_id != None' - ).first()['id'] - - query = ( - 'select version.asset.versions from Component where id is "{}"'.format( - component_id - ) - ) - - component = session_one.query(query).one() - asset_version = component['version'] - asset = component['version']['asset'] - versions = component['version']['asset']['versions'] - length = len(versions) - - session_two.create('AssetVersion', { - 'asset_id': asset['id'] - }) - - session_two.commit() - - component = session_one.query(query).one() - versions = get_versions(component, asset_version, asset) - new_length = len(versions) - - assert length + 1 == new_length - - -def test_session_ready_reset_events(mocker): - '''Session ready and reset events.''' - plugin_path = os.path.abspath( - os.path.join(os.path.dirname(__file__), '..', 'fixture', 'plugin') - ) - session = ftrack_api.Session(plugin_paths=[plugin_path]) - - assert session._test_called_events['ftrack.api.session.ready'] is 1 - assert session._test_called_events['ftrack.api.session.reset'] is 0 - - session.reset() - assert session._test_called_events['ftrack.api.session.ready'] is 1 - assert session._test_called_events['ftrack.api.session.reset'] is 1 - - -def test_entity_reference(mocker, session): - '''Return entity reference that uniquely identifies entity.''' - mock_entity = mocker.Mock(entity_type="MockEntityType") - mock_auto_populating = mocker.patch.object(session, "auto_populating") - mock_primary_key = mocker.patch( - "ftrack_api.inspection.primary_key", return_value={"id": "mock-id"} - ) - - reference = session.entity_reference(mock_entity) - - assert reference 
== { - "__entity_type__": "MockEntityType", - "id": "mock-id" - } - - mock_auto_populating.assert_called_once_with(False) - mock_primary_key.assert_called_once_with(mock_entity) - - -def test__entity_reference(mocker, session): - '''Act as alias to entity_reference.''' - mock_entity = mocker.Mock(entity_type="MockEntityType") - mock_entity_reference = mocker.patch.object(session, "entity_reference") - mocker.patch("warnings.warn") - - session._entity_reference(mock_entity) - - mock_entity_reference.assert_called_once_with(mock_entity) - - -def test__entity_reference_issues_deprecation_warning(mocker, session): - '''Issue deprecation warning for usage of _entity_reference.''' - mocker.patch.object(session, "entity_reference") - mock_warn = mocker.patch("warnings.warn") - - session._entity_reference({}) - - mock_warn.assert_called_once_with( - ( - "Session._entity_reference is now available as public method " - "Session.entity_reference. The private method will be removed " - "in version 2.0." 
- ), - PendingDeprecationWarning - ) diff --git a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py b/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py deleted file mode 100644 index cf8b014e..00000000 --- a/client/ayon_ftrack/python2_vendor/ftrack-python-api/test/unit/test_timer.py +++ /dev/null @@ -1,74 +0,0 @@ -# :coding: utf-8 -# :copyright: Copyright (c) 2015 ftrack - -import pytest -import ftrack_api.exception - - -def test_manually_create_multiple_timers_with_error(session, new_user): - '''Fail to create a second timer.''' - session.create('Timer', { - 'user': new_user - }) - - session.commit() - - with pytest.raises(ftrack_api.exception.ServerError): - session.create('Timer', { - 'user': new_user - }) - - session.commit() - - session.reset() - - -def test_create_multiple_timers_with_error(session, new_user): - '''Fail to create a second timer.''' - new_user.start_timer() - - with pytest.raises(ftrack_api.exception.NotUniqueError): - new_user.start_timer() - - session.reset() - - -def test_start_and_stop_a_timer(session, new_user, new_task): - '''Start a new timer and stop it to create a timelog.''' - new_user.start_timer(new_task) - - new_user.stop_timer() - - timelog = session.query( - 'Timelog where context_id = "{0}"'.format(new_task['id']) - ).one() - - assert timelog['user_id'] == new_user['id'], 'User id is correct.' - assert timelog['context_id'] == new_task['id'], 'Task id is correct.' - - -def test_start_a_timer_when_timer_is_running(session, new_user, new_task): - '''Start a timer when an existing timer is already running.''' - new_user.start_timer(new_task) - - # Create the second timer without context. - new_user.start_timer(force=True) - - # There should be only one existing timelog for this user. - timelogs = session.query( - 'Timelog where user_id = "{0}"'.format(new_user['id']) - ).all() - assert len(timelogs) == 1, 'One timelog exists.' 
- - timelog = session.query( - 'Timer where user_id = "{0}"'.format(new_user['id']) - ).one() - - # Make sure running timer has no context. - assert timelog['context_id'] is None, 'Timer does not have a context.' - - -def test_stop_timer_without_timer_running(session, new_user): - '''Stop a timer when no timer is running.''' - with pytest.raises(ftrack_api.exception.NoResultFoundError): - new_user.stop_timer() diff --git a/client/ayon_ftrack/resources/__init__.py b/client/ayon_ftrack/resources/__init__.py new file mode 100644 index 00000000..f25bd2bc --- /dev/null +++ b/client/ayon_ftrack/resources/__init__.py @@ -0,0 +1,16 @@ +import os + +RESOURCES_DIR = os.path.dirname(os.path.abspath(__file__)) + + +def get_resource(*args): + """ Serves to simple resources access + + Args: + *args: should contain *subfolder* names and *filename* of + resource from resources folder + Returns: + str: Path to resource. + + """ + return os.path.normpath(os.path.join(RESOURCES_DIR, *args)) diff --git a/client/ayon_ftrack/resources/sign_in_message.html b/client/ayon_ftrack/resources/sign_in_message.html new file mode 100644 index 00000000..8ee2828c --- /dev/null +++ b/client/ayon_ftrack/resources/sign_in_message.html @@ -0,0 +1,32 @@ + + + +

Sign in to Ftrack was successful

+

+ You signed in with username {}. +

+

+ You can close this window now. +

+ + diff --git a/client/ayon_ftrack/scripts/sub_user_server.py b/client/ayon_ftrack/scripts/sub_user_server.py index f0e97a24..9f426864 100644 --- a/client/ayon_ftrack/scripts/sub_user_server.py +++ b/client/ayon_ftrack/scripts/sub_user_server.py @@ -2,8 +2,9 @@ import signal import socket -from openpype.lib import Logger -from openpype.modules import ModulesManager +from ayon_core.lib import Logger +from ayon_core.addon import AddonsManager + from ayon_ftrack.common import FtrackServer from ayon_ftrack.tray.user_server import ( @@ -36,8 +37,8 @@ def main(args): session = SocketSession( auto_connect_event_hub=True, sock=sock, Eventhub=SocketBaseEventHub ) - manager = ModulesManager() - addon = manager.modules_by_name["ftrack"] + manager = AddonsManager() + addon = manager.get("ftrack") server = FtrackServer(addon.user_event_handlers_paths) log.debug("Launching User Ftrack Server") server.run_server(session=session) diff --git a/client/ayon_ftrack/tray/ftrack_tray.py b/client/ayon_ftrack/tray/ftrack_tray.py index c5366605..304b318c 100644 --- a/client/ayon_ftrack/tray/ftrack_tray.py +++ b/client/ayon_ftrack/tray/ftrack_tray.py @@ -5,18 +5,21 @@ import ftrack_api from qtpy import QtCore, QtWidgets, QtGui +from aiohttp.web import Response, json_response + +from ayon_core import resources +from ayon_core.lib import Logger -from openpype import resources -from openpype.lib import Logger from ayon_ftrack import resolve_ftrack_url, FTRACK_ADDON_DIR from ayon_ftrack.lib import credentials + from . 
import login_dialog from .user_server import SocketThread class FtrackTrayWrapper: - def __init__(self, module): - self.module = module + def __init__(self, addon): + self._addon = addon self.log = Logger.get_logger(self.__class__.__name__) self.thread_action_server = None @@ -28,7 +31,7 @@ def __init__(self, module): self.bool_action_thread_running = False self.bool_timer_event = False - self.widget_login = login_dialog.CredentialsDialog(module) + self.widget_login = login_dialog.TrayCredentialsDialog(addon) self.widget_login.login_changed.connect(self.on_login_change) self.widget_login.logout_signal.connect(self.on_logout) @@ -41,40 +44,74 @@ def __init__(self, module): resources.get_resource("icons", "circle_orange.png") ) + def webserver_initialization(self, web_manager): + web_manager.add_addon_route( + self._addon.name, + "credentials", + "POST", + self._web_credentials_change + ) + web_manager.add_addon_route( + self._addon.name, + "credentials", + "GET", + self._web_get_credentials + ) + + async def _web_credentials_change(self, request): + data = await request.json() + username = data.get("username") + api_key = data.get("api_key") + self.set_credentials(username, api_key) + return Response(status=200) + + async def _web_get_credentials(self, _): + username = api_key = None + if self.bool_logged: + username = self.widget_login.username + api_key = self.widget_login.api_key + + return json_response({ + "username": username, + "api_key": api_key + }) + def show_login_widget(self): self.widget_login.show() self.widget_login.activateWindow() self.widget_login.raise_() def show_ftrack_browser(self): - QtGui.QDesktopServices.openUrl(self.module.ftrack_url) + QtGui.QDesktopServices.openUrl(self._addon.ftrack_url) def validate(self): - validation = False cred = credentials.get_credentials() - ft_user = cred.get("username") - ft_api_key = cred.get("api_key") - validation = credentials.check_credentials(ft_user, ft_api_key) + validation = self.set_credentials( + 
cred.get("username"), cred.get("api_key") + ) + if not validation: + self.log.info("Please sign in to Ftrack") + self.bool_logged = False + self.show_login_widget() + self.set_menu_visibility() + + return validation + + def set_credentials(self, username, api_key): + validation = credentials.check_credentials(username, api_key) if validation: - self.widget_login.set_credentials(ft_user, ft_api_key) - self.module.set_credentials_to_env(ft_user, ft_api_key) + self.widget_login.set_credentials(username, api_key) + self._addon.set_credentials_to_env(username, api_key) self.log.info("Connected to Ftrack successfully") self.on_login_change() - return validation - - if not validation and ft_user and ft_api_key: + if not validation and username and api_key: + server = self._addon.get_ftrack_url() self.log.warning( - "Current Ftrack credentials are not valid. {}: {} - {}".format( - str(os.environ.get("FTRACK_SERVER")), ft_user, ft_api_key - ) + f"Current Ftrack credentials are not valid. {server}:" + f" {username} - {api_key}" ) - self.log.info("Please sign in to Ftrack") - self.bool_logged = False - self.show_login_widget() - self.set_menu_visibility() - return validation # Necessary - login_dialog works with this method after logging in @@ -83,10 +120,9 @@ def on_login_change(self): if self.action_credentials: self.action_credentials.setIcon(self.icon_logged) + username, _ = self.widget_login.get_credentials() self.action_credentials.setToolTip( - "Logged as user \"{}\"".format( - self.widget_login.user_input.text() - ) + f"Logged as user \"{username}\"" ) self.set_menu_visibility() @@ -119,7 +155,7 @@ def set_action_server(self): self.bool_action_server_running = True self.bool_action_thread_running = False - ftrack_url = self.module.ftrack_url + ftrack_url = self._addon.ftrack_url os.environ["FTRACK_SERVER"] = ftrack_url min_fail_seconds = 5 @@ -344,7 +380,7 @@ def stop_timer_thread(self): def changed_user(self): self.stop_action_server() - 
self.module.set_credentials_to_env(None, None) + self._addon.set_credentials_to_env(None, None) self.validate() def start_timer_manager(self, data): @@ -356,10 +392,10 @@ def stop_timer_manager(self): self.thread_timer.ftrack_stop_timer() def timer_started(self, data): - self.module.timer_started(data) + self._addon.timer_started(data) def timer_stopped(self): - self.module.timer_stopped() + self._addon.timer_stopped() class FtrackEventsThread(QtCore.QThread): diff --git a/client/ayon_ftrack/tray/login_dialog.py b/client/ayon_ftrack/tray/login_dialog.py index 0ae2e0c0..d793dba3 100644 --- a/client/ayon_ftrack/tray/login_dialog.py +++ b/client/ayon_ftrack/tray/login_dialog.py @@ -1,316 +1,416 @@ -import os +import sys +import json import requests -from qtpy import QtCore, QtGui, QtWidgets +from qtpy import QtWidgets, QtCore, QtGui -from openpype import style, resources -from ayon_ftrack.lib import credentials +from ayon_core import style +from ayon_core.resources import get_ayon_icon_filepath +from ayon_core.tools.utils import get_qt_app -from . 
import login_tools +from ayon_ftrack.lib import credentials +from ayon_ftrack.tray.login_tools import LoginServerThread -class CredentialsDialog(QtWidgets.QDialog): +class _CredentialsDialog(QtWidgets.QDialog): SIZE_W = 300 SIZE_H = 230 login_changed = QtCore.Signal() logout_signal = QtCore.Signal() - def __init__(self, module, parent=None): - super(CredentialsDialog, self).__init__(parent) + def __init__(self, parent=None): + super().__init__(parent) self.setWindowTitle("AYON - Ftrack Login") - - self._module = module - - self._login_server_thread = None - self._is_logged = False - self._in_advance_mode = False - - icon = QtGui.QIcon(resources.get_openpype_icon_filepath()) - self.setWindowIcon(icon) - + self.setWindowIcon(QtGui.QIcon(get_ayon_icon_filepath())) self.setWindowFlags( - QtCore.Qt.WindowCloseButtonHint | - QtCore.Qt.WindowMinimizeButtonHint + QtCore.Qt.WindowCloseButtonHint + | QtCore.Qt.WindowMinimizeButtonHint ) self.setMinimumSize(QtCore.QSize(self.SIZE_W, self.SIZE_H)) self.setMaximumSize(QtCore.QSize(self.SIZE_W + 100, self.SIZE_H + 100)) self.setStyleSheet(style.load_stylesheet()) - self.login_changed.connect(self._on_login) - - self.ui_init() - - def ui_init(self): - self.ftsite_label = QtWidgets.QLabel("Ftrack URL:") - self.user_label = QtWidgets.QLabel("Username:") - self.api_label = QtWidgets.QLabel("API Key:") + # Inputs - user filling values + inputs_widget = QtWidgets.QWidget(self) + url_label = QtWidgets.QLabel("Ftrack URL:", inputs_widget) + user_label = QtWidgets.QLabel("Username:", inputs_widget) + api_label = QtWidgets.QLabel("API Key:", inputs_widget) - self.ftsite_input = QtWidgets.QLabel() - self.ftsite_input.setTextInteractionFlags( + url_input = QtWidgets.QLabel(inputs_widget) + url_input.setTextInteractionFlags( QtCore.Qt.TextBrowserInteraction ) - # self.ftsite_input.setReadOnly(True) - self.ftsite_input.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) + url_input.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) - self.user_input 
= QtWidgets.QLineEdit() - self.user_input.setPlaceholderText("user.name") - self.user_input.textChanged.connect(self._user_changed) + user_input = QtWidgets.QLineEdit(inputs_widget) + user_input.setPlaceholderText("user.name") - self.api_input = QtWidgets.QLineEdit() - self.api_input.setPlaceholderText( + api_input = QtWidgets.QLineEdit(inputs_widget) + api_input.setPlaceholderText( "e.g. xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" ) - self.api_input.textChanged.connect(self._api_changed) - input_layout = QtWidgets.QFormLayout() + input_layout = QtWidgets.QFormLayout(inputs_widget) input_layout.setContentsMargins(10, 15, 10, 5) + input_layout.addRow(url_label, url_input) + input_layout.addRow(user_label, user_input) + input_layout.addRow(api_label, api_input) + + # Notes and errors for user + labels_widget = QtWidgets.QWidget(self) + note_label = QtWidgets.QLabel( + ( + "NOTE: Click on \"Ftrack login\" button to log with your default" + " browser or click on \"Advanced\" button to enter" + " API key manually." 
+ ), + labels_widget + ) + note_label.setWordWrap(True) + note_label.setVisible(False) + + error_label = QtWidgets.QLabel("", labels_widget) + error_label.setWordWrap(True) + error_label.setVisible(False) - input_layout.addRow(self.ftsite_label, self.ftsite_input) - input_layout.addRow(self.user_label, self.user_input) - input_layout.addRow(self.api_label, self.api_input) + label_layout = QtWidgets.QVBoxLayout(labels_widget) + label_layout.setContentsMargins(10, 5, 10, 5) + label_layout.addWidget(note_label) + label_layout.addWidget(error_label) - self.btn_advanced = QtWidgets.QPushButton("Advanced") - self.btn_advanced.clicked.connect(self._on_advanced_clicked) + bts_widget = QtWidgets.QWidget(self) + btn_advanced = QtWidgets.QPushButton("Advanced", bts_widget) - self.btn_simple = QtWidgets.QPushButton("Simple") - self.btn_simple.clicked.connect(self._on_simple_clicked) + btn_simple = QtWidgets.QPushButton("Simple", bts_widget) - self.btn_login = QtWidgets.QPushButton("Login") - self.btn_login.setToolTip( + btn_login = QtWidgets.QPushButton("Login", bts_widget) + btn_login.setToolTip( "Set Username and API Key with entered values" ) - self.btn_login.clicked.connect(self._on_login_clicked) - self.btn_ftrack_login = QtWidgets.QPushButton("Ftrack login") - self.btn_ftrack_login.setToolTip("Open browser for Login to Ftrack") - self.btn_ftrack_login.clicked.connect(self._on_ftrack_login_clicked) + btn_ftrack_login = QtWidgets.QPushButton("Ftrack login", bts_widget) + btn_ftrack_login.setToolTip("Open browser for Login to Ftrack") - self.btn_logout = QtWidgets.QPushButton("Logout") - self.btn_logout.clicked.connect(self._on_logout_clicked) + btn_logout = QtWidgets.QPushButton("Logout", bts_widget) - self.btn_close = QtWidgets.QPushButton("Close") - self.btn_close.setToolTip("Close this window") - self.btn_close.clicked.connect(self._close_widget) + btn_close = QtWidgets.QPushButton("Close", bts_widget) + btn_close.setToolTip("Close this window") - btns_layout = 
QtWidgets.QHBoxLayout() - btns_layout.addWidget(self.btn_advanced) - btns_layout.addWidget(self.btn_simple) + btns_layout = QtWidgets.QHBoxLayout(bts_widget) + btns_layout.setContentsMargins(0, 0, 0, 0) + btns_layout.addWidget(btn_advanced, 0) + btns_layout.addWidget(btn_simple, 0) btns_layout.addStretch(1) - btns_layout.addWidget(self.btn_ftrack_login) - btns_layout.addWidget(self.btn_login) - btns_layout.addWidget(self.btn_logout) - btns_layout.addWidget(self.btn_close) - - self.note_label = QtWidgets.QLabel(( - "NOTE: Click on \"{}\" button to log with your default browser" - " or click on \"{}\" button to enter API key manually." - ).format(self.btn_ftrack_login.text(), self.btn_advanced.text())) - - self.note_label.setWordWrap(True) - self.note_label.hide() + btns_layout.addWidget(btn_ftrack_login, 0) + btns_layout.addWidget(btn_login, 0) + btns_layout.addWidget(btn_logout, 0) + btns_layout.addWidget(btn_close, 0) + + main_layout = QtWidgets.QVBoxLayout(self) + main_layout.addWidget(inputs_widget, 0) + main_layout.addWidget(labels_widget, 0) + main_layout.addStretch(1) + main_layout.addWidget(bts_widget, 0) + + show_timer = QtCore.QTimer() + show_timer.setInterval(0) + + ftrack_login_timer = QtCore.QTimer() + ftrack_login_timer.setInterval(50) + + show_timer.timeout.connect(self._on_show_timer) + ftrack_login_timer.timeout.connect(self._on_ftrack_login_timer) + user_input.textChanged.connect(self._user_changed) + api_input.textChanged.connect(self._api_changed) + btn_advanced.clicked.connect(self._on_advanced_clicked) + btn_simple.clicked.connect(self._on_simple_clicked) + btn_login.clicked.connect(self._on_login_clicked) + btn_ftrack_login.clicked.connect(self._on_ftrack_login_clicked) + btn_logout.clicked.connect(self._on_logout_clicked) + btn_close.clicked.connect(self._close_widget) + + self._url_label = url_label + self._url_input = url_input + self._user_label = user_label + self._user_input = user_input + self._api_label = api_label + self._api_input = 
api_input + + self._error_label = error_label + self._note_label = note_label + + self._btn_advanced = btn_advanced + self._btn_simple = btn_simple + self._btn_login = btn_login + self._btn_ftrack_login = btn_ftrack_login + self._btn_logout = btn_logout + self._btn_close = btn_close + + self._show_timer = show_timer + self._show_counter = 0 + + self._ftrack_login_timer = ftrack_login_timer + self._waiting_for_ftrack_login = False + self._ftrack_login_result = None - self.error_label = QtWidgets.QLabel("") - self.error_label.setWordWrap(True) - self.error_label.hide() + self._login_server_thread = None + self._is_logged = None + self._in_advance_mode = None + self._set_advanced_mode(False) + self._set_is_logged(False) + + def showEvent(self, event): + super().showEvent(event) + self._show_timer.start() + + def _on_show_timer(self): + if self._show_counter < 2: + self._show_counter += 1 + return + self._show_counter = 0 + self._show_timer.stop() + self._fill_ftrack_url() - label_layout = QtWidgets.QVBoxLayout() - label_layout.setContentsMargins(10, 5, 10, 5) - label_layout.addWidget(self.note_label) - label_layout.addWidget(self.error_label) + def closeEvent(self, event): + self._cleanup() + super().closeEvent(event) - main = QtWidgets.QVBoxLayout(self) - main.addLayout(input_layout) - main.addLayout(label_layout) - main.addStretch(1) - main.addLayout(btns_layout) + def set_credentials(self, username, api_key, is_logged=True): + self._user_input.setText(username) + self._api_input.setText(api_key) - self.fill_ftrack_url() + self._error_label.setVisible(False) - self.set_is_logged(self._is_logged) + for widget in ( + self._url_input, + self._user_input, + self._api_input, + ): + self._set_widget_state(widget, True) - self.setLayout(main) + if is_logged is not None: + self._set_is_logged(is_logged) - def show(self, *args, **kwargs): - super(CredentialsDialog, self).show(*args, **kwargs) - self.fill_ftrack_url() + def get_credentials(self): + if self._is_logged: + 
return self._user_input.text(), self._api_input.text() + return None, None - def fill_ftrack_url(self): - checked_url = self.check_url() - if checked_url == self.ftsite_input.text(): + def _fill_ftrack_url(self): + checked_url = self._check_url() + if checked_url == self._url_input.text(): return - self.ftsite_input.setText(checked_url or "< Not set >") + self._url_input.setText(checked_url or "< Not set >") enabled = bool(checked_url) - self.btn_login.setEnabled(enabled) - self.btn_ftrack_login.setEnabled(enabled) - - self.api_input.setEnabled(enabled) - self.user_input.setEnabled(enabled) + for widget in ( + self._btn_login, + self._btn_ftrack_login, + self._api_input, + self._user_input, + ): + widget.setEnabled(enabled) if not checked_url: - self.btn_advanced.hide() - self.btn_simple.hide() - self.btn_ftrack_login.hide() - self.btn_login.hide() - self.note_label.hide() - self.api_input.hide() - self.user_input.hide() - - def set_advanced_mode(self, is_advanced): - self._in_advance_mode = is_advanced + for widget in ( + self._btn_advanced, + self._btn_simple, + self._btn_ftrack_login, + self._btn_login, + self._note_label, + self._api_input, + self._user_input, + ): + widget.setVisible(False) + + def _update_advanced_logged_visibility(self): + is_advanced = self._in_advance_mode + is_logged = self._is_logged - self.error_label.setVisible(False) + advanced_visible = not is_logged and is_advanced + for widget in ( + self._btn_login, + self._btn_simple, + ): + widget.setVisible(advanced_visible) - is_logged = self._is_logged + login_visible = not is_logged and not is_advanced + for widget in ( + self._note_label, + self._btn_ftrack_login, + self._btn_advanced, + ): + widget.setVisible(login_visible) + + user_api_visible = is_logged or is_advanced + for widget in ( + self._user_label, + self._user_input, + self._api_label, + self._api_input, + ): + widget.setVisible(user_api_visible) + + def _set_advanced_mode(self, is_advanced): + if self._in_advance_mode == 
is_advanced: + return + + self._in_advance_mode = is_advanced - self.note_label.setVisible(not is_logged and not is_advanced) - self.btn_ftrack_login.setVisible(not is_logged and not is_advanced) - self.btn_advanced.setVisible(not is_logged and not is_advanced) + self._error_label.setVisible(False) - self.btn_login.setVisible(not is_logged and is_advanced) - self.btn_simple.setVisible(not is_logged and is_advanced) + self._update_advanced_logged_visibility() - self.user_label.setVisible(is_logged or is_advanced) - self.user_input.setVisible(is_logged or is_advanced) - self.api_label.setVisible(is_logged or is_advanced) - self.api_input.setVisible(is_logged or is_advanced) if is_advanced: - self.user_input.setFocus() + self._user_input.setFocus() else: - self.btn_ftrack_login.setFocus() + self._btn_ftrack_login.setFocus() + + def _set_is_logged(self, is_logged): + if self._is_logged == is_logged: + return - def set_is_logged(self, is_logged): self._is_logged = is_logged - self.user_input.setReadOnly(is_logged) - self.api_input.setReadOnly(is_logged) - self.user_input.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) - self.api_input.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) + for input_widget in ( + self._user_input, + self._api_input, + ): + input_widget.setReadOnly(is_logged) + input_widget.setCursor(QtGui.QCursor(QtCore.Qt.IBeamCursor)) - self.btn_logout.setVisible(is_logged) + self._btn_logout.setVisible(is_logged) - self.set_advanced_mode(self._in_advance_mode) + self._update_advanced_logged_visibility() - def set_error(self, msg): - self.error_label.setText(msg) - self.error_label.show() + def _set_error(self, msg): + self._error_label.setText(msg) + self._error_label.setVisible(True) def _on_logout_clicked(self): - self.user_input.setText("") - self.api_input.setText("") - self.set_is_logged(False) + self._user_input.setText("") + self._api_input.setText("") + self._set_is_logged(False) self.logout_signal.emit() def _on_simple_clicked(self): - 
self.set_advanced_mode(False) + self._set_advanced_mode(False) def _on_advanced_clicked(self): - self.set_advanced_mode(True) + self._set_advanced_mode(True) def _user_changed(self): - self._not_invalid_input(self.user_input) + self._set_widget_state(self._user_input, True) def _api_changed(self): - self._not_invalid_input(self.api_input) + self._set_widget_state(self._api_input, True) - def _not_invalid_input(self, input_widget): - input_widget.setStyleSheet("") + def _set_widget_state(self, input_widget, valid): + stylesheet = "" if valid else "border: 1px solid red;" + input_widget.setStyleSheet(stylesheet) - def _invalid_input(self, input_widget): - input_widget.setStyleSheet("border: 1px solid red;") + def _close_widget(self): + self.close() def _on_login(self): - self.set_is_logged(True) + self.login_changed.emit() + self._set_is_logged(True) self._close_widget() def _on_login_clicked(self): - username = self.user_input.text().strip() - api_key = self.api_input.text().strip() + username = self._user_input.text().strip() + api_key = self._api_input.text().strip() missing = [] if username == "": missing.append("Username") - self._invalid_input(self.user_input) + self._set_widget_state(self._user_input, False) if api_key == "": missing.append("API Key") - self._invalid_input(self.api_input) + self._set_widget_state(self._api_input, False) if len(missing) > 0: - self.set_error("You didn't enter {}".format(" and ".join(missing))) + self._set_error("You didn't enter {}".format(" and ".join(missing))) return - if not self.login_with_credentials(username, api_key): - self._invalid_input(self.user_input) - self._invalid_input(self.api_input) - self.set_error( + if not self._login_with_credentials(username, api_key): + self._set_widget_state(self._user_input, False) + self._set_widget_state(self._api_input, False) + self._set_error( "We're unable to sign in to Ftrack with these credentials" ) + def _login_with_credentials(self, username, api_key): + verification = 
credentials.check_credentials(username, api_key) + if verification: + credentials.save_credentials(username, api_key, False) + self.set_credentials(username, api_key) + self._on_login() + return verification + + def _cleanup_login_server_thread(self): + if self._login_server_thread is None: + return + thread, self._login_server_thread = self._login_server_thread, None + if thread.is_alive(): + thread.stop() + thread.join() + def _on_ftrack_login_clicked(self): - url = self.check_url() + url = self._check_url() if not url: return # If there is an existing server thread running we need to stop it. - if self._login_server_thread: - if self._login_server_thread.is_alive(): - self._login_server_thread.stop() - self._login_server_thread.join() - self._login_server_thread = None + self._cleanup_login_server_thread() # If credentials are not properly set, try to get them using a http # server. - self._login_server_thread = login_tools.LoginServerThread( + self._waiting_for_ftrack_login = True + self._ftrack_login_timer.start() + + self._login_server_thread = LoginServerThread( url, self._result_of_ftrack_thread ) self._login_server_thread.start() def _result_of_ftrack_thread(self, username, api_key): - if not self.login_with_credentials(username, api_key): - self._invalid_input(self.api_input) - self.set_error(( - "Somthing happened with Ftrack login." - " Try enter Username and API key manually." 
- )) - - def login_with_credentials(self, username, api_key): - verification = credentials.check_credentials(username, api_key) - if verification: - credentials.save_credentials(username, api_key, False) - self._module.set_credentials_to_env(username, api_key) - self.set_credentials(username, api_key) - self.login_changed.emit() - return verification + self._ftrack_login_result = (username, api_key) + self._waiting_for_ftrack_login = False - def set_credentials(self, username, api_key, is_logged=True): - self.user_input.setText(username) - self.api_input.setText(api_key) + def _on_ftrack_login_timer(self): + if self._waiting_for_ftrack_login: + return - self.error_label.hide() + self._ftrack_login_timer.stop() + self._cleanup_login_server_thread() - self._not_invalid_input(self.ftsite_input) - self._not_invalid_input(self.user_input) - self._not_invalid_input(self.api_input) + username, api_key = self._ftrack_login_result + if not self._login_with_credentials(username, api_key): + self._set_widget_state(self._api_input, False) + self._set_error(( + "Something happened with Ftrack login." + " Try entering Username and API key manually." + )) - if is_logged is not None: - self.set_is_logged(is_logged) + def _cleanup(self): + self._cleanup_login_server_thread() + self._ftrack_login_timer.stop() + self._waiting_for_ftrack_login = False - def check_url(self): - settings_url = self._module.settings_ftrack_url - url = self._module.ftrack_url - if not settings_url: - self.set_error( - "Ftrack URL is not defined in settings!" - ) - return + def _get_source_ftrack_url(self): + # NOTE This must be overridden + return None + def _check_url(self): + url = self._get_source_ftrack_url() if url is None: - self.set_error( + self._set_error( "Specified URL does not lead to a valid Ftrack server." ) return @@ -322,7 +422,7 @@ def check_url(self): allow_redirects=False ) except requests.exceptions.RequestException: - self.set_error( + self._set_error( "Specified URL could not be reached." 
) return @@ -331,15 +431,75 @@ def check_url(self): result.status_code != 200 or "FTRACK_VERSION" not in result.headers ): - self.set_error( + self._set_error( "Specified URL does not lead to a valid Ftrack server." ) return return url + +class PopupCredentialsDialog(_CredentialsDialog): + def __init__(self, ftrack_url, parent=None): + super().__init__(parent) + + self._ftrack_url = ftrack_url + + def _get_source_ftrack_url(self): + return self._ftrack_url + + def _close_widget(self): + username, api_key = self.get_credentials() + if not username or not api_key: + self.reject() + else: + self.accept() + + +class TrayCredentialsDialog(_CredentialsDialog): + def __init__(self, addon, parent=None): + super().__init__(parent) + self._addon = addon + + def _on_login(self): + username, api_key = self.get_credentials() + self._addon.set_credentials_to_env(username, api_key) + super()._on_login() + + def _get_source_ftrack_url(self): + return self._addon.ftrack_url + + def _check_url(self): + settings_url = self._addon.settings_ftrack_url + if not settings_url: + self._set_error( + "Ftrack URL is not defined in settings!" 
+ ) + return + + return super()._check_url() + def closeEvent(self, event): event.ignore() self._close_widget() def _close_widget(self): + self._cleanup() self.hide() + + +def main(): + json_filepath = sys.argv[-1] + with open(json_filepath, "r") as stream: + data = json.load(stream) + app = get_qt_app() # noqa F841 + dialog = PopupCredentialsDialog(data["server_url"]) + dialog.exec_() + username, api_key = dialog.get_credentials() + data["username"] = username + data["api_key"] = api_key + with open(json_filepath, "w") as stream: + json.dump(data, stream) + + +if __name__ == "__main__": + main() diff --git a/client/ayon_ftrack/tray/login_tools.py b/client/ayon_ftrack/tray/login_tools.py index 95a072c3..833caa62 100644 --- a/client/ayon_ftrack/tray/login_tools.py +++ b/client/ayon_ftrack/tray/login_tools.py @@ -1,18 +1,20 @@ -from http.server import BaseHTTPRequestHandler, HTTPServer -from urllib import parse -import webbrowser import functools import threading -from openpype import resources +from urllib import parse +from http.server import BaseHTTPRequestHandler, HTTPServer + +import webbrowser + +from ayon_ftrack.resources import get_resource class LoginServerHandler(BaseHTTPRequestHandler): - '''Login server handler.''' + """Login server handler.""" - message_filepath = resources.get_resource("ftrack", "sign_in_message.html") + message_filepath = get_resource("sign_in_message.html") def __init__(self, login_callback, *args, **kw): - '''Initialise handler.''' + """Initialise handler.""" self.login_callback = login_callback BaseHTTPRequestHandler.__init__(self, *args, **kw) @@ -29,17 +31,17 @@ def log_message(self, format_str, *args): )) def do_GET(self): - '''Override to handle requests ourselves.''' + """Override to handle requests ourselves.""" parsed_path = parse.urlparse(self.path) query = parsed_path.query api_user = None api_key = None login_credentials = None - if 'api_user' and 'api_key' in query: + if "api_user" and "api_key" in query: 
login_credentials = parse.parse_qs(query) - api_user = login_credentials['api_user'][0] - api_key = login_credentials['api_key'][0] + api_user = login_credentials["api_user"][0] + api_key = login_credentials["api_key"][0] with open(self.message_filepath, "r") as message_file: sign_in_message = message_file.read() @@ -68,7 +70,7 @@ def do_GET(self): class LoginServerThread(threading.Thread): - '''Login server thread.''' + """Login server thread.""" def __init__(self, url, callback): self.url = url @@ -77,7 +79,7 @@ def __init__(self, url, callback): super(LoginServerThread, self).__init__() def _handle_login(self, api_user, api_key): - '''Login to server with *api_user* and *api_key*.''' + """Login to server with *api_user* and *api_key*.""" self.callback(api_user, api_key) def stop(self): @@ -85,19 +87,16 @@ def stop(self): self._server.server_close() def run(self): - '''Listen for events.''' + """Listen for events.""" self._server = HTTPServer( - ('localhost', 0), + ("localhost", 0), functools.partial( LoginServerHandler, self._handle_login ) ) - unformated_url = ( - '{0}/user/api_credentials?''redirect_url=http://localhost:{1}' - ) - webbrowser.open_new_tab( - unformated_url.format( - self.url, self._server.server_port - ) + url = ( + f"{self.url}/user/api_credentials" + f"?redirect_url=http://localhost:{self._server.server_port}" ) + webbrowser.open_new_tab(url) self._server.handle_request() diff --git a/client/ayon_ftrack/tray/user_server.py b/client/ayon_ftrack/tray/user_server.py index f687cb8d..d53edc52 100644 --- a/client/ayon_ftrack/tray/user_server.py +++ b/client/ayon_ftrack/tray/user_server.py @@ -22,7 +22,7 @@ except ImportError: from ftrack_api._weakref import WeakMethod -from openpype.lib import get_openpype_execute_args, Logger +from ayon_core.lib import get_ayon_launcher_args, Logger class SocketBaseEventHub(ftrack_api.event.hub.EventHub): @@ -111,7 +111,7 @@ def __init__( # Currently pending operations. 
self.recorded_operations = ftrack_api.operation.Operations() - # OpenPype change - In new API are operations properties + # AYON change - In new API are operations properties new_api = hasattr(self.__class__, "record_operations") if new_api: @@ -231,9 +231,9 @@ def _create_event_hub(self): class SocketThread(threading.Thread): - """Thread that checks suprocess of storer of processor of events""" + """Thread that checks subprocess of storer of processor of events""" - MAX_TIMEOUT = int(os.environ.get("OPENPYPE_FTRACK_SOCKET_TIMEOUT", 45)) + MAX_TIMEOUT = 45 def __init__(self, name, port, filepath, additional_args=None): super(SocketThread, self).__init__() @@ -278,9 +278,8 @@ def run(self): ) env = os.environ.copy() - env["OPENPYPE_PROCESS_MONGO_ID"] = str(Logger.mongo_process_id) - # OpenPype executable (with path to start script if not build) - args = get_openpype_execute_args( + # AYON executable (with path to start script if not build) + args = get_ayon_launcher_args( # Add `run` command "run", self.filepath, @@ -346,7 +345,7 @@ def run(self): self._handle_data(connection, data) - except Exception as exc: + except Exception: self.log.error( "Event server process failed", exc_info=True ) diff --git a/client/ayon_ftrack/version.py b/client/ayon_ftrack/version.py index 02ef47c4..5d2b9d74 100644 --- a/client/ayon_ftrack/version.py +++ b/client/ayon_ftrack/version.py @@ -1,3 +1,3 @@ # -*- coding: utf-8 -*- - -__version__ = "1.0.4-dev.1" +"""Package declaring AYON addon 'ftrack' version.""" +__version__ = "1.2.1-dev.1" diff --git a/create_package.py b/create_package.py index ffd85ca3..654198a5 100644 --- a/create_package.py +++ b/create_package.py @@ -1,17 +1,19 @@ +#!/usr/bin/env python + """Prepares server package from addon repo to upload to server. -Requires Python3.9. (Or at least 3.8+). +Requires Python 3.9. (Or at least 3.8+). This script should be called from cloned addon repo. 
It will produce 'package' subdirectory which could be pasted into server -addon directory directly (eg. into `ayon-docker/addons`). +addon directory directly (eg. into `ayon-backend/addons`). Format of package folder: ADDON_REPO/package/{addon name}/{addon version} You can specify `--output_dir` in arguments to change output directory where -package will be created. Existing package directory will be always purged if +package will be created. Existing package directory will always be purged if already present! This could be used to create package directly in server folder if available. @@ -22,27 +24,40 @@ import os import sys import re -import json +import io import shutil -import argparse import platform +import argparse import logging import collections import zipfile -from typing import Optional, Any, Pattern +import subprocess +from typing import Optional, Iterable, Pattern, Union, List, Tuple + +import package -ADDON_NAME: str = "ftrack" -ADDON_CLIENT_NAME: str = "ayon_ftrack" +FileMapping = Tuple[Union[str, io.BytesIO], str] -CURRENT_DIR: str = os.path.dirname(os.path.abspath(__file__)) -VERSION_PATH: str = os.path.join(CURRENT_DIR, "version.py") -SERVER_DIR: str = os.path.join(CURRENT_DIR, "server") -CLIENT_DIR: str = os.path.join(CURRENT_DIR, "client") +ADDON_NAME: str = package.name +ADDON_VERSION: str = package.version +ADDON_CLIENT_DIR: Union[str, None] = getattr(package, "client_dir", None) -COMMON_DIR: str = os.path.join(CLIENT_DIR, ADDON_CLIENT_NAME, "common") +CURRENT_ROOT: str = os.path.dirname(os.path.abspath(__file__)) +SERVER_ROOT: str = os.path.join(CURRENT_ROOT, "server") +FRONTEND_ROOT: str = os.path.join(CURRENT_ROOT, "frontend") +FRONTEND_DIST_ROOT: str = os.path.join(FRONTEND_ROOT, "dist") +DST_DIST_DIR: str = os.path.join("frontend", "dist") +PRIVATE_ROOT: str = os.path.join(CURRENT_ROOT, "private") +PUBLIC_ROOT: str = os.path.join(CURRENT_ROOT, "public") +CLIENT_ROOT: str = os.path.join(CURRENT_ROOT, "client") + +VERSION_PY_CONTENT = 
f'''# -*- coding: utf-8 -*- +"""Package declaring AYON addon '{ADDON_NAME}' version.""" +__version__ = "{ADDON_VERSION}" +''' # Patterns of directories to be skipped for server part of addon -IGNORE_DIR_PATTERNS: list[Pattern] = [ +IGNORE_DIR_PATTERNS: List[Pattern] = [ re.compile(pattern) for pattern in { # Skip directories starting with '.' @@ -53,7 +68,7 @@ ] # Patterns of files to be skipped for server part of addon -IGNORE_FILE_PATTERNS: list[Pattern] = [ +IGNORE_FILE_PATTERNS: List[Pattern] = [ re.compile(pattern) for pattern in { # Skip files starting with '.' @@ -83,9 +98,25 @@ def _extract_member(self, member, tpath, pwd): else: tpath = "\\\\?\\" + tpath - return super(ZipFileLongPaths, self)._extract_member( - member, tpath, pwd - ) + return super()._extract_member(member, tpath, pwd) + + +def _get_yarn_executable() -> Union[str, None]: + cmd = "which" + if platform.system().lower() == "windows": + cmd = "where" + + for line in subprocess.check_output( + [cmd, "yarn"], encoding="utf-8" + ).splitlines(): + if not line or not os.path.exists(line): + continue + try: + subprocess.call([line, "--version"]) + return line + except OSError: + continue + return None def safe_copy_file(src_path: str, dst_path: str): @@ -102,278 +133,404 @@ def safe_copy_file(src_path: str, dst_path: str): return dst_dir: str = os.path.dirname(dst_path) - try: - os.makedirs(dst_dir) - except Exception: - pass + os.makedirs(dst_dir, exist_ok=True) shutil.copy2(src_path, dst_path) -def _value_match_regexes(value: str, regexes: list[Pattern]) -> bool: - for regex in regexes: - if regex.search(value): - return True - return False +def _value_match_regexes(value: str, regexes: Iterable[Pattern]) -> bool: + return any( + regex.search(value) + for regex in regexes + ) def find_files_in_subdir( src_path: str, - ignore_file_patterns: Optional[list[Pattern]] = None, - ignore_dir_patterns: Optional[list[Pattern]] = None -) -> list[tuple[str, str]]: + ignore_file_patterns: 
Optional[List[Pattern]] = None, + ignore_dir_patterns: Optional[List[Pattern]] = None +) -> List[Tuple[str, str]]: + """Find all files to copy in subdirectories of given path. + + All files that match any of the patterns in 'ignore_file_patterns' will + be skipped and any directories that match any of the patterns in + 'ignore_dir_patterns' will be skipped with all subfiles. + + Args: + src_path (str): Path to directory to search in. + ignore_file_patterns (Optional[list[Pattern]]): List of regexes + to match files to ignore. + ignore_dir_patterns (Optional[list[Pattern]]): List of regexes + to match directories to ignore. + + Returns: + list[tuple[str, str]]: List of tuples with path to file and parent + directories relative to 'src_path'. + """ + if ignore_file_patterns is None: - ignore_file_patterns: list[Pattern] = IGNORE_FILE_PATTERNS + ignore_file_patterns = IGNORE_FILE_PATTERNS if ignore_dir_patterns is None: - ignore_dir_patterns: list[Pattern] = IGNORE_DIR_PATTERNS - output: list[tuple[str, str]] = [] + ignore_dir_patterns = IGNORE_DIR_PATTERNS + output: List[Tuple[str, str]] = [] + if not os.path.exists(src_path): + return output - hierarchy_queue: collections.deque[tuple[str, list[str]]] = ( - collections.deque() - ) + hierarchy_queue: collections.deque = collections.deque() hierarchy_queue.append((src_path, [])) while hierarchy_queue: - item = hierarchy_queue.popleft() + item: Tuple[str, str] = hierarchy_queue.popleft() dirpath, parents = item for name in os.listdir(dirpath): - path = os.path.join(dirpath, name) + path: str = os.path.join(dirpath, name) if os.path.isfile(path): if not _value_match_regexes(name, ignore_file_patterns): - items = list(parents) + items: List[str] = list(parents) items.append(name) output.append((path, os.path.sep.join(items))) continue if not _value_match_regexes(name, ignore_dir_patterns): - items = list(parents) + items: List[str] = list(parents) items.append(name) hierarchy_queue.append((path, items)) return output -def 
copy_server_content( - addon_output_dir: str, - current_dir: str, - log: logging.Logger -): - """Copies server side folders to 'addon_package_dir' +def update_client_version(logger): + """Update version in client code if version.py is present.""" + if not ADDON_CLIENT_DIR: + return + + version_path: str = os.path.join( + CLIENT_ROOT, ADDON_CLIENT_DIR, "version.py" + ) + if not os.path.exists(version_path): + logger.debug("Did not find version.py in client directory") + return + + logger.info("Updating client version") + with open(version_path, "w") as stream: + stream.write(VERSION_PY_CONTENT) - Args: - addon_output_dir (str): package dir in addon repo dir - current_dir (str): addon repo dir - log (logging.Logger) - """ - log.info("Copying server content") +def update_docker_version(logger): + """Update version in Dockerfile if present.""" + image_regex = re.compile(r"(?P\s+image:[^:]+:)(?P.+)") + env_regex = re.compile( + r"(?P[\s\-]+\"AYON_ADDON_VERSION=)(?P.+)(?P\".*)" + ) + for service_name in ( + "leecher", + "processor", + ): + service_dir = os.path.join( + CURRENT_ROOT, "services", service_name + ) - server_dir: str = os.path.join(current_dir, "server") + # Pyproject.toml + pyproject_toml = os.path.join(service_dir, "pyproject.toml") + with open(pyproject_toml, "r") as stream: + content = stream.read() + + new_lines = [] + changed = None + for line in content.splitlines(): + if changed is None and line.startswith("version = "): + new_line = f'version = "{ADDON_VERSION}"' + changed = new_line != line + line = new_line + new_lines.append(line) + + if changed is None: + # Version was not found (could be a bug?) 
+ logger.error( + "Did not find 'version' in pyproject.toml" + f" of service '{service_name}" + ) + elif changed: + # Add empty line at the end + if new_lines[-1]: + new_lines.append("") + # Store new lines if something changed + with open(pyproject_toml, "w") as stream: + stream.write("\n".join(new_lines)) + + # docker-dompose.yml + dockercompose_path = os.path.join(service_dir, "docker-compose.yml") + with open(dockercompose_path, "r") as stream: + content = stream.read() + + new_lines = [] + image_changed = None + env_changed = None + for line in content.splitlines(): + if env_changed is None: + env_r = env_regex.search(line) + if env_r: + base = env_r.group("base") + end = env_r.group("end") + new_line = f"{base}{ADDON_VERSION}{end}" + env_changed = new_line != line + line = new_line + if image_changed is None: + image_r = image_regex.search(line) + if image_r: + base = image_r.group("base") + new_line = f"{base}{ADDON_VERSION}" + image_changed = new_line != line + line = new_line + + new_lines.append(line) + + if env_changed is None: + logger.error( + "Did not find 'AYON_ADDON_VERSION' env in docker-compose.yml" + f" of service '{service_name}" + ) - filepaths_to_copy: list[tuple[str, str]] = [] + if image_changed is None: + logger.error( + "Did not find 'image' in in docker-compose.yml" + f" of service '{service_name}" + ) - for path, sub_path in find_files_in_subdir(server_dir): - filepaths_to_copy.append( - (path, os.path.join(addon_output_dir, sub_path)) + if image_changed or env_changed: + # Add empty line at the end + if new_lines[-1]: + new_lines.append("") + with open(dockercompose_path, "w") as stream: + stream.write("\n".join(new_lines)) + + +def build_frontend(): + yarn_executable = _get_yarn_executable() + if yarn_executable is None: + raise RuntimeError("Yarn executable was not found.") + + subprocess.run([yarn_executable, "install"], cwd=FRONTEND_ROOT) + subprocess.run([yarn_executable, "build"], cwd=FRONTEND_ROOT) + if not 
os.path.exists(FRONTEND_DIST_ROOT): + raise RuntimeError( + "Frontend build failed. Did not find 'dist' folder." ) - filepaths_to_copy.extend([ - # Make sure 'version.py' has same content - ( - VERSION_PATH, - os.path.join(addon_output_dir, "version.py") - ), - # Copy constants needed for attributes creation - ( - os.path.join(COMMON_DIR, "constants.py"), - os.path.join(addon_output_dir, "constants.py") - ), - ]) - # Copy files - for src_path, dst_path in filepaths_to_copy: - safe_copy_file(src_path, dst_path) +def get_client_files_mapping() -> List[Tuple[str, str]]: + """Mapping of source client code files to destination paths. + + Example output: + [ + ( + "C:/addons/MyAddon/version.py", + "my_addon/version.py" + ), + ( + "C:/addons/MyAddon/client/my_addon/__init__.py", + "my_addon/__init__.py" + ) + ] + Returns: + list[tuple[str, str]]: List of path mappings to copy. The destination + path is relative to expected output directory. + """ -def _get_client_files_mapping(current_dir: str): - client_code = os.path.join(CLIENT_DIR, ADDON_CLIENT_NAME) - output = [ - (src, os.path.join(ADDON_CLIENT_NAME, dst)) - for src, dst in find_files_in_subdir(client_code) - if dst != "version.py" + # Add client code content to zip + client_code_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) + + return [ + (path, os.path.join(ADDON_CLIENT_DIR, sub_path)) + for path, sub_path in find_files_in_subdir(client_code_dir) ] - # Make sure 'version.py' has same content - output.append( + + +def get_client_zip_content(log) -> io.BytesIO: + log.info("Preparing client code zip") + files_mapping: List[Tuple[str, str]] = get_client_files_mapping() + stream = io.BytesIO() + with ZipFileLongPaths(stream, "w", zipfile.ZIP_DEFLATED) as zipf: + for src_path, subpath in files_mapping: + zipf.write(src_path, subpath) + stream.seek(0) + return stream + + +def get_base_files_mapping() -> List[FileMapping]: + filepaths_to_copy: List[FileMapping] = [ ( - VERSION_PATH, - 
os.path.join(ADDON_CLIENT_NAME, "version.py") + os.path.join(CURRENT_ROOT, "package.py"), + "package.py" + ), + ( + os.path.join( + CLIENT_ROOT, ADDON_CLIENT_DIR, "common", "constants.py" + ), + os.path.join("server", "constants.py") + ), + ] + # Go through server, private and public directories and find all files + for dirpath in (SERVER_ROOT, PRIVATE_ROOT, PUBLIC_ROOT): + if not os.path.exists(dirpath): + continue + + dirname = os.path.basename(dirpath) + for src_file, subpath in find_files_in_subdir(dirpath): + dst_subpath = os.path.join(dirname, subpath) + filepaths_to_copy.append((src_file, dst_subpath)) + + if os.path.exists(FRONTEND_DIST_ROOT): + for src_file, subpath in find_files_in_subdir(FRONTEND_DIST_ROOT): + dst_subpath = os.path.join(DST_DIST_DIR, subpath) + filepaths_to_copy.append((src_file, dst_subpath)) + + pyproject_toml = os.path.join(CLIENT_ROOT, "pyproject.toml") + if os.path.exists(pyproject_toml): + filepaths_to_copy.append( + (pyproject_toml, "private/pyproject.toml") ) - ) - return output + return filepaths_to_copy -def zip_client_side( - addon_package_dir: str, - current_dir: str, - log: logging.Logger, - zip_basename: Optional[str] = None -): - """Copy and zip `client` content into `addon_package_dir'. + +def copy_client_code(output_dir: str, log: logging.Logger): + """Copies server side folders to 'addon_package_dir' Args: - addon_package_dir (str): Output package directory path. - current_dir (str): Directoy path of addon source. - zip_basename (str): Output zip file name in format. 'client' by - default. - log (logging.Logger): Logger object. - """ + output_dir (str): Output directory path. 
+ log (logging.Logger) - if not zip_basename: - zip_basename = "client" - log.info("Preparing client code zip") - private_dir: str = os.path.join(addon_package_dir, "private") - if not os.path.exists(private_dir): - os.makedirs(private_dir) + """ + log.info(f"Copying client for {ADDON_NAME}-{ADDON_VERSION}") - files_mapping: list[tuple[str, str]] = _get_client_files_mapping( - current_dir + full_output_path = os.path.join( + output_dir, f"{ADDON_NAME}_{ADDON_VERSION}" ) - zip_filename: str = zip_basename + ".zip" - zip_filepath: str = os.path.join(os.path.join(private_dir, zip_filename)) - with ZipFileLongPaths( - zip_filepath, "w", zipfile.ZIP_DEFLATED - ) as zipf: - for (src_path, dst_path) in files_mapping: - zipf.write(src_path, dst_path) + if os.path.exists(full_output_path): + shutil.rmtree(full_output_path) + os.makedirs(full_output_path, exist_ok=True) + + for src_path, dst_subpath in get_client_files_mapping(): + dst_path = os.path.join(full_output_path, dst_subpath) + safe_copy_file(src_path, dst_path) - shutil.copy(os.path.join(CLIENT_DIR, "pyproject.toml"), private_dir) + log.info("Client copy finished") -def create_server_package( +def copy_addon_package( output_dir: str, - addon_output_dir: str, - addon_version: str, + files_mapping: List[FileMapping], log: logging.Logger ): - """Create server package zip file. - - The zip file can be installed to a server using UI or rest api endpoints. + """Copy client code to output directory. Args: - output_dir (str): Directory path to output zip file. - addon_output_dir (str): Directory path to addon output directory. - addon_version (str): Version of addon. + output_dir (str): Directory path to output client code. + files_mapping (List[FileMapping]): List of tuples with source file + and destination subpath. log (logging.Logger): Logger object. 
+ """ + log.info(f"Copying package for {ADDON_NAME}-{ADDON_VERSION}") - log.info("Creating server package") - output_path = os.path.join( - output_dir, f"{ADDON_NAME}-{addon_version}.zip" + # Add addon name and version to output directory + addon_output_dir: str = os.path.join( + output_dir, ADDON_NAME, ADDON_VERSION ) - manifest_data: dict[str, str] = { - "addon_name": ADDON_NAME, - "addon_version": addon_version - } - with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: - # Write a manifest to zip - zipf.writestr("manifest.json", json.dumps(manifest_data, indent=4)) + if os.path.isdir(addon_output_dir): + log.info(f"Purging {addon_output_dir}") + shutil.rmtree(addon_output_dir) - # Move addon content to zip into 'addon' directory - addon_output_dir_offset = len(addon_output_dir) + 1 - for root, _, filenames in os.walk(addon_output_dir): - if not filenames: - continue - - dst_root = "addon" - if root != addon_output_dir: - dst_root = os.path.join( - dst_root, root[addon_output_dir_offset:] - ) - for filename in filenames: - src_path = os.path.join(root, filename) - dst_path = os.path.join(dst_root, filename) - zipf.write(src_path, dst_path) + os.makedirs(addon_output_dir, exist_ok=True) - log.info(f"Output package can be found: {output_path}") + # Copy server content + for src_file, dst_subpath in files_mapping: + dst_path: str = os.path.join(addon_output_dir, dst_subpath) + dst_dir: str = os.path.dirname(dst_path) + os.makedirs(dst_dir, exist_ok=True) + if isinstance(src_file, io.BytesIO): + with open(dst_path, "wb") as stream: + stream.write(src_file.getvalue()) + else: + safe_copy_file(src_file, dst_path) + log.info("Package copy finished") -def copy_client_code(current_dir: str, output_dir: str): - """Copy client code to output directory. - Args: - current_dir (str): Directory path of addon source. - output_dir (str): Directory path to output client code. 
- """ +def create_addon_package( + output_dir: str, + files_mapping: List[FileMapping], + log: logging.Logger +): + log.info(f"Creating package for {ADDON_NAME}-{ADDON_VERSION}") - if os.path.exists(output_dir): - shutil.rmtree(output_dir) + os.makedirs(output_dir, exist_ok=True) + output_path = os.path.join( + output_dir, f"{ADDON_NAME}-{ADDON_VERSION}.zip" + ) - if os.path.exists(output_dir): - raise RuntimeError( - f"Failed to remove target folder '{output_dir}'" - ) + with ZipFileLongPaths(output_path, "w", zipfile.ZIP_DEFLATED) as zipf: + # Copy server content + for src_file, dst_subpath in files_mapping: + if isinstance(src_file, io.BytesIO): + zipf.writestr(dst_subpath, src_file.getvalue()) + else: + zipf.write(src_file, dst_subpath) - os.makedirs(output_dir, exist_ok=True) - mapping = _get_client_files_mapping(current_dir) - for (src_path, dst_path) in mapping: - full_dst_path = os.path.join(output_dir, dst_path) - os.makedirs(os.path.dirname(full_dst_path), exist_ok=True) - shutil.copy2(src_path, full_dst_path) + log.info("Package created") def main( - output_dir: Optional[str]=None, - skip_zip: Optional[bool]=False, - keep_sources: Optional[bool]=False, - only_client: Optional[bool]=False, + output_dir: Optional[str] = None, + skip_zip: Optional[bool] = False, + only_client: Optional[bool] = False ): log: logging.Logger = logging.getLogger("create_package") + log.info("Package creation started") - current_dir: str = os.path.dirname(os.path.abspath(__file__)) if not output_dir: - output_dir = os.path.join(current_dir, "package") + output_dir = os.path.join(CURRENT_ROOT, "package") - if only_client: - log.info("Creating client folder") - if not output_dir: + update_docker_version(log) + + has_client_code = bool(ADDON_CLIENT_DIR) + if has_client_code: + client_dir: str = os.path.join(CLIENT_ROOT, ADDON_CLIENT_DIR) + if not os.path.exists(client_dir): raise RuntimeError( - "Output directory must be defined" - " for client only preparation." 
+ f"Client directory was not found '{client_dir}'." + " Please check 'client_dir' in 'package.py'." ) - copy_client_code(current_dir, output_dir) - log.info("Client folder created") - return + update_client_version(log) - log.info("Start creating package") + if only_client: + if not has_client_code: + raise RuntimeError("Client code is not available. Skipping") - version_content: dict[str, Any] = {} - with open(VERSION_PATH, "r") as stream: - exec(stream.read(), version_content) - addon_version: str = version_content["__version__"] + copy_client_code(output_dir, log) + return - addon_output_root: str = os.path.join(output_dir, ADDON_NAME) - if os.path.isdir(addon_output_root): - log.info(f"Purging {addon_output_root}") - shutil.rmtree(addon_output_root) + log.info(f"Preparing package for {ADDON_NAME}-{ADDON_VERSION}") - log.info(f"Preparing package for {ADDON_NAME}-{addon_version}") - addon_output_dir: str = os.path.join(addon_output_root, addon_version) - if not os.path.exists(addon_output_dir): - os.makedirs(addon_output_dir) + if os.path.exists(FRONTEND_ROOT): + build_frontend() - copy_server_content(addon_output_dir, current_dir, log) + files_mapping: List[FileMapping] = [] + files_mapping.extend(get_base_files_mapping()) - zip_client_side(addon_output_dir, current_dir, log) + if has_client_code: + files_mapping.append( + (get_client_zip_content(log), "private/client.zip") + ) # Skip server zipping - if not skip_zip: - create_server_package( - output_dir, addon_output_dir, addon_version, log - ) - # Remove sources only if zip file is created - if not keep_sources: - log.info("Removing source files for server package") - shutil.rmtree(addon_output_root) + if skip_zip: + copy_addon_package(output_dir, files_mapping, log) + else: + create_addon_package(output_dir, files_mapping, log) + log.info("Package creation finished") @@ -388,14 +545,6 @@ def main( " server folder structure." 
) ) - parser.add_argument( - "--keep-sources", - dest="keep_sources", - action="store_true", - help=( - "Keep folder structure when server package is created." - ) - ) parser.add_argument( "-o", "--output", dest="output_dir", @@ -414,6 +563,16 @@ def main( " Requires '-o', '--output' argument to be filled." ) ) + parser.add_argument( + "--debug", + dest="debug", + action="store_true", + help="Debug log messages." + ) args = parser.parse_args(sys.argv[1:]) - main(args.output_dir, args.skip_zip, args.keep_sources, args.only_client) + level = logging.INFO + if args.debug: + level = logging.DEBUG + logging.basicConfig(level=level) + main(args.output_dir, args.skip_zip, args.only_client) diff --git a/package.py b/package.py new file mode 100644 index 00000000..fcf7a38a --- /dev/null +++ b/package.py @@ -0,0 +1,18 @@ +name = "ftrack" +version = "1.2.1-dev.1" +title = "Ftrack" +client_dir = "ayon_ftrack" + +services = { + "leecher": {"image": f"ynput/ayon-ftrack-leecher:{version}"}, + "processor": {"image": f"ynput/ayon-ftrack-processor:{version}"} +} + +plugin_for = ["ayon_server"] + +ayon_required_addons = { + "core": ">=0.4.3", +} +ayon_compatible_addons = { + "applications": ">=0.2.4", +} diff --git a/server/private/.gitkeep b/private/.gitkeep similarity index 100% rename from server/private/.gitkeep rename to private/.gitkeep diff --git a/server/public/.gitkeep b/public/.gitkeep similarity index 100% rename from server/public/.gitkeep rename to public/.gitkeep diff --git a/server/public/icons/AYONAdmin.svg b/public/icons/AYONAdmin.svg similarity index 100% rename from server/public/icons/AYONAdmin.svg rename to public/icons/AYONAdmin.svg diff --git a/server/public/icons/ActionAskWhereIRun.svg b/public/icons/ActionAskWhereIRun.svg similarity index 100% rename from server/public/icons/ActionAskWhereIRun.svg rename to public/icons/ActionAskWhereIRun.svg diff --git a/server/public/icons/AssetsRemover.svg b/public/icons/AssetsRemover.svg similarity index 100% rename from 
server/public/icons/AssetsRemover.svg rename to public/icons/AssetsRemover.svg diff --git a/server/public/icons/BatchTasks.svg b/public/icons/BatchTasks.svg similarity index 100% rename from server/public/icons/BatchTasks.svg rename to public/icons/BatchTasks.svg diff --git a/server/public/icons/ComponentOpen.svg b/public/icons/ComponentOpen.svg similarity index 100% rename from server/public/icons/ComponentOpen.svg rename to public/icons/ComponentOpen.svg diff --git a/server/public/icons/CreateFolders.svg b/public/icons/CreateFolders.svg similarity index 100% rename from server/public/icons/CreateFolders.svg rename to public/icons/CreateFolders.svg diff --git a/server/public/icons/CreateProjectFolders.svg b/public/icons/CreateProjectFolders.svg similarity index 100% rename from server/public/icons/CreateProjectFolders.svg rename to public/icons/CreateProjectFolders.svg diff --git a/server/public/icons/DeleteAsset.svg b/public/icons/DeleteAsset.svg similarity index 100% rename from server/public/icons/DeleteAsset.svg rename to public/icons/DeleteAsset.svg diff --git a/server/public/icons/Delivery.svg b/public/icons/Delivery.svg similarity index 100% rename from server/public/icons/Delivery.svg rename to public/icons/Delivery.svg diff --git a/server/public/icons/MultipleNotes.svg b/public/icons/MultipleNotes.svg similarity index 100% rename from server/public/icons/MultipleNotes.svg rename to public/icons/MultipleNotes.svg diff --git a/server/public/icons/PrepareProject.svg b/public/icons/PrepareProject.svg similarity index 100% rename from server/public/icons/PrepareProject.svg rename to public/icons/PrepareProject.svg diff --git a/server/public/icons/SeedProject.svg b/public/icons/SeedProject.svg similarity index 100% rename from server/public/icons/SeedProject.svg rename to public/icons/SeedProject.svg diff --git a/server/public/icons/SortReview.svg b/public/icons/SortReview.svg similarity index 100% rename from server/public/icons/SortReview.svg rename to 
public/icons/SortReview.svg diff --git a/server/public/icons/TestAction.svg b/public/icons/TestAction.svg similarity index 100% rename from server/public/icons/TestAction.svg rename to public/icons/TestAction.svg diff --git a/server/public/icons/Thumbnail.svg b/public/icons/Thumbnail.svg similarity index 100% rename from server/public/icons/Thumbnail.svg rename to public/icons/Thumbnail.svg diff --git a/ruff.toml b/ruff.toml new file mode 100644 index 00000000..1c6f98e6 --- /dev/null +++ b/ruff.toml @@ -0,0 +1,74 @@ +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +# Same as Black. +line-length = 79 +indent-width = 4 + +[lint] +# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default. +# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or +# McCabe complexity (`C901`) by default. +select = ["E4", "E7", "E9", "F"] +ignore = [] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. 
+# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = false + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +docstring-code-line-length = "dynamic" \ No newline at end of file diff --git a/server/__init__.py b/server/__init__.py index 605c5776..618ffba8 100644 --- a/server/__init__.py +++ b/server/__init__.py @@ -1,11 +1,17 @@ +from typing import Type, Any + import semver -from typing import Type +from fastapi import Query +from nxtools import logging from ayon_server.addons import BaseServerAddon, AddonLibrary from ayon_server.lib.postgres import Postgres -from .settings import FtrackSettings, DEFAULT_VALUES -from .version import __version__ +from .settings import ( + FtrackSettings, + DEFAULT_VALUES, + convert_settings_overrides, +) from .constants import ( FTRACK_ID_ATTRIB, FTRACK_PATH_ATTRIB, @@ -13,14 +19,7 @@ class FtrackAddon(BaseServerAddon): - name = "ftrack" - title = "Ftrack" - version = __version__ settings_model: Type[FtrackSettings] = FtrackSettings - services = { - "leecher": {"image": f"ynput/ayon-ftrack-leecher:{__version__}"}, - "processor": {"image": f"ynput/ayon-ftrack-processor:{__version__}"} - } async def get_default_settings(self): settings_model_cls = self.get_settings_model() @@ -52,6 +51,63 @@ async def setup(self): if need_restart: self.request_server_restart() + def initialize(self) -> None: + self.add_endpoint( + "/customProcessorHandlers", + self.get_custom_processor_handlers, + method="GET", + ) + + async def get_custom_processor_handlers( + self, + variant: str = Query("production"), + ) -> {}: + bundles = await Postgres.fetch( + "SELECT name, is_production, is_staging," + " is_dev, data->'addons' as addons FROM bundles" + ) + bundles_by_variant = { + "production": None, + "staging": None + } + for bundle in bundles: + if bundle["is_dev"]: 
+ bundles_by_variant[bundle["name"]] = bundle + continue + + if bundle["is_production"]: + bundles_by_variant["production"] = bundle + + if bundle["is_staging"]: + bundles_by_variant["staging"] = bundle + + handlers = [] + output = {"custom_handlers": handlers} + if variant not in bundles_by_variant: + return output + addons = bundles_by_variant[variant]["addons"] + addon_library = AddonLibrary.getinstance() + for addon_name, addon_version in addons.items(): + addons_mapping = addon_library.get(addon_name) or {} + addon = addons_mapping.get(addon_version) + if not hasattr(addon, "get_custom_ftrack_handlers_endpoint"): + continue + try: + endpoint = addon.get_custom_ftrack_handlers_endpoint() + if endpoint: + handlers.append({ + "addon_name": addon_name, + "addon_version": addon_version, + "endpoint": endpoint, + }) + except BaseException as exc: + logging.warning( + f"Failed to receive ftrack handlers from addon" + f" {addon_name} {addon_version}. {exc}" + ) + + return output + async def _empty_create_ftrack_attributes(self): return False @@ -137,3 +193,12 @@ async def create_ftrack_attributes(self) -> bool: ) return True + async def convert_settings_overrides( + self, + source_version: str, + overrides: dict[str, Any], + ) -> dict[str, Any]: + convert_settings_overrides(source_version, overrides) + return await super().convert_settings_overrides( + source_version, overrides + ) diff --git a/server/settings/__init__.py b/server/settings/__init__.py index 948f756b..ed6d85c2 100644 --- a/server/settings/__init__.py +++ b/server/settings/__init__.py @@ -1,3 +1,4 @@ +from .conversions import convert_settings_overrides from .main import ( FtrackSettings, DEFAULT_VALUES, @@ -5,6 +6,8 @@ __all__ = ( + "convert_settings_overrides", + "FtrackSettings", "DEFAULT_VALUES", ) diff --git a/server/settings/common.py b/server/settings/common.py index 1c539bf8..195e55ae 100644 --- a/server/settings/common.py +++ b/server/settings/common.py @@ -1,6 +1,4 @@ -from pydantic import 
Field - -from ayon_server.settings import BaseSettingsModel +from ayon_server.settings import BaseSettingsModel, SettingsField ROLES_TITLE = "Roles for action" @@ -13,5 +11,5 @@ class DictWithStrList(BaseSettingsModel): """ _layout = "expanded" - name: str = Field("") - value: list[str] = Field(default_factory=list) + name: str = SettingsField("") + value: list[str] = SettingsField(default_factory=list) diff --git a/server/settings/conversions.py b/server/settings/conversions.py new file mode 100644 index 00000000..5fecff43 --- /dev/null +++ b/server/settings/conversions.py @@ -0,0 +1,74 @@ +from typing import Any + + +def _convert_integrate_ftrack_status_settings(overrides): + """Convert settings of 'IntegrateFtrackFarmStatus' profiles. + + This change happened in 1.1.0 version of the addon, where the settings + were converted to use AYON naming convention over OpenPype convention. + + Args: + overrides (dict[str, Any]): Settings overrides. + """ + value = overrides + for key in ( + "publish", + "IntegrateFtrackFarmStatus", + "farm_status_profiles", + ): + if not isinstance(value, dict) or key not in value: + return + + value = value[key] + + if not isinstance(value, list): + return + + for profile in value: + for src_key, dst_key in ( + ("hosts", "host_names"), + ("families", "product_types"), + ("subset_names", "product_names"), + ): + if src_key in profile: + profile[dst_key] = profile.pop(src_key) + + +def _convert_task_to_version_status_mapping_1_2_0(overrides): + value = overrides + for key in ( + "service_event_handlers", + "status_task_to_version", + ): + value = value.get(key) + if not value: + return + + if "asset_types_filter" in value: + value["asset_types"] = value.pop("asset_types_filter") + value["asset_types_filter_type"] = "allow_list" + + +def _convert_version_to_task_status_mapping_1_2_0(overrides): + value = overrides + for key in ( + "service_event_handlers", + "status_version_to_task", + ): + value = value.get(key) + if not value: + return + 
+ if "asset_types_to_skip" in value: + value["asset_types"] = value.pop("asset_types_to_skip") + value["asset_types_filter_type"] = "deny_list" + + +def convert_settings_overrides( + source_version: str, + overrides: dict[str, Any], +) -> dict[str, Any]: + _convert_integrate_ftrack_status_settings(overrides) + _convert_task_to_version_status_mapping_1_2_0(overrides) + _convert_version_to_task_status_mapping_1_2_0(overrides) + return overrides diff --git a/server/settings/custom_attributes.py b/server/settings/custom_attributes.py index 4baaebfa..eae8d23e 100644 --- a/server/settings/custom_attributes.py +++ b/server/settings/custom_attributes.py @@ -1,14 +1,12 @@ -from pydantic import Field - -from ayon_server.settings import BaseSettingsModel +from ayon_server.settings import BaseSettingsModel, SettingsField class CustomAttributeModel(BaseSettingsModel): - write_security_roles: list[str] = Field( + write_security_roles: list[str] = SettingsField( default_factory=list, title="Write roles", ) - read_roles: list[str] = Field( + read_security_roles: list[str] = SettingsField( default_factory=list, title="Read roles", ) @@ -89,11 +87,11 @@ class HierarchicalAttributesModel(BaseSettingsModel): class CustomAttributesModel(BaseSettingsModel): - show: ProjectCustomAttributesModel = Field( + show: ProjectCustomAttributesModel = SettingsField( default_factory=ProjectCustomAttributesModel, title="Project Custom attributes", ) - is_hierarchical: HierarchicalAttributesModel = Field( + is_hierarchical: HierarchicalAttributesModel = SettingsField( default_factory=HierarchicalAttributesModel, title="Hierarchical Attributes", ) @@ -144,20 +142,14 @@ class CustomAttributesModel(BaseSettingsModel): ] }, "ayon_id": { - "write_security_roles": [ - "API", - "Administrator" - ], + "write_security_roles": [], "read_security_roles": [ "API", "Administrator" ] }, "ayon_path": { - "write_security_roles": [ - "API", - "Administrator" - ], + "write_security_roles": [], "read_security_roles": [ 
"API", "Administrator" diff --git a/server/settings/desktopapp_handlers.py b/server/settings/desktopapp_handlers.py index e8518e0b..c45dee5e 100644 --- a/server/settings/desktopapp_handlers.py +++ b/server/settings/desktopapp_handlers.py @@ -1,13 +1,17 @@ -from pydantic import Field, validator +from pydantic import validator -from ayon_server.settings import BaseSettingsModel, ensure_unique_names +from ayon_server.settings import ( + BaseSettingsModel, + SettingsField, + ensure_unique_names, +) from .common import DictWithStrList, ROLES_TITLE class SimpleAction(BaseSettingsModel): enabled: bool = True - role_list: list[str] = Field( + role_list: list[str] = SettingsField( title=ROLES_TITLE, default_factory=list, ) @@ -19,11 +23,11 @@ class ApplicationLaunchStatuses(BaseSettingsModel): Change task's status to left side if current task status is in list on right side """ enabled: bool = True - ignored_statuses: list[str] = Field( + ignored_statuses: list[str] = SettingsField( default_factory=list, title="Do not change status if current status is", ) - status_change: list[DictWithStrList] = Field( + status_change: list[DictWithStrList] = SettingsField( title="Status change", default_factory=list, ) @@ -37,14 +41,14 @@ def ensure_unique_names(cls, value): class CreateUpdateCustomAttributesAction(BaseSettingsModel): - role_list: list[str] = Field( + role_list: list[str] = SettingsField( title=ROLES_TITLE, default_factory=list, ) class PrepareProjectAction(SimpleAction): - create_project_structure_checked: bool = Field( + create_project_structure_checked: bool = SettingsField( True, description="Check \"Create project structure\" by default", title="Create project structure", @@ -53,7 +57,7 @@ class PrepareProjectAction(SimpleAction): class FillWorkfileAttr(BaseSettingsModel): enabled: bool = True - custom_attribute_key: str = Field( + custom_attribute_key: str = SettingsField( "", title="Custom attribute key", description=( @@ -61,7 +65,7 @@ class 
FillWorkfileAttr(BaseSettingsModel): " type added to Task entity type" ), ) - role_list: list[str] = Field( + role_list: list[str] = SettingsField( title=ROLES_TITLE, default_factory=list, ) @@ -70,38 +74,36 @@ class FillWorkfileAttr(BaseSettingsModel): class FtrackDesktopAppHandlers(BaseSettingsModel): """Settings for event handlers running in ftrack service.""" - create_update_attributes: CreateUpdateCustomAttributesAction = Field( - title="Create/Update Custom Attributes", - default_factory=CreateUpdateCustomAttributesAction, + create_update_attributes: CreateUpdateCustomAttributesAction = ( + SettingsField( + title="Create/Update Custom Attributes", + default_factory=CreateUpdateCustomAttributesAction, + ) ) - prepare_project: PrepareProjectAction = Field( + prepare_project: PrepareProjectAction = SettingsField( title="Prepare Project", default_factory=PrepareProjectAction, ) - clean_hierarchical_attr: SimpleAction = Field( + clean_hierarchical_attr: SimpleAction = SettingsField( title="Clean hierarchical custom attributes", default_factory=SimpleAction ) - delete_old_versions: SimpleAction = Field( + delete_old_versions: SimpleAction = SettingsField( title="Delete old versions", default_factory=SimpleAction, ) - delivery_action: SimpleAction = Field( + delivery_action: SimpleAction = SettingsField( title="Delivery action", default_factory=SimpleAction, ) - job_killer: SimpleAction = Field( + job_killer: SimpleAction = SettingsField( title="Job Killer", default_factory=SimpleAction, ) - fill_workfile_attribute: FillWorkfileAttr = Field( + fill_workfile_attribute: FillWorkfileAttr = SettingsField( title="Fill workfile Custom attribute", default_factory=FillWorkfileAttr, ) - # Removed settings - # - seed_project - # - sync_to_avalon_local - # - store_thubmnail_to_avalon DEFAULT_DESKTOP_HANDLERS_SETTINGS = { diff --git a/server/settings/main.py b/server/settings/main.py index 788a536b..dac5d080 100644 --- a/server/settings/main.py +++ b/server/settings/main.py @@ 
-1,6 +1,10 @@ -from pydantic import Field, validator +from pydantic import validator -from ayon_server.settings import BaseSettingsModel, ensure_unique_names +from ayon_server.settings import ( + BaseSettingsModel, + ensure_unique_names, + SettingsField, +) from ayon_server.settings.enum import secrets_enum from .service_handlers import ( @@ -29,33 +33,33 @@ class FtrackServiceSettings(BaseSettingsModel): so you can see which changes happened from service. """ - username: str = Field( + username: str = SettingsField( enum_resolver=secrets_enum, title="Ftrack user name" ) - api_key: str = Field( + api_key: str = SettingsField( enum_resolver=secrets_enum, title="Ftrack API key" ) class PostLaunchHookMapping(BaseSettingsModel): - name: str = Field("", title="New status") - value: list[str] = Field(default_factory=list, title="From statuses") + name: str = SettingsField("", title="New status") + value: list[str] = SettingsField(default_factory=list, title="From statuses") class PostLaunchHookSettings(BaseSettingsModel): """Change task status on application launch. - Changeo of status is based on mapping. Each item in mapping define new - status which is used based on current status/es. Special value for current - statuses is '__any__', in that case the new status is always used. And if - new status name is '__ignore__', the change of status is skipped if current + Change of status is based on mapping. Each item in mapping defines new + status which is used based on current status(es). Special value for current + statuses is `__any__`, in that case the new status is always used. And if + new status name is `__ignore__`, the change of status is skipped if current status is in current statuses list. 
""" enabled: bool = True - mapping: list[PostLaunchHookMapping] = Field(default_factory=list) + mapping: list[PostLaunchHookMapping] = SettingsField(default_factory=list) @validator("mapping") def ensure_unique_names(cls, value): @@ -68,35 +72,35 @@ def ensure_unique_names(cls, value): class FtrackSettings(BaseSettingsModel): """Ftrack addon settings.""" - enabled: bool = Field(True) - ftrack_server: str = Field( + enabled: bool = SettingsField(True) + ftrack_server: str = SettingsField( "", title="Ftrack server url", scope=["studio"], ) - service_settings: FtrackServiceSettings = Field( + service_settings: FtrackServiceSettings = SettingsField( default_factory=FtrackServiceSettings, title="Service settings", scope=["studio"], ) - service_event_handlers: FtrackServiceHandlers = Field( + service_event_handlers: FtrackServiceHandlers = SettingsField( default_factory=FtrackServiceHandlers, title="Server Actions/Events", ) - post_launch_hook: PostLaunchHookSettings = Field( + post_launch_hook: PostLaunchHookSettings = SettingsField( default_factory=PostLaunchHookSettings, title="Status change on application launch" ) - user_handlers: FtrackDesktopAppHandlers = Field( + user_handlers: FtrackDesktopAppHandlers = SettingsField( default_factory=FtrackDesktopAppHandlers, title="User Actions/Events", ) - publish: FtrackPublishPlugins = Field( + publish: FtrackPublishPlugins = SettingsField( default_factory=FtrackPublishPlugins, title="Publish plugins" ) - custom_attributes: CustomAttributesModel = Field( + custom_attributes: CustomAttributesModel = SettingsField( title="Custom Attributes", default_factory=CustomAttributesModel ) @@ -132,5 +136,6 @@ class FtrackSettings(BaseSettingsModel): } ] }, - "publish": DEFAULT_PUBLISH_SETTINGS + "publish": DEFAULT_PUBLISH_SETTINGS, + "custom_attributes": DEFAULT_CUSTOM_ATTRIBUTES_SETTINGS, } diff --git a/server/settings/publish_plugins.py b/server/settings/publish_plugins.py index b226b52a..1f9e0307 100644 --- 
a/server/settings/publish_plugins.py +++ b/server/settings/publish_plugins.py @@ -1,19 +1,20 @@ import json -from pydantic import Field, validator +from pydantic import validator from ayon_server.settings import ( BaseSettingsModel, + SettingsField, ensure_unique_names, ) class CollectFamilyAdvancedFilterModel(BaseSettingsModel): _layout = "expanded" - families: list[str] = Field( + families: list[str] = SettingsField( default_factory=list, title="Additional Families" ) - add_ftrack_family: bool = Field( + add_ftrack_family: bool = SettingsField( True, title="Add Ftrack Family" ) @@ -21,27 +22,27 @@ class CollectFamilyAdvancedFilterModel(BaseSettingsModel): class CollectFamilyProfile(BaseSettingsModel): _layout = "expanded" - host_names: list[str] = Field( + host_names: list[str] = SettingsField( default_factory=list, title="Host names", ) - product_types: list[str] = Field( + product_types: list[str] = SettingsField( default_factory=list, title="Families", ) - task_types: list[str] = Field( + task_types: list[str] = SettingsField( default_factory=list, title="Task types", ) - task_names: list[str] = Field( + task_names: list[str] = SettingsField( default_factory=list, title="Task names", ) - add_ftrack_family: bool = Field( + add_ftrack_family: bool = SettingsField( True, title="Add Ftrack Family", ) - advanced_filtering: list[CollectFamilyAdvancedFilterModel] = Field( + advanced_filtering: list[CollectFamilyAdvancedFilterModel] = SettingsField( title="Advanced adding if additional families present", default_factory=list, ) @@ -50,7 +51,7 @@ class CollectFamilyProfile(BaseSettingsModel): class CollectFtrackFamilyPlugin(BaseSettingsModel): _isGroup = True enabled: bool = True - profiles: list[CollectFamilyProfile] = Field( + profiles: list[CollectFamilyProfile] = SettingsField( default_factory=list, title="Profiles", ) @@ -59,7 +60,7 @@ class CollectFtrackFamilyPlugin(BaseSettingsModel): class CollectFtrackCustomAttributeDataModel(BaseSettingsModel): _isGroup = 
True enabled: bool = True - custom_attribute_keys: list[str] = Field( + custom_attribute_keys: list[str] = SettingsField( title="Custom attribute keys", default_factory=list, ) @@ -68,7 +69,7 @@ class CollectFtrackCustomAttributeDataModel(BaseSettingsModel): class ValidateFtrackAttributesModel(BaseSettingsModel): _isGroup = True enabled: bool = True - ftrack_custom_attributes: str = Field( + ftrack_custom_attributes: str = SettingsField( "{}", title="Custom attributes to validate", widget="textarea", @@ -88,28 +89,28 @@ def json_parse(cls, value): class IntegrateHierarchyProfile(BaseSettingsModel): _layout = "expanded" - task_types: list[str] = Field( + task_types: list[str] = SettingsField( default_factory=list, title="Task types", ) - task_names: list[str] = Field( + task_names: list[str] = SettingsField( default_factory=list, title="Task names", ) - status_name: str = Field("", title="Status name") + status_name: str = SettingsField("", title="Status name") class IntegrateHierarchyToFtrackModel(BaseSettingsModel): _isGroup = True - create_task_status_profiles: list[IntegrateHierarchyProfile] = Field( - default_factory=list, + create_task_status_profiles: list[IntegrateHierarchyProfile] = ( + SettingsField(default_factory=list) ) class IntegrateFtrackNoteModel(BaseSettingsModel): _isGroup = True enabled: bool = True - note_template: str = Field( + note_template: str = SettingsField( "", title="Note template", description=( @@ -118,7 +119,7 @@ class IntegrateFtrackNoteModel(BaseSettingsModel): " app_label, published_paths and source." 
) ) - note_labels: list[str] = Field( + note_labels: list[str] = SettingsField( title="Note labels", default_factory=list, ) @@ -127,9 +128,9 @@ class IntegrateFtrackNoteModel(BaseSettingsModel): class IntegrateFtrackDescriptionModel(BaseSettingsModel): _isGroup = True enabled: bool = True - optional: bool = Field(False, title="Optional") - active: bool = Field(True, title="Active") - description_template: str = Field( + optional: bool = SettingsField(False, title="Optional") + active: bool = SettingsField(True, title="Active") + description_template: str = SettingsField( "", title="Description template", description=( @@ -146,32 +147,33 @@ class IntegrateFtrackComponentOverwriteModel(BaseSettingsModel): class AssetVersionStatusProfile(BaseSettingsModel): _layout = "expanded" - host_names: list[str] = Field( + host_names: list[str] = SettingsField( default_factory=list, title="Host names", ) - product_types: list[str] = Field( + product_types: list[str] = SettingsField( default_factory=list, title="Families", ) - task_types: list[str] = Field( + task_types: list[str] = SettingsField( default_factory=list, title="Task types", ) - status: str = Field( + status: str = SettingsField( "", title="Status name", ) class IntegrateFtrackFamilyMapping(BaseSettingsModel): - name: str = Field("", title="Family") - asset_type: str = Field("", title="Asset Type") + name: str = SettingsField("", title="Family") + asset_type: str = SettingsField("", title="Asset Type") def integrate_ftrack_metadata_enum(): return [ - {"value": "openpype_version", "label": "OpenPype version"}, + {"value": "ayon_ftrack_version", "label": "AYON ftrack version"}, + {"value": "ayon_launcher_version", "label": "AYON launcher version"}, {"value": "frame_start", "label": "Frame start"}, {"value": "frame_end", "label": "Frame end"}, {"value": "duration", "label": "Duration"}, @@ -184,19 +186,21 @@ def integrate_ftrack_metadata_enum(): class IntegrateFtrackInstanceModel(BaseSettingsModel): _isGroup = True - 
product_type_mapping: list[IntegrateFtrackFamilyMapping] = Field( - title="Family Mapping", + product_type_mapping: list[IntegrateFtrackFamilyMapping] = SettingsField( + title="Product type Mapping", default_factory=list, ) - keep_first_product_name_for_review: bool = Field( + keep_first_product_name_for_review: bool = SettingsField( True, title="Make product name as first asset name", ) - asset_versions_status_profiles: list[AssetVersionStatusProfile] = Field( - title="AssetVersion status on publish", - default_factory=list, + asset_versions_status_profiles: list[AssetVersionStatusProfile] = ( + SettingsField( + title="AssetVersion status on publish", + default_factory=list, + ) ) - additional_metadata_keys: list[str] = Field( + additional_metadata_keys: list[str] = SettingsField( default_factory=list, title="Additional metadata keys on components", enum_resolver=integrate_ftrack_metadata_enum @@ -210,27 +214,27 @@ def validate_unique_outputs(cls, value): class IntegrateFarmStartusProfile(BaseSettingsModel): _layout = "expanded" - host_names: list[str] = Field( + host_names: list[str] = SettingsField( default_factory=list, title="Host names", ) - task_types: list[str] = Field( + task_types: list[str] = SettingsField( default_factory=list, title="Task types", ) - task_names: list[str] = Field( + task_names: list[str] = SettingsField( default_factory=list, title="Task names", ) - product_types: list[str] = Field( + product_types: list[str] = SettingsField( default_factory=list, title="Product types", ) - product_names: list[str] = Field( + product_names: list[str] = SettingsField( title="Product names", default_factory=list, ) - status_name: str = Field( + status_name: str = SettingsField( "", title="Status name" ) @@ -238,7 +242,7 @@ class IntegrateFarmStartusProfile(BaseSettingsModel): class IntegrateFtrackFarmStatusModel(BaseSettingsModel): _isGroup = True - farm_status_profiles: list[IntegrateFarmStartusProfile] = Field( + farm_status_profiles: 
list[IntegrateFarmStartusProfile] = SettingsField( title="Farm status profiles", default_factory=list, ) @@ -246,27 +250,27 @@ class IntegrateFtrackFarmStatusModel(BaseSettingsModel): class FtrackTaskStatusProfile(BaseSettingsModel): _layout = "expanded" - host_names: list[str] = Field( + host_names: list[str] = SettingsField( default_factory=list, title="Host names", ) - task_types: list[str] = Field( + task_types: list[str] = SettingsField( default_factory=list, title="Task types", ) - task_names: list[str] = Field( + task_names: list[str] = SettingsField( default_factory=list, title="Task names", ) - families: list[str] = Field( + product_types: list[str] = SettingsField( default_factory=list, - title="Families", + title="Product types", ) - subset_names: list[str] = Field( + product_names: list[str] = SettingsField( default_factory=list, - title="Subset names", + title="Product names", ) - status_name: str = Field( + status_name: str = SettingsField( "", title="Status name" ) @@ -274,7 +278,7 @@ class FtrackTaskStatusProfile(BaseSettingsModel): class FtrackTaskStatusLocalModel(BaseSettingsModel): _isGroup = True - status_profiles: list[FtrackTaskStatusProfile] = Field( + status_profiles: list[FtrackTaskStatusProfile] = SettingsField( title="Status profiles", default_factory=list, description="Change status of task when is integrated locally" @@ -283,18 +287,18 @@ class FtrackTaskStatusLocalModel(BaseSettingsModel): class FtrackTaskStatusOnFarmModel(BaseSettingsModel): _isGroup = True - status_profiles: list[FtrackTaskStatusProfile] = Field( + status_profiles: list[FtrackTaskStatusProfile] = SettingsField( title="Status profiles", default_factory=list, description=( - "Change status of task when it's subset is integrated on farm" + "Change status of task when it's product is integrated on farm" ) ) class IntegrateFtrackTaskStatusModel(BaseSettingsModel): _isGroup = True - after_version_statuses: bool = Field( + after_version_statuses: bool = SettingsField( True, 
title="After version integration", description=( @@ -308,12 +312,12 @@ class IntegrateFtrackTaskStatusModel(BaseSettingsModel): class FtrackPublishPlugins(BaseSettingsModel): """Settings for event handlers running in ftrack service.""" - CollectFtrackFamily: CollectFtrackFamilyPlugin = Field( + CollectFtrackFamily: CollectFtrackFamilyPlugin = SettingsField( title="Collect Ftrack Family", default_factory=CollectFtrackFamilyPlugin, ) CollectFtrackCustomAttributeData: CollectFtrackCustomAttributeDataModel = ( - Field( + SettingsField( title="Collect Custom Attribute Data", default_factory=CollectFtrackCustomAttributeDataModel, description=( @@ -322,51 +326,59 @@ class FtrackPublishPlugins(BaseSettingsModel): ) ) ) - ValidateFtrackAttributes: ValidateFtrackAttributesModel = Field( + ValidateFtrackAttributes: ValidateFtrackAttributesModel = SettingsField( title="Validate Ftrack Attributes", default_factory=ValidateFtrackAttributesModel, ) - IntegrateHierarchyToFtrack: IntegrateHierarchyToFtrackModel = Field( - title="Integrate Hierarchy to ftrack", - default_factory=IntegrateHierarchyToFtrackModel, - description=( - "Set task status on new task creation." - " Ftrack's default status is used otherwise." + IntegrateHierarchyToFtrack: IntegrateHierarchyToFtrackModel = ( + SettingsField( + title="Integrate Hierarchy to ftrack", + default_factory=IntegrateHierarchyToFtrackModel, + description=( + "Set task status on new task creation." + " Ftrack's default status is used otherwise." 
+ ) ) ) - IntegrateFtrackNote: IntegrateFtrackNoteModel = Field( + IntegrateFtrackNote: IntegrateFtrackNoteModel = SettingsField( title="Integrate Ftrack Note", default_factory=IntegrateFtrackNoteModel, ) - IntegrateFtrackDescription: IntegrateFtrackDescriptionModel = Field( - title="Integrate Ftrack Description", - default_factory=IntegrateFtrackDescriptionModel, - description="Add description to integrated AssetVersion.", + IntegrateFtrackDescription: IntegrateFtrackDescriptionModel = ( + SettingsField( + title="Integrate Ftrack Description", + default_factory=IntegrateFtrackDescriptionModel, + description="Add description to integrated AssetVersion.", + ) ) - IntegrateFtrackComponentOverwrite: IntegrateFtrackComponentOverwriteModel = Field( + IntegrateFtrackComponentOverwrite: IntegrateFtrackComponentOverwriteModel = SettingsField( title="Integrate Ftrack Component Overwrite", default_factory=IntegrateFtrackComponentOverwriteModel, ) - IntegrateFtrackInstance: IntegrateFtrackInstanceModel = Field( + IntegrateFtrackInstance: IntegrateFtrackInstanceModel = SettingsField( title="Integrate Ftrack Instance", default_factory=IntegrateFtrackInstanceModel, ) - IntegrateFtrackFarmStatus: IntegrateFtrackFarmStatusModel = Field( + IntegrateFtrackFarmStatus: IntegrateFtrackFarmStatusModel = SettingsField( title="Integrate Ftrack Farm Status", default_factory=IntegrateFtrackFarmStatusModel, description=( - "Change status of task when it's subset is submitted to farm" + "Change status of task when it's product is submitted to farm" ), ) - ftrack_task_status_local_publish: FtrackTaskStatusLocalModel = Field( - default_factory=FtrackTaskStatusLocalModel, - title="Ftrack Status Local Integration", + ftrack_task_status_local_publish: FtrackTaskStatusLocalModel = ( + SettingsField( + default_factory=FtrackTaskStatusLocalModel, + title="Ftrack Status Local Integration", + ) ) - ftrack_task_status_on_farm_publish: FtrackTaskStatusOnFarmModel = Field( - 
default_factory=FtrackTaskStatusOnFarmModel, - title="Ftrack Status On Farm Integration", + ftrack_task_status_on_farm_publish: FtrackTaskStatusOnFarmModel = ( + SettingsField( + default_factory=FtrackTaskStatusOnFarmModel, + title="Ftrack Status On Farm Integration", + ) ) - IntegrateFtrackTaskStatus: IntegrateFtrackTaskStatusModel = Field( + IntegrateFtrackTaskStatus: IntegrateFtrackTaskStatusModel = SettingsField( default_factory=IntegrateFtrackTaskStatusModel, title="Integrate Ftrack Task Status" ) @@ -585,7 +597,7 @@ class FtrackPublishPlugins(BaseSettingsModel): }, "IntegrateFtrackNote": { "enabled": True, - "note_template": "{intent}: {comment}", + "note_template": "{comment}", "note_labels": [] }, "IntegrateFtrackDescription": { @@ -708,15 +720,15 @@ class FtrackPublishPlugins(BaseSettingsModel): "IntegrateFtrackFarmStatus": { "farm_status_profiles": [ { - "hosts": [ + "host_names": [ "celaction" ], "task_types": [], "task_names": [], - "families": [ + "product_types": [ "render" ], - "subsets": [], + "product_names": [], "status_name": "Render" } ] diff --git a/server/settings/service_handlers.py b/server/settings/service_handlers.py index 47e75dbb..136bf1c0 100644 --- a/server/settings/service_handlers.py +++ b/server/settings/service_handlers.py @@ -1,6 +1,10 @@ -from pydantic import Field, validator +from pydantic import validator -from ayon_server.settings import BaseSettingsModel, ensure_unique_names +from ayon_server.settings import ( + BaseSettingsModel, + SettingsField, + ensure_unique_names, +) from .common import DictWithStrList, ROLES_TITLE @@ -8,7 +12,7 @@ class SimpleAction(BaseSettingsModel): enabled: bool = True - role_list: list[str] = Field( + role_list: list[str] = SettingsField( title=ROLES_TITLE, default_factory=list, ) @@ -16,19 +20,19 @@ class SimpleAction(BaseSettingsModel): class SyncHierarchicalAttributes(BaseSettingsModel): enabled: bool = True - interest_entity_types: list[str] = Field( + interest_entity_types: list[str] = 
SettingsField( title="Entity types of interest", default_factory=list, ) - interest_attributes: list[str] = Field( + interest_attributes: list[str] = SettingsField( title="Attributes to sync", default_factory=list, ) - action_enabled: bool = Field( + action_enabled: bool = SettingsField( True, title="Enable Action", ) - role_list: list[str] = Field( + role_list: list[str] = SettingsField( title=ROLES_TITLE, default_factory=list, ) @@ -36,19 +40,21 @@ class SyncHierarchicalAttributes(BaseSettingsModel): class CloneReviewAction(BaseSettingsModel): enabled: bool = True - role_list: list[str] = Field(default_factory=list, title=ROLES_TITLE) + role_list: list[str] = SettingsField( + default_factory=list, title=ROLES_TITLE + ) class ThumbnailHierarchyUpdates(BaseSettingsModel): """Push thumbnail from version, up through multiple hierarchy levels.""" enabled: bool = True - levels: int = Field(1, title="Levels", ge=0) + levels: int = SettingsField(1, title="Levels", ge=0) class SyncStatusTaskToParentMapping(BaseSettingsModel): - new_status: str = Field(title="New parent status") - task_statuses: list[str] = Field( + new_status: str = SettingsField(title="New parent status") + task_statuses: list[str] = SettingsField( title="Task status", default_factory=list, ) @@ -57,29 +63,45 @@ class SyncStatusTaskToParentMapping(BaseSettingsModel): class SyncStatusTaskToParent(BaseSettingsModel): _isGroup = True enabled: bool = True - parent_object_types: list[str] = Field( + parent_object_types: list[str] = SettingsField( title="Object types", default_factory=list, ) - parent_status_match_all_task_statuses: list[DictWithStrList] = Field( - title="Change parent if all tasks match", - default_factory=list, + parent_status_match_all_task_statuses: list[DictWithStrList] = ( + SettingsField( + title="Change parent if all tasks match", + default_factory=list, + ) ) - parent_status_by_task_status: list[SyncStatusTaskToParentMapping] = Field( - title="Change parent status if a single task 
matches", - default_factory=list, + parent_status_by_task_status: list[SyncStatusTaskToParentMapping] = ( + SettingsField( + title="Change parent status if a single task matches", + default_factory=list, + ) ) +def _allow_deny_enum(): + return [ + {"value": "allow_list", "label": "Allow list"}, + {"value": "deny_list", "label": "Deny list"} + ] + + class SyncStatusTaskToVersion(BaseSettingsModel): _isGroup = True enabled: bool = True - mapping: list[DictWithStrList] = Field( + mapping: list[DictWithStrList] = SettingsField( title="Status mapping", default_factory=list, ) - asset_types_to_skip: list[str] = Field( - title="Skip on Asset types (short)", + asset_types_filter_type: str = SettingsField( + title="Asset types Allow/Deny", + default="allow_list", + enum_resolver=_allow_deny_enum, + ) + asset_types: list[str] = SettingsField( + title="Asset types (short)", default_factory=list, ) @@ -94,12 +116,17 @@ def ensure_unique_names(cls, value): class SyncStatusVersionToTask(BaseSettingsModel): _isGroup = True enabled: bool = True - mapping: list[DictWithStrList] = Field( + mapping: list[DictWithStrList] = SettingsField( title="Status mapping", default_factory=list, ) - asset_types_to_skip: list[str] = Field( - title="Skip on Asset types (short)", + asset_types_filter_type: str = SettingsField( + title="Asset types Allow/Deny", + default="deny_list", + enum_resolver=_allow_deny_enum, + ) + asset_types: list[str] = SettingsField( + title="Asset types (short)", default_factory=list, ) @@ -120,11 +147,11 @@ class NextTaskStatusMapping(BaseSettingsModel): class NextTaskUpdate(BaseSettingsModel): _isGroup = True enabled: bool = True - mapping: list[NextTaskStatusMapping] = Field( + mapping: list[NextTaskStatusMapping] = SettingsField( title="Status Mappings", default_factory=list, ) - ignored_statuses: list[str] = Field( + ignored_statuses: list[str] = SettingsField( title="Ignored statuses", default_factory=list, ) @@ -141,7 +168,7 @@ def ensure_unique_names(cls, value): 
class TransferHierNonHierAttrsAction(BaseSettingsModel): _isGroup = True enabled: bool = True - role_list: list[str] = Field( + role_list: list[str] = SettingsField( title=ROLES_TITLE, default_factory=list, ) @@ -150,23 +177,23 @@ class TransferHierNonHierAttrsAction(BaseSettingsModel): # class CreateDailyReviewSession(BaseSettingsModel): # _isGroup = True # enabled: bool = True -# review_session_template: str = Field( +# review_session_template: str = SettingsField( # "", # title="ReviewSession name template", # ) -# cycle_enabled: bool = Field( +# cycle_enabled: bool = SettingsField( # False, # title="Run automatically every day", # section="Automated execution", # ) -# cycle_hour_start: str = Field( +# cycle_hour_start: str = SettingsField( # "00:00:00", # title="Create daily review session at", # description="This may take affect on next day", # widget="time", # regex="(?:[01]\d|2[0123]):(?:[012345]\d):(?:[012345]\d)", # ) -# role_list: list[str] = Field( +# role_list: list[str] = SettingsField( # section="---", # title=ROLES_TITLE, # default_factory=list, @@ -185,18 +212,18 @@ def custom_attribute_type(): class DailyListCustomAttributesModel(BaseSettingsModel): _layout = "expanded" - attr_name: str = Field("", title="Attribute name") - attr_type: str = Field( + attr_name: str = SettingsField("", title="Attribute name") + attr_type: str = SettingsField( "bool_value", title="Attribute type", enum_resolver=custom_attribute_type, conditionalEnum=True, ) - bool_value: bool = Field(True, title="Expected value") - str_value: str = Field("", title="Expected value") - int_value: int = Field(0, title="Expected value") - float_value: float = Field(0.0, title="Expected value") - enum_value: list[str] = Field( + bool_value: bool = SettingsField(True, title="Expected value") + str_value: str = SettingsField("", title="Expected value") + int_value: int = SettingsField(0, title="Expected value") + float_value: float = SettingsField(0.0, title="Expected value") + enum_value: 
list[str] = SettingsField( title="Expected value", default_factory=list, ) @@ -204,11 +231,11 @@ class DailyListCustomAttributesModel(BaseSettingsModel): class DailyListFilterModel(BaseSettingsModel): _layout = "expanded" - statuses: list[str] = Field( + statuses: list[str] = SettingsField( title="Statuses", default_factory=list, ) - custom_attributes: list[DailyListCustomAttributesModel] = Field( + custom_attributes: list[DailyListCustomAttributesModel] = SettingsField( title="Custom attributes", default_factory=list, ) @@ -218,17 +245,17 @@ class DailyListItemModel(BaseSettingsModel): """Create list with AssetVersions by filter criteria.""" _layout = "expanded" - name_template: str = Field("{yy}{mm}{dd}", title="Name template") - category: str = Field( + name_template: str = SettingsField("{yy}{mm}{dd}", title="Name template") + category: str = SettingsField( "Dailies", title="List category", enum_resolver=lambda: ["Default", "Clients", "Dailies"], ) - cycle_enabled: bool = Field( + cycle_enabled: bool = SettingsField( False, title="Run automatically", ) - filters: list[DailyListFilterModel] = Field( + filters: list[DailyListFilterModel] = SettingsField( title="Asset version filters", default_factory=list, ) @@ -257,7 +284,7 @@ class CreateDailyListsModel(BaseSettingsModel): _isGroup = True enabled: bool = True - cycle_hour_start: str = Field( + cycle_hour_start: str = SettingsField( "00:00:00", title="Create daily lists at", description="This may take affect on next day", @@ -266,78 +293,91 @@ class CreateDailyListsModel(BaseSettingsModel): section="Automated execution", scope=["studio"], ) - cycle_days: list[str] = Field( + cycle_days: list[str] = SettingsField( title="Days of week", default_factory=default_week_days, enum_resolver=week_days, scope=["studio"], ) - lists: list[DailyListItemModel] = Field( + lists: list[DailyListItemModel] = SettingsField( title="Lists", default_factory=list, ) - role_list: list[str] = Field( + role_list: list[str] = 
SettingsField( section="---", title=ROLES_TITLE, default_factory=list, ) +class ComponentsSizeCalcModel(BaseSettingsModel): + # Cannot be turned off per project + enabled: bool = SettingsField(True, scope=["studio"]) + role_list: list[str] = SettingsField( + title=ROLES_TITLE, + default_factory=list, + ) + + class FtrackServiceHandlers(BaseSettingsModel): """Settings for event handlers running in ftrack service.""" - prepare_project: SimpleAction = Field( + prepare_project: SimpleAction = SettingsField( title="Prepare Project", default_factory=SimpleAction, ) - sync_from_ftrack: SimpleAction = Field( + sync_from_ftrack: SimpleAction = SettingsField( title="Sync to AYON", default_factory=SimpleAction, ) - sync_hier_entity_attributes: SyncHierarchicalAttributes = Field( + sync_hier_entity_attributes: SyncHierarchicalAttributes = SettingsField( title="Sync Hierarchical and Entity Attributes", default_factory=SyncHierarchicalAttributes, ) - clone_review_session: CloneReviewAction = Field( + clone_review_session: CloneReviewAction = SettingsField( title="Clone Review Session", default_factory=CloneReviewAction, ) - delete_ayon_entities: SimpleAction = Field( + delete_ayon_entities: SimpleAction = SettingsField( title="Delete Folders/Products", default_factory=SimpleAction, ) - thumbnail_updates: ThumbnailHierarchyUpdates = Field( + thumbnail_updates: ThumbnailHierarchyUpdates = SettingsField( title="Update Hierarchy thumbnails", default_factory=ThumbnailHierarchyUpdates, ) - status_task_to_parent: SyncStatusTaskToParent = Field( + status_task_to_parent: SyncStatusTaskToParent = SettingsField( title="Sync status from Task to Parent", default_factory=SyncStatusTaskToParent, ) - status_task_to_version: SyncStatusTaskToVersion = Field( + status_task_to_version: SyncStatusTaskToVersion = SettingsField( title="Sync status from Task to Version", default_factory=SyncStatusTaskToVersion, ) - status_version_to_task: SyncStatusVersionToTask = Field( + status_version_to_task: 
SyncStatusVersionToTask = SettingsField( title="Sync status from Version to Task", default_factory=SyncStatusVersionToTask, ) - next_task_update: NextTaskUpdate = Field( + next_task_update: NextTaskUpdate = SettingsField( title="Update status on next task", default_factory=NextTaskUpdate, ) - transfer_values_of_hierarchical_attributes: TransferHierNonHierAttrsAction = Field( + transfer_values_of_hierarchical_attributes: TransferHierNonHierAttrsAction = SettingsField( title="Action to transfer hierarchical attribute values", default_factory=TransferHierNonHierAttrsAction, ) - # create_daily_review_session: CreateDailyReviewSession = Field( + # create_daily_review_session: CreateDailyReviewSession = SettingsField( # title="Create daily review session", # default_factory=CreateDailyReviewSession, # ) - create_daily_lists: CreateDailyListsModel = Field( + create_daily_lists: CreateDailyListsModel = SettingsField( title="Create daily lists", default_factory=CreateDailyListsModel, ) + project_components_sizes: ComponentsSizeCalcModel = SettingsField( + title="Calculate project component sizes", + default_factory=ComponentsSizeCalcModel, + ) DEFAULT_SERVICE_HANDLERS_SETTINGS = { @@ -412,12 +452,14 @@ class FtrackServiceHandlers(BaseSettingsModel): "status_task_to_version": { "enabled": True, "mapping": [], - "asset_types_filter": [] + "asset_types_filter_type": "allow_list", + "asset_types": [] }, "status_version_to_task": { "enabled": True, "mapping": [], - "asset_types_to_skip": [] + "asset_types_filter_type": "deny_list", + "asset_types": [] }, "next_task_update": { "enabled": True, @@ -479,4 +521,11 @@ class FtrackServiceHandlers(BaseSettingsModel): } ], }, + "project_components_sizes": { + "enabled": True, + "role_list": [ + "Administrator", + "Project Manager" + ], + }, } diff --git a/service_tools/.gitignore b/service_tools/.gitignore index 129efda2..ab4de356 100644 --- a/service_tools/.gitignore +++ b/service_tools/.gitignore @@ -1,2 +1,3 @@ venv/* -.env \ No 
newline at end of file +.env +downloads/* \ No newline at end of file diff --git a/service_tools/Makefile b/service_tools/Makefile index 2140566c..07a429a1 100644 --- a/service_tools/Makefile +++ b/service_tools/Makefile @@ -3,7 +3,7 @@ ifneq (,$(wildcard ./.env)) export endif -VERSION=$(shell python -c "import os;import sys;content={};f=open('$(CURDIR)/../version.py');exec(f.read(),content);f.close();print(content['__version__'])") +VERSION=$(shell python -c "import os;import sys;content={};f=open('$(CURDIR)/../package.py');exec(f.read(),content);f.close();print(content['version'])") define HELP_INFO diff --git a/service_tools/main.py b/service_tools/main.py index e0ec0e44..bff132d1 100644 --- a/service_tools/main.py +++ b/service_tools/main.py @@ -1,6 +1,5 @@ import os import sys -import logging import argparse import subprocess import time @@ -9,7 +8,8 @@ DEFAULT_VARIANT_ENV_KEY, ) -ADDON_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) +ADDON_DIR = os.path.dirname(CURRENT_DIR) def run_both(): @@ -65,6 +65,13 @@ def main(): if opts.variant: os.environ[DEFAULT_VARIANT_ENV_KEY] = opts.variant + # Set download root for service tools inside service tools + download_root = os.getenv("AYON_FTRACK_DOWNLOAD_ROOT") + if not download_root: + os.environ["AYON_FTRACK_DOWNLOAD_ROOT"] = os.path.join( + CURRENT_DIR, "downloads" + ) + service_name = opts.service if service_name == "both": return run_both() @@ -88,5 +95,4 @@ def main(): if __name__ == "__main__": - logging.basicConfig() main() diff --git a/service_tools/start.ps1 b/service_tools/manage.ps1 similarity index 95% rename from service_tools/start.ps1 rename to service_tools/manage.ps1 index ab678ed6..0236855d 100644 --- a/service_tools/start.ps1 +++ b/service_tools/manage.ps1 @@ -8,7 +8,7 @@ if ($ARGS.Length -gt 1) { $script_dir_rel = Split-Path -Path $MyInvocation.MyCommand.Definition -Parent $script_dir = (Get-Item $script_dir_rel).FullName 
-$ADDON_VERSION = Invoke-Expression -Command "python -c ""import os;import sys;content={};f=open(r'$($script_dir)/../version.py');exec(f.read(),content);f.close();print(content['__version__'])""" +$ADDON_VERSION = Invoke-Expression -Command "python -c ""import os;import sys;content={};f=open(r'$($script_dir)/../package.py');exec(f.read(),content);f.close();print(content['version'])""" function Default-Func { Write-Host "" diff --git a/services/leecher/Makefile b/services/leecher/Makefile index 9352b154..6d0bac92 100755 --- a/services/leecher/Makefile +++ b/services/leecher/Makefile @@ -1,5 +1,5 @@ include .env -VERSION=$(shell python -c "import os;import sys;content={};f=open('$(CURDIR)/../../version.py');exec(f.read(),content);f.close();print(content['__version__'])") +VERSION=$(shell python -c "import os;import sys;content={};f=open('$(CURDIR)/../../package.py');exec(f.read(),content);f.close();print(content['version'])") BASE_NAME := ayon-ftrack-leecher IMAGE := ynput/$(BASE_NAME):$(VERSION) BASH_CONTAINER_NAME := $(BASE_NAME)-bash-$(VERSION) diff --git a/services/leecher/docker-compose.yml b/services/leecher/docker-compose.yml index c6df363d..e1e0d2c6 100644 --- a/services/leecher/docker-compose.yml +++ b/services/leecher/docker-compose.yml @@ -2,10 +2,10 @@ name: ayon-ftrack-services services: leecher: container_name: leecher - image: ynput/ayon-ftrack-leecher:1.0.4-dev.1 + image: ynput/ayon-ftrack-leecher:1.2.1-dev.1 restart: unless-stopped environment: - "AYON_SERVER_URL=${AYON_SERVER_URL}" - "AYON_API_KEY=${AYON_API_KEY}" - "AYON_ADDON_NAME=ftrack" - - "AYON_ADDON_VERSION=1.0.4-dev.1" + - "AYON_ADDON_VERSION=1.2.1-dev.1" diff --git a/services/leecher/leecher/listener.py b/services/leecher/leecher/listener.py index 3b624b04..60156ca2 100644 --- a/services/leecher/leecher/listener.py +++ b/services/leecher/leecher/listener.py @@ -4,7 +4,7 @@ import logging import threading import traceback -from typing import Any, Union +from typing import Any import 
ftrack_api import ayon_api @@ -176,8 +176,11 @@ def main_loop(): def main(): - logging.basicConfig(level=logging.INFO) - + logging.basicConfig( + format="%(asctime)s %(levelname)-8s %(message)s", + level=logging.INFO, + datefmt="%Y-%m-%d %H:%M:%S" + ) try: ayon_api.init_service() connected = True diff --git a/services/leecher/manage.ps1 b/services/leecher/manage.ps1 index 82972134..7919ebf0 100644 --- a/services/leecher/manage.ps1 +++ b/services/leecher/manage.ps1 @@ -10,7 +10,7 @@ $script_dir = (Get-Item $script_dir_rel).FullName $BASE_NAME = "ayon-ftrack-leecher" $IMAGE_NAME = "ynput/$($BASE_NAME)" -$ADDON_VERSION = Invoke-Expression -Command "python -c ""import os;import sys;content={};f=open(r'$($script_dir)/../../version.py');exec(f.read(),content);f.close();print(content['__version__'])""" +$ADDON_VERSION = Invoke-Expression -Command "python -c ""import os;import sys;content={};f=open(r'$($script_dir)/../../package.py');exec(f.read(),content);f.close();print(content['version'])""" $IMAGE_FULL_NAME = "$($IMAGE_NAME):$($ADDON_VERSION)" $BASH_CONTAINER_NAME = "$($BASE_NAME)-bash-$($ADDON_VERSION)" diff --git a/services/leecher/pyproject.toml b/services/leecher/pyproject.toml index a9f0db9a..028785c8 100644 --- a/services/leecher/pyproject.toml +++ b/services/leecher/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "ftrack-leecher" -version = "1.0.4-dev.1" +version = "1.2.1-dev.1" description = "" authors = ["Ynput s.r.o. 
"] diff --git a/services/processor/Makefile b/services/processor/Makefile index 2aa31139..115d61a0 100755 --- a/services/processor/Makefile +++ b/services/processor/Makefile @@ -1,5 +1,5 @@ include .env -VERSION=$(shell python -c "import os;import sys;content={};f=open('$(CURDIR)/../../version.py');exec(f.read(),content);f.close();print(content['__version__'])") +VERSION=$(shell python -c "import os;import sys;content={};f=open('$(CURDIR)/../../package.py');exec(f.read(),content);f.close();print(content['version'])") BASE_NAME := ayon-ftrack-processor IMAGE := ynput/$(BASE_NAME):$(VERSION) BASH_CONTAINER_NAME := $(BASE_NAME)-bash-$(VERSION) diff --git a/services/processor/docker-compose.yml b/services/processor/docker-compose.yml index 78efe68f..d36de1ce 100644 --- a/services/processor/docker-compose.yml +++ b/services/processor/docker-compose.yml @@ -2,10 +2,10 @@ name: ayon-ftrack-services services: processor: container_name: processor - image: ynput/ayon-ftrack-processor:1.0.4-dev.1 + image: ynput/ayon-ftrack-processor:1.2.1-dev.1 restart: unless-stopped environment: - "AYON_SERVER_URL=${AYON_SERVER_URL}" - "AYON_API_KEY=${AYON_API_KEY}" - "AYON_ADDON_NAME=ftrack" - - "AYON_ADDON_VERSION=1.0.4-dev.1" + - "AYON_ADDON_VERSION=1.2.1-dev.1" diff --git a/services/processor/manage.ps1 b/services/processor/manage.ps1 index 3400962d..298eb2a8 100644 --- a/services/processor/manage.ps1 +++ b/services/processor/manage.ps1 @@ -10,7 +10,7 @@ $script_dir = (Get-Item $script_dir_rel).FullName $BASE_NAME = "ayon-ftrack-processor" $IMAGE_NAME = "ynput/$($BASE_NAME)" -$ADDON_VERSION = Invoke-Expression -Command "python -c ""import os;import sys;content={};f=open(r'$($script_dir)/../../version.py');exec(f.read(),content);f.close();print(content['__version__'])""" +$ADDON_VERSION = Invoke-Expression -Command "python -c ""import os;import sys;content={};f=open(r'$($script_dir)/../../package.py');exec(f.read(),content);f.close();print(content['version'])""" $IMAGE_FULL_NAME = 
"$($IMAGE_NAME):$($ADDON_VERSION)" $BASH_CONTAINER_NAME = "$($BASE_NAME)-bash-$($ADDON_VERSION)" diff --git a/services/processor/processor/default_handlers/action_clone_review_session.py b/services/processor/processor/default_handlers/action_clone_review_session.py index bce5445f..0fe1b2ad 100644 --- a/services/processor/processor/default_handlers/action_clone_review_session.py +++ b/services/processor/processor/default_handlers/action_clone_review_session.py @@ -122,9 +122,3 @@ def launch(self, session, entities, event): 'success': True, 'message': 'Action completed successfully' } - - -def register(session): - '''Register action. Called when used as an event plugin.''' - - CloneReviewSession(session).register() diff --git a/services/processor/processor/default_handlers/action_create_lists.py b/services/processor/processor/default_handlers/action_create_lists.py index 0f4fe0c4..ad59a130 100644 --- a/services/processor/processor/default_handlers/action_create_lists.py +++ b/services/processor/processor/default_handlers/action_create_lists.py @@ -204,6 +204,10 @@ def register(self, *args, **kwargs): self._cycle_timer.start() def _timer_callback(self): + # Stop chrono callbacks if session is closed + if self.session.closed: + return + service_settings = get_service_addon_settings() action_settings = ( service_settings @@ -832,7 +836,3 @@ def _fill_list_name_template( exc_info=True ) return output - - -def register(session): - CreateDailyListServerAction(session).register() diff --git a/services/processor/processor/default_handlers/action_delete_entities.py b/services/processor/processor/default_handlers/action_delete_entities.py index 3ffd804b..c81b7454 100644 --- a/services/processor/processor/default_handlers/action_delete_entities.py +++ b/services/processor/processor/default_handlers/action_delete_entities.py @@ -925,13 +925,3 @@ def _delete_products(self, session, entities, event, ftrack_ids): "success": True, "message": message } - - -def register(session): - 
""" - - Args: - session (ftrack_api.Session): Ftrack session. - """ - - DeleteEntitiesAction(session).register() diff --git a/services/processor/processor/default_handlers/action_multiple_notes.py b/services/processor/processor/default_handlers/action_multiple_notes.py index 6ee21412..c49f1ea4 100644 --- a/services/processor/processor/default_handlers/action_multiple_notes.py +++ b/services/processor/processor/default_handlers/action_multiple_notes.py @@ -159,9 +159,3 @@ def launch(self, session, entities, event): entity["notes"].append(new_note) session.commit() return True - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - - MultipleNotesServer(session).register() diff --git a/services/processor/processor/default_handlers/action_prepare_project.py b/services/processor/processor/default_handlers/action_prepare_project.py index 23572610..cacdf3fb 100644 --- a/services/processor/processor/default_handlers/action_prepare_project.py +++ b/services/processor/processor/default_handlers/action_prepare_project.py @@ -82,7 +82,7 @@ def _get_list_items(self, attr_name, attr_def, default): "label": item["label"], "name": name, "value": value in default - }) + }) output.append({ "type": "hidden", "value": json.dumps(mapping), @@ -450,7 +450,7 @@ def _get_rename_project_items( ) if repeated: intro_message = ( - f"Entered values are not valid.

" + "Entered values are not valid.

" ) + intro_message items = [ @@ -543,6 +543,50 @@ def _get_rename_project_items( "items": items, } + def _convert_value_for_attr_conf( + self, value, attr_conf, attr_type_names_by_id + ): + # TODO validate all value types + if not isinstance(value, list): + return value + + attr_name = attr_conf["key"] + attr_type_name = attr_type_names_by_id[attr_conf["type_id"]] + attr_config = json.loads(attr_conf["config"]) + # Skip if value is not multiselection enumerator + if ( + attr_type_name != "enumerator" + or attr_config["multiSelect"] is False + ): + self.log.info( + f"Skipped attribute '{attr_name}' because value" + f" type (list) does not match" + f" ftrack attribute type ({attr_type_name})." + ) + return None + + attr_config_data = attr_config["data"] + if isinstance(attr_config_data, str): + attr_config_data = json.loads(attr_config_data) + + available_values = { + item["value"] + for item in attr_config_data + } + new_value = [ + item + for item in value + if item in available_values + ] + value_diff = set(value) - set(new_value) + if value_diff: + joined_values = ", ".join({f'"{item}"'for item in value_diff}) + self.log.info( + f"Skipped invalid '{attr_name}' enumerator" + f" values {joined_values}." 
+ ) + return new_value + def _set_ftrack_attributes(self, session, project_entity, values): custom_attrs, hier_custom_attrs = get_ayon_attr_configs(session) project_attrs = [ @@ -573,6 +617,12 @@ def _set_ftrack_attributes(self, session, project_entity, values): attr_id = value_item["configuration_id"] values_by_attr_id[attr_id] = value + attr_type_names_by_id = { + attr_type["id"]: attr_type["name"] + for attr_type in session.query( + "select id, name from CustomAttributeType" + ).all() + } for attr_name, attr_value in values.items(): attrs = [ attrs_by_name.get(attr_name), @@ -581,6 +631,12 @@ def _set_ftrack_attributes(self, session, project_entity, values): for attr in attrs: if attr is None: continue + attr_value = self._convert_value_for_attr_conf( + attr_value, attr, attr_type_names_by_id + ) + if attr_value is None: + continue + attr_id = attr["id"] is_new = attr_id not in values_by_attr_id current_value = values_by_attr_id.get(attr_id) @@ -692,7 +748,3 @@ def launch(self, session, entities, event): "message": "Project created in AYON.", "success": True } - - -def register(session): - PrepareProjectServer(session).register() \ No newline at end of file diff --git a/services/processor/processor/default_handlers/action_private_project_detection.py b/services/processor/processor/default_handlers/action_private_project_detection.py index e4e8840b..9849e56e 100644 --- a/services/processor/processor/default_handlers/action_private_project_detection.py +++ b/services/processor/processor/default_handlers/action_private_project_detection.py @@ -53,9 +53,3 @@ def _launch(self, event): ], "submit_button_label": "Got it" } - - -def register(session): - '''Register plugin. 
Called when used as an plugin.''' - - PrivateProjectDetectionAction(session).register() diff --git a/services/processor/processor/default_handlers/action_project_component_sizes.py b/services/processor/processor/default_handlers/action_project_component_sizes.py new file mode 100644 index 00000000..38a962b0 --- /dev/null +++ b/services/processor/processor/default_handlers/action_project_component_sizes.py @@ -0,0 +1,475 @@ +import os +import sys +import tempfile +import csv +import datetime +import json +import collections + +from ftrack_common import ( + ServerAction, + BaseEventHandler, + get_service_ftrack_icon_url, + create_chunks, +) + + +def format_file_size(file_size, suffix=None): + """Returns formatted string with size in appropriate unit. + + Args: + file_size (int): Size of file in bytes. + suffix (str): Suffix for formatted size. Default is 'B' (as bytes). + + Returns: + str: Formatted size using proper unit and passed suffix (e.g. 7 MiB). + """ + + if suffix is None: + suffix = "B" + + for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]: + if abs(file_size) < 1024.0: + return "%3.1f%s%s" % (file_size, unit, suffix) + file_size /= 1024.0 + return "%.1f%s%s" % (file_size, "Yi", suffix) + + +class ProjectComponentsSizesCalculator(BaseEventHandler): + def register(self): + self.session.event_hub.subscribe( + "topic=ayon.calculate.project.component.size", + self._launch, + priority=self.priority + ) + + def launch(self, session, event): + project_id = event["data"]["project_id"] + job_id = event["data"]["job_id"] + job_entity = session.query( + f"select id, data from Job where id is '{job_id}'" + ).first() + # Skip if job or project cannot be found + if job_entity is None or job_entity["status"] != "running": + return + + job_data = json.loads(job_entity["data"]) + try: + job_entity = self._calculate_project_size( + session, project_id, job_entity, job_data + ) + except Exception: + self.log.warning("Project calculation failed", exc_info=True) + 
return + + if self._is_job_done(job_entity): + self._finish_job(session, event, job_entity) + + def _get_project_entities(self, session, project_id): + entity_ids = set() + hierarchy_queue = collections.deque() + hierarchy_queue.append([project_id]) + while hierarchy_queue: + parent_ids = hierarchy_queue.popleft() + new_parent_ids = [] + for _parent_ids in create_chunks(parent_ids): + if not _parent_ids: + continue + entities = session.query( + "select id from TypedContext where" + f" project_id is '{project_id}'" + f" and parent_id in ({self.join_query_keys(_parent_ids)})" + ).all() + for entity in entities: + if entity.entity_type.lower() == "task": + continue + entity_id = entity["id"] + entity_ids.add(entity_id) + new_parent_ids.append(entity_id) + if new_parent_ids: + hierarchy_queue.append(new_parent_ids) + return entity_ids + + def _get_asset_ids(self, session, entity_ids): + assets_ids = set() + for _entity_ids in create_chunks(entity_ids): + assets = session.query( + "select id from Asset" + f" where context_id in ({self.join_query_keys(_entity_ids)})" + ).all() + assets_ids |= { + asset["id"] + for asset in assets + } + return assets_ids + + def _get_asset_version_ids(self, session, asset_ids): + asset_version_ids = set() + for entity_ids in create_chunks(asset_ids): + asset_versions = session.query( + "select id from AssetVersion" + f" where asset_id in ({self.join_query_keys(entity_ids)})" + ).all() + asset_version_ids |= { + asset_version["id"] + for asset_version in asset_versions + } + return asset_version_ids + + def _get_components_size( + self, session, asset_version_ids, location_names + ): + size = 0 + for entity_ids in create_chunks(asset_version_ids): + components = session.query( + "select id, size from Component" + f" where version_id in ({self.join_query_keys(entity_ids)})" + " and component_locations.location.name" + f" in ({self.join_query_keys(location_names)})" + ).all() + size += sum([component["size"] for component in components]) + 
return size + + def _set_progress_description(self, job_data): + description_template = job_data["desc_template"] + finished_projects = sum(( + 1 + for project_info in job_data["project_data"].values() + if project_info["done"] + )) + job_data["description"] = ( + description_template.format(finished_projects) + ) + + def _calculate_project_size( + self, session, project_id, job_entity, job_data + ): + location_names = ["ftrack.server", "ftrack.review"] + + project = session.query( + f"select id, full_name from Project where id is '{project_id}'" + ).first() + + project_info = job_data["project_data"][project_id] + # If more than 3 attemps already happened, mark as done + if not project or project_info["attempts"] > 2: + project_info["done"] = True + self._set_progress_description(job_data) + job_entity["data"] = json.dumps(job_data) + session.commit() + return job_entity + + # Set attempts to higher number + project_info["attempts"] += 1 + job_entity["data"] = json.dumps(job_data) + session.commit() + + project_name = project["full_name"] + + self.log.debug(f"Calculating size of project '{project_name}'") + + entity_ids = self._get_project_entities(session, project_id) + asset_ids = self._get_asset_ids(session, entity_ids) + asset_version_ids = self._get_asset_version_ids(session, asset_ids) + size = self._get_components_size( + session, asset_version_ids, location_names) + + job_entity = session.query( + f"Job where id is \"{job_entity['id']}\"").first() + self.log.debug(( + f"Project '{project_name}' size is {format_file_size(size)}" + )) + job_data = json.loads(job_entity["data"]) + project_data = job_data.get("project_data") + if project_data is None: + return job_entity + project_info = project_data[project_id] + if project_info["size"] != -1: + return job_entity + project_info["size"] = size + project_info["done"] = True + self._set_progress_description(job_data) + job_entity["data"] = json.dumps(job_data) + session.commit() + return job_entity + + def 
_add_output_to_job(self, session, job_entity, output, component_name): + # Sort by name + sorted_output = sorted(output.items(), key=lambda i: i[0]) + # Sort by size in reverse + sorted_output.sort(key=lambda i: i[1], reverse=True) + + # Create temp file where output will be stored + temp_obj = tempfile.NamedTemporaryFile( + mode="w", prefix="ayon_ftrack_", suffix=".csv", delete=False + ) + temp_obj.close() + temp_filepath = temp_obj.name + # Store the output + with open(temp_filepath, "w") as stream: + writer = csv.writer(stream) + for row in sorted_output: + project_name, size = row + writer.writerow([project_name, format_file_size(size), size]) + + self.add_file_component_to_job( + job_entity, session, temp_filepath, component_name + ) + + os.remove(temp_filepath) + + def _is_job_done(self, job_entity): + job_data = json.loads(job_entity["data"]) + project_data = job_data.get("project_data") + if not project_data: + return False + for project_info in project_data.values(): + if not project_info["done"]: + return False + return True + + def _set_job_description(self, session, job_entity, description): + job_entity["data"] = json.dumps({"description": description}) + session.commit() + + def _finish_job(self, session, event, job_entity): + data = json.loads(job_entity["data"]) + component_name = data["component_name"] + + sizes_per_project = { + project_info["name"]: project_info["size"] + for project_info in data["project_data"].values() + } + + self.log.debug(( + "Finished." + f" Uploading result to job component '{component_name}'")) + + self._set_job_description(session, job_entity, "Uploading file") + + self._add_output_to_job( + session, + job_entity, + sizes_per_project, + component_name + ) + + job_entity["status"] = "done" + self._set_job_description( + session, job_entity, "Finished. Click to download" + ) + + self.show_message( + event, + "Size calculation finished. 
You can download csv from job.", + True + ) + + +class ProjectComponentsSizes(ServerAction): + """Calculate project components sizes. + + Action can calculate sizes of all available project or single project. + """ + + identifier = "project.components.sizes" + label = "AYON Admin" + variant = "- Calculate project component sizes" + description = "Calculate component sizes of all versions on ftrack server" + icon = get_service_ftrack_icon_url("AYONAdmin.svg") + settings_key = "project_components_sizes" + + def discover(self, session, entities, event): + """Check if action is available for user role.""" + ftrack_settings = self.get_ftrack_settings(session, event, entities) + settings = ( + ftrack_settings[self.settings_frack_subkey][self.settings_key] + ) + if settings["enabled"]: + return self.valid_roles(session, entities, event) + return False + + def interface(self, session, entities, event): + self.log.info(json.dumps(dict(event.items()), indent=4, default=str)) + if event["data"].get("values"): + return + + title = "Confirm your workflow" + enum_items = [ + { + "label": "All projects", + "value": "all_projects" + } + ] + items = [ + { + "name": "workflow", + "label": "Workflow", + "type": "enumerator", + "value": "all_projects", + "data": enum_items + } + ] + + project = None + for entity in entities: + project = self.get_project_from_entity(entity, session) + if project: + break + label = "Couldn't find a project in your selection." + if project: + enum_items.append({ + "label": "Selected project", + "value": "selected_project" + }) + label = f"Selected project is '{project['full_name']}'." 
+ + items.append({ + "type": "label", + "value": f"NOTE: {label}" + }) + + return { + "items": items, + "title": title, + "submit_button_label": "Confirm" + } + + def launch(self, session, entities, event): + if "values" not in event["data"]: + return + + values = event["data"]["values"] + workflow = values["workflow"] + current_date = datetime.datetime.now().strftime("%y-%m-%d-%H%M") + self.log.debug(f"User selected '{workflow}' workflow") + if workflow == "selected_project": + project = None + for entity in entities: + project = self.get_project_from_entity(entity, session) + if project: + break + + if not project: + return { + "type": "message", + "success": False, + "message": ( + "Had issue to find a project in your selection." + ) + } + project_name = project["full_name"] + component_base_name = f"{project_name}_size" + project_entities = [project] + else: + project_entities = session.query( + "select id, full_name from Project").all() + component_base_name = "AllProjects_size" + + if not project_entities: + self.log.info("There are no projects to calculate size on.") + return { + "type": "message", + "success": False, + "message": ( + "Had issue to find a project in your selection." + ) + } + + component_name = f"{component_base_name}_{current_date}" + + user_entity = session.query( + "User where id is {}".format(event["source"]["user"]["id"]) + ).one() + username = user_entity.get("username") + if not username: + username = ( + f"{user_entity['first_name']} {user_entity['last_name']}" + ) + + job_entity = session.create( + "Job", + { + "user": user_entity, + "status": "running", + "data": json.dumps({ + "description": "Size calculation started" + }) + } + ) + session.commit() + + try: + output = self._create_calculate_jobs( + session, project_entities, job_entity, event, component_name + ) + self.log.debug( + f"Created job for '{username}'. 
Calculation started.") + + except Exception as exc: + # Get exc info before changes in logging to be able to upload it + # to the job. + exc_info = sys.exc_info() + self.log.warning( + "Calculation of project size failed.", exc_info=exc) + session.rollback() + + description = "Size calculation Failed (Download traceback)" + self.add_traceback_to_job( + job_entity, session, exc_info, description + ) + output = { + "type": "message", + "success": False, + "message": ( + "Failed to calculate sizes." + " Error details can be found in a job." + ) + } + + return output + + def _create_calculate_jobs( + self, session, projects, job_entity, event, component_name + ): + description_template = ( + f"Size calculation ({{}}/{len(projects)})") + job_data = json.loads(job_entity["data"]) + job_data.update({ + "desc_template": description_template, + "component_name": component_name, + "project_data": { + project["id"]: { + "size": -1, + "attempts": 0, + "done": False, + "name": project["full_name"], + } + for project in projects + } + }) + job_entity["data"] = json.dumps(job_data) + session.commit() + + for project in projects: + project_id = project["id"] + + self.trigger_event( + "ayon.calculate.project.component.size", + event_data={ + "project_id": project_id, + "job_id": job_entity["id"] + }, + session=session, + source=event["source"], + event=event, + on_error="ignore" + ) + + return { + "type": "message", + "success": True, + "message": ( + "This may take some time. Look into jobs to check progress." 
+ ) + } diff --git a/services/processor/processor/default_handlers/action_push_frame_values_to_task.py b/services/processor/processor/default_handlers/action_push_frame_values_to_task.py index f0a4ffdc..ecd85889 100644 --- a/services/processor/processor/default_handlers/action_push_frame_values_to_task.py +++ b/services/processor/processor/default_handlers/action_push_frame_values_to_task.py @@ -550,7 +550,3 @@ def push_values_to_entities( session.commit() session.commit() - - -def register(session): - PushHierValuesToNonHier(session).register() diff --git a/services/processor/processor/default_handlers/action_sync_from_ftrack.py b/services/processor/processor/default_handlers/action_sync_from_ftrack.py index 44004994..90b189c2 100644 --- a/services/processor/processor/default_handlers/action_sync_from_ftrack.py +++ b/services/processor/processor/default_handlers/action_sync_from_ftrack.py @@ -110,7 +110,3 @@ def _on_leecher_start(self, event): ), on_error="ignore" ) - - -def register(session): - SyncFromFtrackAction(session).register() diff --git a/services/processor/processor/default_handlers/action_tranfer_hierarchical_values.py b/services/processor/processor/default_handlers/action_tranfer_hierarchical_values.py index a13ed261..59f82b38 100644 --- a/services/processor/processor/default_handlers/action_tranfer_hierarchical_values.py +++ b/services/processor/processor/default_handlers/action_tranfer_hierarchical_values.py @@ -338,9 +338,3 @@ def _get_attr_type(self, conf_def): return float return int return None - - -def register(session): - '''Register plugin. 
Called when used as an plugin.''' - - TransferHierarchicalValues(session).register() diff --git a/services/processor/processor/default_handlers/event_first_version_status.py b/services/processor/processor/default_handlers/event_first_version_status.py index 500ed426..a74d4dbe 100644 --- a/services/processor/processor/default_handlers/event_first_version_status.py +++ b/services/processor/processor/default_handlers/event_first_version_status.py @@ -1,7 +1,5 @@ import collections -import ayon_api - from ftrack_common import BaseEventHandler @@ -207,9 +205,3 @@ def filter_entities_info(self, event): filtered_entities_info[project_id].append(entity_info) return filtered_entities_info - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - - FirstVersionStatus(session).register() diff --git a/services/processor/processor/default_handlers/event_next_task_update.py b/services/processor/processor/default_handlers/event_next_task_update.py index 8626580f..50579c8b 100644 --- a/services/processor/processor/default_handlers/event_next_task_update.py +++ b/services/processor/processor/default_handlers/event_next_task_update.py @@ -129,9 +129,10 @@ def process_by_project(self, session, event, project_id, _entities_info): )) return - mod_mapping = {} - for item in event_settings["mapping"]: - mod_mapping[item["name"]] = item["value"] + mod_mapping = { + item["name"]: item["value"] + for item in event_settings["mapping"] + } event_settings["mapping"] = mod_mapping statuses = session.query("Status").all() @@ -452,7 +453,3 @@ def sort_by_name_task_entities_by_type(task_entities_by_type_id): # Override values in source object for type_id, value in _task_entities_by_type_id.items(): task_entities_by_type_id[type_id] = value - - -def register(session): - NextTaskUpdate(session).register() diff --git a/services/processor/processor/default_handlers/event_push_frame_values_to_task.py 
b/services/processor/processor/default_handlers/event_push_frame_values_to_task.py index 007d90c8..ed760490 100644 --- a/services/processor/processor/default_handlers/event_push_frame_values_to_task.py +++ b/services/processor/processor/default_handlers/event_push_frame_values_to_task.py @@ -499,7 +499,3 @@ def launch(self, session, event): for project_id, entities_info in filtered_entities_info.items(): self.process_by_project(session, event, project_id, entities_info) - - -def register(session): - PushHierValuesToNonHierEvent(session).register() diff --git a/services/processor/processor/default_handlers/event_sync_from_ftrack.py b/services/processor/processor/default_handlers/event_sync_from_ftrack.py index 1aae8667..4f693e81 100644 --- a/services/processor/processor/default_handlers/event_sync_from_ftrack.py +++ b/services/processor/processor/default_handlers/event_sync_from_ftrack.py @@ -5,7 +5,6 @@ import atexit import arrow -import ayon_api import ftrack_api from ayon_api import ( @@ -27,6 +26,7 @@ CUST_ATTR_KEY_SYNC_FAIL, CUST_ATTR_AUTO_SYNC, + CUST_ATTR_TOOLS, FPS_KEYS, is_ftrack_enabled_in_settings, @@ -1265,7 +1265,7 @@ def _propagate_task_type_changes(self, task_type_changes): if entity.task_type == new_type_name: continue - if not new_type_name in task_types_names: + if new_type_name not in task_types_names: project_need_update = True to_change.append((entity, new_type_name)) @@ -1320,7 +1320,10 @@ def _propagate_attrib_changes(self): if key == "typeid" and entity.entity_type == "task": task_type_changes[ftrack_id] = (entity, info) - if key not in entity.attribs: + dst_key = key + if key == CUST_ATTR_TOOLS: + dst_key = "tools" + if dst_key not in entity.attribs: continue if value is not None: @@ -1334,7 +1337,7 @@ def _propagate_attrib_changes(self): continue value = self._convert_value_by_cust_attr_conf( value, attr) - entity.attribs[key] = value + entity.attribs[dst_key] = value self._propagate_task_type_changes(task_type_changes) @@ -1602,7 +1605,7 
@@ def launch(self, session, event): f" \"{sync_process.project_name}\" by \"{username}\".{sub_msg}" )) if sync_process.trigger_project_sync: - # Trigger sync to avalon action if auto sync was turned on + # Trigger sync to AYON action if auto sync was turned on selection = [{ "entityId": sync_process.ft_project_id, "entityType": "show" @@ -1661,182 +1664,177 @@ def _get_username(self, session, event): username = user_entity["username"] or username return username - @property - def duplicated_report(self): - if not self.duplicated: - return [] - - ft_project = self.cur_project - duplicated_names = [] - for ftrack_id in self.duplicated: - ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) - if not ftrack_ent: - ftrack_ent = self.process_session.query( - self.entities_query_by_id.format( - ft_project["id"], ftrack_id - ) - ).one() - self.ftrack_ents_by_id[ftrack_id] = ftrack_ent - name = ftrack_ent["name"] - if name not in duplicated_names: - duplicated_names.append(name) - - joined_names = ", ".join( - ["\"{}\"".format(name) for name in duplicated_names] - ) - ft_ents = self.process_session.query( - self.entities_name_query_by_name.format( - ft_project["id"], joined_names - ) - ).all() - - ft_ents_by_name = collections.defaultdict(list) - for ft_ent in ft_ents: - name = ft_ent["name"] - ft_ents_by_name[name].append(ft_ent) - - if not ft_ents_by_name: - return [] - - subtitle = "Duplicated entity names:" - items = [] - items.append({ - "type": "label", - "value": "# {}".format(subtitle) - }) - items.append({ - "type": "label", - "value": ( - "

NOTE: It is not allowed to use the same name" - " for multiple entities in the same project

" - ) - }) - - for name, ents in ft_ents_by_name.items(): - items.append({ - "type": "label", - "value": "## {}".format(name) - }) - paths = [] - for ent in ents: - ftrack_id = ent["id"] - ent_path = "/".join([_ent["name"] for _ent in ent["link"]]) - avalon_ent = self.avalon_ents_by_id.get(ftrack_id) - - if avalon_ent: - additional = " (synchronized)" - if avalon_ent["name"] != name: - additional = " (synchronized as {})".format( - avalon_ent["name"] - ) - ent_path += additional - paths.append(ent_path) - - items.append({ - "type": "label", - "value": '

{}

'.format("
".join(paths)) - }) - - return items - - @property - def regex_report(self): - if not self.regex_failed: - return [] - - subtitle = "Entity names contain prohibited symbols:" - items = [] - items.append({ - "type": "label", - "value": "# {}".format(subtitle) - }) - items.append({ - "type": "label", - "value": ( - "

NOTE: You can use Letters( a-Z )," - " Numbers( 0-9 ) and Underscore( _ )

" - ) - }) - - ft_project = self.cur_project - for ftrack_id in self.regex_failed: - ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) - if not ftrack_ent: - ftrack_ent = self.process_session.query( - self.entities_query_by_id.format( - ft_project["id"], ftrack_id - ) - ).one() - self.ftrack_ents_by_id[ftrack_id] = ftrack_ent - - name = ftrack_ent["name"] - ent_path_items = [_ent["name"] for _ent in ftrack_ent["link"][:-1]] - ent_path_items.append("{}".format(name)) - ent_path = "/".join(ent_path_items) - items.append({ - "type": "label", - "value": "

{} - {}

".format(name, ent_path) - }) - - return items - - def report(self): - msg_len = len(self.duplicated) + len(self.regex_failed) - for msgs in self.report_items.values(): - msg_len += len(msgs) - - if msg_len == 0: - return - - items = [] - project_name = self.cur_project["full_name"] - title = "Synchronization report ({}):".format(project_name) - - keys = ["error", "warning", "info"] - for key in keys: - subitems = [] - if key == "warning": - subitems.extend(self.duplicated_report) - subitems.extend(self.regex_report) - - for _msg, _items in self.report_items[key].items(): - if not _items: - continue - - msg_items = _msg.split("||") - msg = msg_items[0] - subitems.append({ - "type": "label", - "value": "# {}".format(msg) - }) - - if len(msg_items) > 1: - for note in msg_items[1:]: - subitems.append({ - "type": "label", - "value": "

NOTE: {}

".format(note) - }) - - if isinstance(_items, str): - _items = [_items] - subitems.append({ - "type": "label", - "value": '

{}

'.format("
".join(_items)) - }) - - if items and subitems: - items.append(self.report_splitter) - - items.extend(subitems) - - self.show_interface( - items=items, - title=title, - event=self._cur_event - ) - return True - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - AutoSyncFromFtrack(session).register() + # @property + # def duplicated_report(self): + # if not self.duplicated: + # return [] + # + # ft_project = self.cur_project + # duplicated_names = [] + # for ftrack_id in self.duplicated: + # ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) + # if not ftrack_ent: + # ftrack_ent = self.process_session.query( + # self.entities_query_by_id.format( + # ft_project["id"], ftrack_id + # ) + # ).one() + # self.ftrack_ents_by_id[ftrack_id] = ftrack_ent + # name = ftrack_ent["name"] + # if name not in duplicated_names: + # duplicated_names.append(name) + # + # joined_names = ", ".join( + # ["\"{}\"".format(name) for name in duplicated_names] + # ) + # ft_ents = self.process_session.query( + # self.entities_name_query_by_name.format( + # ft_project["id"], joined_names + # ) + # ).all() + # + # ft_ents_by_name = collections.defaultdict(list) + # for ft_ent in ft_ents: + # name = ft_ent["name"] + # ft_ents_by_name[name].append(ft_ent) + # + # if not ft_ents_by_name: + # return [] + # + # subtitle = "Duplicated entity names:" + # items = [] + # items.append({ + # "type": "label", + # "value": "# {}".format(subtitle) + # }) + # items.append({ + # "type": "label", + # "value": ( + # "

NOTE: It is not allowed to use the same name" + # " for multiple entities in the same project

" + # ) + # }) + # + # for name, ents in ft_ents_by_name.items(): + # items.append({ + # "type": "label", + # "value": "## {}".format(name) + # }) + # paths = [] + # for ent in ents: + # ftrack_id = ent["id"] + # ent_path = "/".join([_ent["name"] for _ent in ent["link"]]) + # avalon_ent = self.avalon_ents_by_id.get(ftrack_id) + # + # if avalon_ent: + # additional = " (synchronized)" + # if avalon_ent["name"] != name: + # additional = " (synchronized as {})".format( + # avalon_ent["name"] + # ) + # ent_path += additional + # paths.append(ent_path) + # + # items.append({ + # "type": "label", + # "value": '

{}

'.format("
".join(paths)) + # }) + # + # return items + # + # @property + # def regex_report(self): + # if not self.regex_failed: + # return [] + # + # subtitle = "Entity names contain prohibited symbols:" + # items = [] + # items.append({ + # "type": "label", + # "value": "# {}".format(subtitle) + # }) + # items.append({ + # "type": "label", + # "value": ( + # "

NOTE: You can use Letters( a-Z )," + # " Numbers( 0-9 ) and Underscore( _ )

" + # ) + # }) + # + # ft_project = self.cur_project + # for ftrack_id in self.regex_failed: + # ftrack_ent = self.ftrack_ents_by_id.get(ftrack_id) + # if not ftrack_ent: + # ftrack_ent = self.process_session.query( + # self.entities_query_by_id.format( + # ft_project["id"], ftrack_id + # ) + # ).one() + # self.ftrack_ents_by_id[ftrack_id] = ftrack_ent + # + # name = ftrack_ent["name"] + # ent_path_items = [_ent["name"] for _ent in ftrack_ent["link"][:-1]] + # ent_path_items.append("{}".format(name)) + # ent_path = "/".join(ent_path_items) + # items.append({ + # "type": "label", + # "value": "

{} - {}

".format(name, ent_path) + # }) + # + # return items + # + # def report(self): + # msg_len = len(self.duplicated) + len(self.regex_failed) + # for msgs in self.report_items.values(): + # msg_len += len(msgs) + # + # if msg_len == 0: + # return + # + # items = [] + # project_name = self.cur_project["full_name"] + # title = "Synchronization report ({}):".format(project_name) + # + # keys = ["error", "warning", "info"] + # for key in keys: + # subitems = [] + # if key == "warning": + # subitems.extend(self.duplicated_report) + # subitems.extend(self.regex_report) + # + # for _msg, _items in self.report_items[key].items(): + # if not _items: + # continue + # + # msg_items = _msg.split("||") + # msg = msg_items[0] + # subitems.append({ + # "type": "label", + # "value": "# {}".format(msg) + # }) + # + # if len(msg_items) > 1: + # for note in msg_items[1:]: + # subitems.append({ + # "type": "label", + # "value": "

NOTE: {}

".format(note) + # }) + # + # if isinstance(_items, str): + # _items = [_items] + # subitems.append({ + # "type": "label", + # "value": '

{}

'.format("
".join(_items)) + # }) + # + # if items and subitems: + # items.append(self.report_splitter) + # + # items.extend(subitems) + # + # self.show_interface( + # items=items, + # title=title, + # event=self._cur_event + # ) + # return True diff --git a/services/processor/processor/default_handlers/event_task_to_parent_status.py b/services/processor/processor/default_handlers/event_task_to_parent_status.py index 8125edc2..e8514242 100644 --- a/services/processor/processor/default_handlers/event_task_to_parent_status.py +++ b/services/processor/processor/default_handlers/event_task_to_parent_status.py @@ -434,7 +434,3 @@ def new_status_by_remainders( if best_order_status: output[parent_id] = best_order_status return output - - -def register(session): - TaskStatusToParent(session).register() diff --git a/services/processor/processor/default_handlers/event_task_to_version_status.py b/services/processor/processor/default_handlers/event_task_to_version_status.py index b1a2b58c..47950023 100644 --- a/services/processor/processor/default_handlers/event_task_to_version_status.py +++ b/services/processor/processor/default_handlers/event_task_to_version_status.py @@ -123,30 +123,30 @@ def process_by_project(self, session, event, project_id, entities_info): ["service_event_handlers"] [self.settings_key] ) - mod_mapping = {} - for item in event_settings["mapping"]: - mod_mapping[item["name"]] = item["value"] - event_settings["mapping"] = mod_mapping - _status_mapping = event_settings["mapping"] if not event_settings["enabled"]: self.log.debug("Project \"{}\" has disabled {}.".format( project_name, self.__class__.__name__ )) return - if not _status_mapping: + status_mapping = { + item["name"].lower(): item["value"] + for item in event_settings["mapping"] + } + if not status_mapping: self.log.debug(( "Project \"{}\" does not have set status mapping for {}." 
).format(project_name, self.__class__.__name__)) return - status_mapping = { - key.lower(): value - for key, value in _status_mapping.items() + asset_type_filter = event_settings["asset_types_filter_type"] + is_allow_list = asset_type_filter == "allow_list" + asset_type_names = { + asset_type_name.lower() + for asset_type_name in event_settings["asset_types"] + if asset_type_name } - asset_types_filter = event_settings["asset_types_filter"] - task_ids = [ entity_info["entityId"] for entity_info in entities_info @@ -154,7 +154,7 @@ def process_by_project(self, session, event, project_id, entities_info): last_asset_versions_by_task_id = ( self.find_last_asset_versions_for_task_ids( - session, task_ids, asset_types_filter + session, task_ids, is_allow_list, asset_type_names ) ) @@ -173,9 +173,10 @@ def process_by_project(self, session, event, project_id, entities_info): if not task_entities: return - status_ids = set() - for task_entity in task_entities: - status_ids.add(task_entity["status_id"]) + status_ids = { + task_entity["status_id"] + for task_entity in task_entities + } task_status_entities = session.query( "select id, name from Status where id in ({})".format( @@ -306,7 +307,7 @@ def get_asset_version_statuses(self, project_entity): return av_statuses_by_low_name, av_statuses_by_id def find_last_asset_versions_for_task_ids( - self, session, task_ids, asset_types_filter + self, session, task_ids, is_allow_list, asset_type_names ): """Find latest AssetVersion entities for task. @@ -314,44 +315,63 @@ def find_last_asset_versions_for_task_ids( same version for the task. Args: - asset_versions (list): AssetVersion entities sorted by "version". - task_ids (list): Task ids. - asset_types_filter (list): Asset types short names that will be + session (ftrack_api.Session): Ftrack session. + task_ids (list[str]): Task ids. + is_allow_list (bool): If True then asset_types are used + as allow list. 
+ asset_type_names (set[str]): Asset types short names that will be used to filter AssetVersions. Filtering is skipped if entered value is empty list. + + Returns: + dict[str, list[ftrack_api.Entity]]: Dictionary with task id as key + and list of AssetVersion entities as value. + """ + last_asset_versions_by_task_id = collections.defaultdict(list) + + if not task_ids: + return last_asset_versions_by_task_id + + if is_allow_list and not asset_type_names: + return last_asset_versions_by_task_id # Allow event only on specific asset type names asset_query_part = "" - if asset_types_filter: + if asset_type_names: # Query all AssetTypes asset_types = session.query( "select id, short from AssetType" ).all() # Store AssetTypes by id asset_type_short_by_id = { - asset_type["id"]: asset_type["short"] + asset_type["id"]: asset_type["short"].lower() for asset_type in asset_types } # Lower asset types from settings - # WARNING: not sure if is good idea to lower names as Ftrack may - # contain asset type with name "Scene" and "scene"! - asset_types_filter_low = set( - asset_types_name.lower() - for asset_types_name in asset_types_filter - ) - asset_type_ids = [] - for type_id, short in asset_type_short_by_id.items(): - # TODO log if asset type name is not found - if short.lower() in asset_types_filter_low: - asset_type_ids.append(type_id) - - # TODO log that none of asset type names were found in ftrack - if asset_type_ids: + asset_type_ids = { + type_id + for type_id, short in asset_type_short_by_id.items() + if short.lower() in asset_type_names + } + + # Allow list is enabled but asset type names are not available + if is_allow_list and not asset_type_ids: + self.log.warning(( + "None of asset type names were found in Ftrack." + " Skipping filter." 
+ )) + return last_asset_versions_by_task_id + + if is_allow_list: asset_query_part = " and asset.type_id in ({})".format( self.join_query_keys(asset_type_ids) ) + elif asset_type_ids: + asset_query_part = " and asset.type_id not in ({})".format( + self.join_query_keys(asset_type_ids) + ) # Query tasks' AssetVersions asset_versions = session.query(( @@ -360,7 +380,6 @@ def find_last_asset_versions_for_task_ids( " order by version descending" ).format(self.join_query_keys(task_ids), asset_query_part)).all() - last_asset_versions_by_task_id = collections.defaultdict(list) last_version_by_task_id = {} not_finished_task_ids = set(task_ids) for asset_version in asset_versions: @@ -381,14 +400,10 @@ def find_last_asset_versions_for_task_ids( elif last_version > version: # Skip processing if version is lower than last version # and pop task id from `not_finished_task_ids` - not_finished_task_ids.remove(task_id) + not_finished_task_ids.discard(task_id) continue # Add AssetVersion entity to output dictionary last_asset_versions_by_task_id[task_id].append(asset_version) return last_asset_versions_by_task_id - - -def register(session): - TaskToVersionStatus(session).register() diff --git a/services/processor/processor/default_handlers/event_thumbnail_updates.py b/services/processor/processor/default_handlers/event_thumbnail_updates.py index d8940b98..914ac146 100644 --- a/services/processor/processor/default_handlers/event_thumbnail_updates.py +++ b/services/processor/processor/default_handlers/event_thumbnail_updates.py @@ -160,7 +160,3 @@ def filter_entities(self, event): filtered_entities_info[project_id] = [] filtered_entities_info[project_id].append(entity_info) return filtered_entities_info - - -def register(session): - ThumbnailEvents(session).register() diff --git a/services/processor/processor/default_handlers/event_version_to_task_statuses.py b/services/processor/processor/default_handlers/event_version_to_task_statuses.py index 4381a328..3f1ff8ba 100644 --- 
a/services/processor/processor/default_handlers/event_version_to_task_statuses.py +++ b/services/processor/processor/default_handlers/event_version_to_task_statuses.py @@ -78,21 +78,17 @@ def process_by_project(self, session, event, project_id, entities_info): )) return - mod_mapping = {} - for item in event_settings["mapping"]: - mod_mapping[item["name"]] = item["value"] - event_settings["mapping"] = mod_mapping - - _status_mapping = event_settings["mapping"] or {} status_mapping = { - key.lower(): value - for key, value in _status_mapping.items() + item["name"].lower(): item["value"] + for item in event_settings["mapping"] } + asset_type_filter = event_settings["asset_types_filter_type"] + is_allow_list = asset_type_filter == "allow_list" - asset_types_to_skip = [ + asset_types = { short_name.lower() - for short_name in event_settings["asset_types_to_skip"] - ] + for short_name in event_settings["asset_types"] + } # Collect entity ids asset_version_ids = set() @@ -112,10 +108,12 @@ def process_by_project(self, session, event, project_id, entities_info): task_ids = set() asset_version_entities = [] for asset_version in _asset_version_entities: - if asset_types_to_skip: - short_name = asset_version["asset"]["type"]["short"].lower() - if short_name in asset_types_to_skip: - continue + short_name = asset_version["asset"]["type"]["short"].lower() + if ( + (is_allow_list and short_name not in asset_types) + or (not is_allow_list and short_name in asset_types) + ): + continue asset_version_entities.append(asset_version) task_ids.add(asset_version["task_id"]) @@ -248,9 +246,3 @@ def statuses_for_tasks(self, session, task_entities, project_id): } return output - - -def register(session): - '''Register plugin. 
Called when used as an plugin.''' - - VersionToTaskStatus(session).register() diff --git a/services/processor/processor/download_utils.py b/services/processor/processor/download_utils.py new file mode 100644 index 00000000..d01b7104 --- /dev/null +++ b/services/processor/processor/download_utils.py @@ -0,0 +1,257 @@ +import os +import sys +import shutil +import uuid +import contextlib +import threading +import time +import json +import tarfile +import zipfile + +import appdirs +import ayon_api + +IMPLEMENTED_ARCHIVE_FORMATS = { + ".zip", ".tar", ".tgz", ".tar.gz", ".tar.xz", ".tar.bz2" +} +PROCESS_ID = uuid.uuid4().hex +# Wait 1 hour before cleaning up download dir +# - Running process should update lock file every 1 second +_LOCK_CLEANUP_TIME = 60 * 60 + + +def get_download_root(): + root = os.getenv("AYON_FTRACK_DOWNLOAD_ROOT") + if not root: + root = os.path.join( + appdirs.user_data_dir("ayon-ftrack", "Ynput"), + "downloads" + ) + return root + + +def get_archive_ext_and_type(archive_file): + """Get archive extension and type. + + Args: + archive_file (str): Path to archive file. + + Returns: + Tuple[str, str]: Archive extension and type. + """ + + tmp_name = archive_file.lower() + if tmp_name.endswith(".zip"): + return ".zip", "zip" + + for ext in ( + ".tar", + ".tgz", + ".tar.gz", + ".tar.xz", + ".tar.bz2", + ): + if tmp_name.endswith(ext): + return ext, "tar" + + return None, None + + +def extract_archive_file(archive_file, dst_folder=None): + """Extract archived file to a directory. + + Args: + archive_file (str): Path to a archive file. + dst_folder (Optional[str]): Directory where content will be extracted. + By default, same folder where archive file is. 
+ """ + + if not dst_folder: + dst_folder = os.path.dirname(archive_file) + + archive_ext, archive_type = get_archive_ext_and_type(archive_file) + + print(f"Extracting {archive_file} -> {dst_folder}") + if archive_type is None: + _, ext = os.path.splitext(archive_file) + raise ValueError(( + f"Invalid file extension \"{ext}\"." + f" Expected {', '.join(IMPLEMENTED_ARCHIVE_FORMATS)}" + )) + + if archive_type == "zip": + with zipfile.ZipFile(archive_file) as zip_file: + zip_file.extractall(dst_folder) + + elif archive_type == "tar": + if archive_ext == ".tar": + tar_type = "r:" + elif archive_ext.endswith(".xz"): + tar_type = "r:xz" + elif archive_ext.endswith(".gz"): + tar_type = "r:gz" + elif archive_ext.endswith(".bz2"): + tar_type = "r:bz2" + else: + tar_type = "r:*" + + with tarfile.open(archive_file, tar_type) as tar_file: + tar_file.extractall(dst_folder) + + +class _LockThread(threading.Thread): + def __init__(self, lock_file): + super().__init__() + self.lock_file = lock_file + self._event = threading.Event() + self.interval = 1 + + def stop(self): + if not self._event.is_set(): + self._event.set() + + def run(self): + with open(self.lock_file, "w") as stream: + while not self._event.wait(self.interval): + stream.seek(0) + stream.write(str(time.time())) + stream.flush() + + +@contextlib.contextmanager +def _lock_file_update(lock_file): + thread = _LockThread(lock_file) + thread.start() + try: + yield + finally: + thread.stop() + thread.join() + + +def _download_event_handlers(dirpath, custom_handlers, event_handler_dirs): + for custom_handler in custom_handlers: + addon_name = custom_handler["addon_name"] + addon_version = custom_handler["addon_version"] + endpoint = custom_handler["endpoint"] + filename = endpoint.rsplit("/")[-1] + path = os.path.join(dirpath, filename) + url = "/".join([ayon_api.get_base_url(), endpoint]) + try: + ayon_api.download_file(url, path) + + except BaseException as exc: + print( + "Failed to download event handlers" + f" for 
{addon_name} {addon_version}" + f"from '{endpoint}'. Reason: {exc}" + ) + continue + + try: + # Create temp dir for event handlers + subdir = f"{addon_name}_{addon_version}" + extract_dir = os.path.join(dirpath, subdir) + # Extract downloaded archive + extract_archive_file(path, extract_dir) + manifest_file = os.path.join(extract_dir, "manifest.json") + if not os.path.exists(manifest_file): + print( + f"Manifest file not found in" + f" downloaded archive from {endpoint}" + ) + continue + + with open(manifest_file, "r") as stream: + manifest = json.load(stream) + + manifest_version = manifest["version"] + maj_v, min_v, patch_v = ( + int(part) for part in manifest_version.split(".") + ) + if (maj_v, min_v, patch_v) > (1, 0, 0): + print( + f"Manifest file has unknown version {manifest_version}." + " Trying to process it anyway." + ) + continue + + for even_handler_subpath in manifest.get("handler_subfolders", []): + # Add path to event handler dirs + event_handler_dirs.append(os.path.join( + extract_dir, even_handler_subpath + )) + + for python_subpath in manifest.get("python_path_subfolders", []): + python_dir = os.path.join(extract_dir, python_subpath) + sys.path.insert(0, python_dir) + + except BaseException as exc: + print(f"Failed to extract downloaded archive: {exc}") + + finally: + # Remove archive + os.remove(path) + + +@contextlib.contextmanager +def downloaded_event_handlers(custom_handlers): + event_handler_dirs = [] + if not custom_handlers: + yield event_handler_dirs + return + + root = get_download_root() + dirpath = os.path.join(root, PROCESS_ID) + os.makedirs(dirpath, exist_ok=True) + + lock_file = os.path.join(dirpath, "lock") + try: + with _lock_file_update(lock_file): + print("Downloading event handlers...") + _download_event_handlers( + dirpath, custom_handlers, event_handler_dirs + ) + yield event_handler_dirs + finally: + shutil.rmtree(dirpath) + print("Cleaned up downloaded event handlers") + + +def cleanup_download_root(): + root = 
get_download_root() + if not os.path.exists(root): + return + + current_time = time.time() + paths_to_remove = [] + for subdir in os.listdir(root): + path = os.path.join(root, subdir) + if not os.path.isdir(path): + continue + lock_file = os.path.join(path, "lock") + if not os.path.exists(lock_file): + paths_to_remove.append(path) + continue + + try: + with open(lock_file, "r") as stream: + content = stream.read() + + except BaseException: + print( + "Failed to read lock file to check" + f" if can remove downloaded content '{lock_file}'" + ) + continue + + last_update = 0 + if content: + last_update = float(content) + if (current_time - last_update) > _LOCK_CLEANUP_TIME: + paths_to_remove.append(path) + + for path in paths_to_remove: + print(f"Cleaning up download directory: {path}") + shutil.rmtree(path) diff --git a/services/processor/processor/ftrack_session.py b/services/processor/processor/ftrack_session.py index 95932532..21d34344 100644 --- a/services/processor/processor/ftrack_session.py +++ b/services/processor/processor/ftrack_session.py @@ -1,5 +1,4 @@ import os -import sys import logging import getpass import atexit @@ -31,6 +30,8 @@ class ProcessEventHub(ftrack_api.event.hub.EventHub): _server_con = None def get_next_ftrack_event(self): + if not self.connected: + return None return enroll_event_job( source_topic="ftrack.leech", target_topic="ftrack.proc", @@ -43,7 +44,7 @@ def finish_job(self, job): event_id = job["id"] source_id = job["dependsOn"] source_event = get_event(event_id) - print(f"Processing event... {source_id}") + print(f"Processed event... 
{source_id}") description = f"Processed {source_event['description']}" @@ -73,6 +74,7 @@ def wait(self, duration=None): started = time.time() while True: job = None + empty_queue = False try: item = self._event_queue.get(timeout=0.1) if isinstance(item, tuple): @@ -81,6 +83,14 @@ def wait(self, duration=None): event = item except queue.Empty: + empty_queue = True + + # Do not do this under except handling to avoid confusing + # traceback if something happens + if empty_queue: + if not self.connected: + break + if not self.load_event_from_jobs(): time.sleep(0.1) continue @@ -177,7 +187,7 @@ def __init__( # Currently pending operations. self.recorded_operations = ftrack_api.operation.Operations() - # OpenPype change - In new API are operations properties + # AYON change - In new API are operations properties new_api = hasattr(self.__class__, "record_operations") if new_api: diff --git a/services/processor/processor/handlers_to_convert/action_create_review_session.py b/services/processor/processor/handlers_to_convert/action_create_review_session.py index cbd890b8..e036ae82 100644 --- a/services/processor/processor/handlers_to_convert/action_create_review_session.py +++ b/services/processor/processor/handlers_to_convert/action_create_review_session.py @@ -132,6 +132,10 @@ def register(self, *args, **kwargs): self._check_review_session() def _timer_callback(self): + # Stop chrono callback if session is closed + if self.session.closed: + return + if ( self._cycle_timer is not None and self._last_cyle_time is not None @@ -305,8 +309,3 @@ def _fill_review_template(self, template, data): exc_info=True ) return output - - -def register(session): - '''Register plugin. 
Called when used as an plugin.''' - CreateDailyReviewSessionServerAction(session).register() diff --git a/services/processor/processor/handlers_to_convert/event_del_avalon_id_from_new.py b/services/processor/processor/handlers_to_convert/event_del_avalon_id_from_new.py index e42c803a..25df6541 100644 --- a/services/processor/processor/handlers_to_convert/event_del_avalon_id_from_new.py +++ b/services/processor/processor/handlers_to_convert/event_del_avalon_id_from_new.py @@ -1,6 +1,6 @@ from ftrack_common import BaseEventHandler, CUST_ATTR_KEY_SERVER_ID -from openpype_modules.ftrack.event_handlers_server.event_sync_to_avalon import ( +from ayon_ftrack.event_handlers_server.event_sync_to_avalon import ( SyncToAvalonEvent ) @@ -47,8 +47,3 @@ def launch(self, session, event): except Exception: session.rollback() continue - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - DelAvalonIdFromNew(session).register() diff --git a/services/processor/processor/handlers_to_convert/event_sync_links.py b/services/processor/processor/handlers_to_convert/event_sync_links.py index 29667663..32ced7ea 100644 --- a/services/processor/processor/handlers_to_convert/event_sync_links.py +++ b/services/processor/processor/handlers_to_convert/event_sync_links.py @@ -9,15 +9,15 @@ from openpype.pipeline import AvalonMongoDB -class SyncLinksToAvalon(BaseEventHandler): - """Synchronize inpug linkts to avalon documents.""" - # Run after sync to avalon event handler +class SyncLinksToAYON(BaseEventHandler): + """Synchronize input linkts to AYON server.""" + # Run after sync to AYON event handler priority = 110 def __init__(self, session): self.dbcon = AvalonMongoDB() - super(SyncLinksToAvalon, self).__init__(session) + super(SyncLinksToAYON, self).__init__(session) def launch(self, session, event): # Try to commit and if any error happen then recreate session @@ -138,8 +138,3 @@ def _get_mongo_ids_by_ftrack_ids(self, session, attr_id, ftrack_ids): 
mongo_id_by_ftrack_id[ftrack_id] = mongo_id return mongo_id_by_ftrack_id - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - SyncLinksToAvalon(session).register() diff --git a/services/processor/processor/handlers_to_convert/event_sync_to_avalon.py b/services/processor/processor/handlers_to_convert/event_sync_to_avalon.py index 60fb4bf6..c988d963 100644 --- a/services/processor/processor/handlers_to_convert/event_sync_to_avalon.py +++ b/services/processor/processor/handlers_to_convert/event_sync_to_avalon.py @@ -1772,7 +1772,6 @@ def process_moved(self): continue new_parent_id = ent_info["changes"]["parent_id"]["new"] - old_parent_id = ent_info["changes"]["parent_id"]["old"] mongo_id = avalon_ent["_id"] if self.changeability_by_mongo_id[mongo_id]: @@ -2729,8 +2728,3 @@ def _mongo_id_configuration( temp_dict[entity_type] = mongo_id_configuration_id return mongo_id_configuration_id - - -def register(session): - '''Register plugin. Called when used as an plugin.''' - SyncToAvalonEvent(session).register() diff --git a/services/processor/processor/lib/sync_from_ftrack.py b/services/processor/processor/lib/sync_from_ftrack.py index ac78f93f..076f7492 100644 --- a/services/processor/processor/lib/sync_from_ftrack.py +++ b/services/processor/processor/lib/sync_from_ftrack.py @@ -14,6 +14,7 @@ CUST_ATTR_KEY_SERVER_ID, CUST_ATTR_KEY_SERVER_PATH, CUST_ATTR_KEY_SYNC_FAIL, + CUST_ATTR_TOOLS, FTRACK_ID_ATTRIB, FTRACK_PATH_ATTRIB, REMOVED_ID_VALUE, @@ -324,9 +325,16 @@ def sync_to_server(self): ft_entities_by_parent_id[parent_id].append(entity) ft_entity_ids = set(ft_entities_by_id.keys()) - cust_attr_value_by_entity_id = get_custom_attributes_by_entity_id( + + cust_attr_value_by_entity_id = collections.defaultdict(dict) + # Fix custom attributes for tools + for entity_id, values_by_key in get_custom_attributes_by_entity_id( ft_session, ft_entity_ids, attr_confs, hier_attr_confs - ) + ).items(): + if CUST_ATTR_TOOLS in values_by_key: + 
values_by_key["tools"] = values_by_key.pop(CUST_ATTR_TOOLS) + cust_attr_value_by_entity_id[entity_id] = values_by_key + self.log.info("Checking changes of immutable entities") self.match_immutable_entities( ft_project, @@ -708,7 +716,7 @@ def update_attributes_from_ftrack( # in 'attribs' for key, value in attribute_values.items(): # QUESTION Should we skip "unsetting" of project attributes? - # - very dangerous for OpenPype and maybe for project should + # - very dangerous for AYON and maybe for project should # be taken default value of attribute (if there is any) if is_project and value is None: continue diff --git a/services/processor/processor/server.py b/services/processor/processor/server.py index 801150c7..199be86d 100644 --- a/services/processor/processor/server.py +++ b/services/processor/processor/server.py @@ -5,6 +5,7 @@ import logging import signal import traceback +import atexit import ayon_api import ftrack_api @@ -12,6 +13,12 @@ from ftrack_common import FtrackServer from .ftrack_session import AYONServerSession +from .download_utils import ( + cleanup_download_root, + downloaded_event_handlers, +) + +CURRENT_DIR = os.path.dirname(os.path.abspath(__file__)) class _GlobalContext: @@ -21,11 +28,9 @@ class _GlobalContext: def get_handler_paths() -> list[str]: - current_dir = os.path.dirname(os.path.abspath(__file__)) - handler_paths = [ - os.path.join(current_dir, "default_handlers"), + return [ + os.path.join(CURRENT_DIR, "default_handlers"), ] - return handler_paths def get_service_label(): @@ -35,7 +40,19 @@ def get_service_label(): ]) +def _cleanup_session(): + session = _GlobalContext.session + _GlobalContext.session = None + if session is not None: + logging.info("Closing ftrack session.") + if session.event_hub.connected is True: + session.event_hub.disconnect() + session.close() + + def _create_session(): + _cleanup_session() + ftrack_settings = ayon_api.get_service_addon_settings() ftrack_url = ftrack_settings["ftrack_server"] service_settings 
= ftrack_settings["service_settings"] @@ -83,14 +100,16 @@ def create_session(): session = _GlobalContext.session if session is not None: - print("Created ftrack session") + logging.info("Created ftrack session") return session if not error_message: error_message = error_summary - print(error_message) + + log_msg = error_message if tb_content: - print(tb_content) + log_msg += f"\n{tb_content}" + logging.error(log_msg) if ( (tb_content is not None and _GlobalContext.session_fail_logged == 2) or (tb_content is None and _GlobalContext.session_fail_logged == 1) @@ -121,23 +140,65 @@ def create_session(): def main_loop(): + addon_name = ayon_api.get_service_addon_name() + addon_version = ayon_api.get_service_addon_version() + variant = ayon_api.get_default_settings_variant() + handlers_url = ( + f"addons/{addon_name}/{addon_version}/customProcessorHandlers" + f"?variant={variant}" + ) while not _GlobalContext.stop_event.is_set(): session = create_session() if session is None: + logging.info( + "Failed to create ftrack session. Will try in 10 seconds." 
+ ) time.sleep(10) continue - _GlobalContext.session_fail_logged = False + _GlobalContext.session_fail_logged = 0 - handler_paths = get_handler_paths() + # Cleanup download root + cleanup_download_root() + + response = ayon_api.get(handlers_url) + custom_handlers = [] + if response.status_code == 200: + custom_handlers = response.data["custom_handlers"] - server = FtrackServer(handler_paths) - print("Starting listening loop") - server.run_server(session) + handler_paths = get_handler_paths() + with downloaded_event_handlers(custom_handlers) as custom_handler_dirs: + handler_paths.extend(custom_handler_dirs) + logging.info("Starting listen server") + server = FtrackServer(handler_paths) + try: + server.run_server(session) + finally: + logging.info("Server stopped.") + _cleanup_session() + logging.info("Main loop stopped.") + + +def _cleanup_process(): + """Cleanup timer threads on exit.""" + logging.info("Process stop requested. Terminating process.") + logging.info("Canceling threading timers.") + for thread in threading.enumerate(): + if isinstance(thread, threading.Timer): + thread.cancel() + + logging.info("Stopping main loop.") + if not _GlobalContext.stop_event.is_set(): + _GlobalContext.stop_event.set() + _cleanup_session() def main(): - logging.basicConfig(level=logging.INFO) + logging.basicConfig( + format="%(asctime)s %(levelname)-8s %(message)s", + level=logging.INFO, + datefmt="%Y-%m-%d %H:%M:%S" + ) try: ayon_api.init_service() @@ -146,7 +207,7 @@ def main(): connected = False if not connected: - print("Failed to connect to AYON server.") + logging.warning("Failed to connect to AYON server.") # Sleep for 10 seconds, so it is possible to see the message in # docker # NOTE: Becuase AYON connection failed, there's no way how to log it @@ -154,21 +215,17 @@ def main(): time.sleep(10) sys.exit(1) - print("Connected to AYON server.") + logging.info("Connected to AYON server.") # Register interrupt signal def signal_handler(sig, frame): - print("Process stop 
requested. Terminating process.") - _GlobalContext.stop_event.set() - session = _GlobalContext.session - if session is not None: - if session.event_hub.connected is True: - session.event_hub.disconnect() - session.close() - print("Termination finished.") + _cleanup_process() sys.exit(0) signal.signal(signal.SIGINT, signal_handler) signal.signal(signal.SIGTERM, signal_handler) - - main_loop() + atexit.register(_cleanup_process) + try: + main_loop() + finally: + _cleanup_process() diff --git a/services/processor/pyproject.toml b/services/processor/pyproject.toml index a9a4a8c6..33527f3c 100644 --- a/services/processor/pyproject.toml +++ b/services/processor/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "ftrack-processor" -version = "1.0.4-dev.1" +version = "1.2.1-dev.1" description = "" authors = ["Ynput s.r.o. "] diff --git a/version.py b/version.py deleted file mode 100644 index d0d12674..00000000 --- a/version.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- -"""Package declaring Ftrack addon version.""" -__version__ = "1.0.4-dev.1"