From f2fdeedac7e4d4de7afafdcf5a7bb4117c3c478e Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 7 Jun 2021 18:19:56 -0700 Subject: [PATCH 01/99] Add support for include on get_asset_children() --- frameioclient/service/assets.py | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py index b64d56d7..c684e990 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/service/assets.py @@ -17,15 +17,29 @@ def get(self, asset_id): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('get', endpoint) - def get_children(self, asset_id, **kwargs): + def get_children(self, asset_id, include=[]): """ Get a folder. :Args: asset_id (string): The asset id. + + :Kwargs: + includes (list): List of includes you would like to add. + + Example:: + + client.assets.get_children( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + includes=['review_links','cover_asset','creator','presentation'] + ) """ endpoint = '/assets/{}/children'.format(asset_id) - return self.client._api_call('get', endpoint, kwargs) + + if len(include) > 0: + endpoint += '?include={}'.format(include.join(',')) + + return self.client._api_call('get', endpoint) def create(self, parent_asset_id, **kwargs): """ From 49cbb28cf41c42670438ae2aca0a97ea21da232f Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 7 Jun 2021 18:22:41 -0700 Subject: [PATCH 02/99] Convert build_asset_info to private method --- frameioclient/service/assets.py | 31 +++++++++++++++---------------- 1 file changed, 15 insertions(+), 16 deletions(-) diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py index c684e990..714d3ae3 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/service/assets.py @@ -7,6 +7,18 @@ from ..lib import FrameioUploader, FrameioDownloader class Asset(Service): + def _build_asset_info(self, filepath): + full_path = os.path.abspath(filepath) + + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0] + } + + return file_info + def get(self, asset_id): """ Get an asset by id. @@ -123,7 +135,7 @@ def copy(self, destination_folder_id, **kwargs): endpoint = '/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, kwargs) - def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): + def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): """Bulk copy assets :Args: @@ -137,8 +149,7 @@ def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) """ - payload = {"batch": []} - new_list = list() + payload = {"batch": list()} if copy_comments: payload['copy_comments'] = "all" @@ -183,18 +194,6 @@ def _upload(self, asset, file): # if not os.path.exists(folderpath): # sys.exit("Folder doesn't exist, exiting...") - def build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - def upload(self, destination_id, filepath, asset=None): """ Upload a file. The method will exit once the file is downloaded. 
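Usage sketch (illustrative, not part of the patch): the `include` parameter added in PATCH 01/99 above can be exercised as below; the token and asset id are placeholders. Two details worth noting when reading that hunk: the signature takes `include`, so the docstring's `includes=` spelling would be rejected as an unexpected keyword, and `include.join(',')` inverts the usual `','.join(include)` idiom and would raise AttributeError on a list at runtime.

Example::

    import os
    from frameioclient import FrameioClient

    client = FrameioClient(os.environ["FRAMEIO_TOKEN"])  # placeholder token source
    children = client.assets.get_children(
        asset_id='1231-12414-afasfaf-aklsajflaksjfla',  # placeholder id taken from the docstring above
        include=['review_links', 'cover_asset', 'creator', 'presentation']
    )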
@@ -220,7 +219,7 @@ def upload(self, destination_id, filepath, asset=None): # Then try to grab it as a project folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] finally: - file_info = self.build_asset_info(filepath) + file_info = self._build_asset_info(filepath) if not asset: try: From c3b7dea7191d16da9211d7583176d5617bce33b0 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 7 Jun 2021 19:13:03 -0700 Subject: [PATCH 03/99] Add missing kwargs for pagination --- frameioclient/service/assets.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py index 714d3ae3..28e07bc9 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/service/assets.py @@ -29,7 +29,7 @@ def get(self, asset_id): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('get', endpoint) - def get_children(self, asset_id, include=[]): + def get_children(self, asset_id, include=[], **kwargs): """ Get a folder. @@ -51,7 +51,7 @@ def get_children(self, asset_id, include=[]): if len(include) > 0: endpoint += '?include={}'.format(include.join(',')) - return self.client._api_call('get', endpoint) + return self.client._api_call('get', endpoint, kwargs) def create(self, parent_asset_id, **kwargs): """ From ce3cf07cae6b18d79cbddac810fd3360fce60ece Mon Sep 17 00:00:00 2001 From: Jeff Date: Wed, 7 Jul 2021 12:28:05 +0300 Subject: [PATCH 04/99] (chore): Add pull request template --- .github/PULL_REQUEST_TEMPLATE.md | 13 +++++++++++++ 1 file changed, 13 insertions(+) create mode 100644 .github/PULL_REQUEST_TEMPLATE.md diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..7c188d61 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,13 @@ +## [DEVREL-XXXX] + +### Description: +Please provide a short description of what this PR does + +### Depends on: +- Does this PR depend on any other ones? + +### Includes changes from: +- Does this PR includ changes from another PR? + +### I'd like feedback on: +- What would you like feedback on? From f6ba232998777f51835e423de5d5a21acb733fe0 Mon Sep 17 00:00:00 2001 From: Jeff Date: Sat, 31 Jul 2021 00:26:03 -0700 Subject: [PATCH 05/99] Introduce helper function class (#76) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Add helper functions and missing PresentationException * Add helper functions to simplify a couple of tasks * Bump version: 1.1.0 → 1.2.0 * Fix FrameioHelpers class * Fix FrameioHelpers import * Fix __init__ imports * Finish fixing helpers... 
--- .bumpversion.cfg | 2 +- frameioclient/client.py | 15 +++++++++++++-- frameioclient/lib/__init__.py | 2 +- frameioclient/lib/download.py | 12 +++++++++--- frameioclient/service/__init__.py | 3 ++- frameioclient/service/assets.py | 20 ++++++++++++++++++- frameioclient/service/helpers.py | 32 +++++++++++++++++++++++++++++++ setup.py | 2 +- 8 files changed, 78 insertions(+), 10 deletions(-) create mode 100644 frameioclient/service/helpers.py diff --git a/.bumpversion.cfg b/.bumpversion.cfg index a9f550e4..484cfe2d 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.1.0 +current_version = 1.2.0 commit = True tag = True diff --git a/frameioclient/client.py b/frameioclient/client.py index 7706706b..9985cd39 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -4,8 +4,14 @@ from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry -from .lib import ClientVersion, PaginatedResponse, Utils, ClientVersion, FrameioDownloader - +from .lib import ( + ClientVersion, + PaginatedResponse, + Utils, + ClientVersion, + FrameioDownloader, + PresentationException +) class FrameioConnection(object): def __init__(self, token, host='https://api.frame.io'): @@ -128,3 +134,8 @@ def projects(self): def teams(self): from .service import Team return Team(self) + + @property + def helpers(self): + from .service import FrameioHelpers + return FrameioHelpers(self) diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index a29d8999..6bc3b71e 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -2,4 +2,4 @@ from .upload import FrameioUploader from .utils import Utils, PaginatedResponse, KB, MB from .exceptions import * -from .version import ClientVersion \ No newline at end of file +from .version import ClientVersion diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 9605d71a..2533bde5 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -1,5 +1,6 @@ import io import os +import sys import math import time import requests @@ -12,7 +13,7 @@ thread_local = threading.local() class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5): + def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, replace=False): self.multi_part = multi_part self.asset = asset self.asset_type = None @@ -27,6 +28,7 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency self.chunks = math.ceil(self.file_size/self.chunk_size) self.prefix = prefix self.filename = Utils.normalize_filename(asset["name"]) + self.replace = replace self._evaluate_asset() @@ -48,8 +50,12 @@ def _create_file_stub(self): # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space fp.close() except FileExistsError as e: - print(e) - raise e + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + print(e) + raise e return True def get_download_key(self): diff --git a/frameioclient/service/__init__.py b/frameioclient/service/__init__.py index 0cd634d5..8959ebc8 100644 --- a/frameioclient/service/__init__.py +++ b/frameioclient/service/__init__.py @@ -4,4 +4,5 @@ from .logs import AuditLogs from .comments import Comment from .projects import Project -from .links import ReviewLink, PresentationLink \ No newline at end of file +from .links import 
ReviewLink, PresentationLink +from .helpers import FrameioHelpers \ No newline at end of file diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py index 28e07bc9..3e691482 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/service/assets.py @@ -75,6 +75,24 @@ def create(self, parent_asset_id, **kwargs): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=kwargs) + def create_folder(self, parent_asset_id, name="New Folder"): + """ + Create a new folder. + + :Args: + parent_asset_id (string): The parent asset id. + name (string): The name of the new folder. + + Example:: + + client.assets.create_folder( + parent_asset_id="123abc", + name="ExampleFile.mp4", + ) + """ + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) + def from_url(self, parent_asset_id, name, url): """ Create an asset from a URL. @@ -242,7 +260,7 @@ def upload(self, destination_id, filepath, asset=None): return asset - def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5): + def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, replace=False): """ Download an asset. The method will exit once the file is downloaded. diff --git a/frameioclient/service/helpers.py b/frameioclient/service/helpers.py new file mode 100644 index 00000000..adaa5fba --- /dev/null +++ b/frameioclient/service/helpers.py @@ -0,0 +1,32 @@ +from .service import Service + +class FrameioHelpers(Service): + def get_updated_assets(self, account_id, project_id, timestamp): + """ + Get assets added or updated since timestamp. + + :Args: + account_id (string): The account id. + project_id (string): The project id. + timestamp (string): ISO 8601 UTC format. 
+ (datetime.now(timezone.utc).isoformat()) + """ + payload = { + "account_id": account_id, + "page": 1, + "page_size": 50, + "include": "children", + "sort": "-inserted_at", + "filter": { + "project_id": { + "op": "eq", + "value": project_id + }, + "updated_at": { + "op": "gte", + "value": timestamp + } + } + } + endpoint = '/search/library' + return self.client._api_call('post', endpoint, payload=payload) diff --git a/setup.py b/setup.py index 70acd024..aab7c210 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools.command.install import install -version='1.1.0' +version='1.2.0' with open("README.md", "r") as f: long_description = f.read() From ffaa6be0830787c57f637c5ed4b9fa883bea026c Mon Sep 17 00:00:00 2001 From: Jeff Date: Sat, 31 Jul 2021 00:27:39 -0700 Subject: [PATCH 06/99] Add support for library search (#78) * Add retries for occasional frame.io API 500 errors * Add library search function * Fix URL for library endpoint docs * Fix search endpoint * Tweak docstring --- frameioclient/client.py | 5 +-- frameioclient/service/__init__.py | 1 + frameioclient/service/search.py | 74 +++++++++++++++++++++++++++++++ 3 files changed, 77 insertions(+), 3 deletions(-) create mode 100644 frameioclient/service/search.py diff --git a/frameioclient/client.py b/frameioclient/client.py index 9985cd39..6e7eb0a1 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -1,4 +1,3 @@ -import re import sys import requests from requests.adapters import HTTPAdapter @@ -20,8 +19,8 @@ def __init__(self, token, host='https://api.frame.io'): self.retry_strategy = Retry( total=3, backoff_factor=1, - status_forcelist=[429], - method_whitelist=["POST", "OPTIONS", "GET"] + status_forcelist=[400, 429, 500], + method_whitelist=["POST", "OPTIONS", "GET", "PUT"] ) self.client_version = ClientVersion.version() self.headers = Utils.format_headers(self.token, self.client_version) diff --git a/frameioclient/service/__init__.py b/frameioclient/service/__init__.py index 8959ebc8..18ab0ea6 100644 --- a/frameioclient/service/__init__.py +++ b/frameioclient/service/__init__.py @@ -4,5 +4,6 @@ from .logs import AuditLogs from .comments import Comment from .projects import Project +from .search import Search from .links import ReviewLink, PresentationLink from .helpers import FrameioHelpers \ No newline at end of file diff --git a/frameioclient/service/search.py b/frameioclient/service/search.py new file mode 100644 index 00000000..1c0df40a --- /dev/null +++ b/frameioclient/service/search.py @@ -0,0 +1,74 @@ +from .service import Service + +class Search(Service): + def library(self, query, type=None, project_id=None, account_id=None, team_id=None, uploader=None, sort=None, filter=None, page_size=10, page=1): + """ + Search for assets using the library search endpoint, documented here \ + https://developer.frame.io/docs/workflows-assets/search-for-assets. + + For more information check out https://developer.frame.io/api/reference/operation/librarySearchPost/. + + :Args: + query (string): The search keyword you want to search with. + account_id (string): The account ID you want to be searching within. #TODO, confirm that this is required or not, could we use self.me? + + :Kwargs: + type (string): The type of frame.io asset you want to search: [file, folder, review_link, presentation]. + project_id (uuid): The frame.io project you want to constrain your search to. 
+ account_id (uuid): The frame.io account want you to contrain your search to (you may only have one, but some users have 20+ that they have acces to). + team_id (uuid): The frame.io team you want to constrain your search to. + uploader (string): The name of the uploader, this includes first + last name with a space. + sort (string): The field you want to sort by. + filter (string): This is only necessary if you want to build a fully custom query, the most common functionality is exposed using other kwargs though. + page_size (int): Useful if you want to increase the number of items returned by the search API here. + page (int): The page of results you're requesting. + + Example:: + client.assets.search( + query="Final", + type="file", + sort="name" + ) + """ + + # Define base payload + payload = { + "account_id": account_id, + "q": query, + "sort": sort, + "page_size": page_size, + "page": page + } + + # Add fully custom filter + if filter is not None: + payload['filter'] = filter + + # Add simple filters + if project_id is not None: + payload['filter']['project_id'] = { + "op": "eq", + "value": project_id + } + if team_id is not None: + payload['filter']['team_id'] = { + "op": "eq", + "value": team_id + } + if type is not None: + payload['filter']['type'] = { + "op": "eq", + "value": type + } + if uploader is not None: + payload['filter']['creator.name'] = { + "op": "match", + "value": uploader + } + + # Add sorting + if sort is not None: + payload['sort'] = sort + + endpoint = '/search/library' + return self.client._api_call('post', endpoint, payload=payload) From de74b8188fd9a0e1b438c33bebfea291551fd53e Mon Sep 17 00:00:00 2001 From: Jeff Date: Sat, 31 Jul 2021 00:43:28 -0700 Subject: [PATCH 07/99] Inmproved download functionality (#79) Add helper functions to simplify a couple of tasks Fix FrameioHelpers class Fix FrameioHelpers import Fix __init__ imports Finish fixing helpers... Don't do multi-part downloads on files smaller than 25 MB Add download improvements from strombergdev Fix extra prefix in client.assets.download() Tweak download function Fix missing call to _get_path() to set self.destination Add test to Makefile Fix download function's keyword via ** keyword args Don't do multi-part downloads on files smaller than 25 MB --- Makefile | 3 + frameioclient/lib/download.py | 102 +++++++++++++++++++++++--------- frameioclient/lib/exceptions.py | 21 +++++++ frameioclient/service/assets.py | 4 +- 4 files changed, 101 insertions(+), 29 deletions(-) diff --git a/Makefile b/Makefile index 3c2e716b..6876d3fa 100644 --- a/Makefile +++ b/Makefile @@ -15,3 +15,6 @@ bump-patch: clean: find . 
-name "*.pyc" -exec rm -f {} \; + +test: + cd tests && pipenv run python integration.py \ No newline at end of file diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 2533bde5..dd0a796f 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -8,12 +8,18 @@ import concurrent.futures from .utils import Utils -from .exceptions import DownloadException, WatermarkIDDownloadException, AssetNotFullyUploaded +from .exceptions import ( + DownloadException, + WatermarkIDDownloadException, + AssetNotFullyUploaded, + AssetChecksumNotPresent, + AssetChecksumMismatch +) thread_local = threading.local() class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, replace=False): + def __init__(self, asset, download_folder, prefix=None, replace=False, checksum_verification=True, multi_part=False, concurrency=5): self.multi_part = multi_part self.asset = asset self.asset_type = None @@ -29,8 +35,10 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency self.prefix = prefix self.filename = Utils.normalize_filename(asset["name"]) self.replace = replace + self.checksum_verification = checksum_verification self._evaluate_asset() + self._get_path() def _evaluate_asset(self): if self.asset.get("_type") != "file": @@ -45,19 +53,39 @@ def _get_session(self): return thread_local.session def _create_file_stub(self): + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + try: fp = open(self.destination, "w") # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space fp.close() - except FileExistsError as e: - if self.replace == True: - os.remove(self.destination) # Remove the file - self._create_file_stub() # Create a new stub - else: - print(e) - raise e + + except Exception as e: + raise e + return True + def _get_path(self): + print("prefix:", self.prefix) + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _get_checksum(self): + try: + self.original_checksum = self.asset['checksums']['xx_hash'] + except (TypeError, KeyError): + self.original_checksum = None + + return self.original_checksum + def get_download_key(self): try: url = self.asset['original'] @@ -84,26 +112,27 @@ def get_download_key(self): return url - def get_path(self): - if self.prefix != None: - self.filename = self.prefix + self.filename + def download_handler(self): + if os.path.isfile(self.destination) and self.replace != True: + try: + raise FileExistsError + except NameError: + raise OSError('File exists') # Python < 3.3 - if self.destination == None: - final_destination = os.path.join(self.download_folder, self.filename) - self.destination = final_destination - - return self.destination + url = self.get_download_key() - def download_handler(self): - if os.path.isfile(self.get_path()): - print("File already exists at this location.") - return self.destination + if self.watermarked == True: + return self.download(url) else: - url = self.get_download_key() - - if self.watermarked == True: + # Don't use multi-part download for files below 25 MB + if self.asset['filesize'] < 26214400: return self.download(url) + if self.multi_part == True: + return self.multi_part_download(url) else: + # Don't use multi-part 
download for files below 25 MB + if self.asset['filesize'] < 26214400: + return self.download(url) if self.multi_part == True: return self.multi_part_download(url) else: @@ -114,8 +143,17 @@ def download(self, url): print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) # Downloading - r = requests.get(url) - open(self.destination, "wb").write(r.content) + session = self._get_session() + r = session.get('GET', url, stream=True) + + with open(self.destination, 'wb') as handle: + try: + # TODO make sure this approach works for SBWM download + for chunk in r.iter_content(chunk_size=4096): + if chunk: + handle.write(chunk) + except requests.exceptions.ChunkedEncodingError as e: + raise e download_time = time.time() - start_time download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) @@ -161,7 +199,17 @@ def multi_part_download(self, url): download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) - return self.destination + if self.checksum_verification == True: + # Check for checksum, if not present throw error + if self._get_checksum() == None: + raise AssetChecksumNotPresent + else: + if Utils.calculate_hash(self.destination) != self.original_checksum: + raise AssetChecksumMismatch + else: + return self.destination + else: + return self.destination def download_chunk(self, task): # Download a particular chunk diff --git a/frameioclient/lib/exceptions.py b/frameioclient/lib/exceptions.py index 8710296a..9f03739d 100644 --- a/frameioclient/lib/exceptions.py +++ b/frameioclient/lib/exceptions.py @@ -40,3 +40,24 @@ def __init__( ): self.message = message super().__init__(self.message) + +class AssetChecksumNotPresent(Exception): + """Exception raised when there's no checksum present for the Frame.io asset. + """ + def __init__( + self, + message="""No checksum found on Frame.io for this asset. This could be because it was uploaded \ + before we introduced the feature, the media pipeline failed to process the asset, or the asset has yet to finish being processed.""" + ): + self.message = message + super().__init__(self.message) + +class AssetChecksumMismatch(Exception): + """Exception raised when the checksum for the downloaded file doesn't match what's found on Frame.io. + """ + def __init__( + self, + message="Checksum mismatch, you should re-download the asset to resolve any corrupt bits." + ): + self.message = message + super().__init__(self.message) \ No newline at end of file diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py index 3e691482..67d06554 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/service/assets.py @@ -260,7 +260,7 @@ def upload(self, destination_id, filepath, asset=None): return asset - def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, replace=False): + def download(self, asset, download_folder, **kwargs): """ Download an asset. The method will exit once the file is downloaded. 
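Usage sketch (illustrative, not part of the patch): after PATCH 07/99, `client.assets.download()` forwards its keyword arguments straight into `FrameioDownloader`, so the new `replace` and `checksum_verification` options shown above are reachable from the public call. The token, asset id, and folder are placeholders; per the handler above, files smaller than 25 MB are fetched in a single part even when `multi_part=True`.

Example::

    import os
    from frameioclient import FrameioClient

    client = FrameioClient(os.environ["FRAMEIO_TOKEN"])                 # placeholder token source
    asset = client.assets.get("9cee7966-7db1-4066-b326-f9e6f5e929e4")   # placeholder asset id
    path = client.assets.download(
        asset,
        download_folder="downloads",
        multi_part=True,              # threaded range-request download for large files
        concurrency=5,
        replace=True,                 # overwrite an existing local file instead of raising
        checksum_verification=True    # compare the result against the xx_hash recorded on Frame.io
    )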
@@ -272,5 +272,5 @@ def download(self, asset, download_folder, prefix=None, multi_part=False, concur client.assets.download(asset, "~./Downloads") """ - downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, concurrency) + downloader = FrameioDownloader(asset, download_folder, **kwargs) return downloader.download_handler() \ No newline at end of file From 07a28d0157b43adeebd51e904f261c43be02128a Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 18:54:07 -0700 Subject: [PATCH 08/99] Introduce telemetry, status bars, and re-factor Fix issue with (auth=True) in _get_session() Abstract API call formatter Remove duplicate code --- frameioclient/__init__.py | 2 - frameioclient/client.py | 15 +-- frameioclient/lib/download.py | 6 +- frameioclient/lib/transport.py | 189 ++++++++++++++++++++++++++++++++ frameioclient/service/assets.py | 13 ++- 5 files changed, 208 insertions(+), 17 deletions(-) create mode 100644 frameioclient/lib/transport.py diff --git a/frameioclient/__init__.py b/frameioclient/__init__.py index 2fc90a3b..ff9661ba 100644 --- a/frameioclient/__init__.py +++ b/frameioclient/__init__.py @@ -1,3 +1 @@ from .client import FrameioClient -from .service import * -from .lib import * \ No newline at end of file diff --git a/frameioclient/client.py b/frameioclient/client.py index 6e7eb0a1..a2d072d7 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -4,6 +4,8 @@ from requests.packages.urllib3.util.retry import Retry from .lib import ( + APIClient, + Telemetry, ClientVersion, PaginatedResponse, Utils, @@ -12,18 +14,9 @@ PresentationException ) -class FrameioConnection(object): +class FrameioClient(APIClient, object): def __init__(self, token, host='https://api.frame.io'): - self.token = token - self.host = host - self.retry_strategy = Retry( - total=3, - backoff_factor=1, - status_forcelist=[400, 429, 500], - method_whitelist=["POST", "OPTIONS", "GET", "PUT"] - ) - self.client_version = ClientVersion.version() - self.headers = Utils.format_headers(self.token, self.client_version) + super().__init__(token, host) self.adapter = HTTPAdapter(max_retries=self.retry_strategy) self.session = requests.Session() diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index dd0a796f..6f62ff98 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -34,8 +34,10 @@ def __init__(self, asset, download_folder, prefix=None, replace=False, checksum_ self.chunks = math.ceil(self.file_size/self.chunk_size) self.prefix = prefix self.filename = Utils.normalize_filename(asset["name"]) + self.request_logs = list() self.replace = replace self.checksum_verification = checksum_verification + self.session = AWSClient()._get_session() self._evaluate_asset() self._get_path() @@ -143,8 +145,8 @@ def download(self, url): print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) # Downloading - session = self._get_session() - r = session.get('GET', url, stream=True) + r = self.session.get(url) + open(self.destination, "wb").write(r.content) with open(self.destination, 'wb') as handle: try: diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py new file mode 100644 index 00000000..6fa672a8 --- /dev/null +++ b/frameioclient/lib/transport.py @@ -0,0 +1,189 @@ +import logging +import enlighten +import requests +import threading + +from requests.adapters import HTTPAdapter +from requests.packages.urllib3.util.retry import Retry + +from .version import 
ClientVersion +from .utils import Utils, PaginatedResponse +from .bandwidth import NetworkBandwidth, DiskBandwidth + + +class HTTPClient(object): + def __init__(self): + # Initialize empty thread object + self.thread_local = None + self.client_version = ClientVersion.version() + self.shared_headers = { + 'x-frameio-client': 'python/{}'.format(self.client_version) + } + # Configure retry strategy (very broad right now) + self.retry_strategy = Retry( + total=3, + backoff_factor=1, + status_forcelist=[400, 429, 500, 503], + method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"] + ) + # Create real thread + self._initialize_thread() + + def _initialize_thread(self): + self.thread_local = threading.local() + + def _get_session(self): + if not hasattr(self.thread_local, "session"): + http = requests.Session() + adapter = HTTPAdapter(max_retries=self.retry_strategy) + adapter.add_headers(self.shared_headers) # add version header + http.mount("https", adapter) + self.thread_local.session = http + + return self.thread_local.session + + +class APIClient(HTTPClient, object): + def __init__(self, token, host): + super().__init__() + self.host = host + self.token = token + self._initialize_thread() + self.session = self._get_session() + self.auth_header = { + 'Authorization': 'Bearer {}'.format(self.token), + } + + def _format_api_call(self, endpoint): + return '{}/v2{}'.format(self.host, endpoint) + + def _api_call(self, method, endpoint, payload={}, limit=None): + r = self.session.request( + method, + url=self._format_api_call(endpoint), + headers=self.auth_header, + json=payload + ) + + if r.ok: + if r.headers.get('page-number'): + if int(r.headers.get('total-pages')) > 1: + return PaginatedResponse( + results=r.json(), + limit=limit, + page_size=r.headers['per-page'], + total_pages=r.headers['total-pages'], + total=r.headers['total'], + endpoint=endpoint, + method=method, + payload=payload, + client=self + ) + if isinstance(r.json(), list): + return r.json()[:limit] + + return r.json() + + if r.status_code == 422 and "presentation" in endpoint: + raise PresentationException + + return r.raise_for_status() + + def get_specific_page(self, method, endpoint, payload, page): + """ + Gets a specific page for that endpoint, used by Pagination Class + + :Args: + method (string): 'get', 'post' + endpoint (string): endpoint ('/accounts//teams') + payload (dict): Request payload + page (int): What page to get + """ + if method == 'get': + endpoint = '{}?page={}'.format(endpoint, page) + return self._api_call(method, endpoint) + + if method == 'post': + payload['page'] = page + return self._api_call(method, endpoint, payload=payload) + + +class AWSClient(HTTPClient, object): + def __init__(self, concurrency=None, progress=True): + super().__init__() # Initialize via inheritance + self.progress = progress + if concurrency is not None: + self.concurrency = concurrency + else: + self.concurrency = self.optimize_concurrency() + + def optimize_concurrency(self): + """ + This method looks as the net_stats and disk_stats that we've run on \ + the current environment in order to suggest the best optimized \ + number of concurrent TCP connections. + + Example:: + AWSClient.optimize_concurrency() + """ + + net_stats = NetworkBandwidth + disk_stats = DiskBandwidth + + # Algorithm ensues + # + # + + return 5 + + @staticmethod + def get_byte_range(url, start_byte=0, end_byte=2048): + """ + Get a specific byte range from a given URL. 
This is **not** optimized \ + for heavily-threaded operations currently because it doesn't use a shared \ + HTTP session object / thread + + :Args: + url (string): The URL you want to fetch a byte-range from + start_byte (int): The first byte you want to request + end_byte (int): The last byte you want to extract + + Example:: + AWSClient.get_byte_range(asset, "~./Downloads") + """ + + headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + br = requests.get(url, headers=headers).content + return br + + @staticmethod + def check_cdn(url): + # TODO improve this algo + if 'assets.frame.io' in url: + return 'Cloudfront' + elif 's3' in url: + return 'S3' + else: + return None + + +class TransferJob(AWSClient): + # These will be used to track the job and then push telemetry + def __init__(self, job_info): + self.job_info = self.check_cdn(job_info) + self.cdn = 'S3' # or 'CF' - use check_cdn to confirm + self.progress_manager = None + +class DownloadJob(TransferJob): + def __init__(self): + self.asset_type = 'review_link' # we should use a dataclass here + # Need to create a re-usable job schema + # Think URL -> output_path + pass + +class UploadJob(TransferJob): + def __init__(self, destination): + self.destination = destination + # Need to create a re-usable job schema + # Think local_file path and remote Frame.io destination + pass diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py index 67d06554..f79b4a8a 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/service/assets.py @@ -260,7 +260,7 @@ def upload(self, destination_id, filepath, asset=None): return asset - def download(self, asset, download_folder, **kwargs): + def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, stats=False): """ Download an asset. The method will exit once the file is downloaded. 
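Usage sketch (illustrative, not part of the patch): `AWSClient.get_byte_range()` from PATCH 08/99's new transport layer issues a single HTTP Range request and returns the raw bytes; its docstring example above (`AWSClient.get_byte_range(asset, "~./Downloads")`) looks carried over from the download docstring, so the call below reflects the actual signature. The URL is a placeholder for a pre-signed asset URL, and this assumes a package state in which the module imports cleanly (intermediate commits in this series are still rewiring `lib/__init__.py`).

Example::

    from frameioclient.lib.transport import AWSClient

    signed_url = "https://assets.frame.io/example-signed-url"  # placeholder pre-signed URL
    first_two_kb = AWSClient.get_byte_range(signed_url, start_byte=0, end_byte=2047)
    print(len(first_two_kb))  # bytes object containing the requested range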
@@ -272,5 +272,14 @@ def download(self, asset, download_folder, **kwargs): client.assets.download(asset, "~./Downloads") """ - downloader = FrameioDownloader(asset, download_folder, **kwargs) + downloader = FrameioDownloader( + asset, + download_folder, + prefix, + multi_part, + concurrency, + user_id=self.client.me['id'], + stats=stats + ) + return downloader.download_handler() \ No newline at end of file From 4908192220471a2f3373acfc8cc5e74b8ee477a6 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 19:12:36 -0700 Subject: [PATCH 09/99] Update Readme Updated docstrings Prevent pre-allocation for download resources Improve doc strings and add arbitrary range request Parameterize the download directory for integration test Adds speedtest.net powered bandwidth check Adds download benchmark script Adds download progress UI Auto stash before rebase of "jh/speed-test" Refactor transport layer and add telemetry and logging WIP Dockerfile Update python version matrix for CI WIP add telemetry Dockerfile for CDN perf testing add missing dep in setup.py Missing comma :face-palm: Fix name of the segment python package Add additional missing packages Add missing comma Adjust status bar position Continued telemetry work --- .vscode/settings.json | 3 + Dockerfile | 34 +++ Makefile | 14 +- examples/download_asset.py | 15 - frameioclient/__init__.py | 4 +- frameioclient/client.py | 93 ++----- frameioclient/lib/__init__.py | 9 +- frameioclient/lib/bandwidth.py | 56 ++++ frameioclient/lib/download.py | 257 +++++++++++++----- frameioclient/lib/logger.py | 12 + frameioclient/lib/service.py | 20 ++ frameioclient/lib/telemetry.py | 97 +++++++ frameioclient/lib/transport.py | 32 ++- frameioclient/lib/utils.py | 20 +- frameioclient/service/service.py | 3 - .../{service => services}/__init__.py | 0 frameioclient/{service => services}/assets.py | 112 ++------ .../{service => services}/comments.py | 2 +- frameioclient/{service => services}/links.py | 2 +- frameioclient/{service => services}/logs.py | 2 +- .../{service => services}/projects.py | 2 +- frameioclient/{service => services}/teams.py | 2 +- frameioclient/{service => services}/users.py | 2 +- pyproject.toml | 3 + scripts/__init__.py | 0 scripts/benchmark/__init__.py | 0 scripts/benchmark/download.py | 63 +++++ scripts/benchmark/upload.py | 16 ++ scripts/benchmark/utils.py | 25 ++ setup.py | 7 +- tests/integration.py | 18 +- 31 files changed, 651 insertions(+), 274 deletions(-) create mode 100644 .vscode/settings.json create mode 100644 Dockerfile delete mode 100644 examples/download_asset.py create mode 100644 frameioclient/lib/bandwidth.py create mode 100644 frameioclient/lib/logger.py create mode 100644 frameioclient/lib/service.py create mode 100644 frameioclient/lib/telemetry.py delete mode 100644 frameioclient/service/service.py rename frameioclient/{service => services}/__init__.py (100%) rename frameioclient/{service => services}/assets.py (71%) rename frameioclient/{service => services}/comments.py (98%) rename frameioclient/{service => services}/links.py (98%) rename frameioclient/{service => services}/logs.py (91%) rename frameioclient/{service => services}/projects.py (98%) rename frameioclient/{service => services}/teams.py (98%) rename frameioclient/{service => services}/users.py (81%) create mode 100644 pyproject.toml create mode 100644 scripts/__init__.py create mode 100644 scripts/benchmark/__init__.py create mode 100644 scripts/benchmark/download.py create mode 100644 scripts/benchmark/upload.py create mode 100644 
scripts/benchmark/utils.py diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 00000000..de288e1e --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.formatting.provider": "black" +} \ No newline at end of file diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 00000000..a66579e1 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,34 @@ +FROM python:3.8.6-slim-buster as deps +# Set work directory +WORKDIR /home/speedtest + +# Copy files +COPY Pipfile . +COPY Pipfile.lock . + +# Install pipenv +RUN pip install pipenv + +FROM deps as installer +# Set work directory +WORKDIR /home/speedtest + +# Install deps +RUN pipenv install --system --deploy --ignore-pipfile + +# Copy over the other pieces +COPY frameioclient frameioclient +COPY setup.py . +COPY README.md . + +# Install the local frameioclient +RUN pipenv install -e . --skip-lock + +# Copy over scripts and tests +COPY scripts scripts +COPY tests tests + +ENV SEGMENT_WRITE_KEY= + +FROM installer as runtime +ENTRYPOINT [ "pipenv", "run", "python", "scripts/benchmark/download.py" ] diff --git a/Makefile b/Makefile index 6876d3fa..a570a84d 100644 --- a/Makefile +++ b/Makefile @@ -17,4 +17,16 @@ clean: find . -name "*.pyc" -exec rm -f {} \; test: - cd tests && pipenv run python integration.py \ No newline at end of file + cd tests && pipenv run python integration.py + +package: + pipenv run python3 setup.py sdist bdist_wheel + +build-docker: + docker build . -t benchmark + +run-benchmark: + docker run -it -e $1 benchmark + +format: + black frameioclient \ No newline at end of file diff --git a/examples/download_asset.py b/examples/download_asset.py deleted file mode 100644 index 4e4e12f5..00000000 --- a/examples/download_asset.py +++ /dev/null @@ -1,15 +0,0 @@ -import os -from frameioclient import FrameioClient - -def benchmark(asset_id): - token = os.getenv("FRAMEIO_TOKEN") - client = FrameioClient(token) - asset_info = client.assets.get(asset_id) - accelerated_filename = client.download(asset_info, "downloads", prefix="accelerated_", multi_part=True, concurrency=20) - - # print("Normal speed: {}, Accelerated speed: {}".format(normal_speed, accelerated_speed)) - -if __name__ == "__main__": - # download_file("60ff4cca-f97b-4311-be24-0eecd6970c01") - benchmark("20a1df34-e8ad-48fd-b455-c68294cc7f71") - # benchmark("9cee7966-7db1-4066-b326-f9e6f5e929e4") \ No newline at end of file diff --git a/frameioclient/__init__.py b/frameioclient/__init__.py index ff9661ba..8f02abec 100644 --- a/frameioclient/__init__.py +++ b/frameioclient/__init__.py @@ -1 +1,3 @@ -from .client import FrameioClient +from .lib import * +from .services import * +from .client import FrameioClient \ No newline at end of file diff --git a/frameioclient/client.py b/frameioclient/client.py index a2d072d7..74aefa9c 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -1,80 +1,25 @@ -import sys -import requests -from requests.adapters import HTTPAdapter -from requests.packages.urllib3.util.retry import Retry - from .lib import ( + Utils, APIClient, + AWSClient, Telemetry, ClientVersion, - PaginatedResponse, - Utils, ClientVersion, - FrameioDownloader, - PresentationException + FrameioDownloader ) class FrameioClient(APIClient, object): - def __init__(self, token, host='https://api.frame.io'): + def __init__(self, token, host): super().__init__(token, host) - self.adapter = HTTPAdapter(max_retries=self.retry_strategy) - self.session = requests.Session() - self.session.mount("https://", self.adapter) - - def 
_api_call(self, method, endpoint, payload={}, limit=None): - url = '{}/v2{}'.format(self.host, endpoint) - - r = self.session.request( - method, - url, - json=payload, - headers=self.headers, - ) - - if r.ok: - if r.headers.get('page-number'): - if int(r.headers.get('total-pages')) > 1: - return PaginatedResponse( - results=r.json(), - limit=limit, - page_size=r.headers['per-page'], - total_pages=r.headers['total-pages'], - total=r.headers['total'], - endpoint=endpoint, - method=method, - payload=payload, - client=self - ) - if isinstance(r.json(), list): - return r.json()[:limit] - return r.json() - - if r.status_code == 422 and "presentation" in endpoint: - raise PresentationException - - return r.raise_for_status() - - def get_specific_page(self, method, endpoint, payload, page): - """ - Gets a specific page for that endpoint, used by Pagination Class - - :Args: - method (string): 'get', 'post' - endpoint (string): endpoint ('/accounts//teams') - payload (dict): Request payload - page (int): What page to get - """ - if method == 'get': - endpoint = '{}?page={}'.format(endpoint, page) - return self._api_call(method, endpoint) - - if method == 'post': - payload['page'] = page - return self._api_call(method, endpoint, payload=payload) + @property + def me(self): + return self.users.get_me() + @property + def telemetry(self): + return Telemetry(self) -class FrameioClient(FrameioConnection): @property def _auth(self): return self.token @@ -85,46 +30,46 @@ def _version(self): @property def _download(self): - return FrameioDownloader + return FrameioDownloader(self) @property def users(self): - from .service import User + from .services import User return User(self) @property def assets(self): - from .service import Asset + from .services import Asset return Asset(self) @property def comments(self): - from .service import Comment + from .services import Comment return Comment(self) @property def logs(self): - from .service import AuditLogs + from .services import AuditLogs return AuditLogs(self) @property def review_links(self): - from .service import ReviewLink + from .services import ReviewLink return ReviewLink(self) @property def presentation_links(self): - from .service import PresentationLink + from .services import PresentationLink return PresentationLink(self) @property def projects(self): - from .service import Project + from .services import Project return Project(self) @property def teams(self): - from .service import Team + from .services import Team return Team(self) @property diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index 6bc3b71e..fa270c2b 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -1,5 +1,8 @@ -from .download import FrameioDownloader -from .upload import FrameioUploader -from .utils import Utils, PaginatedResponse, KB, MB from .exceptions import * +from .logger import SDKLogger +from .telemetry import Telemetry from .version import ClientVersion +from .upload import FrameioUploader +from .download import FrameioDownloader +from .transport import AWSClient, APIClient +from .utils import Utils, PaginatedResponse, KB, MB \ No newline at end of file diff --git a/frameioclient/lib/bandwidth.py b/frameioclient/lib/bandwidth.py new file mode 100644 index 00000000..8349783b --- /dev/null +++ b/frameioclient/lib/bandwidth.py @@ -0,0 +1,56 @@ +import speedtest + +class NetworkBandwidth: + # Test the network bandwidth any time we have a new IP address + # Persist this information to a config.json file + + def 
__init__(self): + self.results = dict() + + def load_stats(self): + # Force an update on these stats before starting download/upload + pass + + def persist_stats(self): + pass + + def run(self): + self.results = self.speed_test() + + @staticmethod + def speedtest(): + """ + Run a speedtest using Speedtest.net in order to get a 'control' for \ + bandwidth optimization. + + Example:: + NetworkBandwidth.speedtest() + """ + + st = speedtest.Speedtest() + download_speed = round(st.download(threads=10) * (1.192 * 10 ** -7), 2) + upload_speed = round(st.upload(threads=10) * (1.192 * 10 ** -7), 2) + servernames = [] + server_names = st.get_servers(servernames) + ping = st.results.ping + + return { + "ping": ping, + "download_speed": download_speed, + "upload_speed": upload_speed, + } + + def __repr__(self): + self.results + +class DiskBandwidth: + # Test the disk speed and write to a config.json file for re-use + # Worth re-checking the disk every time a new one is detected (base route) + + def __init__(self, volume): + self.volume = volume + self.results = dict() + + def __repr__(self): + self.results + diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 6f62ff98..384404c8 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -3,23 +3,24 @@ import sys import math import time -import requests +import enlighten import threading import concurrent.futures from .utils import Utils +from .logger import SDKLogger +from .transport import AWSClient +from .telemetry import Event, ComparisonTest + from .exceptions import ( DownloadException, WatermarkIDDownloadException, - AssetNotFullyUploaded, - AssetChecksumNotPresent, - AssetChecksumMismatch + AssetNotFullyUploaded ) -thread_local = threading.local() - class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix=None, replace=False, checksum_verification=True, multi_part=False, concurrency=5): + def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, progress=True, user_id=None, stats=False): + self.user_id = user_id self.multi_part = multi_part self.asset = asset self.asset_type = None @@ -30,29 +31,49 @@ def __init__(self, asset, download_folder, prefix=None, replace=False, checksum_ self.file_size = asset["filesize"] self.concurrency = concurrency self.futures = list() + self.checksum = None + self.original_checksum = None self.chunk_size = (25 * 1024 * 1024) # 25 MB chunk size self.chunks = math.ceil(self.file_size/self.chunk_size) self.prefix = prefix + self.stats = stats + self.progress = progress + self.bytes_started = 0 + self.bytes_completed = 0 + self.in_progress = 0 self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() - self.replace = replace - self.checksum_verification = checksum_verification - self.session = AWSClient()._get_session() + self.session = AWSClient()._get_session(auth=None) self._evaluate_asset() self._get_path() + def _update_in_progress(self): + self.in_progress = self.bytes_started - self.bytes_completed + return self.in_progress # Number of in-progress bytes + + def get_path(self): + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + def _evaluate_asset(self): if self.asset.get("_type") != "file": raise DownloadException(message="Unsupport Asset type: {}".format(self.asset.get("_type"))) + # This 
logic may block uploads that were started before this field was introduced if self.asset.get("upload_completed_at") == None: raise AssetNotFullyUploaded - def _get_session(self): - if not hasattr(thread_local, "session"): - thread_local.session = requests.Session() - return thread_local.session + try: + self.original_checksum = self.asset['checksums']['xx_hash'] + except (TypeError, KeyError): + self.original_checksum = None def _create_file_stub(self): if self.replace == True: @@ -115,22 +136,20 @@ def get_download_key(self): return url def download_handler(self): - if os.path.isfile(self.destination) and self.replace != True: - try: - raise FileExistsError - except NameError: - raise OSError('File exists') # Python < 3.3 - - url = self.get_download_key() + if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): + print("Folder exists, don't need to create it") + else: + print("Destination folder not found, creating") + os.mkdir(self.download_folder) - if self.watermarked == True: - return self.download(url) + if os.path.isfile(self.get_path()): + print("File already exists at this location.") + return self.destination else: - # Don't use multi-part download for files below 25 MB - if self.asset['filesize'] < 26214400: - return self.download(url) - if self.multi_part == True: - return self.multi_part_download(url) + url = self.get_download_key() + + if self.watermarked == True: + return self.single_part_download(url) else: # Don't use multi-part download for files below 25 MB if self.asset['filesize'] < 26214400: @@ -138,9 +157,9 @@ def download_handler(self): if self.multi_part == True: return self.multi_part_download(url) else: - return self.download(url) + return self.single_part_download(url) - def download(self, url): + def single_part_download(self, url): start_time = time.time() print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) @@ -179,28 +198,113 @@ def multi_part_download(self, url): print("Multi-part download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) # Queue up threads - with concurrent.futures.ThreadPoolExecutor(max_workers=self.concurrency) as executor: - for i in range(int(self.chunks)): - out_byte = offset * (i+1) # Increment by the iterable + 1 so we don't mutiply by zero - task = (url, in_byte, out_byte, i) - - time.sleep(0.1) # Stagger start for each chunk by 0.1 seconds - self.futures.append(executor.submit(self.download_chunk, task)) - in_byte = out_byte # Reset new in byte equal to last out byte - - # Wait on threads to finish - for future in concurrent.futures.as_completed(self.futures): - try: - status = future.result() - print(status) - except Exception as exc: - print(exc) + with enlighten.get_manager() as manager: + status = manager.status_bar( + position=3, + status_format=u'{fill}Stage: {stage}{fill}{elapsed}', + color='bold_underline_bright_white_on_lightslategray', + justify=enlighten.Justify.CENTER, + stage='Initializing', + autorefresh=True, + min_delta=0.5 + ) + + BAR_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ + 'Downloading: {count_1:.2j}/{total:.2j} ' + \ + 'Completed: {count_2:.2j}/{total:.2j} ' + \ + '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + + # Add counter to track completed chunks + initializing = manager.counter( + position=2, + total=float(self.file_size), + desc='Progress', + unit='B', + bar_format=BAR_FORMAT, + ) + + # Add additional counter + in_progress = initializing.add_subcounter('yellow', 
all_fields=True) + completed = initializing.add_subcounter('green', all_fields=True) + + # Set default state + initializing.refresh() + + status.update(stage='Downloading', color='green') + + with concurrent.futures.ThreadPoolExecutor(max_workers=self.concurrency) as executor: + for i in range(int(self.chunks)): + # Increment by the iterable + 1 so we don't mutiply by zero + out_byte = offset * (i+1) + # Create task tuple + task = (url, in_byte, out_byte, i, in_progress) + # Stagger start for each chunk by 0.1 seconds + if i < self.concurrency: time.sleep(0.1) + # Append tasks to futures list + self.futures.append(executor.submit(self._download_chunk, task)) + # Reset new in byte equal to last out byte + in_byte = out_byte - # Calculate and print stats - download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) - print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + # Keep updating the progress while we have > 0 bytes left. + # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + completed.update_from(in_progress, float((chunk_size - 1)), force=True) + except Exception as exc: + print(exc) + + # Calculate and print stats + download_time = round((time.time() - start_time), 2) + download_speed = round((self.file_size/download_time), 2) + + # Perform hash-verification + status.update(stage='Verifying') + + VERIFICATION_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ + 'Progress: {count:.2j}/{total:.2j} ' + \ + '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + + # Add counter to track completed chunks + verification = manager.counter( + position=1, + total=float(self.file_size), + desc='Verifying', + unit='B', + bar_format=VERIFICATION_FORMAT, + color='purple' + ) + + # Calculate the file hash + Utils.calculate_hash(self.destination, progress_callback=verification) + + # Update the header + status.update(stage='Download Complete!', force=True) + + # Log completion event + SDKLogger('downloads').info("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + + # Submit telemetry + transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} + + Event(self.user_id, 'python-sdk-download-stats', transfer_stats) + + # If stats = True, we return a dict with way more info, otherwise \ + if self.stats: + # We end by returning a dict with info about the download + dl_info = { + "destination": self.destination, + "speed": download_speed, + "elapsed": download_time, + "cdn": AWSClient.check_cdn(url), + "concurrency": self.concurrency, + "size": self.file_size, + "chunks": self.chunks + } + return dl_info + return self.destination + if self.checksum_verification == True: # Check for checksum, if not present throw error if self._get_checksum() == None: @@ -213,33 +317,60 @@ def multi_part_download(self, url): else: return self.destination - def download_chunk(self, task): + def _download_chunk(self, task): # Download a particular chunk # Called by the threadpool executor + # Destructure the task object into its parts url = task[0] start_byte = task[1] end_byte = task[2] chunk_number = task[3] + in_progress = task[4] + + # Set the initial chunk_size, but prepare to overwrite + chunk_size = (end_byte - start_byte) + + if self.bytes_started + (chunk_size) > self.file_size: + difference = abs(self.file_size - (self.bytes_started + chunk_size)) # 
should be negative + chunk_size = chunk_size - difference + print(f"Chunk size as done via math: {chunk_size}") + else: + pass - session = self._get_session() - print("Getting chunk {}/{}".format(chunk_number + 1, self.chunks)) + # Set chunk size in a smarter way + self.bytes_started += (chunk_size) + + # Update the bar for in_progress chunks + in_progress.update(float(chunk_size)) - # Specify the starting and ending of the file + # Specify the start and end of the range request headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} # Grab the data as a stream - r = session.get(url, headers=headers, stream=True) + r = self.session.get(url, headers=headers, stream=True) + # Write the file to disk with open(self.destination, "r+b") as fp: - fp.seek(start_byte) # Seek to the right of the file + fp.seek(start_byte) # Seek to the right spot in the file + chunk_size = len(r.content) # Get the final chunk size fp.write(r.content) # Write the data - print("Done writing chunk {}/{}".format(chunk_number + 1, self.chunks)) - return "Complete!" + # Save requests logs + self.request_logs.append({ + 'headers': r.headers, + 'http_status': r.status_code, + 'bytes_transferred': len(r.content) + }) + + # Increase the count for bytes_completed, but only if it doesn't overrun file length + self.bytes_completed += (chunk_size) + if self.bytes_completed > self.file_size: + self.bytes_completed = self.file_size + + # Update the in_progress bar + self._update_in_progress() + + # After the function completes, we report back the # of bytes transferred + return chunk_size - @staticmethod - def get_byte_range(url, start_byte=0, end_byte=2048): - headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} - br = requests.get(url, headers=headers).content - return br \ No newline at end of file diff --git a/frameioclient/lib/logger.py b/frameioclient/lib/logger.py new file mode 100644 index 00000000..62b7d389 --- /dev/null +++ b/frameioclient/lib/logger.py @@ -0,0 +1,12 @@ +import logging + +class SDKLogger(object): + def __init__(self, log_name): + self.initialize_logger() + self.logger = logging.getLogger(log_name) + + def initialize_logger(self): + logging.basicConfig(level=logging.INFO) + + def info(self, message): + self.logger.info(message) \ No newline at end of file diff --git a/frameioclient/lib/service.py b/frameioclient/lib/service.py new file mode 100644 index 00000000..abc9a898 --- /dev/null +++ b/frameioclient/lib/service.py @@ -0,0 +1,20 @@ +from ..lib.bandwidth import NetworkBandwidth + +class Service(object): + def __init__(self, client): + self.client = client + self.concurrency = 10 + self.bandwidth = NetworkBandwidth() + + # Auto-configure afterwards + self.autoconfigure() + + def autoconfigure(self): + # self.bandwidth = SpeedTest.speedtest() + return + + def save_config(self): + pass + + def load_config(self): + pass diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py new file mode 100644 index 00000000..869e7bae --- /dev/null +++ b/frameioclient/lib/telemetry.py @@ -0,0 +1,97 @@ +import os +import analytics + +from pprint import pprint + +from .logger import SDKLogger +from .version import ClientVersion + +segment_id = os.environ['SEGMENT_WRITE_KEY'] # Production +analytics.write_key = segment_id + +class Telemetry(object): + def __init__(self, user_id): + self.user_id = user_id + self.speedtest = None + self.identity = None + self.context = None + self.integrations = { + 'all': False, + 'Amplitude': True + } + self.logger = SDKLogger('telemetry') + + 
self.build_context() + + def build_context(self): + return { + "app": { + "name": "python-frameoclient", + "version": ClientVersion.version(), + } + } + + def push(self, event_name, properties): + self.logger.info(("Pushing '{}' event to segment".format(event_name), properties)) + + try: + status = analytics.track( + self.user_id, + event_name, + properties={**properties}, + context={**self.build_context()}, + integrations={**self.integrations} + ) + except Exception as e: + self.logger.info(e, event_name, properties) + + +class Event(Telemetry, object): + def __init__(self, user_id, event_name, properties): + super().__init__(user_id) + self.push(event_name, properties) + +class ComparisonTest(Event, object): + def __init__(self, transfer_stats, request_logs=[]): + super().__init__() + self.event_name = event_name + self.transfer_stats = None + self.requests_logs = requests_logs + + @staticmethod + def _parse_requests_data(req_object): + return { + 'speed': 0, + 'time_to_first_byte': 0, + 'response_time': 0, + 'byte_transferred': 0, + 'http_status': 200, + 'request_type': 'GET' + } + + def _build_transfer_stats_payload(self, event_data): + # Turn the request payload into a useful shape + properties = { + 'download_speed': 0, + 'control': { + 'upload_bytes_sec': 0, + 'download_bits_sec': 0, + 'ping_ms': 0 + }, + 'hash_speed': 0 + } + + return properties + + def track_transfer(self): + for chunk in self.requests_logs: + pprint(chunk) + # self.logger.info(pprint(chunk)) + + # Collect info to build message + + # Build payload for transfer tracking + # stats_payload = self._build_transfer_stats_payload() + + # Push the payload for tracking the transfer + # self.push('python_transfer_stats', stats_payload) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 6fa672a8..9dbfa3fa 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -1,7 +1,9 @@ +import os import logging import enlighten import requests import threading +import concurrent.futures from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry @@ -32,7 +34,7 @@ def __init__(self): def _initialize_thread(self): self.thread_local = threading.local() - def _get_session(self): + def _get_session(self, auth=True): if not hasattr(self.thread_local, "session"): http = requests.Session() adapter = HTTPAdapter(max_retries=self.retry_strategy) @@ -44,23 +46,24 @@ def _get_session(self): class APIClient(HTTPClient, object): - def __init__(self, token, host): + def __init__(self, token, host='https://api.frame.io'): super().__init__() self.host = host self.token = token self._initialize_thread() - self.session = self._get_session() + self.session = self._get_session(auth=token) self.auth_header = { 'Authorization': 'Bearer {}'.format(self.token), } - - def _format_api_call(self, endpoint): - return '{}/v2{}'.format(self.host, endpoint) def _api_call(self, method, endpoint, payload={}, limit=None): + url = '{}/v2{}'.format(self.host, endpoint) + + headers = {**self.shared_headers, **self.auth_header} + r = self.session.request( method, - url=self._format_api_call(endpoint), + url, headers=self.auth_header, json=payload ) @@ -94,14 +97,14 @@ def get_specific_page(self, method, endpoint, payload, page): Gets a specific page for that endpoint, used by Pagination Class :Args: - method (string): 'get', 'post' - endpoint (string): endpoint ('/accounts//teams') - payload (dict): Request payload - page (int): What page to get + method (string): 'get', 'post' + 
endpoint (string): endpoint ('/accounts//teams') + payload (dict): Request payload + page (int): What page to get """ if method == 'get': endpoint = '{}?page={}'.format(endpoint, page) - return self._api_call(method, endpoint) + return self._api_call(method, endpoint) if method == 'post': payload['page'] = page @@ -140,8 +143,7 @@ def optimize_concurrency(self): def get_byte_range(url, start_byte=0, end_byte=2048): """ Get a specific byte range from a given URL. This is **not** optimized \ - for heavily-threaded operations currently because it doesn't use a shared \ - HTTP session object / thread + for heavily-threaded operations currently. :Args: url (string): The URL you want to fetch a byte-range from @@ -170,7 +172,7 @@ def check_cdn(url): class TransferJob(AWSClient): # These will be used to track the job and then push telemetry def __init__(self, job_info): - self.job_info = self.check_cdn(job_info) + self.job_info = job_info self.cdn = 'S3' # or 'CF' - use check_cdn to confirm self.progress_manager = None diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 7f3ef571..4c29d7ea 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -1,6 +1,7 @@ import re import sys import xxhash +import enlighten KB = 1024 MB = KB * KB @@ -51,7 +52,7 @@ def format_bytes(size, type="speed"): return formatted @staticmethod - def calculate_hash(file_path): + def calculate_hash(file_path, progress_callback=None): """ Calculate an xx64hash """ @@ -62,7 +63,11 @@ def calculate_hash(file_path): numread = f.readinto(b) if not numread: break + xxh64_hash.update(b[:numread]) + if progress_callback: + # Should only subtract 1 here when necessary, not every time! + progress_callback.update(float(numread - 1), force=True) xxh64_digest = xxh64_hash.hexdigest() @@ -189,3 +194,16 @@ def __len__(self): return self.total + +class ProgressBar(object): + def __init__(self, parent=None, total=0, iterable=[]): + self.parent = parent + self.total = total + self.iterable = iterable + + def create(self): + pass + + def update(self): + pass + diff --git a/frameioclient/service/service.py b/frameioclient/service/service.py deleted file mode 100644 index 159eb61d..00000000 --- a/frameioclient/service/service.py +++ /dev/null @@ -1,3 +0,0 @@ -class Service(object): - def __init__(self, client): - self.client = client diff --git a/frameioclient/service/__init__.py b/frameioclient/services/__init__.py similarity index 100% rename from frameioclient/service/__init__.py rename to frameioclient/services/__init__.py diff --git a/frameioclient/service/assets.py b/frameioclient/services/assets.py similarity index 71% rename from frameioclient/service/assets.py rename to frameioclient/services/assets.py index f79b4a8a..5c8fa82c 100644 --- a/frameioclient/service/assets.py +++ b/frameioclient/services/assets.py @@ -1,24 +1,12 @@ import os import mimetypes -from .service import Service from .projects import Project +from ..lib.service import Service from ..lib import FrameioUploader, FrameioDownloader class Asset(Service): - def _build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - def get(self, asset_id): """ Get an asset by id. 
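For context, the progress-aware hashing that Utils.calculate_hash() gains above is just chunked xxh64 with a callback fired for each block read. A rough, self-contained equivalent (the plain callable below stands in for the enlighten counter the SDK actually passes in):

    import xxhash

    def hash_file(file_path, chunk_size=4096, progress_callback=None):
        digest = xxhash.xxh64()
        with open(file_path, "rb") as f:
            while True:
                block = f.read(chunk_size)
                if not block:
                    break
                digest.update(block)
                if progress_callback:
                    progress_callback(len(block))  # bytes hashed in this block
        return digest.hexdigest()

    # e.g. hash_file("./file.mov", progress_callback=lambda n: print(n, "bytes"))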
@@ -29,28 +17,14 @@ def get(self, asset_id): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('get', endpoint) - def get_children(self, asset_id, include=[], **kwargs): + def get_children(self, asset_id, **kwargs): """ Get a folder. :Args: asset_id (string): The asset id. - - :Kwargs: - includes (list): List of includes you would like to add. - - Example:: - - client.assets.get_children( - asset_id='1231-12414-afasfaf-aklsajflaksjfla', - includes=['review_links','cover_asset','creator','presentation'] - ) """ endpoint = '/assets/{}/children'.format(asset_id) - - if len(include) > 0: - endpoint += '?include={}'.format(include.join(',')) - return self.client._api_call('get', endpoint, kwargs) def create(self, parent_asset_id, **kwargs): @@ -75,24 +49,6 @@ def create(self, parent_asset_id, **kwargs): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=kwargs) - def create_folder(self, parent_asset_id, name="New Folder"): - """ - Create a new folder. - - :Args: - parent_asset_id (string): The parent asset id. - name (string): The name of the new folder. - - Example:: - - client.assets.create_folder( - parent_asset_id="123abc", - name="ExampleFile.mp4", - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) - def from_url(self, parent_asset_id, name, url): """ Create an asset from a URL. @@ -153,7 +109,7 @@ def copy(self, destination_folder_id, **kwargs): endpoint = '/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, kwargs) - def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): + def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): """Bulk copy assets :Args: @@ -167,7 +123,8 @@ def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) """ - payload = {"batch": list()} + payload = {"batch": []} + new_list = list() if copy_comments: payload['copy_comments'] = "all" @@ -197,7 +154,7 @@ def _upload(self, asset, file): file (file): The file to upload. Example:: - client._upload(asset, open('example.mp4')) + client.upload(asset, open('example.mp4')) """ uploader = FrameioUploader(asset, file) @@ -212,20 +169,19 @@ def _upload(self, asset, file): # if not os.path.exists(folderpath): # sys.exit("Folder doesn't exist, exiting...") - def upload(self, destination_id, filepath, asset=None): - """ - Upload a file. The method will exit once the file is downloaded. - - :Args: - destination_id (uuid): The destination Project or Folder ID. - filepath (string): The locaiton of the file on your local filesystem \ - that you want to upload. 
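For reference, folding an include list into the children query string comes down to joining the values onto a comma separator, along these lines (the asset id is the placeholder used elsewhere in these docstrings, not a real id):

    includes = ['review_links', 'cover_asset', 'creator', 'presentation']
    endpoint = '/assets/{}/children'.format('1231-12414-afasfaf-aklsajflaksjfla')
    if includes:
        # ','.join(...) produces "review_links,cover_asset,creator,presentation"
        endpoint += '?include={}'.format(','.join(includes))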
+ def build_asset_info(self, filepath): + full_path = os.path.abspath(filepath) - Example:: + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0] + } - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ + return file_info + def upload(self, destination_id, filepath): # Check if destination is a project or folder # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided # Then we start our upload @@ -237,28 +193,20 @@ def upload(self, destination_id, filepath, asset=None): # Then try to grab it as a project folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] finally: - file_info = self._build_asset_info(filepath) - - if not asset: - try: - asset = self.create(folder_id, - type="file", - name=file_info['filename'], - filetype=file_info['mimetype'], - filesize=file_info['filesize'] - ) - - except Exception as e: - print(e) - - try: - with open(file_info['filepath'], "rb") as fp: - self._upload(asset, fp) + file_info = self.build_asset_info(filepath) + try: + asset = self.create(folder_id, + type="file", + name=file_info['filename'], + filetype=file_info['mimetype'], + filesize=file_info['filesize'] + ) - except Exception as e: - print(e) + with open(file_info['filepath'], "rb") as fp: + self._upload(asset, fp) - return asset + except Exception as e: + print(e) def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, stats=False): """ @@ -270,7 +218,7 @@ def download(self, asset, download_folder, prefix=None, multi_part=False, concur Example:: - client.assets.download(asset, "~./Downloads") + client.download(asset, "~./Downloads") """ downloader = FrameioDownloader( asset, @@ -281,5 +229,5 @@ def download(self, asset, download_folder, prefix=None, multi_part=False, concur user_id=self.client.me['id'], stats=stats ) - + return downloader.download_handler() \ No newline at end of file diff --git a/frameioclient/service/comments.py b/frameioclient/services/comments.py similarity index 98% rename from frameioclient/service/comments.py rename to frameioclient/services/comments.py index d4e6adbc..a3fde975 100644 --- a/frameioclient/service/comments.py +++ b/frameioclient/services/comments.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class Comment(Service): def create(self, asset_id, **kwargs): diff --git a/frameioclient/service/links.py b/frameioclient/services/links.py similarity index 98% rename from frameioclient/service/links.py rename to frameioclient/services/links.py index 0f666e81..b617a36d 100644 --- a/frameioclient/service/links.py +++ b/frameioclient/services/links.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class ReviewLink(Service): def create(self, project_id, **kwargs): diff --git a/frameioclient/service/logs.py b/frameioclient/services/logs.py similarity index 91% rename from frameioclient/service/logs.py rename to frameioclient/services/logs.py index 2ae8bc30..3dba9af9 100644 --- a/frameioclient/service/logs.py +++ b/frameioclient/services/logs.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class AuditLogs(Service): def list(self, account_id): diff --git a/frameioclient/service/projects.py b/frameioclient/services/projects.py similarity index 98% rename from frameioclient/service/projects.py rename to 
frameioclient/services/projects.py index c1e52923..668dd7f4 100644 --- a/frameioclient/service/projects.py +++ b/frameioclient/services/projects.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class Project(Service): def create(self, team_id, **kwargs): diff --git a/frameioclient/service/teams.py b/frameioclient/services/teams.py similarity index 98% rename from frameioclient/service/teams.py rename to frameioclient/services/teams.py index 75c01137..c515c14a 100644 --- a/frameioclient/service/teams.py +++ b/frameioclient/services/teams.py @@ -1,5 +1,5 @@ import warnings -from .service import Service +from ..lib.service import Service class Team(Service): def create(self, account_id, **kwargs): diff --git a/frameioclient/service/users.py b/frameioclient/services/users.py similarity index 81% rename from frameioclient/service/users.py rename to frameioclient/services/users.py index 36daa322..61c0fb26 100644 --- a/frameioclient/service/users.py +++ b/frameioclient/services/users.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class User(Service): def get_me(self): diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..b0471b7f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools", "wheel"] +build-backend = "setuptools.build_meta:__legacy__" \ No newline at end of file diff --git a/scripts/__init__.py b/scripts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/scripts/benchmark/__init__.py b/scripts/benchmark/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py new file mode 100644 index 00000000..943a9b9e --- /dev/null +++ b/scripts/benchmark/download.py @@ -0,0 +1,63 @@ +import os +import sys + +from utils import timefunc +from frameioclient import FrameioClient +from frameioclient.lib.bandwidth import NetworkBandwidth + + +def download(asset_id='', destination='downloads', clean_up=True, size='small'): + token = os.getenv("FRAMEIO_TOKEN") + client = FrameioClient(token) + client.assets.get + asset_info = client.assets.get(asset_id) + download_info = client.assets.download(asset_info, destination, multi_part=True, concurrency=10, stats=True) + + if clean_up == True: + os.remove(download_info['destination']) + + return download_info + +def test_s3(): + asset_list = [] + stats = [] + for asset in asset_list: + report = download(asset_id=asset) + stats.append(report) + + return stats + +def test_cloudfront(): + asset_list = ['811baf7a-3248-4c7c-9d94-cc1c6c496a76','35f8ac33-a710-440e-8dcc-f98cfd90e0e5','e981f087-edbb-448d-baad-c8363b78f5ae'] + stats = [] + for asset in asset_list: + report = download(asset_id=asset) + stats.append(report) + + return stats + +def build_metric(s3_stats, cf_stats, baseline): + # Compare S3 against the baseline after calculating the average of the runs + # Compare CF against the baseline after calculating the average of the runs + # Compare S3 against CF and produce a number in Mbit/s {:.2j}? 
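One way the per-CDN comparison sketched in these comments could be computed from the stats dicts that download() collects above — a purely illustrative sketch, assuming each dict's 'speed' field is bytes per second as in the SDK's dl_info payload:

    def average_speed(stats):
        # stats is a list of dicts returned by download(..., stats=True)
        return sum(s['speed'] for s in stats) / len(stats) if stats else 0

    def compare_cdns(s3_stats, cf_stats):
        s3_avg = average_speed(s3_stats)
        cf_avg = average_speed(cf_stats)
        # Express the gap in Mbit/s: bytes/s -> bits/s -> megabits/s
        return {
            's3_avg_bytes_sec': s3_avg,
            'cf_avg_bytes_sec': cf_avg,
            'cf_minus_s3_mbit_sec': (cf_avg - s3_avg) * 8 / 1e6,
        }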
+ # Report the asset_id as well + # Report whether something was a HIT or a MISS in cache + # Report which CDN we hit + pass + +def run_benchmark(): + s3_stats = test_s3() + cf_stats = test_cloudfront() + build_metrics(s3_stats cf_stats, NetworkBandwidth) + + # ComparisonTest(self.user_id, transfer_stats, self.request_logs) + + +if __name__ == "__main__": + # Old Method: + # timefunc(benchmark_download, asset_id='811baf7a-3248-4c7c-9d94-cc1c6c496a76', destination='downloads', iterations=3) # large + # timefunc(benchmark_download, asset_id='35f8ac33-a710-440e-8dcc-f98cfd90e0e5', destination='downloads', iterations=1) # medium + # timefunc(benchmark_download, asset_id='e981f087-edbb-448d-baad-c8363b78f5ae', destination='downloads', iterations=5) # small + + # New method: + run_benchmark() \ No newline at end of file diff --git a/scripts/benchmark/upload.py b/scripts/benchmark/upload.py new file mode 100644 index 00000000..050349fe --- /dev/null +++ b/scripts/benchmark/upload.py @@ -0,0 +1,16 @@ +import os +import sys + +from .utils import timefunc +from frameioclient import FrameioClient + + +def benchmark_upload(source_file='', remote_destination=''): + token = os.getenv("FRAMEIO_TOKEN") + client = FrameioClient(token) + client.assets.upload(remote_destination, source_file) + + return True + +if __name__ == "__main__": + timefunc(benchmark_upload, source_file='', remote_destination='', iterations=1) # medium diff --git a/scripts/benchmark/utils.py b/scripts/benchmark/utils.py new file mode 100644 index 00000000..314b0387 --- /dev/null +++ b/scripts/benchmark/utils.py @@ -0,0 +1,25 @@ +import sys + +from timeit import default_timer as timer + +def timefunc(func, *args, **kwargs): + """Time a function. + + args: + iterations=3 + + Usage example: + timeit(myfunc, 1, b=2) + """ + try: + iterations = kwargs.pop('iterations') + except KeyError: + iterations = 3 + elapsed = sys.maxsize + for _ in range(iterations): + start = timer() + result = func(*args, **kwargs) + elapsed = min(timer() - start, elapsed) + print(('Best of {} {}(): {:.9f}'.format(iterations, func.__name__, elapsed))) + return result + diff --git a/setup.py b/setup.py index aab7c210..f456b2ff 100644 --- a/setup.py +++ b/setup.py @@ -27,11 +27,14 @@ def run(self): version=version, python_requires='>=2.7.16, <4', install_requires=[ + 'analytics-python', + 'enlighten', + 'futures; python_version == "2.7"', + 'importlib-metadata ~= 1.0 ; python_version < "3.8"', 'requests', + 'speedtest-cli', 'urllib3', 'xxhash', - 'importlib-metadata ~= 1.0 ; python_version < "3.8"', - 'futures; python_version == "2.7"' ], extras_require={ 'dev': [ diff --git a/tests/integration.py b/tests/integration.py index 42f7be82..7dd4bd07 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -20,6 +20,8 @@ slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL") ci_job_name = os.getenv("CIRCLE_JOB", default=None) +download_dir = 'downloads' + retries = 0 # Initialize the client @@ -45,10 +47,10 @@ def verify_local(client, dl_children): dl_items = dict() # Iterate over local directory and get filenames and hashes - dled_files = os.listdir('downloads') + dled_files = os.listdir(download_dir) for count, fn in enumerate(dled_files, start=1): print("{}/{} Generating hash for: {}".format(count, len(dled_files), fn)) - dl_file_path = os.path.join(os.path.abspath(os.path.curdir), 'downloads', fn) + dl_file_path = os.path.join(os.path.abspath(os.path.curdir), download_dir, fn) print("Path to downloaded file for hashing: {}".format(dl_file_path)) xxhash = 
Utils.calculate_hash(dl_file_path) xxhash_name = "{}_{}".format(fn, 'xxHash') @@ -77,9 +79,9 @@ def test_download(client, override=False): print("Testing download function...") if override: # Clearing download directory - shutil.rmtree('./downloads') + shutil.rmtree(download_dir) - if os.path.isdir('downloads'): + if os.path.isdir(download_dir): print("Local downloads folder detected...") asset_list = client.assets.get_children( download_asset_id, @@ -91,7 +93,7 @@ def test_download(client, override=False): verify_local(client, asset_list) return True - os.mkdir('downloads') + os.mkdir(download_dir) asset_list = client.assets.get_children( download_asset_id, @@ -105,7 +107,7 @@ def test_download(client, override=False): start_time = time.time() print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name'])) - client.assets.download(asset, 'downloads', multi_part=True, concurrency=20) + client.assets.download(asset, download_dir, multi_part=True, concurrency=10) download_time = time.time() - start_time download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) @@ -139,11 +141,11 @@ def test_upload(client): print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name'])) # Upload all the files we downloaded earlier - dled_files = os.listdir('downloads') + dled_files = os.listdir(download_dir) for count, fn in enumerate(dled_files, start=1): start_time = time.time() - ul_abs_path = os.path.join(os.curdir, 'downloads', fn) + ul_abs_path = os.path.join(os.curdir, download_dir, fn) filesize = os.path.getsize(ul_abs_path) filename = os.path.basename(ul_abs_path) From 29dc0ce1b5b4ee51b3fdb00c4c0a2206abb88f27 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 19:24:58 -0700 Subject: [PATCH 10/99] Fix all the merge conflict issues --- frameioclient/client.py | 6 ++---- frameioclient/lib/download.py | 3 ++- frameioclient/{service => services}/helpers.py | 2 +- frameioclient/{service => services}/search.py | 2 +- scripts/benchmark/download.py | 3 ++- 5 files changed, 8 insertions(+), 8 deletions(-) rename frameioclient/{service => services}/helpers.py (95%) rename frameioclient/{service => services}/search.py (98%) diff --git a/frameioclient/client.py b/frameioclient/client.py index 74aefa9c..e66c4b8f 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -1,7 +1,5 @@ from .lib import ( - Utils, APIClient, - AWSClient, Telemetry, ClientVersion, ClientVersion, @@ -9,7 +7,7 @@ ) class FrameioClient(APIClient, object): - def __init__(self, token, host): + def __init__(self, token, host='https://api.frame.io'): super().__init__(token, host) @property @@ -74,5 +72,5 @@ def teams(self): @property def helpers(self): - from .service import FrameioHelpers + from .services import FrameioHelpers return FrameioHelpers(self) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 384404c8..8190c669 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -19,12 +19,13 @@ ) class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, progress=True, user_id=None, stats=False): + def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, progress=True, user_id=None, stats=False, replace=False): self.user_id = user_id self.multi_part = multi_part self.asset = asset self.asset_type = None self.download_folder = download_folder + self.replace = replace self.resolution_map = dict() self.destination = 
None self.watermarked = asset['is_session_watermarked'] # Default is probably false diff --git a/frameioclient/service/helpers.py b/frameioclient/services/helpers.py similarity index 95% rename from frameioclient/service/helpers.py rename to frameioclient/services/helpers.py index adaa5fba..c7b3fc22 100644 --- a/frameioclient/service/helpers.py +++ b/frameioclient/services/helpers.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class FrameioHelpers(Service): def get_updated_assets(self, account_id, project_id, timestamp): diff --git a/frameioclient/service/search.py b/frameioclient/services/search.py similarity index 98% rename from frameioclient/service/search.py rename to frameioclient/services/search.py index 1c0df40a..7392e592 100644 --- a/frameioclient/service/search.py +++ b/frameioclient/services/search.py @@ -1,4 +1,4 @@ -from .service import Service +from ..lib.service import Service class Search(Service): def library(self, query, type=None, project_id=None, account_id=None, team_id=None, uploader=None, sort=None, filter=None, page_size=10, page=1): diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py index 943a9b9e..480bf1cb 100644 --- a/scripts/benchmark/download.py +++ b/scripts/benchmark/download.py @@ -43,12 +43,13 @@ def build_metric(s3_stats, cf_stats, baseline): # Report the asset_id as well # Report whether something was a HIT or a MISS in cache # Report which CDN we hit + print("Thing") pass def run_benchmark(): s3_stats = test_s3() cf_stats = test_cloudfront() - build_metrics(s3_stats cf_stats, NetworkBandwidth) + # build_metrics(s3_stats, cf_stats, NetworkBandwidth) # ComparisonTest(self.user_id, transfer_stats, self.request_logs) From 32efa1d5f8e54960f037267ee73ca7104885ceea Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 18:37:37 -0700 Subject: [PATCH 11/99] Add functions for fetching tree and downloading entire projects MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add helper functions to simplify a couple of tasks Bump version: 1.1.0 → 1.2.0 Fix FrameioHelpers class Fix FrameioHelpers import Fix __init__ imports Finish fixing helpers... 
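The replace flag added to FrameioDownloader above can be exercised by constructing the downloader directly; a hedged sketch, where the token and asset id stand in for real values and the asset comes from a prior client.assets.get() call:

    from frameioclient import FrameioClient
    from frameioclient.lib import FrameioDownloader

    client = FrameioClient('TOKEN')
    asset = client.assets.get('e981f087-edbb-448d-baad-c8363b78f5ae')

    # Overwrite any existing local copy instead of tripping on the file stub
    downloader = FrameioDownloader(asset, 'downloads', prefix=None, multi_part=True, replace=True)
    downloader.download_handler()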
WIP add project tree function Required for the sake of the download function Working recursive project tree + download Fix and clean-up --- .gitignore | 1 + examples/comment_scraper.py | 2 +- examples/download_project.py | 47 +++++ examples/project_tree.py | 27 +++ frameioclient/client.py | 3 +- frameioclient/lib/__init__.py | 3 +- frameioclient/lib/constants.py | 32 ++++ frameioclient/lib/download.py | 14 +- frameioclient/service/assets.py | 302 ++++++++++++++++++++++++++++++ frameioclient/service/helpers.py | 151 +++++++++++++++ frameioclient/service/projects.py | 145 ++++++++++++++ frameioclient/service/service.py | 5 + 12 files changed, 723 insertions(+), 9 deletions(-) create mode 100644 examples/download_project.py create mode 100644 examples/project_tree.py create mode 100644 frameioclient/lib/constants.py create mode 100644 frameioclient/service/assets.py create mode 100644 frameioclient/service/helpers.py create mode 100644 frameioclient/service/projects.py create mode 100644 frameioclient/service/service.py diff --git a/.gitignore b/.gitignore index 6b709839..f1141588 100644 --- a/.gitignore +++ b/.gitignore @@ -108,3 +108,4 @@ venv.bak/ Pipfile Pipfile.lock .vscode/launch.json +.vscode/settings.json diff --git a/examples/comment_scraper.py b/examples/comment_scraper.py index 182db0e4..3cc33979 100644 --- a/examples/comment_scraper.py +++ b/examples/comment_scraper.py @@ -103,4 +103,4 @@ def write_comments_csv(c_list): comments_list = build_comments_list(client, ROOT_ASSET_ID, comments) # Write the comments to comments.csv - write_comments_csv(comments_list) \ No newline at end of file + write_comments_csv(comments_list) diff --git a/examples/download_project.py b/examples/download_project.py new file mode 100644 index 00000000..7357270c --- /dev/null +++ b/examples/download_project.py @@ -0,0 +1,47 @@ +from frameioclient.lib.utils import Utils +import os +from pathlib import Path + +import pdb +from time import time,sleep +from pprint import pprint +from frameioclient import FrameioClient + +def get_folder_size(path='.'): + total = 0 + for entry in os.scandir(path): + if entry.is_file(): + total += entry.stat().st_size + elif entry.is_dir(): + total += get_folder_size(entry.path) + return total + +def demo_project_download(project_id): + TOKEN = os.getenv("FRAMEIO_TOKEN") + client = FrameioClient(TOKEN) + + start_time = time() + download_dir = '/Volumes/Jeff-EXT/Python Transfer Test' + item_count = client.projects.download(project_id, destination_directory=download_dir) + + # item_count = client.projects.download(project_id, destination_directory='/Users/jeff/Temp/Transfer vs Python SDK/Python SDK') + + end_time = time() + elapsed = round((end_time - start_time), 2) + + + folder_size = get_folder_size(download_dir) + # pdb.set_trace() + + print(f"Found {item_count} items") + print(f"Took {elapsed} second to download {Utils.format_bytes(folder_size, type='size')} for project: {client.projects.get(project_id)['name']}") + print("\n") + +if __name__ == "__main__": + # project_id = '2dfb6ce6-90d8-4994-881f-f02cd94b1c81' + # project_id='e2845993-7330-54c6-8b77-eafbd5144eac' + # project_id = '5d3ff176-ab1f-4c0b-a027-abe3d2a960e3' + project_id = 'ba1791e8-bf1e-46cb-bcad-5e4bb6431a08' + demo_project_download(project_id) + +# Took 443.84 second to download 12.43 GB to USB HDD for project: HersheyPark Summer Campaign using Python SDK \ No newline at end of file diff --git a/examples/project_tree.py b/examples/project_tree.py new file mode 100644 index 00000000..0f4f6450 --- /dev/null +++ 
b/examples/project_tree.py @@ -0,0 +1,27 @@ +import os + +from time import time +from pprint import pprint +from frameioclient import FrameioClient + +def demo_folder_tree(project_id): + TOKEN = os.getenv("FRAMEIO_TOKEN") + client = FrameioClient(TOKEN) + + start_time = time() + tree = client.helpers.build_project_tree(project_id, slim=True) + + end_time = time() + elapsed = round((end_time - start_time), 2) + + item_count = len(tree) + pprint(tree) + + print(f"Found {item_count} items") + print(f"Took {elapsed} second to fetch the slim payload for project: {project_id}") + print("\n") + +if __name__ == "__main__": + # project_id = 'ba1791e8-bf1e-46cb-bcad-5e4bb6431a08' + project_id = '2dfb6ce6-90d8-4994-881f-f02cd94b1c81' + demo_folder_tree(project_id) diff --git a/frameioclient/client.py b/frameioclient/client.py index e66c4b8f..19f51a06 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -3,7 +3,8 @@ Telemetry, ClientVersion, ClientVersion, - FrameioDownloader + FrameioDownloader, + PresentationException ) class FrameioClient(APIClient, object): diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index fa270c2b..64d35e8d 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -1,3 +1,4 @@ +from .constants import * from .exceptions import * from .logger import SDKLogger from .telemetry import Telemetry @@ -5,4 +6,4 @@ from .upload import FrameioUploader from .download import FrameioDownloader from .transport import AWSClient, APIClient -from .utils import Utils, PaginatedResponse, KB, MB \ No newline at end of file +from .utils import Utils, PaginatedResponse, KB, MB diff --git a/frameioclient/lib/constants.py b/frameioclient/lib/constants.py new file mode 100644 index 00000000..fbdfcc88 --- /dev/null +++ b/frameioclient/lib/constants.py @@ -0,0 +1,32 @@ +### Asset Fields ### + +asset_excludes = { + "only_fields": [ + # "a.downloads", + "a.name", + "a.filesize", + "u.name", + "a.is_session_watermarked", + "a.item_count", + "a.creator.name" + "a.creator.id", + "a.inserted_at", + "a.original", + "a.upload_completed_at", + ], + "excluded_fields": [ + "a.checksums", + "a.h264_1080_best", + "a.source" + ], + "drop_includes": [ + "a.trancode_statuses", + "a.transcodes", + "a.source", + "a.checksums" + ], + "hard_drop_fields": [ + "a.transcodes", + "a.source" + ] +} diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 8190c669..e585a700 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -19,8 +19,7 @@ ) class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, progress=True, user_id=None, stats=False, replace=False): - self.user_id = user_id + def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, replace=False): self.multi_part = multi_part self.asset = asset self.asset_type = None @@ -85,10 +84,13 @@ def _create_file_stub(self): fp = open(self.destination, "w") # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space fp.close() - - except Exception as e: - raise e - + except FileExistsError as e: + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + print(e) + raise e return True def _get_path(self): diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py new file mode 100644 index 00000000..5fcf2888 --- /dev/null +++ b/frameioclient/service/assets.py 
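The stub-and-replace behaviour added to _create_file_stub() above amounts to: create an empty destination file so ranged writes can seek into it, and only clear an existing file when replace is set. A compressed sketch of that logic, using an explicit existence check rather than the exception path:

    import os

    def create_file_stub(destination, replace=False):
        if os.path.exists(destination):
            if not replace:
                raise FileExistsError(destination)
            os.remove(destination)  # clear the old copy before re-downloading
        open(destination, "w").close()  # zero-byte stub; no disk pre-allocation
        return True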
@@ -0,0 +1,302 @@ +import os +import mimetypes + +from .service import Service +from .projects import Project + +from ..lib import FrameioUploader, FrameioDownloader, constants + +class Asset(Service): + def _build_asset_info(self, filepath): + full_path = os.path.abspath(filepath) + + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0] + } + + return file_info + + def get(self, asset_id): + """ + Get an asset by id. + + :Args: + asset_id (string): The asset id. + """ + endpoint = '/assets/{}'.format(asset_id) + return self.client._api_call('get', endpoint) + + def get_children(self, asset_id, include=[], slim=False, **kwargs): + """ + Get a folder. + + :Args: + asset_id (string): The asset id. + + :Kwargs: + includes (list): List of includes you would like to add. + + Example:: + + client.assets.get_children( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + include=['review_links','cover_asset','creator','presentation'] + ) + """ + endpoint = '/assets/{}/children'.format(asset_id) + + if slim == True: + query_params = '' + + if len(include) > 0: + query_params += '?include={}'.format(include.join(',')) + else: + # Always include children + query_params += '?' + 'include=children' + + # Only fields + query_params += '&' + 'only_fields=' + ','.join(constants.asset_excludes['only_fields']) + + # # Drop includes + query_params += '&' + 'drop_includes=' + ','.join(constants.asset_excludes['drop_includes']) + + # # Hard drop fields + query_params += '&' + 'hard_drop_fields=' + ','.join(constants.asset_excludes['hard_drop_fields']) + + # Excluded fields + # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) + + # # Sort by inserted_at + # query_params += '&' + 'sort=-inserted_at' + + endpoint += query_params + + # print("Final URL", endpoint) + + return self.client._api_call('get', endpoint, kwargs) + + def create(self, parent_asset_id, **kwargs): + """ + Create an asset. + + :Args: + parent_asset_id (string): The parent asset id. + :Kwargs: + (optional) kwargs: additional request parameters. + + Example:: + + client.assets.create( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + filetype="video/mp4", + filesize=123456 + ) + """ + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload=kwargs) + + def create_folder(self, parent_asset_id, name="New Folder"): + """ + Create a new folder. + + :Args: + parent_asset_id (string): The parent asset id. + name (string): The name of the new folder. + + Example:: + + client.assets.create_folder( + parent_asset_id="123abc", + name="ExampleFile.mp4", + ) + """ + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) + + def from_url(self, parent_asset_id, name, url): + """ + Create an asset from a URL. + + :Args: + parent_asset_id (string): The parent asset id. + name (string): The filename. + url (string): The remote URL. 
+ + Example:: + + client.assets.from_url( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + url="https://" + ) + """ + + payload = { + "name": name, + "type": "file", + "source": { + "url": url + } + } + + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload=payload) + + def update(self, asset_id, **kwargs): + """ + Updates an asset + + :Args: + asset_id (string): the asset's id + :Kwargs: + the fields to update + + Example:: + client.assets.update("adeffee123342", name="updated_filename.mp4") + """ + endpoint = '/assets/{}'.format(asset_id) + return self.client._api_call('put', endpoint, kwargs) + + def copy(self, destination_folder_id, **kwargs): + """ + Copy an asset + + :Args: + destination_folder_id (string): The id of the folder you want to copy into. + :Kwargs: + id (string): The id of the asset you want to copy. + + Example:: + client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") + """ + endpoint = '/assets/{}/copy'.format(destination_folder_id) + return self.client._api_call('post', endpoint, kwargs) + + def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): + """Bulk copy assets + + :Args: + destination_folder_id (string): The id of the folder you want to copy into. + :Kwargs: + asset_list (list): A list of the asset IDs you want to copy. + copy_comments (boolean): Whether or not to copy comments: True or False. + + Example:: + client.assets.bulk_copy("adeffee123342", asset_list=["7ee008c5-49a2-f8b5-997d-8b64de153c30", \ + "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) + """ + + payload = {"batch": list()} + + if copy_comments: + payload['copy_comments'] = "all" + + for asset in asset_list: + payload['batch'].append({"id": asset}) + + endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) + return self.client._api_call('post', endpoint, payload) + + def delete(self, asset_id): + """ + Delete an asset + + :Args: + asset_id (string): the asset's id + """ + endpoint = '/assets/{}'.format(asset_id) + return self.client._api_call('delete', endpoint) + + def _upload(self, asset, file): + """ + Upload an asset. The method will exit once the file is uploaded. + + :Args: + asset (object): The asset object. + file (file): The file to upload. + + Example:: + client._upload(asset, open('example.mp4')) + """ + + uploader = FrameioUploader(asset, file) + uploader.upload() + + # def upload_folder(sFelf, destination_id, folderpath): + # try: + # if os.path.isdir(folderpath): + # # Good it's a directory, we can keep going + # pass + + # except OSError: + # if not os.path.exists(folderpath): + # sys.exit("Folder doesn't exist, exiting...") + + def upload(self, destination_id, filepath, asset=None): + """ + Upload a file. The method will exit once the file is downloaded. + + :Args: + destination_id (uuid): The destination Project or Folder ID. + filepath (string): The locaiton of the file on your local filesystem \ + that you want to upload. 
+ + Example:: + + client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") + """ + + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload + + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)['id'] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] + finally: + file_info = self._build_asset_info(filepath) + + if not asset: + try: + asset = self.create(folder_id, + type="file", + name=file_info['filename'], + filetype=file_info['mimetype'], + filesize=file_info['filesize'] + ) + + except Exception as e: + print(e) + + try: + with open(file_info['filepath'], "rb") as fp: + self._upload(asset, fp) + + except Exception as e: + print(e) + + return asset + + def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, replace=False): + """ + Download an asset. The method will exit once the file is downloaded. + + :Args: + asset (object): The asset object. + download_folder (path): The location to download the file to. + + Example:: + + client.assets.download(asset, "~./Downloads") + """ + downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, concurrency) + return downloader.download_handler() diff --git a/frameioclient/service/helpers.py b/frameioclient/service/helpers.py new file mode 100644 index 00000000..2b0f57f7 --- /dev/null +++ b/frameioclient/service/helpers.py @@ -0,0 +1,151 @@ +import os + +from pathlib import Path +from time import time, sleep + +from .service import Service +from ..lib.utils import Utils + +from copy import deepcopy +from typing import List +from pprint import pprint + +class FrameioHelpers(Service): + def get_updated_assets(self, account_id, project_id, timestamp): + """ + Get assets added or updated since timestamp. + + :Args: + account_id (string): The account id. + project_id (string): The project id. + timestamp (string): ISO 8601 UTC format. + (datetime.now(timezone.utc).isoformat()) + """ + payload = { + "account_id": account_id, + "page": 1, + "page_size": 50, + "include": "children", + "sort": "-inserted_at", + "filter": { + "project_id": { + "op": "eq", + "value": project_id + }, + "updated_at": { + "op": "gte", + "value": timestamp + } + } + } + endpoint = '/search/library' + return self.client._api_call('post', endpoint, payload=payload) + + def get_assets_recursively(self, asset_id, slim=True): + assets = self.client.assets.get_children(asset_id, slim=slim) + print("Number of assets at top level", len(assets)) + + for asset in assets: + # try: + print(f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}") + # except KeyError: + # print("No children found") + + total_bytes = 0 + + if asset['_type'] == "file": + # Don't do nothing, it's a file! 
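Elsewhere in this helper, get_updated_assets() expects its timestamp as an ISO 8601 UTC string; for instance (client is an authenticated FrameioClient, and the account/project ids below are placeholders):

    from datetime import datetime, timezone

    since = datetime(2021, 8, 1, tzinfo=timezone.utc).isoformat()
    recent = client.helpers.get_updated_assets(
        account_id='aaaa-1111',
        project_id='bbbb-2222',
        timestamp=since,
    )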
+ continue + + if asset['_type'] == "verson_stack": + print("Grabbing top item from version stack") + versions = self.client.assets.get_children(asset['id'], slim=True) + asset = versions[0] # re-assign on purpose + continue + + # We only get the first three items when we use "include=children" + if asset['_type'] == "folder": + # try: + if asset['item_count'] > 3: + # Recursively fetch the contents of the folder because we have to + asset['children'] = self.get_assets_recursively(asset['id'], slim) + print("Grabbed more items for this sub dir") + + else: + for i in asset['children']: + # If a folder is found, we still need to recursively search it + if i['_type'] == "folder": + i['children'] = self.get_assets_recursively(i['id'], slim) + + # except KeyError as e: + # # No children found in this folder, move on + # print(e) + # continue + + return assets + + def build_project_tree(self, project_id, slim=True): + # if slim == True: + # self.client.assets.get_children() + + # Get project info + project = self.client.projects.get(project_id) + + # Get children + initial_tree = self.get_assets_recursively(project['root_asset_id'], slim) + + return initial_tree + + def download_project(self, project_id, destination): + project = self.client.projects.get(project_id) + initial_tree = self.get_assets_recursively(project['root_asset_id']) + self.recursive_downloader(destination, initial_tree) + # pprint(initial_tree) + # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") + + def recursive_downloader(self, directory, asset, count=0): + # TODO resolve this clusterfuck of downloads + print(f"Directory {directory}") + + try: + # First check to see if we need to make the directory + target_directory = os.path.join(os.path.curdir, directory) + if not os.path.isdir(target_directory): + os.mkdir(os.path.abspath(target_directory)) + + except Exception as e: + target_directory = os.path.abspath(os.path.join(os.path.curdir, directory)) + print(e) + + if type(asset) == list: + for i in asset: + self.recursive_downloader(directory, i) + + else: + try: + if asset['_type'] == 'folder': + if len(asset['children']) >= 0: + count += 1 + # Create the new folder that these items will go in before it's too late + if not os.path.exists(os.path.join(target_directory, asset['name'])): + print("Path doesn't exist") + new_path = Path(target_directory, str(asset['name']).replace('/', '-')) + print(new_path.absolute) + print("Making new directory") + Path.mkdir(new_path) + sleep(2) + + # Pass along the new directory they'll be living in and the children + self.recursive_downloader(f"{directory}/{str(asset['name']).replace('/', '-')}", asset['children']) + + if asset['_type'] == 'file': + count += 1 + return self.client.assets.download(asset, target_directory, multi_part=True, concurrency=10) + + except Exception as e: + print(e) + + return True + +if __name__ == "__main__": + pass diff --git a/frameioclient/service/projects.py b/frameioclient/service/projects.py new file mode 100644 index 00000000..993200f2 --- /dev/null +++ b/frameioclient/service/projects.py @@ -0,0 +1,145 @@ +from .service import Service +from .helpers import FrameioHelpers + +class Project(Service): + def create(self, team_id, **kwargs): + """ + Create a project. + + :Args: + team_id (string): The team id. + :Kwargs: + (optional) kwargs: additional request parameters. 
+ + Example:: + client.projects.create( + team_id="123", + name="My Awesome Project" + ) + """ + endpoint = '/teams/{}/projects'.format(team_id) + return self.client._api_call('post', endpoint, payload=kwargs) + + def get(self, project_id): + """ + Get an individual project + + :Args: + project_id (string): The project's id + + Example:: + client.projects.get( + project_id="123" + ) + + """ + endpoint = '/projects/{}'.format(project_id) + return self.client._api_call('get', endpoint) + + def tree(self, project_id, slim): + """ + Fetch a tree representation of all files/folders in a project. + + :Args: + project_id (string): The project's id + slim (bool): If true, fetch only the minimum information for the following: + filename, + filesize, + thumbnail, + creator_id, + inserted_at (date created), + path (represented like a filesystem) + + Example:: + client.projects.get( + project_id="123", + slim=True + ) + """ + # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) + # return self.client._api_call('get', endpoint) + + return FrameioHelpers(self.client).build_project_tree(project_id, slim) + + def download(self, project_id, destination_directory='downloads'): + """ + Download the provided project to disk. + + :Args: + project_id (uuid): The project's id. + destination_directory (string): Directory on disk that you want to download the project to. + + Example:: + client.projects.download( + project_id="123", + destination_directory="./downloads" + ) + """ + return FrameioHelpers(self.client).download_project(project_id, destination=destination_directory) + + def get_collaborators(self, project_id, **kwargs): + """ + Get collaborators for a project + + :Args: + project_id (uuid): The project's id + + Example:: + client.projects.get_collaborators( + project_id="123" + ) + + """ + endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) + return self.client._api_call('get', endpoint, kwargs) + + def get_pending_collaborators(self, project_id, **kwargs): + """ + Get pending collaborators for a project + + :Args: + project_id (uuid): The project's id + + Example:: + client.projects.get_pending_collaborators( + project_id="123" + ) + + """ + endpoint = "/projects/{}/pending_collaborators".format(project_id) + return self.client._api_call('get', endpoint, kwargs) + + def add_collaborator(self, project_id, email): + """ + Add Collaborator to a Project Collaborator. + + :Args: + project_id (uuid): The project id + email (string): Email user's e-mail address + + Example:: + client.projects.add_collaborator( + project_id="123", + email="janedoe@frame.io", + ) + """ + payload = {"email": email} + endpoint = '/projects/{}/collaborators'.format(project_id) + return self._api_call('post', endpoint, payload=payload) + + def remove_collaborator(self, project_id, email): + """ + Remove Collaborator from Project. + + :Args: + project_id (uuid): The Project ID. 
+ email (string): The user's e-mail address + + Example:: + client.projects.remove_collaborator( + project_id="123", + email="janedoe@frame.io" + ) + """ + endpoint = '/projects/{}/collaborators/_?email={}'.format(project_id, email) + return self._api_call('delete', endpoint) diff --git a/frameioclient/service/service.py b/frameioclient/service/service.py new file mode 100644 index 00000000..a2ffa123 --- /dev/null +++ b/frameioclient/service/service.py @@ -0,0 +1,5 @@ +from ..client import FrameioClient + +class Service(object): + def __init__(self, client: FrameioClient): + self.client = client From 4c23e4c168b2107ea0a122e028423f15869ecf51 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 19:40:32 -0700 Subject: [PATCH 12/99] Fix more issues post-merge --- frameioclient/lib/download.py | 9 +- frameioclient/lib/service.py | 3 +- frameioclient/lib/transport.py | 3 +- frameioclient/service/assets.py | 302 ----------------------------- frameioclient/service/helpers.py | 151 --------------- frameioclient/service/projects.py | 145 -------------- frameioclient/service/service.py | 5 - frameioclient/services/assets.py | 131 ++++++++++--- frameioclient/services/helpers.py | 119 ++++++++++++ frameioclient/services/projects.py | 44 ++++- 10 files changed, 270 insertions(+), 642 deletions(-) delete mode 100644 frameioclient/service/assets.py delete mode 100644 frameioclient/service/helpers.py delete mode 100644 frameioclient/service/projects.py delete mode 100644 frameioclient/service/service.py diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index e585a700..142c4970 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -19,7 +19,7 @@ ) class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency=5, replace=False): + def __init__(self, asset, download_folder, prefix, multi_part=False, replace=False): self.multi_part = multi_part self.asset = asset self.asset_type = None @@ -29,21 +29,18 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, concurrency self.destination = None self.watermarked = asset['is_session_watermarked'] # Default is probably false self.file_size = asset["filesize"] - self.concurrency = concurrency self.futures = list() self.checksum = None self.original_checksum = None self.chunk_size = (25 * 1024 * 1024) # 25 MB chunk size self.chunks = math.ceil(self.file_size/self.chunk_size) self.prefix = prefix - self.stats = stats - self.progress = progress self.bytes_started = 0 self.bytes_completed = 0 self.in_progress = 0 + self.session = AWSClient()._get_session(auth=None) self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() - self.session = AWSClient()._get_session(auth=None) self._evaluate_asset() self._get_path() @@ -235,7 +232,7 @@ def multi_part_download(self, url): status.update(stage='Downloading', color='green') - with concurrent.futures.ThreadPoolExecutor(max_workers=self.concurrency) as executor: + with concurrent.futures.ThreadPoolExecutor(max_workers=self.client.concurrency) as executor: for i in range(int(self.chunks)): # Increment by the iterable + 1 so we don't mutiply by zero out_byte = offset * (i+1) diff --git a/frameioclient/lib/service.py b/frameioclient/lib/service.py index abc9a898..e168e121 100644 --- a/frameioclient/lib/service.py +++ b/frameioclient/lib/service.py @@ -1,7 +1,8 @@ +from ..client import FrameioClient from ..lib.bandwidth import NetworkBandwidth class Service(object): - def 
__init__(self, client): + def __init__(self, client: FrameioClient): self.client = client self.concurrency = 10 self.bandwidth = NetworkBandwidth() diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 9dbfa3fa..89628c32 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -115,7 +115,8 @@ class AWSClient(HTTPClient, object): def __init__(self, concurrency=None, progress=True): super().__init__() # Initialize via inheritance self.progress = progress - if concurrency is not None: + # Ensure this is a valid number before assigning + if concurrency is not None and type(concurrency) == int and concurrency > 0: self.concurrency = concurrency else: self.concurrency = self.optimize_concurrency() diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py deleted file mode 100644 index 5fcf2888..00000000 --- a/frameioclient/service/assets.py +++ /dev/null @@ -1,302 +0,0 @@ -import os -import mimetypes - -from .service import Service -from .projects import Project - -from ..lib import FrameioUploader, FrameioDownloader, constants - -class Asset(Service): - def _build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - - def get(self, asset_id): - """ - Get an asset by id. - - :Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('get', endpoint) - - def get_children(self, asset_id, include=[], slim=False, **kwargs): - """ - Get a folder. - - :Args: - asset_id (string): The asset id. - - :Kwargs: - includes (list): List of includes you would like to add. - - Example:: - - client.assets.get_children( - asset_id='1231-12414-afasfaf-aklsajflaksjfla', - include=['review_links','cover_asset','creator','presentation'] - ) - """ - endpoint = '/assets/{}/children'.format(asset_id) - - if slim == True: - query_params = '' - - if len(include) > 0: - query_params += '?include={}'.format(include.join(',')) - else: - # Always include children - query_params += '?' + 'include=children' - - # Only fields - query_params += '&' + 'only_fields=' + ','.join(constants.asset_excludes['only_fields']) - - # # Drop includes - query_params += '&' + 'drop_includes=' + ','.join(constants.asset_excludes['drop_includes']) - - # # Hard drop fields - query_params += '&' + 'hard_drop_fields=' + ','.join(constants.asset_excludes['hard_drop_fields']) - - # Excluded fields - # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) - - # # Sort by inserted_at - # query_params += '&' + 'sort=-inserted_at' - - endpoint += query_params - - # print("Final URL", endpoint) - - return self.client._api_call('get', endpoint, kwargs) - - def create(self, parent_asset_id, **kwargs): - """ - Create an asset. - - :Args: - parent_asset_id (string): The parent asset id. - :Kwargs: - (optional) kwargs: additional request parameters. - - Example:: - - client.assets.create( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - filetype="video/mp4", - filesize=123456 - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def create_folder(self, parent_asset_id, name="New Folder"): - """ - Create a new folder. 
- - :Args: - parent_asset_id (string): The parent asset id. - name (string): The name of the new folder. - - Example:: - - client.assets.create_folder( - parent_asset_id="123abc", - name="ExampleFile.mp4", - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) - - def from_url(self, parent_asset_id, name, url): - """ - Create an asset from a URL. - - :Args: - parent_asset_id (string): The parent asset id. - name (string): The filename. - url (string): The remote URL. - - Example:: - - client.assets.from_url( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - url="https://" - ) - """ - - payload = { - "name": name, - "type": "file", - "source": { - "url": url - } - } - - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=payload) - - def update(self, asset_id, **kwargs): - """ - Updates an asset - - :Args: - asset_id (string): the asset's id - :Kwargs: - the fields to update - - Example:: - client.assets.update("adeffee123342", name="updated_filename.mp4") - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('put', endpoint, kwargs) - - def copy(self, destination_folder_id, **kwargs): - """ - Copy an asset - - :Args: - destination_folder_id (string): The id of the folder you want to copy into. - :Kwargs: - id (string): The id of the asset you want to copy. - - Example:: - client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") - """ - endpoint = '/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, kwargs) - - def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): - """Bulk copy assets - - :Args: - destination_folder_id (string): The id of the folder you want to copy into. - :Kwargs: - asset_list (list): A list of the asset IDs you want to copy. - copy_comments (boolean): Whether or not to copy comments: True or False. - - Example:: - client.assets.bulk_copy("adeffee123342", asset_list=["7ee008c5-49a2-f8b5-997d-8b64de153c30", \ - "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) - """ - - payload = {"batch": list()} - - if copy_comments: - payload['copy_comments'] = "all" - - for asset in asset_list: - payload['batch'].append({"id": asset}) - - endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, payload) - - def delete(self, asset_id): - """ - Delete an asset - - :Args: - asset_id (string): the asset's id - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('delete', endpoint) - - def _upload(self, asset, file): - """ - Upload an asset. The method will exit once the file is uploaded. - - :Args: - asset (object): The asset object. - file (file): The file to upload. - - Example:: - client._upload(asset, open('example.mp4')) - """ - - uploader = FrameioUploader(asset, file) - uploader.upload() - - # def upload_folder(sFelf, destination_id, folderpath): - # try: - # if os.path.isdir(folderpath): - # # Good it's a directory, we can keep going - # pass - - # except OSError: - # if not os.path.exists(folderpath): - # sys.exit("Folder doesn't exist, exiting...") - - def upload(self, destination_id, filepath, asset=None): - """ - Upload a file. The method will exit once the file is downloaded. - - :Args: - destination_id (uuid): The destination Project or Folder ID. 
- filepath (string): The locaiton of the file on your local filesystem \ - that you want to upload. - - Example:: - - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ - - # Check if destination is a project or folder - # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided - # Then we start our upload - - try: - # First try to grab it as a folder - folder_id = self.get(destination_id)['id'] - except Exception as e: - # Then try to grab it as a project - folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] - finally: - file_info = self._build_asset_info(filepath) - - if not asset: - try: - asset = self.create(folder_id, - type="file", - name=file_info['filename'], - filetype=file_info['mimetype'], - filesize=file_info['filesize'] - ) - - except Exception as e: - print(e) - - try: - with open(file_info['filepath'], "rb") as fp: - self._upload(asset, fp) - - except Exception as e: - print(e) - - return asset - - def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, replace=False): - """ - Download an asset. The method will exit once the file is downloaded. - - :Args: - asset (object): The asset object. - download_folder (path): The location to download the file to. - - Example:: - - client.assets.download(asset, "~./Downloads") - """ - downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, concurrency) - return downloader.download_handler() diff --git a/frameioclient/service/helpers.py b/frameioclient/service/helpers.py deleted file mode 100644 index 2b0f57f7..00000000 --- a/frameioclient/service/helpers.py +++ /dev/null @@ -1,151 +0,0 @@ -import os - -from pathlib import Path -from time import time, sleep - -from .service import Service -from ..lib.utils import Utils - -from copy import deepcopy -from typing import List -from pprint import pprint - -class FrameioHelpers(Service): - def get_updated_assets(self, account_id, project_id, timestamp): - """ - Get assets added or updated since timestamp. - - :Args: - account_id (string): The account id. - project_id (string): The project id. - timestamp (string): ISO 8601 UTC format. - (datetime.now(timezone.utc).isoformat()) - """ - payload = { - "account_id": account_id, - "page": 1, - "page_size": 50, - "include": "children", - "sort": "-inserted_at", - "filter": { - "project_id": { - "op": "eq", - "value": project_id - }, - "updated_at": { - "op": "gte", - "value": timestamp - } - } - } - endpoint = '/search/library' - return self.client._api_call('post', endpoint, payload=payload) - - def get_assets_recursively(self, asset_id, slim=True): - assets = self.client.assets.get_children(asset_id, slim=slim) - print("Number of assets at top level", len(assets)) - - for asset in assets: - # try: - print(f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}") - # except KeyError: - # print("No children found") - - total_bytes = 0 - - if asset['_type'] == "file": - # Don't do nothing, it's a file! 
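The `upload()` method above resolves its destination by first treating the id as a folder and then falling back to a project; note that the fallback calls `Project(self.client).get_project(...)`, while the projects service in this file set only defines `get()`. A hedged sketch of the same resolution using the calls that are shown in this patch (the ids and file path are placeholders):

```python
import os

from frameioclient import FrameioClient

client = FrameioClient(os.getenv("FRAMEIO_TOKEN"))
destination_id = "project-or-folder-id"  # placeholder

try:
    # If the id resolves as an asset, it's a folder and can be uploaded into directly.
    folder_id = client.assets.get(destination_id)["id"]
except Exception:
    # Otherwise treat it as a project and upload into its root asset.
    folder_id = client.projects.get(destination_id)["root_asset_id"]

asset = client.assets.upload(folder_id, "./file.mov")
```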
- continue - - if asset['_type'] == "verson_stack": - print("Grabbing top item from version stack") - versions = self.client.assets.get_children(asset['id'], slim=True) - asset = versions[0] # re-assign on purpose - continue - - # We only get the first three items when we use "include=children" - if asset['_type'] == "folder": - # try: - if asset['item_count'] > 3: - # Recursively fetch the contents of the folder because we have to - asset['children'] = self.get_assets_recursively(asset['id'], slim) - print("Grabbed more items for this sub dir") - - else: - for i in asset['children']: - # If a folder is found, we still need to recursively search it - if i['_type'] == "folder": - i['children'] = self.get_assets_recursively(i['id'], slim) - - # except KeyError as e: - # # No children found in this folder, move on - # print(e) - # continue - - return assets - - def build_project_tree(self, project_id, slim=True): - # if slim == True: - # self.client.assets.get_children() - - # Get project info - project = self.client.projects.get(project_id) - - # Get children - initial_tree = self.get_assets_recursively(project['root_asset_id'], slim) - - return initial_tree - - def download_project(self, project_id, destination): - project = self.client.projects.get(project_id) - initial_tree = self.get_assets_recursively(project['root_asset_id']) - self.recursive_downloader(destination, initial_tree) - # pprint(initial_tree) - # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") - - def recursive_downloader(self, directory, asset, count=0): - # TODO resolve this clusterfuck of downloads - print(f"Directory {directory}") - - try: - # First check to see if we need to make the directory - target_directory = os.path.join(os.path.curdir, directory) - if not os.path.isdir(target_directory): - os.mkdir(os.path.abspath(target_directory)) - - except Exception as e: - target_directory = os.path.abspath(os.path.join(os.path.curdir, directory)) - print(e) - - if type(asset) == list: - for i in asset: - self.recursive_downloader(directory, i) - - else: - try: - if asset['_type'] == 'folder': - if len(asset['children']) >= 0: - count += 1 - # Create the new folder that these items will go in before it's too late - if not os.path.exists(os.path.join(target_directory, asset['name'])): - print("Path doesn't exist") - new_path = Path(target_directory, str(asset['name']).replace('/', '-')) - print(new_path.absolute) - print("Making new directory") - Path.mkdir(new_path) - sleep(2) - - # Pass along the new directory they'll be living in and the children - self.recursive_downloader(f"{directory}/{str(asset['name']).replace('/', '-')}", asset['children']) - - if asset['_type'] == 'file': - count += 1 - return self.client.assets.download(asset, target_directory, multi_part=True, concurrency=10) - - except Exception as e: - print(e) - - return True - -if __name__ == "__main__": - pass diff --git a/frameioclient/service/projects.py b/frameioclient/service/projects.py deleted file mode 100644 index 993200f2..00000000 --- a/frameioclient/service/projects.py +++ /dev/null @@ -1,145 +0,0 @@ -from .service import Service -from .helpers import FrameioHelpers - -class Project(Service): - def create(self, team_id, **kwargs): - """ - Create a project. - - :Args: - team_id (string): The team id. - :Kwargs: - (optional) kwargs: additional request parameters. 
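`download_project()` above walks the tree returned by `get_assets_recursively` and hands it to `recursive_downloader`. A sketch of that top-level flow, assuming the package-level `FrameioHelpers` export referenced by the docs added later in this series (the project id and destination are placeholders):

```python
import os

from frameioclient import FrameioClient, FrameioHelpers

client = FrameioClient(os.getenv("FRAMEIO_TOKEN"))
helpers = FrameioHelpers(client)

project = client.projects.get("project-id")                      # placeholder id
tree = helpers.get_assets_recursively(project["root_asset_id"])  # slim folder/file tree
helpers.recursive_downloader("downloads", tree)                  # same flow as download_project()
```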
- - Example:: - client.projects.create( - team_id="123", - name="My Awesome Project" - ) - """ - endpoint = '/teams/{}/projects'.format(team_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def get(self, project_id): - """ - Get an individual project - - :Args: - project_id (string): The project's id - - Example:: - client.projects.get( - project_id="123" - ) - - """ - endpoint = '/projects/{}'.format(project_id) - return self.client._api_call('get', endpoint) - - def tree(self, project_id, slim): - """ - Fetch a tree representation of all files/folders in a project. - - :Args: - project_id (string): The project's id - slim (bool): If true, fetch only the minimum information for the following: - filename, - filesize, - thumbnail, - creator_id, - inserted_at (date created), - path (represented like a filesystem) - - Example:: - client.projects.get( - project_id="123", - slim=True - ) - """ - # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) - # return self.client._api_call('get', endpoint) - - return FrameioHelpers(self.client).build_project_tree(project_id, slim) - - def download(self, project_id, destination_directory='downloads'): - """ - Download the provided project to disk. - - :Args: - project_id (uuid): The project's id. - destination_directory (string): Directory on disk that you want to download the project to. - - Example:: - client.projects.download( - project_id="123", - destination_directory="./downloads" - ) - """ - return FrameioHelpers(self.client).download_project(project_id, destination=destination_directory) - - def get_collaborators(self, project_id, **kwargs): - """ - Get collaborators for a project - - :Args: - project_id (uuid): The project's id - - Example:: - client.projects.get_collaborators( - project_id="123" - ) - - """ - endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def get_pending_collaborators(self, project_id, **kwargs): - """ - Get pending collaborators for a project - - :Args: - project_id (uuid): The project's id - - Example:: - client.projects.get_pending_collaborators( - project_id="123" - ) - - """ - endpoint = "/projects/{}/pending_collaborators".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def add_collaborator(self, project_id, email): - """ - Add Collaborator to a Project Collaborator. - - :Args: - project_id (uuid): The project id - email (string): Email user's e-mail address - - Example:: - client.projects.add_collaborator( - project_id="123", - email="janedoe@frame.io", - ) - """ - payload = {"email": email} - endpoint = '/projects/{}/collaborators'.format(project_id) - return self._api_call('post', endpoint, payload=payload) - - def remove_collaborator(self, project_id, email): - """ - Remove Collaborator from Project. - - :Args: - project_id (uuid): The Project ID. 
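The collaborator endpoints above map onto straightforward client calls; an illustrative sequence (the project id and email are placeholders):

```python
import os

from frameioclient import FrameioClient

client = FrameioClient(os.getenv("FRAMEIO_TOKEN"))

collaborators = client.projects.get_collaborators(project_id="project-id")
pending = client.projects.get_pending_collaborators(project_id="project-id")
client.projects.add_collaborator(project_id="project-id", email="janedoe@frame.io")
client.projects.remove_collaborator(project_id="project-id", email="janedoe@frame.io")
```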
- email (string): The user's e-mail address - - Example:: - client.projects.remove_collaborator( - project_id="123", - email="janedoe@frame.io" - ) - """ - endpoint = '/projects/{}/collaborators/_?email={}'.format(project_id, email) - return self._api_call('delete', endpoint) diff --git a/frameioclient/service/service.py b/frameioclient/service/service.py deleted file mode 100644 index a2ffa123..00000000 --- a/frameioclient/service/service.py +++ /dev/null @@ -1,5 +0,0 @@ -from ..client import FrameioClient - -class Service(object): - def __init__(self, client: FrameioClient): - self.client = client diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index 5c8fa82c..445a7c80 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -4,7 +4,7 @@ from .projects import Project from ..lib.service import Service -from ..lib import FrameioUploader, FrameioDownloader +from ..lib import FrameioUploader, FrameioDownloader, constants class Asset(Service): def get(self, asset_id): @@ -17,14 +17,53 @@ def get(self, asset_id): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('get', endpoint) - def get_children(self, asset_id, **kwargs): + def get_children(self, asset_id, include=[], slim=False, **kwargs): """ Get a folder. :Args: asset_id (string): The asset id. + + :Kwargs: + includes (list): List of includes you would like to add. + + Example:: + + client.assets.get_children( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + include=['review_links','cover_asset','creator','presentation'] + ) """ endpoint = '/assets/{}/children'.format(asset_id) + + if slim == True: + query_params = '' + + if len(include) > 0: + query_params += '?include={}'.format(include.join(',')) + else: + # Always include children + query_params += '?' + 'include=children' + + # Only fields + query_params += '&' + 'only_fields=' + ','.join(constants.asset_excludes['only_fields']) + + # # Drop includes + query_params += '&' + 'drop_includes=' + ','.join(constants.asset_excludes['drop_includes']) + + # # Hard drop fields + query_params += '&' + 'hard_drop_fields=' + ','.join(constants.asset_excludes['hard_drop_fields']) + + # Excluded fields + # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) + + # # Sort by inserted_at + # query_params += '&' + 'sort=-inserted_at' + + endpoint += query_params + + # print("Final URL", endpoint) + return self.client._api_call('get', endpoint, kwargs) def create(self, parent_asset_id, **kwargs): @@ -48,7 +87,25 @@ def create(self, parent_asset_id, **kwargs): """ endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=kwargs) - + + def create_folder(self, parent_asset_id, name="New Folder"): + """ + Create a new folder. + + :Args: + parent_asset_id (string): The parent asset id. + name (string): The name of the new folder. + + Example:: + + client.assets.create_folder( + parent_asset_id="123abc", + name="ExampleFile.mp4", + ) + """ + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) + def from_url(self, parent_asset_id, name, url): """ Create an asset from a URL. @@ -181,7 +238,20 @@ def build_asset_info(self, filepath): return file_info - def upload(self, destination_id, filepath): + def upload(self, destination_id, filepath, asset=None): + """ + Upload a file. The method will exit once the file is downloaded. 
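For context on the slim mode added to `get_children` above: with `slim=True` and no extra includes, the request always asks for children and then trims the payload using the field lists from the constants module. A sketch of the resulting query string (the field names below are stand-ins, not the real contents of `constants.asset_excludes`):

```python
# Stand-in field lists; the real values come from constants.asset_excludes in this patch.
only_fields = ["a.name", "a.filesize", "a.item_count"]
drop_includes = ["a.transcodes", "a.source"]
hard_drop_fields = ["a.checksums"]

# With no extra includes passed, children are always requested, then the payload is trimmed.
query_params = "?include=children"
query_params += "&only_fields=" + ",".join(only_fields)
query_params += "&drop_includes=" + ",".join(drop_includes)
query_params += "&hard_drop_fields=" + ",".join(hard_drop_fields)

endpoint = "/assets/{}/children".format("asset-id") + query_params
```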
+ + :Args: + destination_id (uuid): The destination Project or Folder ID. + filepath (string): The locaiton of the file on your local filesystem \ + that you want to upload. + + Example:: + + client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") + """ + # Check if destination is a project or folder # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided # Then we start our upload @@ -193,41 +263,42 @@ def upload(self, destination_id, filepath): # Then try to grab it as a project folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] finally: - file_info = self.build_asset_info(filepath) - try: - asset = self.create(folder_id, - type="file", - name=file_info['filename'], - filetype=file_info['mimetype'], - filesize=file_info['filesize'] - ) + file_info = self._build_asset_info(filepath) - with open(file_info['filepath'], "rb") as fp: - self._upload(asset, fp) + if not asset: + try: + asset = self.create(folder_id, + type="file", + name=file_info['filename'], + filetype=file_info['mimetype'], + filesize=file_info['filesize'] + ) - except Exception as e: - print(e) + except Exception as e: + print(e) - def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5, stats=False): + try: + with open(file_info['filepath'], "rb") as fp: + self._upload(asset, fp) + + except Exception as e: + print(e) + + return asset + + def download(self, asset, download_folder, prefix=None, multi_part=False, replace=False): """ Download an asset. The method will exit once the file is downloaded. :Args: asset (object): The asset object. download_folder (path): The location to download the file to. + multi_part (bool): Attempt to do a multi-part download (non-WMID assets). + replace (bool): Whether or not you want to replace a file if one is found at the destination path. Example:: - client.download(asset, "~./Downloads") - """ - downloader = FrameioDownloader( - asset, - download_folder, - prefix, - multi_part, - concurrency, - user_id=self.client.me['id'], - stats=stats - ) - - return downloader.download_handler() \ No newline at end of file + client.assets.download(asset, "~./Downloads") + """ + downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, replace) + return downloader.download_handler() diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py index c7b3fc22..76de009d 100644 --- a/frameioclient/services/helpers.py +++ b/frameioclient/services/helpers.py @@ -1,4 +1,14 @@ +import os + +from pathlib import Path +from time import time, sleep + from ..lib.service import Service +from ..lib.utils import Utils + +from copy import deepcopy +from typing import List +from pprint import pprint class FrameioHelpers(Service): def get_updated_assets(self, account_id, project_id, timestamp): @@ -30,3 +40,112 @@ def get_updated_assets(self, account_id, project_id, timestamp): } endpoint = '/search/library' return self.client._api_call('post', endpoint, payload=payload) + + def get_assets_recursively(self, asset_id, slim=True): + assets = self.client.assets.get_children(asset_id, slim=slim) + print("Number of assets at top level", len(assets)) + + for asset in assets: + # try: + print(f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}") + # except KeyError: + # print("No children found") + + total_bytes = 0 + + if asset['_type'] == "file": + # Don't do nothing, it's a file! 
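`get_updated_assets` above expects an ISO 8601 UTC timestamp, as its docstring notes; a small usage sketch, assuming the `FrameioHelpers` export documented later in this series (account and project ids are placeholders):

```python
import os
from datetime import datetime, timezone

from frameioclient import FrameioClient, FrameioHelpers

client = FrameioClient(os.getenv("FRAMEIO_TOKEN"))
since = datetime.now(timezone.utc).isoformat()  # ISO 8601 UTC, as the docstring requires

updated = FrameioHelpers(client).get_updated_assets("account-id", "project-id", since)
```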
+ continue + + if asset['_type'] == "verson_stack": + print("Grabbing top item from version stack") + versions = self.client.assets.get_children(asset['id'], slim=True) + asset = versions[0] # re-assign on purpose + continue + + # We only get the first three items when we use "include=children" + if asset['_type'] == "folder": + # try: + if asset['item_count'] > 3: + # Recursively fetch the contents of the folder because we have to + asset['children'] = self.get_assets_recursively(asset['id'], slim) + print("Grabbed more items for this sub dir") + + else: + for i in asset['children']: + # If a folder is found, we still need to recursively search it + if i['_type'] == "folder": + i['children'] = self.get_assets_recursively(i['id'], slim) + + # except KeyError as e: + # # No children found in this folder, move on + # print(e) + # continue + + return assets + + def build_project_tree(self, project_id, slim=True): + # if slim == True: + # self.client.assets.get_children() + + # Get project info + project = self.client.projects.get(project_id) + + # Get children + initial_tree = self.get_assets_recursively(project['root_asset_id'], slim) + + return initial_tree + + def download_project(self, project_id, destination): + project = self.client.projects.get(project_id) + initial_tree = self.get_assets_recursively(project['root_asset_id']) + self.recursive_downloader(destination, initial_tree) + # pprint(initial_tree) + # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") + + def recursive_downloader(self, directory, asset, count=0): + # TODO resolve this clusterfuck of downloads + print(f"Directory {directory}") + + try: + # First check to see if we need to make the directory + target_directory = os.path.join(os.path.curdir, directory) + if not os.path.isdir(target_directory): + os.mkdir(os.path.abspath(target_directory)) + + except Exception as e: + target_directory = os.path.abspath(os.path.join(os.path.curdir, directory)) + print(e) + + if type(asset) == list: + for i in asset: + self.recursive_downloader(directory, i) + + else: + try: + if asset['_type'] == 'folder': + if len(asset['children']) >= 0: + count += 1 + # Create the new folder that these items will go in before it's too late + if not os.path.exists(os.path.join(target_directory, asset['name'])): + print("Path doesn't exist") + new_path = Path(target_directory, str(asset['name']).replace('/', '-')) + print(new_path.absolute) + print("Making new directory") + Path.mkdir(new_path) + sleep(2) + + # Pass along the new directory they'll be living in and the children + self.recursive_downloader(f"{directory}/{str(asset['name']).replace('/', '-')}", asset['children']) + + if asset['_type'] == 'file': + count += 1 + return self.client.assets.download(asset, target_directory, multi_part=True, concurrency=10) + + except Exception as e: + print(e) + + return True + +if __name__ == "__main__": + pass diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py index 668dd7f4..2bf8abb9 100644 --- a/frameioclient/services/projects.py +++ b/frameioclient/services/projects.py @@ -1,4 +1,5 @@ from ..lib.service import Service +from .helpers import FrameioHelpers class Project(Service): def create(self, team_id, **kwargs): @@ -34,7 +35,48 @@ def get(self, project_id): """ endpoint = '/projects/{}'.format(project_id) return self.client._api_call('get', endpoint) - + + def tree(self, project_id, slim): + """ + Fetch a tree representation of all files/folders in a project. 
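Two details worth flagging in the helpers above: the type check compares against the literal `"verson_stack"`, which appears to be a typo for `"version_stack"` and would therefore never match, and `recursive_downloader` sanitizes folder names by swapping forward slashes for dashes before creating directories. A sketch of that sanitization step (the directory and asset name are placeholders):

```python
from pathlib import Path

target_directory = "downloads"
asset_name = "A/B Roll"  # placeholder asset name containing a slash

safe_name = str(asset_name).replace("/", "-")
new_path = Path(target_directory, safe_name)
new_path.mkdir(parents=True, exist_ok=True)  # the diff uses Path.mkdir(new_path) with no exist_ok guard
```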
+ + :Args: + project_id (string): The project's id + slim (bool): If true, fetch only the minimum information for the following: + filename, + filesize, + thumbnail, + creator_id, + inserted_at (date created), + path (represented like a filesystem) + + Example:: + client.projects.get( + project_id="123", + slim=True + ) + """ + # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) + # return self.client._api_call('get', endpoint) + + return FrameioHelpers(self.client).build_project_tree(project_id, slim) + + def download(self, project_id, destination_directory='downloads'): + """ + Download the provided project to disk. + + :Args: + project_id (uuid): The project's id. + destination_directory (string): Directory on disk that you want to download the project to. + + Example:: + client.projects.download( + project_id="123", + destination_directory="./downloads" + ) + """ + return FrameioHelpers(self.client).download_project(project_id, destination=destination_directory) + def get_collaborators(self, project_id, **kwargs): """ Get collaborators for a project From dea3841dcc7c350e6741f56b8b1e9d82bc245159 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 20:03:01 -0700 Subject: [PATCH 13/99] Fix some more issues after merging --- frameioclient/lib/download.py | 87 ++++++++++++++++------------------ frameioclient/lib/transport.py | 14 ++++-- scripts/__init__.py | 1 + scripts/benchmark/__init__.py | 1 + scripts/benchmark/download.py | 3 +- 5 files changed, 54 insertions(+), 52 deletions(-) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 142c4970..b593d3b5 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -3,6 +3,7 @@ import sys import math import time +import requests import enlighten import threading import concurrent.futures @@ -15,7 +16,9 @@ from .exceptions import ( DownloadException, WatermarkIDDownloadException, - AssetNotFullyUploaded + AssetNotFullyUploaded, + AssetChecksumMismatch, + AssetChecksumNotPresent ) class FrameioDownloader(object): @@ -38,7 +41,8 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, replace=Fal self.bytes_started = 0 self.bytes_completed = 0 self.in_progress = 0 - self.session = AWSClient()._get_session(auth=None) + self.aws_client = AWSClient(concurrency=5) + self.session = self.aws_client._get_session(auth=None) self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() @@ -73,10 +77,6 @@ def _evaluate_asset(self): self.original_checksum = None def _create_file_stub(self): - if self.replace == True: - os.remove(self.destination) # Remove the file - self._create_file_stub() # Create a new stub - try: fp = open(self.destination, "w") # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space @@ -142,9 +142,10 @@ def download_handler(self): print("Destination folder not found, creating") os.mkdir(self.download_folder) - if os.path.isfile(self.get_path()): - print("File already exists at this location.") - return self.destination + if not self.replace: + if os.path.isfile(self.get_path()): + print("File already exists at this location.") + return self.destination else: url = self.get_download_key() @@ -232,14 +233,14 @@ def multi_part_download(self, url): status.update(stage='Downloading', color='green') - with 
concurrent.futures.ThreadPoolExecutor(max_workers=self.client.concurrency) as executor: + with concurrent.futures.ThreadPoolExecutor(max_workers=self.aws_client.concurrency) as executor: for i in range(int(self.chunks)): # Increment by the iterable + 1 so we don't mutiply by zero out_byte = offset * (i+1) # Create task tuple task = (url, in_byte, out_byte, i, in_progress) # Stagger start for each chunk by 0.1 seconds - if i < self.concurrency: time.sleep(0.1) + if i < self.aws_client.concurrency: time.sleep(0.1) # Append tasks to futures list self.futures.append(executor.submit(self._download_chunk, task)) # Reset new in byte equal to last out byte @@ -258,36 +259,43 @@ def multi_part_download(self, url): download_time = round((time.time() - start_time), 2) download_speed = round((self.file_size/download_time), 2) - # Perform hash-verification - status.update(stage='Verifying') - - VERIFICATION_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ - 'Progress: {count:.2j}/{total:.2j} ' + \ - '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' - # Add counter to track completed chunks - verification = manager.counter( - position=1, - total=float(self.file_size), - desc='Verifying', - unit='B', - bar_format=VERIFICATION_FORMAT, - color='purple' - ) - - # Calculate the file hash - Utils.calculate_hash(self.destination, progress_callback=verification) + if self.checksum_verification == True: + # Check for checksum, if not present throw error + if self._get_checksum() == None: + raise AssetChecksumNotPresent + else: + # Perform hash-verification + status.update(stage='Verifying') + + VERIFICATION_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ + 'Progress: {count:.2j}/{total:.2j} ' + \ + '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + + # Add counter to track completed chunks + verification = manager.counter( + position=1, + total=float(self.file_size), + desc='Verifying', + unit='B', + bar_format=VERIFICATION_FORMAT, + color='purple' + ) + + # Calculate the file hash + if Utils.calculate_hash(self.destination, progress_callback=verification) != self.original_checksum: + raise AssetChecksumMismatch # Update the header status.update(stage='Download Complete!', force=True) - # Log completion event - SDKLogger('downloads').info("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + # Log completion event + SDKLogger('downloads').info("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) - # Submit telemetry - transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} + # Submit telemetry + transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} - Event(self.user_id, 'python-sdk-download-stats', transfer_stats) + Event(self.user_id, 'python-sdk-download-stats', transfer_stats) # If stats = True, we return a dict with way more info, otherwise \ if self.stats: @@ -305,17 +313,6 @@ def multi_part_download(self, url): return self.destination - if self.checksum_verification == True: - # Check for checksum, if not present throw error - if self._get_checksum() == None: - raise AssetChecksumNotPresent - else: - if Utils.calculate_hash(self.destination) != self.original_checksum: - raise AssetChecksumMismatch - else: - return self.destination - else: - return self.destination def _download_chunk(self, task): # Download a particular chunk diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 89628c32..e7597d82 100644 --- 
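The verification branch added above raises if the asset carries no checksum, or if the computed hash differs from the original. A self-contained sketch of that logic, assuming the import paths implied by this patch's module layout (`frameioclient.Utils` and `frameioclient.lib.exceptions`):

```python
from frameioclient import Utils
from frameioclient.lib.exceptions import AssetChecksumMismatch, AssetChecksumNotPresent


def verify_download(destination, original_checksum):
    """Illustrative mirror of the post-download verification above, not the SDK's own API."""
    if original_checksum is None:
        raise AssetChecksumNotPresent
    if Utils.calculate_hash(destination) != original_checksum:
        raise AssetChecksumMismatch
    return destination
```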
a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -3,13 +3,13 @@ import enlighten import requests import threading -import concurrent.futures from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry from .version import ClientVersion -from .utils import Utils, PaginatedResponse +from .utils import PaginatedResponse +from .exceptions import PresentationException from .bandwidth import NetworkBandwidth, DiskBandwidth @@ -46,7 +46,7 @@ def _get_session(self, auth=True): class APIClient(HTTPClient, object): - def __init__(self, token, host='https://api.frame.io'): + def __init__(self, token, host): super().__init__() self.host = host self.token = token @@ -64,7 +64,7 @@ def _api_call(self, method, endpoint, payload={}, limit=None): r = self.session.request( method, url, - headers=self.auth_header, + headers=headers, json=payload ) @@ -82,6 +82,7 @@ def _api_call(self, method, endpoint, payload={}, limit=None): payload=payload, client=self ) + if isinstance(r.json(), list): return r.json()[:limit] @@ -155,7 +156,10 @@ def get_byte_range(url, start_byte=0, end_byte=2048): AWSClient.get_byte_range(asset, "~./Downloads") """ - headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + range_header = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + shared_headers = {'x-frameio-client': 'python/{}'.format(self.client_version)} + headers = {**shared_headers, **range_header} + br = requests.get(url, headers=headers).content return br diff --git a/scripts/__init__.py b/scripts/__init__.py index e69de29b..8b0efa25 100644 --- a/scripts/__init__.py +++ b/scripts/__init__.py @@ -0,0 +1 @@ +from .benchmark import * \ No newline at end of file diff --git a/scripts/benchmark/__init__.py b/scripts/benchmark/__init__.py index e69de29b..738214bd 100644 --- a/scripts/benchmark/__init__.py +++ b/scripts/benchmark/__init__.py @@ -0,0 +1 @@ +from .utils import timefunc \ No newline at end of file diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py index 480bf1cb..45ce7d97 100644 --- a/scripts/benchmark/download.py +++ b/scripts/benchmark/download.py @@ -9,9 +9,8 @@ def download(asset_id='', destination='downloads', clean_up=True, size='small'): token = os.getenv("FRAMEIO_TOKEN") client = FrameioClient(token) - client.assets.get asset_info = client.assets.get(asset_id) - download_info = client.assets.download(asset_info, destination, multi_part=True, concurrency=10, stats=True) + download_info = client.assets.download(asset_info, destination, multi_part=True, replace=True) if clean_up == True: os.remove(download_info['destination']) From d1daf39ef558e67e0d11fea4b1d8c83b55bbc6ee Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 20:19:34 -0700 Subject: [PATCH 14/99] Add CLI for uploading/downloading assets Re-built using commits from the other branch --- README.md | 28 +++++++++- examples/recursive_upload.py | 94 ++++++++++++++++++++++++++++++++++ frameioclient/client.py | 4 +- frameioclient/fiocli.py | 52 +++++++++++++++++++ frameioclient/lib/transport.py | 25 +++++---- frameioclient/lib/upload.py | 53 ++++++++++++++++++- setup.py | 5 ++ 7 files changed, 246 insertions(+), 15 deletions(-) create mode 100644 examples/recursive_upload.py create mode 100644 frameioclient/fiocli.py diff --git a/README.md b/README.md index 7d9f2af6..b4b398fc 100644 --- a/README.md +++ b/README.md @@ -24,12 +24,38 @@ $ git clone https://github.com/frameio/python-frameio-client $ pip install . 
``` -_Note: The Frame.io Python client may not work correctly in Python 3.8+_ +### Developing +Install the package into your development environment and link to it by running the following: + +```sh +pipenv install -e . -pre +``` ## Documentation [Frame.io API Documentation](https://developer.frame.io/docs) +### Use CLI +When you install this package, a cli tool called `fioctl` will also be installed to your environment. + +**To upload a file or folder** +```sh +fioctl \ +--token fio-u-YOUR_TOKEN_HERE \ +--destination "YOUR TARGET FRAME.IO PROJECT OR FOLDER" \ +--target "YOUR LOCAL SYSTEM DIRECTORY" \ +--threads 8 +``` + +**To download a file, project, or folder** +```sh +fioctl \ +--token fio-u-YOUR_TOKEN_HERE \ +--destination "YOUR LOCAL SYSTEM DIRECTORY" \ +--target "YOUR TARGET FRAME.IO PROJECT OR FOLDER" \ +--threads 2 +``` + ## Usage _Note: A valid token is required to make requests to Frame.io. Go to our [Developer Portal](https://developer.frame.io/) to get a token!_ diff --git a/examples/recursive_upload.py b/examples/recursive_upload.py new file mode 100644 index 00000000..05101eea --- /dev/null +++ b/examples/recursive_upload.py @@ -0,0 +1,94 @@ +import os +import time +import mimetypes +import concurrent.futures +import threading +from frameioclient import FrameioClient +from pprint import pprint + +global file_num +file_num = 0 + +global file_count +file_count = 0 + +def create_n_upload(task): + client=task[0] + file_p=task[1] + parent_asset_id=task[2] + abs_path = os.path.abspath(file_p) + file_s = os.path.getsize(file_p) + file_n = os.path.split(file_p)[1] + file_mime = mimetypes.guess_type(abs_path)[0] + + asset = client.create_asset( + parent_asset_id=parent_asset_id, + name=file_n, + type="file", + filetype=file_mime, + filesize=file_s + ) + + with open(abs_path, "rb") as ul_file: + asset_info = client.upload(asset, ul_file) + + return asset_info + + +def create_folder(folder_n, parent_asset_id): + asset = client.create_asset( + parent_asset_id=parent_asset_id, + name=folder_n, + type="folder", + ) + + return asset['id'] + + +def file_counter(root_folder): + matches = [] + for root, dirnames, filenames in os.walk(root_folder): + for filename in filenames: + matches.append(os.path.join(filename)) + + return matches + + +def recursive_upload(client, folder, parent_asset_id): + # Seperate files and folders: + file_list = list() + folder_list = list() + + for item in os.listdir(folder): + if item == ".DS_Store": # Ignore .DS_Store files on Mac + continue + + complete_item_path = os.path.join(folder, item) + + if os.path.isfile(complete_item_path): + file_list.append(item) + else: + folder_list.append(item) + + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + for file_p in file_list: + global file_num + file_num += 1 + print(f"Starting {file_num}/{file_count}") + complete_dir_obj = os.path.join(folder, file_p) + task = (client, complete_dir_obj, parent_asset_id) + executor.submit(create_n_upload, task) + + for folder_i in folder_list: + new_folder = os.path.join(folder, folder_i) + new_parent_asset_id = create_folder(folder_i, parent_asset_id) + recursive_upload(client, new_folder, new_parent_asset_id) + + +if __name__ == "__main__": + root_folder = "./test_structure" + parent_asset_id = "PARENT_ASSET_ID" + client = FrameioClient(os.getenv("FRAME_IO_TOKEN")) + + file_count = len(file_counter(root_folder)) + recursive_upload(client, root_folder, parent_asset_id) \ No newline at end of file diff --git a/frameioclient/client.py b/frameioclient/client.py 
index 19f51a06..dd7e1976 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -8,8 +8,8 @@ ) class FrameioClient(APIClient, object): - def __init__(self, token, host='https://api.frame.io'): - super().__init__(token, host) + def __init__(self, token, host='https://api.frame.io', threads=5, progress=False): + super().__init__(token, host, threads, progress) @property def me(self): diff --git a/frameioclient/fiocli.py b/frameioclient/fiocli.py new file mode 100644 index 00000000..6d2ff98d --- /dev/null +++ b/frameioclient/fiocli.py @@ -0,0 +1,52 @@ +import os +import sys +import argparse + +from frameioclient import FrameioClient + + +def main(): + parser=argparse.ArgumentParser(prog='fiocli', description='Frame.io Python SDK CLI') + + ## Define args + parser.add_argument('--token', action='store', metavar='token', type=str, nargs='+', help='Developer Token') + # parser.add_argument('--op', action='store', metavar='op', type=str, nargs='+', help='Operation: upload, download') + parser.add_argument('--target', action='store', metavar='target', type=str, nargs='+', help='Target: remote project or folder, or alternatively a local file/folder') + parser.add_argument('--destination', action='store', metavar='destination', type=str, nargs='+', help='Destination: remote project or folder, or alternatively a local file/folder') + parser.add_argument('--threads', action='store', metavar='threads', type=int, nargs='+', help='Number of threads to use') + + ## Parse args + args = parser.parse_args() + + if args.threads: + threads = args.threads[0] + else: + threads = 5 + + ## Handle args + if args.token: + client = None + # print(args.token) + try: + client = FrameioClient(args.token[0], progress=True, threads=threads) + except Exception as e: + print("Failed") + sys.exit(1) + + # If args.op == 'upload': + if args.target: + if args.destination: + # Check to see if this is a local target and thus a download + if os.path.isdir(args.destination[0]): + asset = client.assets.get(args.target[0]) + return client.assets.download(asset, args.destination[0], progress=True, multi_part=True, concurrency=threads) + else: # This is an upload + if os.path.isdir(args.target[0]): + return client.assets.upload_folder(args.target[0], args.destination[0]) + else: + return client.assets.upload(args.destination[0], args.target[0]) + else: + print("No destination supplied") + else: + print("No target supplied") + diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index e7597d82..ed398561 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -46,24 +46,27 @@ def _get_session(self, auth=True): class APIClient(HTTPClient, object): - def __init__(self, token, host): + def __init__(self, token, host, threads, progress): super().__init__() self.host = host self.token = token + self.threads = threads + self.progress = progress self._initialize_thread() - self.session = self._get_session(auth=token) + self.session = self._get_session() self.auth_header = { - 'Authorization': 'Bearer {}'.format(self.token), + 'Authorization': 'Bearer {}'.format(self.token) } - def _api_call(self, method, endpoint, payload={}, limit=None): - url = '{}/v2{}'.format(self.host, endpoint) + def _format_api_call(self, endpoint): + return '{}/v2{}'.format(self.host, endpoint) + def _api_call(self, method, endpoint, payload={}, limit=None): headers = {**self.shared_headers, **self.auth_header} r = self.session.request( method, - url, + self._format_api_call(endpoint), headers=headers, 
json=payload ) @@ -98,14 +101,14 @@ def get_specific_page(self, method, endpoint, payload, page): Gets a specific page for that endpoint, used by Pagination Class :Args: - method (string): 'get', 'post' - endpoint (string): endpoint ('/accounts//teams') - payload (dict): Request payload - page (int): What page to get + method (string): 'get', 'post' + endpoint (string): endpoint ('/accounts//teams') + payload (dict): Request payload + page (int): What page to get """ if method == 'get': endpoint = '{}?page={}'.format(endpoint, page) - return self._api_call(method, endpoint) + return self._api_call(method, endpoint) if method == 'post': payload['page'] = page diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 448dae1b..97d7f1ec 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -4,13 +4,17 @@ import threading import concurrent.futures +from .utils import Utils + thread_local = threading.local() class FrameioUploader(object): - def __init__(self, asset, file): + def __init__(self, asset=None, file=None): self.asset = asset self.file = file self.chunk_size = None + self.file_count = 0 + self.file_num = 0 def _calculate_chunks(self, total_size, chunk_count): self.chunk_size = int(math.ceil(total_size / chunk_count)) @@ -76,3 +80,50 @@ def upload(self): task = (url, chunk_offset, i) executor.submit(self._upload_chunk, task) + + + def file_counter(self, folder): + matches = [] + for root, dirnames, filenames in os.walk(folder): + for filename in filenames: + matches.append(os.path.join(filename)) + + self.file_count = len(matches) + + return matches + + def recursive_upload(self, client, folder, parent_asset_id): + # Seperate files and folders: + file_list = list() + folder_list = list() + + if self.file_count == 0: + self.file_counter(folder) + + for item in os.listdir(folder): + if item == ".DS_Store": # Ignore .DS_Store files on Mac + continue + + complete_item_path = os.path.join(folder, item) + + if os.path.isfile(complete_item_path): + file_list.append(item) + else: + folder_list.append(item) + + for file_p in file_list: + self.file_num += 1 + + complete_dir_obj = os.path.join(folder, file_p) + print(f"Starting {self.file_num:02d}/{self.file_count}, Size: {Utils.format_bytes(os.path.getsize(complete_dir_obj), type='size')}, Name: {file_p}") + client.assets.upload(parent_asset_id, complete_dir_obj) + + for folder_name in folder_list: + new_folder = os.path.join(folder, folder_name) + new_parent_asset_id = client.assets.create( + parent_asset_id=parent_asset_id, + name=folder_name, + type="folder" + )['id'] + + self.recursive_upload(client, new_folder, new_parent_asset_id) \ No newline at end of file diff --git a/setup.py b/setup.py index f456b2ff..a378aded 100644 --- a/setup.py +++ b/setup.py @@ -41,6 +41,11 @@ def run(self): 'bump2version', ] }, + entry_points ={ + 'console_scripts': [ + 'fiocli = frameioclient.fiocli:main' + ] + }, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', From f707168d04f791959d0600b31661ec29a78af3c1 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 20:40:31 -0700 Subject: [PATCH 15/99] Fix integration test --- frameioclient/lib/download.py | 44 +++++++++++++++++--------------- frameioclient/services/assets.py | 38 +++++++++------------------ tests/integration.py | 8 +++--- 3 files changed, 40 insertions(+), 50 deletions(-) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index b593d3b5..0f57c926 100644 --- 
a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -35,6 +35,7 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, replace=Fal self.futures = list() self.checksum = None self.original_checksum = None + self.checksum_verification = True self.chunk_size = (25 * 1024 * 1024) # 25 MB chunk size self.chunks = math.ceil(self.file_size/self.chunk_size) self.prefix = prefix @@ -45,6 +46,7 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, replace=Fal self.session = self.aws_client._get_session(auth=None) self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() + self.stats = True self._evaluate_asset() self._get_path() @@ -142,32 +144,34 @@ def download_handler(self): print("Destination folder not found, creating") os.mkdir(self.download_folder) - if not self.replace: - if os.path.isfile(self.get_path()): - print("File already exists at this location.") - return self.destination - else: - url = self.get_download_key() + if os.path.isfile(self.get_path()) == False: + pass - if self.watermarked == True: - return self.single_part_download(url) + if os.path.isfile(self.get_path()) and self.replace == True: + os.remove(self.get_path()) + + if os.path.isfile(self.get_path()) and self.replace == False: + print("File already exists at this location.") + return self.destination + + url = self.get_download_key() + + if self.watermarked == True: + return self.single_part_download(url) + else: + # Don't use multi-part download for files below 25 MB + if self.asset['filesize'] < 26214400: + return self.download(url) + if self.multi_part == True: + return self.multi_part_download(url) else: - # Don't use multi-part download for files below 25 MB - if self.asset['filesize'] < 26214400: - return self.download(url) - if self.multi_part == True: - return self.multi_part_download(url) - else: - return self.single_part_download(url) + return self.single_part_download(url) def single_part_download(self, url): start_time = time.time() print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) # Downloading - r = self.session.get(url) - open(self.destination, "wb").write(r.content) - with open(self.destination, 'wb') as handle: try: # TODO make sure this approach works for SBWM download @@ -295,7 +299,7 @@ def multi_part_download(self, url): # Submit telemetry transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} - Event(self.user_id, 'python-sdk-download-stats', transfer_stats) + # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) # If stats = True, we return a dict with way more info, otherwise \ if self.stats: @@ -305,7 +309,7 @@ def multi_part_download(self, url): "speed": download_speed, "elapsed": download_time, "cdn": AWSClient.check_cdn(url), - "concurrency": self.concurrency, + "concurrency": self.aws_client.concurrency, "size": self.file_size, "chunks": self.chunks } diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index 445a7c80..064406a0 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -7,6 +7,18 @@ from ..lib import FrameioUploader, FrameioDownloader, constants class Asset(Service): + def _build_asset_info(self, filepath): + full_path = os.path.abspath(filepath) + + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0] + 
} + + return file_info + def get(self, asset_id): """ Get an asset by id. @@ -124,7 +136,6 @@ def from_url(self, parent_asset_id, name, url): url="https://" ) """ - payload = { "name": name, "type": "file", @@ -179,9 +190,7 @@ def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): client.assets.bulk_copy("adeffee123342", asset_list=["7ee008c5-49a2-f8b5-997d-8b64de153c30", \ "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) """ - payload = {"batch": []} - new_list = list() if copy_comments: payload['copy_comments'] = "all" @@ -213,31 +222,9 @@ def _upload(self, asset, file): Example:: client.upload(asset, open('example.mp4')) """ - uploader = FrameioUploader(asset, file) uploader.upload() - # def upload_folder(self, destination_id, folderpath): - # try: - # if os.path.isdir(folderpath): - # # Good it's a directory, we can keep going - - # except OSError: - # if not os.path.exists(folderpath): - # sys.exit("Folder doesn't exist, exiting...") - - def build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - def upload(self, destination_id, filepath, asset=None): """ Upload a file. The method will exit once the file is downloaded. @@ -248,7 +235,6 @@ def upload(self, destination_id, filepath, asset=None): that you want to upload. Example:: - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") """ diff --git a/tests/integration.py b/tests/integration.py index 7dd4bd07..f0ae6086 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -31,11 +31,11 @@ def init_client(): sys.exit(1) if environment == "PRODUCTION": - client = FrameioClient(token) + client = FrameioClient(token, threads=10) print("Client connection initialized.") else: - client = FrameioClient(token, host='https://api.dev.frame.io') + client = FrameioClient(token, host='https://api.dev.frame.io', threads=10) print("Client connection initialized.") return client @@ -107,7 +107,7 @@ def test_download(client, override=False): start_time = time.time() print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name'])) - client.assets.download(asset, download_dir, multi_part=True, concurrency=10) + client.assets.download(asset, download_dir, multi_part=True) download_time = time.time() - start_time download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) @@ -332,7 +332,7 @@ def run_test(): print("Beginning Integration test...") client = init_client() - test_download(client) + test_download(client, override=True) upload_folder_id = test_upload(client) check_upload_completion(client, download_asset_id, upload_folder_id) # clean_up(client, upload_folder_id) From f1defd8008bc6034d41fbbd25382a19a6410b149 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 20:44:05 -0700 Subject: [PATCH 16/99] Fix missing creation of downloads directory --- tests/integration.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/integration.py b/tests/integration.py index f0ae6086..ecd9e8ec 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -332,7 +332,7 @@ def run_test(): print("Beginning Integration test...") client = init_client() - test_download(client, override=True) + test_download(client) upload_folder_id = test_upload(client) check_upload_completion(client, download_asset_id, 
upload_folder_id) # clean_up(client, upload_folder_id) From 9a20758899ee53586538aa9311adf62489091c9c Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 18:49:50 -0700 Subject: [PATCH 17/99] Add docs --- .circleci/config.yml | 40 ++-- .gitignore | 2 + Makefile | 5 +- README.md | 20 ++ docs/Makefile | 32 +++ docs/classes/assets.rst | 5 + docs/classes/comments.rst | 5 + docs/classes/index.rst | 12 + docs/classes/logs.rst | 5 + docs/classes/projects.rst | 5 + docs/classes/search.rst | 2 + docs/classes/sharing.rst | 8 + docs/classes/teams.rst | 5 + docs/classes/users.rst | 5 + docs/conf.py | 60 +++++ docs/index.rst | 35 +++ docs/installation.rst | 37 +++ docs/make.bat | 35 +++ docs/modules/downloader.rst | 8 + docs/modules/helpers.rst | 9 + docs/modules/index.rst | 8 + docs/modules/uploader.rst | 8 + docs/modules/utils.rst | 8 + docs/publish.py | 190 +++++++++++++++ docs/requirements.txt | 8 + examples/asset_tree.py | 33 +++ examples/new_tests.py | 20 ++ frameioclient/__init__.py | 2 +- frameioclient/client.py | 78 +++++++ frameioclient/lib/download.py | 9 + frameioclient/lib/upload.py | 9 + frameioclient/lib/utils.py | 20 +- frameioclient/service/assets.py | 355 +++++++++++++++++++++++++++++ frameioclient/service/projects.py | 134 +++++++++++ frameioclient/service/service.py | 5 + frameioclient/services/comments.py | 51 +++-- frameioclient/services/links.py | 84 +++---- frameioclient/services/logs.py | 14 +- frameioclient/services/teams.py | 57 ++--- setup.py | 2 + 40 files changed, 1305 insertions(+), 125 deletions(-) create mode 100644 docs/Makefile create mode 100644 docs/classes/assets.rst create mode 100644 docs/classes/comments.rst create mode 100644 docs/classes/index.rst create mode 100644 docs/classes/logs.rst create mode 100644 docs/classes/projects.rst create mode 100644 docs/classes/search.rst create mode 100644 docs/classes/sharing.rst create mode 100644 docs/classes/teams.rst create mode 100644 docs/classes/users.rst create mode 100644 docs/conf.py create mode 100644 docs/index.rst create mode 100644 docs/installation.rst create mode 100644 docs/make.bat create mode 100644 docs/modules/downloader.rst create mode 100644 docs/modules/helpers.rst create mode 100644 docs/modules/index.rst create mode 100644 docs/modules/uploader.rst create mode 100644 docs/modules/utils.rst create mode 100644 docs/publish.py create mode 100644 docs/requirements.txt create mode 100644 examples/asset_tree.py create mode 100644 examples/new_tests.py create mode 100644 frameioclient/service/assets.py create mode 100644 frameioclient/service/projects.py create mode 100644 frameioclient/service/service.py diff --git a/.circleci/config.yml b/.circleci/config.yml index f975809e..2d510f62 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -33,6 +33,11 @@ workflows: requires: - hold + - docs: + requires: + # - deploy + - build + # upload_test: # triggers: # - schedule: @@ -41,10 +46,8 @@ workflows: # branches: # only: # - jh/use-xxhash-for-integration-test - # jobs: # - build - # - upload_test_job: # requires: # - build @@ -103,17 +106,12 @@ jobs: name: Attach build artifact - run: - name: Install package - command: | - pip install '/tmp/artifact' - - - run: - name: Run integration test + name: Upload to pypi command: | - python /tmp/artifact/tests/integration.py - + cd /tmp/artifact + twine upload dist/* - deploy: + docs: docker: - image: circleci/python:latest @@ -125,18 +123,18 @@ jobs: - run: name: Install dependencies command: | - pip install setuptools wheel twine - + cd 
/tmp/artifact/docs + pip install -r requirements.txt + - run: - name: init .pypirc + name: Build autodocs command: | - cd /tmp/artifact - echo -e "[pypi]" >> ~/.pypirc - echo -e "username = $TWINE_USERNAME" >> ~/.pypirc - echo -e "password = $TWINE_PASSWORD" >> ~/.pypirc + cd /tmp/artifact/docs + make jekyll - run: - name: Upload to pypi + name: Publish autodocs command: | - cd /tmp/artifact - twine upload dist/* + cd /tmp/artifact/docs + python publish.py + diff --git a/.gitignore b/.gitignore index f1141588..37bc36ef 100644 --- a/.gitignore +++ b/.gitignore @@ -109,3 +109,5 @@ Pipfile Pipfile.lock .vscode/launch.json .vscode/settings.json + +pyproject.toml \ No newline at end of file diff --git a/Makefile b/Makefile index a570a84d..0762ee5c 100644 --- a/Makefile +++ b/Makefile @@ -29,4 +29,7 @@ run-benchmark: docker run -it -e $1 benchmark format: - black frameioclient \ No newline at end of file + black frameioclient + +publish-docs: + cd docs && pip install -r requirements.txt && make jekyll && make publish \ No newline at end of file diff --git a/README.md b/README.md index b4b398fc..d8c4dd94 100644 --- a/README.md +++ b/README.md @@ -56,6 +56,26 @@ fioctl \ --threads 2 ``` +### Links + +**Sphinx Documentation** +- https://pythonhosted.org/sphinxcontrib-restbuilder/ +- https://www.npmjs.com/package/rst-selector-parser +- https://sphinx-themes.org/sample-sites/furo/_sources/index.rst.txt +- https://developer.mantidproject.org/Standards/DocumentationGuideForDevs.html +- https://sublime-and-sphinx-guide.readthedocs.io/en/latest/code_blocks.html +- https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html +- https://stackoverflow.com/questions/64451966/python-sphinx-how-to-embed-code-into-a-docstring +- https://pythonhosted.org/an_example_pypi_project/sphinx.html + +**Decorators** +- https://docs.python.org/3.7/library/functools.html +- https://realpython.com/primer-on-python-decorators/ +- https://www.sphinx-doc.org/en/master/usage/quickstart.html +- https://www.geeksforgeeks.org/decorators-with-parameters-in-python/ +- https://stackoverflow.com/questions/43544954/why-does-sphinx-autodoc-output-a-decorators-docstring-when-there-are-two-decora + + ## Usage _Note: A valid token is required to make requests to Frame.io. Go to our [Developer Portal](https://developer.frame.io/) to get a token!_ diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..29ca4852 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,32 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = . +BUILDDIR = dist + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +publish: + python publish.py + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +jekyll: + sphinx-build -b jekyll . dist/markdown + +rst: + sphinx-build -b rst . dist/rst + +html: + sphinx-build -b html . dist/html \ No newline at end of file diff --git a/docs/classes/assets.rst b/docs/classes/assets.rst new file mode 100644 index 00000000..ff78e6d6 --- /dev/null +++ b/docs/classes/assets.rst @@ -0,0 +1,5 @@ +Assets +========================= + +.. 
autoclass:: frameioclient.Asset + :members: diff --git a/docs/classes/comments.rst b/docs/classes/comments.rst new file mode 100644 index 00000000..ea9f45cc --- /dev/null +++ b/docs/classes/comments.rst @@ -0,0 +1,5 @@ +Comments +=================== + +.. autoclass:: frameioclient.Comment + :members: diff --git a/docs/classes/index.rst b/docs/classes/index.rst new file mode 100644 index 00000000..be5258bf --- /dev/null +++ b/docs/classes/index.rst @@ -0,0 +1,12 @@ +Classes +===================== + +.. toctree:: + users + assets + comments + logs + projects + teams + sharing + search \ No newline at end of file diff --git a/docs/classes/logs.rst b/docs/classes/logs.rst new file mode 100644 index 00000000..098f0fc2 --- /dev/null +++ b/docs/classes/logs.rst @@ -0,0 +1,5 @@ +Audit Logs +=================== + +.. autoclass:: frameioclient.AuditLogs + :members: diff --git a/docs/classes/projects.rst b/docs/classes/projects.rst new file mode 100644 index 00000000..0998acd0 --- /dev/null +++ b/docs/classes/projects.rst @@ -0,0 +1,5 @@ +Projects +=================== + +.. autoclass:: frameioclient.Project + :members: diff --git a/docs/classes/search.rst b/docs/classes/search.rst new file mode 100644 index 00000000..53825d7a --- /dev/null +++ b/docs/classes/search.rst @@ -0,0 +1,2 @@ +Search +=================== diff --git a/docs/classes/sharing.rst b/docs/classes/sharing.rst new file mode 100644 index 00000000..3bff7927 --- /dev/null +++ b/docs/classes/sharing.rst @@ -0,0 +1,8 @@ +Sharing +=================== + +.. autoclass:: frameioclient.PresentationLink + :members: + +.. autoclass:: frameioclient.ReviewLink + :members: \ No newline at end of file diff --git a/docs/classes/teams.rst b/docs/classes/teams.rst new file mode 100644 index 00000000..23c34eb9 --- /dev/null +++ b/docs/classes/teams.rst @@ -0,0 +1,5 @@ +Teams +=================== + +.. autoclass:: frameioclient.Team + :members: diff --git a/docs/classes/users.rst b/docs/classes/users.rst new file mode 100644 index 00000000..77ed12d1 --- /dev/null +++ b/docs/classes/users.rst @@ -0,0 +1,5 @@ +Users +=================== + +.. autoclass:: frameioclient.User + :members: \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..a8dd5de3 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,60 @@ +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + +import frameioclient + +PACKAGE_TITLE = 'Frame.io Python SDK' +PACKAGE_NAME = 'frameioclient' +PACKAGE_DIR = '../frameioclient' +AUTHOR_NAME = 'Frame.io' + +try: + RELEASE = frameioclient.ClientVersion.version() +except AttributeError: + RELEASE = 'unknown' + +version = RELEASE.split('.')[0] + +# -- Project information ----------------------------------------------------- + +project = PACKAGE_TITLE +copyright = 'MIT License 2021, Frame.io' +author = AUTHOR_NAME + +# The full version, including alpha/beta/rc tags +release = RELEASE + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.napoleon', + 'sphinxcontrib.restbuilder', + 'sphinx_jekyll_builder', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. 
+# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', 'build/*', 'examples/*', 'tests/*', '*.cfg', '.vscode/*', '.github/*', '.circleci/*', '.pytest_cache/*', 'dist/*'] + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = 'furo' + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 00000000..9da0551d --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,35 @@ +Welcome to Frame.io's Python SDK documentation! +=============================================== + +.. toctree:: + :maxdepth: 3 + :caption: Contents: + +.. warning:: + This sample documentation was generated on |today|, and is rebuilt weekly. + + +FrameioClient +=================== +.. automodule:: frameioclient.FrameioClient + :inherited-members: + + +Classes +=========== +.. toctree:: + classes/index + + +Modules +=========== +.. toctree:: + modules/index + + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 00000000..7ee3d525 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,37 @@ +=============== +frameioclient +=============== + +.. toctree:: + :hidden: + + installation + +Installation +============ + +Stable releases of frameioclient can be installed with + +.. code-block:: sh + + pip <- or you may download a `.tgz` source + +archive from `pypi `_. +See the :doc:`installation` page for more detailed instructions. + +If you want to use the latest code, you can grab it from our +`Git repository `_, or `fork it `_. + +Usage +=================================== + +Authorization +------------- + +Frame.io Python SDK documentation: `Personal Access Tokens `_. + + +.. code-block:: python + + from frameioclient import FrameioClient + client = FrameioClient(token='my-token') diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..2119f510 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,35 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=. +set BUILDDIR=_build + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/modules/downloader.rst b/docs/modules/downloader.rst new file mode 100644 index 00000000..2c213f92 --- /dev/null +++ b/docs/modules/downloader.rst @@ -0,0 +1,8 @@ +FrameioDownloader +=================== + +.. 
autoclass:: frameioclient.FrameioDownloader + :members: + :private-members: + :inherited-members: + :undoc-members: diff --git a/docs/modules/helpers.rst b/docs/modules/helpers.rst new file mode 100644 index 00000000..9f8c2b4e --- /dev/null +++ b/docs/modules/helpers.rst @@ -0,0 +1,9 @@ +FrameioHelpers +========================= + +.. autoclass:: frameioclient.FrameioHelpers + :members: + :private-members: + :inherited-members: + :undoc-members: + diff --git a/docs/modules/index.rst b/docs/modules/index.rst new file mode 100644 index 00000000..ed40d893 --- /dev/null +++ b/docs/modules/index.rst @@ -0,0 +1,8 @@ +Modules +===================== + +.. toctree:: + downloader + uploader + helpers + utils diff --git a/docs/modules/uploader.rst b/docs/modules/uploader.rst new file mode 100644 index 00000000..385dfe1c --- /dev/null +++ b/docs/modules/uploader.rst @@ -0,0 +1,8 @@ +FrameioUploader +=================== + +.. autoclass:: frameioclient.FrameioUploader + :members: + :private-members: + :inherited-members: + :undoc-members: diff --git a/docs/modules/utils.rst b/docs/modules/utils.rst new file mode 100644 index 00000000..cfc24284 --- /dev/null +++ b/docs/modules/utils.rst @@ -0,0 +1,8 @@ +Utils +=================== + +.. autoclass:: frameioclient.Utils + :members: + :private-members: + :inherited-members: + :undoc-members: diff --git a/docs/publish.py b/docs/publish.py new file mode 100644 index 00000000..4df3d17d --- /dev/null +++ b/docs/publish.py @@ -0,0 +1,190 @@ +import os +import hashlib +import frontmatter +import contentful_management + +TOKEN = os.getenv("CONTENTFUL_TOKEN") +SPACE_ID = os.getenv("CONTENTFUL_SPACE_ID") +SDK_ID = os.getenv("CONTENTFUL_SDK_ID") + +docs_path = "./dist/jekyll/api" + + +def transform_path(path): + # The paths generated automatically need modifying. + # This function should be localized to each SDK. 
+ + if path == '/api-frameioclient': + new_path = 'package' + else: + new_path = path.split('/api-frameioclient-')[1].lower() + + return new_path + + +def transform_title(docname): + if docname == 'api/frameioclient': + new_title = 'Frame.io Python SDK' + else: + print(docname) + new_title = docname.split('.')[1].title() + + return new_title + + +def load_local(directory): + # Load in the local docs + docs_data = list() + files = os.listdir(directory) + for fn in files: + fpath = os.path.join(directory, fn) + with open(fpath) as f: + post = frontmatter.load(f) + post['path'] = transform_path(post['path']) + post['title'] = transform_title(post['docname']) + docs_data.append(post) + + return docs_data + + +def load_remote(): + # Create the client + client = contentful_management.Client(TOKEN) + + # Grab all the autoDocs + autoDoc = client.content_types(SPACE_ID, 'master').find('autoDoc') + entries = autoDoc.entries().all() + + # Filter out the ones that aren't the right programming language + relevant_docs = list() + for entry in entries: + # entry = autoDoc.entries().find(entry.id) + entry.sys['locale'] = 'en-US' + sdk = entry.programming_language.id + if sdk == SDK_ID: + relevant_docs.append(entry) + + return relevant_docs + + +def hash_content(content): + # Returns an SHA-256 hash of the stringified content provided + hash_object = hashlib.sha256(bytes(content, 'utf-8')) + sha256 = hash_object.hexdigest() + return sha256 + + +def update_doc(): + pass + + +def publish_new_docs(docs, publish=False): + client = contentful_management.Client(TOKEN) + + for new_entry in docs: + entry_attributes = { + 'content_type_id': 'autoDoc', + 'fields': { + 'title': { + 'en-US': new_entry['title'] + }, + 'slug': { + 'en-US': new_entry['slug'] + }, + 'content': { + 'en-US': new_entry['content'] + }, + 'programmingLanguage': { + 'en-US': { + 'sys': { + "id": SDK_ID, + "type": "Link", + "linkType": "Entry" + } + } + } + } + } + + new_entry = client.entries(SPACE_ID, 'master').create( + attributes=entry_attributes + ) + + # Only publish the new stuff is `publish=True` + if publish == True: + new_entry.publish() + + print(f"Submitted {entry_attributes['fields']['title']}") + + print("Done submitting") + + +def compare_docs(local, remote): + # Compare the remote docs and the local docs + + # Enrich local docs + enriched_local = dict() + for doc in local: + # print(doc.keys()) + enriched_local[hash_content(doc.content)] = { + "date": doc['date'], + "title": doc['title'], + "slug": doc['path'], + "content": doc.content, + "hash": hash_content(doc.content) + } + + # Enrich remote docs + enriched_remote = dict() + for doc in remote: + # print(doc.fields()) + enriched_remote[hash_content(doc.fields()['content'])] = { + "date": doc.sys['updated_at'], + "title": doc.fields()['title'], + "slug": doc.fields()['slug'], + "content": doc.fields()['content'], + "hash": hash_content(doc.fields()['content']) + } + + + # Compare titles and content hashes, update only ones in which the hashes are different + + # Declare our now list that we'll be appending to shortly + docs_to_update = list() + docs_to_maybe_publish = list() + docs_to_definitely_publish = list() + + # Iterate over keys + for doc_hash in enriched_local.keys(): + # If key found in remote keys, skip it + if doc_hash in enriched_remote.keys(): + print(f"Local and remote match for {enriched_remote[doc_hash]['title']}, skipping...") + continue + else: + docs_to_maybe_publish.append(enriched_local[doc_hash]) + + # return docs_to_update, docs_to_publish + return 
docs_to_maybe_publish + + +def main(): + # Grab the remote docs + remote_docs = load_remote() + + # Grab the local docs + local_docs = load_local(docs_path) + + # docs_to_update, docs_to_publish = compare_docs(local=local_docs, remote=remote_docs) + docs_to_publish = compare_docs(local=local_docs, remote=remote_docs) + + # Publish those docs! + publish_new_docs(docs_to_publish) + + # Iterate over the new docs and if + # for doc in new_docs: + # # print(doc.content) + # print(doc.keys()) + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 00000000..a1f50ba6 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,8 @@ +sphinx +sphinx-jekyll-builder +sphinxcontrib-restbuilder +contentful_management +python-frontmatter +# frameioclient +xxhash +furo \ No newline at end of file diff --git a/examples/asset_tree.py b/examples/asset_tree.py new file mode 100644 index 00000000..8dcafac9 --- /dev/null +++ b/examples/asset_tree.py @@ -0,0 +1,33 @@ +import os + +import pdb +from time import time +from pprint import pprint +from frameioclient import FrameioClient + +def demo_folder_tree(project_id, slim): + TOKEN = os.getenv("FRAMEIO_TOKEN") + client = FrameioClient(TOKEN) + + start_time = time() + tree = client.projects.tree(project_id, slim) + + end_time = time() + elapsed = round((end_time - start_time), 2) + + item_count = len(tree) + pprint(tree) + # pdb.set_trace() + + print(f"Found {item_count} items") + print(f"Took {elapsed} second to fetch the slim payload for project: {project_id}") + print("\n") + +if __name__ == "__main__": + project_id = '2dfb6ce6-90d8-4994-881f-f02cd94b1c81' + # project_id='e2845993-7330-54c6-8b77-eafbd5144eac' + demo_folder_tree(project_id, slim=True) + # demo_folder_tree(project_id, slim=False) + +# 445 seconds for slim +# 509 seconds for non-slim \ No newline at end of file diff --git a/examples/new_tests.py b/examples/new_tests.py new file mode 100644 index 00000000..7b1fac6e --- /dev/null +++ b/examples/new_tests.py @@ -0,0 +1,20 @@ +import os +from pprint import pprint + +from frameioclient import FrameioClient, Asset, ClientVersion + + +token = os.getenv('FRAMEIO_TOKEN') +client = FrameioClient(token) +folder_id = 'dd8526ee-2c7d-4b48-9bf7-b847664666bb' +file_path = '/Users/jeff/Code/python-frameio-client/examples/downloads/accelerated_Test_Chart_5_Sec_embedded_meta_Mezzanine.mxf' + +client.assets.upload(folder_id, file_path) + + +print(client.users.get_me()) + +pprint(client.teams.list_projects(client.teams.list_all()[0]['id'])) + +for log in client.logs.list(client.users.get_me()['account_id']): + print(log) \ No newline at end of file diff --git a/frameioclient/__init__.py b/frameioclient/__init__.py index 8f02abec..04bcc1e2 100644 --- a/frameioclient/__init__.py +++ b/frameioclient/__init__.py @@ -1,3 +1,3 @@ from .lib import * from .services import * -from .client import FrameioClient \ No newline at end of file +from .client import FrameioClient diff --git a/frameioclient/client.py b/frameioclient/client.py index dd7e1976..ddb0f976 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -1,3 +1,8 @@ +""" +client.py +==================================== +The core module of the frameioclient +""" from .lib import ( APIClient, Telemetry, @@ -19,6 +24,79 @@ def me(self): def telemetry(self): return Telemetry(self) +<<<<<<< + +======= + self.adapter = HTTPAdapter(max_retries=self.retry_strategy) + self.session = requests.Session() + 
self.session.mount("https://", self.adapter) + + def _api_call(self, method, endpoint, payload={}, limit=None): + url = '{}/v2{}'.format(self.host, endpoint) + + r = self.session.request( + method, + url, + json=payload, + headers=self.headers, + ) + + if r.ok: + if r.headers.get('page-number'): + if int(r.headers.get('total-pages')) > 1: + return PaginatedResponse( + results=r.json(), + limit=limit, + page_size=r.headers['per-page'], + total_pages=r.headers['total-pages'], + total=r.headers['total'], + endpoint=endpoint, + method=method, + payload=payload, + client=self + ) + if isinstance(r.json(), list): + return r.json()[:limit] + return r.json() + + if r.status_code == 422 and "presentation" in endpoint: + raise PresentationException + + return r.raise_for_status() + + def get_specific_page(self, method, endpoint, payload, page): + """ + Gets a specific page for that endpoint, used by Pagination Class + + :Args: + method (string): 'get', 'post' + endpoint (string): endpoint ('/accounts//teams') + payload (dict): Request payload + page (int): What page to get + """ + if method == 'get': + # If we've already got a ? in the endpoint, then it has to be an & + if '?' in endpoint: + endpoint = '{}&page={}'.format(endpoint, page) + else: + endpoint = '{}?page={}'.format(endpoint, page) + return self._api_call(method, endpoint) + + if method == 'post': + payload['page'] = page + return self._api_call(method, endpoint, payload=payload) + + +class FrameioClient(FrameioConnection): + """[summary] + + Args: + FrameioConnection ([type]): [description] + + Returns: + [type]: [description] + """ +>>>>>>> @property def _auth(self): return self.token diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 0f57c926..ef2b56ea 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -138,6 +138,8 @@ def get_download_key(self): return url def download_handler(self): + """Call this to perform the actual download of your asset! + """ if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): print("Folder exists, don't need to create it") else: @@ -147,10 +149,17 @@ def download_handler(self): if os.path.isfile(self.get_path()) == False: pass +<<<<<<< if os.path.isfile(self.get_path()) and self.replace == True: os.remove(self.get_path()) if os.path.isfile(self.get_path()) and self.replace == False: +======= + def download_handler(self): + """Call this to perform the actual download of your asset! 
+ """ + if os.path.isfile(self.get_path()): +>>>>>>> print("File already exists at this location.") return self.destination diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 97d7f1ec..2edcb915 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -17,6 +17,15 @@ def __init__(self, asset=None, file=None): self.file_num = 0 def _calculate_chunks(self, total_size, chunk_count): + """Calculate chunk size + + Args: + total_size (int): Total filesize in bytes + chunk_count (int): Total number of URL's we got back from the API + + Returns: + chunk_offsets (list): List of chunk offsets + """ self.chunk_size = int(math.ceil(total_size / chunk_count)) chunk_offsets = list() diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 4c29d7ea..094519b0 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -7,18 +7,28 @@ MB = KB * KB +def Reference(*args, **kwargs): + print(kwargs['operation']) + def inner(func): + ''' + do operations with func + ''' + return func + return inner + class Utils: @staticmethod def stream(func, page=1, page_size=20): """ - Accepts a lambda of a call to a client list method, and streams the results until - the list has been exhausted + Accepts a lambda of a call to a client list method, and streams the results until \ + the list has been exhausted. - :Args: + Args: fun (function): A 1-arity function to apply during the stream - Example:: - stream(lambda pagination: client.get_collaborators(project_id, **pagination)) + Example:: + + stream(lambda pagination: client.get_collaborators(project_id, **pagination)) """ total_pages = page while page <= total_pages: diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py new file mode 100644 index 00000000..320ca89d --- /dev/null +++ b/frameioclient/service/assets.py @@ -0,0 +1,355 @@ +import os +import mimetypes + +from .service import Service +from .projects import Project + +from ..lib import FrameioUploader, FrameioDownloader, constants, Reference + +class Asset(Service): + def _build_asset_info(self, filepath): + full_path = os.path.abspath(filepath) + + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0] + } + + return file_info + + @Reference(operation="#getAsset") + def get(self, asset_id): + """ + Get an asset by id. + + Args: + asset_id (string): The asset id. + """ + endpoint = '/assets/{}'.format(asset_id) + return self.client._api_call('get', endpoint) + + @Reference(operation="#getAssets") + def get_children(self, asset_id, include=[], slim=False, **kwargs): + """ + Get a folder. + + Args: + asset_id (string): The asset id. + + :Keyword Arguments: + includes (list): List of includes you would like to add. + + Example:: + + client.assets.get_children( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + includes=['review_links','cover_asset','creator','presentation'] + ) + """ + endpoint = '/assets/{}/children'.format(asset_id) +<<<<<<< + +======= + + + if slim == True: + query_params = '' + + # Include children + query_params += '?' 
+ 'include=children,creator' + + # Only fields + query_params += '&' + 'only_fields=' + ','.join(constants.asset_excludes['only_fields']) + + # # Drop includes + query_params += '&' + 'drop_includes=' + ','.join(constants.asset_excludes['drop_includes']) + + # # Hard drop fields + query_params += '&' + 'hard_drop_fields=' + ','.join(constants.asset_excludes['hard_drop_fields']) + + # Excluded fields + # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) + + # # Sort by inserted_at + # query_params += '&' + 'sort=-inserted_at' + + endpoint += query_params + + # print("Final URL", endpoint) + + if len(include) > 0: + endpoint += '&include={}'.format(include.join(',')) + + return self.client._api_call('get', endpoint, kwargs) + + if len(include) > 0: + endpoint += '?include={}'.format(include.join(',')) + +>>>>>>> + return self.client._api_call('get', endpoint, kwargs) + + @Reference(operation="#createAsset") + def create(self, parent_asset_id, **kwargs): + """ + Create an asset. + + Args: + parent_asset_id (string): The parent asset id. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.assets.create( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + filetype="video/mp4", + filesize=123456 + ) + """ + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload=kwargs) + +<<<<<<< + +======= + @Reference(operation="#createAsset") + def create_folder(self, parent_asset_id, name="New Folder"): + """ + Create a new folder. + + Args: + parent_asset_id (string): The parent asset id. + name (string): The name of the new folder. + + Example:: + + client.assets.create_folder( + parent_asset_id="123abc", + name="ExampleFile.mp4", + ) + """ + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) + + @Reference(operation="#createAsset") +>>>>>>> + def from_url(self, parent_asset_id, name, url): + """ + Create an asset from a URL. + + Args: + parent_asset_id (str): The parent asset id. + name (str): The filename. + url (str): The remote URL. + + Example:: + + client.assets.from_url( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + url="https://" + ) + """ + + payload = { + "name": name, + "type": "file", + "source": { + "url": url + } + } + + endpoint = '/assets/{}/children'.format(parent_asset_id) + return self.client._api_call('post', endpoint, payload=payload) + + @Reference(operation="#updateAsset") + def update(self, asset_id, **kwargs): + """ + Updates an asset + + Args: + asset_id (string): the asset's id + + :Keyword Arguments: + kwargs (optional): fields and values you wish to update + + Example:: + + client.assets.update("adeffee123342", name="updated_filename.mp4") + """ + endpoint = '/assets/{}'.format(asset_id) + return self.client._api_call('put', endpoint, kwargs) + + @Reference(operation="#copyAsset") + def copy(self, destination_folder_id, **kwargs): + """ + Copy an Asset + + Args: + destination_folder_id (str): The id of the folder you want to copy into. + + :Keyword Arguments: + id (str): The id of the asset you want to copy. 
+ + Example:: + + client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") + """ + endpoint = '/assets/{}/copy'.format(destination_folder_id) + return self.client._api_call('post', endpoint, kwargs) + +<<<<<<< + def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): +======= + @Reference(operation="#batchCopyAsset") + def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): +>>>>>>> + """Bulk copy assets + + Args: + destination_folder_id (string): The id of the folder you want to copy into. + + :Keyword Arguments: + asset_list (list): A list of the asset IDs you want to copy. + copy_comments (boolean): Whether or not to copy comments: True or False. + + Example:: + + client.assets.bulk_copy("adeffee123342", + asset_list=[ + "7ee008c5-49a2-f8b5-997d-8b64de153c30", + "7ee008c5-49a2-f8b5-997d-8b64de153c30" + ], + copy_comments=True + ) + """ + + payload = {"batch": []} + new_list = list() + + if copy_comments: + payload['copy_comments'] = "all" + + for asset in asset_list: + payload['batch'].append({"id": asset}) + + endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) + return self.client._api_call('post', endpoint, payload) + + @Reference(operation="#deleteAsset") + def delete(self, asset_id): + """ + Delete an asset + + Args: + asset_id (string): the asset's id + """ + endpoint = '/assets/{}'.format(asset_id) + return self.client._api_call('delete', endpoint) + + def _upload(self, asset, file): + """ + Upload an asset. The method will exit once the file is uploaded. + + Args: + asset (object): The asset object. + file (file): The file to upload. + + Example:: + + client._upload(asset, open('example.mp4')) + """ + + uploader = FrameioUploader(asset, file) + uploader.upload() + + # def upload_folder(sFelf, destination_id, folderpath): + # try: + # if os.path.isdir(folderpath): + # # Good it's a directory, we can keep going + # pass + + # except OSError: + # if not os.path.exists(folderpath): + # sys.exit("Folder doesn't exist, exiting...") + + def build_asset_info(self, filepath): + full_path = os.path.abspath(filepath) + + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0] + } + + return file_info + + def upload(self, destination_id, filepath, asset=None): + """ + Upload a file. The method will exit once the file is downloaded. + + Args: + destination_id (uuid): The destination Project or Folder ID. + filepath (string): The locaiton of the file on your local filesystem \ + that you want to upload. 
+ + Example:: + + client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") + """ + + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload + + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)['id'] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] + finally: + file_info = self.build_asset_info(filepath) + + if not asset: + try: + asset = self.create(folder_id, + type="file", + name=file_info['filename'], + filetype=file_info['mimetype'], + filesize=file_info['filesize'] + ) + + except Exception as e: + print(e) + + try: + with open(file_info['filepath'], "rb") as fp: + self._upload(asset, fp) + + except Exception as e: + print(e) + + return asset + + def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5): + """ + Download an asset. The method will exit once the file is downloaded. + + Args: + asset (object): The asset object. + download_folder (path): The location to download the file to. + + Example:: + + client.assets.download(asset, "~./Downloads") + """ + downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, concurrency) + return downloader.download_handler() diff --git a/frameioclient/service/projects.py b/frameioclient/service/projects.py new file mode 100644 index 00000000..e8a78942 --- /dev/null +++ b/frameioclient/service/projects.py @@ -0,0 +1,134 @@ +from .service import Service +from .helpers import FrameioHelpers + +class Project(Service): + def create(self, team_id, **kwargs): + """Create a project. + + Args: + team_id (string): The team id. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.projects.create( + team_id="123", + name="My Awesome Project" + ) + """ + endpoint = '/teams/{}/projects'.format(team_id) + return self.client._api_call('post', endpoint, payload=kwargs) + + def get(self, project_id): + """ + Get an individual project + + Args: + project_id (string): The project's id + + Example:: + + client.project.get( + project_id="123" + ) + """ + endpoint = '/projects/{}'.format(project_id) + return self.client._api_call('get', endpoint) + + def tree(self, project_id, slim): + """ + Fetch a tree representation of all files/folders in a project. + + Args: + project_id (string): The project's id + slim (bool): If true, fetch only the minimum information for the following: \ + filename, \ + filesize, \ + thumbnail, \ + creator_id, \ + inserted_at (date created), \ + path (represented like a filesystem) + + Example:: + + client.projects.get( + project_id="123", + slim=True + ) + """ + # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) + # return self.client._api_call('get', endpoint) + + return FrameioHelpers(self.client).build_project_tree(project_id, slim) + + + def get_collaborators(self, project_id, **kwargs): + """ + Get collaborators for a project. + + Args: + project_id (uuid): The project's id. 
+ + Example:: + + client.projects.get_collaborators( + project_id="123" + ) + """ + endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) + return self.client._api_call('get', endpoint, kwargs) + + def get_pending_collaborators(self, project_id, **kwargs): + """ + Get pending collaborators for a project. + + Args: + project_id (uuid): The project's id. + + Example:: + + client.projects.get_pending_collaborators( + project_id="123" + ) + """ + endpoint = "/projects/{}/pending_collaborators".format(project_id) + return self.client._api_call('get', endpoint, kwargs) + + def add_collaborator(self, project_id, email): + """ + Add Collaborator to a Project Collaborator. + + Args: + project_id (uuid): The project id. + email (string): Email user's e-mail address. + + Example:: + + client.projects.add_collaborator( + project_id="123", + email="janedoe@frame.io" + ) + """ + payload = {"email": email} + endpoint = '/projects/{}/collaborators'.format(project_id) + return self._api_call('post', endpoint, payload=payload) + + def remove_collaborator(self, project_id, email): + """ + Remove Collaborator from Project. + + Args: + project_id (uuid): The Project ID. + email (string): The user's e-mail address. + + Example:: + + client.projects.remove_collaborator( + project_id="123", + email="janedoe@frame.io" + ) + """ + endpoint = '/projects/{}/collaborators/_?email={}'.format(project_id, email) + return self._api_call('delete', endpoint) diff --git a/frameioclient/service/service.py b/frameioclient/service/service.py new file mode 100644 index 00000000..a2ffa123 --- /dev/null +++ b/frameioclient/service/service.py @@ -0,0 +1,5 @@ +from ..client import FrameioClient + +class Service(object): + def __init__(self, client: FrameioClient): + self.client = client diff --git a/frameioclient/services/comments.py b/frameioclient/services/comments.py index a3fde975..36603281 100644 --- a/frameioclient/services/comments.py +++ b/frameioclient/services/comments.py @@ -5,17 +5,18 @@ def create(self, asset_id, **kwargs): """ Create a comment. - :Args: + Args: asset_id (string): The asset id. - :Kwargs: + + :Keyword Arguments: (optional) kwargs: additional request parameters. - Example:: + Example:: - client.comments.create( - asset_id="123abc", - text="Hello world" - ) + client.comments.create( + asset_id="123abc", + text="Hello world" + ) """ endpoint = '/assets/{}/comments'.format(asset_id) return self.client._api_call('post', endpoint, payload=kwargs) @@ -24,7 +25,7 @@ def get(self, comment_id, **kwargs): """ Get a comment. - :Args: + Args: comment_id (string): The comment id. """ endpoint = '/comments/{}'.format(comment_id) @@ -34,7 +35,7 @@ def list(self, asset_id, **kwargs): """ Get an asset's comments. - :Args: + Args: asset_id (string): The asset id. """ endpoint = '/assets/{}/comments'.format(asset_id) @@ -44,17 +45,18 @@ def update(self, comment_id, **kwargs): """ Update a comment. - :Args: + Args: comment_id (string): The comment id. - :Kwargs: + + :Keyword Arguments: (optional) kwargs: additional request parameters. - Example:: + Example:: - client.comments.update( - comment_id="123abc", - text="Hello world" - ) + client.comments.update( + comment_id="123abc", + text="Hello world" + ) """ endpoint = '/comments/{}'.format(comment_id) return self.client._api_call('post', endpoint, payload=kwargs) @@ -63,7 +65,7 @@ def delete(self, comment_id): """ Delete a comment. - :Args: + Args: comment_id (string): The comment id. 
""" endpoint = '/comments/{}'.format(comment_id) @@ -73,17 +75,18 @@ def reply(self, comment_id, **kwargs): """ Reply to an existing comment. - :Args: + Args: comment_id (string): The comment id. - :Kwargs: + + :Keyword Arguments: (optional) kwargs: additional request parameters. - Example:: + Example:: - client.comments.reply( - comment_id="123abc", - text="Hello world" - ) + client.comments.reply( + comment_id="123abc", + text="Hello world" + ) """ endpoint = '/comments/{}/replies'.format(comment_id) return self.client._api_call('post', endpoint, payload=kwargs) diff --git a/frameioclient/services/links.py b/frameioclient/services/links.py index b617a36d..127860b0 100644 --- a/frameioclient/services/links.py +++ b/frameioclient/services/links.py @@ -5,18 +5,19 @@ def create(self, project_id, **kwargs): """ Create a review link. - :Args: + Args: project_id (string): The project id. - :Kwargs: + + :Keyword Arguments: kwargs: additional request parameters. - Example:: + Example:: - client.review_links.create( - project_id="123", - name="My Review Link", - password="abc123" - ) + client.review_links.create( + project_id="123", + name="My Review Link", + password="abc123" + ) """ endpoint = '/projects/{}/review_links'.format(project_id) return self.client._api_call('post', endpoint, payload=kwargs) @@ -25,7 +26,7 @@ def list(self, project_id): """ Get the review links of a project - :Args: + Args: asset_id (string): The asset id. """ endpoint = '/projects/{}/review_links'.format(project_id) @@ -35,7 +36,7 @@ def get(self, link_id, **kwargs): """ Get a single review link - :Args: + Args: link_id (string): The review link id. """ endpoint = '/review_links/{}'.format(link_id) @@ -45,14 +46,14 @@ def get_assets(self, link_id): """ Get items from a single review link. - :Args: + Args: link_id (string): The review link id. - Example:: + Example:: - client.review_links.get_assets( - link_id="123" - ) + client.review_links.get_assets( + link_id="123" + ) """ endpoint = '/review_links/{}/items'.format(link_id) return self.client._api_call('get', endpoint) @@ -61,17 +62,18 @@ def update_assets(self, link_id, **kwargs): """ Add or update assets for a review link. - :Args: + Args: link_id (string): The review link id. - :Kwargs: + + :Keyword Arguments: kwargs: additional request parameters. - Example:: + Example:: - client.review_links.update_assets( - link_id="123", - asset_ids=["abc","def"] - ) + client.review_links.update_assets( + link_id="123", + asset_ids=["abc","def"] + ) """ endpoint = '/review_links/{}/assets'.format(link_id) return self.client._api_call('post', endpoint, payload=kwargs) @@ -80,20 +82,21 @@ def update_settings(self, link_id, **kwargs): """ Updates review link settings. - :Args: + Args: link_id (string): The review link id. - :Kwargs: + + :Keyword Arguments: kwargs: additional request parameters. - Example:: + Example:: - client.review_links.update_settings( - link_id, - expires_at="2020-04-08T12:00:00+00:00", - is_active=False, - name="Review Link 123", - password="my_fun_password", - ) + client.review_links.update_settings( + link_id, + expires_at="2020-04-08T12:00:00+00:00", + is_active=False, + name="Review Link 123", + password="my_fun_password", + ) """ endpoint = '/review_links/{}'.format(link_id) return self.client._api_call('put', endpoint, payload=kwargs) @@ -104,18 +107,19 @@ def create(self, asset_id, **kwargs): """ Create a presentation link. - :Args: + Args: asset_id (string): The asset id. - :Kwargs: + + :Keyword Arguments: kwargs: additional request parameters. 
- Example:: + Example:: - client.presentation_links.create( - asset_id="9cee7966-4066-b326-7db1-f9e6f5e929e4", - title="My fresh presentation", - password="abc123" - ) + client.presentation_links.create( + asset_id="9cee7966-4066-b326-7db1-f9e6f5e929e4", + title="My fresh presentation", + password="abc123" + ) """ endpoint = '/assets/{}/presentations'.format(asset_id) return self.client._api_call('post', endpoint, payload=kwargs) diff --git a/frameioclient/services/logs.py b/frameioclient/services/logs.py index 3dba9af9..25642620 100644 --- a/frameioclient/services/logs.py +++ b/frameioclient/services/logs.py @@ -5,13 +5,17 @@ def list(self, account_id): """ Get audit logs for the currently authenticated account. - :Args: + Args: + account_id (uuid): Account ID you want to get audit logs for. - Example:: + Example:: - client.logs.list( - account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024" - ) + client.logs.list( + account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024" + ) + + Returns: + list: List of audit logs. """ endpoint = '/accounts/{}/audit_logs'.format(account_id) return self.client._api_call('get', endpoint) diff --git a/frameioclient/services/teams.py b/frameioclient/services/teams.py index c515c14a..9df71294 100644 --- a/frameioclient/services/teams.py +++ b/frameioclient/services/teams.py @@ -6,17 +6,18 @@ def create(self, account_id, **kwargs): """ Create a Team - :Args: - account_id (string): The account id you want to create this Team under. - :Kwargs: + Args: + account_id (string): The account id you want to create this team under. + + :Keyword Arguments:: (optional) kwargs: additional request parameters. - Example:: + Example:: - client.teams.create( - account_id="6bdcb4d9-4548-4548-4548-27a6c024ae6b", - name="My Awesome Project", - ) + client.teams.create( + account_id="6bdcb4d9-4548-4548-4548-27a6c024ae6b", + name="My Awesome Project", + ) """ warnings.warn('Note: Your token must support team.create scopes') endpoint = '/accounts/{}/teams'.format(account_id) @@ -24,10 +25,10 @@ def create(self, account_id, **kwargs): def list(self, account_id, **kwargs): """ - Get teams owned by the specified account. - (To return all teams, use list_all()) + Get teams owned by the specified account. \ + (To return all teams, use list_all()) - :Args: + Args: account_id (string): The account id. """ endpoint = '/accounts/{}/teams'.format(account_id) @@ -37,7 +38,7 @@ def list_all(self, **kwargs): """ Get all teams for the authenticated user. - :Args: + Args: account_id (string): The account id. """ endpoint = '/teams' @@ -47,39 +48,39 @@ def get(self, team_id): """ Get team by id - :Args: - team_id (string): the Team's id + Args: + team_id (string): the team's id """ endpoint = '/teams/{}'.format(team_id) return self.client._api_call('get', endpoint) def get_members(self, team_id): """ - Get the member list for a given Team. + Get the member list for a given team. - :Args: - team_id (string): The Team id. + Args: + team_id (string): The team id. """ endpoint = '/teams/{}/members'.format(team_id) return self.client._api_call('get', endpoint) def list_projects(self, team_id, **kwargs): """ - Get projects owned by the Team. + Get projects owned by the team. - :Args: - team_id (string): The Team id. + Args: + team_id (string): The team id. """ endpoint = '/teams/{}/projects'.format(team_id) return self.client._api_call('get', endpoint, kwargs) def add_members(self, team_id, emails): """ - Add a list of users via their e-mail address to a given Team. 
+ Add a list of users via their e-mail address to a given team. - :Args: - team_id (string): The team id. - emails (list): The e-mails you want to add. + Args: + team_id (string): The team id. + emails (list): The e-mails you want to add. """ payload = dict() payload['batch'] = list(map(lambda email: {"email": email}, emails)) @@ -89,11 +90,11 @@ def add_members(self, team_id, emails): def remove_members(self, team_id, emails): """ - Remove a list of users via their e-mail address from a given Team. + Remove a list of users via their e-mail address from a given team. - :Args: - team_id (string): The team id. - emails (list): The e-mails you want to add. + Args: + team_id (string): The team id. + emails (list): The e-mails you want to add. """ payload = dict() diff --git a/setup.py b/setup.py index a378aded..660e0315 100644 --- a/setup.py +++ b/setup.py @@ -39,6 +39,8 @@ def run(self): extras_require={ 'dev': [ 'bump2version', + 'sphinx', + 'sphinx-jekyll-builder' ] }, entry_points ={ From 8f9dc6f5d1ca2af55e452543acc135fbba6101fe Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 21:02:20 -0700 Subject: [PATCH 18/99] Resolve conflicts post-merge --- frameioclient/lib/__init__.py | 2 +- frameioclient/service/assets.py | 355 ------------------------------ frameioclient/service/projects.py | 134 ----------- frameioclient/service/service.py | 5 - frameioclient/services/assets.py | 11 +- 5 files changed, 11 insertions(+), 496 deletions(-) delete mode 100644 frameioclient/service/assets.py delete mode 100644 frameioclient/service/projects.py delete mode 100644 frameioclient/service/service.py diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index 64d35e8d..6303df14 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -6,4 +6,4 @@ from .upload import FrameioUploader from .download import FrameioDownloader from .transport import AWSClient, APIClient -from .utils import Utils, PaginatedResponse, KB, MB +from .utils import Utils, PaginatedResponse, KB, MB, Reference diff --git a/frameioclient/service/assets.py b/frameioclient/service/assets.py deleted file mode 100644 index 320ca89d..00000000 --- a/frameioclient/service/assets.py +++ /dev/null @@ -1,355 +0,0 @@ -import os -import mimetypes - -from .service import Service -from .projects import Project - -from ..lib import FrameioUploader, FrameioDownloader, constants, Reference - -class Asset(Service): - def _build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - - @Reference(operation="#getAsset") - def get(self, asset_id): - """ - Get an asset by id. - - Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('get', endpoint) - - @Reference(operation="#getAssets") - def get_children(self, asset_id, include=[], slim=False, **kwargs): - """ - Get a folder. - - Args: - asset_id (string): The asset id. - - :Keyword Arguments: - includes (list): List of includes you would like to add. 
- - Example:: - - client.assets.get_children( - asset_id='1231-12414-afasfaf-aklsajflaksjfla', - includes=['review_links','cover_asset','creator','presentation'] - ) - """ - endpoint = '/assets/{}/children'.format(asset_id) -<<<<<<< - -======= - - - if slim == True: - query_params = '' - - # Include children - query_params += '?' + 'include=children,creator' - - # Only fields - query_params += '&' + 'only_fields=' + ','.join(constants.asset_excludes['only_fields']) - - # # Drop includes - query_params += '&' + 'drop_includes=' + ','.join(constants.asset_excludes['drop_includes']) - - # # Hard drop fields - query_params += '&' + 'hard_drop_fields=' + ','.join(constants.asset_excludes['hard_drop_fields']) - - # Excluded fields - # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) - - # # Sort by inserted_at - # query_params += '&' + 'sort=-inserted_at' - - endpoint += query_params - - # print("Final URL", endpoint) - - if len(include) > 0: - endpoint += '&include={}'.format(include.join(',')) - - return self.client._api_call('get', endpoint, kwargs) - - if len(include) > 0: - endpoint += '?include={}'.format(include.join(',')) - ->>>>>>> - return self.client._api_call('get', endpoint, kwargs) - - @Reference(operation="#createAsset") - def create(self, parent_asset_id, **kwargs): - """ - Create an asset. - - Args: - parent_asset_id (string): The parent asset id. - - :Keyword Arguments: - (optional) kwargs: additional request parameters. - - Example:: - - client.assets.create( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - filetype="video/mp4", - filesize=123456 - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) - -<<<<<<< - -======= - @Reference(operation="#createAsset") - def create_folder(self, parent_asset_id, name="New Folder"): - """ - Create a new folder. - - Args: - parent_asset_id (string): The parent asset id. - name (string): The name of the new folder. - - Example:: - - client.assets.create_folder( - parent_asset_id="123abc", - name="ExampleFile.mp4", - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) - - @Reference(operation="#createAsset") ->>>>>>> - def from_url(self, parent_asset_id, name, url): - """ - Create an asset from a URL. - - Args: - parent_asset_id (str): The parent asset id. - name (str): The filename. - url (str): The remote URL. - - Example:: - - client.assets.from_url( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - url="https://" - ) - """ - - payload = { - "name": name, - "type": "file", - "source": { - "url": url - } - } - - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=payload) - - @Reference(operation="#updateAsset") - def update(self, asset_id, **kwargs): - """ - Updates an asset - - Args: - asset_id (string): the asset's id - - :Keyword Arguments: - kwargs (optional): fields and values you wish to update - - Example:: - - client.assets.update("adeffee123342", name="updated_filename.mp4") - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('put', endpoint, kwargs) - - @Reference(operation="#copyAsset") - def copy(self, destination_folder_id, **kwargs): - """ - Copy an Asset - - Args: - destination_folder_id (str): The id of the folder you want to copy into. 
- - :Keyword Arguments: - id (str): The id of the asset you want to copy. - - Example:: - - client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") - """ - endpoint = '/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, kwargs) - -<<<<<<< - def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): -======= - @Reference(operation="#batchCopyAsset") - def bulk_copy(self, destination_folder_id, asset_list, copy_comments=False): ->>>>>>> - """Bulk copy assets - - Args: - destination_folder_id (string): The id of the folder you want to copy into. - - :Keyword Arguments: - asset_list (list): A list of the asset IDs you want to copy. - copy_comments (boolean): Whether or not to copy comments: True or False. - - Example:: - - client.assets.bulk_copy("adeffee123342", - asset_list=[ - "7ee008c5-49a2-f8b5-997d-8b64de153c30", - "7ee008c5-49a2-f8b5-997d-8b64de153c30" - ], - copy_comments=True - ) - """ - - payload = {"batch": []} - new_list = list() - - if copy_comments: - payload['copy_comments'] = "all" - - for asset in asset_list: - payload['batch'].append({"id": asset}) - - endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, payload) - - @Reference(operation="#deleteAsset") - def delete(self, asset_id): - """ - Delete an asset - - Args: - asset_id (string): the asset's id - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('delete', endpoint) - - def _upload(self, asset, file): - """ - Upload an asset. The method will exit once the file is uploaded. - - Args: - asset (object): The asset object. - file (file): The file to upload. - - Example:: - - client._upload(asset, open('example.mp4')) - """ - - uploader = FrameioUploader(asset, file) - uploader.upload() - - # def upload_folder(sFelf, destination_id, folderpath): - # try: - # if os.path.isdir(folderpath): - # # Good it's a directory, we can keep going - # pass - - # except OSError: - # if not os.path.exists(folderpath): - # sys.exit("Folder doesn't exist, exiting...") - - def build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - - def upload(self, destination_id, filepath, asset=None): - """ - Upload a file. The method will exit once the file is downloaded. - - Args: - destination_id (uuid): The destination Project or Folder ID. - filepath (string): The locaiton of the file on your local filesystem \ - that you want to upload. 
- - Example:: - - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ - - # Check if destination is a project or folder - # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided - # Then we start our upload - - try: - # First try to grab it as a folder - folder_id = self.get(destination_id)['id'] - except Exception as e: - # Then try to grab it as a project - folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] - finally: - file_info = self.build_asset_info(filepath) - - if not asset: - try: - asset = self.create(folder_id, - type="file", - name=file_info['filename'], - filetype=file_info['mimetype'], - filesize=file_info['filesize'] - ) - - except Exception as e: - print(e) - - try: - with open(file_info['filepath'], "rb") as fp: - self._upload(asset, fp) - - except Exception as e: - print(e) - - return asset - - def download(self, asset, download_folder, prefix=None, multi_part=False, concurrency=5): - """ - Download an asset. The method will exit once the file is downloaded. - - Args: - asset (object): The asset object. - download_folder (path): The location to download the file to. - - Example:: - - client.assets.download(asset, "~./Downloads") - """ - downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, concurrency) - return downloader.download_handler() diff --git a/frameioclient/service/projects.py b/frameioclient/service/projects.py deleted file mode 100644 index e8a78942..00000000 --- a/frameioclient/service/projects.py +++ /dev/null @@ -1,134 +0,0 @@ -from .service import Service -from .helpers import FrameioHelpers - -class Project(Service): - def create(self, team_id, **kwargs): - """Create a project. - - Args: - team_id (string): The team id. - - :Keyword Arguments: - (optional) kwargs: additional request parameters. - - Example:: - - client.projects.create( - team_id="123", - name="My Awesome Project" - ) - """ - endpoint = '/teams/{}/projects'.format(team_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def get(self, project_id): - """ - Get an individual project - - Args: - project_id (string): The project's id - - Example:: - - client.project.get( - project_id="123" - ) - """ - endpoint = '/projects/{}'.format(project_id) - return self.client._api_call('get', endpoint) - - def tree(self, project_id, slim): - """ - Fetch a tree representation of all files/folders in a project. - - Args: - project_id (string): The project's id - slim (bool): If true, fetch only the minimum information for the following: \ - filename, \ - filesize, \ - thumbnail, \ - creator_id, \ - inserted_at (date created), \ - path (represented like a filesystem) - - Example:: - - client.projects.get( - project_id="123", - slim=True - ) - """ - # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) - # return self.client._api_call('get', endpoint) - - return FrameioHelpers(self.client).build_project_tree(project_id, slim) - - - def get_collaborators(self, project_id, **kwargs): - """ - Get collaborators for a project. - - Args: - project_id (uuid): The project's id. 
- - Example:: - - client.projects.get_collaborators( - project_id="123" - ) - """ - endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def get_pending_collaborators(self, project_id, **kwargs): - """ - Get pending collaborators for a project. - - Args: - project_id (uuid): The project's id. - - Example:: - - client.projects.get_pending_collaborators( - project_id="123" - ) - """ - endpoint = "/projects/{}/pending_collaborators".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def add_collaborator(self, project_id, email): - """ - Add Collaborator to a Project Collaborator. - - Args: - project_id (uuid): The project id. - email (string): Email user's e-mail address. - - Example:: - - client.projects.add_collaborator( - project_id="123", - email="janedoe@frame.io" - ) - """ - payload = {"email": email} - endpoint = '/projects/{}/collaborators'.format(project_id) - return self._api_call('post', endpoint, payload=payload) - - def remove_collaborator(self, project_id, email): - """ - Remove Collaborator from Project. - - Args: - project_id (uuid): The Project ID. - email (string): The user's e-mail address. - - Example:: - - client.projects.remove_collaborator( - project_id="123", - email="janedoe@frame.io" - ) - """ - endpoint = '/projects/{}/collaborators/_?email={}'.format(project_id, email) - return self._api_call('delete', endpoint) diff --git a/frameioclient/service/service.py b/frameioclient/service/service.py deleted file mode 100644 index a2ffa123..00000000 --- a/frameioclient/service/service.py +++ /dev/null @@ -1,5 +0,0 @@ -from ..client import FrameioClient - -class Service(object): - def __init__(self, client: FrameioClient): - self.client = client diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index 064406a0..5d2de6b8 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -4,7 +4,7 @@ from .projects import Project from ..lib.service import Service -from ..lib import FrameioUploader, FrameioDownloader, constants +from ..lib import FrameioUploader, FrameioDownloader, constants, Reference class Asset(Service): def _build_asset_info(self, filepath): @@ -19,6 +19,7 @@ def _build_asset_info(self, filepath): return file_info + @Reference(operation="#getAsset") def get(self, asset_id): """ Get an asset by id. @@ -29,6 +30,7 @@ def get(self, asset_id): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('get', endpoint) + @Reference(operation="#getAssets") def get_children(self, asset_id, include=[], slim=False, **kwargs): """ Get a folder. @@ -78,6 +80,7 @@ def get_children(self, asset_id, include=[], slim=False, **kwargs): return self.client._api_call('get', endpoint, kwargs) + @Reference(operation="#createAsset") def create(self, parent_asset_id, **kwargs): """ Create an asset. @@ -100,6 +103,7 @@ def create(self, parent_asset_id, **kwargs): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=kwargs) + @Reference(operation="#createAsset") def create_folder(self, parent_asset_id, name="New Folder"): """ Create a new folder. 
@@ -118,6 +122,7 @@ def create_folder(self, parent_asset_id, name="New Folder"): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) + @Reference(operation="#createAsset") def from_url(self, parent_asset_id, name, url): """ Create an asset from a URL. @@ -147,6 +152,7 @@ def from_url(self, parent_asset_id, name, url): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=payload) + @Reference(operation="#updateAsset") def update(self, asset_id, **kwargs): """ Updates an asset @@ -162,6 +168,7 @@ def update(self, asset_id, **kwargs): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('put', endpoint, kwargs) + @Reference(operation="#copyAsset") def copy(self, destination_folder_id, **kwargs): """ Copy an asset @@ -177,6 +184,7 @@ def copy(self, destination_folder_id, **kwargs): endpoint = '/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, kwargs) + @Reference(operation="#batchCopyAsset") def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): """Bulk copy assets @@ -201,6 +209,7 @@ def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, payload) + @Reference(operation="#deleteAsset") def delete(self, asset_id): """ Delete an asset From 10261c0c79ce953586892326a96594721f02becb Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 21:04:03 -0700 Subject: [PATCH 19/99] Fix CI config --- .circleci/config.yml | 29 ++++++++++++++++++++++++++++- 1 file changed, 28 insertions(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 2d510f62..d589e07f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -35,7 +35,7 @@ workflows: - docs: requires: - # - deploy + - deploy - build # upload_test: @@ -138,3 +138,30 @@ jobs: cd /tmp/artifact/docs python publish.py + deploy: + docker: + - image: circleci/python:latest + + steps: + - attach_workspace: + at: /tmp/artifact + name: Attach build artifact + + - run: + name: Install dependencies + command: | + pip install setuptools wheel twine + + - run: + name: init .pypirc + command: | + cd /tmp/artifact + echo -e "[pypi]" >> ~/.pypirc + echo -e "username = $TWINE_USERNAME" >> ~/.pypirc + echo -e "password = $TWINE_PASSWORD" >> ~/.pypirc + + - run: + name: Upload to pypi + command: | + cd /tmp/artifact + twine upload dist/* From 97fefecae14ddc2ebba58e8fae4e7a50f7ebee67 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 21:08:26 -0700 Subject: [PATCH 20/99] Restore streaming download (RAM friendly) --- frameioclient/lib/download.py | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index ef2b56ea..4883053c 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -140,31 +140,29 @@ def get_download_key(self): def download_handler(self): """Call this to perform the actual download of your asset! 
""" + + # Check folders if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): print("Folder exists, don't need to create it") else: print("Destination folder not found, creating") os.mkdir(self.download_folder) + # Check files if os.path.isfile(self.get_path()) == False: pass -<<<<<<< if os.path.isfile(self.get_path()) and self.replace == True: os.remove(self.get_path()) if os.path.isfile(self.get_path()) and self.replace == False: -======= - def download_handler(self): - """Call this to perform the actual download of your asset! - """ - if os.path.isfile(self.get_path()): ->>>>>>> print("File already exists at this location.") return self.destination + # Get URL url = self.get_download_key() + # Handle watermarking if self.watermarked == True: return self.single_part_download(url) else: @@ -180,6 +178,10 @@ def single_part_download(self, url): start_time = time.time() print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) + # Downloading + r = self.session.get(url, stream=True) + open(self.destination, "wb").write(r.content) + # Downloading with open(self.destination, 'wb') as handle: try: From 66e72ecba2acb3ee0bcb8ad2abb12d4438308f08 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 21:12:37 -0700 Subject: [PATCH 21/99] Remove merge straggler --- frameioclient/client.py | 74 ----------------------------------------- 1 file changed, 74 deletions(-) diff --git a/frameioclient/client.py b/frameioclient/client.py index ddb0f976..6312a598 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -9,7 +9,6 @@ ClientVersion, ClientVersion, FrameioDownloader, - PresentationException ) class FrameioClient(APIClient, object): @@ -24,79 +23,6 @@ def me(self): def telemetry(self): return Telemetry(self) -<<<<<<< - -======= - self.adapter = HTTPAdapter(max_retries=self.retry_strategy) - self.session = requests.Session() - self.session.mount("https://", self.adapter) - - def _api_call(self, method, endpoint, payload={}, limit=None): - url = '{}/v2{}'.format(self.host, endpoint) - - r = self.session.request( - method, - url, - json=payload, - headers=self.headers, - ) - - if r.ok: - if r.headers.get('page-number'): - if int(r.headers.get('total-pages')) > 1: - return PaginatedResponse( - results=r.json(), - limit=limit, - page_size=r.headers['per-page'], - total_pages=r.headers['total-pages'], - total=r.headers['total'], - endpoint=endpoint, - method=method, - payload=payload, - client=self - ) - if isinstance(r.json(), list): - return r.json()[:limit] - return r.json() - - if r.status_code == 422 and "presentation" in endpoint: - raise PresentationException - - return r.raise_for_status() - - def get_specific_page(self, method, endpoint, payload, page): - """ - Gets a specific page for that endpoint, used by Pagination Class - - :Args: - method (string): 'get', 'post' - endpoint (string): endpoint ('/accounts//teams') - payload (dict): Request payload - page (int): What page to get - """ - if method == 'get': - # If we've already got a ? in the endpoint, then it has to be an & - if '?' 
in endpoint: - endpoint = '{}&page={}'.format(endpoint, page) - else: - endpoint = '{}?page={}'.format(endpoint, page) - return self._api_call(method, endpoint) - - if method == 'post': - payload['page'] = page - return self._api_call(method, endpoint, payload=payload) - - -class FrameioClient(FrameioConnection): - """[summary] - - Args: - FrameioConnection ([type]): [description] - - Returns: - [type]: [description] - """ ->>>>>>> @property def _auth(self): return self.token From 3649ce6ba1a5329973e2a76bfe7b3e06c8d09a13 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 21:17:29 -0700 Subject: [PATCH 22/99] Refactor examples, and add one more for RBC --- examples/{ => assets}/asset_scraper.py | 0 examples/{ => assets}/asset_tree.py | 0 examples/{ => assets}/recursive_upload.py | 0 examples/{ => assets}/upload_asset.py | 0 examples/{ => comments}/comment_scraper.py | 0 examples/comments/range_based_comment.py | 15 +++++++++++++++ examples/new_tests.py | 20 -------------------- examples/{ => projects}/download_project.py | 0 examples/{ => projects}/project_tree.py | 0 examples/{ => users}/get_me.py | 0 examples/{ => users}/invite_users.py | 0 examples/{ => users}/user_management.py | 0 12 files changed, 15 insertions(+), 20 deletions(-) rename examples/{ => assets}/asset_scraper.py (100%) rename examples/{ => assets}/asset_tree.py (100%) rename examples/{ => assets}/recursive_upload.py (100%) rename examples/{ => assets}/upload_asset.py (100%) rename examples/{ => comments}/comment_scraper.py (100%) create mode 100644 examples/comments/range_based_comment.py delete mode 100644 examples/new_tests.py rename examples/{ => projects}/download_project.py (100%) rename examples/{ => projects}/project_tree.py (100%) rename examples/{ => users}/get_me.py (100%) rename examples/{ => users}/invite_users.py (100%) rename examples/{ => users}/user_management.py (100%) diff --git a/examples/asset_scraper.py b/examples/assets/asset_scraper.py similarity index 100% rename from examples/asset_scraper.py rename to examples/assets/asset_scraper.py diff --git a/examples/asset_tree.py b/examples/assets/asset_tree.py similarity index 100% rename from examples/asset_tree.py rename to examples/assets/asset_tree.py diff --git a/examples/recursive_upload.py b/examples/assets/recursive_upload.py similarity index 100% rename from examples/recursive_upload.py rename to examples/assets/recursive_upload.py diff --git a/examples/upload_asset.py b/examples/assets/upload_asset.py similarity index 100% rename from examples/upload_asset.py rename to examples/assets/upload_asset.py diff --git a/examples/comment_scraper.py b/examples/comments/comment_scraper.py similarity index 100% rename from examples/comment_scraper.py rename to examples/comments/comment_scraper.py diff --git a/examples/comments/range_based_comment.py b/examples/comments/range_based_comment.py new file mode 100644 index 00000000..4e0c75c4 --- /dev/null +++ b/examples/comments/range_based_comment.py @@ -0,0 +1,15 @@ +import os +from frameioclient import FrameioClient + +def leave_range_based_comment(asset_id, comment): + client = FrameioClient(os.getenv("FRAME_IO_TOKEN")) + res = client.create_comment( + asset_id=asset_id, + text="This is my range based comment", + timestamp=1911, + duration=3.5 + ) + + +if __name__ == "__main__": + leave_range_based_comment("id", "this is my comment!") \ No newline at end of file diff --git a/examples/new_tests.py b/examples/new_tests.py deleted file mode 100644 index 7b1fac6e..00000000 --- 
a/examples/new_tests.py +++ /dev/null @@ -1,20 +0,0 @@ -import os -from pprint import pprint - -from frameioclient import FrameioClient, Asset, ClientVersion - - -token = os.getenv('FRAMEIO_TOKEN') -client = FrameioClient(token) -folder_id = 'dd8526ee-2c7d-4b48-9bf7-b847664666bb' -file_path = '/Users/jeff/Code/python-frameio-client/examples/downloads/accelerated_Test_Chart_5_Sec_embedded_meta_Mezzanine.mxf' - -client.assets.upload(folder_id, file_path) - - -print(client.users.get_me()) - -pprint(client.teams.list_projects(client.teams.list_all()[0]['id'])) - -for log in client.logs.list(client.users.get_me()['account_id']): - print(log) \ No newline at end of file diff --git a/examples/download_project.py b/examples/projects/download_project.py similarity index 100% rename from examples/download_project.py rename to examples/projects/download_project.py diff --git a/examples/project_tree.py b/examples/projects/project_tree.py similarity index 100% rename from examples/project_tree.py rename to examples/projects/project_tree.py diff --git a/examples/get_me.py b/examples/users/get_me.py similarity index 100% rename from examples/get_me.py rename to examples/users/get_me.py diff --git a/examples/invite_users.py b/examples/users/invite_users.py similarity index 100% rename from examples/invite_users.py rename to examples/users/invite_users.py diff --git a/examples/user_management.py b/examples/users/user_management.py similarity index 100% rename from examples/user_management.py rename to examples/users/user_management.py From 1c8a201d2b69de9f27b487cfebc6150dbe0ff83b Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 2 Aug 2021 21:18:00 -0700 Subject: [PATCH 23/99] Update range-based comment example --- examples/comments/range_based_comment.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/examples/comments/range_based_comment.py b/examples/comments/range_based_comment.py index 4e0c75c4..61cb7331 100644 --- a/examples/comments/range_based_comment.py +++ b/examples/comments/range_based_comment.py @@ -3,13 +3,15 @@ def leave_range_based_comment(asset_id, comment): client = FrameioClient(os.getenv("FRAME_IO_TOKEN")) - res = client.create_comment( + res = client.comments.create( asset_id=asset_id, text="This is my range based comment", timestamp=1911, duration=3.5 ) + print(res) + if __name__ == "__main__": leave_range_based_comment("id", "this is my comment!") \ No newline at end of file From 586c29cc7aa65e1efaed1adb62a7ee872737ed95 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 3 Aug 2021 10:54:48 -0700 Subject: [PATCH 24/99] Refactor for transfer class --- .circleci/config.yml | 2 + frameioclient/client.py | 4 +- frameioclient/fiocli.py | 11 +- frameioclient/lib/__init__.py | 3 +- frameioclient/lib/constants.py | 2 + frameioclient/lib/download.py | 25 +-- frameioclient/lib/telemetry.py | 4 +- frameioclient/lib/transfer.py | 330 ++++++++++++++++++++++++++++++ frameioclient/lib/transport.py | 102 ++------- frameioclient/services/helpers.py | 2 +- tests/integration.py | 5 +- 11 files changed, 378 insertions(+), 112 deletions(-) create mode 100644 frameioclient/lib/transfer.py diff --git a/.circleci/config.yml b/.circleci/config.yml index d589e07f..8eae902e 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -71,6 +71,7 @@ jobs: paths: - . 
+ test_integration: description: Python << parameters.python-version >> parameters: @@ -95,6 +96,7 @@ jobs: command: | python /tmp/artifact/tests/integration.py + upload_test_job: description: Upload test docker: diff --git a/frameioclient/client.py b/frameioclient/client.py index 6312a598..f42ba259 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -8,10 +8,10 @@ Telemetry, ClientVersion, ClientVersion, - FrameioDownloader, + FrameioDownloader ) -class FrameioClient(APIClient, object): +class FrameioClient(APIClient): def __init__(self, token, host='https://api.frame.io', threads=5, progress=False): super().__init__(token, host, threads, progress) diff --git a/frameioclient/fiocli.py b/frameioclient/fiocli.py index 6d2ff98d..9d173234 100644 --- a/frameioclient/fiocli.py +++ b/frameioclient/fiocli.py @@ -31,6 +31,7 @@ def main(): client = FrameioClient(args.token[0], progress=True, threads=threads) except Exception as e: print("Failed") + print(e) sys.exit(1) # If args.op == 'upload': @@ -38,8 +39,14 @@ def main(): if args.destination: # Check to see if this is a local target and thus a download if os.path.isdir(args.destination[0]): - asset = client.assets.get(args.target[0]) - return client.assets.download(asset, args.destination[0], progress=True, multi_part=True, concurrency=threads) + try: + asset = client.assets.get(args.target[0]) + return client.assets.download(asset, args.destination[0], progress=True, multi_part=True) + except Exception as e: + print(e) + client.projects.download(args.target[0], args.destination[0]) + + else: # This is an upload if os.path.isdir(args.target[0]): return client.assets.upload_folder(args.target[0], args.destination[0]) diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index 6303df14..f1aa82e1 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -5,5 +5,6 @@ from .version import ClientVersion from .upload import FrameioUploader from .download import FrameioDownloader -from .transport import AWSClient, APIClient +from .transport import APIClient +from .transfer import AWSClient from .utils import Utils, PaginatedResponse, KB, MB, Reference diff --git a/frameioclient/lib/constants.py b/frameioclient/lib/constants.py index fbdfcc88..2512a2b8 100644 --- a/frameioclient/lib/constants.py +++ b/frameioclient/lib/constants.py @@ -30,3 +30,5 @@ "a.source" ] } + +default_thread_count = 5 \ No newline at end of file diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 4883053c..43e67b67 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -1,24 +1,15 @@ -import io import os -import sys import math -import time -import requests -import enlighten -import threading -import concurrent.futures from .utils import Utils from .logger import SDKLogger -from .transport import AWSClient +from .transfer import AWSClient from .telemetry import Event, ComparisonTest from .exceptions import ( DownloadException, WatermarkIDDownloadException, AssetNotFullyUploaded, - AssetChecksumMismatch, - AssetChecksumNotPresent ) class FrameioDownloader(object): @@ -88,7 +79,6 @@ def _create_file_stub(self): os.remove(self.destination) # Remove the file self._create_file_stub() # Create a new stub else: - print(e) raise e return True @@ -164,16 +154,17 @@ def download_handler(self): # Handle watermarking if self.watermarked == True: - return self.single_part_download(url) + return self.aws_client()._download_whole(url) else: # Don't use multi-part download for 
files below 25 MB if self.asset['filesize'] < 26214400: - return self.download(url) + return self.aws_client._download_whole(url) if self.multi_part == True: - return self.multi_part_download(url) + return self.aws_client.multi_thread_download(url) else: - return self.single_part_download(url) + return self.aws_client._download_whole(url) +<<<<<<< def single_part_download(self, url): start_time = time.time() print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) @@ -386,3 +377,7 @@ def _download_chunk(self, task): # After the function completes, we report back the # of bytes transferred return chunk_size + +======= + +>>>>>>> \ No newline at end of file diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py index 869e7bae..dad32261 100644 --- a/frameioclient/lib/telemetry.py +++ b/frameioclient/lib/telemetry.py @@ -54,9 +54,9 @@ def __init__(self, user_id, event_name, properties): class ComparisonTest(Event, object): def __init__(self, transfer_stats, request_logs=[]): super().__init__() - self.event_name = event_name + # self.event_name = event_name self.transfer_stats = None - self.requests_logs = requests_logs + # self.requests_logs = requests_logs @staticmethod def _parse_requests_data(req_object): diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py new file mode 100644 index 00000000..bf30a891 --- /dev/null +++ b/frameioclient/lib/transfer.py @@ -0,0 +1,330 @@ +import os +import math +import time +import enlighten +import requests +import concurrent.futures + +from .utils import Utils +from .logger import SDKLogger + +from .exceptions import ( + DownloadException, + AssetChecksumMismatch, + AssetChecksumNotPresent +) + +from .bandwidth import NetworkBandwidth, DiskBandwidth +from .transport import HTTPClient + +class AWSClient(HTTPClient, object): + def __init__(self, concurrency=None, progress=True): + super().__init__() # Initialize via inheritance + self.progress = progress + self.progress_manager = None + self.destination = None + + # Ensure this is a valid number before assigning + if concurrency is not None and type(concurrency) == int and concurrency > 0: + self.concurrency = concurrency + else: + self.concurrency = self._optimize_concurrency() + + if self.progress: + self.progress_manager = enlighten.get_manager() + + @staticmethod + def check_cdn(url): + # TODO improve this algo + if 'assets.frame.io' in url: + return 'Cloudfront' + elif 's3' in url: + return 'S3' + else: + return None + + def _create_file_stub(self): + try: + fp = open(self.destination, "w") + # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space + fp.close() + except FileExistsError as e: + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + print(e) + raise e + return True + + def _optimize_concurrency(self): + """ + This method looks as the net_stats and disk_stats that we've run on \ + the current environment in order to suggest the best optimized \ + number of concurrent TCP connections. + + Example:: + AWSClient._optimize_concurrency() + """ + + net_stats = NetworkBandwidth + disk_stats = DiskBandwidth + + # Algorithm ensues + # + # + + return 5 + + def _get_byte_range(self, url, start_byte=0, end_byte=2048): + """ + Get a specific byte range from a given URL. This is **not** optimized \ + for heavily-threaded operations currently. 
+ + :Args: + url (string): The URL you want to fetch a byte-range from + start_byte (int): The first byte you want to request + end_byte (int): The last byte you want to extract + + Example:: + AWSClient().get_byte_range(asset, "~./Downloads") + """ + + range_header = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + + headers = {**self.shared_headers, **range_header} + + br = requests.get(url, headers=headers).content + return br + + def _download_whole(self, url): + start_time = time.time() + print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) + + # Downloading + r = self.session.get(url, stream=True) + + # Downloading + with open(self.destination, 'wb') as handle: + try: + # TODO make sure this approach works for SBWM download + for chunk in r.iter_content(chunk_size=4096): + if chunk: + handle.write(chunk) + except requests.exceptions.ChunkedEncodingError as e: + raise e + + download_time = time.time() - start_time + download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) + print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + + return self.destination, download_speed + + def _download_chunk(self, task): + # Download a particular chunk + # Called by the threadpool executor + + # Destructure the task object into its parts + url = task[0] + start_byte = task[1] + end_byte = task[2] + chunk_number = task[3] + in_progress = task[4] + + # Set the initial chunk_size, but prepare to overwrite + chunk_size = (end_byte - start_byte) + + if self.bytes_started + (chunk_size) > self.file_size: + difference = abs(self.file_size - (self.bytes_started + chunk_size)) # should be negative + chunk_size = chunk_size - difference + print(f"Chunk size as done via math: {chunk_size}") + else: + pass + + # Set chunk size in a smarter way + self.bytes_started += (chunk_size) + + # Update the bar for in_progress chunks + in_progress.update(float(chunk_size)) + + # Specify the start and end of the range request + headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + + # Grab the data as a stream + r = self.session.get(url, headers=headers, stream=True) + + # Write the file to disk + with open(self.destination, "r+b") as fp: + fp.seek(start_byte) # Seek to the right spot in the file + chunk_size = len(r.content) # Get the final chunk size + fp.write(r.content) # Write the data + + # Save requests logs + self.request_logs.append({ + 'headers': r.headers, + 'http_status': r.status_code, + 'bytes_transferred': len(r.content) + }) + + # Increase the count for bytes_completed, but only if it doesn't overrun file length + self.bytes_completed += (chunk_size) + if self.bytes_completed > self.file_size: + self.bytes_completed = self.file_size + + # Update the in_progress bar + self._update_in_progress() + + # After the function completes, we report back the # of bytes transferred + return chunk_size + + def multi_thread_download(self, url): + start_time = time.time() + + # Generate stub + try: + self._create_file_stub() + + except Exception as e: + raise DownloadException(message=e) + + offset = math.ceil(self.file_size / self.chunks) + in_byte = 0 # Set initially here, but then override + + print("Multi-part download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) + + # Queue up threads + with enlighten.get_manager() as manager: + status = manager.status_bar( + position=3, + status_format=u'{fill}Stage: {stage}{fill}{elapsed}', + 
color='bold_underline_bright_white_on_lightslategray', + justify=enlighten.Justify.CENTER, + stage='Initializing', + autorefresh=True, + min_delta=0.5 + ) + + BAR_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ + 'Downloading: {count_1:.2j}/{total:.2j} ' + \ + 'Completed: {count_2:.2j}/{total:.2j} ' + \ + '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + + # Add counter to track completed chunks + initializing = manager.counter( + position=2, + total=float(self.file_size), + desc='Progress', + unit='B', + bar_format=BAR_FORMAT, + ) + + # Add additional counter + in_progress = initializing.add_subcounter('yellow', all_fields=True) + completed = initializing.add_subcounter('green', all_fields=True) + + # Set default state + initializing.refresh() + + status.update(stage='Downloading', color='green') + + with concurrent.futures.ThreadPoolExecutor(max_workers=self.aws_client.concurrency) as executor: + for i in range(int(self.chunks)): + # Increment by the iterable + 1 so we don't mutiply by zero + out_byte = offset * (i+1) + # Create task tuple + task = (url, in_byte, out_byte, i, in_progress) + # Stagger start for each chunk by 0.1 seconds + if i < self.aws_client.concurrency: + time.sleep(0.1) + # Append tasks to futures list + self.futures.append(executor.submit(self._download_chunk, task)) + # Reset new in byte equal to last out byte + in_byte = out_byte + + # Keep updating the progress while we have > 0 bytes left. + # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + completed.update_from(in_progress, float((chunk_size - 1)), force=True) + except Exception as exc: + print(exc) + + # Calculate and print stats + download_time = round((time.time() - start_time), 2) + download_speed = round((self.file_size/download_time), 2) + + if self.checksum_verification == True: + # Check for checksum, if not present throw error + if self._get_checksum() == None: + raise AssetChecksumNotPresent + else: + # Perform hash-verification + status.update(stage='Verifying') + + VERIFICATION_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ + 'Progress: {count:.2j}/{total:.2j} ' + \ + '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + + # Add counter to track completed chunks + verification = manager.counter( + position=1, + total=float(self.file_size), + desc='Verifying', + unit='B', + bar_format=VERIFICATION_FORMAT, + color='purple' + ) + + # Calculate the file hash + if Utils.calculate_hash(self.destination, progress_callback=verification) != self.original_checksum: + raise AssetChecksumMismatch + + # Update the header + status.update(stage='Download Complete!', force=True) + + # Log completion event + SDKLogger('downloads').info("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + + # Submit telemetry + transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} + + # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) + + # If stats = True, we return a dict with way more info, otherwise \ + if self.stats: + # We end by returning a dict with info about the download + dl_info = { + "destination": self.destination, + "speed": download_speed, + "elapsed": download_time, + "cdn": AWSClient.check_cdn(url), + "concurrency": self.aws_client.concurrency, + "size": self.file_size, + "chunks": self.chunks + } + return dl_info + else: + return self.destination + + +class TransferJob(AWSClient): + # These will be used to track the job and then 
push telemetry + def __init__(self, job_info): + self.job_info = job_info # < - convert to JobInfo class + self.cdn = 'S3' # or 'CF' - use check_cdn to confirm + self.progress_manager = None + +class DownloadJob(TransferJob): + def __init__(self): + self.asset_type = 'review_link' # we should use a dataclass here + # Need to create a re-usable job schema + # Think URL -> output_path + pass + +class UploadJob(TransferJob): + def __init__(self, destination): + self.destination = destination + # Need to create a re-usable job schema + # Think local_file path and remote Frame.io destination + pass diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index ed398561..46a79704 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -4,23 +4,34 @@ import requests import threading +from pprint import pprint from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry from .version import ClientVersion from .utils import PaginatedResponse +from .constants import default_thread_count from .exceptions import PresentationException -from .bandwidth import NetworkBandwidth, DiskBandwidth +# from .bandwidth import NetworkBandwidth, DiskBandwidth class HTTPClient(object): - def __init__(self): + """[summary] + + Args: + object ([type]): [description] + """ + def __init__(self, threads=default_thread_count): + # Setup number of threads to use + self.threads = threads + # Initialize empty thread object self.thread_local = None self.client_version = ClientVersion.version() self.shared_headers = { 'x-frameio-client': 'python/{}'.format(self.client_version) } + # Configure retry strategy (very broad right now) self.retry_strategy = Retry( total=3, @@ -28,6 +39,7 @@ def __init__(self): status_forcelist=[400, 429, 500, 503], method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"] ) + # Create real thread self._initialize_thread() @@ -47,7 +59,7 @@ def _get_session(self, auth=True): class APIClient(HTTPClient, object): def __init__(self, token, host, threads, progress): - super().__init__() + super().__init__(threads) self.host = host self.token = token self.threads = threads @@ -72,6 +84,7 @@ def _api_call(self, method, endpoint, payload={}, limit=None): ) if r.ok: + pprint(r.headers.items) if r.headers.get('page-number'): if int(r.headers.get('total-pages')) > 1: return PaginatedResponse( @@ -114,86 +127,3 @@ def get_specific_page(self, method, endpoint, payload, page): payload['page'] = page return self._api_call(method, endpoint, payload=payload) - -class AWSClient(HTTPClient, object): - def __init__(self, concurrency=None, progress=True): - super().__init__() # Initialize via inheritance - self.progress = progress - # Ensure this is a valid number before assigning - if concurrency is not None and type(concurrency) == int and concurrency > 0: - self.concurrency = concurrency - else: - self.concurrency = self.optimize_concurrency() - - def optimize_concurrency(self): - """ - This method looks as the net_stats and disk_stats that we've run on \ - the current environment in order to suggest the best optimized \ - number of concurrent TCP connections. - - Example:: - AWSClient.optimize_concurrency() - """ - - net_stats = NetworkBandwidth - disk_stats = DiskBandwidth - - # Algorithm ensues - # - # - - return 5 - - @staticmethod - def get_byte_range(url, start_byte=0, end_byte=2048): - """ - Get a specific byte range from a given URL. This is **not** optimized \ - for heavily-threaded operations currently. 
- - :Args: - url (string): The URL you want to fetch a byte-range from - start_byte (int): The first byte you want to request - end_byte (int): The last byte you want to extract - - Example:: - AWSClient.get_byte_range(asset, "~./Downloads") - """ - - range_header = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} - shared_headers = {'x-frameio-client': 'python/{}'.format(self.client_version)} - headers = {**shared_headers, **range_header} - - br = requests.get(url, headers=headers).content - return br - - @staticmethod - def check_cdn(url): - # TODO improve this algo - if 'assets.frame.io' in url: - return 'Cloudfront' - elif 's3' in url: - return 'S3' - else: - return None - - -class TransferJob(AWSClient): - # These will be used to track the job and then push telemetry - def __init__(self, job_info): - self.job_info = job_info - self.cdn = 'S3' # or 'CF' - use check_cdn to confirm - self.progress_manager = None - -class DownloadJob(TransferJob): - def __init__(self): - self.asset_type = 'review_link' # we should use a dataclass here - # Need to create a re-usable job schema - # Think URL -> output_path - pass - -class UploadJob(TransferJob): - def __init__(self, destination): - self.destination = destination - # Need to create a re-usable job schema - # Think local_file path and remote Frame.io destination - pass diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py index 76de009d..6d6d1b29 100644 --- a/frameioclient/services/helpers.py +++ b/frameioclient/services/helpers.py @@ -140,7 +140,7 @@ def recursive_downloader(self, directory, asset, count=0): if asset['_type'] == 'file': count += 1 - return self.client.assets.download(asset, target_directory, multi_part=True, concurrency=10) + return self.client.assets.download(asset, target_directory, multi_part=True) except Exception as e: print(e) diff --git a/tests/integration.py b/tests/integration.py index ecd9e8ec..b1cb2ca2 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -2,11 +2,10 @@ import sys import json import time -import socket -import platform -import mimetypes import shutil +import socket import requests +import platform from math import ceil from pprint import pprint, pformat From a81a9b5a01079824bacaec0720779349898ec059 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 3 Aug 2021 10:58:45 -0700 Subject: [PATCH 25/99] Fix merge conflict --- frameioclient/fiocli.py | 1 - frameioclient/lib/download.py | 218 --------------------------------- frameioclient/lib/transport.py | 1 - 3 files changed, 220 deletions(-) diff --git a/frameioclient/fiocli.py b/frameioclient/fiocli.py index 9d173234..edea40a8 100644 --- a/frameioclient/fiocli.py +++ b/frameioclient/fiocli.py @@ -10,7 +10,6 @@ def main(): ## Define args parser.add_argument('--token', action='store', metavar='token', type=str, nargs='+', help='Developer Token') - # parser.add_argument('--op', action='store', metavar='op', type=str, nargs='+', help='Operation: upload, download') parser.add_argument('--target', action='store', metavar='target', type=str, nargs='+', help='Target: remote project or folder, or alternatively a local file/folder') parser.add_argument('--destination', action='store', metavar='destination', type=str, nargs='+', help='Destination: remote project or folder, or alternatively a local file/folder') parser.add_argument('--threads', action='store', metavar='threads', type=int, nargs='+', help='Number of threads to use') diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 
43e67b67..164acc4a 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -163,221 +163,3 @@ def download_handler(self): return self.aws_client.multi_thread_download(url) else: return self.aws_client._download_whole(url) - -<<<<<<< - def single_part_download(self, url): - start_time = time.time() - print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) - - # Downloading - r = self.session.get(url, stream=True) - open(self.destination, "wb").write(r.content) - - # Downloading - with open(self.destination, 'wb') as handle: - try: - # TODO make sure this approach works for SBWM download - for chunk in r.iter_content(chunk_size=4096): - if chunk: - handle.write(chunk) - except requests.exceptions.ChunkedEncodingError as e: - raise e - - download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) - print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) - - return self.destination, download_speed - - def multi_part_download(self, url): - start_time = time.time() - - # Generate stub - try: - self._create_file_stub() - - except Exception as e: - raise DownloadException(message=e) - - offset = math.ceil(self.file_size / self.chunks) - in_byte = 0 # Set initially here, but then override - - print("Multi-part download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) - - # Queue up threads - with enlighten.get_manager() as manager: - status = manager.status_bar( - position=3, - status_format=u'{fill}Stage: {stage}{fill}{elapsed}', - color='bold_underline_bright_white_on_lightslategray', - justify=enlighten.Justify.CENTER, - stage='Initializing', - autorefresh=True, - min_delta=0.5 - ) - - BAR_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ - 'Downloading: {count_1:.2j}/{total:.2j} ' + \ - 'Completed: {count_2:.2j}/{total:.2j} ' + \ - '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' - - # Add counter to track completed chunks - initializing = manager.counter( - position=2, - total=float(self.file_size), - desc='Progress', - unit='B', - bar_format=BAR_FORMAT, - ) - - # Add additional counter - in_progress = initializing.add_subcounter('yellow', all_fields=True) - completed = initializing.add_subcounter('green', all_fields=True) - - # Set default state - initializing.refresh() - - status.update(stage='Downloading', color='green') - - with concurrent.futures.ThreadPoolExecutor(max_workers=self.aws_client.concurrency) as executor: - for i in range(int(self.chunks)): - # Increment by the iterable + 1 so we don't mutiply by zero - out_byte = offset * (i+1) - # Create task tuple - task = (url, in_byte, out_byte, i, in_progress) - # Stagger start for each chunk by 0.1 seconds - if i < self.aws_client.concurrency: time.sleep(0.1) - # Append tasks to futures list - self.futures.append(executor.submit(self._download_chunk, task)) - # Reset new in byte equal to last out byte - in_byte = out_byte - - # Keep updating the progress while we have > 0 bytes left. 
- # Wait on threads to finish - for future in concurrent.futures.as_completed(self.futures): - try: - chunk_size = future.result() - completed.update_from(in_progress, float((chunk_size - 1)), force=True) - except Exception as exc: - print(exc) - - # Calculate and print stats - download_time = round((time.time() - start_time), 2) - download_speed = round((self.file_size/download_time), 2) - - - if self.checksum_verification == True: - # Check for checksum, if not present throw error - if self._get_checksum() == None: - raise AssetChecksumNotPresent - else: - # Perform hash-verification - status.update(stage='Verifying') - - VERIFICATION_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ - 'Progress: {count:.2j}/{total:.2j} ' + \ - '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' - - # Add counter to track completed chunks - verification = manager.counter( - position=1, - total=float(self.file_size), - desc='Verifying', - unit='B', - bar_format=VERIFICATION_FORMAT, - color='purple' - ) - - # Calculate the file hash - if Utils.calculate_hash(self.destination, progress_callback=verification) != self.original_checksum: - raise AssetChecksumMismatch - - # Update the header - status.update(stage='Download Complete!', force=True) - - # Log completion event - SDKLogger('downloads').info("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) - - # Submit telemetry - transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} - - # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) - - # If stats = True, we return a dict with way more info, otherwise \ - if self.stats: - # We end by returning a dict with info about the download - dl_info = { - "destination": self.destination, - "speed": download_speed, - "elapsed": download_time, - "cdn": AWSClient.check_cdn(url), - "concurrency": self.aws_client.concurrency, - "size": self.file_size, - "chunks": self.chunks - } - return dl_info - - return self.destination - - - def _download_chunk(self, task): - # Download a particular chunk - # Called by the threadpool executor - - # Destructure the task object into its parts - url = task[0] - start_byte = task[1] - end_byte = task[2] - chunk_number = task[3] - in_progress = task[4] - - # Set the initial chunk_size, but prepare to overwrite - chunk_size = (end_byte - start_byte) - - if self.bytes_started + (chunk_size) > self.file_size: - difference = abs(self.file_size - (self.bytes_started + chunk_size)) # should be negative - chunk_size = chunk_size - difference - print(f"Chunk size as done via math: {chunk_size}") - else: - pass - - # Set chunk size in a smarter way - self.bytes_started += (chunk_size) - - # Update the bar for in_progress chunks - in_progress.update(float(chunk_size)) - - # Specify the start and end of the range request - headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} - - # Grab the data as a stream - r = self.session.get(url, headers=headers, stream=True) - - # Write the file to disk - with open(self.destination, "r+b") as fp: - fp.seek(start_byte) # Seek to the right spot in the file - chunk_size = len(r.content) # Get the final chunk size - fp.write(r.content) # Write the data - - # Save requests logs - self.request_logs.append({ - 'headers': r.headers, - 'http_status': r.status_code, - 'bytes_transferred': len(r.content) - }) - - # Increase the count for bytes_completed, but only if it doesn't overrun file length - self.bytes_completed += (chunk_size) - if self.bytes_completed > 
self.file_size: - self.bytes_completed = self.file_size - - # Update the in_progress bar - self._update_in_progress() - - # After the function completes, we report back the # of bytes transferred - return chunk_size - - -======= - ->>>>>>> \ No newline at end of file diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 46a79704..19e6f752 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -84,7 +84,6 @@ def _api_call(self, method, endpoint, payload={}, limit=None): ) if r.ok: - pprint(r.headers.items) if r.headers.get('page-number'): if int(r.headers.get('total-pages')) > 1: return PaginatedResponse( From da6964d7479c2f65cb78503015a53122c7cf87ae Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 3 Aug 2021 11:04:12 -0700 Subject: [PATCH 26/99] Add more documentation --- frameioclient/lib/transport.py | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 19e6f752..9823c7c6 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -1,10 +1,6 @@ -import os -import logging -import enlighten import requests import threading -from pprint import pprint from requests.adapters import HTTPAdapter from requests.packages.urllib3.util.retry import Retry @@ -16,10 +12,13 @@ class HTTPClient(object): - """[summary] + """HTTP Client base that automatically handles the following: + - Shared thread/session object + - Client version headers + - Automated retries Args: - object ([type]): [description] + threads (int): Number of threads to use concurrently. """ def __init__(self, threads=default_thread_count): # Setup number of threads to use @@ -58,6 +57,14 @@ def _get_session(self, auth=True): class APIClient(HTTPClient, object): + """Frame.io API Client that handles automatic pagination, and lots of other nice things. + + Args: + HTTPClient (class): HTTP Client base class + token (str): Frame.io developer token, JWT, or OAuth access token. + threads (int): Number of threads to concurrently use for uploads/downloads. + progress (bool): If True, show status bars in console. 
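+
+    Example (illustrative sketch; in practice you would normally instantiate
+    the FrameioClient subclass, which passes these same arguments through)::
+
+        import os
+
+        client = APIClient(
+            token=os.getenv("FRAME_IO_TOKEN"),
+            host="https://api.frame.io",
+            threads=5,
+            progress=False,
+        )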
+ """ def __init__(self, token, host, threads, progress): super().__init__(threads) self.host = host From 9342ff4ce95e51f4d06dc31d656c4851fec9d981 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 3 Aug 2021 11:04:21 -0700 Subject: [PATCH 27/99] Clean-up unused imports --- frameioclient/lib/download.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 164acc4a..aec74aa2 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -2,9 +2,9 @@ import math from .utils import Utils -from .logger import SDKLogger +# from .logger import SDKLogger from .transfer import AWSClient -from .telemetry import Event, ComparisonTest +# from .telemetry import Event, ComparisonTest from .exceptions import ( DownloadException, From 439c546a4eee5078f90721701723809277bcbceb Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 3 Aug 2021 15:15:31 -0700 Subject: [PATCH 28/99] Formatting using black --- frameioclient/client.py | 135 +++++------ frameioclient/fiocli.py | 52 +++- frameioclient/lib/bandwidth.py | 5 +- frameioclient/lib/constants.py | 23 +- frameioclient/lib/download.py | 312 ++++++++++++------------ frameioclient/lib/exceptions.py | 47 ++-- frameioclient/lib/logger.py | 5 +- frameioclient/lib/service.py | 1 + frameioclient/lib/telemetry.py | 41 ++-- frameioclient/lib/transfer.py | 180 ++++++++------ frameioclient/lib/transport.py | 45 ++-- frameioclient/lib/upload.py | 256 ++++++++++---------- frameioclient/lib/utils.py | 372 +++++++++++++++-------------- frameioclient/lib/version.py | 3 +- frameioclient/services/__init__.py | 2 +- frameioclient/services/comments.py | 179 +++++++------- frameioclient/services/helpers.py | 273 ++++++++++----------- frameioclient/services/links.py | 239 +++++++++--------- frameioclient/services/logs.py | 29 +-- frameioclient/services/projects.py | 281 +++++++++++----------- frameioclient/services/search.py | 79 +++--- frameioclient/services/teams.py | 181 +++++++------- frameioclient/services/users.py | 3 +- 23 files changed, 1416 insertions(+), 1327 deletions(-) diff --git a/frameioclient/client.py b/frameioclient/client.py index f42ba259..cc59323c 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -3,79 +3,66 @@ ==================================== The core module of the frameioclient """ -from .lib import ( - APIClient, - Telemetry, - ClientVersion, - ClientVersion, - FrameioDownloader -) +from .lib import APIClient, Telemetry, ClientVersion, ClientVersion, FrameioDownloader +from services import * + class FrameioClient(APIClient): - def __init__(self, token, host='https://api.frame.io', threads=5, progress=False): - super().__init__(token, host, threads, progress) - - @property - def me(self): - return self.users.get_me() - - @property - def telemetry(self): - return Telemetry(self) - - @property - def _auth(self): - return self.token - - @property - def _version(self): - return ClientVersion.version() - - @property - def _download(self): - return FrameioDownloader(self) - - @property - def users(self): - from .services import User - return User(self) - - @property - def assets(self): - from .services import Asset - return Asset(self) - - @property - def comments(self): - from .services import Comment - return Comment(self) - - @property - def logs(self): - from .services import AuditLogs - return AuditLogs(self) - - @property - def review_links(self): - from .services import ReviewLink - return ReviewLink(self) - - @property - def 
presentation_links(self): - from .services import PresentationLink - return PresentationLink(self) - - @property - def projects(self): - from .services import Project - return Project(self) - - @property - def teams(self): - from .services import Team - return Team(self) - - @property - def helpers(self): - from .services import FrameioHelpers - return FrameioHelpers(self) + def __init__(self, token, host="https://api.frame.io", threads=5, progress=False): + super().__init__(token, host, threads, progress) + + @property + def me(self): + return self.users.get_me() + + @property + def telemetry(self): + return Telemetry(self) + + @property + def _auth(self): + return self.token + + @property + def _version(self): + return ClientVersion.version() + + @property + def _download(self): + return FrameioDownloader(self) + + @property + def users(self): + return User(self) + + @property + def assets(self): + return Asset(self) + + @property + def comments(self): + return Comment(self) + + @property + def logs(self): + return AuditLogs(self) + + @property + def review_links(self): + return ReviewLink(self) + + @property + def presentation_links(self): + return PresentationLink(self) + + @property + def projects(self): + return Project(self) + + @property + def teams(self): + return Team(self) + + @property + def helpers(self): + return FrameioHelpers(self) diff --git a/frameioclient/fiocli.py b/frameioclient/fiocli.py index edea40a8..682d5f70 100644 --- a/frameioclient/fiocli.py +++ b/frameioclient/fiocli.py @@ -6,13 +6,43 @@ def main(): - parser=argparse.ArgumentParser(prog='fiocli', description='Frame.io Python SDK CLI') + parser = argparse.ArgumentParser( + prog="fiocli", description="Frame.io Python SDK CLI" + ) ## Define args - parser.add_argument('--token', action='store', metavar='token', type=str, nargs='+', help='Developer Token') - parser.add_argument('--target', action='store', metavar='target', type=str, nargs='+', help='Target: remote project or folder, or alternatively a local file/folder') - parser.add_argument('--destination', action='store', metavar='destination', type=str, nargs='+', help='Destination: remote project or folder, or alternatively a local file/folder') - parser.add_argument('--threads', action='store', metavar='threads', type=int, nargs='+', help='Number of threads to use') + parser.add_argument( + "--token", + action="store", + metavar="token", + type=str, + nargs="+", + help="Developer Token", + ) + parser.add_argument( + "--target", + action="store", + metavar="target", + type=str, + nargs="+", + help="Target: remote project or folder, or alternatively a local file/folder", + ) + parser.add_argument( + "--destination", + action="store", + metavar="destination", + type=str, + nargs="+", + help="Destination: remote project or folder, or alternatively a local file/folder", + ) + parser.add_argument( + "--threads", + action="store", + metavar="threads", + type=int, + nargs="+", + help="Number of threads to use", + ) ## Parse args args = parser.parse_args() @@ -40,19 +70,21 @@ def main(): if os.path.isdir(args.destination[0]): try: asset = client.assets.get(args.target[0]) - return client.assets.download(asset, args.destination[0], progress=True, multi_part=True) + return client.assets.download( + asset, args.destination[0], progress=True, multi_part=True + ) except Exception as e: print(e) client.projects.download(args.target[0], args.destination[0]) - - else: # This is an upload + else: # This is an upload if os.path.isdir(args.target[0]): - return 
client.assets.upload_folder(args.target[0], args.destination[0]) + return client.assets.upload_folder( + args.target[0], args.destination[0] + ) else: return client.assets.upload(args.destination[0], args.target[0]) else: print("No destination supplied") else: print("No target supplied") - diff --git a/frameioclient/lib/bandwidth.py b/frameioclient/lib/bandwidth.py index 8349783b..b1991d53 100644 --- a/frameioclient/lib/bandwidth.py +++ b/frameioclient/lib/bandwidth.py @@ -1,5 +1,6 @@ import speedtest + class NetworkBandwidth: # Test the network bandwidth any time we have a new IP address # Persist this information to a config.json file @@ -43,14 +44,14 @@ def speedtest(): def __repr__(self): self.results + class DiskBandwidth: # Test the disk speed and write to a config.json file for re-use # Worth re-checking the disk every time a new one is detected (base route) - + def __init__(self, volume): self.volume = volume self.results = dict() def __repr__(self): self.results - diff --git a/frameioclient/lib/constants.py b/frameioclient/lib/constants.py index 2512a2b8..70964526 100644 --- a/frameioclient/lib/constants.py +++ b/frameioclient/lib/constants.py @@ -8,27 +8,14 @@ "u.name", "a.is_session_watermarked", "a.item_count", - "a.creator.name" - "a.creator.id", + "a.creator.name" "a.creator.id", "a.inserted_at", "a.original", "a.upload_completed_at", ], - "excluded_fields": [ - "a.checksums", - "a.h264_1080_best", - "a.source" - ], - "drop_includes": [ - "a.trancode_statuses", - "a.transcodes", - "a.source", - "a.checksums" - ], - "hard_drop_fields": [ - "a.transcodes", - "a.source" - ] + "excluded_fields": ["a.checksums", "a.h264_1080_best", "a.source"], + "drop_includes": ["a.trancode_statuses", "a.transcodes", "a.source", "a.checksums"], + "hard_drop_fields": ["a.transcodes", "a.source"], } -default_thread_count = 5 \ No newline at end of file +default_thread_count = 5 diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index aec74aa2..76c199f7 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -2,164 +2,172 @@ import math from .utils import Utils + # from .logger import SDKLogger from .transfer import AWSClient + # from .telemetry import Event, ComparisonTest from .exceptions import ( - DownloadException, - WatermarkIDDownloadException, - AssetNotFullyUploaded, + DownloadException, + WatermarkIDDownloadException, + AssetNotFullyUploaded, ) + class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, replace=False): - self.multi_part = multi_part - self.asset = asset - self.asset_type = None - self.download_folder = download_folder - self.replace = replace - self.resolution_map = dict() - self.destination = None - self.watermarked = asset['is_session_watermarked'] # Default is probably false - self.file_size = asset["filesize"] - self.futures = list() - self.checksum = None - self.original_checksum = None - self.checksum_verification = True - self.chunk_size = (25 * 1024 * 1024) # 25 MB chunk size - self.chunks = math.ceil(self.file_size/self.chunk_size) - self.prefix = prefix - self.bytes_started = 0 - self.bytes_completed = 0 - self.in_progress = 0 - self.aws_client = AWSClient(concurrency=5) - self.session = self.aws_client._get_session(auth=None) - self.filename = Utils.normalize_filename(asset["name"]) - self.request_logs = list() - self.stats = True - - self._evaluate_asset() - self._get_path() - - def _update_in_progress(self): - self.in_progress = self.bytes_started - 
self.bytes_completed - return self.in_progress # Number of in-progress bytes - - def get_path(self): - if self.prefix != None: - self.filename = self.prefix + self.filename - - if self.destination == None: - final_destination = os.path.join(self.download_folder, self.filename) - self.destination = final_destination - - return self.destination - - def _evaluate_asset(self): - if self.asset.get("_type") != "file": - raise DownloadException(message="Unsupport Asset type: {}".format(self.asset.get("_type"))) - - # This logic may block uploads that were started before this field was introduced - if self.asset.get("upload_completed_at") == None: - raise AssetNotFullyUploaded - - try: - self.original_checksum = self.asset['checksums']['xx_hash'] - except (TypeError, KeyError): - self.original_checksum = None - - def _create_file_stub(self): - try: - fp = open(self.destination, "w") - # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space - fp.close() - except FileExistsError as e: - if self.replace == True: - os.remove(self.destination) # Remove the file - self._create_file_stub() # Create a new stub - else: - raise e - return True - - def _get_path(self): - print("prefix:", self.prefix) - if self.prefix != None: - self.filename = self.prefix + self.filename - - if self.destination == None: - final_destination = os.path.join(self.download_folder, self.filename) - self.destination = final_destination - - return self.destination - - def _get_checksum(self): - try: - self.original_checksum = self.asset['checksums']['xx_hash'] - except (TypeError, KeyError): - self.original_checksum = None - - return self.original_checksum - - def get_download_key(self): - try: - url = self.asset['original'] - except KeyError as e: - if self.watermarked == True: - resolution_list = list() + def __init__(self, asset, download_folder, prefix, multi_part=False, replace=False): + self.multi_part = multi_part + self.asset = asset + self.asset_type = None + self.download_folder = download_folder + self.replace = replace + self.resolution_map = dict() + self.destination = None + self.watermarked = asset["is_session_watermarked"] # Default is probably false + self.file_size = asset["filesize"] + self.futures = list() + self.checksum = None + self.original_checksum = None + self.checksum_verification = True + self.chunk_size = 25 * 1024 * 1024 # 25 MB chunk size + self.chunks = math.ceil(self.file_size / self.chunk_size) + self.prefix = prefix + self.bytes_started = 0 + self.bytes_completed = 0 + self.in_progress = 0 + self.aws_client = AWSClient(concurrency=5) + self.session = self.aws_client._get_session(auth=None) + self.filename = Utils.normalize_filename(asset["name"]) + self.request_logs = list() + self.stats = True + + self._evaluate_asset() + self._get_path() + + def _update_in_progress(self): + self.in_progress = self.bytes_started - self.bytes_completed + return self.in_progress # Number of in-progress bytes + + def get_path(self): + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _evaluate_asset(self): + if self.asset.get("_type") != "file": + raise DownloadException( + message="Unsupport Asset type: {}".format(self.asset.get("_type")) + ) + + # This logic may block uploads that were started before this field was introduced + if self.asset.get("upload_completed_at") == None: + raise 
AssetNotFullyUploaded + + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + def _create_file_stub(self): + try: + fp = open(self.destination, "w") + # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space + fp.close() + except FileExistsError as e: + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + raise e + return True + + def _get_path(self): + print("prefix:", self.prefix) + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _get_checksum(self): + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + return self.original_checksum + + def get_download_key(self): try: - for resolution_key, download_url in sorted(self.asset['downloads'].items()): - resolution = resolution_key.split("_")[1] # Grab the item at index 1 (resolution) - try: - resolution = int(resolution) - except ValueError: - continue - - if download_url is not None: - resolution_list.append(download_url) - - # Grab the highest resolution (first item) now - url = resolution_list[0] - except KeyError: - raise DownloadException - else: - raise WatermarkIDDownloadException - - return url - - def download_handler(self): - """Call this to perform the actual download of your asset! - """ - - # Check folders - if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): - print("Folder exists, don't need to create it") - else: - print("Destination folder not found, creating") - os.mkdir(self.download_folder) - - # Check files - if os.path.isfile(self.get_path()) == False: - pass - - if os.path.isfile(self.get_path()) and self.replace == True: - os.remove(self.get_path()) - - if os.path.isfile(self.get_path()) and self.replace == False: - print("File already exists at this location.") - return self.destination - - # Get URL - url = self.get_download_key() - - # Handle watermarking - if self.watermarked == True: - return self.aws_client()._download_whole(url) - else: - # Don't use multi-part download for files below 25 MB - if self.asset['filesize'] < 26214400: - return self.aws_client._download_whole(url) - if self.multi_part == True: - return self.aws_client.multi_thread_download(url) - else: - return self.aws_client._download_whole(url) + url = self.asset["original"] + except KeyError as e: + if self.watermarked == True: + resolution_list = list() + try: + for resolution_key, download_url in sorted( + self.asset["downloads"].items() + ): + resolution = resolution_key.split("_")[ + 1 + ] # Grab the item at index 1 (resolution) + try: + resolution = int(resolution) + except ValueError: + continue + + if download_url is not None: + resolution_list.append(download_url) + + # Grab the highest resolution (first item) now + url = resolution_list[0] + except KeyError: + raise DownloadException + else: + raise WatermarkIDDownloadException + + return url + + def download_handler(self): + """Call this to perform the actual download of your asset!""" + + # Check folders + if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): + print("Folder exists, don't need to create it") + else: + print("Destination folder not found, creating") + 
os.mkdir(self.download_folder) + + # Check files + if os.path.isfile(self.get_path()) == False: + pass + + if os.path.isfile(self.get_path()) and self.replace == True: + os.remove(self.get_path()) + + if os.path.isfile(self.get_path()) and self.replace == False: + print("File already exists at this location.") + return self.destination + + # Get URL + url = self.get_download_key() + + # Handle watermarking + if self.watermarked == True: + return self.aws_client()._download_whole(url) + else: + # Don't use multi-part download for files below 25 MB + if self.asset["filesize"] < 26214400: + return self.aws_client._download_whole(url) + if self.multi_part == True: + return self.aws_client.multi_thread_download(url) + else: + return self.aws_client._download_whole(url) diff --git a/frameioclient/lib/exceptions.py b/frameioclient/lib/exceptions.py index 9f03739d..d9b06914 100644 --- a/frameioclient/lib/exceptions.py +++ b/frameioclient/lib/exceptions.py @@ -4,60 +4,61 @@ class PresentationException(Exception): """ def __init__( - self, - message="Your asset already has a presentation link associated with it." + self, message="Your asset already has a presentation link associated with it." ): self.message = message super().__init__(self.message) + class WatermarkIDDownloadException(Exception): """Exception raised when trying to download a file where there is no available download URL. """ + def __init__( - self, - message="This file is unavailable for download due to security and permission settings." + self, + message="This file is unavailable for download due to security and permission settings.", ): self.message = message super().__init__(self.message) + class DownloadException(Exception): - """Exception raised when trying to download a file - """ - def __init__( - self, - message="Generic Dowload exception." - ): + """Exception raised when trying to download a file""" + + def __init__(self, message="Generic Dowload exception."): self.message = message super().__init__(self.message) + class AssetNotFullyUploaded(Exception): - """Exception raised when trying to download a file that isn't yet fully upload. - """ + """Exception raised when trying to download a file that isn't yet fully upload.""" + def __init__( - self, - message="Unable to download this asset because it not yet fully uploaded." + self, message="Unable to download this asset because it not yet fully uploaded." ): self.message = message super().__init__(self.message) + class AssetChecksumNotPresent(Exception): - """Exception raised when there's no checksum present for the Frame.io asset. - """ + """Exception raised when there's no checksum present for the Frame.io asset.""" + def __init__( - self, + self, message="""No checksum found on Frame.io for this asset. This could be because it was uploaded \ - before we introduced the feature, the media pipeline failed to process the asset, or the asset has yet to finish being processed.""" + before we introduced the feature, the media pipeline failed to process the asset, or the asset has yet to finish being processed.""", ): self.message = message super().__init__(self.message) + class AssetChecksumMismatch(Exception): - """Exception raised when the checksum for the downloaded file doesn't match what's found on Frame.io. - """ + """Exception raised when the checksum for the downloaded file doesn't match what's found on Frame.io.""" + def __init__( - self, - message="Checksum mismatch, you should re-download the asset to resolve any corrupt bits." 
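# Illustrative sketch, not part of this patch: how a caller might combine the
# download handler above with the exception classes defined in this file. The
# `downloader` argument is assumed to be the downloader class defined earlier
# in this patch (constructed with an asset dict, a download folder, and a prefix).
from frameioclient.lib.exceptions import (
    DownloadException,
    AssetNotFullyUploaded,
    AssetChecksumMismatch,
)

def safe_download(downloader):
    try:
        # Kick off the download; download_handler() returns the destination on success
        return downloader.download_handler()
    except AssetNotFullyUploaded:
        print("Asset has not finished uploading yet; try again later")
    except AssetChecksumMismatch:
        print("Checksum mismatch; re-download the asset")
    except DownloadException as e:
        print("Download failed: {}".format(e))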
+ self, + message="Checksum mismatch, you should re-download the asset to resolve any corrupt bits.", ): self.message = message - super().__init__(self.message) \ No newline at end of file + super().__init__(self.message) diff --git a/frameioclient/lib/logger.py b/frameioclient/lib/logger.py index 62b7d389..6b7fd55a 100644 --- a/frameioclient/lib/logger.py +++ b/frameioclient/lib/logger.py @@ -1,5 +1,6 @@ import logging + class SDKLogger(object): def __init__(self, log_name): self.initialize_logger() @@ -7,6 +8,6 @@ def __init__(self, log_name): def initialize_logger(self): logging.basicConfig(level=logging.INFO) - + def info(self, message): - self.logger.info(message) \ No newline at end of file + self.logger.info(message) diff --git a/frameioclient/lib/service.py b/frameioclient/lib/service.py index e168e121..bd5e455c 100644 --- a/frameioclient/lib/service.py +++ b/frameioclient/lib/service.py @@ -1,6 +1,7 @@ from ..client import FrameioClient from ..lib.bandwidth import NetworkBandwidth + class Service(object): def __init__(self, client: FrameioClient): self.client = client diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py index dad32261..2c720bdc 100644 --- a/frameioclient/lib/telemetry.py +++ b/frameioclient/lib/telemetry.py @@ -6,20 +6,18 @@ from .logger import SDKLogger from .version import ClientVersion -segment_id = os.environ['SEGMENT_WRITE_KEY'] # Production +segment_id = os.environ["SEGMENT_WRITE_KEY"] # Production analytics.write_key = segment_id + class Telemetry(object): def __init__(self, user_id): self.user_id = user_id self.speedtest = None self.identity = None self.context = None - self.integrations = { - 'all': False, - 'Amplitude': True - } - self.logger = SDKLogger('telemetry') + self.integrations = {"all": False, "Amplitude": True} + self.logger = SDKLogger("telemetry") self.build_context() @@ -32,7 +30,9 @@ def build_context(self): } def push(self, event_name, properties): - self.logger.info(("Pushing '{}' event to segment".format(event_name), properties)) + self.logger.info( + ("Pushing '{}' event to segment".format(event_name), properties) + ) try: status = analytics.track( @@ -40,7 +40,7 @@ def push(self, event_name, properties): event_name, properties={**properties}, context={**self.build_context()}, - integrations={**self.integrations} + integrations={**self.integrations}, ) except Exception as e: self.logger.info(e, event_name, properties) @@ -51,6 +51,7 @@ def __init__(self, user_id, event_name, properties): super().__init__(user_id) self.push(event_name, properties) + class ComparisonTest(Event, object): def __init__(self, transfer_stats, request_logs=[]): super().__init__() @@ -61,24 +62,20 @@ def __init__(self, transfer_stats, request_logs=[]): @staticmethod def _parse_requests_data(req_object): return { - 'speed': 0, - 'time_to_first_byte': 0, - 'response_time': 0, - 'byte_transferred': 0, - 'http_status': 200, - 'request_type': 'GET' + "speed": 0, + "time_to_first_byte": 0, + "response_time": 0, + "byte_transferred": 0, + "http_status": 200, + "request_type": "GET", } def _build_transfer_stats_payload(self, event_data): # Turn the request payload into a useful shape properties = { - 'download_speed': 0, - 'control': { - 'upload_bytes_sec': 0, - 'download_bits_sec': 0, - 'ping_ms': 0 - }, - 'hash_speed': 0 + "download_speed": 0, + "control": {"upload_bytes_sec": 0, "download_bits_sec": 0, "ping_ms": 0}, + "hash_speed": 0, } return properties @@ -87,7 +84,7 @@ def track_transfer(self): for chunk in self.requests_logs: 
pprint(chunk) # self.logger.info(pprint(chunk)) - + # Collect info to build message # Build payload for transfer tracking diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index bf30a891..1cc29038 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -9,17 +9,18 @@ from .logger import SDKLogger from .exceptions import ( - DownloadException, - AssetChecksumMismatch, - AssetChecksumNotPresent + DownloadException, + AssetChecksumMismatch, + AssetChecksumNotPresent, ) from .bandwidth import NetworkBandwidth, DiskBandwidth from .transport import HTTPClient + class AWSClient(HTTPClient, object): def __init__(self, concurrency=None, progress=True): - super().__init__() # Initialize via inheritance + super().__init__() # Initialize via inheritance self.progress = progress self.progress_manager = None self.destination = None @@ -36,10 +37,10 @@ def __init__(self, concurrency=None, progress=True): @staticmethod def check_cdn(url): # TODO improve this algo - if 'assets.frame.io' in url: - return 'Cloudfront' - elif 's3' in url: - return 'S3' + if "assets.frame.io" in url: + return "Cloudfront" + elif "s3" in url: + return "S3" else: return None @@ -50,8 +51,8 @@ def _create_file_stub(self): fp.close() except FileExistsError as e: if self.replace == True: - os.remove(self.destination) # Remove the file - self._create_file_stub() # Create a new stub + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub else: print(e) raise e @@ -75,7 +76,7 @@ def _optimize_concurrency(self): # return 5 - + def _get_byte_range(self, url, start_byte=0, end_byte=2048): """ Get a specific byte range from a given URL. This is **not** optimized \ @@ -99,13 +100,17 @@ def _get_byte_range(self, url, start_byte=0, end_byte=2048): def _download_whole(self, url): start_time = time.time() - print("Beginning download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) + print( + "Beginning download -- {} -- {}".format( + self.asset["name"], Utils.format_bytes(self.file_size, type="size") + ) + ) # Downloading r = self.session.get(url, stream=True) # Downloading - with open(self.destination, 'wb') as handle: + with open(self.destination, "wb") as handle: try: # TODO make sure this approach works for SBWM download for chunk in r.iter_content(chunk_size=4096): @@ -115,8 +120,12 @@ def _download_whole(self, url): raise e download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.file_size/(download_time))) - print("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + download_speed = Utils.format_bytes(math.ceil(self.file_size / (download_time))) + print( + "Downloaded {} at {}".format( + Utils.format_bytes(self.file_size, type="size"), download_speed + ) + ) return self.destination, download_speed @@ -132,42 +141,46 @@ def _download_chunk(self, task): in_progress = task[4] # Set the initial chunk_size, but prepare to overwrite - chunk_size = (end_byte - start_byte) + chunk_size = end_byte - start_byte if self.bytes_started + (chunk_size) > self.file_size: - difference = abs(self.file_size - (self.bytes_started + chunk_size)) # should be negative + difference = abs( + self.file_size - (self.bytes_started + chunk_size) + ) # should be negative chunk_size = chunk_size - difference print(f"Chunk size as done via math: {chunk_size}") else: pass # Set chunk size in a smarter way - self.bytes_started += (chunk_size) + 
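# Illustrative sketch, not part of this patch: the HTTP Range technique that
# _get_byte_range() and _download_chunk() rely on, shown standalone with plain
# `requests`. The URL is a placeholder.
import requests

def fetch_byte_range(url, start_byte=0, end_byte=2048):
    # Request an inclusive byte range; S3/CloudFront reply with 206 Partial Content
    headers = {"Range": "bytes={}-{}".format(start_byte, end_byte)}
    response = requests.get(url, headers=headers)
    response.raise_for_status()
    return response.content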
self.bytes_started += chunk_size # Update the bar for in_progress chunks in_progress.update(float(chunk_size)) - - # Specify the start and end of the range request - headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} + + # Specify the start and end of the range request + headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} # Grab the data as a stream r = self.session.get(url, headers=headers, stream=True) # Write the file to disk with open(self.destination, "r+b") as fp: - fp.seek(start_byte) # Seek to the right spot in the file - chunk_size = len(r.content) # Get the final chunk size - fp.write(r.content) # Write the data + fp.seek(start_byte) # Seek to the right spot in the file + chunk_size = len(r.content) # Get the final chunk size + fp.write(r.content) # Write the data # Save requests logs - self.request_logs.append({ - 'headers': r.headers, - 'http_status': r.status_code, - 'bytes_transferred': len(r.content) - }) + self.request_logs.append( + { + "headers": r.headers, + "http_status": r.status_code, + "bytes_transferred": len(r.content), + } + ) # Increase the count for bytes_completed, but only if it doesn't overrun file length - self.bytes_completed += (chunk_size) + self.bytes_completed += chunk_size if self.bytes_completed > self.file_size: self.bytes_completed = self.file_size @@ -188,49 +201,57 @@ def multi_thread_download(self, url): raise DownloadException(message=e) offset = math.ceil(self.file_size / self.chunks) - in_byte = 0 # Set initially here, but then override - - print("Multi-part download -- {} -- {}".format(self.asset["name"], Utils.format_bytes(self.file_size, type="size"))) + in_byte = 0 # Set initially here, but then override + + print( + "Multi-part download -- {} -- {}".format( + self.asset["name"], Utils.format_bytes(self.file_size, type="size") + ) + ) # Queue up threads with enlighten.get_manager() as manager: status = manager.status_bar( position=3, - status_format=u'{fill}Stage: {stage}{fill}{elapsed}', - color='bold_underline_bright_white_on_lightslategray', - justify=enlighten.Justify.CENTER, - stage='Initializing', - autorefresh=True, - min_delta=0.5 + status_format="{fill}Stage: {stage}{fill}{elapsed}", + color="bold_underline_bright_white_on_lightslategray", + justify=enlighten.Justify.CENTER, + stage="Initializing", + autorefresh=True, + min_delta=0.5, ) - BAR_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ - 'Downloading: {count_1:.2j}/{total:.2j} ' + \ - 'Completed: {count_2:.2j}/{total:.2j} ' + \ - '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + BAR_FORMAT = ( + "{desc}{desc_pad}|{bar}|{percentage:3.0f}% " + + "Downloading: {count_1:.2j}/{total:.2j} " + + "Completed: {count_2:.2j}/{total:.2j} " + + "[{elapsed}<{eta}, {rate:.2j}{unit}/s]" + ) # Add counter to track completed chunks initializing = manager.counter( position=2, total=float(self.file_size), - desc='Progress', - unit='B', + desc="Progress", + unit="B", bar_format=BAR_FORMAT, ) # Add additional counter - in_progress = initializing.add_subcounter('yellow', all_fields=True) - completed = initializing.add_subcounter('green', all_fields=True) + in_progress = initializing.add_subcounter("yellow", all_fields=True) + completed = initializing.add_subcounter("green", all_fields=True) # Set default state initializing.refresh() - status.update(stage='Downloading', color='green') - - with concurrent.futures.ThreadPoolExecutor(max_workers=self.aws_client.concurrency) as executor: + status.update(stage="Downloading", color="green") + + with concurrent.futures.ThreadPoolExecutor( 
+ max_workers=self.aws_client.concurrency + ) as executor: for i in range(int(self.chunks)): # Increment by the iterable + 1 so we don't mutiply by zero - out_byte = offset * (i+1) + out_byte = offset * (i + 1) # Create task tuple task = (url, in_byte, out_byte, i, in_progress) # Stagger start for each chunk by 0.1 seconds @@ -240,19 +261,21 @@ def multi_thread_download(self, url): self.futures.append(executor.submit(self._download_chunk, task)) # Reset new in byte equal to last out byte in_byte = out_byte - + # Keep updating the progress while we have > 0 bytes left. # Wait on threads to finish for future in concurrent.futures.as_completed(self.futures): try: chunk_size = future.result() - completed.update_from(in_progress, float((chunk_size - 1)), force=True) + completed.update_from( + in_progress, float((chunk_size - 1)), force=True + ) except Exception as exc: print(exc) - + # Calculate and print stats download_time = round((time.time() - start_time), 2) - download_speed = round((self.file_size/download_time), 2) + download_speed = round((self.file_size / download_time), 2) if self.checksum_verification == True: # Check for checksum, if not present throw error @@ -260,34 +283,49 @@ def multi_thread_download(self, url): raise AssetChecksumNotPresent else: # Perform hash-verification - status.update(stage='Verifying') + status.update(stage="Verifying") - VERIFICATION_FORMAT = '{desc}{desc_pad}|{bar}|{percentage:3.0f}% ' + \ - 'Progress: {count:.2j}/{total:.2j} ' + \ - '[{elapsed}<{eta}, {rate:.2j}{unit}/s]' + VERIFICATION_FORMAT = ( + "{desc}{desc_pad}|{bar}|{percentage:3.0f}% " + + "Progress: {count:.2j}/{total:.2j} " + + "[{elapsed}<{eta}, {rate:.2j}{unit}/s]" + ) # Add counter to track completed chunks verification = manager.counter( position=1, total=float(self.file_size), - desc='Verifying', - unit='B', + desc="Verifying", + unit="B", bar_format=VERIFICATION_FORMAT, - color='purple' + color="purple", ) # Calculate the file hash - if Utils.calculate_hash(self.destination, progress_callback=verification) != self.original_checksum: + if ( + Utils.calculate_hash( + self.destination, progress_callback=verification + ) + != self.original_checksum + ): raise AssetChecksumMismatch # Update the header - status.update(stage='Download Complete!', force=True) + status.update(stage="Download Complete!", force=True) # Log completion event - SDKLogger('downloads').info("Downloaded {} at {}".format(Utils.format_bytes(self.file_size, type="size"), download_speed)) + SDKLogger("downloads").info( + "Downloaded {} at {}".format( + Utils.format_bytes(self.file_size, type="size"), download_speed + ) + ) # Submit telemetry - transfer_stats = {'speed': download_speed, 'time': download_time, 'cdn': AWSClient.check_cdn(url)} + transfer_stats = { + "speed": download_speed, + "time": download_time, + "cdn": AWSClient.check_cdn(url), + } # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) @@ -301,7 +339,7 @@ def multi_thread_download(self, url): "cdn": AWSClient.check_cdn(url), "concurrency": self.aws_client.concurrency, "size": self.file_size, - "chunks": self.chunks + "chunks": self.chunks, } return dl_info else: @@ -311,17 +349,19 @@ def multi_thread_download(self, url): class TransferJob(AWSClient): # These will be used to track the job and then push telemetry def __init__(self, job_info): - self.job_info = job_info # < - convert to JobInfo class - self.cdn = 'S3' # or 'CF' - use check_cdn to confirm + self.job_info = job_info # < - convert to JobInfo class + self.cdn = "S3" # or 'CF' - use 
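# Illustrative sketch, not part of this patch: the chunked, multi-threaded
# download pattern used by multi_thread_download() above, reduced to its core.
# Progress bars, telemetry, and checksum verification are omitted; the URL,
# destination, and file size are placeholders supplied by the caller.
import math
import concurrent.futures
import requests

def download_in_chunks(url, destination, file_size, chunk_size=25 * 1024 * 1024, workers=5):
    chunks = math.ceil(file_size / chunk_size)

    # Create the file up front so each worker can seek to its own offset
    with open(destination, "wb"):
        pass

    def fetch(index):
        start = index * chunk_size
        end = min(start + chunk_size - 1, file_size - 1)
        r = requests.get(url, headers={"Range": "bytes={}-{}".format(start, end)})
        r.raise_for_status()
        with open(destination, "r+b") as fp:
            fp.seek(start)      # Seek to this chunk's position
            fp.write(r.content)  # Write the chunk's bytes
        return len(r.content)

    with concurrent.futures.ThreadPoolExecutor(max_workers=workers) as executor:
        futures = [executor.submit(fetch, i) for i in range(chunks)]
        return sum(f.result() for f in concurrent.futures.as_completed(futures))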
check_cdn to confirm self.progress_manager = None + class DownloadJob(TransferJob): def __init__(self): - self.asset_type = 'review_link' # we should use a dataclass here + self.asset_type = "review_link" # we should use a dataclass here # Need to create a re-usable job schema # Think URL -> output_path pass + class UploadJob(TransferJob): def __init__(self, destination): self.destination = destination diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 9823c7c6..460ceea9 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -8,6 +8,7 @@ from .utils import PaginatedResponse from .constants import default_thread_count from .exceptions import PresentationException + # from .bandwidth import NetworkBandwidth, DiskBandwidth @@ -19,7 +20,8 @@ class HTTPClient(object): Args: threads (int): Number of threads to use concurrently. - """ + """ + def __init__(self, threads=default_thread_count): # Setup number of threads to use self.threads = threads @@ -28,7 +30,7 @@ def __init__(self, threads=default_thread_count): self.thread_local = None self.client_version = ClientVersion.version() self.shared_headers = { - 'x-frameio-client': 'python/{}'.format(self.client_version) + "x-frameio-client": "python/{}".format(self.client_version) } # Configure retry strategy (very broad right now) @@ -36,7 +38,7 @@ def __init__(self, threads=default_thread_count): total=3, backoff_factor=1, status_forcelist=[400, 429, 500, 503], - method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"] + method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"], ) # Create real thread @@ -49,7 +51,7 @@ def _get_session(self, auth=True): if not hasattr(self.thread_local, "session"): http = requests.Session() adapter = HTTPAdapter(max_retries=self.retry_strategy) - adapter.add_headers(self.shared_headers) # add version header + adapter.add_headers(self.shared_headers) # add version header http.mount("https", adapter) self.thread_local.session = http @@ -64,7 +66,8 @@ class APIClient(HTTPClient, object): token (str): Frame.io developer token, JWT, or OAuth access token. threads (int): Number of threads to concurrently use for uploads/downloads. progress (bool): If True, show status bars in console. 
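# Illustrative sketch, not part of this patch: the retry-enabled requests.Session
# that HTTPClient builds above, shown standalone. Newer urllib3 releases name the
# argument `allowed_methods` (the file above uses the older `method_whitelist`);
# the status codes and header value below are placeholders.
import requests
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

retry_strategy = Retry(
    total=3,
    backoff_factor=1,
    status_forcelist=[429, 500, 503],
    allowed_methods=["GET", "POST", "PUT", "DELETE"],
)

session = requests.Session()
session.headers.update({"x-frameio-client": "python/example"})
session.mount("https://", HTTPAdapter(max_retries=retry_strategy))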
- """ + """ + def __init__(self, token, host, threads, progress): super().__init__(threads) self.host = host @@ -73,36 +76,31 @@ def __init__(self, token, host, threads, progress): self.progress = progress self._initialize_thread() self.session = self._get_session() - self.auth_header = { - 'Authorization': 'Bearer {}'.format(self.token) - } + self.auth_header = {"Authorization": "Bearer {}".format(self.token)} def _format_api_call(self, endpoint): - return '{}/v2{}'.format(self.host, endpoint) + return "{}/v2{}".format(self.host, endpoint) def _api_call(self, method, endpoint, payload={}, limit=None): headers = {**self.shared_headers, **self.auth_header} r = self.session.request( - method, - self._format_api_call(endpoint), - headers=headers, - json=payload + method, self._format_api_call(endpoint), headers=headers, json=payload ) if r.ok: - if r.headers.get('page-number'): - if int(r.headers.get('total-pages')) > 1: + if r.headers.get("page-number"): + if int(r.headers.get("total-pages")) > 1: return PaginatedResponse( results=r.json(), limit=limit, - page_size=r.headers['per-page'], - total_pages=r.headers['total-pages'], - total=r.headers['total'], + page_size=r.headers["per-page"], + total_pages=r.headers["total-pages"], + total=r.headers["total"], endpoint=endpoint, method=method, payload=payload, - client=self + client=self, ) if isinstance(r.json(), list): @@ -125,11 +123,10 @@ def get_specific_page(self, method, endpoint, payload, page): payload (dict): Request payload page (int): What page to get """ - if method == 'get': - endpoint = '{}?page={}'.format(endpoint, page) + if method == "get": + endpoint = "{}?page={}".format(endpoint, page) return self._api_call(method, endpoint) - if method == 'post': - payload['page'] = page + if method == "post": + payload["page"] = page return self._api_call(method, endpoint, payload=payload) - diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 2edcb915..ae5b10eb 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -8,131 +8,137 @@ thread_local = threading.local() -class FrameioUploader(object): - def __init__(self, asset=None, file=None): - self.asset = asset - self.file = file - self.chunk_size = None - self.file_count = 0 - self.file_num = 0 - - def _calculate_chunks(self, total_size, chunk_count): - """Calculate chunk size - - Args: - total_size (int): Total filesize in bytes - chunk_count (int): Total number of URL's we got back from the API - - Returns: - chunk_offsets (list): List of chunk offsets - """ - self.chunk_size = int(math.ceil(total_size / chunk_count)) - chunk_offsets = list() - - for index in range(chunk_count): - offset_amount = index * self.chunk_size - chunk_offsets.append(offset_amount) - - return chunk_offsets - - def _get_session(self): - if not hasattr(thread_local, "session"): - thread_local.session = requests.Session() - return thread_local.session - - def _smart_read_chunk(self, chunk_offset, is_final_chunk): - with open(os.path.realpath(self.file.name), "rb") as file: - file.seek(chunk_offset, 0) - if is_final_chunk: # If it's the final chunk, we want to just read until the end of the file - data = file.read() - else: # If it's not the final chunk, we want to ONLY read the specified chunk - data = file.read(self.chunk_size) - return data - - def _upload_chunk(self, task): - url = task[0] - chunk_offset = task[1] - chunk_id = task[2] - chunks_total = len(self.asset['upload_urls']) - - is_final_chunk = False - - if chunk_id+1 == chunks_total: - is_final_chunk = True 
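# Illustrative sketch, not part of this patch: how a caller consumes the
# pagination that _api_call() sets up. When an endpoint spans multiple pages,
# _api_call() returns a PaginatedResponse (defined in utils.py) that supports
# len() and lazy iteration, fetching later pages through get_specific_page().
# The client, project id, and the "email" field are placeholders.
collaborators = client.projects.get_collaborators("PROJECT_ID", page_size=10)

print(len(collaborators))           # total result count (or the configured limit)
for collaborator in collaborators:  # later pages are requested on demand
    print(collaborator.get("email"))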
- - session = self._get_session() - - chunk_data = self._smart_read_chunk(chunk_offset, is_final_chunk) - - try: - r = session.put(url, data=chunk_data, headers={ - 'content-type': self.asset['filetype'], - 'x-amz-acl': 'private' - }) - # print("Completed chunk, status: {}".format(r.status_code)) - except Exception as e: - print(e) - - r.raise_for_status() - - def upload(self): - total_size = self.asset['filesize'] - upload_urls = self.asset['upload_urls'] - - chunk_offsets = self._calculate_chunks(total_size, chunk_count=len(upload_urls)) - - with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: - for i in range(len(upload_urls)): - url = upload_urls[i] - chunk_offset = chunk_offsets[i] - - task = (url, chunk_offset, i) - executor.submit(self._upload_chunk, task) - - - def file_counter(self, folder): - matches = [] - for root, dirnames, filenames in os.walk(folder): - for filename in filenames: - matches.append(os.path.join(filename)) - - self.file_count = len(matches) - - return matches - - def recursive_upload(self, client, folder, parent_asset_id): - # Seperate files and folders: - file_list = list() - folder_list = list() - - if self.file_count == 0: - self.file_counter(folder) - - for item in os.listdir(folder): - if item == ".DS_Store": # Ignore .DS_Store files on Mac - continue - - complete_item_path = os.path.join(folder, item) - - if os.path.isfile(complete_item_path): - file_list.append(item) - else: - folder_list.append(item) - - for file_p in file_list: - self.file_num += 1 +class FrameioUploader(object): + def __init__(self, asset=None, file=None): + self.asset = asset + self.file = file + self.chunk_size = None + self.file_count = 0 + self.file_num = 0 + + def _calculate_chunks(self, total_size, chunk_count): + """Calculate chunk size + + Args: + total_size (int): Total filesize in bytes + chunk_count (int): Total number of URL's we got back from the API + + Returns: + chunk_offsets (list): List of chunk offsets + """ + self.chunk_size = int(math.ceil(total_size / chunk_count)) - complete_dir_obj = os.path.join(folder, file_p) - print(f"Starting {self.file_num:02d}/{self.file_count}, Size: {Utils.format_bytes(os.path.getsize(complete_dir_obj), type='size')}, Name: {file_p}") - client.assets.upload(parent_asset_id, complete_dir_obj) + chunk_offsets = list() + + for index in range(chunk_count): + offset_amount = index * self.chunk_size + chunk_offsets.append(offset_amount) + + return chunk_offsets + + def _get_session(self): + if not hasattr(thread_local, "session"): + thread_local.session = requests.Session() + return thread_local.session + + def _smart_read_chunk(self, chunk_offset, is_final_chunk): + with open(os.path.realpath(self.file.name), "rb") as file: + file.seek(chunk_offset, 0) + if ( + is_final_chunk + ): # If it's the final chunk, we want to just read until the end of the file + data = file.read() + else: # If it's not the final chunk, we want to ONLY read the specified chunk + data = file.read(self.chunk_size) + return data + + def _upload_chunk(self, task): + url = task[0] + chunk_offset = task[1] + chunk_id = task[2] + chunks_total = len(self.asset["upload_urls"]) + + is_final_chunk = False + + if chunk_id + 1 == chunks_total: + is_final_chunk = True + + session = self._get_session() + + chunk_data = self._smart_read_chunk(chunk_offset, is_final_chunk) + + try: + r = session.put( + url, + data=chunk_data, + headers={ + "content-type": self.asset["filetype"], + "x-amz-acl": "private", + }, + ) + # print("Completed chunk, status: 
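# Illustrative sketch, not part of this patch: what _calculate_chunks() produces.
# For a 100 MiB file and four upload URLs, the chunk size is 25 MiB and the
# offsets are the byte positions each chunk read starts from.
import math

total_size = 100 * 1024 * 1024
chunk_count = 4
chunk_size = int(math.ceil(total_size / chunk_count))            # 26214400 bytes
chunk_offsets = [index * chunk_size for index in range(chunk_count)]
# -> [0, 26214400, 52428800, 78643200]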
{}".format(r.status_code)) + except Exception as e: + print(e) + + r.raise_for_status() + + def upload(self): + total_size = self.asset["filesize"] + upload_urls = self.asset["upload_urls"] + + chunk_offsets = self._calculate_chunks(total_size, chunk_count=len(upload_urls)) + + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + for i in range(len(upload_urls)): + url = upload_urls[i] + chunk_offset = chunk_offsets[i] + + task = (url, chunk_offset, i) + executor.submit(self._upload_chunk, task) + + def file_counter(self, folder): + matches = [] + for root, dirnames, filenames in os.walk(folder): + for filename in filenames: + matches.append(os.path.join(filename)) + + self.file_count = len(matches) + + return matches + + def recursive_upload(self, client, folder, parent_asset_id): + # Seperate files and folders: + file_list = list() + folder_list = list() + + if self.file_count == 0: + self.file_counter(folder) + + for item in os.listdir(folder): + if item == ".DS_Store": # Ignore .DS_Store files on Mac + continue + + complete_item_path = os.path.join(folder, item) + + if os.path.isfile(complete_item_path): + file_list.append(item) + else: + folder_list.append(item) + + for file_p in file_list: + self.file_num += 1 - for folder_name in folder_list: - new_folder = os.path.join(folder, folder_name) - new_parent_asset_id = client.assets.create( - parent_asset_id=parent_asset_id, - name=folder_name, - type="folder" - )['id'] - - self.recursive_upload(client, new_folder, new_parent_asset_id) \ No newline at end of file + complete_dir_obj = os.path.join(folder, file_p) + print( + f"Starting {self.file_num:02d}/{self.file_count}, Size: {Utils.format_bytes(os.path.getsize(complete_dir_obj), type='size')}, Name: {file_p}" + ) + client.assets.upload(parent_asset_id, complete_dir_obj) + + for folder_name in folder_list: + new_folder = os.path.join(folder, folder_name) + new_parent_asset_id = client.assets.create( + parent_asset_id=parent_asset_id, name=folder_name, type="folder" + )["id"] + + self.recursive_upload(client, new_folder, new_parent_asset_id) diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 094519b0..0f76e497 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -8,18 +8,21 @@ def Reference(*args, **kwargs): - print(kwargs['operation']) + print(kwargs["operation"]) + def inner(func): - ''' - do operations with func - ''' + """ + do operations with func + """ return func + return inner + class Utils: - @staticmethod - def stream(func, page=1, page_size=20): - """ + @staticmethod + def stream(func, page=1, page_size=20): + """ Accepts a lambda of a call to a client list method, and streams the results until \ the list has been exhausted. 
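# Illustrative sketch, not part of this patch: kicking off a recursive folder
# upload with the uploader above. The token, folder path, and parent asset id
# are placeholders, and FrameioClient's constructor signature is an assumption
# for this example.
from frameioclient import FrameioClient
from frameioclient.lib import FrameioUploader

client = FrameioClient("TOKEN")
uploader = FrameioUploader()  # asset/file are only needed for single-file uploads
uploader.recursive_upload(client, "./footage", parent_asset_id="PARENT_ASSET_ID")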
@@ -30,190 +33,201 @@ def stream(func, page=1, page_size=20): stream(lambda pagination: client.get_collaborators(project_id, **pagination)) """ - total_pages = page - while page <= total_pages: - result_list = func(page=page, page_size=page_size) - total_pages = result_list.total_pages - for res in result_list: - yield res - - page += 1 - - @staticmethod - def format_bytes(size, type="speed"): - """ - Convert bytes to KB/MB/GB/TB/s - """ - # 2**10 = 1024 - power = 2**10 - n = 0 - power_labels = {0 : 'B', 1: 'KB', 2: 'MB', 3: 'GB', 4: 'TB'} - - while size > power: - size /= power - n += 1 - - formatted = " ".join((str(round(size, 2)), power_labels[n])) - - if type == "speed": - return formatted + "/s" - - elif type == "size": - return formatted - - @staticmethod - def calculate_hash(file_path, progress_callback=None): - """ - Calculate an xx64hash - """ - xxh64_hash = xxhash.xxh64() - b = bytearray(MB * 8) - f = open(file_path, "rb") - while True: - numread = f.readinto(b) - if not numread: - break - - xxh64_hash.update(b[:numread]) - if progress_callback: - # Should only subtract 1 here when necessary, not every time! - progress_callback.update(float(numread - 1), force=True) - - xxh64_digest = xxh64_hash.hexdigest() - - return xxh64_digest + total_pages = page + while page <= total_pages: + result_list = func(page=page, page_size=page_size) + total_pages = result_list.total_pages + for res in result_list: + yield res + + page += 1 + + @staticmethod + def format_bytes(size, type="speed"): + """ + Convert bytes to KB/MB/GB/TB/s + """ + # 2**10 = 1024 + power = 2 ** 10 + n = 0 + power_labels = {0: "B", 1: "KB", 2: "MB", 3: "GB", 4: "TB"} + + while size > power: + size /= power + n += 1 + + formatted = " ".join((str(round(size, 2)), power_labels[n])) + + if type == "speed": + return formatted + "/s" + + elif type == "size": + return formatted + + @staticmethod + def calculate_hash(file_path, progress_callback=None): + """ + Calculate an xx64hash + """ + xxh64_hash = xxhash.xxh64() + b = bytearray(MB * 8) + f = open(file_path, "rb") + while True: + numread = f.readinto(b) + if not numread: + break + + xxh64_hash.update(b[:numread]) + if progress_callback: + # Should only subtract 1 here when necessary, not every time! 
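# Illustrative sketch, not part of this patch: verifying a downloaded file
# against the xxHash64 checksum Frame.io stores on the asset, using the same
# hashing approach as calculate_hash() above. The file path and expected digest
# are placeholders.
import xxhash

def verify_xxhash64(file_path, expected_hex_digest):
    digest = xxhash.xxh64()
    with open(file_path, "rb") as f:
        # Hash the file in 8 MiB blocks to keep memory usage flat
        for block in iter(lambda: f.read(8 * 1024 * 1024), b""):
            digest.update(block)
    return digest.hexdigest() == expected_hex_digest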
+ progress_callback.update(float(numread - 1), force=True) + + xxh64_digest = xxh64_hash.hexdigest() + + return xxh64_digest + + @staticmethod + def compare_items(dict1, dict2): + """ + Python 2 and 3 compatible way of comparing 2x dictionaries + """ + comparison = None + + if sys.version_info.major >= 3: + import operator + + comparison = operator.eq(dict1, dict2) - @staticmethod - def compare_items(dict1, dict2): - """ - Python 2 and 3 compatible way of comparing 2x dictionaries - """ - comparison = None - - if sys.version_info.major >= 3: - import operator - comparison = operator.eq(dict1, dict2) - - else: - if dict1 == dict2: - comparison = True - - if comparison == False: - print("File mismatch between upload and download") - - return comparison - - @staticmethod - def get_valid_filename(s): - """ - Strip out invalid characters from a filename using regex - """ - s = str(s).strip().replace(' ', '_') - return re.sub(r'(?u)[^-\w.]', '', s) - - @staticmethod - def normalize_filename(fn): - """ - Normalize filename using pure python - """ - validchars = "-_.() " - out = "" - - if isinstance(fn, str): - pass - elif isinstance(fn, unicode): - fn = str(fn.decode('utf-8', 'ignore')) - else: - pass - - for c in fn: - if str.isalpha(c) or str.isdigit(c) or (c in validchars): - out += c else: - out += "_" - return out - - @staticmethod - def format_headers(token, version): - return { - 'Authorization': 'Bearer {}'.format(token), - 'x-frameio-client': 'python/{}'.format(version) - } - + if dict1 == dict2: + comparison = True + + if comparison == False: + print("File mismatch between upload and download") + + return comparison + + @staticmethod + def get_valid_filename(s): + """ + Strip out invalid characters from a filename using regex + """ + s = str(s).strip().replace(" ", "_") + return re.sub(r"(?u)[^-\w.]", "", s) + + @staticmethod + def normalize_filename(fn): + """ + Normalize filename using pure python + """ + validchars = "-_.() " + out = "" + + if isinstance(fn, str): + pass + elif isinstance(fn, unicode): + fn = str(fn.decode("utf-8", "ignore")) + else: + pass -class PaginatedResponse(object): - def __init__(self, results=[], limit=None, page_size=0, total=0, - total_pages=0, endpoint=None, method=None, payload={}, - client=None): - self.results = results - - self.limit = limit - self.page_size = int(page_size) - self.total = int(total) - self.total_pages = int(total_pages) - - self.endpoint = endpoint - self.method = method - self.payload = payload - self.client = client - - self.asset_index = 0 # Index on current page - self.returned = 0 # Total returned count - self.current_page = 1 - - def __iter__(self): - return self - - def __next__(self): - # Reset if we've reached end - if self.returned == self.limit or self.returned == self.total: - self.asset_index = 0 - self.returned = 0 - self.current_page = 1 - - self.results = self.client.get_specific_page( - self.method, self.endpoint, self.payload, page=1).results - raise StopIteration - - if self.limit is None or self.returned < self.limit: - if self.asset_index < self.page_size and self.returned < self.total: - self.asset_index += 1 - self.returned += 1 - return self.results[self.asset_index - 1] - raise StopIteration + for c in fn: + if str.isalpha(c) or str.isdigit(c) or (c in validchars): + out += c + else: + out += "_" + return out - if self.current_page < self.total_pages: - self.current_page += 1 - self.asset_index = 1 - self.returned += 1 + @staticmethod + def format_headers(token, version): + return { + "Authorization": 
"Bearer {}".format(token), + "x-frameio-client": "python/{}".format(version), + } - self.results = self.client.get_specific_page( - self.method, self.endpoint, self.payload, self.current_page).results - return self.results[self.asset_index - 1] - raise StopIteration +class PaginatedResponse(object): + def __init__( + self, + results=[], + limit=None, + page_size=0, + total=0, + total_pages=0, + endpoint=None, + method=None, + payload={}, + client=None, + ): + self.results = results + + self.limit = limit + self.page_size = int(page_size) + self.total = int(total) + self.total_pages = int(total_pages) + + self.endpoint = endpoint + self.method = method + self.payload = payload + self.client = client + + self.asset_index = 0 # Index on current page + self.returned = 0 # Total returned count + self.current_page = 1 + + def __iter__(self): + return self + + def __next__(self): + # Reset if we've reached end + if self.returned == self.limit or self.returned == self.total: + self.asset_index = 0 + self.returned = 0 + self.current_page = 1 + + self.results = self.client.get_specific_page( + self.method, self.endpoint, self.payload, page=1 + ).results + raise StopIteration + + if self.limit is None or self.returned < self.limit: + if self.asset_index < self.page_size and self.returned < self.total: + self.asset_index += 1 + self.returned += 1 + return self.results[self.asset_index - 1] + raise StopIteration + + if self.current_page < self.total_pages: + self.current_page += 1 + self.asset_index = 1 + self.returned += 1 + + self.results = self.client.get_specific_page( + self.method, self.endpoint, self.payload, self.current_page + ).results + + return self.results[self.asset_index - 1] + raise StopIteration - raise StopIteration + raise StopIteration - def next(self): # Python 2 - return self.__next__() + def next(self): # Python 2 + return self.__next__() - def __len__(self): - if self.limit and self.limit < self.total: - return self.limit + def __len__(self): + if self.limit and self.limit < self.total: + return self.limit - return self.total + return self.total class ProgressBar(object): - def __init__(self, parent=None, total=0, iterable=[]): - self.parent = parent - self.total = total - self.iterable = iterable - - def create(self): - pass + def __init__(self, parent=None, total=0, iterable=[]): + self.parent = parent + self.total = total + self.iterable = iterable - def update(self): - pass + def create(self): + pass + def update(self): + pass diff --git a/frameioclient/lib/version.py b/frameioclient/lib/version.py index 4f09e4e5..3a8f3ae9 100644 --- a/frameioclient/lib/version.py +++ b/frameioclient/lib/version.py @@ -4,7 +4,8 @@ # Running on pre-3.8 Python; use importlib-metadata package import importlib_metadata as metadata + class ClientVersion: @staticmethod def version(): - return metadata.version('frameioclient') + return metadata.version("frameioclient") diff --git a/frameioclient/services/__init__.py b/frameioclient/services/__init__.py index 18ab0ea6..0efb1b65 100644 --- a/frameioclient/services/__init__.py +++ b/frameioclient/services/__init__.py @@ -6,4 +6,4 @@ from .projects import Project from .search import Search from .links import ReviewLink, PresentationLink -from .helpers import FrameioHelpers \ No newline at end of file +from .helpers import FrameioHelpers diff --git a/frameioclient/services/comments.py b/frameioclient/services/comments.py index 36603281..8a5db008 100644 --- a/frameioclient/services/comments.py +++ b/frameioclient/services/comments.py @@ -1,92 +1,93 @@ 
from ..lib.service import Service + class Comment(Service): - def create(self, asset_id, **kwargs): - """ - Create a comment. - - Args: - asset_id (string): The asset id. - - :Keyword Arguments: - (optional) kwargs: additional request parameters. - - Example:: - - client.comments.create( - asset_id="123abc", - text="Hello world" - ) - """ - endpoint = '/assets/{}/comments'.format(asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def get(self, comment_id, **kwargs): - """ - Get a comment. - - Args: - comment_id (string): The comment id. - """ - endpoint = '/comments/{}'.format(comment_id) - return self.client._api_call('get', endpoint, **kwargs) - - def list(self, asset_id, **kwargs): - """ - Get an asset's comments. - - Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}/comments'.format(asset_id) - return self.client._api_call('get', endpoint, **kwargs) - - def update(self, comment_id, **kwargs): - """ - Update a comment. - - Args: - comment_id (string): The comment id. - - :Keyword Arguments: - (optional) kwargs: additional request parameters. - - Example:: - - client.comments.update( - comment_id="123abc", - text="Hello world" - ) - """ - endpoint = '/comments/{}'.format(comment_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def delete(self, comment_id): - """ - Delete a comment. - - Args: - comment_id (string): The comment id. - """ - endpoint = '/comments/{}'.format(comment_id) - return self.client._api_call('delete', endpoint) - - def reply(self, comment_id, **kwargs): - """ - Reply to an existing comment. - - Args: - comment_id (string): The comment id. - - :Keyword Arguments: - (optional) kwargs: additional request parameters. - - Example:: - - client.comments.reply( - comment_id="123abc", - text="Hello world" - ) - """ - endpoint = '/comments/{}/replies'.format(comment_id) - return self.client._api_call('post', endpoint, payload=kwargs) + def create(self, asset_id, **kwargs): + """ + Create a comment. + + Args: + asset_id (string): The asset id. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.comments.create( + asset_id="123abc", + text="Hello world" + ) + """ + endpoint = "/assets/{}/comments".format(asset_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def get(self, comment_id, **kwargs): + """ + Get a comment. + + Args: + comment_id (string): The comment id. + """ + endpoint = "/comments/{}".format(comment_id) + return self.client._api_call("get", endpoint, **kwargs) + + def list(self, asset_id, **kwargs): + """ + Get an asset's comments. + + Args: + asset_id (string): The asset id. + """ + endpoint = "/assets/{}/comments".format(asset_id) + return self.client._api_call("get", endpoint, **kwargs) + + def update(self, comment_id, **kwargs): + """ + Update a comment. + + Args: + comment_id (string): The comment id. + + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.comments.update( + comment_id="123abc", + text="Hello world" + ) + """ + endpoint = "/comments/{}".format(comment_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def delete(self, comment_id): + """ + Delete a comment. + + Args: + comment_id (string): The comment id. + """ + endpoint = "/comments/{}".format(comment_id) + return self.client._api_call("delete", endpoint) + + def reply(self, comment_id, **kwargs): + """ + Reply to an existing comment. + + Args: + comment_id (string): The comment id. 
+ + :Keyword Arguments: + (optional) kwargs: additional request parameters. + + Example:: + + client.comments.reply( + comment_id="123abc", + text="Hello world" + ) + """ + endpoint = "/comments/{}/replies".format(comment_id) + return self.client._api_call("post", endpoint, payload=kwargs) diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py index 6d6d1b29..e19808d7 100644 --- a/frameioclient/services/helpers.py +++ b/frameioclient/services/helpers.py @@ -10,142 +10,149 @@ from typing import List from pprint import pprint + class FrameioHelpers(Service): - def get_updated_assets(self, account_id, project_id, timestamp): - """ - Get assets added or updated since timestamp. - - :Args: - account_id (string): The account id. - project_id (string): The project id. - timestamp (string): ISO 8601 UTC format. - (datetime.now(timezone.utc).isoformat()) - """ - payload = { - "account_id": account_id, - "page": 1, - "page_size": 50, - "include": "children", - "sort": "-inserted_at", - "filter": { - "project_id": { - "op": "eq", - "value": project_id - }, - "updated_at": { - "op": "gte", - "value": timestamp + def get_updated_assets(self, account_id, project_id, timestamp): + """ + Get assets added or updated since timestamp. + + :Args: + account_id (string): The account id. + project_id (string): The project id. + timestamp (string): ISO 8601 UTC format. + (datetime.now(timezone.utc).isoformat()) + """ + payload = { + "account_id": account_id, + "page": 1, + "page_size": 50, + "include": "children", + "sort": "-inserted_at", + "filter": { + "project_id": {"op": "eq", "value": project_id}, + "updated_at": {"op": "gte", "value": timestamp}, + }, } - } - } - endpoint = '/search/library' - return self.client._api_call('post', endpoint, payload=payload) - - def get_assets_recursively(self, asset_id, slim=True): - assets = self.client.assets.get_children(asset_id, slim=slim) - print("Number of assets at top level", len(assets)) - - for asset in assets: - # try: - print(f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}") - # except KeyError: - # print("No children found") - - total_bytes = 0 - - if asset['_type'] == "file": - # Don't do nothing, it's a file! - continue - - if asset['_type'] == "verson_stack": - print("Grabbing top item from version stack") - versions = self.client.assets.get_children(asset['id'], slim=True) - asset = versions[0] # re-assign on purpose - continue - - # We only get the first three items when we use "include=children" - if asset['_type'] == "folder": - # try: - if asset['item_count'] > 3: - # Recursively fetch the contents of the folder because we have to - asset['children'] = self.get_assets_recursively(asset['id'], slim) - print("Grabbed more items for this sub dir") + endpoint = "/search/library" + return self.client._api_call("post", endpoint, payload=payload) + + def get_assets_recursively(self, asset_id, slim=True): + assets = self.client.assets.get_children(asset_id, slim=slim) + print("Number of assets at top level", len(assets)) + + for asset in assets: + # try: + print( + f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}" + ) + # except KeyError: + # print("No children found") + + total_bytes = 0 + + if asset["_type"] == "file": + # Don't do nothing, it's a file! 
+ continue + + if asset["_type"] == "verson_stack": + print("Grabbing top item from version stack") + versions = self.client.assets.get_children(asset["id"], slim=True) + asset = versions[0] # re-assign on purpose + continue + + # We only get the first three items when we use "include=children" + if asset["_type"] == "folder": + # try: + if asset["item_count"] > 3: + # Recursively fetch the contents of the folder because we have to + asset["children"] = self.get_assets_recursively(asset["id"], slim) + print("Grabbed more items for this sub dir") + + else: + for i in asset["children"]: + # If a folder is found, we still need to recursively search it + if i["_type"] == "folder": + i["children"] = self.get_assets_recursively(i["id"], slim) + + # except KeyError as e: + # # No children found in this folder, move on + # print(e) + # continue + + return assets + + def build_project_tree(self, project_id, slim=True): + # if slim == True: + # self.client.assets.get_children() + + # Get project info + project = self.client.projects.get(project_id) + + # Get children + initial_tree = self.get_assets_recursively(project["root_asset_id"], slim) + + return initial_tree + + def download_project(self, project_id, destination): + project = self.client.projects.get(project_id) + initial_tree = self.get_assets_recursively(project["root_asset_id"]) + self.recursive_downloader(destination, initial_tree) + # pprint(initial_tree) + # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") + + def recursive_downloader(self, directory, asset, count=0): + # TODO resolve this clusterfuck of downloads + print(f"Directory {directory}") + + try: + # First check to see if we need to make the directory + target_directory = os.path.join(os.path.curdir, directory) + if not os.path.isdir(target_directory): + os.mkdir(os.path.abspath(target_directory)) + + except Exception as e: + target_directory = os.path.abspath(os.path.join(os.path.curdir, directory)) + print(e) + + if type(asset) == list: + for i in asset: + self.recursive_downloader(directory, i) else: - for i in asset['children']: - # If a folder is found, we still need to recursively search it - if i['_type'] == "folder": - i['children'] = self.get_assets_recursively(i['id'], slim) - - # except KeyError as e: - # # No children found in this folder, move on - # print(e) - # continue - - return assets - - def build_project_tree(self, project_id, slim=True): - # if slim == True: - # self.client.assets.get_children() - - # Get project info - project = self.client.projects.get(project_id) - - # Get children - initial_tree = self.get_assets_recursively(project['root_asset_id'], slim) - - return initial_tree - - def download_project(self, project_id, destination): - project = self.client.projects.get(project_id) - initial_tree = self.get_assets_recursively(project['root_asset_id']) - self.recursive_downloader(destination, initial_tree) - # pprint(initial_tree) - # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") - - def recursive_downloader(self, directory, asset, count=0): - # TODO resolve this clusterfuck of downloads - print(f"Directory {directory}") - - try: - # First check to see if we need to make the directory - target_directory = os.path.join(os.path.curdir, directory) - if not os.path.isdir(target_directory): - os.mkdir(os.path.abspath(target_directory)) - - except Exception as e: - target_directory = os.path.abspath(os.path.join(os.path.curdir, directory)) - print(e) - - if type(asset) == list: - for i in asset: - 
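# Illustrative sketch, not part of this patch: driving the helpers above
# directly to walk a project tree and mirror it to disk. The token and project
# id are placeholders, and FrameioClient's constructor signature is an
# assumption for this example.
from frameioclient import FrameioClient
from frameioclient.services import FrameioHelpers

client = FrameioClient("TOKEN")
helpers = FrameioHelpers(client)

tree = helpers.build_project_tree("PROJECT_ID", slim=True)   # nested asset dicts
helpers.download_project("PROJECT_ID", destination="./downloads")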
self.recursive_downloader(directory, i) - - else: - try: - if asset['_type'] == 'folder': - if len(asset['children']) >= 0: - count += 1 - # Create the new folder that these items will go in before it's too late - if not os.path.exists(os.path.join(target_directory, asset['name'])): - print("Path doesn't exist") - new_path = Path(target_directory, str(asset['name']).replace('/', '-')) - print(new_path.absolute) - print("Making new directory") - Path.mkdir(new_path) - sleep(2) - - # Pass along the new directory they'll be living in and the children - self.recursive_downloader(f"{directory}/{str(asset['name']).replace('/', '-')}", asset['children']) - - if asset['_type'] == 'file': - count += 1 - return self.client.assets.download(asset, target_directory, multi_part=True) - - except Exception as e: - print(e) - - return True + try: + if asset["_type"] == "folder": + if len(asset["children"]) >= 0: + count += 1 + # Create the new folder that these items will go in before it's too late + if not os.path.exists( + os.path.join(target_directory, asset["name"]) + ): + print("Path doesn't exist") + new_path = Path( + target_directory, str(asset["name"]).replace("/", "-") + ) + print(new_path.absolute) + print("Making new directory") + Path.mkdir(new_path) + sleep(2) + + # Pass along the new directory they'll be living in and the children + self.recursive_downloader( + f"{directory}/{str(asset['name']).replace('/', '-')}", + asset["children"], + ) + + if asset["_type"] == "file": + count += 1 + return self.client.assets.download( + asset, target_directory, multi_part=True + ) + + except Exception as e: + print(e) + + return True + if __name__ == "__main__": - pass + pass diff --git a/frameioclient/services/links.py b/frameioclient/services/links.py index 127860b0..e109335a 100644 --- a/frameioclient/services/links.py +++ b/frameioclient/services/links.py @@ -1,125 +1,126 @@ from ..lib.service import Service + class ReviewLink(Service): - def create(self, project_id, **kwargs): - """ - Create a review link. - - Args: - project_id (string): The project id. - - :Keyword Arguments: - kwargs: additional request parameters. - - Example:: - - client.review_links.create( - project_id="123", - name="My Review Link", - password="abc123" - ) - """ - endpoint = '/projects/{}/review_links'.format(project_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def list(self, project_id): - """ - Get the review links of a project - - Args: - asset_id (string): The asset id. - """ - endpoint = '/projects/{}/review_links'.format(project_id) - return self.client._api_call('get', endpoint) - - def get(self, link_id, **kwargs): - """ - Get a single review link - - Args: - link_id (string): The review link id. - """ - endpoint = '/review_links/{}'.format(link_id) - return self.client._api_call('get', endpoint, payload=kwargs) - - def get_assets(self, link_id): - """ - Get items from a single review link. - - Args: - link_id (string): The review link id. - - Example:: - - client.review_links.get_assets( - link_id="123" - ) - """ - endpoint = '/review_links/{}/items'.format(link_id) - return self.client._api_call('get', endpoint) - - def update_assets(self, link_id, **kwargs): - """ - Add or update assets for a review link. - - Args: - link_id (string): The review link id. - - :Keyword Arguments: - kwargs: additional request parameters. 
- - Example:: - - client.review_links.update_assets( - link_id="123", - asset_ids=["abc","def"] - ) - """ - endpoint = '/review_links/{}/assets'.format(link_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def update_settings(self, link_id, **kwargs): - """ - Updates review link settings. - - Args: - link_id (string): The review link id. - - :Keyword Arguments: - kwargs: additional request parameters. - - Example:: - - client.review_links.update_settings( - link_id, - expires_at="2020-04-08T12:00:00+00:00", - is_active=False, - name="Review Link 123", - password="my_fun_password", - ) - """ - endpoint = '/review_links/{}'.format(link_id) - return self.client._api_call('put', endpoint, payload=kwargs) + def create(self, project_id, **kwargs): + """ + Create a review link. + + Args: + project_id (string): The project id. + + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.review_links.create( + project_id="123", + name="My Review Link", + password="abc123" + ) + """ + endpoint = "/projects/{}/review_links".format(project_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def list(self, project_id): + """ + Get the review links of a project + + Args: + asset_id (string): The asset id. + """ + endpoint = "/projects/{}/review_links".format(project_id) + return self.client._api_call("get", endpoint) + + def get(self, link_id, **kwargs): + """ + Get a single review link + + Args: + link_id (string): The review link id. + """ + endpoint = "/review_links/{}".format(link_id) + return self.client._api_call("get", endpoint, payload=kwargs) + + def get_assets(self, link_id): + """ + Get items from a single review link. + + Args: + link_id (string): The review link id. + + Example:: + + client.review_links.get_assets( + link_id="123" + ) + """ + endpoint = "/review_links/{}/items".format(link_id) + return self.client._api_call("get", endpoint) + + def update_assets(self, link_id, **kwargs): + """ + Add or update assets for a review link. + + Args: + link_id (string): The review link id. + + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.review_links.update_assets( + link_id="123", + asset_ids=["abc","def"] + ) + """ + endpoint = "/review_links/{}/assets".format(link_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def update_settings(self, link_id, **kwargs): + """ + Updates review link settings. + + Args: + link_id (string): The review link id. + + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.review_links.update_settings( + link_id, + expires_at="2020-04-08T12:00:00+00:00", + is_active=False, + name="Review Link 123", + password="my_fun_password", + ) + """ + endpoint = "/review_links/{}".format(link_id) + return self.client._api_call("put", endpoint, payload=kwargs) class PresentationLink(Service): - def create(self, asset_id, **kwargs): - """ - Create a presentation link. - - Args: - asset_id (string): The asset id. - - :Keyword Arguments: - kwargs: additional request parameters. - - Example:: - - client.presentation_links.create( - asset_id="9cee7966-4066-b326-7db1-f9e6f5e929e4", - title="My fresh presentation", - password="abc123" - ) - """ - endpoint = '/assets/{}/presentations'.format(asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) + def create(self, asset_id, **kwargs): + """ + Create a presentation link. + + Args: + asset_id (string): The asset id. 
+ + :Keyword Arguments: + kwargs: additional request parameters. + + Example:: + + client.presentation_links.create( + asset_id="9cee7966-4066-b326-7db1-f9e6f5e929e4", + title="My fresh presentation", + password="abc123" + ) + """ + endpoint = "/assets/{}/presentations".format(asset_id) + return self.client._api_call("post", endpoint, payload=kwargs) diff --git a/frameioclient/services/logs.py b/frameioclient/services/logs.py index 25642620..857a51a2 100644 --- a/frameioclient/services/logs.py +++ b/frameioclient/services/logs.py @@ -1,21 +1,22 @@ from ..lib.service import Service + class AuditLogs(Service): - def list(self, account_id): - """ - Get audit logs for the currently authenticated account. + def list(self, account_id): + """ + Get audit logs for the currently authenticated account. - Args: - account_id (uuid): Account ID you want to get audit logs for. + Args: + account_id (uuid): Account ID you want to get audit logs for. - Example:: + Example:: - client.logs.list( - account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024" - ) + client.logs.list( + account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024" + ) - Returns: - list: List of audit logs. - """ - endpoint = '/accounts/{}/audit_logs'.format(account_id) - return self.client._api_call('get', endpoint) + Returns: + list: List of audit logs. + """ + endpoint = "/accounts/{}/audit_logs".format(account_id) + return self.client._api_call("get", endpoint) diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py index 2bf8abb9..62829108 100644 --- a/frameioclient/services/projects.py +++ b/frameioclient/services/projects.py @@ -1,145 +1,148 @@ from ..lib.service import Service from .helpers import FrameioHelpers -class Project(Service): - def create(self, team_id, **kwargs): - """ - Create a project. - - :Args: - team_id (string): The team id. - :Kwargs: - (optional) kwargs: additional request parameters. - - Example:: - client.projects.create( - team_id="123", - name="My Awesome Project" - ) - """ - endpoint = '/teams/{}/projects'.format(team_id) - return self.client._api_call('post', endpoint, payload=kwargs) - - def get(self, project_id): - """ - Get an individual project - - :Args: - project_id (string): The project's id - - Example:: - client.project.get( - project_id="123" - ) - - """ - endpoint = '/projects/{}'.format(project_id) - return self.client._api_call('get', endpoint) - - def tree(self, project_id, slim): - """ - Fetch a tree representation of all files/folders in a project. - - :Args: - project_id (string): The project's id - slim (bool): If true, fetch only the minimum information for the following: - filename, - filesize, - thumbnail, - creator_id, - inserted_at (date created), - path (represented like a filesystem) - - Example:: - client.projects.get( - project_id="123", - slim=True - ) - """ - # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) - # return self.client._api_call('get', endpoint) - - return FrameioHelpers(self.client).build_project_tree(project_id, slim) - - def download(self, project_id, destination_directory='downloads'): - """ - Download the provided project to disk. - - :Args: - project_id (uuid): The project's id. - destination_directory (string): Directory on disk that you want to download the project to. 
- - Example:: - client.projects.download( - project_id="123", - destination_directory="./downloads" - ) - """ - return FrameioHelpers(self.client).download_project(project_id, destination=destination_directory) - def get_collaborators(self, project_id, **kwargs): - """ - Get collaborators for a project - - :Args: - project_id (uuid): The project's id - - Example:: - client.projects.get_collaborators( - project_id="123" - ) - - """ - endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def get_pending_collaborators(self, project_id, **kwargs): - """ - Get pending collaborators for a project - - :Args: - project_id (uuid): The project's id - - Example:: - client.projects.get_pending_collaborators( - project_id="123" +class Project(Service): + def create(self, team_id, **kwargs): + """ + Create a project. + + :Args: + team_id (string): The team id. + :Kwargs: + (optional) kwargs: additional request parameters. + + Example:: + client.projects.create( + team_id="123", + name="My Awesome Project" + ) + """ + endpoint = "/teams/{}/projects".format(team_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def get(self, project_id): + """ + Get an individual project + + :Args: + project_id (string): The project's id + + Example:: + client.project.get( + project_id="123" + ) + + """ + endpoint = "/projects/{}".format(project_id) + return self.client._api_call("get", endpoint) + + def tree(self, project_id, slim): + """ + Fetch a tree representation of all files/folders in a project. + + :Args: + project_id (string): The project's id + slim (bool): If true, fetch only the minimum information for the following: + filename, + filesize, + thumbnail, + creator_id, + inserted_at (date created), + path (represented like a filesystem) + + Example:: + client.projects.get( + project_id="123", + slim=True + ) + """ + # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) + # return self.client._api_call('get', endpoint) + + return FrameioHelpers(self.client).build_project_tree(project_id, slim) + + def download(self, project_id, destination_directory="downloads"): + """ + Download the provided project to disk. + + :Args: + project_id (uuid): The project's id. + destination_directory (string): Directory on disk that you want to download the project to. + + Example:: + client.projects.download( + project_id="123", + destination_directory="./downloads" + ) + """ + return FrameioHelpers(self.client).download_project( + project_id, destination=destination_directory ) - """ - endpoint = "/projects/{}/pending_collaborators".format(project_id) - return self.client._api_call('get', endpoint, kwargs) - - def add_collaborator(self, project_id, email): - """ - Add Collaborator to a Project Collaborator. - - :Args: - project_id (uuid): The project id - email (string): Email user's e-mail address - - Example:: - client.projects.add_collaborator( - project_id="123", - email="janedoe@frame.io", - ) - """ - payload = {"email": email} - endpoint = '/projects/{}/collaborators'.format(project_id) - return self._api_call('post', endpoint, payload=payload) - - def remove_collaborator(self, project_id, email): - """ - Remove Collaborator from Project. - - :Args: - project_id (uuid): The Project ID. 
- email (string): The user's e-mail address - - Example:: - client.projects.remove_collaborator( - project_id="123", - email="janedoe@frame.io" - ) - """ - endpoint = '/projects/{}/collaborators/_?email={}'.format(project_id, email) - return self._api_call('delete', endpoint) + def get_collaborators(self, project_id, **kwargs): + """ + Get collaborators for a project + + :Args: + project_id (uuid): The project's id + + Example:: + client.projects.get_collaborators( + project_id="123" + ) + + """ + endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) + return self.client._api_call("get", endpoint, kwargs) + + def get_pending_collaborators(self, project_id, **kwargs): + """ + Get pending collaborators for a project + + :Args: + project_id (uuid): The project's id + + Example:: + client.projects.get_pending_collaborators( + project_id="123" + ) + + """ + endpoint = "/projects/{}/pending_collaborators".format(project_id) + return self.client._api_call("get", endpoint, kwargs) + + def add_collaborator(self, project_id, email): + """ + Add Collaborator to a Project Collaborator. + + :Args: + project_id (uuid): The project id + email (string): Email user's e-mail address + + Example:: + client.projects.add_collaborator( + project_id="123", + email="janedoe@frame.io", + ) + """ + payload = {"email": email} + endpoint = "/projects/{}/collaborators".format(project_id) + return self._api_call("post", endpoint, payload=payload) + + def remove_collaborator(self, project_id, email): + """ + Remove Collaborator from Project. + + :Args: + project_id (uuid): The Project ID. + email (string): The user's e-mail address + + Example:: + client.projects.remove_collaborator( + project_id="123", + email="janedoe@frame.io" + ) + """ + endpoint = "/projects/{}/collaborators/_?email={}".format(project_id, email) + return self._api_call("delete", endpoint) diff --git a/frameioclient/services/search.py b/frameioclient/services/search.py index 7392e592..ee55e0dc 100644 --- a/frameioclient/services/search.py +++ b/frameioclient/services/search.py @@ -1,8 +1,21 @@ from ..lib.service import Service + class Search(Service): - def library(self, query, type=None, project_id=None, account_id=None, team_id=None, uploader=None, sort=None, filter=None, page_size=10, page=1): - """ + def library( + self, + query, + type=None, + project_id=None, + account_id=None, + team_id=None, + uploader=None, + sort=None, + filter=None, + page_size=10, + page=1, + ): + """ Search for assets using the library search endpoint, documented here \ https://developer.frame.io/docs/workflows-assets/search-for-assets. 
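 
         A sketch of a call built only from the parameters in this signature
         (ids and the query string are placeholders, and exposing the service
         as ``client.search`` is an assumption rather than something shown in
         this patch)::
 
             client.search.library(
                 query="interview",
                 account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024",
                 team_id="123",
                 page_size=10,
             )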
@@ -31,44 +44,32 @@ def library(self, query, type=None, project_id=None, account_id=None, team_id=No ) """ - # Define base payload - payload = { - "account_id": account_id, - "q": query, - "sort": sort, - "page_size": page_size, - "page": page - } + # Define base payload + payload = { + "account_id": account_id, + "q": query, + "sort": sort, + "page_size": page_size, + "page": page, + } - # Add fully custom filter - if filter is not None: - payload['filter'] = filter + # Add fully custom filter + if filter is not None: + payload["filter"] = filter - # Add simple filters - if project_id is not None: - payload['filter']['project_id'] = { - "op": "eq", - "value": project_id - } - if team_id is not None: - payload['filter']['team_id'] = { - "op": "eq", - "value": team_id - } - if type is not None: - payload['filter']['type'] = { - "op": "eq", - "value": type - } - if uploader is not None: - payload['filter']['creator.name'] = { - "op": "match", - "value": uploader - } + # Add simple filters + if project_id is not None: + payload["filter"]["project_id"] = {"op": "eq", "value": project_id} + if team_id is not None: + payload["filter"]["team_id"] = {"op": "eq", "value": team_id} + if type is not None: + payload["filter"]["type"] = {"op": "eq", "value": type} + if uploader is not None: + payload["filter"]["creator.name"] = {"op": "match", "value": uploader} - # Add sorting - if sort is not None: - payload['sort'] = sort + # Add sorting + if sort is not None: + payload["sort"] = sort - endpoint = '/search/library' - return self.client._api_call('post', endpoint, payload=payload) + endpoint = "/search/library" + return self.client._api_call("post", endpoint, payload=payload) diff --git a/frameioclient/services/teams.py b/frameioclient/services/teams.py index 9df71294..7590dcb4 100644 --- a/frameioclient/services/teams.py +++ b/frameioclient/services/teams.py @@ -1,104 +1,105 @@ import warnings from ..lib.service import Service + class Team(Service): - def create(self, account_id, **kwargs): - """ - Create a Team + def create(self, account_id, **kwargs): + """ + Create a Team - Args: - account_id (string): The account id you want to create this team under. - - :Keyword Arguments:: - (optional) kwargs: additional request parameters. + Args: + account_id (string): The account id you want to create this team under. - Example:: + :Keyword Arguments:: + (optional) kwargs: additional request parameters. - client.teams.create( - account_id="6bdcb4d9-4548-4548-4548-27a6c024ae6b", - name="My Awesome Project", - ) - """ - warnings.warn('Note: Your token must support team.create scopes') - endpoint = '/accounts/{}/teams'.format(account_id) - return self.client._api_call('post', endpoint, payload=kwargs) + Example:: - def list(self, account_id, **kwargs): - """ + client.teams.create( + account_id="6bdcb4d9-4548-4548-4548-27a6c024ae6b", + name="My Awesome Project", + ) + """ + warnings.warn("Note: Your token must support team.create scopes") + endpoint = "/accounts/{}/teams".format(account_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + def list(self, account_id, **kwargs): + """ Get teams owned by the specified account. \ (To return all teams, use list_all()) Args: account_id (string): The account id. """ - endpoint = '/accounts/{}/teams'.format(account_id) - return self.client._api_call('get', endpoint, kwargs) - - def list_all(self, **kwargs): - """ - Get all teams for the authenticated user. - - Args: - account_id (string): The account id. 
- """ - endpoint = '/teams' - return self.client._api_call('get', endpoint, kwargs) - - def get(self, team_id): - """ - Get team by id - - Args: - team_id (string): the team's id - """ - endpoint = '/teams/{}'.format(team_id) - return self.client._api_call('get', endpoint) - - def get_members(self, team_id): - """ - Get the member list for a given team. - - Args: - team_id (string): The team id. - """ - endpoint = '/teams/{}/members'.format(team_id) - return self.client._api_call('get', endpoint) - - def list_projects(self, team_id, **kwargs): - """ - Get projects owned by the team. - - Args: - team_id (string): The team id. - """ - endpoint = '/teams/{}/projects'.format(team_id) - return self.client._api_call('get', endpoint, kwargs) - - def add_members(self, team_id, emails): - """ - Add a list of users via their e-mail address to a given team. - - Args: - team_id (string): The team id. - emails (list): The e-mails you want to add. - """ - payload = dict() - payload['batch'] = list(map(lambda email: {"email": email}, emails)) - - endpoint = '/batch/teams/{}/members'.format(team_id) - return self._api_call('post', endpoint, payload=payload) - - def remove_members(self, team_id, emails): - """ - Remove a list of users via their e-mail address from a given team. - - Args: - team_id (string): The team id. - emails (list): The e-mails you want to add. - """ - - payload = dict() - payload['batch'] = list(map(lambda email: {"email": email}, emails)) - - endpoint = '/batch/teams/{}/members'.format(team_id) - return self._api_call('delete', endpoint, payload=payload) + endpoint = "/accounts/{}/teams".format(account_id) + return self.client._api_call("get", endpoint, kwargs) + + def list_all(self, **kwargs): + """ + Get all teams for the authenticated user. + + Args: + account_id (string): The account id. + """ + endpoint = "/teams" + return self.client._api_call("get", endpoint, kwargs) + + def get(self, team_id): + """ + Get team by id + + Args: + team_id (string): the team's id + """ + endpoint = "/teams/{}".format(team_id) + return self.client._api_call("get", endpoint) + + def get_members(self, team_id): + """ + Get the member list for a given team. + + Args: + team_id (string): The team id. + """ + endpoint = "/teams/{}/members".format(team_id) + return self.client._api_call("get", endpoint) + + def list_projects(self, team_id, **kwargs): + """ + Get projects owned by the team. + + Args: + team_id (string): The team id. + """ + endpoint = "/teams/{}/projects".format(team_id) + return self.client._api_call("get", endpoint, kwargs) + + def add_members(self, team_id, emails): + """ + Add a list of users via their e-mail address to a given team. + + Args: + team_id (string): The team id. + emails (list): The e-mails you want to add. + """ + payload = dict() + payload["batch"] = list(map(lambda email: {"email": email}, emails)) + + endpoint = "/batch/teams/{}/members".format(team_id) + return self._api_call("post", endpoint, payload=payload) + + def remove_members(self, team_id, emails): + """ + Remove a list of users via their e-mail address from a given team. + + Args: + team_id (string): The team id. + emails (list): The e-mails you want to add. 
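+
+        Example (a sketch; the team id and e-mail below are placeholders)::
+
+            client.teams.remove_members(
+                team_id="123",
+                emails=["janedoe@frame.io"]
+            )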
+ """ + + payload = dict() + payload["batch"] = list(map(lambda email: {"email": email}, emails)) + + endpoint = "/batch/teams/{}/members".format(team_id) + return self._api_call("delete", endpoint, payload=payload) diff --git a/frameioclient/services/users.py b/frameioclient/services/users.py index 61c0fb26..c4fcd1f1 100644 --- a/frameioclient/services/users.py +++ b/frameioclient/services/users.py @@ -1,8 +1,9 @@ from ..lib.service import Service + class User(Service): def get_me(self): """ Get the current user. """ - return self.client._api_call('get', '/me') \ No newline at end of file + return self.client._api_call("get", "/me") From 8f217236957a2328e18cc39add7df9cf3104c94c Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 6 Aug 2021 20:57:36 -0700 Subject: [PATCH 29/99] Fix some more issues and get recursive uploads working --- examples/assets/recursive_upload.py | 3 +-- frameioclient/client.py | 24 +++++++++++++++---- frameioclient/lib/download.py | 13 ++++++----- frameioclient/lib/logger.py | 2 +- frameioclient/lib/transfer.py | 3 +++ frameioclient/services/assets.py | 36 +++++++++++++++++++++++++---- scripts/benchmark/upload.py | 4 ++-- 7 files changed, 65 insertions(+), 20 deletions(-) diff --git a/examples/assets/recursive_upload.py b/examples/assets/recursive_upload.py index 05101eea..ae0fb421 100644 --- a/examples/assets/recursive_upload.py +++ b/examples/assets/recursive_upload.py @@ -1,8 +1,7 @@ import os -import time import mimetypes import concurrent.futures -import threading + from frameioclient import FrameioClient from pprint import pprint diff --git a/frameioclient/client.py b/frameioclient/client.py index cc59323c..e51d3428 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -4,7 +4,7 @@ The core module of the frameioclient """ from .lib import APIClient, Telemetry, ClientVersion, ClientVersion, FrameioDownloader -from services import * +from .services import * class FrameioClient(APIClient): @@ -15,54 +15,68 @@ def __init__(self, token, host="https://api.frame.io", threads=5, progress=False def me(self): return self.users.get_me() - @property def telemetry(self): return Telemetry(self) - @property def _auth(self): return self.token - @property def _version(self): return ClientVersion.version() - @property def _download(self): return FrameioDownloader(self) @property def users(self): + from .services import User + return User(self) @property def assets(self): + from .services import Asset + return Asset(self) @property def comments(self): + from .services import Comment + return Comment(self) @property def logs(self): + from .services import AuditLogs + return AuditLogs(self) @property def review_links(self): + from .services import ReviewLink + return ReviewLink(self) @property def presentation_links(self): + from .services import PresentationLink + return PresentationLink(self) @property def projects(self): + from .services import Project + return Project(self) @property def teams(self): + from .services import Team + return Team(self) @property def helpers(self): + from .services import FrameioHelpers + return FrameioHelpers(self) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 76c199f7..b9ef08eb 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -3,18 +3,19 @@ from .utils import Utils -# from .logger import SDKLogger +from .logger import SDKLogger from .transfer import AWSClient # from .telemetry import Event, ComparisonTest +logger = SDKLogger('downloads') + from .exceptions 
import ( DownloadException, WatermarkIDDownloadException, AssetNotFullyUploaded, ) - class FrameioDownloader(object): def __init__(self, asset, download_folder, prefix, multi_part=False, replace=False): self.multi_part = multi_part @@ -88,7 +89,7 @@ def _create_file_stub(self): return True def _get_path(self): - print("prefix:", self.prefix) + logger.info("prefix: {}".format(self.prefix)) if self.prefix != None: self.filename = self.prefix + self.filename @@ -141,9 +142,9 @@ def download_handler(self): # Check folders if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): - print("Folder exists, don't need to create it") + logger.info("Folder exists, don't need to create it") else: - print("Destination folder not found, creating") + logger.info("Destination folder not found, creating") os.mkdir(self.download_folder) # Check files @@ -154,7 +155,7 @@ def download_handler(self): os.remove(self.get_path()) if os.path.isfile(self.get_path()) and self.replace == False: - print("File already exists at this location.") + logger.info("File already exists at this location.") return self.destination # Get URL diff --git a/frameioclient/lib/logger.py b/frameioclient/lib/logger.py index 6b7fd55a..7b368545 100644 --- a/frameioclient/lib/logger.py +++ b/frameioclient/lib/logger.py @@ -1,7 +1,7 @@ import logging -class SDKLogger(object): +class SDKLogger: def __init__(self, log_name): self.initialize_logger() self.logger = logging.getLogger(log_name) diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index 1cc29038..da9e079a 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -56,6 +56,9 @@ def _create_file_stub(self): else: print(e) raise e + except TypeError as e: + print(e) + raise e return True def _optimize_concurrency(self): diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index 5d2de6b8..3a2299b5 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -2,6 +2,7 @@ import mimetypes from .projects import Project +# from .helpers import FrameioHelpers from ..lib.service import Service from ..lib import FrameioUploader, FrameioDownloader, constants, Reference @@ -236,11 +237,11 @@ def _upload(self, asset, file): def upload(self, destination_id, filepath, asset=None): """ - Upload a file. The method will exit once the file is downloaded. + Upload a file. The method will exit once the file is uploaded. :Args: destination_id (uuid): The destination Project or Folder ID. - filepath (string): The locaiton of the file on your local filesystem \ + filepath (string): The location of the file on your local filesystem \ that you want to upload. 
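           asset (dict, optional): A previously created asset dict; when supplied,
             the file is uploaded into that existing asset instead of a newly
             created one (see the ``if not asset`` branch below).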
Example:: @@ -256,13 +257,13 @@ def upload(self, destination_id, filepath, asset=None): folder_id = self.get(destination_id)['id'] except Exception as e: # Then try to grab it as a project - folder_id = Project(self.client).get_project(destination_id)['root_asset_id'] + folder_id = Project(self.client).get(destination_id)['root_asset_id'] finally: file_info = self._build_asset_info(filepath) if not asset: try: - asset = self.create(folder_id, + asset = self.create(folder_id, type="file", name=file_info['filename'], filetype=file_info['mimetype'], @@ -297,3 +298,30 @@ def download(self, asset, download_folder, prefix=None, multi_part=False, replac """ downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, replace) return downloader.download_handler() + + def upload_folder(self, source_path, destination_id): + """ + Upload a folder full of assets, maintaining hierarchy. \ + The method will exit once the file is uploaded. + + :Args: + filepath (path): The location of the folder on your disk. + destination_id (uuid): The destination Project or Folder ID. + + Example:: + client.assets.upload("./file.mov", "1231-12414-afasfaf-aklsajflaksjfla") + """ + + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload + + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)['id'] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get(destination_id)['root_asset_id'] + finally: + return FrameioUploader().recursive_upload(self.client, source_path, folder_id) + diff --git a/scripts/benchmark/upload.py b/scripts/benchmark/upload.py index 050349fe..909b8830 100644 --- a/scripts/benchmark/upload.py +++ b/scripts/benchmark/upload.py @@ -1,7 +1,7 @@ import os import sys -from .utils import timefunc +from utils import timefunc from frameioclient import FrameioClient @@ -13,4 +13,4 @@ def benchmark_upload(source_file='', remote_destination=''): return True if __name__ == "__main__": - timefunc(benchmark_upload, source_file='', remote_destination='', iterations=1) # medium + timefunc(benchmark_upload, source_file='', remote_destination='dd8526ee-2c7d-4b48-9bf7-b847664666bb', iterations=1) # medium From ff1f86bb02f9d5dce055b2ac5a3da4b80560c6b9 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 6 Aug 2021 21:07:14 -0700 Subject: [PATCH 30/99] Solve for missing Segment key in ENV --- frameioclient/lib/telemetry.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py index 2c720bdc..8354ad61 100644 --- a/frameioclient/lib/telemetry.py +++ b/frameioclient/lib/telemetry.py @@ -6,7 +6,7 @@ from .logger import SDKLogger from .version import ClientVersion -segment_id = os.environ["SEGMENT_WRITE_KEY"] # Production +segment_id = os.getenv("SEGMENT_WRITE_KEY", "") # Production analytics.write_key = segment_id From e094d7f1b2752fd03536dc823dc5242e26e03a25 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 9 Aug 2021 22:05:32 -0700 Subject: [PATCH 31/99] Fix the retry mechanism --- frameioclient/lib/transport.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 460ceea9..273b0104 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -1,13 +1,13 @@ import requests import threading +from urllib3.util.retry 
import Retry from requests.adapters import HTTPAdapter -from requests.packages.urllib3.util.retry import Retry from .version import ClientVersion from .utils import PaginatedResponse -from .constants import default_thread_count from .exceptions import PresentationException +from .constants import default_thread_count, retryable_statuses # from .bandwidth import NetworkBandwidth, DiskBandwidth @@ -35,9 +35,9 @@ def __init__(self, threads=default_thread_count): # Configure retry strategy (very broad right now) self.retry_strategy = Retry( - total=3, - backoff_factor=1, - status_forcelist=[400, 429, 500, 503], + total=100, + backoff_factor=2, + status_forcelist=retryable_statuses, method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"], ) @@ -47,14 +47,17 @@ def __init__(self, threads=default_thread_count): def _initialize_thread(self): self.thread_local = threading.local() - def _get_session(self, auth=True): + def _get_session(self): + # Create session only if needed if not hasattr(self.thread_local, "session"): http = requests.Session() adapter = HTTPAdapter(max_retries=self.retry_strategy) adapter.add_headers(self.shared_headers) # add version header - http.mount("https", adapter) + http.mount("https://", adapter) + http.mount("http://", adapter) self.thread_local.session = http + # Return session return self.thread_local.session From a202220feb530701e4c3504c56d1d10e0e96fa8d Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 10 Aug 2021 14:45:56 -0700 Subject: [PATCH 32/99] Add get_accounts() function --- frameioclient/services/users.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/frameioclient/services/users.py b/frameioclient/services/users.py index c4fcd1f1..be9aecbd 100644 --- a/frameioclient/services/users.py +++ b/frameioclient/services/users.py @@ -7,3 +7,8 @@ def get_me(self): Get the current user. 
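 
         Example (a sketch; assumes ``client`` is an authenticated FrameioClient)::
 
             me = client.users.get_me()
             print(me["account_id"])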
""" return self.client._api_call("get", "/me") + + def get_accounts(self): + """Get a list of accounts the user has access to + """ + return self.client._api_call("get", "/accounts") \ No newline at end of file From 6e5225b4a3cc81fb0d46a12989a1762368a6f2df Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 10 Aug 2021 14:46:52 -0700 Subject: [PATCH 33/99] Move retryable statuses to constants.py --- frameioclient/lib/constants.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/frameioclient/lib/constants.py b/frameioclient/lib/constants.py index 70964526..4b59e029 100644 --- a/frameioclient/lib/constants.py +++ b/frameioclient/lib/constants.py @@ -19,3 +19,5 @@ } default_thread_count = 5 + +retryable_statuses = [400, 429, 500, 503] From e172a57107da446a92ef8f77882e943fdb9a6b81 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 13 Aug 2021 18:07:24 -0700 Subject: [PATCH 34/99] Add upload progress bar for CLI --- frameioclient/lib/upload.py | 66 +++++++++++++++++++++++++++++++++---- 1 file changed, 60 insertions(+), 6 deletions(-) diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index ae5b10eb..9c2e7f4f 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -1,6 +1,7 @@ import os import math import requests +import enlighten import threading import concurrent.futures @@ -16,6 +17,7 @@ def __init__(self, asset=None, file=None): self.chunk_size = None self.file_count = 0 self.file_num = 0 + self.futures = [] def _calculate_chunks(self, total_size, chunk_count): """Calculate chunk size @@ -57,6 +59,7 @@ def _upload_chunk(self, task): url = task[0] chunk_offset = task[1] chunk_id = task[2] + in_progress = task[3] chunks_total = len(self.asset["upload_urls"]) is_final_chunk = False @@ -67,6 +70,7 @@ def _upload_chunk(self, task): session = self._get_session() chunk_data = self._smart_read_chunk(chunk_offset, is_final_chunk) + in_progress.update(len(chunk_data)) try: r = session.put( @@ -83,19 +87,69 @@ def _upload_chunk(self, task): r.raise_for_status() + return len(chunk_data) + def upload(self): total_size = self.asset["filesize"] upload_urls = self.asset["upload_urls"] chunk_offsets = self._calculate_chunks(total_size, chunk_count=len(upload_urls)) - with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: - for i in range(len(upload_urls)): - url = upload_urls[i] - chunk_offset = chunk_offsets[i] + with enlighten.get_manager() as manager: + status = manager.status_bar( + position=3, + status_format="{fill}Stage: {stage}{fill}{elapsed}", + color="bold_underline_bright_white_on_lightslategray", + justify=enlighten.Justify.CENTER, + stage="Initializing", + autorefresh=True, + min_delta=0.5, + ) + + BAR_FORMAT = ( + "{desc}{desc_pad}|{bar}|{percentage:3.0f}% " + + "Uploading: {count_1:.2j}/{total:.2j} " + + "Completed: {count_2:.2j}/{total:.2j} " + + "[{elapsed}<{eta}, {rate:.2j}{unit}/s]" + ) + + # Add counter to track completed chunks + initializing = manager.counter( + position=2, + total=float(self.asset['filesize']), + desc="Progress", + unit="B", + bar_format=BAR_FORMAT, + ) + + # Add additional counter + in_progress = initializing.add_subcounter("yellow", all_fields=True) + completed = initializing.add_subcounter("green", all_fields=True) + + # Set default state + initializing.refresh() + + status.update(stage="Uploading", color="green") + + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + for i in range(len(upload_urls)): + url = upload_urls[i] + chunk_offset = chunk_offsets[i] + + task = (url, 
chunk_offset, i, in_progress) + self.futures.append(executor.submit(self._upload_chunk, task)) + + # Keep updating the progress while we have > 0 bytes left. + # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + completed.update_from( + in_progress, float((chunk_size - 1)), force=True + ) + except Exception as exc: + print(exc) - task = (url, chunk_offset, i) - executor.submit(self._upload_chunk, task) def file_counter(self, folder): matches = [] From cb84310d0ce16a466b00a365de2fe46301b4db19 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Wed, 8 Sep 2021 17:04:32 -0700 Subject: [PATCH 35/99] Fix doc strings in projects.py --- frameioclient/services/projects.py | 52 +++++++++++++++++++----------- 1 file changed, 33 insertions(+), 19 deletions(-) diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py index 62829108..49def3c2 100644 --- a/frameioclient/services/projects.py +++ b/frameioclient/services/projects.py @@ -10,14 +10,16 @@ def create(self, team_id, **kwargs): :Args: team_id (string): The team id. :Kwargs: - (optional) kwargs: additional request parameters. + kwargs (optional): additional request parameters. Example:: + client.projects.create( team_id="123", name="My Awesome Project" ) """ + endpoint = "/teams/{}/projects".format(team_id) return self.client._api_call("post", endpoint, payload=kwargs) @@ -26,14 +28,16 @@ def get(self, project_id): Get an individual project :Args: + project_id (string): The project's id - Example:: - client.project.get( - project_id="123" - ) + Example:: + client.project.get( + project_id="123" + ) """ + endpoint = "/projects/{}".format(project_id) return self.client._api_call("get", endpoint) @@ -43,20 +47,22 @@ def tree(self, project_id, slim): :Args: project_id (string): The project's id - slim (bool): If true, fetch only the minimum information for the following: - filename, - filesize, - thumbnail, - creator_id, - inserted_at (date created), - path (represented like a filesystem) + slim (bool): If true, fetch only the minimum information for the following: \ + filename, \ + filesize, \ + thumbnail, \ + creator_id, \ + inserted_at (date created), \ + path (represented like a filesystem) \ Example:: + client.projects.get( project_id="123", slim=True ) """ + # endpoint = "/projects/{}/tree?depth=20&drop_includes=a.transcode_statuses,a.transcodes,a.source,a.checksums&only_fields=a.name,a.filesize,u.name,a.item_count,a.creator_id,a.inserted_at,a.uploaded_at".format(project_id) # return self.client._api_call('get', endpoint) @@ -71,11 +77,13 @@ def download(self, project_id, destination_directory="downloads"): destination_directory (string): Directory on disk that you want to download the project to. 
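           (This method delegates to FrameioHelpers(self.client).download_project(),
             as the return statement below shows.)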
Example:: + client.projects.download( project_id="123", destination_directory="./downloads" ) """ + return FrameioHelpers(self.client).download_project( project_id, destination=destination_directory ) @@ -88,11 +96,12 @@ def get_collaborators(self, project_id, **kwargs): project_id (uuid): The project's id Example:: + client.projects.get_collaborators( project_id="123" ) - """ + endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) return self.client._api_call("get", endpoint, kwargs) @@ -104,11 +113,12 @@ def get_pending_collaborators(self, project_id, **kwargs): project_id (uuid): The project's id Example:: + client.projects.get_pending_collaborators( project_id="123" ) - """ + endpoint = "/projects/{}/pending_collaborators".format(project_id) return self.client._api_call("get", endpoint, kwargs) @@ -121,11 +131,13 @@ def add_collaborator(self, project_id, email): email (string): Email user's e-mail address Example:: + client.projects.add_collaborator( project_id="123", email="janedoe@frame.io", ) """ + payload = {"email": email} endpoint = "/projects/{}/collaborators".format(project_id) return self._api_call("post", endpoint, payload=payload) @@ -138,11 +150,13 @@ def remove_collaborator(self, project_id, email): project_id (uuid): The Project ID. email (string): The user's e-mail address - Example:: - client.projects.remove_collaborator( - project_id="123", - email="janedoe@frame.io" - ) + Example:: + + client.projects.remove_collaborator( + project_id="123", + email="janedoe@frame.io" + ) """ + endpoint = "/projects/{}/collaborators/_?email={}".format(project_id, email) return self._api_call("delete", endpoint) From 11865bbc3cc45669dd158dcc5200afd8083f59c6 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 22 Nov 2021 15:34:00 -0800 Subject: [PATCH 36/99] Fix typo in version_stack detection --- frameioclient/services/helpers.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py index e19808d7..ea65501a 100644 --- a/frameioclient/services/helpers.py +++ b/frameioclient/services/helpers.py @@ -54,7 +54,7 @@ def get_assets_recursively(self, asset_id, slim=True): # Don't do nothing, it's a file! 
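             # (i.e. plain file assets are skipped here; version stacks are unpacked just below)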
continue - if asset["_type"] == "verson_stack": + if asset["_type"] == "version_stack": print("Grabbing top item from version stack") versions = self.client.assets.get_children(asset["id"], slim=True) asset = versions[0] # re-assign on purpose From 632b8d129fcf2ad22f1232016c574fba7a10aba7 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 09:30:27 -0800 Subject: [PATCH 37/99] Introduce Config for simpler default const storage --- frameioclient/client.py | 12 ++++++++++-- frameioclient/config.py | 4 ++++ 2 files changed, 14 insertions(+), 2 deletions(-) create mode 100644 frameioclient/config.py diff --git a/frameioclient/client.py b/frameioclient/client.py index e51d3428..048bdc6b 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -3,12 +3,20 @@ ==================================== The core module of the frameioclient """ -from .lib import APIClient, Telemetry, ClientVersion, ClientVersion, FrameioDownloader + +from .config import Config +from .lib import APIClient, ClientVersion, FrameioDownloader, Telemetry from .services import * class FrameioClient(APIClient): - def __init__(self, token, host="https://api.frame.io", threads=5, progress=False): + def __init__( + self, + token, + host: str = Config.api_host, + threads: int = Config.default_concurrency, + progress=False + ): super().__init__(token, host, threads, progress) @property diff --git a/frameioclient/config.py b/frameioclient/config.py new file mode 100644 index 00000000..1a638d93 --- /dev/null +++ b/frameioclient/config.py @@ -0,0 +1,4 @@ +class Config: + api_host = 'https://api.frame.io/v2' + default_page_size = 50 + default_concurrency = 5 From 1a3c632d2da816d52fb8f8948bb88c49fade6bb2 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 09:30:43 -0800 Subject: [PATCH 38/99] Fix docstring formatting --- frameioclient/services/teams.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/frameioclient/services/teams.py b/frameioclient/services/teams.py index 7590dcb4..629c1e02 100644 --- a/frameioclient/services/teams.py +++ b/frameioclient/services/teams.py @@ -26,12 +26,12 @@ def create(self, account_id, **kwargs): def list(self, account_id, **kwargs): """ - Get teams owned by the specified account. \ - (To return all teams, use list_all()) - - Args: - account_id (string): The account id. - """ + Get teams owned by the specified account. \ + (To return all teams, use list_all()) + + Args: + account_id (string): The account id. 
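+
+        Example (a sketch; the account id below is a placeholder)::
+
+            client.teams.list(
+                account_id="6bdcb4d9-9a2e-a765-4548-ae6b27a6c024"
+            )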
+ """ endpoint = "/accounts/{}/teams".format(account_id) return self.client._api_call("get", endpoint, kwargs) From 9e6765046fa6e9d86baeef435367d5a003db6bf6 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 09:31:36 -0800 Subject: [PATCH 39/99] Run format --- frameioclient/client.py | 2 +- frameioclient/config.py | 2 +- frameioclient/lib/download.py | 3 ++- frameioclient/lib/upload.py | 3 +-- frameioclient/services/users.py | 5 ++--- 5 files changed, 7 insertions(+), 8 deletions(-) diff --git a/frameioclient/client.py b/frameioclient/client.py index 048bdc6b..871eac87 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -15,7 +15,7 @@ def __init__( token, host: str = Config.api_host, threads: int = Config.default_concurrency, - progress=False + progress=False, ): super().__init__(token, host, threads, progress) diff --git a/frameioclient/config.py b/frameioclient/config.py index 1a638d93..70f7ef1d 100644 --- a/frameioclient/config.py +++ b/frameioclient/config.py @@ -1,4 +1,4 @@ class Config: - api_host = 'https://api.frame.io/v2' + api_host = "https://api.frame.io/v2" default_page_size = 50 default_concurrency = 5 diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index b9ef08eb..5a7f2a1a 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -8,7 +8,7 @@ # from .telemetry import Event, ComparisonTest -logger = SDKLogger('downloads') +logger = SDKLogger("downloads") from .exceptions import ( DownloadException, @@ -16,6 +16,7 @@ AssetNotFullyUploaded, ) + class FrameioDownloader(object): def __init__(self, asset, download_folder, prefix, multi_part=False, replace=False): self.multi_part = multi_part diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 9c2e7f4f..3023d138 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -116,7 +116,7 @@ def upload(self): # Add counter to track completed chunks initializing = manager.counter( position=2, - total=float(self.asset['filesize']), + total=float(self.asset["filesize"]), desc="Progress", unit="B", bar_format=BAR_FORMAT, @@ -150,7 +150,6 @@ def upload(self): except Exception as exc: print(exc) - def file_counter(self, folder): matches = [] for root, dirnames, filenames in os.walk(folder): diff --git a/frameioclient/services/users.py b/frameioclient/services/users.py index be9aecbd..cf64fb82 100644 --- a/frameioclient/services/users.py +++ b/frameioclient/services/users.py @@ -9,6 +9,5 @@ def get_me(self): return self.client._api_call("get", "/me") def get_accounts(self): - """Get a list of accounts the user has access to - """ - return self.client._api_call("get", "/accounts") \ No newline at end of file + """Get a list of accounts the user has access to""" + return self.client._api_call("get", "/accounts") From 7207a193f57541f8a9dc58d668e60dd40d6ca259 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 10:09:01 -0800 Subject: [PATCH 40/99] Fix the API host in Config --- frameioclient/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/config.py b/frameioclient/config.py index 70f7ef1d..8c733c8d 100644 --- a/frameioclient/config.py +++ b/frameioclient/config.py @@ -1,4 +1,4 @@ class Config: - api_host = "https://api.frame.io/v2" + api_host = "https://api.frame.io" default_page_size = 50 default_concurrency = 5 From 00aa971d8ee19a27d02fa95d34808c46c0a6d9f1 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 10:09:21 -0800 Subject: [PATCH 41/99] Don't 
print @Reference annotations --- frameioclient/lib/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 0f76e497..cc5ba0b9 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -1,14 +1,13 @@ import re import sys import xxhash -import enlighten KB = 1024 MB = KB * KB def Reference(*args, **kwargs): - print(kwargs["operation"]) + # print(kwargs["operation"]) def inner(func): """ From dfa18aa960e77ebfc5ea220527e264df828bbd5e Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 10:09:32 -0800 Subject: [PATCH 42/99] Overhaul asset_scraper.py example --- examples/assets/asset_scraper.py | 178 ++++++++++++++++++------------- 1 file changed, 103 insertions(+), 75 deletions(-) diff --git a/examples/assets/asset_scraper.py b/examples/assets/asset_scraper.py index cd37b732..995d1a42 100644 --- a/examples/assets/asset_scraper.py +++ b/examples/assets/asset_scraper.py @@ -1,16 +1,18 @@ ################################### # This scraper shows you how to gather assets from # a Frame.io account and write to a CSV. -# Assets are gathered recursively from each +# Assets are gathered recursively from each # team's projects. Folders, files and version stacks are written to the CSV. # Note: Debug statements are left in the file and commented out. ################################### import csv +from functools import lru_cache import os import time from itertools import chain +from typing import Dict, List from frameioclient import FrameioClient @@ -23,132 +25,156 @@ class RootAssetIDNotFound(Exception): pass -def get_teams_from_account(client): +@lru_cache() +def get_teams_from_account(client: FrameioClient) -> Dict: """ - Builds a list of teams for the account. Note: the API offers two strategies to fetch an account's teams, - `'get_teams`` and `get_all_teams`. Using `get_teams` we'll pull only the teams owned by the account_id, + Builds a list of teams for the account. Note: the API offers two strategies to fetch an account's teams, + `'get_teams`` and `get_all_teams`. Using `get_teams`, we'll pull only the teams owned by the account_id, disregarding teams the user belongs to but does not own. 
More info: https://docs.frame.io/docs/directory-lists-and-file-trees#2-fetch-the-accounts-teams """ - acct = client.get_me() - acct_id = acct['account_id'] - return client.get_teams(acct_id) + acct = client.users.get_me() + acct_id = acct["account_id"] + team_name_kv = dict() + for team in client.teams.list(acct_id)[3:4]: + team_name_kv[team["id"]] = team["name"] + return team_name_kv -def get_projects_from_team(client, team): + +def get_projects_from_team( + client: FrameioClient, team_id: str, team_name: str +) -> List[Dict]: """Returns a list of projects for a team.""" projects_in_team = [] - data = client.get_projects(team.get('id')) - team_name = team.get('name') + data = client.teams.list_projects(team_id) for proj in data: # Add project_name and team_name to the dict - proj['project_name'] = proj.get('name') - proj['team_name'] = team_name - # print('Debug: Found project: {}'.format(proj['project_name'])) + proj["project_name"] = proj.get("name") + proj["team_name"] = team_name + print("Debug: Found project: {}".format(proj["project_name"])) projects_in_team.append(proj) - # print('Debug: projects in team now {}'.format(len(projects_in_team))) + print("Debug: projects in team now: {}".format(len(projects_in_team))) return projects_in_team -def get_projects_from_account(client): + +def get_projects_from_account(client) -> List[Dict]: """Gets projects from all teams in the account.""" projects_in_account = [] teams = get_teams_from_account(client) - for team in teams: - team_name = team.get('name') - # print('Debug: === Found team: {} ==='.format(team_name)) - projects_in_team = (get_projects_from_team(client, team)) + for team_id, team_name in teams.items(): + print("Debug: === Found team: {} ===".format(team_name)) + projects_in_team = get_projects_from_team(client, team_id, team_name) projects_in_account.extend(projects_in_team) - # print('Debug: projects in account now: {}'.format(len(projects_in_account))) + print("Debug: projects in account now: {}".format(len(projects_in_account))) return projects_in_account -def scrape_asset_data_from_projects(client, projects): + +def scrape_asset_data_from_projects( + client: FrameioClient, projects: List[Dict] +) -> List[Dict]: """ Scrapes the asset data for an authenticated client and provided list of projects. Returns a list of asset metadata for all assets contained in the project. 
""" assets_in_projects = [] - for project in projects: + for project in projects[1:2]: + print("Debug: Scanning project: {} for assets".format(project["name"])) assets_in_project = [] - proj_root_asset_id = project.get('root_asset_id') - assets_in_project = scrape_asset_data(client, proj_root_asset_id, assets_in_project) + proj_root_asset_id = project.get("root_asset_id") + assets_in_project = scrape_asset_data( + client, + proj_root_asset_id, + assets_in_project, + project["name"], + ) assets_in_projects.extend(assets_in_project) - # print('Debug: total assets collected from projects: {}'.format(len(assets_in_projects))) - - for asset in assets_in_project: - # TODO: Repeats code from earlier and really shouldn't - asset['project_name'] = project.get('project_name') - asset['team_name'] = project.get('name') + print( + "Debug: total assets collected from projects: {}".format( + len(assets_in_projects) + ) + ) return assets_in_projects -def scrape_asset_data(client, asset_id, asset_list): +def scrape_asset_data( + client: FrameioClient, + asset_id: str, + asset_list: List[Dict], + project_name: str, +) -> List[Dict]: """ Takes an initialized client and an asset_id representing a position in a directory tree. Recursively builds a list of assets within the tree. Returns a list of dicts. """ - assets = client.get_asset_children(asset_id) + assets = client.assets.get_children(asset_id) for asset in assets: # Recurse through folders but skip the empty ones - if asset['type'] == "folder" and asset != []: + if asset["type"] == "folder" and asset != []: # Include non-empty folders in the list of scraped assets asset_list.append(asset) - scrape_asset_data(client, asset['id'], asset_list) + scrape_asset_data(client, asset["id"], asset_list, project_name) - if asset['type'] == "file": + if asset["type"] == "file": asset_list.append(asset) - if asset['type'] == "version_stack": + if asset["type"] == "version_stack": # Read about version stacks: https://docs.frame.io/docs/managing-version-stacks - versions = client.get_asset_children(asset['id']) + versions = client.assets.get_children(asset["id"]) asset_list.append(asset) for v_asset in versions: asset_list.append(v_asset) + asset["project_name"] = project_name + asset["team_name"] = get_teams_from_account(client)[asset["team_id"]] + return asset_list -def flatten_dict(d): + +def flatten_dict(d) -> List[Dict]: """ Use this helper functon to flatten a dict holding API response data - and namespace the attributes. + and namespace the attributes. """ def expand(key, val): - if isinstance(val, dict): - return [ (key + '.' + k, v) for k, v in flatten_dict(val).items() ] - else: - return [ (key, val) ] - - items = [ item for k, v in d.items() for item in expand(k, v)] + if isinstance(val, dict): + return [(key + "." + k, v) for k, v in flatten_dict(val).items()] + else: + return [(key, val)] + + items = [item for k, v in d.items() for item in expand(k, v)] return dict(items) -def write_assets_to_csv(asset_list, filename): + +def write_assets_to_csv(asset_list: List[Dict], filename: str) -> None: """ Writes assets to assets.csv Any attributes you add to the headers list will automatically be written to the CSV The API returns many attributes so familiarize with the response data! 
""" headers = [ - 'id', - 'name', - 'type', - 'inserted_at', - 'item_count', - 'comment_count', - 'filesize', - 'shared', - 'private', - 'versions', - 'parent_id', - 'project_name', - 'project_id', - 'team_name', - 'creator.name', - 'creator.email', + "id", + "name", + "type", + "inserted_at", + "item_count", + "comment_count", + "filesize", + "shared", + "private", + "versions", + "parent_id", + "project_name", + "project_id", + "team_name", + "creator.name", + "creator.email", ] # Flattening the assets dicts is not necessary but namespaces the CSV headers nicely. @@ -156,22 +182,25 @@ def write_assets_to_csv(asset_list, filename): for a in asset_list: flat_assets_list.append(flatten_dict(a)) - with open('asset_record_for_account_id-{}'.format(filename), 'w') as f: - f_csv = csv.DictWriter(f, headers, extrasaction='ignore') + with open("asset_record_for_account_id-{}".format(filename), "w") as f: + f_csv = csv.DictWriter(f, headers, extrasaction="ignore") f_csv.writeheader() f_csv.writerows(flat_assets_list) return -if __name__ == '__main__': +if __name__ == "__main__": + TOKEN = os.getenv("FRAME_IO_TOKEN") + ROOT_ASSET_ID = os.getenv("ROOT_ASSET_ID") + + if TOKEN == None: + raise ClientNotTokenized("The Python SDK requires a valid developer token.") - TOKEN = os.getenv('FRAME_IO_TOKEN') - if os.environ.get('FRAME_IO_TOKEN') == None: - raise ClientNotTokenized('The Python SDK requires a valid developer token.') - ROOT_ASSET_ID = os.getenv('ROOT_ASSET_ID') - if os.environ.get('ROOT_ASSET_ID') == None: - raise RootAssetIDNotFound('If you don\'t know what Root Asset ID is, read this guide: https://docs.frame.io/docs/root-asset-ids') + if ROOT_ASSET_ID == None: + raise RootAssetIDNotFound( + "If you don't know what Root Asset ID is, read this guide: https://docs.frame.io/docs/root-asset-ids" + ) # Initialize the client library client = FrameioClient(TOKEN) @@ -181,9 +210,8 @@ def write_assets_to_csv(asset_list, filename): assets_in_account = scrape_asset_data_from_projects(client, projects) # Pass a filename to the .csv writer so we can explicitly ID the file - acct = client.get_me() - acct_id = acct['account_id'] - filename = 'assets_for_account_id-{}.csv'.format(acct_id) + acct = client.users.get_me() + acct_id = acct["account_id"] # Write the .csv - write_assets_to_csv(assets_in_account, filename) \ No newline at end of file + write_assets_to_csv(assets_in_account, acct_id) From 86ec59356d77f4d82250754642f5b1b68aa8b251 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 10:10:21 -0800 Subject: [PATCH 43/99] Don't limit asset scraper --- examples/assets/asset_scraper.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/assets/asset_scraper.py b/examples/assets/asset_scraper.py index 995d1a42..e0ea26fc 100644 --- a/examples/assets/asset_scraper.py +++ b/examples/assets/asset_scraper.py @@ -35,7 +35,7 @@ def get_teams_from_account(client: FrameioClient) -> Dict: acct = client.users.get_me() acct_id = acct["account_id"] team_name_kv = dict() - for team in client.teams.list(acct_id)[3:4]: + for team in client.teams.list(acct_id): team_name_kv[team["id"]] = team["name"] return team_name_kv @@ -80,7 +80,7 @@ def scrape_asset_data_from_projects( Returns a list of asset metadata for all assets contained in the project. 
""" assets_in_projects = [] - for project in projects[1:2]: + for project in projects: print("Debug: Scanning project: {} for assets".format(project["name"])) assets_in_project = [] proj_root_asset_id = project.get("root_asset_id") From cdec4612dd102860b4b8a539a9c97cf85470cd05 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 12:04:55 -0800 Subject: [PATCH 44/99] Fix issue w/ _get_session() --- frameioclient/lib/download.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 5a7f2a1a..066450e5 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -39,7 +39,7 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, replace=Fal self.bytes_completed = 0 self.in_progress = 0 self.aws_client = AWSClient(concurrency=5) - self.session = self.aws_client._get_session(auth=None) + self.session = self.aws_client._get_session() self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() self.stats = True From 005020fd369245627e376fb8a5eaa3064a33438e Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 21 Dec 2021 12:43:36 -0800 Subject: [PATCH 45/99] More progress re-architecting transfer and FrameioDownloader --- frameioclient/lib/download.py | 20 ++++++-- frameioclient/lib/transfer.py | 88 ++++++++++++++++++----------------- scripts/benchmark/download.py | 25 ++++++++-- 3 files changed, 82 insertions(+), 51 deletions(-) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 066450e5..2cb6e7fd 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -1,5 +1,6 @@ import os import math +from typing import Dict from .utils import Utils @@ -18,7 +19,14 @@ class FrameioDownloader(object): - def __init__(self, asset, download_folder, prefix, multi_part=False, replace=False): + def __init__( + self, + asset: Dict, + download_folder: str, + prefix: str, + multi_part: bool = False, + replace: bool = False, + ): self.multi_part = multi_part self.asset = asset self.asset_type = None @@ -38,8 +46,8 @@ def __init__(self, asset, download_folder, prefix, multi_part=False, replace=Fal self.bytes_started = 0 self.bytes_completed = 0 self.in_progress = 0 - self.aws_client = AWSClient(concurrency=5) - self.session = self.aws_client._get_session() + self.aws_client = None + self.session = None self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() self.stats = True @@ -162,9 +170,13 @@ def download_handler(self): # Get URL url = self.get_download_key() + # AWS Client + self.aws_client = AWSClient(downloader=self, concurrency=5) + # Handle watermarking if self.watermarked == True: - return self.aws_client()._download_whole(url) + return self.aws_client._download_whole(url) + else: # Don't use multi-part download for files below 25 MB if self.asset["filesize"] < 26214400: diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index da9e079a..c03a4c4e 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -5,6 +5,7 @@ import requests import concurrent.futures + from .utils import Utils from .logger import SDKLogger @@ -19,11 +20,15 @@ class AWSClient(HTTPClient, object): - def __init__(self, concurrency=None, progress=True): + def __init__(self, downloader, concurrency=None, progress=True): super().__init__() # Initialize via inheritance self.progress = progress self.progress_manager = None self.destination = None + self.bytes_started = 
0 + self.bytes_completed = 0 + self.downloader = downloader + self.futures = [] # Ensure this is a valid number before assigning if concurrency is not None and type(concurrency) == int and concurrency > 0: @@ -46,12 +51,12 @@ def check_cdn(url): def _create_file_stub(self): try: - fp = open(self.destination, "w") + fp = open(self.downloader.destination, "w") # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space fp.close() except FileExistsError as e: if self.replace == True: - os.remove(self.destination) # Remove the file + os.remove(self.downloader.destination) # Remove the file self._create_file_stub() # Create a new stub else: print(e) @@ -61,24 +66,24 @@ def _create_file_stub(self): raise e return True - def _optimize_concurrency(self): - """ - This method looks as the net_stats and disk_stats that we've run on \ - the current environment in order to suggest the best optimized \ - number of concurrent TCP connections. + # def _optimize_concurrency(self): + # """ + # This method looks as the net_stats and disk_stats that we've run on \ + # the current environment in order to suggest the best optimized \ + # number of concurrent TCP connections. - Example:: - AWSClient._optimize_concurrency() - """ + # Example:: + # AWSClient._optimize_concurrency() + # """ - net_stats = NetworkBandwidth - disk_stats = DiskBandwidth + # net_stats = NetworkBandwidth + # disk_stats = DiskBandwidth - # Algorithm ensues - # - # + # # Algorithm ensues + # # + # # - return 5 + # return 5 def _get_byte_range(self, url, start_byte=0, end_byte=2048): """ @@ -101,11 +106,11 @@ def _get_byte_range(self, url, start_byte=0, end_byte=2048): br = requests.get(url, headers=headers).content return br - def _download_whole(self, url): + def _download_whole(self, url: str): start_time = time.time() print( "Beginning download -- {} -- {}".format( - self.asset["name"], Utils.format_bytes(self.file_size, type="size") + self.asset["name"], Utils.format_bytes(self.downloader.file_size, type="size") ) ) @@ -113,7 +118,7 @@ def _download_whole(self, url): r = self.session.get(url, stream=True) # Downloading - with open(self.destination, "wb") as handle: + with open(self.downloader.destination, "wb") as handle: try: # TODO make sure this approach works for SBWM download for chunk in r.iter_content(chunk_size=4096): @@ -123,10 +128,10 @@ def _download_whole(self, url): raise e download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.file_size / (download_time))) + download_speed = Utils.format_bytes(math.ceil(self.downloader.file_size / (download_time))) print( "Downloaded {} at {}".format( - Utils.format_bytes(self.file_size, type="size"), download_speed + Utils.format_bytes(self.downloader.file_size, type="size"), download_speed ) ) @@ -146,9 +151,9 @@ def _download_chunk(self, task): # Set the initial chunk_size, but prepare to overwrite chunk_size = end_byte - start_byte - if self.bytes_started + (chunk_size) > self.file_size: + if self.bytes_started + (chunk_size) > self.downloader.file_size: difference = abs( - self.file_size - (self.bytes_started + chunk_size) + self.downloader.file_size - (self.bytes_started + chunk_size) ) # should be negative chunk_size = chunk_size - difference print(f"Chunk size as done via math: {chunk_size}") @@ -184,11 +189,11 @@ def _download_chunk(self, task): # Increase the count for bytes_completed, but only if it doesn't overrun file length self.bytes_completed += chunk_size - if self.bytes_completed > self.file_size: - 
self.bytes_completed = self.file_size + if self.bytes_completed > self.downloader.file_size: + self.bytes_completed = self.downloader.file_size # Update the in_progress bar - self._update_in_progress() + self.downloader._update_in_progress() # After the function completes, we report back the # of bytes transferred return chunk_size @@ -199,16 +204,15 @@ def multi_thread_download(self, url): # Generate stub try: self._create_file_stub() - except Exception as e: raise DownloadException(message=e) - offset = math.ceil(self.file_size / self.chunks) + offset = math.ceil(self.downloader.file_size / self.downloader.chunks) in_byte = 0 # Set initially here, but then override print( "Multi-part download -- {} -- {}".format( - self.asset["name"], Utils.format_bytes(self.file_size, type="size") + self.downloader.asset["name"], Utils.format_bytes(self.downloader.file_size, type="size") ) ) @@ -234,7 +238,7 @@ def multi_thread_download(self, url): # Add counter to track completed chunks initializing = manager.counter( position=2, - total=float(self.file_size), + total=float(self.downloader.file_size), desc="Progress", unit="B", bar_format=BAR_FORMAT, @@ -250,15 +254,15 @@ def multi_thread_download(self, url): status.update(stage="Downloading", color="green") with concurrent.futures.ThreadPoolExecutor( - max_workers=self.aws_client.concurrency + max_workers=self.concurrency ) as executor: - for i in range(int(self.chunks)): + for i in range(int(self.downloader.chunks)): # Increment by the iterable + 1 so we don't mutiply by zero out_byte = offset * (i + 1) # Create task tuple task = (url, in_byte, out_byte, i, in_progress) # Stagger start for each chunk by 0.1 seconds - if i < self.aws_client.concurrency: + if i < self.concurrency: time.sleep(0.1) # Append tasks to futures list self.futures.append(executor.submit(self._download_chunk, task)) @@ -278,11 +282,11 @@ def multi_thread_download(self, url): # Calculate and print stats download_time = round((time.time() - start_time), 2) - download_speed = round((self.file_size / download_time), 2) + download_speed = round((self.downloader.file_size / download_time), 2) - if self.checksum_verification == True: + if self.downloader.checksum_verification == True: # Check for checksum, if not present throw error - if self._get_checksum() == None: + if self.downloader._get_checksum() == None: raise AssetChecksumNotPresent else: # Perform hash-verification @@ -297,7 +301,7 @@ def multi_thread_download(self, url): # Add counter to track completed chunks verification = manager.counter( position=1, - total=float(self.file_size), + total=float(self.downloader.file_size), desc="Verifying", unit="B", bar_format=VERIFICATION_FORMAT, @@ -319,7 +323,7 @@ def multi_thread_download(self, url): # Log completion event SDKLogger("downloads").info( "Downloaded {} at {}".format( - Utils.format_bytes(self.file_size, type="size"), download_speed + Utils.format_bytes(self.downloader.file_size, type="size"), download_speed ) ) @@ -340,9 +344,9 @@ def multi_thread_download(self, url): "speed": download_speed, "elapsed": download_time, "cdn": AWSClient.check_cdn(url), - "concurrency": self.aws_client.concurrency, - "size": self.file_size, - "chunks": self.chunks, + "concurrency": self.concurrency, + "size": self.downloader.file_size, + "chunks": self.downloader.chunks, } return dl_info else: diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py index 45ce7d97..3f9882f9 100644 --- a/scripts/benchmark/download.py +++ b/scripts/benchmark/download.py @@ -6,17 +6,25 @@ 
from frameioclient.lib.bandwidth import NetworkBandwidth -def download(asset_id='', destination='downloads', clean_up=True, size='small'): +def download( + asset_id: str = "", + destination: str = "downloads", + clean_up: bool = True, + size: str = "small", +): token = os.getenv("FRAMEIO_TOKEN") client = FrameioClient(token) asset_info = client.assets.get(asset_id) - download_info = client.assets.download(asset_info, destination, multi_part=True, replace=True) + download_info = client.assets.download( + asset_info, destination, multi_part=True, replace=True + ) if clean_up == True: - os.remove(download_info['destination']) + os.remove(download_info["destination"]) return download_info + def test_s3(): asset_list = [] stats = [] @@ -26,8 +34,13 @@ def test_s3(): return stats + def test_cloudfront(): - asset_list = ['811baf7a-3248-4c7c-9d94-cc1c6c496a76','35f8ac33-a710-440e-8dcc-f98cfd90e0e5','e981f087-edbb-448d-baad-c8363b78f5ae'] + asset_list = [ + "811baf7a-3248-4c7c-9d94-cc1c6c496a76", + "35f8ac33-a710-440e-8dcc-f98cfd90e0e5", + "e981f087-edbb-448d-baad-c8363b78f5ae", + ] stats = [] for asset in asset_list: report = download(asset_id=asset) @@ -35,6 +48,7 @@ def test_cloudfront(): return stats + def build_metric(s3_stats, cf_stats, baseline): # Compare S3 against the baseline after calculating the average of the runs # Compare CF against the baseline after calculating the average of the runs @@ -45,6 +59,7 @@ def build_metric(s3_stats, cf_stats, baseline): print("Thing") pass + def run_benchmark(): s3_stats = test_s3() cf_stats = test_cloudfront() @@ -60,4 +75,4 @@ def run_benchmark(): # timefunc(benchmark_download, asset_id='e981f087-edbb-448d-baad-c8363b78f5ae', destination='downloads', iterations=5) # small # New method: - run_benchmark() \ No newline at end of file + run_benchmark() From 44b68d134a990d4a32203a763adf6a815a0f2cca Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:23:36 -0800 Subject: [PATCH 46/99] Remove progress bars and refactor AWSClient --- frameioclient/lib/__init__.py | 5 +- frameioclient/lib/download.py | 6 +- frameioclient/lib/transfer.py | 469 +++++++++++++++++++------------ frameioclient/lib/transport.py | 55 +++- frameioclient/lib/upload.py | 75 ++--- frameioclient/lib/utils.py | 11 +- frameioclient/services/assets.py | 24 +- scripts/benchmark/download.py | 5 +- tests/integration.py | 10 +- 9 files changed, 388 insertions(+), 272 deletions(-) diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index f1aa82e1..66f3854d 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -4,7 +4,6 @@ from .telemetry import Telemetry from .version import ClientVersion from .upload import FrameioUploader -from .download import FrameioDownloader from .transport import APIClient -from .transfer import AWSClient -from .utils import Utils, PaginatedResponse, KB, MB, Reference +from .transfer import AWSClient, FrameioDownloader +from .utils import Utils, PaginatedResponse, KB, MB, ApiReference diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 2cb6e7fd..4308f435 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -35,13 +35,13 @@ def __init__( self.resolution_map = dict() self.destination = None self.watermarked = asset["is_session_watermarked"] # Default is probably false - self.file_size = asset["filesize"] + self.filesize = asset["filesize"] self.futures = list() self.checksum = None self.original_checksum = None self.checksum_verification = True 
self.chunk_size = 25 * 1024 * 1024 # 25 MB chunk size - self.chunks = math.ceil(self.file_size / self.chunk_size) + self.chunks = math.ceil(self.filesize / self.chunk_size) self.prefix = prefix self.bytes_started = 0 self.bytes_completed = 0 @@ -146,7 +146,7 @@ def get_download_key(self): return url - def download_handler(self): + def download(self): """Call this to perform the actual download of your asset!""" # Check folders diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index c03a4c4e..ce2d8955 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -1,43 +1,214 @@ -import os +import concurrent.futures import math +import os import time -import enlighten -import requests -import concurrent.futures - +from pprint import pprint +from typing import Dict, List +from random import randint -from .utils import Utils -from .logger import SDKLogger +import requests from .exceptions import ( - DownloadException, AssetChecksumMismatch, AssetChecksumNotPresent, + DownloadException, ) +from .logger import SDKLogger +from .utils import Utils + +logger = SDKLogger("downloads") -from .bandwidth import NetworkBandwidth, DiskBandwidth +from .bandwidth import DiskBandwidth, NetworkBandwidth +from .exceptions import ( + AssetNotFullyUploaded, + DownloadException, + WatermarkIDDownloadException, +) from .transport import HTTPClient +class FrameioDownloader(object): + def __init__( + self, + asset: Dict, + download_folder: str, + prefix: str, + multi_part: bool = False, + replace: bool = False, + ): + self.multi_part = multi_part + self.asset = asset + self.asset_type = None + self.download_folder = download_folder + self.replace = replace + self.resolution_map = dict() + self.destination = None + self.watermarked = asset["is_session_watermarked"] # Default is probably false + self.filesize = asset["filesize"] + self.futures = list() + self.checksum = None + self.original_checksum = None + self.checksum_verification = True + self.chunk_size = 25 * 1024 * 1024 # 25 MB chunk size + self.chunks = math.ceil(self.filesize / self.chunk_size) + self.prefix = prefix + self.bytes_started = 0 + self.bytes_completed = 0 + self.in_progress = 0 + self.aws_client = None + self.session = None + self.filename = Utils.normalize_filename(asset["name"]) + self.request_logs = list() + self.stats = True + + self._evaluate_asset() + self._get_path() + + def get_path(self): + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _evaluate_asset(self): + if self.asset.get("_type") != "file": + raise DownloadException( + message="Unsupport Asset type: {}".format(self.asset.get("_type")) + ) + + # This logic may block uploads that were started before this field was introduced + if self.asset.get("upload_completed_at") == None: + raise AssetNotFullyUploaded + + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + def _create_file_stub(self): + try: + fp = open(self.destination, "w") + # fp.write(b"\0" * self.filesize) # Disabled to prevent pre-allocatation of disk space + fp.close() + except FileExistsError as e: + if self.replace == True: + os.remove(self.destination) # Remove the file + self._create_file_stub() # Create a new stub + else: + raise e + return True + + def _get_path(self): + 
logger.info("prefix: {}".format(self.prefix)) + if self.prefix != None: + self.filename = self.prefix + self.filename + + if self.destination == None: + final_destination = os.path.join(self.download_folder, self.filename) + self.destination = final_destination + + return self.destination + + def _get_checksum(self): + try: + self.original_checksum = self.asset["checksums"]["xx_hash"] + except (TypeError, KeyError): + self.original_checksum = None + + return self.original_checksum + + def get_download_key(self): + try: + url = self.asset["original"] + except KeyError as e: + if self.watermarked == True: + resolution_list = list() + try: + for resolution_key, download_url in sorted( + self.asset["downloads"].items() + ): + resolution = resolution_key.split("_")[ + 1 + ] # Grab the item at index 1 (resolution) + try: + resolution = int(resolution) + except ValueError: + continue + + if download_url is not None: + resolution_list.append(download_url) + + # Grab the highest resolution (first item) now + url = resolution_list[0] + except KeyError: + raise DownloadException + else: + raise WatermarkIDDownloadException + + return url + + def download(self): + """Call this to perform the actual download of your asset!""" + + # Check folders + if os.path.isdir(os.path.join(os.path.curdir, self.download_folder)): + logger.info("Folder exists, don't need to create it") + else: + logger.info("Destination folder not found, creating") + os.mkdir(self.download_folder) + + # Check files + if os.path.isfile(self.get_path()) == False: + pass + + if os.path.isfile(self.get_path()) and self.replace == True: + os.remove(self.get_path()) + + if os.path.isfile(self.get_path()) and self.replace == False: + logger.info("File already exists at this location.") + return self.destination + + # Get URL + url = self.get_download_key() + + # AWS Client + self.aws_client = AWSClient(downloader=self, concurrency=5) + + # Handle watermarking + if self.watermarked == True: + return self.aws_client._download_whole(url) + + else: + # Don't use multi-part download for files below 25 MB + if self.asset["filesize"] < 26214400: + return self.aws_client._download_whole(url) + if self.multi_part == True: + return self.aws_client.multi_thread_download(url) + else: + return self.aws_client._download_whole(url) + + class AWSClient(HTTPClient, object): - def __init__(self, downloader, concurrency=None, progress=True): - super().__init__() # Initialize via inheritance + def __init__(self, downloader: FrameioDownloader, concurrency=None, progress=True): + super().__init__(self) # Initialize via inheritance self.progress = progress self.progress_manager = None - self.destination = None + self.destination = downloader.destination self.bytes_started = 0 self.bytes_completed = 0 self.downloader = downloader self.futures = [] + self.original = self.downloader.asset['original'] # Ensure this is a valid number before assigning if concurrency is not None and type(concurrency) == int and concurrency > 0: self.concurrency = concurrency - else: - self.concurrency = self._optimize_concurrency() - - if self.progress: - self.progress_manager = enlighten.get_manager() + # else: + # self.concurrency = self._optimize_concurrency() @staticmethod def check_cdn(url): @@ -52,7 +223,7 @@ def check_cdn(url): def _create_file_stub(self): try: fp = open(self.downloader.destination, "w") - # fp.write(b"\0" * self.file_size) # Disabled to prevent pre-allocatation of disk space + # fp.write(b"\0" * self.filesize) # Disabled to prevent pre-allocatation of disk space 
fp.close() except FileExistsError as e: if self.replace == True: @@ -66,24 +237,24 @@ def _create_file_stub(self): raise e return True - # def _optimize_concurrency(self): - # """ - # This method looks as the net_stats and disk_stats that we've run on \ - # the current environment in order to suggest the best optimized \ - # number of concurrent TCP connections. + def _optimize_concurrency(self): + """ + This method looks as the net_stats and disk_stats that we've run on \ + the current environment in order to suggest the best optimized \ + number of concurrent TCP connections. - # Example:: - # AWSClient._optimize_concurrency() - # """ + Example:: + AWSClient._optimize_concurrency() + """ - # net_stats = NetworkBandwidth - # disk_stats = DiskBandwidth + net_stats = NetworkBandwidth + disk_stats = DiskBandwidth - # # Algorithm ensues - # # - # # + # Algorithm ensues + # + # - # return 5 + return 5 def _get_byte_range(self, url, start_byte=0, end_byte=2048): """ @@ -110,11 +281,13 @@ def _download_whole(self, url: str): start_time = time.time() print( "Beginning download -- {} -- {}".format( - self.asset["name"], Utils.format_bytes(self.downloader.file_size, type="size") + self.asset["name"], + Utils.format_bytes(self.downloader.filesize, type="size"), ) ) # Downloading + self.session = self._get_session() r = self.session.get(url, stream=True) # Downloading @@ -128,16 +301,19 @@ def _download_whole(self, url: str): raise e download_time = time.time() - start_time - download_speed = Utils.format_bytes(math.ceil(self.downloader.file_size / (download_time))) + download_speed = Utils.format_bytes( + math.ceil(self.downloader.filesize / (download_time)) + ) print( "Downloaded {} at {}".format( - Utils.format_bytes(self.downloader.file_size, type="size"), download_speed + Utils.format_bytes(self.downloader.filesize, type="size"), + download_speed, ) ) return self.destination, download_speed - def _download_chunk(self, task): + def _download_chunk(self, task: List): # Download a particular chunk # Called by the threadpool executor @@ -146,14 +322,14 @@ def _download_chunk(self, task): start_byte = task[1] end_byte = task[2] chunk_number = task[3] - in_progress = task[4] + # in_progress = task[4] # Set the initial chunk_size, but prepare to overwrite chunk_size = end_byte - start_byte - if self.bytes_started + (chunk_size) > self.downloader.file_size: + if self.bytes_started + (chunk_size) > self.downloader.filesize: difference = abs( - self.downloader.file_size - (self.bytes_started + chunk_size) + self.downloader.filesize - (self.bytes_started + chunk_size) ) # should be negative chunk_size = chunk_size - difference print(f"Chunk size as done via math: {chunk_size}") @@ -163,13 +339,11 @@ def _download_chunk(self, task): # Set chunk size in a smarter way self.bytes_started += chunk_size - # Update the bar for in_progress chunks - in_progress.update(float(chunk_size)) - # Specify the start and end of the range request headers = {"Range": "bytes=%d-%d" % (start_byte, end_byte)} # Grab the data as a stream + self.session = self._get_session() r = self.session.get(url, headers=headers, stream=True) # Write the file to disk @@ -179,7 +353,7 @@ def _download_chunk(self, task): fp.write(r.content) # Write the data # Save requests logs - self.request_logs.append( + self.downloader.request_logs.append( { "headers": r.headers, "http_status": r.status_code, @@ -189,16 +363,13 @@ def _download_chunk(self, task): # Increase the count for bytes_completed, but only if it doesn't overrun file length 
self.bytes_completed += chunk_size - if self.bytes_completed > self.downloader.file_size: - self.bytes_completed = self.downloader.file_size - - # Update the in_progress bar - self.downloader._update_in_progress() + if self.bytes_completed > self.downloader.filesize: + self.bytes_completed = self.downloader.filesize # After the function completes, we report back the # of bytes transferred return chunk_size - def multi_thread_download(self, url): + def multi_thread_download(self): start_time = time.time() # Generate stub @@ -207,150 +378,92 @@ def multi_thread_download(self, url): except Exception as e: raise DownloadException(message=e) - offset = math.ceil(self.downloader.file_size / self.downloader.chunks) + pprint(self.downloader) + + offset = math.ceil(self.downloader.filesize / self.downloader.chunks) in_byte = 0 # Set initially here, but then override print( "Multi-part download -- {} -- {}".format( - self.downloader.asset["name"], Utils.format_bytes(self.downloader.file_size, type="size") + self.downloader.asset["name"], + Utils.format_bytes(self.downloader.filesize, type="size"), ) ) - # Queue up threads - with enlighten.get_manager() as manager: - status = manager.status_bar( - position=3, - status_format="{fill}Stage: {stage}{fill}{elapsed}", - color="bold_underline_bright_white_on_lightslategray", - justify=enlighten.Justify.CENTER, - stage="Initializing", - autorefresh=True, - min_delta=0.5, - ) + with concurrent.futures.ThreadPoolExecutor( + max_workers=self.concurrency + ) as executor: + for i in range(int(self.downloader.chunks)): + # Increment by the iterable + 1 so we don't mutiply by zero + out_byte = offset * (i + 1) - BAR_FORMAT = ( - "{desc}{desc_pad}|{bar}|{percentage:3.0f}% " - + "Downloading: {count_1:.2j}/{total:.2j} " - + "Completed: {count_2:.2j}/{total:.2j} " - + "[{elapsed}<{eta}, {rate:.2j}{unit}/s]" - ) + # Create task tuple + task = (self.downloader.asset['original'], in_byte, out_byte, i) - # Add counter to track completed chunks - initializing = manager.counter( - position=2, - total=float(self.downloader.file_size), - desc="Progress", - unit="B", - bar_format=BAR_FORMAT, + # Stagger start for each chunk by 0.1 seconds + if i < self.concurrency: + time.sleep(randint(1, 5) / 10) + + # Append tasks to futures list + self.futures.append(executor.submit(self._download_chunk, task)) + + # Reset new in byte equal to last out byte + in_byte = out_byte + + # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + print(chunk_size) + except Exception as exc: + print(exc) + + # Calculate and print stats + download_time = round((time.time() - start_time), 2) + pprint(self.downloader) + download_speed = round((self.downloader.filesize / download_time), 2) + + if self.downloader.checksum_verification == True: + # Check for checksum, if not present throw error + if self.downloader._get_checksum() == None: + raise AssetChecksumNotPresent + + # Calculate the file hash + if Utils.calculate_hash(self.destination) != self.downloader.original_checksum: + raise AssetChecksumMismatch + + # Log completion event + SDKLogger("downloads").info( + "Downloaded {} at {}".format( + Utils.format_bytes(self.downloader.filesize, type="size"), + download_speed, ) + ) - # Add additional counter - in_progress = initializing.add_subcounter("yellow", all_fields=True) - completed = initializing.add_subcounter("green", all_fields=True) - - # Set default state - initializing.refresh() - - status.update(stage="Downloading", 
color="green") + # Submit telemetry + transfer_stats = { + "speed": download_speed, + "time": download_time, + "cdn": AWSClient.check_cdn(self.original) + } - with concurrent.futures.ThreadPoolExecutor( - max_workers=self.concurrency - ) as executor: - for i in range(int(self.downloader.chunks)): - # Increment by the iterable + 1 so we don't mutiply by zero - out_byte = offset * (i + 1) - # Create task tuple - task = (url, in_byte, out_byte, i, in_progress) - # Stagger start for each chunk by 0.1 seconds - if i < self.concurrency: - time.sleep(0.1) - # Append tasks to futures list - self.futures.append(executor.submit(self._download_chunk, task)) - # Reset new in byte equal to last out byte - in_byte = out_byte - - # Keep updating the progress while we have > 0 bytes left. - # Wait on threads to finish - for future in concurrent.futures.as_completed(self.futures): - try: - chunk_size = future.result() - completed.update_from( - in_progress, float((chunk_size - 1)), force=True - ) - except Exception as exc: - print(exc) - - # Calculate and print stats - download_time = round((time.time() - start_time), 2) - download_speed = round((self.downloader.file_size / download_time), 2) - - if self.downloader.checksum_verification == True: - # Check for checksum, if not present throw error - if self.downloader._get_checksum() == None: - raise AssetChecksumNotPresent - else: - # Perform hash-verification - status.update(stage="Verifying") - - VERIFICATION_FORMAT = ( - "{desc}{desc_pad}|{bar}|{percentage:3.0f}% " - + "Progress: {count:.2j}/{total:.2j} " - + "[{elapsed}<{eta}, {rate:.2j}{unit}/s]" - ) - - # Add counter to track completed chunks - verification = manager.counter( - position=1, - total=float(self.downloader.file_size), - desc="Verifying", - unit="B", - bar_format=VERIFICATION_FORMAT, - color="purple", - ) - - # Calculate the file hash - if ( - Utils.calculate_hash( - self.destination, progress_callback=verification - ) - != self.original_checksum - ): - raise AssetChecksumMismatch - - # Update the header - status.update(stage="Download Complete!", force=True) - - # Log completion event - SDKLogger("downloads").info( - "Downloaded {} at {}".format( - Utils.format_bytes(self.downloader.file_size, type="size"), download_speed - ) - ) + # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) - # Submit telemetry - transfer_stats = { + # If stats = True, we return a dict with way more info, otherwise \ + if self.downloader.stats: + # We end by returning a dict with info about the download + dl_info = { + "destination": self.destination, "speed": download_speed, - "time": download_time, - "cdn": AWSClient.check_cdn(url), + "elapsed": download_time, + "cdn": AWSClient.check_cdn(self.original), + "concurrency": self.concurrency, + "size": self.downloader.filesize, + "chunks": self.downloader.chunks, } - - # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) - - # If stats = True, we return a dict with way more info, otherwise \ - if self.stats: - # We end by returning a dict with info about the download - dl_info = { - "destination": self.destination, - "speed": download_speed, - "elapsed": download_time, - "cdn": AWSClient.check_cdn(url), - "concurrency": self.concurrency, - "size": self.downloader.file_size, - "chunks": self.downloader.chunks, - } - return dl_info - else: - return self.destination + return dl_info + else: + return self.destination class TransferJob(AWSClient): diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 
273b0104..36c26e7f 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -1,15 +1,16 @@ -import requests +import concurrent.futures import threading +import time -from urllib3.util.retry import Retry +import requests from requests.adapters import HTTPAdapter +from token_bucket import Limiter, MemoryStorage +from urllib3.util.retry import Retry -from .version import ClientVersion -from .utils import PaginatedResponse -from .exceptions import PresentationException from .constants import default_thread_count, retryable_statuses - -# from .bandwidth import NetworkBandwidth, DiskBandwidth +from .exceptions import PresentationException +from .utils import PaginatedResponse +from .version import ClientVersion class HTTPClient(object): @@ -133,3 +134,43 @@ def get_specific_page(self, method, endpoint, payload, page): if method == "post": payload["page"] = page return self._api_call(method, endpoint, payload=payload) + + def exec_stream(callable, iterable, sync=lambda _: False, capacity=10, rate=10): + """ + Executes a stream according to a defined rate limit. + """ + limiter = Limiter(capacity, rate, MemoryStorage()) + futures = set() + + def execute(operation): + return (operation, callable(operation)) + + with concurrent.futures.ThreadPoolExecutor(max_workers=capacity) as executor: + while True: + if not limiter.consume("stream", 1): + start = int(time.time()) + done, pending = concurrent.futures.wait( + futures, return_when=concurrent.futures.FIRST_COMPLETED + ) + for future in done: + yield future.result() + + futures = pending + if (int(time.time()) - start) < 1: + time.sleep( + 1.0 / rate + ) # guarantee there's capacity in the rate limit at end of the loop + + operation = next(iterable, None) + + if not operation: + done, _ = concurrent.futures.wait(futures) + for future in done: + yield future.result() + break + + if sync(operation): + yield execute(operation) + continue + + futures.add(executor.submit(execute, operation)) diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 3023d138..6a181b30 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -1,7 +1,6 @@ import os import math import requests -import enlighten import threading import concurrent.futures @@ -59,7 +58,6 @@ def _upload_chunk(self, task): url = task[0] chunk_offset = task[1] chunk_id = task[2] - in_progress = task[3] chunks_total = len(self.asset["upload_urls"]) is_final_chunk = False @@ -68,9 +66,7 @@ def _upload_chunk(self, task): is_final_chunk = True session = self._get_session() - chunk_data = self._smart_read_chunk(chunk_offset, is_final_chunk) - in_progress.update(len(chunk_data)) try: r = session.put( @@ -94,61 +90,22 @@ def upload(self): upload_urls = self.asset["upload_urls"] chunk_offsets = self._calculate_chunks(total_size, chunk_count=len(upload_urls)) - - with enlighten.get_manager() as manager: - status = manager.status_bar( - position=3, - status_format="{fill}Stage: {stage}{fill}{elapsed}", - color="bold_underline_bright_white_on_lightslategray", - justify=enlighten.Justify.CENTER, - stage="Initializing", - autorefresh=True, - min_delta=0.5, - ) - - BAR_FORMAT = ( - "{desc}{desc_pad}|{bar}|{percentage:3.0f}% " - + "Uploading: {count_1:.2j}/{total:.2j} " - + "Completed: {count_2:.2j}/{total:.2j} " - + "[{elapsed}<{eta}, {rate:.2j}{unit}/s]" - ) - - # Add counter to track completed chunks - initializing = manager.counter( - position=2, - total=float(self.asset["filesize"]), - desc="Progress", - unit="B", - bar_format=BAR_FORMAT, - ) 
- - # Add additional counter - in_progress = initializing.add_subcounter("yellow", all_fields=True) - completed = initializing.add_subcounter("green", all_fields=True) - - # Set default state - initializing.refresh() - - status.update(stage="Uploading", color="green") - - with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: - for i in range(len(upload_urls)): - url = upload_urls[i] - chunk_offset = chunk_offsets[i] - - task = (url, chunk_offset, i, in_progress) - self.futures.append(executor.submit(self._upload_chunk, task)) - - # Keep updating the progress while we have > 0 bytes left. - # Wait on threads to finish - for future in concurrent.futures.as_completed(self.futures): - try: - chunk_size = future.result() - completed.update_from( - in_progress, float((chunk_size - 1)), force=True - ) - except Exception as exc: - print(exc) + with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor: + for i in range(len(upload_urls)): + url = upload_urls[i] + chunk_offset = chunk_offsets[i] + + task = (url, chunk_offset, i) + self.futures.append(executor.submit(self._upload_chunk, task)) + + # Keep updating the progress while we have > 0 bytes left. + # Wait on threads to finish + for future in concurrent.futures.as_completed(self.futures): + try: + chunk_size = future.result() + print(chunk_size) + except Exception as exc: + print(exc) def file_counter(self, folder): matches = [] diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index cc5ba0b9..09739c58 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -1,18 +1,21 @@ +import os import re import sys + import xxhash KB = 1024 MB = KB * KB +ENV = os.getenv('FRAMEIO_ENVIRONMENT', 'prod') - -def Reference(*args, **kwargs): - # print(kwargs["operation"]) - +def ApiReference(*args, **kwargs): def inner(func): """ do operations with func """ + if ENV == 'build': + print(f"API Operation: {kwargs.get('operation')}") + return func return inner diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index 3a2299b5..e0be9ad0 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -1,11 +1,13 @@ import os import mimetypes +from frameioclient.lib.transfer import AWSClient + from .projects import Project # from .helpers import FrameioHelpers from ..lib.service import Service -from ..lib import FrameioUploader, FrameioDownloader, constants, Reference +from ..lib import FrameioUploader, FrameioDownloader, ApiReference, constants class Asset(Service): def _build_asset_info(self, filepath): @@ -20,7 +22,7 @@ def _build_asset_info(self, filepath): return file_info - @Reference(operation="#getAsset") + @ApiReference(operation="#getAsset") def get(self, asset_id): """ Get an asset by id. @@ -31,7 +33,7 @@ def get(self, asset_id): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('get', endpoint) - @Reference(operation="#getAssets") + @ApiReference(operation="#getAssets") def get_children(self, asset_id, include=[], slim=False, **kwargs): """ Get a folder. @@ -81,7 +83,7 @@ def get_children(self, asset_id, include=[], slim=False, **kwargs): return self.client._api_call('get', endpoint, kwargs) - @Reference(operation="#createAsset") + @ApiReference(operation="#createAsset") def create(self, parent_asset_id, **kwargs): """ Create an asset. 
@@ -104,7 +106,7 @@ def create(self, parent_asset_id, **kwargs): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=kwargs) - @Reference(operation="#createAsset") + @ApiReference(operation="#createAsset") def create_folder(self, parent_asset_id, name="New Folder"): """ Create a new folder. @@ -123,7 +125,7 @@ def create_folder(self, parent_asset_id, name="New Folder"): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) - @Reference(operation="#createAsset") + @ApiReference(operation="#createAsset") def from_url(self, parent_asset_id, name, url): """ Create an asset from a URL. @@ -153,7 +155,7 @@ def from_url(self, parent_asset_id, name, url): endpoint = '/assets/{}/children'.format(parent_asset_id) return self.client._api_call('post', endpoint, payload=payload) - @Reference(operation="#updateAsset") + @ApiReference(operation="#updateAsset") def update(self, asset_id, **kwargs): """ Updates an asset @@ -169,7 +171,7 @@ def update(self, asset_id, **kwargs): endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('put', endpoint, kwargs) - @Reference(operation="#copyAsset") + @ApiReference(operation="#copyAsset") def copy(self, destination_folder_id, **kwargs): """ Copy an asset @@ -185,7 +187,7 @@ def copy(self, destination_folder_id, **kwargs): endpoint = '/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, kwargs) - @Reference(operation="#batchCopyAsset") + @ApiReference(operation="#batchCopyAsset") def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): """Bulk copy assets @@ -210,7 +212,7 @@ def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, payload) - @Reference(operation="#deleteAsset") + @ApiReference(operation="#deleteAsset") def delete(self, asset_id): """ Delete an asset @@ -297,7 +299,7 @@ def download(self, asset, download_folder, prefix=None, multi_part=False, replac client.assets.download(asset, "~./Downloads") """ downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, replace) - return downloader.download_handler() + return AWSClient(downloader, concurrency=5).multi_thread_download() def upload_folder(self, source_path, destination_id): """ diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py index 3f9882f9..033ce0b9 100644 --- a/scripts/benchmark/download.py +++ b/scripts/benchmark/download.py @@ -2,7 +2,8 @@ import sys from utils import timefunc -from frameioclient import FrameioClient +import frameioclient + from frameioclient.lib.bandwidth import NetworkBandwidth @@ -13,7 +14,7 @@ def download( size: str = "small", ): token = os.getenv("FRAMEIO_TOKEN") - client = FrameioClient(token) + client = frameioclient.FrameioClient(token) asset_info = client.assets.get(asset_id) download_info = client.assets.download( asset_info, destination, multi_part=True, replace=True diff --git a/tests/integration.py b/tests/integration.py index b1cb2ca2..8676c6b3 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -24,7 +24,7 @@ retries = 0 # Initialize the client -def init_client(): +def init_client() -> FrameioClient: if len(token) < 5: print("Bad token, exiting test.") sys.exit(1) @@ -40,7 +40,7 @@ def init_client(): return client # Verify local and source -def 
verify_local(client, dl_children): +def verify_local(client: FrameioClient, dl_children): # Compare remote filenames and hashes global dl_items dl_items = dict() @@ -74,7 +74,7 @@ def verify_local(client, dl_children): return True # Test download functionality -def test_download(client, override=False): +def test_download(client: FrameioClient, override=False): print("Testing download function...") if override: # Clearing download directory @@ -122,7 +122,7 @@ def test_download(client, override=False): return True # Test upload functionality -def test_upload(client): +def test_upload(client: FrameioClient): print("Beginning upload test") # Create new parent asset project_info = client.projects.get(project_id) @@ -316,7 +316,7 @@ def send_to_slack(message): else: return False -def clean_up(client, asset_to_delete): +def clean_up(client: FrameioClient, asset_to_delete): print("Removing files from test...") try: From 910b4ed6fc44dba16a5ce60c5d7d5e3ad21a2866 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:33:29 -0800 Subject: [PATCH 47/99] Tweak docs build for CircleCI --- docs/requirements.txt | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index a1f50ba6..91a410fb 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -3,6 +3,7 @@ sphinx-jekyll-builder sphinxcontrib-restbuilder contentful_management python-frontmatter -# frameioclient +frameioclient xxhash -furo \ No newline at end of file +furo +analytics-python \ No newline at end of file From b4ea321cdb31e3de93d633eb2ae7547f9c20b45e Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:39:04 -0800 Subject: [PATCH 48/99] Update language --- frameioclient/services/helpers.py | 1 - scripts/benchmark/utils.py | 8 ++++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py index ea65501a..973397c3 100644 --- a/frameioclient/services/helpers.py +++ b/frameioclient/services/helpers.py @@ -101,7 +101,6 @@ def download_project(self, project_id, destination): # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") def recursive_downloader(self, directory, asset, count=0): - # TODO resolve this clusterfuck of downloads print(f"Directory {directory}") try: diff --git a/scripts/benchmark/utils.py b/scripts/benchmark/utils.py index 314b0387..5c2eae8d 100644 --- a/scripts/benchmark/utils.py +++ b/scripts/benchmark/utils.py @@ -2,8 +2,9 @@ from timeit import default_timer as timer + def timefunc(func, *args, **kwargs): - """Time a function. + """Time a function. 
args: iterations=3 @@ -12,7 +13,7 @@ def timefunc(func, *args, **kwargs): timeit(myfunc, 1, b=2) """ try: - iterations = kwargs.pop('iterations') + iterations = kwargs.pop("iterations") except KeyError: iterations = 3 elapsed = sys.maxsize @@ -20,6 +21,5 @@ def timefunc(func, *args, **kwargs): start = timer() result = func(*args, **kwargs) elapsed = min(timer() - start, elapsed) - print(('Best of {} {}(): {:.9f}'.format(iterations, func.__name__, elapsed))) + print(("Best of {} {}(): {:.9f}".format(iterations, func.__name__, elapsed))) return result - From 19ced06ac65c167bd65108cdebbd37883b750b50 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:39:11 -0800 Subject: [PATCH 49/99] Add missing dependency --- docs/requirements.txt | 3 ++- setup.py | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 91a410fb..a32b0796 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -6,4 +6,5 @@ python-frontmatter frameioclient xxhash furo -analytics-python \ No newline at end of file +analytics-python +token-bucket \ No newline at end of file diff --git a/setup.py b/setup.py index 660e0315..d09a041c 100644 --- a/setup.py +++ b/setup.py @@ -32,6 +32,7 @@ def run(self): 'futures; python_version == "2.7"', 'importlib-metadata ~= 1.0 ; python_version < "3.8"', 'requests', + 'token-bucket', 'speedtest-cli', 'urllib3', 'xxhash', From f654ad5977dedee81b62999d0b238f9aae899458 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:55:20 -0800 Subject: [PATCH 50/99] Fix tests for < py3.6.2 --- docs/requirements.txt | 3 +- tests/integration.py | 346 +-------------------------------------- tests/py2_integration.py | 342 ++++++++++++++++++++++++++++++++++++++ tests/py3_integration.py | 342 ++++++++++++++++++++++++++++++++++++++ 4 files changed, 695 insertions(+), 338 deletions(-) create mode 100644 tests/py2_integration.py create mode 100644 tests/py3_integration.py diff --git a/docs/requirements.txt b/docs/requirements.txt index a32b0796..3a864abd 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -7,4 +7,5 @@ frameioclient xxhash furo analytics-python -token-bucket \ No newline at end of file +token-bucket +speedtest-cli \ No newline at end of file diff --git a/tests/integration.py b/tests/integration.py index 8676c6b3..80ecbf73 100644 --- a/tests/integration.py +++ b/tests/integration.py @@ -1,342 +1,14 @@ -import os import sys -import json -import time -import shutil -import socket -import requests -import platform +# Send integration test to py2 or py3 based on stuff -from math import ceil -from pprint import pprint, pformat -from datetime import datetime -from frameioclient import FrameioClient, Utils, KB, MB - -token = os.getenv("FRAMEIO_TOKEN") # Your Frame.io token -project_id = os.getenv("PROJECT_ID") # Project you want to upload files back into -download_asset_id = os.getenv("DOWNLOAD_FOLDER_ID") # Source folder on Frame.io (to then verify against) -environment = os.getenv("ENVIRONMENT", default="PRODUCTION") -slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL") -ci_job_name = os.getenv("CIRCLE_JOB", default=None) - -download_dir = 'downloads' - -retries = 0 - -# Initialize the client -def init_client() -> FrameioClient: - if len(token) < 5: - print("Bad token, exiting test.") - sys.exit(1) - - if environment == "PRODUCTION": - client = FrameioClient(token, threads=10) - print("Client connection initialized.") - - else: - client = FrameioClient(token, host='https://api.dev.frame.io', 
threads=10) - print("Client connection initialized.") - - return client - -# Verify local and source -def verify_local(client: FrameioClient, dl_children): - # Compare remote filenames and hashes - global dl_items - dl_items = dict() - - # Iterate over local directory and get filenames and hashes - dled_files = os.listdir(download_dir) - for count, fn in enumerate(dled_files, start=1): - print("{}/{} Generating hash for: {}".format(count, len(dled_files), fn)) - dl_file_path = os.path.join(os.path.abspath(os.path.curdir), download_dir, fn) - print("Path to downloaded file for hashing: {}".format(dl_file_path)) - xxhash = Utils.calculate_hash(dl_file_path) - xxhash_name = "{}_{}".format(fn, 'xxHash') - dl_items[xxhash_name] = xxhash - - print("QCing Downloaded Files...") - - print("Original Items Check: \n") - og_items = flatten_asset_children(dl_children) - pprint(og_items) - - print("Downloaded Items Check: \n") - pprint(dl_items) - - pass_fail = Utils.compare_items(og_items, dl_items) - - # If verification fails here, try downloading again. - if pass_fail == False: - print("Mismatch between original and downloaded files, re-downloading...") - test_download(client, override=True) - else: - return True - -# Test download functionality -def test_download(client: FrameioClient, override=False): - print("Testing download function...") - if override: - # Clearing download directory - shutil.rmtree(download_dir) - - if os.path.isdir(download_dir): - print("Local downloads folder detected...") - asset_list = client.assets.get_children( - download_asset_id, - page=1, - page_size=40, - include="children" - ) - - verify_local(client, asset_list) - return True - - os.mkdir(download_dir) - - asset_list = client.assets.get_children( - download_asset_id, - page=1, - page_size=40, - include="children" - ) - - print("Downloading {} files.".format(len(asset_list))) - for count, asset in enumerate(asset_list, start=1): - start_time = time.time() - print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name'])) - - client.assets.download(asset, download_dir, multi_part=True) - - download_time = time.time() - start_time - download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) - - print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) - - print("Done downloading files") - - # Verify downloads - if verify_local(client, asset_list): - print("Download verification passed") - - return True - -# Test upload functionality -def test_upload(client: FrameioClient): - print("Beginning upload test") - # Create new parent asset - project_info = client.projects.get(project_id) - root_asset_id = project_info['root_asset_id'] - - print("Creating new folder to upload to") - new_folder = client.assets.create( - parent_asset_id=root_asset_id, - name="{}_{}_Py{}_{}".format(socket.gethostname(), platform.system(), platform.python_version(), datetime.now().strftime("%B-%d-%Y")), - type="folder", - ) - - new_parent_id = new_folder['id'] - - print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name'])) - - # Upload all the files we downloaded earlier - dled_files = os.listdir(download_dir) - - for count, fn in enumerate(dled_files, start=1): - start_time = time.time() - ul_abs_path = os.path.join(os.curdir, download_dir, fn) - filesize = os.path.getsize(ul_abs_path) - filename = os.path.basename(ul_abs_path) - - print("{}/{} Beginning to upload: {}".format(count, len(dled_files), fn)) - - 
client.assets.upload(new_parent_id, ul_abs_path) - - upload_time = time.time() - start_time - upload_speed = Utils.format_bytes(ceil(filesize/(upload_time))) - - print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) - - print("Sleeping for 10 seconds to allow upload and media analysis to finish...") - time.sleep(10) - - print("Continuing...") - - return new_parent_id - -# Flatten asset children and pull out important info for comparison -def flatten_asset_children(asset_children): - flat_dict = dict() - - for asset in asset_children: - try: - xxhash_name = "{}_{}".format(asset['name'], 'xxHash') - xxhash_checksum = asset['checksums']['xx_hash'] - - if sys.version_info.major < 3: # if Python 2 convert the field - xxhash_checksum = str(xxhash_checksum.encode('utf-8')) - - flat_dict[xxhash_name] = xxhash_checksum - - except TypeError as e: - print(e) - xxhash_name = "{}_{}".format(asset['name'], 'xxHash') - flat_dict[xxhash_name] = "missing" - - continue - - return flat_dict - -def check_for_checksums(client, upload_folder_id): - # Get asset children for upload folder - asset_children = client.assets.get_children( - upload_folder_id, - page=1, - page_size=40, - include="children" - ) - - global retries - print("Checking for checksums attempt #{}".format(retries+1)) - - if retries < 20: - for asset in asset_children: - try: - asset['checksums']['xx_hash'] - print("Success...") - print("Asset ID: {}".format(asset['id'])) - print("Asset Name: {}".format(asset['name'])) - print("Checksum dict: {}".format(asset['checksums'])) - except TypeError as e: - # print(e) - print("Failure...") - print("Checksum dict: {}".format(asset['checksums'])) - print("Asset ID: {}".format(asset['id'])) - print("Asset Name: {}".format(asset['name'])) - print("Checksums not yet calculated, sleeping for 15 seconds.") - time.sleep(15) - retries += 1 - check_for_checksums(client, upload_folder_id) - return True - else: - return False - -def check_upload_completion(client, download_folder_id, upload_folder_id): - # Do a comparison against filenames and filesizes here to make sure they match - - print("Beginning upload comparison check") - - # Get asset children for download folder - dl_asset_children = client.assets.get_children( - download_folder_id, - page=1, - page_size=40, - include="children" - ) - - print("Got asset children for original download folder") - - print("Making sure checksums are calculated before verifying") - check_for_checksums(client, upload_folder_id) - - # Get asset children for upload folder - ul_asset_children = client.assets.get_children( - upload_folder_id, - page=1, - page_size=40, - include="children" - ) - - print("Got asset children for uploaded folder") - - global dl_items # Get the global dl_items - - # if len(dl_items.items) < 1: - - og_items = flatten_asset_children(dl_asset_children) - ul_items = flatten_asset_children(ul_asset_children) - - print("'Completed' uploads: {}/{}".format(int(len(ul_items)), int(len(og_items)))) - print("Percentage uploads completed but not verified: {:.2%}".format(len(ul_items)/len(og_items))) - - print("Running verification...") - - print("OG Items Check:") - pprint(og_items) - - print("DL Items Check:") - pprint(dl_items) - - print("UL Items Check:") - pprint(ul_items) - - pass_fail = Utils.compare_items(og_items, ul_items) - - print("Verification complete for {}/{} uploaded assets.".format(int(len(ul_items)), int(len(og_items)))) - - if ci_job_name is not None: - print("CircleCI Job Name: 
{}".format(ci_job_name)) - if ci_job_name == "upload_test_job": - send_to_slack(format_slack_message(pass_fail, og_items, dl_items, ul_items)) - - if pass_fail == True: - print("Integration test passed! :)") - else: - print("Integration test failed! :(") - sys.exit(1) - - return True - -def format_slack_message(pass_fail, og_items, dl_items, ul_items): - # Format slack message for sending - message = "Test Pass/Fail: *{}*\n\n*Original assets:* \n{}\n*Downloaded assets:* \n {}\n*Uploaded assets:* \n {}".format(pass_fail, pformat(og_items), pformat(dl_items), pformat(ul_items)) - print(message) - - return message - -def send_to_slack(message): - # Send Slack message to provided - if len(slack_webhook_url) < 2: - print("No Slack webhook ENV var provided, not sending a Slack message...") - - data = { - 'text': message, - 'username': 'Upload Integration Test', - 'icon_emoji': ':robot_face:' - } +if __name__ == "__main__": + version_major = sys.version_info[0] + version_minor = sys.version_info[1] - response = requests.post(slack_webhook_url, data=json.dumps( - data), headers={'Content-Type': 'application/json'}) - - print('Response: ' + str(response.text)) - print('Response code: ' + str(response.status_code)) + if version_major > 3 and version_minor > 6: + import py3_integration + py3_integration.run_test() - if response.status_code == 200: - return True else: - return False - -def clean_up(client: FrameioClient, asset_to_delete): - print("Removing files from test...") - - try: - client._api_call('delete', '/assets/{}'.format(asset_to_delete)) - print("Managed to cleanup!") - except Exception as e: - print(e) - - return True - -def run_test(): - print("Beginning Integration test...") - - client = init_client() - test_download(client) - upload_folder_id = test_upload(client) - check_upload_completion(client, download_asset_id, upload_folder_id) - # clean_up(client, upload_folder_id) - - print("Test complete, exiting...") - -if __name__ == "__main__": - run_test() + import py2_integration + py2_integration.run_test() \ No newline at end of file diff --git a/tests/py2_integration.py b/tests/py2_integration.py new file mode 100644 index 00000000..b1cb2ca2 --- /dev/null +++ b/tests/py2_integration.py @@ -0,0 +1,342 @@ +import os +import sys +import json +import time +import shutil +import socket +import requests +import platform + +from math import ceil +from pprint import pprint, pformat +from datetime import datetime +from frameioclient import FrameioClient, Utils, KB, MB + +token = os.getenv("FRAMEIO_TOKEN") # Your Frame.io token +project_id = os.getenv("PROJECT_ID") # Project you want to upload files back into +download_asset_id = os.getenv("DOWNLOAD_FOLDER_ID") # Source folder on Frame.io (to then verify against) +environment = os.getenv("ENVIRONMENT", default="PRODUCTION") +slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL") +ci_job_name = os.getenv("CIRCLE_JOB", default=None) + +download_dir = 'downloads' + +retries = 0 + +# Initialize the client +def init_client(): + if len(token) < 5: + print("Bad token, exiting test.") + sys.exit(1) + + if environment == "PRODUCTION": + client = FrameioClient(token, threads=10) + print("Client connection initialized.") + + else: + client = FrameioClient(token, host='https://api.dev.frame.io', threads=10) + print("Client connection initialized.") + + return client + +# Verify local and source +def verify_local(client, dl_children): + # Compare remote filenames and hashes + global dl_items + dl_items = dict() + + # Iterate over local directory and get 
filenames and hashes + dled_files = os.listdir(download_dir) + for count, fn in enumerate(dled_files, start=1): + print("{}/{} Generating hash for: {}".format(count, len(dled_files), fn)) + dl_file_path = os.path.join(os.path.abspath(os.path.curdir), download_dir, fn) + print("Path to downloaded file for hashing: {}".format(dl_file_path)) + xxhash = Utils.calculate_hash(dl_file_path) + xxhash_name = "{}_{}".format(fn, 'xxHash') + dl_items[xxhash_name] = xxhash + + print("QCing Downloaded Files...") + + print("Original Items Check: \n") + og_items = flatten_asset_children(dl_children) + pprint(og_items) + + print("Downloaded Items Check: \n") + pprint(dl_items) + + pass_fail = Utils.compare_items(og_items, dl_items) + + # If verification fails here, try downloading again. + if pass_fail == False: + print("Mismatch between original and downloaded files, re-downloading...") + test_download(client, override=True) + else: + return True + +# Test download functionality +def test_download(client, override=False): + print("Testing download function...") + if override: + # Clearing download directory + shutil.rmtree(download_dir) + + if os.path.isdir(download_dir): + print("Local downloads folder detected...") + asset_list = client.assets.get_children( + download_asset_id, + page=1, + page_size=40, + include="children" + ) + + verify_local(client, asset_list) + return True + + os.mkdir(download_dir) + + asset_list = client.assets.get_children( + download_asset_id, + page=1, + page_size=40, + include="children" + ) + + print("Downloading {} files.".format(len(asset_list))) + for count, asset in enumerate(asset_list, start=1): + start_time = time.time() + print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name'])) + + client.assets.download(asset, download_dir, multi_part=True) + + download_time = time.time() - start_time + download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) + + print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) + + print("Done downloading files") + + # Verify downloads + if verify_local(client, asset_list): + print("Download verification passed") + + return True + +# Test upload functionality +def test_upload(client): + print("Beginning upload test") + # Create new parent asset + project_info = client.projects.get(project_id) + root_asset_id = project_info['root_asset_id'] + + print("Creating new folder to upload to") + new_folder = client.assets.create( + parent_asset_id=root_asset_id, + name="{}_{}_Py{}_{}".format(socket.gethostname(), platform.system(), platform.python_version(), datetime.now().strftime("%B-%d-%Y")), + type="folder", + ) + + new_parent_id = new_folder['id'] + + print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name'])) + + # Upload all the files we downloaded earlier + dled_files = os.listdir(download_dir) + + for count, fn in enumerate(dled_files, start=1): + start_time = time.time() + ul_abs_path = os.path.join(os.curdir, download_dir, fn) + filesize = os.path.getsize(ul_abs_path) + filename = os.path.basename(ul_abs_path) + + print("{}/{} Beginning to upload: {}".format(count, len(dled_files), fn)) + + client.assets.upload(new_parent_id, ul_abs_path) + + upload_time = time.time() - start_time + upload_speed = Utils.format_bytes(ceil(filesize/(upload_time))) + + print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) + + print("Sleeping for 10 seconds to allow upload and 
media analysis to finish...") + time.sleep(10) + + print("Continuing...") + + return new_parent_id + +# Flatten asset children and pull out important info for comparison +def flatten_asset_children(asset_children): + flat_dict = dict() + + for asset in asset_children: + try: + xxhash_name = "{}_{}".format(asset['name'], 'xxHash') + xxhash_checksum = asset['checksums']['xx_hash'] + + if sys.version_info.major < 3: # if Python 2 convert the field + xxhash_checksum = str(xxhash_checksum.encode('utf-8')) + + flat_dict[xxhash_name] = xxhash_checksum + + except TypeError as e: + print(e) + xxhash_name = "{}_{}".format(asset['name'], 'xxHash') + flat_dict[xxhash_name] = "missing" + + continue + + return flat_dict + +def check_for_checksums(client, upload_folder_id): + # Get asset children for upload folder + asset_children = client.assets.get_children( + upload_folder_id, + page=1, + page_size=40, + include="children" + ) + + global retries + print("Checking for checksums attempt #{}".format(retries+1)) + + if retries < 20: + for asset in asset_children: + try: + asset['checksums']['xx_hash'] + print("Success...") + print("Asset ID: {}".format(asset['id'])) + print("Asset Name: {}".format(asset['name'])) + print("Checksum dict: {}".format(asset['checksums'])) + except TypeError as e: + # print(e) + print("Failure...") + print("Checksum dict: {}".format(asset['checksums'])) + print("Asset ID: {}".format(asset['id'])) + print("Asset Name: {}".format(asset['name'])) + print("Checksums not yet calculated, sleeping for 15 seconds.") + time.sleep(15) + retries += 1 + check_for_checksums(client, upload_folder_id) + return True + else: + return False + +def check_upload_completion(client, download_folder_id, upload_folder_id): + # Do a comparison against filenames and filesizes here to make sure they match + + print("Beginning upload comparison check") + + # Get asset children for download folder + dl_asset_children = client.assets.get_children( + download_folder_id, + page=1, + page_size=40, + include="children" + ) + + print("Got asset children for original download folder") + + print("Making sure checksums are calculated before verifying") + check_for_checksums(client, upload_folder_id) + + # Get asset children for upload folder + ul_asset_children = client.assets.get_children( + upload_folder_id, + page=1, + page_size=40, + include="children" + ) + + print("Got asset children for uploaded folder") + + global dl_items # Get the global dl_items + + # if len(dl_items.items) < 1: + + og_items = flatten_asset_children(dl_asset_children) + ul_items = flatten_asset_children(ul_asset_children) + + print("'Completed' uploads: {}/{}".format(int(len(ul_items)), int(len(og_items)))) + print("Percentage uploads completed but not verified: {:.2%}".format(len(ul_items)/len(og_items))) + + print("Running verification...") + + print("OG Items Check:") + pprint(og_items) + + print("DL Items Check:") + pprint(dl_items) + + print("UL Items Check:") + pprint(ul_items) + + pass_fail = Utils.compare_items(og_items, ul_items) + + print("Verification complete for {}/{} uploaded assets.".format(int(len(ul_items)), int(len(og_items)))) + + if ci_job_name is not None: + print("CircleCI Job Name: {}".format(ci_job_name)) + if ci_job_name == "upload_test_job": + send_to_slack(format_slack_message(pass_fail, og_items, dl_items, ul_items)) + + if pass_fail == True: + print("Integration test passed! :)") + else: + print("Integration test failed! 
:(") + sys.exit(1) + + return True + +def format_slack_message(pass_fail, og_items, dl_items, ul_items): + # Format slack message for sending + message = "Test Pass/Fail: *{}*\n\n*Original assets:* \n{}\n*Downloaded assets:* \n {}\n*Uploaded assets:* \n {}".format(pass_fail, pformat(og_items), pformat(dl_items), pformat(ul_items)) + print(message) + + return message + +def send_to_slack(message): + # Send Slack message to provided + if len(slack_webhook_url) < 2: + print("No Slack webhook ENV var provided, not sending a Slack message...") + + data = { + 'text': message, + 'username': 'Upload Integration Test', + 'icon_emoji': ':robot_face:' + } + + response = requests.post(slack_webhook_url, data=json.dumps( + data), headers={'Content-Type': 'application/json'}) + + print('Response: ' + str(response.text)) + print('Response code: ' + str(response.status_code)) + + if response.status_code == 200: + return True + else: + return False + +def clean_up(client, asset_to_delete): + print("Removing files from test...") + + try: + client._api_call('delete', '/assets/{}'.format(asset_to_delete)) + print("Managed to cleanup!") + except Exception as e: + print(e) + + return True + +def run_test(): + print("Beginning Integration test...") + + client = init_client() + test_download(client) + upload_folder_id = test_upload(client) + check_upload_completion(client, download_asset_id, upload_folder_id) + # clean_up(client, upload_folder_id) + + print("Test complete, exiting...") + +if __name__ == "__main__": + run_test() diff --git a/tests/py3_integration.py b/tests/py3_integration.py new file mode 100644 index 00000000..c294aa89 --- /dev/null +++ b/tests/py3_integration.py @@ -0,0 +1,342 @@ +import os +import sys +import json +import time +import shutil +import socket +import requests +import platform + +from math import ceil +from pprint import pprint, pformat +from datetime import datetime +from frameioclient import FrameioClient, Utils, KB, MB + +token = os.getenv("FRAMEIO_TOKEN") # Your Frame.io token +project_id = os.getenv("PROJECT_ID") # Project you want to upload files back into +download_asset_id = os.getenv("DOWNLOAD_FOLDER_ID") # Source folder on Frame.io (to then verify against) +environment = os.getenv("ENVIRONMENT", default="PRODUCTION") +slack_webhook_url = os.getenv("SLACK_WEBHOOK_URL") +ci_job_name = os.getenv("CIRCLE_JOB", default=None) + +download_dir = 'downloads' + +retries = 0 + +# Initialize the client +def init_client() -> FrameioClient: + if len(token) < 5: + print("Bad token, exiting test.") + sys.exit(1) + + if environment == "PRODUCTION": + client = FrameioClient(token, threads=10) + print("Client connection initialized.") + + else: + client = FrameioClient(token, host='https://api.dev.frame.io', threads=10) + print("Client connection initialized.") + + return client + +# Verify local and source +def verify_local(client: FrameioClient, dl_children): + # Compare remote filenames and hashes + global dl_items + dl_items = dict() + + # Iterate over local directory and get filenames and hashes + dled_files = os.listdir(download_dir) + for count, fn in enumerate(dled_files, start=1): + print("{}/{} Generating hash for: {}".format(count, len(dled_files), fn)) + dl_file_path = os.path.join(os.path.abspath(os.path.curdir), download_dir, fn) + print("Path to downloaded file for hashing: {}".format(dl_file_path)) + xxhash = Utils.calculate_hash(dl_file_path) + xxhash_name = "{}_{}".format(fn, 'xxHash') + dl_items[xxhash_name] = xxhash + + print("QCing Downloaded Files...") + + 
print("Original Items Check: \n") + og_items = flatten_asset_children(dl_children) + pprint(og_items) + + print("Downloaded Items Check: \n") + pprint(dl_items) + + pass_fail = Utils.compare_items(og_items, dl_items) + + # If verification fails here, try downloading again. + if pass_fail == False: + print("Mismatch between original and downloaded files, re-downloading...") + test_download(client, override=True) + else: + return True + +# Test download functionality +def test_download(client: FrameioClient, override=False): + print("Testing download function...") + if override: + # Clearing download directory + shutil.rmtree(download_dir) + + if os.path.isdir(download_dir): + print("Local downloads folder detected...") + asset_list = client.assets.get_children( + download_asset_id, + page=1, + page_size=40, + include="children" + ) + + verify_local(client, asset_list) + return True + + os.mkdir(download_dir) + + asset_list = client.assets.get_children( + download_asset_id, + page=1, + page_size=40, + include="children" + ) + + print("Downloading {} files.".format(len(asset_list))) + for count, asset in enumerate(asset_list, start=1): + start_time = time.time() + print("{}/{} Beginning to download: {}".format(count, len(asset_list), asset['name'])) + + client.assets.download(asset, download_dir, multi_part=True) + + download_time = time.time() - start_time + download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) + + print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) + + print("Done downloading files") + + # Verify downloads + if verify_local(client, asset_list): + print("Download verification passed") + + return True + +# Test upload functionality +def test_upload(client: FrameioClient): + print("Beginning upload test") + # Create new parent asset + project_info = client.projects.get(project_id) + root_asset_id = project_info['root_asset_id'] + + print("Creating new folder to upload to") + new_folder = client.assets.create( + parent_asset_id=root_asset_id, + name="{}_{}_Py{}_{}".format(socket.gethostname(), platform.system(), platform.python_version(), datetime.now().strftime("%B-%d-%Y")), + type="folder", + ) + + new_parent_id = new_folder['id'] + + print("Folder created, id: {}, name: {}".format(new_parent_id, new_folder['name'])) + + # Upload all the files we downloaded earlier + dled_files = os.listdir(download_dir) + + for count, fn in enumerate(dled_files, start=1): + start_time = time.time() + ul_abs_path = os.path.join(os.curdir, download_dir, fn) + filesize = os.path.getsize(ul_abs_path) + filename = os.path.basename(ul_abs_path) + + print("{}/{} Beginning to upload: {}".format(count, len(dled_files), fn)) + + client.assets.upload(new_parent_id, ul_abs_path) + + upload_time = time.time() - start_time + upload_speed = Utils.format_bytes(ceil(filesize/(upload_time))) + + print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) + + print("Sleeping for 10 seconds to allow upload and media analysis to finish...") + time.sleep(10) + + print("Continuing...") + + return new_parent_id + +# Flatten asset children and pull out important info for comparison +def flatten_asset_children(asset_children): + flat_dict = dict() + + for asset in asset_children: + try: + xxhash_name = "{}_{}".format(asset['name'], 'xxHash') + xxhash_checksum = asset['checksums']['xx_hash'] + + if sys.version_info.major < 3: # if Python 2 convert the field + xxhash_checksum = 
str(xxhash_checksum.encode('utf-8')) + + flat_dict[xxhash_name] = xxhash_checksum + + except TypeError as e: + print(e) + xxhash_name = "{}_{}".format(asset['name'], 'xxHash') + flat_dict[xxhash_name] = "missing" + + continue + + return flat_dict + +def check_for_checksums(client, upload_folder_id): + # Get asset children for upload folder + asset_children = client.assets.get_children( + upload_folder_id, + page=1, + page_size=40, + include="children" + ) + + global retries + print("Checking for checksums attempt #{}".format(retries+1)) + + if retries < 20: + for asset in asset_children: + try: + asset['checksums']['xx_hash'] + print("Success...") + print("Asset ID: {}".format(asset['id'])) + print("Asset Name: {}".format(asset['name'])) + print("Checksum dict: {}".format(asset['checksums'])) + except TypeError as e: + # print(e) + print("Failure...") + print("Checksum dict: {}".format(asset['checksums'])) + print("Asset ID: {}".format(asset['id'])) + print("Asset Name: {}".format(asset['name'])) + print("Checksums not yet calculated, sleeping for 15 seconds.") + time.sleep(15) + retries += 1 + check_for_checksums(client, upload_folder_id) + return True + else: + return False + +def check_upload_completion(client, download_folder_id, upload_folder_id): + # Do a comparison against filenames and filesizes here to make sure they match + + print("Beginning upload comparison check") + + # Get asset children for download folder + dl_asset_children = client.assets.get_children( + download_folder_id, + page=1, + page_size=40, + include="children" + ) + + print("Got asset children for original download folder") + + print("Making sure checksums are calculated before verifying") + check_for_checksums(client, upload_folder_id) + + # Get asset children for upload folder + ul_asset_children = client.assets.get_children( + upload_folder_id, + page=1, + page_size=40, + include="children" + ) + + print("Got asset children for uploaded folder") + + global dl_items # Get the global dl_items + + # if len(dl_items.items) < 1: + + og_items = flatten_asset_children(dl_asset_children) + ul_items = flatten_asset_children(ul_asset_children) + + print("'Completed' uploads: {}/{}".format(int(len(ul_items)), int(len(og_items)))) + print("Percentage uploads completed but not verified: {:.2%}".format(len(ul_items)/len(og_items))) + + print("Running verification...") + + print("OG Items Check:") + pprint(og_items) + + print("DL Items Check:") + pprint(dl_items) + + print("UL Items Check:") + pprint(ul_items) + + pass_fail = Utils.compare_items(og_items, ul_items) + + print("Verification complete for {}/{} uploaded assets.".format(int(len(ul_items)), int(len(og_items)))) + + if ci_job_name is not None: + print("CircleCI Job Name: {}".format(ci_job_name)) + if ci_job_name == "upload_test_job": + send_to_slack(format_slack_message(pass_fail, og_items, dl_items, ul_items)) + + if pass_fail == True: + print("Integration test passed! :)") + else: + print("Integration test failed! 
:(") + sys.exit(1) + + return True + +def format_slack_message(pass_fail, og_items, dl_items, ul_items): + # Format slack message for sending + message = "Test Pass/Fail: *{}*\n\n*Original assets:* \n{}\n*Downloaded assets:* \n {}\n*Uploaded assets:* \n {}".format(pass_fail, pformat(og_items), pformat(dl_items), pformat(ul_items)) + print(message) + + return message + +def send_to_slack(message: str): + # Send Slack message to provided + if len(slack_webhook_url) < 2: + print("No Slack webhook ENV var provided, not sending a Slack message...") + + data = { + 'text': message, + 'username': 'Upload Integration Test', + 'icon_emoji': ':robot_face:' + } + + response = requests.post(slack_webhook_url, data=json.dumps( + data), headers={'Content-Type': 'application/json'}) + + print('Response: ' + str(response.text)) + print('Response code: ' + str(response.status_code)) + + if response.status_code == 200: + return True + else: + return False + +def clean_up(client: FrameioClient, asset_to_delete: str): + print("Removing files from test...") + + try: + client._api_call('delete', '/assets/{}'.format(asset_to_delete)) + print("Managed to cleanup!") + except Exception as e: + print(e) + + return True + +def run_test(): + print("Beginning Integration test...") + + client = init_client() + test_download(client) + upload_folder_id = test_upload(client) + check_upload_completion(client, download_asset_id, upload_folder_id) + # clean_up(client, upload_folder_id) + + print("Test complete, exiting...") + +if __name__ == "__main__": + run_test() From f4444b00d95446cd9162a215cfa5c7d97ea8b44a Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:57:51 -0800 Subject: [PATCH 51/99] Fix f-string issue --- frameioclient/lib/upload.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 6a181b30..4ea37087 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -141,7 +141,7 @@ def recursive_upload(self, client, folder, parent_asset_id): complete_dir_obj = os.path.join(folder, file_p) print( - f"Starting {self.file_num:02d}/{self.file_count}, Size: {Utils.format_bytes(os.path.getsize(complete_dir_obj), type='size')}, Name: {file_p}" + "Starting {:02d}/{}, Size: {}, Name: {}".format(self.file_num, self.file_count, Utils.format_bytes(os.path.getsize(complete_dir_obj), type='size'), file_p) ) client.assets.upload(parent_asset_id, complete_dir_obj) From e35dea321cf9e9526c049e4b771fac6903af0185 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 16:59:33 -0800 Subject: [PATCH 52/99] Remove splats --- frameioclient/lib/telemetry.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py index 8354ad61..35a079b4 100644 --- a/frameioclient/lib/telemetry.py +++ b/frameioclient/lib/telemetry.py @@ -38,9 +38,9 @@ def push(self, event_name, properties): status = analytics.track( self.user_id, event_name, - properties={**properties}, - context={**self.build_context()}, - integrations={**self.integrations}, + properties=properties, + context=self.build_context(), + integrations=self.integrations, ) except Exception as e: self.logger.info(e, event_name, properties) From 2f2271c75cc1c9d4e9f2fad4986624915362f1a3 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 3 Jan 2022 17:01:11 -0800 Subject: [PATCH 53/99] Fix for py2 --- frameioclient/lib/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 09739c58..2d9e4445 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -14,7 +14,7 @@ def inner(func): do operations with func """ if ENV == 'build': - print(f"API Operation: {kwargs.get('operation')}") + print("API Operation: {}".format(kwargs.get('operation'))) return func From eab63de9b77f81f277068a85457914444592f453 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 7 Jan 2022 13:32:52 -0800 Subject: [PATCH 54/99] Upstream dep update to more inclusive language --- frameioclient/lib/transport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 36c26e7f..cdc28363 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -39,7 +39,7 @@ def __init__(self, threads=default_thread_count): total=100, backoff_factor=2, status_forcelist=retryable_statuses, - method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"], + allowed_methods=["GET", "POST", "PUT", "GET", "DELETE"], ) # Create real thread From 14b3fdd173752262d48d42538fe40a37d43876c4 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Tue, 11 Jan 2022 11:18:05 -0800 Subject: [PATCH 55/99] Update recursive uploader code --- examples/assets/recursive_upload.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/examples/assets/recursive_upload.py b/examples/assets/recursive_upload.py index ae0fb421..462998e5 100644 --- a/examples/assets/recursive_upload.py +++ b/examples/assets/recursive_upload.py @@ -20,7 +20,7 @@ def create_n_upload(task): file_n = os.path.split(file_p)[1] file_mime = mimetypes.guess_type(abs_path)[0] - asset = client.create_asset( + asset = client.assets.create( parent_asset_id=parent_asset_id, name=file_n, type="file", @@ -29,13 +29,13 @@ def create_n_upload(task): ) with open(abs_path, "rb") as ul_file: - asset_info = client.upload(asset, ul_file) + asset_info = client.assets._upload(asset, ul_file) return asset_info def create_folder(folder_n, parent_asset_id): - asset = client.create_asset( + asset = client.assets.create( parent_asset_id=parent_asset_id, name=folder_n, type="folder", From 08efeda5b5988a06a8e79cbd73bf24878a0161e2 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Wed, 2 Feb 2022 23:51:42 -0800 Subject: [PATCH 56/99] Fix collaborator and team manager functions --- frameioclient/services/projects.py | 4 ++-- frameioclient/services/teams.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py index 49def3c2..386cd67a 100644 --- a/frameioclient/services/projects.py +++ b/frameioclient/services/projects.py @@ -140,7 +140,7 @@ def add_collaborator(self, project_id, email): payload = {"email": email} endpoint = "/projects/{}/collaborators".format(project_id) - return self._api_call("post", endpoint, payload=payload) + return self.client._api_call("post", endpoint, payload=payload) def remove_collaborator(self, project_id, email): """ @@ -159,4 +159,4 @@ def remove_collaborator(self, project_id, email): """ endpoint = "/projects/{}/collaborators/_?email={}".format(project_id, email) - return self._api_call("delete", endpoint) + return self.client._api_call("delete", endpoint) diff --git a/frameioclient/services/teams.py b/frameioclient/services/teams.py index 629c1e02..0d0fb762 100644 --- a/frameioclient/services/teams.py +++ b/frameioclient/services/teams.py @@ -87,7 +87,7 @@ def add_members(self, 
team_id, emails): payload["batch"] = list(map(lambda email: {"email": email}, emails)) endpoint = "/batch/teams/{}/members".format(team_id) - return self._api_call("post", endpoint, payload=payload) + return self.client._api_call("post", endpoint, payload=payload) def remove_members(self, team_id, emails): """ @@ -102,4 +102,4 @@ def remove_members(self, team_id, emails): payload["batch"] = list(map(lambda email: {"email": email}, emails)) endpoint = "/batch/teams/{}/members".format(team_id) - return self._api_call("delete", endpoint, payload=payload) + return self.client._api_call("delete", endpoint, payload=payload) From 764823da72962a35e212d4a9bbd3f39bf2591647 Mon Sep 17 00:00:00 2001 From: Jeff Date: Fri, 4 Feb 2022 10:55:15 -0800 Subject: [PATCH 57/99] Add documentation builder action --- .github/actions/docs.yml | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) create mode 100644 .github/actions/docs.yml diff --git a/.github/actions/docs.yml b/.github/actions/docs.yml new file mode 100644 index 00000000..04de0c2f --- /dev/null +++ b/.github/actions/docs.yml @@ -0,0 +1,21 @@ +name: Documentation +on: + push: + branches: + - dev +jobs: + build: + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v2 + - uses: actions/checkout@master + with: + fetch-depth: 0 # otherwise, you will failed to push refs to dest repo + - name: Build and Commit + uses: sphinx-notes/pages@v2 + - name: Push changes + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: gh-pages + documentation_path: './docs' From 49613666db1691d4e012d360021cc4894314d929 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 10:59:53 -0800 Subject: [PATCH 58/99] Improve documentation, add type hints, start to break py2 --- Makefile | 5 +- docs/Makefile | 6 +- docs/classes/search.rst | 3 + docs/conf.py | 3 +- docs/requirements.txt | 4 +- examples/projects/download_project.py | 4 +- frameioclient/lib/transfer.py | 28 +++---- frameioclient/lib/transport.py | 35 +++++--- frameioclient/lib/upload.py | 29 +++---- frameioclient/lib/utils.py | 61 ++++++++++---- frameioclient/services/assets.py | 110 ++++++++++++++------------ frameioclient/services/comments.py | 46 ++++++----- frameioclient/services/links.py | 8 ++ frameioclient/services/logs.py | 8 +- frameioclient/services/projects.py | 65 ++++++++------- frameioclient/services/search.py | 56 ++++++------- frameioclient/services/teams.py | 2 +- frameioclient/services/users.py | 6 +- setup.py | 3 +- tests/py2_integration.py | 4 +- tests/py3_integration.py | 4 +- 21 files changed, 281 insertions(+), 209 deletions(-) diff --git a/Makefile b/Makefile index 0762ee5c..c6126f0e 100644 --- a/Makefile +++ b/Makefile @@ -31,5 +31,8 @@ run-benchmark: format: black frameioclient +view-docs: + cd docs && pip install -r requirements.txt && make dev + publish-docs: - cd docs && pip install -r requirements.txt && make jekyll && make publish \ No newline at end of file + cd docs && pip install -r requirements.txt && make jekyll && make publish diff --git a/docs/Makefile b/docs/Makefile index 29ca4852..88536142 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -29,4 +29,8 @@ rst: sphinx-build -b rst . dist/rst html: - sphinx-build -b html . dist/html \ No newline at end of file + sphinx-build -b html . dist/html + +dev: + sphinx-autobuild -b html . 
_build/html + diff --git a/docs/classes/search.rst b/docs/classes/search.rst index 53825d7a..67f86fa8 100644 --- a/docs/classes/search.rst +++ b/docs/classes/search.rst @@ -1,2 +1,5 @@ Search =================== + +.. autoclass:: frameioclient.Search + :members: diff --git a/docs/conf.py b/docs/conf.py index a8dd5de3..4dee1d87 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -19,7 +19,7 @@ # -- Project information ----------------------------------------------------- project = PACKAGE_TITLE -copyright = 'MIT License 2021, Frame.io' +copyright = 'MIT License 2022, Frame.io' author = AUTHOR_NAME # The full version, including alpha/beta/rc tags @@ -36,6 +36,7 @@ 'sphinx.ext.napoleon', 'sphinxcontrib.restbuilder', 'sphinx_jekyll_builder', + 'sphinx_autodoc_typehints' ] # Add any paths that contain templates here, relative to this directory. diff --git a/docs/requirements.txt b/docs/requirements.txt index 3a864abd..63a029d6 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -8,4 +8,6 @@ xxhash furo analytics-python token-bucket -speedtest-cli \ No newline at end of file +speedtest-cli +sphinx-autobuild +sphinx-autodoc-typehints \ No newline at end of file diff --git a/examples/projects/download_project.py b/examples/projects/download_project.py index 7357270c..fd492244 100644 --- a/examples/projects/download_project.py +++ b/examples/projects/download_project.py @@ -1,4 +1,4 @@ -from frameioclient.lib.utils import Utils +from frameioclient.lib.utils import FormatTypes, Utils import os from pathlib import Path @@ -34,7 +34,7 @@ def demo_project_download(project_id): # pdb.set_trace() print(f"Found {item_count} items") - print(f"Took {elapsed} second to download {Utils.format_bytes(folder_size, type='size')} for project: {client.projects.get(project_id)['name']}") + print(f"Took {elapsed} second to download {Utils.format_value(folder_size, type=FormatTypes.SIZE)} for project: {client.projects.get(project_id)['name']}") print("\n") if __name__ == "__main__": diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index ce2d8955..4a8b8005 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -3,27 +3,21 @@ import os import time from pprint import pprint -from typing import Dict, List from random import randint +from typing import Dict, List import requests -from .exceptions import ( - AssetChecksumMismatch, - AssetChecksumNotPresent, - DownloadException, -) +from .exceptions import (AssetChecksumMismatch, AssetChecksumNotPresent, + DownloadException) from .logger import SDKLogger -from .utils import Utils +from .utils import FormatTypes, Utils logger = SDKLogger("downloads") from .bandwidth import DiskBandwidth, NetworkBandwidth -from .exceptions import ( - AssetNotFullyUploaded, - DownloadException, - WatermarkIDDownloadException, -) +from .exceptions import (AssetNotFullyUploaded, DownloadException, + WatermarkIDDownloadException) from .transport import HTTPClient @@ -282,7 +276,7 @@ def _download_whole(self, url: str): print( "Beginning download -- {} -- {}".format( self.asset["name"], - Utils.format_bytes(self.downloader.filesize, type="size"), + Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), ) ) @@ -301,12 +295,12 @@ def _download_whole(self, url: str): raise e download_time = time.time() - start_time - download_speed = Utils.format_bytes( + download_speed = Utils.format_value( math.ceil(self.downloader.filesize / (download_time)) ) print( "Downloaded {} at {}".format( - 
Utils.format_bytes(self.downloader.filesize, type="size"), + Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), download_speed, ) ) @@ -386,7 +380,7 @@ def multi_thread_download(self): print( "Multi-part download -- {} -- {}".format( self.downloader.asset["name"], - Utils.format_bytes(self.downloader.filesize, type="size"), + Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), ) ) @@ -435,7 +429,7 @@ def multi_thread_download(self): # Log completion event SDKLogger("downloads").info( "Downloaded {} at {}".format( - Utils.format_bytes(self.downloader.filesize, type="size"), + Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), download_speed, ) ) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index cdc28363..3b073a25 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -1,6 +1,7 @@ import concurrent.futures import threading import time +from typing import Dict, Optional import requests from requests.adapters import HTTPAdapter @@ -13,17 +14,27 @@ from .version import ClientVersion +class HTTPMethods: + GET = 'get' + POST = 'post' + PUT = 'put' + DELETE = 'delete' + PATCH = 'patch' + HEAD = 'head' + class HTTPClient(object): """HTTP Client base that automatically handles the following: - - Shared thread/session object - - Client version headers - - Automated retries + - Shared thread/session object + - Client version headers + - Automated retries - Args: - threads (int): Number of threads to use concurrently. """ - def __init__(self, threads=default_thread_count): + def __init__(self, threads: Optional[int] = default_thread_count): + """ + :param threads: Number of threads to use concurrently. + """ + # Setup number of threads to use self.threads = threads @@ -72,7 +83,7 @@ class APIClient(HTTPClient, object): progress (bool): If True, show status bars in console. 
""" - def __init__(self, token, host, threads, progress): + def __init__(self, token: str, host: str, threads: int, progress: bool): super().__init__(threads) self.host = host self.token = token @@ -82,10 +93,10 @@ def __init__(self, token, host, threads, progress): self.session = self._get_session() self.auth_header = {"Authorization": "Bearer {}".format(self.token)} - def _format_api_call(self, endpoint): + def _format_api_call(self, endpoint: str): return "{}/v2{}".format(self.host, endpoint) - def _api_call(self, method, endpoint, payload={}, limit=None): + def _api_call(self, method, endpoint: str, payload: Dict = {}, limit: Optional[int] = None): headers = {**self.shared_headers, **self.auth_header} r = self.session.request( @@ -117,7 +128,7 @@ def _api_call(self, method, endpoint, payload={}, limit=None): return r.raise_for_status() - def get_specific_page(self, method, endpoint, payload, page): + def get_specific_page(self, method: HTTPMethods, endpoint: str, payload: Dict, page: int): """ Gets a specific page for that endpoint, used by Pagination Class @@ -127,11 +138,11 @@ def get_specific_page(self, method, endpoint, payload, page): payload (dict): Request payload page (int): What page to get """ - if method == "get": + if method == HTTPMethods.GET: endpoint = "{}?page={}".format(endpoint, page) return self._api_call(method, endpoint) - if method == "post": + if method == HTTPMethods.POST: payload["page"] = page return self._api_call(method, endpoint, payload=payload) diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 4ea37087..8819ab7d 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -1,10 +1,12 @@ -import os +import concurrent.futures import math -import requests +import os import threading -import concurrent.futures +from typing import List + +import requests -from .utils import Utils +from .utils import FormatTypes, Utils thread_local = threading.local() @@ -18,15 +20,14 @@ def __init__(self, asset=None, file=None): self.file_num = 0 self.futures = [] - def _calculate_chunks(self, total_size, chunk_count): - """Calculate chunk size + def _calculate_chunks(self, total_size: int, chunk_count: int) -> List[int]: + """ + Calculate chunk size - Args: - total_size (int): Total filesize in bytes - chunk_count (int): Total number of URL's we got back from the API + :param total_size: Total filesize in bytes + :param chunk_count: Total number of URL's we got back from the API - Returns: - chunk_offsets (list): List of chunk offsets + :return chunk_offsets: List of chunk offsets """ self.chunk_size = int(math.ceil(total_size / chunk_count)) @@ -43,7 +44,7 @@ def _get_session(self): thread_local.session = requests.Session() return thread_local.session - def _smart_read_chunk(self, chunk_offset, is_final_chunk): + def _smart_read_chunk(self, chunk_offset: int, is_final_chunk: bool) -> bytes: with open(os.path.realpath(self.file.name), "rb") as file: file.seek(chunk_offset, 0) if ( @@ -54,7 +55,7 @@ def _smart_read_chunk(self, chunk_offset, is_final_chunk): data = file.read(self.chunk_size) return data - def _upload_chunk(self, task): + def _upload_chunk(self, task) -> int: url = task[0] chunk_offset = task[1] chunk_id = task[2] @@ -141,7 +142,7 @@ def recursive_upload(self, client, folder, parent_asset_id): complete_dir_obj = os.path.join(folder, file_p) print( - "Starting {:02d}/{}, Size: {}, Name: {}".format(self.file_num, self.file_count, Utils.format_bytes(os.path.getsize(complete_dir_obj), type='size'), file_p) + "Starting 
{:02d}/{}, Size: {}, Name: {}".format(self.file_num, self.file_count, Utils.format_value(os.path.getsize(complete_dir_obj), type=FormatTypes.SIZE), file_p) ) client.assets.upload(parent_asset_id, complete_dir_obj) diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 2d9e4445..9da7d756 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -1,35 +1,43 @@ +import enum import os import re import sys +from typing import Any, Dict, Optional import xxhash KB = 1024 MB = KB * KB -ENV = os.getenv('FRAMEIO_ENVIRONMENT', 'prod') +ENV = os.getenv("FRAMEIO_ENVIRONMENT", "prod") + def ApiReference(*args, **kwargs): def inner(func): """ do operations with func """ - if ENV == 'build': - print("API Operation: {}".format(kwargs.get('operation'))) + if ENV == "build": + print("API Operation: {}".format(kwargs.get("operation"))) return func return inner +class FormatTypes(enum.Enum): + SPEED = 0 + SIZE = 1 + + class Utils: @staticmethod def stream(func, page=1, page_size=20): """ - Accepts a lambda of a call to a client list method, and streams the results until \ + Accepts a lambda of a call to a client list method, and streams the results until \ the list has been exhausted. Args: - fun (function): A 1-arity function to apply during the stream + fun (function): A 1-arity function to apply during the stream Example:: @@ -45,31 +53,37 @@ def stream(func, page=1, page_size=20): page += 1 @staticmethod - def format_bytes(size, type="speed"): + def format_value(value: int, type: FormatTypes = FormatTypes.SIZE) -> str: """ Convert bytes to KB/MB/GB/TB/s + + :param value: a numeric value + :param type: the FormatType specified """ # 2**10 = 1024 power = 2 ** 10 n = 0 power_labels = {0: "B", 1: "KB", 2: "MB", 3: "GB", 4: "TB"} - while size > power: - size /= power + while value > power: + value /= power n += 1 - formatted = " ".join((str(round(size, 2)), power_labels[n])) + formatted = " ".join((str(round(value, 2)), power_labels[n])) - if type == "speed": + if type == FormatTypes.SPEED: return formatted + "/s" - elif type == "size": + elif type == FormatTypes.SIZE: return formatted @staticmethod - def calculate_hash(file_path, progress_callback=None): + def calculate_hash(file_path: str, progress_callback: Optional[Any] = None): """ Calculate an xx64hash + + :param file_path: The path on your system to the file you'd like to checksum + :param progress_callback: A progress callback to use when you want to callback w/ progress """ xxh64_hash = xxhash.xxh64() b = bytearray(MB * 8) @@ -80,18 +94,22 @@ def calculate_hash(file_path, progress_callback=None): break xxh64_hash.update(b[:numread]) + if progress_callback: # Should only subtract 1 here when necessary, not every time! 
- progress_callback.update(float(numread - 1), force=True) + progress_callback(float(numread - 1), force=True) xxh64_digest = xxh64_hash.hexdigest() return xxh64_digest @staticmethod - def compare_items(dict1, dict2): + def compare_items(dict1: Dict, dict2: Dict) -> bool: """ Python 2 and 3 compatible way of comparing 2x dictionaries + + :param dict1: Dictionary 1 for comparison + :param dict2: Dictionary 2 for comparison """ comparison = None @@ -110,17 +128,21 @@ def compare_items(dict1, dict2): return comparison @staticmethod - def get_valid_filename(s): + def get_valid_filename(s: str) -> str: """ Strip out invalid characters from a filename using regex + + :param s: Filename to remove invalid characters from """ s = str(s).strip().replace(" ", "_") return re.sub(r"(?u)[^-\w.]", "", s) @staticmethod - def normalize_filename(fn): + def normalize_filename(fn: str) -> str: """ Normalize filename using pure python + + :param fn: Filename to normalize using pure python """ validchars = "-_.() " out = "" @@ -140,7 +162,12 @@ def normalize_filename(fn): return out @staticmethod - def format_headers(token, version): + def format_headers(token: str, version: str) -> Dict: + """[summary] + + :param token: Frame.io OAuth/Dev Token to use + :param version: The version of the frameioclient sdk to add to our HTTP header + """ return { "Authorization": "Bearer {}".format(token), "x-frameio-client": "python/{}".format(version), diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index e0be9ad0..b049c3cd 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -1,13 +1,12 @@ -import os import mimetypes +import os from frameioclient.lib.transfer import AWSClient +from ..lib import ApiReference, FrameioDownloader, FrameioUploader, constants +from ..lib.service import Service from .projects import Project -# from .helpers import FrameioHelpers -from ..lib.service import Service -from ..lib import FrameioUploader, FrameioDownloader, ApiReference, constants class Asset(Service): def _build_asset_info(self, filepath): @@ -27,7 +26,7 @@ def get(self, asset_id): """ Get an asset by id. - :Args: + Args: asset_id (string): The asset id. """ endpoint = '/assets/{}'.format(asset_id) @@ -38,13 +37,13 @@ def get_children(self, asset_id, include=[], slim=False, **kwargs): """ Get a folder. - :Args: + Args: asset_id (string): The asset id. - :Kwargs: + :Keyword Arguments: includes (list): List of includes you would like to add. - Example:: + Example:: client.assets.get_children( asset_id='1231-12414-afasfaf-aklsajflaksjfla', @@ -88,12 +87,12 @@ def create(self, parent_asset_id, **kwargs): """ Create an asset. - :Args: + Args: parent_asset_id (string): The parent asset id. - :Kwargs: + :Keyword Arguments: (optional) kwargs: additional request parameters. - Example:: + Example:: client.assets.create( parent_asset_id="123abc", @@ -107,15 +106,15 @@ def create(self, parent_asset_id, **kwargs): return self.client._api_call('post', endpoint, payload=kwargs) @ApiReference(operation="#createAsset") - def create_folder(self, parent_asset_id, name="New Folder"): + def create_folder(self, parent_asset_id: str, name: str ="New Folder"): """ Create a new folder. - :Args: - parent_asset_id (string): The parent asset id. - name (string): The name of the new folder. + Args: + parent_asset_id: The parent asset id. + name: The name of the new folder. 
- Example:: + Example:: client.assets.create_folder( parent_asset_id="123abc", @@ -126,16 +125,16 @@ def create_folder(self, parent_asset_id, name="New Folder"): return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) @ApiReference(operation="#createAsset") - def from_url(self, parent_asset_id, name, url): + def from_url(self, parent_asset_id: str, name: str, url: str): """ Create an asset from a URL. - :Args: + Args: parent_asset_id (string): The parent asset id. name (string): The filename. url (string): The remote URL. - Example:: + Example:: client.assets.from_url( parent_asset_id="123abc", @@ -143,7 +142,7 @@ def from_url(self, parent_asset_id, name, url): type="file", url="https://" ) - """ + """ payload = { "name": name, "type": "file", @@ -160,14 +159,15 @@ def update(self, asset_id, **kwargs): """ Updates an asset - :Args: + Args: asset_id (string): the asset's id - :Kwargs: + :Keyword Arguments: the fields to update - Example:: + Example:: + client.assets.update("adeffee123342", name="updated_filename.mp4") - """ + """ endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('put', endpoint, kwargs) @@ -176,14 +176,15 @@ def copy(self, destination_folder_id, **kwargs): """ Copy an asset - :Args: + Args: destination_folder_id (string): The id of the folder you want to copy into. - :Kwargs: + :Keyword Arguments: id (string): The id of the asset you want to copy. - Example:: + Example:: + client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") - """ + """ endpoint = '/assets/{}/copy'.format(destination_folder_id) return self.client._api_call('post', endpoint, kwargs) @@ -191,16 +192,24 @@ def copy(self, destination_folder_id, **kwargs): def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): """Bulk copy assets - :Args: + Args: destination_folder_id (string): The id of the folder you want to copy into. - :Kwargs: + + :Keyword Arguments: asset_list (list): A list of the asset IDs you want to copy. copy_comments (boolean): Whether or not to copy comments: True or False. - Example:: - client.assets.bulk_copy("adeffee123342", asset_list=["7ee008c5-49a2-f8b5-997d-8b64de153c30", \ - "7ee008c5-49a2-f8b5-997d-8b64de153c30"], copy_comments=True) - """ + Example:: + + client.assets.bulk_copy( + "adeffee123342", + asset_list=[ + "7ee008c5-49a2-f8b5-997d-8b64de153c30", + "7ee008c5-49a2-f8b5-997d-8b64de153c30" + ], + copy_comments=True + ) + """ payload = {"batch": []} if copy_comments: @@ -217,9 +226,9 @@ def delete(self, asset_id): """ Delete an asset - :Args: + Args: asset_id (string): the asset's id - """ + """ endpoint = '/assets/{}'.format(asset_id) return self.client._api_call('delete', endpoint) @@ -227,13 +236,14 @@ def _upload(self, asset, file): """ Upload an asset. The method will exit once the file is uploaded. - :Args: - asset (object): The asset object. - file (file): The file to upload. + Args: + asset: The asset object. + file: The file to upload. + + Example:: - Example:: client.upload(asset, open('example.mp4')) - """ + """ uploader = FrameioUploader(asset, file) uploader.upload() @@ -241,14 +251,16 @@ def upload(self, destination_id, filepath, asset=None): """ Upload a file. The method will exit once the file is uploaded. - :Args: + Args: + destination_id (uuid): The destination Project or Folder ID. filepath (string): The location of the file on your local filesystem \ that you want to upload. 
- Example:: - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ + Example:: + + client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") + """ # Check if destination is a project or folder # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided @@ -288,16 +300,16 @@ def download(self, asset, download_folder, prefix=None, multi_part=False, replac """ Download an asset. The method will exit once the file is downloaded. - :Args: + Args: asset (object): The asset object. download_folder (path): The location to download the file to. multi_part (bool): Attempt to do a multi-part download (non-WMID assets). replace (bool): Whether or not you want to replace a file if one is found at the destination path. - Example:: + Example:: client.assets.download(asset, "~./Downloads") - """ + """ downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, replace) return AWSClient(downloader, concurrency=5).multi_thread_download() @@ -306,7 +318,7 @@ def upload_folder(self, source_path, destination_id): Upload a folder full of assets, maintaining hierarchy. \ The method will exit once the file is uploaded. - :Args: + Args: filepath (path): The location of the folder on your disk. destination_id (uuid): The destination Project or Folder ID. diff --git a/frameioclient/services/comments.py b/frameioclient/services/comments.py index 8a5db008..f4cc1503 100644 --- a/frameioclient/services/comments.py +++ b/frameioclient/services/comments.py @@ -1,13 +1,16 @@ +from ..lib.utils import ApiReference from ..lib.service import Service +from typing import Union +from uuid import UUID class Comment(Service): - def create(self, asset_id, **kwargs): + @ApiReference(operation="#createComment") + def create(self, asset_id: Union[str, UUID], **kwargs): """ Create a comment. - Args: - asset_id (string): The asset id. + :param asset_id: The asset id. :Keyword Arguments: (optional) kwargs: additional request parameters. @@ -18,36 +21,36 @@ def create(self, asset_id, **kwargs): asset_id="123abc", text="Hello world" ) - """ + """ endpoint = "/assets/{}/comments".format(asset_id) return self.client._api_call("post", endpoint, payload=kwargs) - def get(self, comment_id, **kwargs): + @ApiReference(operation="#getComment") + def get(self, comment_id: Union[str, UUID], **kwargs): """ Get a comment. - Args: - comment_id (string): The comment id. - """ + :param comment_id: The comment id. + """ endpoint = "/comments/{}".format(comment_id) return self.client._api_call("get", endpoint, **kwargs) - def list(self, asset_id, **kwargs): + @ApiReference(operation="#getComments") + def list(self, asset_id: Union[str, UUID], **kwargs): """ Get an asset's comments. - Args: - asset_id (string): The asset id. - """ + :param asset_id: The asset id. + """ endpoint = "/assets/{}/comments".format(asset_id) return self.client._api_call("get", endpoint, **kwargs) - def update(self, comment_id, **kwargs): + @ApiReference(operation="#updateComment") + def update(self, comment_id: Union[str, UUID], **kwargs): """ Update a comment. - Args: - comment_id (string): The comment id. + :param comment_id: The comment id. :Keyword Arguments: (optional) kwargs: additional request parameters. 
@@ -58,20 +61,21 @@ def update(self, comment_id, **kwargs): comment_id="123abc", text="Hello world" ) - """ + """ endpoint = "/comments/{}".format(comment_id) return self.client._api_call("post", endpoint, payload=kwargs) - def delete(self, comment_id): + @ApiReference(operation="#deleteComment") + def delete(self, comment_id: Union[str, UUID]): """ Delete a comment. - Args: - comment_id (string): The comment id. - """ + :param comment_id: The comment id. + """ endpoint = "/comments/{}".format(comment_id) return self.client._api_call("delete", endpoint) + @ApiReference(operation="#createReply") def reply(self, comment_id, **kwargs): """ Reply to an existing comment. @@ -88,6 +92,6 @@ def reply(self, comment_id, **kwargs): comment_id="123abc", text="Hello world" ) - """ + """ endpoint = "/comments/{}/replies".format(comment_id) return self.client._api_call("post", endpoint, payload=kwargs) diff --git a/frameioclient/services/links.py b/frameioclient/services/links.py index e109335a..731f02c9 100644 --- a/frameioclient/services/links.py +++ b/frameioclient/services/links.py @@ -1,7 +1,9 @@ +from ..lib.utils import ApiReference from ..lib.service import Service class ReviewLink(Service): + @ApiReference(operation="#reviewLinkCreate") def create(self, project_id, **kwargs): """ Create a review link. @@ -23,6 +25,7 @@ def create(self, project_id, **kwargs): endpoint = "/projects/{}/review_links".format(project_id) return self.client._api_call("post", endpoint, payload=kwargs) + @ApiReference(operation="#reviewLinksList") def list(self, project_id): """ Get the review links of a project @@ -33,6 +36,7 @@ def list(self, project_id): endpoint = "/projects/{}/review_links".format(project_id) return self.client._api_call("get", endpoint) + @ApiReference(operation="#reviewLinkGet") def get(self, link_id, **kwargs): """ Get a single review link @@ -43,6 +47,7 @@ def get(self, link_id, **kwargs): endpoint = "/review_links/{}".format(link_id) return self.client._api_call("get", endpoint, payload=kwargs) + @ApiReference(operation="#reviewLinkItemsList") def get_assets(self, link_id): """ Get items from a single review link. @@ -59,6 +64,7 @@ def get_assets(self, link_id): endpoint = "/review_links/{}/items".format(link_id) return self.client._api_call("get", endpoint) + @ApiReference(operation="#reviewLinkItemsUpdate") def update_assets(self, link_id, **kwargs): """ Add or update assets for a review link. @@ -79,6 +85,7 @@ def update_assets(self, link_id, **kwargs): endpoint = "/review_links/{}/assets".format(link_id) return self.client._api_call("post", endpoint, payload=kwargs) + @ApiReference(operation="#reviewLinkUpdate") def update_settings(self, link_id, **kwargs): """ Updates review link settings. @@ -104,6 +111,7 @@ def update_settings(self, link_id, **kwargs): class PresentationLink(Service): + @ApiReference(operation="#createPresentation") def create(self, asset_id, **kwargs): """ Create a presentation link. diff --git a/frameioclient/services/logs.py b/frameioclient/services/logs.py index 857a51a2..b159448f 100644 --- a/frameioclient/services/logs.py +++ b/frameioclient/services/logs.py @@ -1,13 +1,15 @@ +from typing import Union +from uuid import UUID + from ..lib.service import Service class AuditLogs(Service): - def list(self, account_id): + def list(self, account_id: Union[str, UUID]): """ Get audit logs for the currently authenticated account. - Args: - account_id (uuid): Account ID you want to get audit logs for. + :param account_id: Account ID you want to get audit logs for. 
Example:: diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py index 386cd67a..fa81e174 100644 --- a/frameioclient/services/projects.py +++ b/frameioclient/services/projects.py @@ -1,14 +1,17 @@ +from typing import Union, Optional +from uuid import UUID + from ..lib.service import Service from .helpers import FrameioHelpers class Project(Service): - def create(self, team_id, **kwargs): + def create(self, team_id: Union[str, UUID], **kwargs): """ Create a project. - :Args: - team_id (string): The team id. + :param team_id: The team id. + :Kwargs: kwargs (optional): additional request parameters. @@ -23,13 +26,11 @@ def create(self, team_id, **kwargs): endpoint = "/teams/{}/projects".format(team_id) return self.client._api_call("post", endpoint, payload=kwargs) - def get(self, project_id): + def get(self, project_id: Union[str, UUID]): """ Get an individual project - :Args: - - project_id (string): The project's id + :param project_id: The project's id Example:: @@ -41,13 +42,12 @@ def get(self, project_id): endpoint = "/projects/{}".format(project_id) return self.client._api_call("get", endpoint) - def tree(self, project_id, slim): + def tree(self, project_id: Union[str, UUID], slim: Optional[bool] = False): """ Fetch a tree representation of all files/folders in a project. - :Args: - project_id (string): The project's id - slim (bool): If true, fetch only the minimum information for the following: \ + :param project_id: The project's id + :param slim: If true, fetch only the minimum information for the following: \ filename, \ filesize, \ thumbnail, \ @@ -55,7 +55,7 @@ def tree(self, project_id, slim): inserted_at (date created), \ path (represented like a filesystem) \ - Example:: + Example:: client.projects.get( project_id="123", @@ -68,15 +68,14 @@ def tree(self, project_id, slim): return FrameioHelpers(self.client).build_project_tree(project_id, slim) - def download(self, project_id, destination_directory="downloads"): + def download(self, project_id: Union[str, UUID], destination_directory="downloads"): """ Download the provided project to disk. - :Args: - project_id (uuid): The project's id. - destination_directory (string): Directory on disk that you want to download the project to. + :param project_id: The project's id. + :param destination_directory: Directory on disk that you want to download the project to. 
- Example:: + Example:: client.projects.download( project_id="123", @@ -88,14 +87,13 @@ def download(self, project_id, destination_directory="downloads"): project_id, destination=destination_directory ) - def get_collaborators(self, project_id, **kwargs): + def get_collaborators(self, project_id: Union[str, UUID], **kwargs): """ Get collaborators for a project - :Args: - project_id (uuid): The project's id + :param project_id: The project's id - Example:: + Example:: client.projects.get_collaborators( project_id="123" @@ -105,14 +103,13 @@ def get_collaborators(self, project_id, **kwargs): endpoint = "/projects/{}/collaborators?include=project_role".format(project_id) return self.client._api_call("get", endpoint, kwargs) - def get_pending_collaborators(self, project_id, **kwargs): + def get_pending_collaborators(self, project_id: Union[str, UUID], **kwargs): """ Get pending collaborators for a project - :Args: - project_id (uuid): The project's id + :param project_id: The project's id - Example:: + Example:: client.projects.get_pending_collaborators( project_id="123" @@ -122,15 +119,14 @@ def get_pending_collaborators(self, project_id, **kwargs): endpoint = "/projects/{}/pending_collaborators".format(project_id) return self.client._api_call("get", endpoint, kwargs) - def add_collaborator(self, project_id, email): + def add_collaborator(self, project_id: Union[str, UUID], email: str): """ Add Collaborator to a Project Collaborator. - :Args: - project_id (uuid): The project id - email (string): Email user's e-mail address + :param project_id: The project id + :param email: Email user's e-mail address - Example:: + Example:: client.projects.add_collaborator( project_id="123", @@ -142,13 +138,12 @@ def add_collaborator(self, project_id, email): endpoint = "/projects/{}/collaborators".format(project_id) return self.client._api_call("post", endpoint, payload=payload) - def remove_collaborator(self, project_id, email): + def remove_collaborator(self, project_id: Union[str, UUID], email: str): """ Remove Collaborator from Project. - :Args: - project_id (uuid): The Project ID. - email (string): The user's e-mail address + :param project_id: The Project ID. + :param email: The user's e-mail address Example:: @@ -158,5 +153,7 @@ def remove_collaborator(self, project_id, email): ) """ + # TODO update this function to not use query parameter based email input + endpoint = "/projects/{}/collaborators/_?email={}".format(project_id, email) return self.client._api_call("delete", endpoint) diff --git a/frameioclient/services/search.py b/frameioclient/services/search.py index ee55e0dc..b6b54cf3 100644 --- a/frameioclient/services/search.py +++ b/frameioclient/services/search.py @@ -1,48 +1,48 @@ +from typing import Optional, Union +from uuid import UUID + from ..lib.service import Service class Search(Service): def library( self, - query, - type=None, - project_id=None, - account_id=None, - team_id=None, - uploader=None, - sort=None, - filter=None, - page_size=10, - page=1, + query: str, + type: Optional[str] = None, + project_id: Union[str, UUID] = None, + account_id: Union[str, UUID] = None, + team_id: Union[str, UUID] = None, + uploader: Optional[str] = None, + sort: Optional[str] = None, + filter: Optional[str] = None, + page_size: Optional[int] = 10, + page: Optional[int] = 1, ): """ - Search for assets using the library search endpoint, documented here \ - https://developer.frame.io/docs/workflows-assets/search-for-assets. 
- - For more information check out https://developer.frame.io/api/reference/operation/librarySearchPost/. + Search for assets using the library search endpoint, documented at https://developer.frame.io/docs/workflows-assets/search-for-assets. + For more information check out https://developer.frame.io/api/reference/operation/librarySearchPost/. - :Args: - query (string): The search keyword you want to search with. - account_id (string): The account ID you want to be searching within. #TODO, confirm that this is required or not, could we use self.me? + # TODO, confirm that account_id is required or not, could we use self.me? - :Kwargs: - type (string): The type of frame.io asset you want to search: [file, folder, review_link, presentation]. - project_id (uuid): The frame.io project you want to constrain your search to. - account_id (uuid): The frame.io account want you to contrain your search to (you may only have one, but some users have 20+ that they have acces to). - team_id (uuid): The frame.io team you want to constrain your search to. - uploader (string): The name of the uploader, this includes first + last name with a space. - sort (string): The field you want to sort by. - filter (string): This is only necessary if you want to build a fully custom query, the most common functionality is exposed using other kwargs though. - page_size (int): Useful if you want to increase the number of items returned by the search API here. - page (int): The page of results you're requesting. + :param query: The search keyword you want to search with. + :param account_id: The frame.io account want you to contrain your search to (you may only have one, but some users have 20+ that they have acces to). + :param type: The type of frame.io asset you want to search: [file, folder, review_link, presentation]. + :param project_id: The frame.io project you want to constrain your search to. + :param team_id: The frame.io team you want to constrain your search to. + :param uploader: The name of the uploader, this includes first + last name with a space. + :param sort: The field you want to sort by. + :param filter: This is only necessary if you want to build a fully custom query, the most common functionality is exposed using other kwargs though. + :param page_size: Useful if you want to increase the number of items returned by the search API here. + :param page: The page of results you're requesting. 
Example:: + client.assets.search( query="Final", type="file", sort="name" ) - """ + """ # Define base payload payload = { diff --git a/frameioclient/services/teams.py b/frameioclient/services/teams.py index 0d0fb762..6c3ee306 100644 --- a/frameioclient/services/teams.py +++ b/frameioclient/services/teams.py @@ -20,7 +20,7 @@ def create(self, account_id, **kwargs): name="My Awesome Project", ) """ - warnings.warn("Note: Your token must support team.create scopes") + warnings.warn("Note: Your token must support the team.create scope") endpoint = "/accounts/{}/teams".format(account_id) return self.client._api_call("post", endpoint, payload=kwargs) diff --git a/frameioclient/services/users.py b/frameioclient/services/users.py index cf64fb82..f23dc213 100644 --- a/frameioclient/services/users.py +++ b/frameioclient/services/users.py @@ -1,3 +1,5 @@ +from typing import Dict + from ..lib.service import Service @@ -9,5 +11,7 @@ def get_me(self): return self.client._api_call("get", "/me") def get_accounts(self): - """Get a list of accounts the user has access to""" + """ + Get a list of accounts the user has access to + """ return self.client._api_call("get", "/accounts") diff --git a/setup.py b/setup.py index d09a041c..2956df67 100644 --- a/setup.py +++ b/setup.py @@ -25,11 +25,10 @@ def run(self): setuptools.setup( name='frameioclient', version=version, - python_requires='>=2.7.16, <4', + python_requires='>=3.6.5, <4', install_requires=[ 'analytics-python', 'enlighten', - 'futures; python_version == "2.7"', 'importlib-metadata ~= 1.0 ; python_version < "3.8"', 'requests', 'token-bucket', diff --git a/tests/py2_integration.py b/tests/py2_integration.py index b1cb2ca2..338f6351 100644 --- a/tests/py2_integration.py +++ b/tests/py2_integration.py @@ -109,7 +109,7 @@ def test_download(client, override=False): client.assets.download(asset, download_dir, multi_part=True) download_time = time.time() - start_time - download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) + download_speed = Utils.format_value(ceil(asset['filesize']/(download_time))) print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) @@ -153,7 +153,7 @@ def test_upload(client): client.assets.upload(new_parent_id, ul_abs_path) upload_time = time.time() - start_time - upload_speed = Utils.format_bytes(ceil(filesize/(upload_time))) + upload_speed = Utils.format_value(ceil(filesize/(upload_time))) print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) diff --git a/tests/py3_integration.py b/tests/py3_integration.py index c294aa89..1f89776e 100644 --- a/tests/py3_integration.py +++ b/tests/py3_integration.py @@ -109,7 +109,7 @@ def test_download(client: FrameioClient, override=False): client.assets.download(asset, download_dir, multi_part=True) download_time = time.time() - start_time - download_speed = Utils.format_bytes(ceil(asset['filesize']/(download_time))) + download_speed = Utils.format_value(ceil(asset['filesize']/(download_time))) print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) @@ -153,7 +153,7 @@ def test_upload(client: FrameioClient): client.assets.upload(new_parent_id, ul_abs_path) upload_time = time.time() - start_time - upload_speed = Utils.format_bytes(ceil(filesize/(upload_time))) + upload_speed = Utils.format_value(ceil(filesize/(upload_time))) print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), 
upload_time, upload_speed)) From 09a35e8a2fbd4ba8fa0db59a6cd3edb6bf948e97 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:01:47 -0800 Subject: [PATCH 59/99] Fix action trigger branch --- .github/actions/docs.yml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/.github/actions/docs.yml b/.github/actions/docs.yml index 04de0c2f..2b6719ac 100644 --- a/.github/actions/docs.yml +++ b/.github/actions/docs.yml @@ -2,20 +2,20 @@ name: Documentation on: push: branches: - - dev + - develop jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/setup-python@v2 - - uses: actions/checkout@master - with: - fetch-depth: 0 # otherwise, you will failed to push refs to dest repo - - name: Build and Commit - uses: sphinx-notes/pages@v2 - - name: Push changes - uses: ad-m/github-push-action@master - with: - github_token: ${{ secrets.GITHUB_TOKEN }} - branch: gh-pages - documentation_path: './docs' + - uses: actions/setup-python@v2 + - uses: actions/checkout@master + with: + fetch-depth: 0 # otherwise, you will failed to push refs to dest repo + - name: Build and Commit + uses: sphinx-notes/pages@v2 + - name: Push changes + uses: ad-m/github-push-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + branch: gh-pages + documentation_path: "./docs" From 03ae74df89c2d5c6b544d86924dc7f3185bbd038 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:02:38 -0800 Subject: [PATCH 60/99] Tweak github action config again --- .github/{actions => workflows}/docs.yml | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/{actions => workflows}/docs.yml (100%) diff --git a/.github/actions/docs.yml b/.github/workflows/docs.yml similarity index 100% rename from .github/actions/docs.yml rename to .github/workflows/docs.yml From 2cb8924fb84aa933ad8c515db2588af39dd67910 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:04:57 -0800 Subject: [PATCH 61/99] Directly assign requirements_path --- .github/workflows/docs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 2b6719ac..3bb699b6 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -19,3 +19,4 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages documentation_path: "./docs" + requirements_path: "./requirements.txt" From 751f743c4db0e07c6dc732a7328defc1912dc2e3 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:06:37 -0800 Subject: [PATCH 62/99] Tweak requirements.txt path --- .github/workflows/docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 3bb699b6..47360253 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -19,4 +19,4 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages documentation_path: "./docs" - requirements_path: "./requirements.txt" + requirements_path: "requirements.txt" From 224ad411fc0789ea27abf2fdfd8e50aa145d6e3a Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:07:46 -0800 Subject: [PATCH 63/99] Fix requirements.txt installation directory --- .github/workflows/docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 47360253..c58fe843 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -19,4 +19,4 @@ jobs: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages 
documentation_path: "./docs" - requirements_path: "requirements.txt" + requirements_path: "./docs/requirements.txt" From b8e4c262c9995ddaaf410bcd4029e9362d5a3cfc Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:09:20 -0800 Subject: [PATCH 64/99] Tweak config again --- .github/workflows/docs.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index c58fe843..5e0ae4f3 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -18,5 +18,4 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - documentation_path: "./docs" requirements_path: "./docs/requirements.txt" From a88ee1904b7991bf7ffef9da21db25d7951cda79 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:12:05 -0800 Subject: [PATCH 65/99] Try adding repository_path --- .github/workflows/docs.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 5e0ae4f3..2cfc8171 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -18,4 +18,6 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - requirements_path: "./docs/requirements.txt" + repository_path: '.' + documentation_path: './docs' + requirements_path: './docs/requirements.txt' From 43a25814f0dd38ee70ea35ed0f30489d86ca9257 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:14:45 -0800 Subject: [PATCH 66/99] Disable telemetry for now --- frameioclient/client.py | 7 ++++--- frameioclient/lib/__init__.py | 2 +- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/frameioclient/client.py b/frameioclient/client.py index 871eac87..c218d4b6 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -5,7 +5,8 @@ """ from .config import Config -from .lib import APIClient, ClientVersion, FrameioDownloader, Telemetry +from .lib import APIClient, ClientVersion, FrameioDownloader +# from .lib import Telemetry from .services import * @@ -23,8 +24,8 @@ def __init__( def me(self): return self.users.get_me() - def telemetry(self): - return Telemetry(self) + # def telemetry(self): + # return Telemetry(self) def _auth(self): return self.token diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index 66f3854d..330f5995 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ -1,7 +1,7 @@ from .constants import * from .exceptions import * from .logger import SDKLogger -from .telemetry import Telemetry +# from .telemetry import Telemetry from .version import ClientVersion from .upload import FrameioUploader from .transport import APIClient From 028885c380aaef1e37dbc32cc3b039e63d9a75bc Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:15:55 -0800 Subject: [PATCH 67/99] Trying again w/ simpler requirements_path --- .github/workflows/docs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 2cfc8171..4b200312 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -18,6 +18,4 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - repository_path: '.' 
- documentation_path: './docs' requirements_path: './docs/requirements.txt' From 29a3d023edb9064a7f095ca4658ce4bf9418d005 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:17:04 -0800 Subject: [PATCH 68/99] Trying new syntax --- .github/workflows/docs.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 4b200312..9997d20e 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -18,4 +18,5 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - requirements_path: './docs/requirements.txt' + documentation_path: 'docs/' + requirements_path: 'docs/requirements.txt' From d6f324275f1d9de184494b94ebd30c5a875f56ba Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:19:55 -0800 Subject: [PATCH 69/99] Trying the same thing again --- .github/workflows/docs.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 9997d20e..dba1f574 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -18,5 +18,5 @@ jobs: with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - documentation_path: 'docs/' - requirements_path: 'docs/requirements.txt' + documentation_path: './docs' + requirements_path: './docs/requirements.txt' From 0c239e9bdfe79e109bb9c4110c7eea5c50895a74 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:23:29 -0800 Subject: [PATCH 70/99] Apply inputs to the correct action --- .github/workflows/docs.yml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index dba1f574..19ec9d34 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -13,10 +13,11 @@ jobs: fetch-depth: 0 # otherwise, you will failed to push refs to dest repo - name: Build and Commit uses: sphinx-notes/pages@v2 + with: + documentation_path: './docs' + requirements_path: './docs/requirements.txt' - name: Push changes uses: ad-m/github-push-action@master with: github_token: ${{ secrets.GITHUB_TOKEN }} branch: gh-pages - documentation_path: './docs' - requirements_path: './docs/requirements.txt' From fe539b6e8cd4e45dc57b722819075f93a4328b89 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:23:52 -0800 Subject: [PATCH 71/99] Rename docs workflow -> documentation --- .github/workflows/{docs.yml => documentation.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{docs.yml => documentation.yml} (100%) diff --git a/.github/workflows/docs.yml b/.github/workflows/documentation.yml similarity index 100% rename from .github/workflows/docs.yml rename to .github/workflows/documentation.yml From c4615a0e1c4e6a962e57b22dfb449104446e60f0 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:28:07 -0800 Subject: [PATCH 72/99] Disable python 2.x builds --- .circleci/config.yml | 56 ++++++++++++++++++++++---------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 8eae902e..1245fe52 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -15,7 +15,7 @@ workflows: - build matrix: parameters: - python-version: ["2.7.16", "2.7.18", "3.5.10", "3.6.5", "3.7.7", "3.8.6", "3.9.3", "latest"] + python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "latest"] - hold: type: approval @@ -33,10 +33,10 @@ workflows: requires: - hold - - docs: - requires: - 
- deploy - - build + # - docs: + # requires: + # - deploy + # - build # upload_test: # triggers: @@ -113,32 +113,32 @@ jobs: cd /tmp/artifact twine upload dist/* - docs: - docker: - - image: circleci/python:latest + # docs: + # docker: + # - image: circleci/python:latest - steps: - - attach_workspace: - at: /tmp/artifact - name: Attach build artifact + # steps: + # - attach_workspace: + # at: /tmp/artifact + # name: Attach build artifact - - run: - name: Install dependencies - command: | - cd /tmp/artifact/docs - pip install -r requirements.txt + # - run: + # name: Install dependencies + # command: | + # cd /tmp/artifact/docs + # pip install -r requirements.txt - - run: - name: Build autodocs - command: | - cd /tmp/artifact/docs - make jekyll - - - run: - name: Publish autodocs - command: | - cd /tmp/artifact/docs - python publish.py + # - run: + # name: Build autodocs + # command: | + # cd /tmp/artifact/docs + # make jekyll + + # - run: + # name: Publish autodocs + # command: | + # cd /tmp/artifact/docs + # python publish.py deploy: docker: From 5c77bb6eaf8c3c490628ba0a85fda41eeae705f7 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:34:43 -0800 Subject: [PATCH 73/99] Replace .format() w/ f-strings --- examples/assets/asset_scraper.py | 12 ++++----- frameioclient/lib/download.py | 4 +-- frameioclient/lib/telemetry.py | 2 +- frameioclient/lib/transfer.py | 42 ++++++++++++++++---------------- frameioclient/lib/transport.py | 31 ++++++++++++----------- frameioclient/lib/utils.py | 4 +-- setup.py | 4 +-- 7 files changed, 50 insertions(+), 49 deletions(-) diff --git a/examples/assets/asset_scraper.py b/examples/assets/asset_scraper.py index e0ea26fc..082d8595 100644 --- a/examples/assets/asset_scraper.py +++ b/examples/assets/asset_scraper.py @@ -51,9 +51,9 @@ def get_projects_from_team( # Add project_name and team_name to the dict proj["project_name"] = proj.get("name") proj["team_name"] = team_name - print("Debug: Found project: {}".format(proj["project_name"])) + print(f"Debug: Found project: {proj['project_name']}") projects_in_team.append(proj) - print("Debug: projects in team now: {}".format(len(projects_in_team))) + print(f"Debug: projects in team now: {len(projects_in_team)}") return projects_in_team @@ -64,10 +64,10 @@ def get_projects_from_account(client) -> List[Dict]: teams = get_teams_from_account(client) for team_id, team_name in teams.items(): - print("Debug: === Found team: {} ===".format(team_name)) + print(f"Debug: === Found team: {team_name} ===") projects_in_team = get_projects_from_team(client, team_id, team_name) projects_in_account.extend(projects_in_team) - print("Debug: projects in account now: {}".format(len(projects_in_account))) + print(f"Debug: projects in account now: {len(projects_in_account)}") return projects_in_account @@ -81,7 +81,7 @@ def scrape_asset_data_from_projects( """ assets_in_projects = [] for project in projects: - print("Debug: Scanning project: {} for assets".format(project["name"])) + print(f"Debug: Scanning project: {project['name']} for assets") assets_in_project = [] proj_root_asset_id = project.get("root_asset_id") assets_in_project = scrape_asset_data( @@ -182,7 +182,7 @@ def write_assets_to_csv(asset_list: List[Dict], filename: str) -> None: for a in asset_list: flat_assets_list.append(flatten_dict(a)) - with open("asset_record_for_account_id-{}".format(filename), "w") as f: + with open(f"asset_record_for_account_id-{filename}", "w") as f: f_csv = csv.DictWriter(f, headers, extrasaction="ignore") f_csv.writeheader() 
f_csv.writerows(flat_assets_list) diff --git a/frameioclient/lib/download.py b/frameioclient/lib/download.py index 4308f435..a96dca46 100644 --- a/frameioclient/lib/download.py +++ b/frameioclient/lib/download.py @@ -72,7 +72,7 @@ def get_path(self): def _evaluate_asset(self): if self.asset.get("_type") != "file": raise DownloadException( - message="Unsupport Asset type: {}".format(self.asset.get("_type")) + message=f"Unsupport Asset type: {self.asset.get('_type')}" ) # This logic may block uploads that were started before this field was introduced @@ -98,7 +98,7 @@ def _create_file_stub(self): return True def _get_path(self): - logger.info("prefix: {}".format(self.prefix)) + logger.info(f"prefix: {self.prefix}") if self.prefix != None: self.filename = self.prefix + self.filename diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py index 35a079b4..ab57be5c 100644 --- a/frameioclient/lib/telemetry.py +++ b/frameioclient/lib/telemetry.py @@ -31,7 +31,7 @@ def build_context(self): def push(self, event_name, properties): self.logger.info( - ("Pushing '{}' event to segment".format(event_name), properties) + (f"Pushing '{event_name}' event to segment", properties) ) try: diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index 4a8b8005..da291fec 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -8,16 +8,22 @@ import requests -from .exceptions import (AssetChecksumMismatch, AssetChecksumNotPresent, - DownloadException) +from .exceptions import ( + AssetChecksumMismatch, + AssetChecksumNotPresent, + DownloadException, +) from .logger import SDKLogger from .utils import FormatTypes, Utils logger = SDKLogger("downloads") from .bandwidth import DiskBandwidth, NetworkBandwidth -from .exceptions import (AssetNotFullyUploaded, DownloadException, - WatermarkIDDownloadException) +from .exceptions import ( + AssetNotFullyUploaded, + DownloadException, + WatermarkIDDownloadException, +) from .transport import HTTPClient @@ -71,7 +77,7 @@ def get_path(self): def _evaluate_asset(self): if self.asset.get("_type") != "file": raise DownloadException( - message="Unsupport Asset type: {}".format(self.asset.get("_type")) + message=f"Unsupport Asset type: {self.asset.get('_type')}" ) # This logic may block uploads that were started before this field was introduced @@ -196,7 +202,7 @@ def __init__(self, downloader: FrameioDownloader, concurrency=None, progress=Tru self.bytes_completed = 0 self.downloader = downloader self.futures = [] - self.original = self.downloader.asset['original'] + self.original = self.downloader.asset["original"] # Ensure this is a valid number before assigning if concurrency is not None and type(concurrency) == int and concurrency > 0: @@ -299,10 +305,7 @@ def _download_whole(self, url: str): math.ceil(self.downloader.filesize / (download_time)) ) print( - "Downloaded {} at {}".format( - Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), - download_speed, - ) + f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {download_speed}" ) return self.destination, download_speed @@ -378,10 +381,7 @@ def multi_thread_download(self): in_byte = 0 # Set initially here, but then override print( - "Multi-part download -- {} -- {}".format( - self.downloader.asset["name"], - Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), - ) + f"Multi-part download -- {self.downloader.asset['name']} -- {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)}" ) 
with concurrent.futures.ThreadPoolExecutor( @@ -392,7 +392,7 @@ def multi_thread_download(self): out_byte = offset * (i + 1) # Create task tuple - task = (self.downloader.asset['original'], in_byte, out_byte, i) + task = (self.downloader.asset["original"], in_byte, out_byte, i) # Stagger start for each chunk by 0.1 seconds if i < self.concurrency: @@ -423,22 +423,22 @@ def multi_thread_download(self): raise AssetChecksumNotPresent # Calculate the file hash - if Utils.calculate_hash(self.destination) != self.downloader.original_checksum: + if ( + Utils.calculate_hash(self.destination) + != self.downloader.original_checksum + ): raise AssetChecksumMismatch # Log completion event SDKLogger("downloads").info( - "Downloaded {} at {}".format( - Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE), - download_speed, - ) + f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {download_speed}" ) # Submit telemetry transfer_stats = { "speed": download_speed, "time": download_time, - "cdn": AWSClient.check_cdn(self.original) + "cdn": AWSClient.check_cdn(self.original), } # Event(self.user_id, 'python-sdk-download-stats', transfer_stats) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 3b073a25..81baccaf 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -15,12 +15,13 @@ class HTTPMethods: - GET = 'get' - POST = 'post' - PUT = 'put' - DELETE = 'delete' - PATCH = 'patch' - HEAD = 'head' + GET = "get" + POST = "post" + PUT = "put" + DELETE = "delete" + PATCH = "patch" + HEAD = "head" + class HTTPClient(object): """HTTP Client base that automatically handles the following: @@ -41,9 +42,7 @@ def __init__(self, threads: Optional[int] = default_thread_count): # Initialize empty thread object self.thread_local = None self.client_version = ClientVersion.version() - self.shared_headers = { - "x-frameio-client": "python/{}".format(self.client_version) - } + self.shared_headers = {"x-frameio-client": f"python/{self.client_version}"} # Configure retry strategy (very broad right now) self.retry_strategy = Retry( @@ -91,12 +90,14 @@ def __init__(self, token: str, host: str, threads: int, progress: bool): self.progress = progress self._initialize_thread() self.session = self._get_session() - self.auth_header = {"Authorization": "Bearer {}".format(self.token)} + self.auth_header = {"Authorization": f"Bearer {self.token}"} def _format_api_call(self, endpoint: str): - return "{}/v2{}".format(self.host, endpoint) + return f"{self.host}/v2{endpoint}" - def _api_call(self, method, endpoint: str, payload: Dict = {}, limit: Optional[int] = None): + def _api_call( + self, method, endpoint: str, payload: Dict = {}, limit: Optional[int] = None + ): headers = {**self.shared_headers, **self.auth_header} r = self.session.request( @@ -128,7 +129,9 @@ def _api_call(self, method, endpoint: str, payload: Dict = {}, limit: Optional[i return r.raise_for_status() - def get_specific_page(self, method: HTTPMethods, endpoint: str, payload: Dict, page: int): + def get_specific_page( + self, method: HTTPMethods, endpoint: str, payload: Dict, page: int + ): """ Gets a specific page for that endpoint, used by Pagination Class @@ -139,7 +142,7 @@ def get_specific_page(self, method: HTTPMethods, endpoint: str, payload: Dict, p page (int): What page to get """ if method == HTTPMethods.GET: - endpoint = "{}?page={}".format(endpoint, page) + endpoint = "{endpoint}?page={page}" return self._api_call(method, endpoint) if method == 
HTTPMethods.POST: diff --git a/frameioclient/lib/utils.py b/frameioclient/lib/utils.py index 9da7d756..3d18b670 100644 --- a/frameioclient/lib/utils.py +++ b/frameioclient/lib/utils.py @@ -169,8 +169,8 @@ def format_headers(token: str, version: str) -> Dict: :param version: The version of the frameioclient sdk to add to our HTTP header """ return { - "Authorization": "Bearer {}".format(token), - "x-frameio-client": "python/{}".format(version), + "Authorization": f"Bearer {token}", + "x-frameio-client": f"python/{version}", } diff --git a/setup.py b/setup.py index 2956df67..f04c7789 100644 --- a/setup.py +++ b/setup.py @@ -17,9 +17,7 @@ def run(self): tag = os.getenv('CIRCLE_TAG') if tag != version: - info = "Git tag: {0} does not match the version of this app: {1}".format( - tag, version - ) + info = f"Git tag: {tag} does not match the version of this app: {version}" sys.exit(info) setuptools.setup( From 03a1029af33c18bf666d76955f6654ad3c2fb3eb Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:37:28 -0800 Subject: [PATCH 74/99] Force docs to develop branch --- .github/workflows/documentation.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/documentation.yml b/.github/workflows/documentation.yml index 19ec9d34..994a5013 100644 --- a/.github/workflows/documentation.yml +++ b/.github/workflows/documentation.yml @@ -11,6 +11,7 @@ jobs: - uses: actions/checkout@master with: fetch-depth: 0 # otherwise, you will failed to push refs to dest repo + ref: develop - name: Build and Commit uses: sphinx-notes/pages@v2 with: From edd8c9cd2c46f634de7aa3d32992a1ebb6963e99 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:40:39 -0800 Subject: [PATCH 75/99] Fix requests retry config --- frameioclient/lib/transport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 81baccaf..6dda6e16 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -49,7 +49,7 @@ def __init__(self, threads: Optional[int] = default_thread_count): total=100, backoff_factor=2, status_forcelist=retryable_statuses, - allowed_methods=["GET", "POST", "PUT", "GET", "DELETE"], + method_whitelist=["GET", "POST", "PUT", "GET", "DELETE"], ) # Create real thread From 8e09f899fc6ef227a731b9d562a3d02dc92c3fd1 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:45:57 -0800 Subject: [PATCH 76/99] Remove < py3.6 compat --- setup.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/setup.py b/setup.py index f04c7789..64a68ee5 100644 --- a/setup.py +++ b/setup.py @@ -52,10 +52,6 @@ def run(self): 'Topic :: Multimedia :: Video', 'Topic :: Software Development :: Libraries', 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', From 130b08368d3bb19beafbf89587167fd404c74502 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 11:46:27 -0800 Subject: [PATCH 77/99] =?UTF-8?q?Bump=20version:=201.2.0=20=E2=86=92=202.0?= =?UTF-8?q?.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .bumpversion.cfg | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index 484cfe2d..44db103a 
100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 1.2.0 +current_version = 2.0.0 commit = True tag = True diff --git a/setup.py b/setup.py index 64a68ee5..b39899c4 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools.command.install import install -version='1.2.0' +version='2.0.0' with open("README.md", "r") as f: long_description = f.read() From 11d4102bf768034d17680c475b1188e72644740b Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 12:41:11 -0800 Subject: [PATCH 78/99] Add types, improve docstrings, add asset versioning --- frameioclient/client.py | 2 +- frameioclient/services/assets.py | 636 ++++++++++++++++------------- frameioclient/services/comments.py | 52 ++- 3 files changed, 393 insertions(+), 297 deletions(-) diff --git a/frameioclient/client.py b/frameioclient/client.py index c218d4b6..15406fac 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -13,7 +13,7 @@ class FrameioClient(APIClient): def __init__( self, - token, + token: str, host: str = Config.api_host, threads: int = Config.default_concurrency, progress=False, diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index b049c3cd..db5f522c 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -1,5 +1,7 @@ import mimetypes import os +from typing import Dict, List, Optional, Union +from uuid import UUID from frameioclient.lib.transfer import AWSClient @@ -9,333 +11,399 @@ class Asset(Service): - def _build_asset_info(self, filepath): - full_path = os.path.abspath(filepath) - - file_info = { - "filepath": full_path, - "filename": os.path.basename(full_path), - "filesize": os.path.getsize(full_path), - "mimetype": mimetypes.guess_type(full_path)[0] - } - - return file_info - - @ApiReference(operation="#getAsset") - def get(self, asset_id): - """ - Get an asset by id. - - Args: - asset_id (string): The asset id. - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('get', endpoint) - - @ApiReference(operation="#getAssets") - def get_children(self, asset_id, include=[], slim=False, **kwargs): - """ - Get a folder. - - Args: - asset_id (string): The asset id. - - :Keyword Arguments: - includes (list): List of includes you would like to add. - - Example:: - - client.assets.get_children( - asset_id='1231-12414-afasfaf-aklsajflaksjfla', - include=['review_links','cover_asset','creator','presentation'] - ) - """ - endpoint = '/assets/{}/children'.format(asset_id) - - if slim == True: - query_params = '' - - if len(include) > 0: - query_params += '?include={}'.format(include.join(',')) - else: - # Always include children - query_params += '?' + 'include=children' + def _build_asset_info(self, filepath: str) -> Dict: + full_path = os.path.abspath(filepath) - # Only fields - query_params += '&' + 'only_fields=' + ','.join(constants.asset_excludes['only_fields']) + file_info = { + "filepath": full_path, + "filename": os.path.basename(full_path), + "filesize": os.path.getsize(full_path), + "mimetype": mimetypes.guess_type(full_path)[0], + } - # # Drop includes - query_params += '&' + 'drop_includes=' + ','.join(constants.asset_excludes['drop_includes']) + return file_info - # # Hard drop fields - query_params += '&' + 'hard_drop_fields=' + ','.join(constants.asset_excludes['hard_drop_fields']) + @ApiReference(operation="#getAsset") + def get(self, asset_id: Union[str, UUID]): + """ + Get an asset by id. 
- # Excluded fields - # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) + :param asset_id: The asset id. - # # Sort by inserted_at - # query_params += '&' + 'sort=-inserted_at' + Example:: - endpoint += query_params + client.assets.get( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + ) - # print("Final URL", endpoint) - - return self.client._api_call('get', endpoint, kwargs) + """ + endpoint = "/assets/{}".format(asset_id) + return self.client._api_call("get", endpoint) + + @ApiReference(operation="#getAssets") + def get_children( + self, + asset_id: Union[str, UUID], + includes: Optional[List] = [], + slim: Optional[bool] = False, + **kwargs, + ): + """ + Get a folder. + + :param asset_id: The asset id. + + :Keyword Arguments: + includes (list): List of includes you would like to add. + + Example:: + + client.assets.get_children( + asset_id='1231-12414-afasfaf-aklsajflaksjfla', + include=['review_links','cover_asset','creator','presentation'] + ) + """ + endpoint = "/assets/{}/children".format(asset_id) + + if slim == True: + query_params = "" + + if len(includes) > 0: + query_params += "?include={}".format(includes.join(",")) + else: + # Always include children + query_params += "?" + "include=children" + + # Only fields + query_params += ( + "&" + "only_fields=" + ",".join(constants.asset_excludes["only_fields"]) + ) + + # # Drop includes + query_params += ( + "&" + + "drop_includes=" + + ",".join(constants.asset_excludes["drop_includes"]) + ) + + # # Hard drop fields + query_params += ( + "&" + + "hard_drop_fields=" + + ",".join(constants.asset_excludes["hard_drop_fields"]) + ) + + # Excluded fields + # query_params += '&' + 'excluded_fields=' + ','.join(constants.asset_excludes['excluded_fields']) + + # # Sort by inserted_at + # query_params += '&' + 'sort=-inserted_at' + + endpoint += query_params + + # print("Final URL", endpoint) + + return self.client._api_call("get", endpoint, kwargs) + + @ApiReference(operation="#createAsset") + def create( + self, + parent_asset_id: Union[str, UUID], + name: str, + type: Optional[str] = "file", + filetype: Optional[str] = None, + filesize: Optional[int] = None, + ): + """ + Create an asset. + + :param parent_asset_id: The parent asset id + :param name: The asset's display name + :param type: The type of asset ('file', 'folder') + :param filesize: The size of the asset in bytes + :param filetype: The MIME-type of the asset + + Example:: + + client.assets.create( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + filetype="video/mp4", + filesize=123456 + ) + """ + kwargs = { + "name": name, + "type": type, + "filesize": filesize, + "filetype": filetype, + } + + endpoint = "/assets/{}/children".format(parent_asset_id) + return self.client._api_call("post", endpoint, payload=kwargs) + + @ApiReference(operation="#createAsset") + def create_folder(self, parent_asset_id: str, name: str = "New Folder"): + """ + Create a new folder. + + :param parent_asset_id: The parent asset id. + :param name: The name of the new folder. + + Example:: + + client.assets.create_folder( + parent_asset_id="123abc", + name="ExampleFile.mp4", + ) + """ + endpoint = "/assets/{}/children".format(parent_asset_id) + return self.client._api_call( + "post", endpoint, payload={"name": name, "type": "folder"} + ) - @ApiReference(operation="#createAsset") - def create(self, parent_asset_id, **kwargs): - """ - Create an asset. 
+ @ApiReference(operation="#createAsset") + def from_url(self, parent_asset_id: Union[str, UUID], name: str, url: str): + """ + Create an asset from a URL. - Args: - parent_asset_id (string): The parent asset id. - :Keyword Arguments: - (optional) kwargs: additional request parameters. + :param parent_asset_id: The parent asset id. + :param name: The filename. + :param url: The remote URL. - Example:: + Example:: - client.assets.create( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - filetype="video/mp4", - filesize=123456 - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=kwargs) + client.assets.from_url( + parent_asset_id="123abc", + name="ExampleFile.mp4", + type="file", + url="https://" + ) + """ + payload = {"name": name, "type": "file", "source": {"url": url}} + + endpoint = "/assets/{}/children".format(parent_asset_id) + return self.client._api_call("post", endpoint, payload=payload) + + @ApiReference(operation="#updateAsset") + def update(self, asset_id: Union[str, UUID], **kwargs): + """ + Updates an asset + + :param asset_id: The asset's id - @ApiReference(operation="#createAsset") - def create_folder(self, parent_asset_id: str, name: str ="New Folder"): - """ - Create a new folder. + :Keyword Arguments: + the fields to update - Args: - parent_asset_id: The parent asset id. - name: The name of the new folder. + Example:: - Example:: + client.assets.update("adeffee123342", name="updated_filename.mp4") + """ + endpoint = "/assets/{}".format(asset_id) + return self.client._api_call("put", endpoint, kwargs) - client.assets.create_folder( - parent_asset_id="123abc", - name="ExampleFile.mp4", - ) - """ - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload={"name": name, "type":"folder"}) - - @ApiReference(operation="#createAsset") - def from_url(self, parent_asset_id: str, name: str, url: str): - """ - Create an asset from a URL. - - Args: - parent_asset_id (string): The parent asset id. - name (string): The filename. - url (string): The remote URL. - - Example:: - - client.assets.from_url( - parent_asset_id="123abc", - name="ExampleFile.mp4", - type="file", - url="https://" - ) - """ - payload = { - "name": name, - "type": "file", - "source": { - "url": url - } - } - - endpoint = '/assets/{}/children'.format(parent_asset_id) - return self.client._api_call('post', endpoint, payload=payload) - - @ApiReference(operation="#updateAsset") - def update(self, asset_id, **kwargs): - """ - Updates an asset - - Args: - asset_id (string): the asset's id - :Keyword Arguments: - the fields to update - - Example:: - - client.assets.update("adeffee123342", name="updated_filename.mp4") - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('put', endpoint, kwargs) - - @ApiReference(operation="#copyAsset") - def copy(self, destination_folder_id, **kwargs): - """ - Copy an asset - - Args: - destination_folder_id (string): The id of the folder you want to copy into. - :Keyword Arguments: - id (string): The id of the asset you want to copy. 
- - Example:: - - client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") - """ - endpoint = '/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, kwargs) - - @ApiReference(operation="#batchCopyAsset") - def bulk_copy(self, destination_folder_id, asset_list=[], copy_comments=False): - """Bulk copy assets - - Args: - destination_folder_id (string): The id of the folder you want to copy into. - - :Keyword Arguments: - asset_list (list): A list of the asset IDs you want to copy. - copy_comments (boolean): Whether or not to copy comments: True or False. - - Example:: - - client.assets.bulk_copy( - "adeffee123342", - asset_list=[ - "7ee008c5-49a2-f8b5-997d-8b64de153c30", - "7ee008c5-49a2-f8b5-997d-8b64de153c30" - ], - copy_comments=True - ) - """ - payload = {"batch": []} + @ApiReference(operation="#copyAsset") + def copy( + self, destination_folder_id: Union[str, UUID], target_asset_id: Union[str, UUID] + ): + """ + Copy an asset + + :param destination_folder_id: The id of the folder you want to copy into. + :param target_asset_id: The id of the asset you want to copy. + + Example:: + + client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") + """ + endpoint = "/assets/{}/copy".format(destination_folder_id) + return self.client._api_call("post", endpoint, kwargs) - if copy_comments: - payload['copy_comments'] = "all" + @ApiReference(operation="#batchCopyAsset") + def bulk_copy( + self, + destination_folder_id: Union[str, UUID], + asset_list: Optional[List] = [], + copy_comments: Optional[bool] = False, + ): + """ + Bulk copy assets - for asset in asset_list: - payload['batch'].append({"id": asset}) + :param destination_folder_id: The id of the folder you want to copy into. + :param asset_list: A list of the asset IDs you want to copy. + :param copy_comments: Whether or not to copy comments: True or False. - endpoint = '/batch/assets/{}/copy'.format(destination_folder_id) - return self.client._api_call('post', endpoint, payload) + Example:: - @ApiReference(operation="#deleteAsset") - def delete(self, asset_id): - """ - Delete an asset + client.assets.bulk_copy( + "adeffee123342", + asset_list=[ + "7ee008c5-49a2-f8b5-997d-8b64de153c30", + "7ee008c5-49a2-f8b5-997d-8b64de153c30" + ], + copy_comments=True + ) + """ + payload = {"batch": []} - Args: - asset_id (string): the asset's id - """ - endpoint = '/assets/{}'.format(asset_id) - return self.client._api_call('delete', endpoint) + if copy_comments: + payload["copy_comments"] = "all" - def _upload(self, asset, file): - """ - Upload an asset. The method will exit once the file is uploaded. + for asset in asset_list: + payload["batch"].append({"id": asset}) - Args: - asset: The asset object. - file: The file to upload. + endpoint = "/batch/assets/{}/copy".format(destination_folder_id) + return self.client._api_call("post", endpoint, payload) - Example:: + def add_version( + self, target_asset_id: Union[str, UUID], new_version_id: Union[str, UUID] + ): + """ + Add a new version to a version stack, or create a new one! - client.upload(asset, open('example.mp4')) - """ - uploader = FrameioUploader(asset, file) - uploader.upload() + :param target_asset_id: The main/destination Asset or Version Stack. + :param new_version_id: The id for the asset you want to add to the Version Stack or create a new one with. - def upload(self, destination_id, filepath, asset=None): - """ - Upload a file. The method will exit once the file is uploaded. 
+ Example:: - Args: + client.add_version_to_asset( + destination_id="123", + next_asset_id="234" + ) + """ - destination_id (uuid): The destination Project or Folder ID. - filepath (string): The location of the file on your local filesystem \ - that you want to upload. + payload = {"next_asset_id": new_version_id} - Example:: + endpoint = f"/assets/{target_asset_id}/version" - client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ + return self.client._api_call("post", endpoint, payload=payload) - # Check if destination is a project or folder - # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided - # Then we start our upload + @ApiReference(operation="#deleteAsset") + def delete(self, asset_id: Union[str, UUID]): + """ + Delete an asset - try: - # First try to grab it as a folder - folder_id = self.get(destination_id)['id'] - except Exception as e: - # Then try to grab it as a project - folder_id = Project(self.client).get(destination_id)['root_asset_id'] - finally: - file_info = self._build_asset_info(filepath) + :param asset_id: the asset's id + """ + endpoint = "/assets/{}".format(asset_id) + return self.client._api_call("delete", endpoint) - if not asset: - try: - asset = self.create(folder_id, - type="file", - name=file_info['filename'], - filetype=file_info['mimetype'], - filesize=file_info['filesize'] - ) + def _upload(self, asset: Dict, file: object): + """ + Upload an asset. The method will exit once the file is uploaded. - except Exception as e: - print(e) + :param asset: The asset object as returned via the frame.io API. + :param file: The file to upload. - try: - with open(file_info['filepath'], "rb") as fp: - self._upload(asset, fp) + Example:: - except Exception as e: - print(e) + client.upload(asset, open('example.mp4')) + """ + uploader = FrameioUploader(asset, file) + uploader.upload() - return asset + def upload( + self, + destination_id: Union[str, UUID], + filepath: str, + asset: Optional[Dict] = None, + ): + """ + Upload a file. The method will exit once the file is uploaded. - def download(self, asset, download_folder, prefix=None, multi_part=False, replace=False): - """ - Download an asset. The method will exit once the file is downloaded. + :param destination_id: The destination Project or Folder ID. + :param filepath: The location of the file on your local filesystem that you want to upload. - Args: - asset (object): The asset object. - download_folder (path): The location to download the file to. - multi_part (bool): Attempt to do a multi-part download (non-WMID assets). - replace (bool): Whether or not you want to replace a file if one is found at the destination path. 
+ Example:: + + client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") + """ - Example:: + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload - client.assets.download(asset, "~./Downloads") - """ - downloader = FrameioDownloader(asset, download_folder, prefix, multi_part, replace) - return AWSClient(downloader, concurrency=5).multi_thread_download() + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)["id"] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get(destination_id)["root_asset_id"] + finally: + file_info = self._build_asset_info(filepath) + + if not asset: + try: + asset = self.create( + folder_id, + type="file", + name=file_info["filename"], + filetype=file_info["mimetype"], + filesize=file_info["filesize"], + ) + + except Exception as e: + print(e) + + try: + with open(file_info["filepath"], "rb") as fp: + self._upload(asset, fp) + + except Exception as e: + print(e) + + return asset + + def download( + self, + asset: Dict, + download_folder: str, + prefix: Optional[str] = None, + multi_part: Optional[bool] = None, + replace: Optional[bool] = False, + ): + """ + Download an asset. The method will exit once the file is downloaded. + + :param asset: The asset object. + :param download_folder: The location to download the file to. + :param multi_part: Attempt to do a multi-part download (non-WMID assets). + :param replace: Whether or not you want to replace a file if one is found at the destination path. + + Example:: + + client.assets.download(asset, "~./Downloads") + """ + downloader = FrameioDownloader( + asset, download_folder, prefix, multi_part, replace + ) + return AWSClient(downloader, concurrency=5).multi_thread_download() - def upload_folder(self, source_path, destination_id): - """ - Upload a folder full of assets, maintaining hierarchy. \ - The method will exit once the file is uploaded. + def upload_folder(self, source_path: str, destination_id: Union[str, UUID]): + """ + Upload a folder full of assets, maintaining hierarchy. \ + The method will exit once the file is uploaded. - Args: - filepath (path): The location of the folder on your disk. - destination_id (uuid): The destination Project or Folder ID. + :param filepath: The location of the folder on your disk. + :param destination_id: The destination Project or Folder ID. 
- Example:: - client.assets.upload("./file.mov", "1231-12414-afasfaf-aklsajflaksjfla") - """ + Example:: - # Check if destination is a project or folder - # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided - # Then we start our upload + client.assets.upload("./file.mov", "1231-12414-afasfaf-aklsajflaksjfla") + """ - try: - # First try to grab it as a folder - folder_id = self.get(destination_id)['id'] - except Exception as e: - # Then try to grab it as a project - folder_id = Project(self.client).get(destination_id)['root_asset_id'] - finally: - return FrameioUploader().recursive_upload(self.client, source_path, folder_id) + # Check if destination is a project or folder + # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided + # Then we start our upload + try: + # First try to grab it as a folder + folder_id = self.get(destination_id)["id"] + except Exception as e: + # Then try to grab it as a project + folder_id = Project(self.client).get(destination_id)["root_asset_id"] + finally: + return FrameioUploader().recursive_upload( + self.client, source_path, folder_id + ) diff --git a/frameioclient/services/comments.py b/frameioclient/services/comments.py index f4cc1503..81132353 100644 --- a/frameioclient/services/comments.py +++ b/frameioclient/services/comments.py @@ -1,16 +1,27 @@ -from ..lib.utils import ApiReference -from ..lib.service import Service -from typing import Union +from typing import Optional, Union from uuid import UUID +from ..lib.service import Service +from ..lib.utils import ApiReference + class Comment(Service): @ApiReference(operation="#createComment") - def create(self, asset_id: Union[str, UUID], **kwargs): + def create( + self, + asset_id: Union[str, UUID], + text: Optional[str] = None, + timestamp: Optional[int] = None, + annotation: Optional[str] = None, + **kwargs + ): """ Create a comment. :param asset_id: The asset id. + :param text: The comment text. + :param timestamp: The timestamp of the comment. + :param annotation: The serialized contents of the annotation. :Keyword Arguments: (optional) kwargs: additional request parameters. @@ -19,9 +30,12 @@ def create(self, asset_id: Union[str, UUID], **kwargs): client.comments.create( asset_id="123abc", - text="Hello world" + text="Hello world", + timestamp=10 ) """ + kwargs = {"text": text, "annotation": annotation, "timestamp": timestamp} + endpoint = "/assets/{}/comments".format(asset_id) return self.client._api_call("post", endpoint, payload=kwargs) @@ -31,7 +45,7 @@ def get(self, comment_id: Union[str, UUID], **kwargs): Get a comment. :param comment_id: The comment id. - """ + """ endpoint = "/comments/{}".format(comment_id) return self.client._api_call("get", endpoint, **kwargs) @@ -41,16 +55,26 @@ def list(self, asset_id: Union[str, UUID], **kwargs): Get an asset's comments. :param asset_id: The asset id. - """ + """ endpoint = "/assets/{}/comments".format(asset_id) return self.client._api_call("get", endpoint, **kwargs) @ApiReference(operation="#updateComment") - def update(self, comment_id: Union[str, UUID], **kwargs): + def update( + self, + comment_id: Union[str, UUID], + text: Optional[str] = None, + timestamp: Optional[int] = None, + annotation: Optional[str] = None, + **kwargs + ): """ Update a comment. :param comment_id: The comment id. + :param text: The comment text. + :param timestamp: The timestamp of the comment. + :param annotation: The serialized contents of the annotation. 
:Keyword Arguments: (optional) kwargs: additional request parameters. @@ -59,9 +83,13 @@ def update(self, comment_id: Union[str, UUID], **kwargs): client.comments.update( comment_id="123abc", - text="Hello world" + text="Hello world", + timestamp=10 ) - """ + """ + + kwargs = {"text": text, "annotation": annotation, "timestamp": timestamp} + endpoint = "/comments/{}".format(comment_id) return self.client._api_call("post", endpoint, payload=kwargs) @@ -71,7 +99,7 @@ def delete(self, comment_id: Union[str, UUID]): Delete a comment. :param comment_id: The comment id. - """ + """ endpoint = "/comments/{}".format(comment_id) return self.client._api_call("delete", endpoint) @@ -92,6 +120,6 @@ def reply(self, comment_id, **kwargs): comment_id="123abc", text="Hello world" ) - """ + """ endpoint = "/comments/{}/replies".format(comment_id) return self.client._api_call("post", endpoint, payload=kwargs) From 85df47a30c213819fc9702c90d395a1dd42b3c21 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 12:42:20 -0800 Subject: [PATCH 79/99] Fix assets.copy --- frameioclient/services/assets.py | 33 ++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index db5f522c..b60d4dbb 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -36,7 +36,7 @@ def get(self, asset_id: Union[str, UUID]): asset_id='1231-12414-afasfaf-aklsajflaksjfla', ) - """ + """ endpoint = "/assets/{}".format(asset_id) return self.client._api_call("get", endpoint) @@ -62,7 +62,7 @@ def get_children( asset_id='1231-12414-afasfaf-aklsajflaksjfla', include=['review_links','cover_asset','creator','presentation'] ) - """ + """ endpoint = "/assets/{}/children".format(asset_id) if slim == True: @@ -132,7 +132,7 @@ def create( filetype="video/mp4", filesize=123456 ) - """ + """ kwargs = { "name": name, "type": type, @@ -157,7 +157,7 @@ def create_folder(self, parent_asset_id: str, name: str = "New Folder"): parent_asset_id="123abc", name="ExampleFile.mp4", ) - """ + """ endpoint = "/assets/{}/children".format(parent_asset_id) return self.client._api_call( "post", endpoint, payload={"name": name, "type": "folder"} @@ -180,7 +180,7 @@ def from_url(self, parent_asset_id: Union[str, UUID], name: str, url: str): type="file", url="https://" ) - """ + """ payload = {"name": name, "type": "file", "source": {"url": url}} endpoint = "/assets/{}/children".format(parent_asset_id) @@ -199,13 +199,15 @@ def update(self, asset_id: Union[str, UUID], **kwargs): Example:: client.assets.update("adeffee123342", name="updated_filename.mp4") - """ + """ endpoint = "/assets/{}".format(asset_id) return self.client._api_call("put", endpoint, kwargs) @ApiReference(operation="#copyAsset") def copy( - self, destination_folder_id: Union[str, UUID], target_asset_id: Union[str, UUID] + self, + destination_folder_id: Union[str, UUID], + target_asset_id: Union[str, UUID], ): """ Copy an asset @@ -216,7 +218,10 @@ def copy( Example:: client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") - """ + """ + kwargs = { + "id": target_asset_id + } endpoint = "/assets/{}/copy".format(destination_folder_id) return self.client._api_call("post", endpoint, kwargs) @@ -244,7 +249,7 @@ def bulk_copy( ], copy_comments=True ) - """ + """ payload = {"batch": []} if copy_comments: @@ -271,7 +276,7 @@ def add_version( destination_id="123", next_asset_id="234" ) - """ + """ payload = {"next_asset_id": new_version_id} @@ 
-285,7 +290,7 @@ def delete(self, asset_id: Union[str, UUID]): Delete an asset :param asset_id: the asset's id - """ + """ endpoint = "/assets/{}".format(asset_id) return self.client._api_call("delete", endpoint) @@ -299,7 +304,7 @@ def _upload(self, asset: Dict, file: object): Example:: client.upload(asset, open('example.mp4')) - """ + """ uploader = FrameioUploader(asset, file) uploader.upload() @@ -318,7 +323,7 @@ def upload( Example:: client.assets.upload('1231-12414-afasfaf-aklsajflaksjfla', "./file.mov") - """ + """ # Check if destination is a project or folder # If it's a project, well then we look up its root asset ID, otherwise we use the folder id provided @@ -374,7 +379,7 @@ def download( Example:: client.assets.download(asset, "~./Downloads") - """ + """ downloader = FrameioDownloader( asset, download_folder, prefix, multi_part, replace ) From 8780d7ed3bb7780f00db523415b240d73253fdaa Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 12:57:35 -0800 Subject: [PATCH 80/99] Fix formatting of MB/s --- frameioclient/lib/transfer.py | 4 ++-- tests/py2_integration.py | 3 ++- tests/py3_integration.py | 5 +++-- 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index da291fec..f6f0fdd4 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -305,7 +305,7 @@ def _download_whole(self, url: str): math.ceil(self.downloader.filesize / (download_time)) ) print( - f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {download_speed}" + f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {Utils.format_value(download_speed, type=FormatTypes.SPEED)}" ) return self.destination, download_speed @@ -431,7 +431,7 @@ def multi_thread_download(self): # Log completion event SDKLogger("downloads").info( - f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {download_speed}" + f"Downloaded {Utils.format_value(self.downloader.filesize, type=FormatTypes.SIZE)} at {Utils.format_value(download_speed, type=FormatTypes.SPEED)}" ) # Submit telemetry diff --git a/tests/py2_integration.py b/tests/py2_integration.py index 338f6351..4c2c2bba 100644 --- a/tests/py2_integration.py +++ b/tests/py2_integration.py @@ -11,6 +11,7 @@ from pprint import pprint, pformat from datetime import datetime from frameioclient import FrameioClient, Utils, KB, MB +from frameioclient.lib.utils import FormatTypes token = os.getenv("FRAMEIO_TOKEN") # Your Frame.io token project_id = os.getenv("PROJECT_ID") # Project you want to upload files back into @@ -109,7 +110,7 @@ def test_download(client, override=False): client.assets.download(asset, download_dir, multi_part=True) download_time = time.time() - start_time - download_speed = Utils.format_value(ceil(asset['filesize']/(download_time))) + download_speed = Utils.format_value(ceil(asset['filesize']/(download_time)), type=FormatTypes.SPEED) print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) diff --git a/tests/py3_integration.py b/tests/py3_integration.py index 1f89776e..c4564f11 100644 --- a/tests/py3_integration.py +++ b/tests/py3_integration.py @@ -11,6 +11,7 @@ from pprint import pprint, pformat from datetime import datetime from frameioclient import FrameioClient, Utils, KB, MB +from frameioclient.lib.utils import FormatTypes token = os.getenv("FRAMEIO_TOKEN") # Your Frame.io token project_id = 
os.getenv("PROJECT_ID") # Project you want to upload files back into @@ -109,7 +110,7 @@ def test_download(client: FrameioClient, override=False): client.assets.download(asset, download_dir, multi_part=True) download_time = time.time() - start_time - download_speed = Utils.format_value(ceil(asset['filesize']/(download_time))) + download_speed = Utils.format_value(ceil(asset['filesize']/(download_time)), type=FormatTypes.SPEED) print("{}/{} Download completed in {:.2f}s @ {}".format((count), len(asset_list), download_time, download_speed)) @@ -153,7 +154,7 @@ def test_upload(client: FrameioClient): client.assets.upload(new_parent_id, ul_abs_path) upload_time = time.time() - start_time - upload_speed = Utils.format_value(ceil(filesize/(upload_time))) + upload_speed = Utils.format_value(ceil(filesize/(upload_time)), type=FormatTypes.SPEED) print("{}/{} Upload completed in {:.2f}s @ {}".format((count), len(dled_files), upload_time, upload_speed)) From a87b5a4ad407db835d4aeb35763f4dc1ffba5c6d Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 13:01:03 -0800 Subject: [PATCH 81/99] Add some more types --- frameioclient/lib/transfer.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index f6f0fdd4..808f74a8 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -4,26 +4,20 @@ import time from pprint import pprint from random import randint -from typing import Dict, List +from typing import Dict, List, Optional import requests -from .exceptions import ( - AssetChecksumMismatch, - AssetChecksumNotPresent, - DownloadException, -) +from .exceptions import (AssetChecksumMismatch, AssetChecksumNotPresent, + DownloadException) from .logger import SDKLogger from .utils import FormatTypes, Utils logger = SDKLogger("downloads") from .bandwidth import DiskBandwidth, NetworkBandwidth -from .exceptions import ( - AssetNotFullyUploaded, - DownloadException, - WatermarkIDDownloadException, -) +from .exceptions import (AssetNotFullyUploaded, DownloadException, + WatermarkIDDownloadException) from .transport import HTTPClient @@ -256,7 +250,7 @@ def _optimize_concurrency(self): return 5 - def _get_byte_range(self, url, start_byte=0, end_byte=2048): + def _get_byte_range(self, url: str, start_byte: Optional[int] = 0, end_byte: Optional[int] = 2048): """ Get a specific byte range from a given URL. This is **not** optimized \ for heavily-threaded operations currently. 
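[PATCH 81/99] above only changes the signature and type hints of _get_byte_range; the body of the method is not part of the hunk. For readers unfamiliar with ranged requests, the general shape of such a call can be sketched with the requests library as follows (the helper name and URL below are placeholders for illustration, not the SDK's actual implementation)::

    import requests

    def get_byte_range(url: str, start_byte: int = 0, end_byte: int = 2048) -> bytes:
        # Ask the server for only the bytes in [start_byte, end_byte] via the standard HTTP Range header.
        headers = {"Range": f"bytes={start_byte}-{end_byte}"}
        response = requests.get(url, headers=headers)
        response.raise_for_status()  # servers that honor Range reply with 206 Partial Content
        return response.content

    # Placeholder usage: fetch the first 2 KiB of a remote asset URL
    first_chunk = get_byte_range("https://example.com/original.mp4", start_byte=0, end_byte=2047)
    print(len(first_chunk))
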
From 0ecb812e1b76bb496bb4275d72b4e2b52be6fc40 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 13:04:17 -0800 Subject: [PATCH 82/99] Add .nojekyll file to fix github pages problem --- .nojekyll | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 .nojekyll diff --git a/.nojekyll b/.nojekyll new file mode 100644 index 00000000..e69de29b From e32c869d12402fd388e80eaef3dd2186205a7e50 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 13:27:52 -0800 Subject: [PATCH 83/99] Move .nojekyll --- .nojekyll => docs/.nojekyll | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .nojekyll => docs/.nojekyll (100%) diff --git a/.nojekyll b/docs/.nojekyll similarity index 100% rename from .nojekyll rename to docs/.nojekyll From d6dbaa45872d84aef1073ce506393abb9843f174 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 13:32:21 -0800 Subject: [PATCH 84/99] Try to fix the github pages issue --- docs/.nojekyll | 0 docs/conf.py | 1 + frameioclient/services/projects.py | 2 +- 3 files changed, 2 insertions(+), 1 deletion(-) delete mode 100644 docs/.nojekyll diff --git a/docs/.nojekyll b/docs/.nojekyll deleted file mode 100644 index e69de29b..00000000 diff --git a/docs/conf.py b/docs/conf.py index 4dee1d87..73cc4d39 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -34,6 +34,7 @@ extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.napoleon', + 'sphinx.ext.githubpages', 'sphinxcontrib.restbuilder', 'sphinx_jekyll_builder', 'sphinx_autodoc_typehints' diff --git a/frameioclient/services/projects.py b/frameioclient/services/projects.py index fa81e174..a5829c27 100644 --- a/frameioclient/services/projects.py +++ b/frameioclient/services/projects.py @@ -53,7 +53,7 @@ def tree(self, project_id: Union[str, UUID], slim: Optional[bool] = False): thumbnail, \ creator_id, \ inserted_at (date created), \ - path (represented like a filesystem) \ + path (represented like a filesystem) Example:: From 556b835503fca776fdb2dceda3ee6d76f2f1121f Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 13:39:45 -0800 Subject: [PATCH 85/99] Fix library search example --- frameioclient/services/search.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frameioclient/services/search.py b/frameioclient/services/search.py index b6b54cf3..31067f09 100644 --- a/frameioclient/services/search.py +++ b/frameioclient/services/search.py @@ -37,7 +37,7 @@ def library( Example:: - client.assets.search( + client.search.library( query="Final", type="file", sort="name" From f712baf0b28eaa47767a34dbb129a527ba4bf69c Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Fri, 4 Feb 2022 13:53:27 -0800 Subject: [PATCH 86/99] Run make format --- frameioclient/client.py | 1 + frameioclient/lib/__init__.py | 1 + frameioclient/lib/telemetry.py | 4 +--- frameioclient/lib/transfer.py | 18 +++++++++++++----- frameioclient/lib/upload.py | 9 ++++++++- frameioclient/services/assets.py | 4 +--- frameioclient/services/comments.py | 14 +++++++------- 7 files changed, 32 insertions(+), 19 deletions(-) diff --git a/frameioclient/client.py b/frameioclient/client.py index 15406fac..fefbbe40 100644 --- a/frameioclient/client.py +++ b/frameioclient/client.py @@ -6,6 +6,7 @@ from .config import Config from .lib import APIClient, ClientVersion, FrameioDownloader + # from .lib import Telemetry from .services import * diff --git a/frameioclient/lib/__init__.py b/frameioclient/lib/__init__.py index 330f5995..0ace68ac 100644 --- a/frameioclient/lib/__init__.py +++ b/frameioclient/lib/__init__.py @@ 
-1,6 +1,7 @@ from .constants import * from .exceptions import * from .logger import SDKLogger + # from .telemetry import Telemetry from .version import ClientVersion from .upload import FrameioUploader diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py index ab57be5c..51248bd7 100644 --- a/frameioclient/lib/telemetry.py +++ b/frameioclient/lib/telemetry.py @@ -30,9 +30,7 @@ def build_context(self): } def push(self, event_name, properties): - self.logger.info( - (f"Pushing '{event_name}' event to segment", properties) - ) + self.logger.info((f"Pushing '{event_name}' event to segment", properties)) try: status = analytics.track( diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index 808f74a8..f698e519 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -8,16 +8,22 @@ import requests -from .exceptions import (AssetChecksumMismatch, AssetChecksumNotPresent, - DownloadException) +from .exceptions import ( + AssetChecksumMismatch, + AssetChecksumNotPresent, + DownloadException, +) from .logger import SDKLogger from .utils import FormatTypes, Utils logger = SDKLogger("downloads") from .bandwidth import DiskBandwidth, NetworkBandwidth -from .exceptions import (AssetNotFullyUploaded, DownloadException, - WatermarkIDDownloadException) +from .exceptions import ( + AssetNotFullyUploaded, + DownloadException, + WatermarkIDDownloadException, +) from .transport import HTTPClient @@ -250,7 +256,9 @@ def _optimize_concurrency(self): return 5 - def _get_byte_range(self, url: str, start_byte: Optional[int] = 0, end_byte: Optional[int] = 2048): + def _get_byte_range( + self, url: str, start_byte: Optional[int] = 0, end_byte: Optional[int] = 2048 + ): """ Get a specific byte range from a given URL. This is **not** optimized \ for heavily-threaded operations currently. 
diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py index 8819ab7d..50128245 100644 --- a/frameioclient/lib/upload.py +++ b/frameioclient/lib/upload.py @@ -142,7 +142,14 @@ def recursive_upload(self, client, folder, parent_asset_id): complete_dir_obj = os.path.join(folder, file_p) print( - "Starting {:02d}/{}, Size: {}, Name: {}".format(self.file_num, self.file_count, Utils.format_value(os.path.getsize(complete_dir_obj), type=FormatTypes.SIZE), file_p) + "Starting {:02d}/{}, Size: {}, Name: {}".format( + self.file_num, + self.file_count, + Utils.format_value( + os.path.getsize(complete_dir_obj), type=FormatTypes.SIZE + ), + file_p, + ) ) client.assets.upload(parent_asset_id, complete_dir_obj) diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index b60d4dbb..08a2ac08 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -219,9 +219,7 @@ def copy( client.assets.copy("adeffee123342", id="7ee008c5-49a2-f8b5-997d-8b64de153c30") """ - kwargs = { - "id": target_asset_id - } + kwargs = {"id": target_asset_id} endpoint = "/assets/{}/copy".format(destination_folder_id) return self.client._api_call("post", endpoint, kwargs) diff --git a/frameioclient/services/comments.py b/frameioclient/services/comments.py index 81132353..5d797183 100644 --- a/frameioclient/services/comments.py +++ b/frameioclient/services/comments.py @@ -8,12 +8,12 @@ class Comment(Service): @ApiReference(operation="#createComment") def create( - self, - asset_id: Union[str, UUID], - text: Optional[str] = None, - timestamp: Optional[int] = None, - annotation: Optional[str] = None, - **kwargs + self, + asset_id: Union[str, UUID], + text: Optional[str] = None, + timestamp: Optional[int] = None, + annotation: Optional[str] = None, + **kwargs ): """ Create a comment. @@ -33,7 +33,7 @@ def create( text="Hello world", timestamp=10 ) - """ + """ kwargs = {"text": text, "annotation": annotation, "timestamp": timestamp} endpoint = "/assets/{}/comments".format(asset_id) From 6eba4ba1d8fd8c700db91eb242ff777cd57a5519 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 7 Feb 2022 15:02:41 -0800 Subject: [PATCH 87/99] Don't run CI on gh-pages --- .circleci/config.yml | 27 ++++++++++----------------- 1 file changed, 10 insertions(+), 17 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1245fe52..1d805407 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -8,11 +8,19 @@ workflows: version: 2 build_test_deploy: jobs: - - build + - build: + filters: + branches: + ignore: + - gh-pages - test_integration: requires: - build + filters: + branches: + ignore: + - gh-pages matrix: parameters: python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "latest"] @@ -21,7 +29,6 @@ workflows: type: approval requires: - test_integration - filters: branches: only: @@ -59,54 +66,44 @@ jobs: steps: - checkout: name: Checkout Git - - run: name: Build Package command: | echo -e "Running sdist" python setup.py sdist - - persist_to_workspace: root: /home/circleci/project/ paths: - . 
- test_integration: description: Python << parameters.python-version >> parameters: python-version: type: string - docker: - image: circleci/python:<< parameters.python-version >> - steps: - attach_workspace: at: /tmp/artifact name: Attach build artifact - - run: name: Install package command: | pip install --user '/tmp/artifact' - - run: name: Run integration test command: | python /tmp/artifact/tests/integration.py - upload_test_job: description: Upload test docker: - image: circleci/python:latest - steps: - attach_workspace: at: /tmp/artifact name: Attach build artifact - - run: name: Upload to pypi command: | @@ -127,7 +124,7 @@ jobs: # command: | # cd /tmp/artifact/docs # pip install -r requirements.txt - + # - run: # name: Build autodocs # command: | @@ -143,17 +140,14 @@ jobs: deploy: docker: - image: circleci/python:latest - steps: - attach_workspace: at: /tmp/artifact name: Attach build artifact - - run: name: Install dependencies command: | pip install setuptools wheel twine - - run: name: init .pypirc command: | @@ -161,7 +155,6 @@ jobs: echo -e "[pypi]" >> ~/.pypirc echo -e "username = $TWINE_USERNAME" >> ~/.pypirc echo -e "password = $TWINE_PASSWORD" >> ~/.pypirc - - run: name: Upload to pypi command: | From 2598820e865777fdbec37efc0441cb07c9028801 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Mon, 7 Feb 2022 15:09:14 -0800 Subject: [PATCH 88/99] Tweak python version matrix for testing --- .circleci/config.yml | 62 +------------------------------------------- 1 file changed, 1 insertion(+), 61 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 1d805407..a93f49c2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -23,7 +23,7 @@ workflows: - gh-pages matrix: parameters: - python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "latest"] + python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "3.9.9", "3.9.10", "latest"] - hold: type: approval @@ -40,25 +40,6 @@ workflows: requires: - hold - # - docs: - # requires: - # - deploy - # - build - - # upload_test: - # triggers: - # - schedule: - # cron: "0,30 * * * *" - # filters: - # branches: - # only: - # - jh/use-xxhash-for-integration-test - # jobs: - # - build - # - upload_test_job: - # requires: - # - build - jobs: build: docker: @@ -96,47 +77,6 @@ jobs: command: | python /tmp/artifact/tests/integration.py - upload_test_job: - description: Upload test - docker: - - image: circleci/python:latest - steps: - - attach_workspace: - at: /tmp/artifact - name: Attach build artifact - - run: - name: Upload to pypi - command: | - cd /tmp/artifact - twine upload dist/* - - # docs: - # docker: - # - image: circleci/python:latest - - # steps: - # - attach_workspace: - # at: /tmp/artifact - # name: Attach build artifact - - # - run: - # name: Install dependencies - # command: | - # cd /tmp/artifact/docs - # pip install -r requirements.txt - - # - run: - # name: Build autodocs - # command: | - # cd /tmp/artifact/docs - # make jekyll - - # - run: - # name: Publish autodocs - # command: | - # cd /tmp/artifact/docs - # python publish.py - deploy: docker: - image: circleci/python:latest From ec3d25d5bff699e3a7764975412abb30589f10eb Mon Sep 17 00:00:00 2001 From: jhurtadosandoval <120607545+jhurtadosandoval@users.noreply.github.com> Date: Mon, 27 Feb 2023 16:40:01 -0800 Subject: [PATCH 89/99] Update comment_scraper.py (#94) The comment scraper example resulted in a 404 error because comments.get() needed to be replaced with comments.list() --- examples/comments/comment_scraper.py | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/examples/comments/comment_scraper.py b/examples/comments/comment_scraper.py index 3cc33979..4504eef2 100644 --- a/examples/comments/comment_scraper.py +++ b/examples/comments/comment_scraper.py @@ -33,7 +33,7 @@ def build_comments_list(client, asset_id, comment_list): build_comments_list(client, asset['id'], comment_list) if asset.get('type') == 'file' and asset.get('comment_count') > 0: - comments = client.comments.get(asset['id']) + comments = client.comments.list(asset['id']) for comment in comments: # The 'get_comments" call won't return the asset name # So we'll add it to the dictionary now. @@ -44,7 +44,7 @@ def build_comments_list(client, asset_id, comment_list): # Read about version stacks: https://docs.frame.io/docs/managing-version-stacks versions = client.assets.get_children(asset['id']) for v_asset in versions: - comments = client.comments.get(v_asset['id']) + comments = client.comments.list(v_asset['id']) for comment in comments: comment['asset'] = { 'name': asset['name'] } comment_list.append(comment) From cfdda3d6a2a7130589f50f9a03c23a6d363691f9 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Wed, 22 May 2024 17:19:43 -0700 Subject: [PATCH 90/99] Cleanup unused functions --- frameioclient/lib/service.py | 5 +---- frameioclient/lib/transfer.py | 8 -------- scripts/benchmark/download.py | 2 -- 3 files changed, 1 insertion(+), 14 deletions(-) diff --git a/frameioclient/lib/service.py b/frameioclient/lib/service.py index bd5e455c..dd5c99b1 100644 --- a/frameioclient/lib/service.py +++ b/frameioclient/lib/service.py @@ -1,14 +1,11 @@ from ..client import FrameioClient -from ..lib.bandwidth import NetworkBandwidth - class Service(object): def __init__(self, client: FrameioClient): self.client = client self.concurrency = 10 - self.bandwidth = NetworkBandwidth() - # Auto-configure afterwards + # Run auto-configure afterwards self.autoconfigure() def autoconfigure(self): diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index f698e519..422c5614 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -18,7 +18,6 @@ logger = SDKLogger("downloads") -from .bandwidth import DiskBandwidth, NetworkBandwidth from .exceptions import ( AssetNotFullyUploaded, DownloadException, @@ -247,13 +246,6 @@ def _optimize_concurrency(self): AWSClient._optimize_concurrency() """ - net_stats = NetworkBandwidth - disk_stats = DiskBandwidth - - # Algorithm ensues - # - # - return 5 def _get_byte_range( diff --git a/scripts/benchmark/download.py b/scripts/benchmark/download.py index 033ce0b9..a411e94b 100644 --- a/scripts/benchmark/download.py +++ b/scripts/benchmark/download.py @@ -4,8 +4,6 @@ from utils import timefunc import frameioclient -from frameioclient.lib.bandwidth import NetworkBandwidth - def download( asset_id: str = "", From 3c0b260f5e6e9191d430844739a67677396b2042 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Wed, 22 May 2024 17:30:50 -0700 Subject: [PATCH 91/99] Bump circleci python orb --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a93f49c2..a78a01f6 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -1,7 +1,7 @@ version: 2.1 orbs: - python: circleci/python@0.2.1 + python: circleci/python@1.0.0 win: circleci/windows@2.2.0 workflows: From e352505b9cd334de1341bb0ce190b5ae773811cf Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Wed, 22 May 2024 17:33:18 -0700 Subject: [PATCH 92/99] Tweak 
python version matrix for testing --- .circleci/config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index a78a01f6..03708697 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -23,7 +23,7 @@ workflows: - gh-pages matrix: parameters: - python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "3.9.9", "3.9.10", "latest"] + python-version: ["3.6.5", "3.7.7", "3.8.6", "3.9.3", "3.9.6", "3.9.9", "latest"] - hold: type: approval From 01867dc70ab45e1af78a7c0ffcd425a8c54b0132 Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Wed, 22 May 2024 17:48:05 -0700 Subject: [PATCH 93/99] Little fixes --- frameioclient/lib/transfer.py | 25 +++++++++++++------------ frameioclient/lib/transport.py | 4 ++++ frameioclient/services/assets.py | 7 ++++++- frameioclient/services/helpers.py | 22 ++++++++++++++-------- 4 files changed, 37 insertions(+), 21 deletions(-) diff --git a/frameioclient/lib/transfer.py b/frameioclient/lib/transfer.py index 422c5614..125e4403 100644 --- a/frameioclient/lib/transfer.py +++ b/frameioclient/lib/transfer.py @@ -58,7 +58,7 @@ def __init__( self.session = None self.filename = Utils.normalize_filename(asset["name"]) self.request_logs = list() - self.stats = True + self.stats = False self._evaluate_asset() self._get_path() @@ -411,17 +411,18 @@ def multi_thread_download(self): pprint(self.downloader) download_speed = round((self.downloader.filesize / download_time), 2) - if self.downloader.checksum_verification == True: - # Check for checksum, if not present throw error - if self.downloader._get_checksum() == None: - raise AssetChecksumNotPresent - - # Calculate the file hash - if ( - Utils.calculate_hash(self.destination) - != self.downloader.original_checksum - ): - raise AssetChecksumMismatch + # TODO: Ensure this works correctly on assets that are missing checksums/at all + # if self.downloader.checksum_verification == True: + # # Check for checksum, if not present throw error + # if self.downloader._get_checksum() == None: + # raise AssetChecksumNotPresent + + # # Calculate the file hash + # if ( + # Utils.calculate_hash(self.destination) + # != self.downloader.original_checksum + # ): + # raise AssetChecksumMismatch # Log completion event SDKLogger("downloads").info( diff --git a/frameioclient/lib/transport.py b/frameioclient/lib/transport.py index 6dda6e16..09ac0ce4 100644 --- a/frameioclient/lib/transport.py +++ b/frameioclient/lib/transport.py @@ -126,6 +126,10 @@ def _api_call( if r.status_code == 422 and "presentation" in endpoint: raise PresentationException + + if r.status_code == 500 and 'audit' in endpoint: + print(f"Hit a 500 on page: {r.headers.get('page-number')}, url: {r.url}") + return [] return r.raise_for_status() diff --git a/frameioclient/services/assets.py b/frameioclient/services/assets.py index 08a2ac08..3ce45c88 100644 --- a/frameioclient/services/assets.py +++ b/frameioclient/services/assets.py @@ -138,6 +138,7 @@ def create( "type": type, "filesize": filesize, "filetype": filetype, + "properties": {"reference_id": "7eaa2f13-1202-42b3-a360-9d21e9a9efa7"}, } endpoint = "/assets/{}/children".format(parent_asset_id) @@ -356,6 +357,10 @@ def upload( except Exception as e: print(e) + else: + with open(file_info["filepath"], "rb") as fp: + self._upload(asset, fp) + return asset def download( @@ -393,7 +398,7 @@ def upload_folder(self, source_path: str, destination_id: Union[str, UUID]): Example:: - client.assets.upload("./file.mov", "1231-12414-afasfaf-aklsajflaksjfla") + 
client.assets.upload("./file.mov", "1231-12414-afasfaf-aklsajflaksjfla") """ # Check if destination is a project or folder diff --git a/frameioclient/services/helpers.py b/frameioclient/services/helpers.py index 973397c3..75955403 100644 --- a/frameioclient/services/helpers.py +++ b/frameioclient/services/helpers.py @@ -40,7 +40,7 @@ def get_assets_recursively(self, asset_id, slim=True): assets = self.client.assets.get_children(asset_id, slim=slim) print("Number of assets at top level", len(assets)) - for asset in assets: + for index, asset in enumerate(assets): # try: print( f"Type: {asset['_type']}, Name: {asset['name']}, Children: {len(asset['children'])}" @@ -57,7 +57,7 @@ def get_assets_recursively(self, asset_id, slim=True): if asset["_type"] == "version_stack": print("Grabbing top item from version stack") versions = self.client.assets.get_children(asset["id"], slim=True) - asset = versions[0] # re-assign on purpose + assets[index]['children'] = versions # re-assign on purpose continue # We only get the first three items when we use "include=children" @@ -97,10 +97,8 @@ def download_project(self, project_id, destination): project = self.client.projects.get(project_id) initial_tree = self.get_assets_recursively(project["root_asset_id"]) self.recursive_downloader(destination, initial_tree) - # pprint(initial_tree) - # print(f"Downloading {Utils.format_bytes(total_bytes, type='size')}") - def recursive_downloader(self, directory, asset, count=0): + def recursive_downloader(self, directory, asset, manifest=[]): print(f"Directory {directory}") try: @@ -121,7 +119,7 @@ def recursive_downloader(self, directory, asset, count=0): try: if asset["_type"] == "folder": if len(asset["children"]) >= 0: - count += 1 + # count += 1 # Create the new folder that these items will go in before it's too late if not os.path.exists( os.path.join(target_directory, asset["name"]) @@ -139,17 +137,25 @@ def recursive_downloader(self, directory, asset, count=0): self.recursive_downloader( f"{directory}/{str(asset['name']).replace('/', '-')}", asset["children"], + manifest ) if asset["_type"] == "file": - count += 1 - return self.client.assets.download( + # count += 1 + fn = self.client.assets.download( asset, target_directory, multi_part=True ) + manifest.append({ + "asset_id": asset['id'], + "file_path": fn, + "directory": target_directory + }) + except Exception as e: print(e) + pprint(manifest) return True From 91f9030e7bf897374c6ad4558378e16d7756037a Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Thu, 6 Jun 2024 09:48:20 -0700 Subject: [PATCH 94/99] WIP SDK updates (dockerfile update and poetry, and pyproject) --- Dockerfile | 38 +- poetry.lock | 1284 ++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 62 +++ 3 files changed, 1357 insertions(+), 27 deletions(-) create mode 100644 poetry.lock diff --git a/Dockerfile b/Dockerfile index a66579e1..162373c5 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,34 +1,18 @@ -FROM python:3.8.6-slim-buster as deps -# Set work directory -WORKDIR /home/speedtest +FROM python:3.11-buster as builder -# Copy files -COPY Pipfile . -COPY Pipfile.lock . 
+RUN pip install poetry==1.8.3 -# Install pipenv -RUN pip install pipenv +ENV POETRY_NO_INTERACTION=1 \ + POETRY_VIRTUALENVS_IN_PROJECT=1 \ + POETRY_VIRTUALENVS_CREATE=1 \ + POETRY_CACHE_DIR=/tmp/poetry_cache -FROM deps as installer -# Set work directory -WORKDIR /home/speedtest +WORKDIR /frameio -# Install deps -RUN pipenv install --system --deploy --ignore-pipfile - -# Copy over the other pieces +COPY README.md README.md +COPY pyproject.toml poetry.lock ./ COPY frameioclient frameioclient -COPY setup.py . -COPY README.md . - -# Install the local frameioclient -RUN pipenv install -e . --skip-lock - -# Copy over scripts and tests -COPY scripts scripts -COPY tests tests -ENV SEGMENT_WRITE_KEY= +RUN --mount=type=cache,target=$POETRY_CACHE_DIR poetry install --without dev -FROM installer as runtime -ENTRYPOINT [ "pipenv", "run", "python", "scripts/benchmark/download.py" ] +ENTRYPOINT [ "poetry", "run", "fiocli" ] diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 00000000..9f751d69 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,1284 @@ +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. + +[[package]] +name = "alabaster" +version = "0.7.13" +description = "A configurable sidebar-enabled Sphinx theme" +optional = true +python-versions = ">=3.6" +files = [ + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, +] + +[[package]] +name = "analytics-python" +version = "1.4.post1" +description = "The hassle-free way to integrate analytics into any python application." +optional = false +python-versions = "*" +files = [ + {file = "analytics-python-1.4.post1.tar.gz", hash = "sha256:b083e69c149c39e7ad17067f0e5c1742fbd15fdc469ade36c4d1ad5edf31ee5e"}, + {file = "analytics_python-1.4.post1-py2.py3-none-any.whl", hash = "sha256:33ab660150d0f37bb2fefc93fd19c9e7bd85e5b17db44df5e7e1139f63c14246"}, +] + +[package.dependencies] +backoff = "1.10.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +test = ["flake8 (==3.7.9)", "mock (==2.0.0)", "pylint (==1.9.3)"] + +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" +optional = false +python-versions = "*" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] + +[[package]] +name = "babel" +version = "2.14.0" +description = "Internationalization utilities" +optional = true +python-versions = ">=3.7" +files = [ + {file = "Babel-2.14.0-py3-none-any.whl", hash = "sha256:efb1a25b7118e67ce3a259bed20545c29cb68be8ad2c784c83689981b7a57287"}, + {file = "Babel-2.14.0.tar.gz", hash = "sha256:6919867db036398ba21eb5c7a0f6b28ab8cbc3ae7a73a44ebe34ae74a4e7d363"}, +] + +[package.dependencies] +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] + +[[package]] +name = "backoff" +version = "1.10.0" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "backoff-1.10.0-py2.py3-none-any.whl", hash = 
"sha256:5e73e2cbe780e1915a204799dba0a01896f45f4385e636bcca7a0614d879d0cd"}, + {file = "backoff-1.10.0.tar.gz", hash = "sha256:b8fba021fac74055ac05eb7c7bfce4723aedde6cd0a504e5326bcb0bdd6d19a4"}, +] + +[[package]] +name = "beautifulsoup4" +version = "4.12.3" +description = "Screen-scraping library" +optional = true +python-versions = ">=3.6.0" +files = [ + {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, + {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, +] + +[package.dependencies] +soupsieve = ">1.2" + +[package.extras] +cchardet = ["cchardet"] +chardet = ["chardet"] +charset-normalizer = ["charset-normalizer"] +html5lib = ["html5lib"] +lxml = ["lxml"] + +[[package]] +name = "blessed" +version = "1.20.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." +optional = false +python-versions = ">=2.7" +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] + +[package.dependencies] +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +six = ">=1.9.0" +wcwidth = ">=0.1.4" + +[[package]] +name = "bump2version" +version = "1.0.1" +description = "Version-bump your software with a single command!" +optional = false +python-versions = ">=3.5" +files = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] + +[[package]] +name = "certifi" +version = "2024.2.2" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, +] + +[[package]] +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" +optional = true +python-versions = ">=3.7" +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contentful-management" +version = "2.13.1" +description = "Contentful Management API Client" +optional = true +python-versions = "*" +files = [ + {file = "contentful_management-2.13.1.tar.gz", hash = "sha256:23718aeede4e0adee928c49142ac0828604c02f15a6ab76e765719422cf84d42"}, +] + +[package.dependencies] +python-dateutil = "*" +requests = ">=2.20.0,<3.0" + +[[package]] +name = "docutils" +version = "0.17.1" +description = "Docutils -- Python Documentation Utilities" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +files = [ + {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, + {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, +] + +[[package]] +name = "enlighten" +version = "1.12.4" +description = "Enlighten Progress Bar" +optional = false +python-versions = "*" +files = [ + {file = "enlighten-1.12.4-py2.py3-none-any.whl", hash = "sha256:5c53c57441bc5986c1d02f2f539aead9d59a206783641953a49b8d995db6b584"}, + {file = "enlighten-1.12.4.tar.gz", hash = "sha256:75f3d92b49e0ef5e454fc1a0f39dc0ab8f6d9946cbe534db3ded3010217d5b5f"}, +] + +[package.dependencies] +blessed = ">=1.17.7" +prefixed = ">=0.3.2" + +[[package]] +name = "furl" +version = "2.1.3" +description = "URL manipulation made simple." +optional = false +python-versions = "*" +files = [ + {file = "furl-2.1.3-py2.py3-none-any.whl", hash = "sha256:9ab425062c4217f9802508e45feb4a83e54324273ac4b202f1850363309666c0"}, + {file = "furl-2.1.3.tar.gz", hash = "sha256:5a6188fe2666c484a12159c18be97a1977a71d632ef5bb867ef15f54af39cc4e"}, +] + +[package.dependencies] +orderedmultidict = ">=1.0.1" +six = ">=1.8.0" + +[[package]] +name = "furo" +version = "2022.9.29" +description = "A clean customisable Sphinx documentation theme." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "furo-2022.9.29-py3-none-any.whl", hash = "sha256:559ee17999c0f52728481dcf6b1b0cf8c9743e68c5e3a18cb45a7992747869a9"}, + {file = "furo-2022.9.29.tar.gz", hash = "sha256:d4238145629c623609c2deb5384f8d036e2a1ee2a101d64b67b4348112470dbd"}, +] + +[package.dependencies] +beautifulsoup4 = "*" +pygments = ">=2.7" +sphinx = ">=4.0,<6.0" +sphinx-basic-ng = "*" + +[[package]] +name = "html2text" +version = "2020.1.16" +description = "Turn HTML into equivalent Markdown-structured text." +optional = true +python-versions = ">=3.5" +files = [ + {file = "html2text-2020.1.16-py3-none-any.whl", hash = "sha256:c7c629882da0cf377d66f073329ccf34a12ed2adf0169b9285ae4e63ef54c82b"}, + {file = "html2text-2020.1.16.tar.gz", hash = "sha256:e296318e16b059ddb97f7a8a1d6a5c1d7af4544049a01e261731d2d5cc277bbb"}, +] + +[[package]] +name = "idna" +version = "3.7" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +description = "Getting image size from png/jpeg/jpeg2000/gif file" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] + +[[package]] +name = "importlib-metadata" +version = "4.13.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-4.13.0-py3-none-any.whl", hash = "sha256:8a8a81bcf996e74fee46f0d16bd3eaa382a7eb20fd82445c3ad11f4090334116"}, + {file = "importlib_metadata-4.13.0.tar.gz", hash = "sha256:dd0173e8f150d6815e098fd354f6414b0f079af4644ddfe90c71e2fc6174346d"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + +[[package]] +name = "jinja2" +version = "3.1.3" +description = "A very fast and expressive template engine." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.3-py3-none-any.whl", hash = "sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa"}, + {file = "Jinja2-3.1.3.tar.gz", hash = "sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jinxed" +version = "1.2.1" +description = "Jinxed Terminal Library" +optional = false +python-versions = "*" +files = [ + {file = "jinxed-1.2.1-py2.py3-none-any.whl", hash = "sha256:37422659c4925969c66148c5e64979f553386a4226b9484d910d3094ced37d30"}, + {file = "jinxed-1.2.1.tar.gz", hash = "sha256:30c3f861b73279fea1ed928cfd4dfb1f273e16cd62c8a32acfac362da0f78f3f"}, +] + +[package.dependencies] +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "livereload" +version = "2.6.3" +description = "Python LiveReload is an awesome tool for web developers" +optional = true +python-versions = "*" +files = [ + {file = "livereload-2.6.3-py2.py3-none-any.whl", hash = "sha256:ad4ac6f53b2d62bb6ce1a5e6e96f1f00976a32348afedcb4b6d68df2a1d346e4"}, + {file = "livereload-2.6.3.tar.gz", hash = "sha256:776f2f865e59fde56490a56bcc6773b6917366bce0c267c60ee8aaf1a0959869"}, +] + +[package.dependencies] +six = "*" +tornado = {version = "*", markers = "python_version > \"2.7\""} + +[[package]] +name = "markupsafe" +version = "2.1.5" +description = "Safely add untrusted strings to HTML/XML markup." +optional = true +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = 
"MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, + {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, + {file = 
"MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, + {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, + {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = 
"MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +] + +[[package]] +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" +optional = false +python-versions = "*" +files = [ + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, +] + +[[package]] +name = "munch" +version = "4.0.0" +description = "A dot-accessible dictionary (a la JavaScript objects)" +optional = true +python-versions = ">=3.6" +files = [ + {file = "munch-4.0.0-py2.py3-none-any.whl", hash = "sha256:71033c45db9fb677a0b7eb517a4ce70ae09258490e419b0e7f00d1e386ecb1b4"}, + {file = "munch-4.0.0.tar.gz", hash = "sha256:542cb151461263216a4e37c3fd9afc425feeaf38aaa3025cd2a981fadb422235"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=1.7.0", markers = "python_version < \"3.8\""} + +[package.extras] +testing = ["astroid (>=2.0)", "coverage", "pylint (>=2.3.1,<2.4.0)", "pytest"] +yaml = ["PyYAML (>=5.1.0)"] + +[[package]] +name = "orderedmultidict" +version = "1.0.1" +description = "Ordered Multivalue Dictionary" +optional = false +python-versions = "*" +files = [ + {file = "orderedmultidict-1.0.1-py2.py3-none-any.whl", hash = "sha256:43c839a17ee3cdd62234c47deca1a8508a3f2ca1d0678a3bf791c87cf84adbf3"}, + {file = "orderedmultidict-1.0.1.tar.gz", hash = "sha256:04070bbb5e87291cc9bfa51df413677faf2141c73c61d2a5f7b26bea3cd882ad"}, +] + +[package.dependencies] +six = ">=1.8.0" + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = true +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "prefixed" +version = "0.7.0" +description = "Prefixed alternative numeric library" +optional = false +python-versions = "*" +files = [ + {file = "prefixed-0.7.0-py2.py3-none-any.whl", hash = "sha256:537b0e4ff4516c4578f277a41d7104f769d6935ae9cdb0f88fed82ec7b3c0ca5"}, + {file = "prefixed-0.7.0.tar.gz", hash = "sha256:0b54d15e602eb8af4ac31b1db21a37ea95ce5890e0741bb0dd9ded493cefbbe9"}, +] + +[[package]] +name = "pydash" +version = "7.0.6" +description = "The kitchen sink of Python utility libraries for doing \"stuff\" in a functional way. Based on the Lo-Dash Javascript library." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "pydash-7.0.6-py3-none-any.whl", hash = "sha256:10e506935953fde4b0d6fe21a88e17783cd1479256ae96f285b5f89063b4efd6"}, + {file = "pydash-7.0.6.tar.gz", hash = "sha256:7d9df7e9f36f2bbb08316b609480e7c6468185473a21bdd8e65dda7915565a26"}, +] + +[package.dependencies] +typing-extensions = ">=3.10,<4.6.0 || >4.6.0" + +[package.extras] +dev = ["Sphinx", "black", "build", "coverage", "docformatter", "flake8", "flake8-black", "flake8-bugbear", "flake8-isort", "furo", "importlib-metadata (<5)", "invoke", "isort", "mypy", "pylint", "pytest", "pytest-cov", "pytest-mypy-testing", "sphinx-autodoc-typehints", "tox", "twine", "wheel"] + +[[package]] +name = "pygments" +version = "2.17.2" +description = "Pygments is a syntax highlighting package written in Python." +optional = true +python-versions = ">=3.7" +files = [ + {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, + {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, +] + +[package.extras] +plugins = ["importlib-metadata"] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pyparsing" +version = "3.1.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = true +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.1.1-py3-none-any.whl", hash = "sha256:32c7c0b711493c72ff18a981d24f28aaf9c1fb7ed5e9667c9e84e3db623bdbfb"}, + {file = "pyparsing-3.1.1.tar.gz", hash = "sha256:ede28a1a32462f5a9705e07aea48001a08f7cf81a021585011deba701581a0db"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "0.19.2" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.5" +files = [ + {file = "python-dotenv-0.19.2.tar.gz", hash = "sha256:a5de49a31e953b45ff2d2fd434bbc2670e8db5273606c1e737cc6b93eff3655f"}, + {file = "python_dotenv-0.19.2-py2.py3-none-any.whl", hash = "sha256:32b2bdc1873fd3a3c346da1c6db83d0053c3c62f28f1f38516070c4c8971b1d3"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-frontmatter" +version = "1.1.0" +description = "Parse and manage posts with YAML (or other) frontmatter" +optional = true +python-versions = "*" +files = [ + {file = "python-frontmatter-1.1.0.tar.gz", hash = "sha256:7118d2bd56af9149625745c58c9b51fb67e8d1294a0c76796dafdc72c36e5f6d"}, + {file = "python_frontmatter-1.1.0-py3-none-any.whl", hash = "sha256:335465556358d9d0e6c98bbeb69b1c969f2a4a21360587b9873bfc3b213407c1"}, +] + +[package.dependencies] +PyYAML = "*" + +[package.extras] +docs = ["sphinx"] +test = ["mypy", "pyaml", "pytest", "toml", "types-PyYAML", "types-toml"] + +[[package]] +name = "pytz" +version = "2024.1" +description = "World timezone definitions, modern and historical" +optional = true 
+python-versions = "*" +files = [ + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = true +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "snowballstemmer" +version = "2.2.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." +optional = true +python-versions = "*" +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] + +[[package]] +name = "soupsieve" +version = "2.4.1" +description = "A modern CSS selector implementation for Beautiful Soup." +optional = true +python-versions = ">=3.7" +files = [ + {file = "soupsieve-2.4.1-py3-none-any.whl", hash = "sha256:1c1bfee6819544a3447586c889157365a27e10d88cde3ad3da0cf0ddf646feb8"}, + {file = "soupsieve-2.4.1.tar.gz", hash = "sha256:89d12b2d5dfcd2c9e8c22326da9d9aa9cb3dfab0a83a024f05704076ee8d35ea"}, +] + +[[package]] +name = "sphinx" +version = "4.5.0" +description = "Python documentation generator" +optional = true +python-versions = ">=3.6" +files = [ + {file = "Sphinx-4.5.0-py3-none-any.whl", hash = "sha256:ebf612653238bcc8f4359627a9b7ce44ede6fdd75d9d30f68255c7383d3a6226"}, + {file = "Sphinx-4.5.0.tar.gz", hash = "sha256:7bf8ca9637a4ee15af412d1a1d9689fec70523a68ca9bb9127c2f3eeb344e2e6"}, +] + +[package.dependencies] +alabaster = ">=0.7,<0.8" +babel = ">=1.3" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} +docutils = ">=0.14,<0.18" +imagesize = "*" +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} +Jinja2 = ">=2.3" +packaging = "*" +Pygments = ">=2.0" +requests = ">=2.5.0" +snowballstemmer = ">=1.1" +sphinxcontrib-applehelp = "*" +sphinxcontrib-devhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" +sphinxcontrib-jsmath = "*" +sphinxcontrib-qthelp = "*" +sphinxcontrib-serializinghtml = ">=1.1.5" + +[package.extras] +docs = ["sphinxcontrib-websupport"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] + +[[package]] +name = "sphinx-autobuild" +version = "2021.3.14" +description = "Rebuild Sphinx documentation on changes, with live-reload in the browser." 
+optional = true +python-versions = ">=3.6" +files = [ + {file = "sphinx-autobuild-2021.3.14.tar.gz", hash = "sha256:de1ca3b66e271d2b5b5140c35034c89e47f263f2cd5db302c9217065f7443f05"}, + {file = "sphinx_autobuild-2021.3.14-py3-none-any.whl", hash = "sha256:8fe8cbfdb75db04475232f05187c776f46f6e9e04cacf1e49ce81bdac649ccac"}, +] + +[package.dependencies] +colorama = "*" +livereload = "*" +sphinx = "*" + +[package.extras] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "sphinx-autodoc-typehints" +version = "1.19.1" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx_autodoc_typehints-1.19.1-py3-none-any.whl", hash = "sha256:9be46aeeb1b315eb5df1f3a7cb262149895d16c7d7dcd77b92513c3c3a1e85e6"}, + {file = "sphinx_autodoc_typehints-1.19.1.tar.gz", hash = "sha256:6c841db55e0e9be0483ff3962a2152b60e79306f4288d8c4e7e86ac84486a5ea"}, +] + +[package.dependencies] +Sphinx = ">=4.5" + +[package.extras] +testing = ["covdefaults (>=2.2)", "coverage (>=6.3)", "diff-cover (>=6.4)", "nptyping (>=2.1.2)", "pytest (>=7.1)", "pytest-cov (>=3)", "sphobjinv (>=2)", "typing-extensions (>=4.1)"] +type-comments = ["typed-ast (>=1.5.2)"] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +description = "A modern skeleton for Sphinx themes." +optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, + {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, +] + +[package.dependencies] +sphinx = ">=4.0" + +[package.extras] +docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] + +[[package]] +name = "sphinx-jekyll-builder" +version = "0.3.0" +description = "sphinx builder that outputs jekyll compatible markdown files with frontmatter" +optional = true +python-versions = "*" +files = [ + {file = "sphinx-jekyll-builder-0.3.0.tar.gz", hash = "sha256:5ecc0a1821849fc55c4b26e0efb8fb45454904c6900378ebd776f8a74d6e06f7"}, + {file = "sphinx_jekyll_builder-0.3.0-py2.py3-none-any.whl", hash = "sha256:b3b2d46ba49b7d47d8f58077c9ccce928f2cfec6d3fce7738c69f1590c72ebc5"}, +] + +[package.dependencies] +alabaster = ">=0.7.12" +Babel = ">=2.6.0" +certifi = ">=2018.11.29" +chardet = ">=3.0.4" +docutils = ">=0.14" +html2text = ">=2018.1.9" +idna = ">=3.7" +imagesize = ">=1.1.0" +Jinja2 = ">=2.10.1" +MarkupSafe = ">=1.1.0" +munch = ">=2.3.2" +packaging = ">=19.0" +pydash = ">=4.7.4" +Pygments = ">=2.3.1" +pyparsing = ">=2.3.1" +pytz = ">=2018.9" +PyYAML = ">=5.1" +requests = ">=2.21.0" +six = ">=1.12.0" +snowballstemmer = ">=1.2.1" +Sphinx = ">=1.8.3" +sphinx-markdown-builder = ">=0.5.3" +sphinxcontrib-websupport = ">=1.1.0" +typing = ">=3.6.6" +urllib3 = ">=1.24.2" + +[[package]] +name = "sphinx-markdown-builder" +version = "0.6.5" +description = "A Sphinx extension to add markdown generation support." 
+optional = true +python-versions = ">=3.7" +files = [ + {file = "sphinx-markdown-builder-0.6.5.tar.gz", hash = "sha256:dae3184cfefdfe9ee1af69ae9e6e09cf2768f51afeb81ae1b3c219dbfdb33e97"}, + {file = "sphinx_markdown_builder-0.6.5-py3-none-any.whl", hash = "sha256:59c8e841b56bbf04a2c11e1984f7258fa28a20c0257aa54ea3ae7a0013a27d4a"}, +] + +[package.dependencies] +docutils = "*" +sphinx = ">=2.2.0" +tabulate = "*" + +[package.extras] +dev = ["black", "bumpver", "coveralls", "flake8", "isort", "pip-tools", "pylint", "pytest", "pytest-cov", "sphinx-needs", "sphinxcontrib-plantuml"] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, + {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, + {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.0.0" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +optional = true +python-versions = ">=3.6" +files = [ + {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, + {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] + +[package.extras] +test = ["flake8", "mypy", "pytest"] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "1.0.3" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
+optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, + {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-restbuilder" +version = "0.3" +description = "Sphinx extension to output reST files." +optional = true +python-versions = ">=2.7, !=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +files = [ + {file = "sphinxcontrib-restbuilder-0.3.tar.gz", hash = "sha256:6b3ee9394b5ec5e73e6afb34d223530d0b9098cb7562f9c5e364e6d6b41410ce"}, + {file = "sphinxcontrib_restbuilder-0.3-py2.py3-none-any.whl", hash = "sha256:6ba2ddc7a87d845c075c1b2e00d541bd1c8400488e50e32c9b4169ccdd9f30cb"}, +] + +[package.dependencies] +Sphinx = ">=1.4" + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.5" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, + {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, +] + +[package.extras] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-websupport" +version = "1.2.4" +description = "Sphinx API for Web Apps" +optional = true +python-versions = ">=3.5" +files = [ + {file = "sphinxcontrib-websupport-1.2.4.tar.gz", hash = "sha256:4edf0223a0685a7c485ae5a156b6f529ba1ee481a1417817935b20bde1956232"}, + {file = "sphinxcontrib_websupport-1.2.4-py2.py3-none-any.whl", hash = "sha256:6fc9287dfc823fe9aa432463edd6cea47fa9ebbf488d7f289b322ffcfca075c7"}, +] + +[package.dependencies] +sphinxcontrib-serializinghtml = "*" + +[package.extras] +lint = ["flake8"] +test = ["Sphinx", "pytest", "sqlalchemy", "whoosh"] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = true +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "token-bucket" +version = "0.3.0" +description = "Very fast implementation of the token bucket algorithm." +optional = false +python-versions = ">=3.5" +files = [ + {file = "token_bucket-0.3.0-py2.py3-none-any.whl", hash = "sha256:6df24309e3cf5b808ae5ef714a3191ec5b54f48c34ef959e4882eef140703369"}, + {file = "token_bucket-0.3.0.tar.gz", hash = "sha256:979571c99db2ff9e651f2b2146a62b2ebadf7de6c217a8781698282976cb675f"}, +] + +[[package]] +name = "tornado" +version = "6.2" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+optional = true +python-versions = ">= 3.7" +files = [ + {file = "tornado-6.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:20f638fd8cc85f3cbae3c732326e96addff0a15e22d80f049e00121651e82e72"}, + {file = "tornado-6.2-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:87dcafae3e884462f90c90ecc200defe5e580a7fbbb4365eda7c7c1eb809ebc9"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba09ef14ca9893954244fd872798b4ccb2367c165946ce2dd7376aebdde8e3ac"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b8150f721c101abdef99073bf66d3903e292d851bee51910839831caba341a75"}, + {file = "tornado-6.2-cp37-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a2f5999215a3a06a4fc218026cd84c61b8b2b40ac5296a6db1f1451ef04c1e"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5f8c52d219d4995388119af7ccaa0bcec289535747620116a58d830e7c25d8a8"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_i686.whl", hash = "sha256:6fdfabffd8dfcb6cf887428849d30cf19a3ea34c2c248461e1f7d718ad30b66b"}, + {file = "tornado-6.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:1d54d13ab8414ed44de07efecb97d4ef7c39f7438cf5e976ccd356bebb1b5fca"}, + {file = "tornado-6.2-cp37-abi3-win32.whl", hash = "sha256:5c87076709343557ef8032934ce5f637dbb552efa7b21d08e89ae7619ed0eb23"}, + {file = "tornado-6.2-cp37-abi3-win_amd64.whl", hash = "sha256:e5f923aa6a47e133d1cf87d60700889d7eae68988704e20c75fb2d65677a8e4b"}, + {file = "tornado-6.2.tar.gz", hash = "sha256:9b630419bde84ec666bfd7ea0a4cb2a8a651c2d5cccdbdd1972a0c859dfc3c13"}, +] + +[[package]] +name = "tqdm" +version = "4.66.2" +description = "Fast, Extensible Progress Meter" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tqdm-4.66.2-py3-none-any.whl", hash = "sha256:1ee4f8a893eb9bef51c6e35730cebf234d5d0b6bd112b0271e10ed7c24a02bd9"}, + {file = "tqdm-4.66.2.tar.gz", hash = "sha256:6cd52cdf0fef0e0f543299cfc96fec90d7b8a7e88745f411ec33eb44d5ed3531"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +notebook = ["ipywidgets (>=6)"] +slack = ["slack-sdk"] +telegram = ["requests"] + +[[package]] +name = "typing" +version = "3.7.4.3" +description = "Type Hints for Python" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "typing-3.7.4.3-py2-none-any.whl", hash = "sha256:283d868f5071ab9ad873e5e52268d611e851c870a2ba354193026f2dfb29d8b5"}, + {file = "typing-3.7.4.3.tar.gz", hash = "sha256:1187fb9c82fd670d10aa07bbb6cfcfe4bdda42d6fab8d5134f04e8c4d0b71cc9"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "wcwidth" +version = "0.2.13" +description = "Measures the displayed width of unicode strings in a terminal" +optional = false +python-versions = "*" +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] + +[[package]] +name = "xxhash" +version = "3.4.1" +description = "Python binding for xxHash" +optional = false +python-versions = ">=3.7" +files = [ + {file = "xxhash-3.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:91dbfa55346ad3e18e738742236554531a621042e419b70ad8f3c1d9c7a16e7f"}, + {file = "xxhash-3.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:665a65c2a48a72068fcc4d21721510df5f51f1142541c890491afc80451636d2"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb11628470a6004dc71a09fe90c2f459ff03d611376c1debeec2d648f44cb693"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bef2a7dc7b4f4beb45a1edbba9b9194c60a43a89598a87f1a0226d183764189"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c0f7b2d547d72c7eda7aa817acf8791f0146b12b9eba1d4432c531fb0352228"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00f2fdef6b41c9db3d2fc0e7f94cb3db86693e5c45d6de09625caad9a469635b"}, + {file = "xxhash-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23cfd9ca09acaf07a43e5a695143d9a21bf00f5b49b15c07d5388cadf1f9ce11"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6a9ff50a3cf88355ca4731682c168049af1ca222d1d2925ef7119c1a78e95b3b"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f1d7c69a1e9ca5faa75546fdd267f214f63f52f12692f9b3a2f6467c9e67d5e7"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:672b273040d5d5a6864a36287f3514efcd1d4b1b6a7480f294c4b1d1ee1b8de0"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:4178f78d70e88f1c4a89ff1ffe9f43147185930bb962ee3979dba15f2b1cc799"}, + {file = "xxhash-3.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9804b9eb254d4b8cc83ab5a2002128f7d631dd427aa873c8727dba7f1f0d1c2b"}, + {file = "xxhash-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c09c49473212d9c87261d22c74370457cfff5db2ddfc7fd1e35c80c31a8c14ce"}, + {file = "xxhash-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ebbb1616435b4a194ce3466d7247df23499475c7ed4eb2681a1fa42ff766aff6"}, + {file = "xxhash-3.4.1-cp310-cp310-win_arm64.whl", hash = "sha256:25dc66be3db54f8a2d136f695b00cfe88018e59ccff0f3b8f545869f376a8a46"}, + {file = 
"xxhash-3.4.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:58c49083801885273e262c0f5bbeac23e520564b8357fbb18fb94ff09d3d3ea5"}, + {file = "xxhash-3.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b526015a973bfbe81e804a586b703f163861da36d186627e27524f5427b0d520"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36ad4457644c91a966f6fe137d7467636bdc51a6ce10a1d04f365c70d6a16d7e"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:248d3e83d119770f96003271fe41e049dd4ae52da2feb8f832b7a20e791d2920"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2070b6d5bbef5ee031666cf21d4953c16e92c2f8a24a94b5c240f8995ba3b1d0"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2746035f518f0410915e247877f7df43ef3372bf36cfa52cc4bc33e85242641"}, + {file = "xxhash-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ba6181514681c2591840d5632fcf7356ab287d4aff1c8dea20f3c78097088"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0aac5010869240e95f740de43cd6a05eae180c59edd182ad93bf12ee289484fa"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4cb11d8debab1626181633d184b2372aaa09825bde709bf927704ed72765bed1"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:b29728cff2c12f3d9f1d940528ee83918d803c0567866e062683f300d1d2eff3"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a15cbf3a9c40672523bdb6ea97ff74b443406ba0ab9bca10ceccd9546414bd84"}, + {file = "xxhash-3.4.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6e66df260fed01ed8ea790c2913271641c58481e807790d9fca8bfd5a3c13844"}, + {file = "xxhash-3.4.1-cp311-cp311-win32.whl", hash = "sha256:e867f68a8f381ea12858e6d67378c05359d3a53a888913b5f7d35fbf68939d5f"}, + {file = "xxhash-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:200a5a3ad9c7c0c02ed1484a1d838b63edcf92ff538770ea07456a3732c577f4"}, + {file = "xxhash-3.4.1-cp311-cp311-win_arm64.whl", hash = "sha256:1d03f1c0d16d24ea032e99f61c552cb2b77d502e545187338bea461fde253583"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c4bbba9b182697a52bc0c9f8ec0ba1acb914b4937cd4a877ad78a3b3eeabefb3"}, + {file = "xxhash-3.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9fd28a9da300e64e434cfc96567a8387d9a96e824a9be1452a1e7248b7763b78"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6066d88c9329ab230e18998daec53d819daeee99d003955c8db6fc4971b45ca3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93805bc3233ad89abf51772f2ed3355097a5dc74e6080de19706fc447da99cd3"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64da57d5ed586ebb2ecdde1e997fa37c27fe32fe61a656b77fabbc58e6fbff6e"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a97322e9a7440bf3c9805cbaac090358b43f650516486746f7fa482672593df"}, + {file = "xxhash-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bbe750d512982ee7d831838a5dee9e9848f3fb440e4734cca3f298228cc957a6"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = 
"sha256:fd79d4087727daf4d5b8afe594b37d611ab95dc8e29fe1a7517320794837eb7d"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:743612da4071ff9aa4d055f3f111ae5247342931dedb955268954ef7201a71ff"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:b41edaf05734092f24f48c0958b3c6cbaaa5b7e024880692078c6b1f8247e2fc"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:a90356ead70d715fe64c30cd0969072de1860e56b78adf7c69d954b43e29d9fa"}, + {file = "xxhash-3.4.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ac56eebb364e44c85e1d9e9cc5f6031d78a34f0092fea7fc80478139369a8b4a"}, + {file = "xxhash-3.4.1-cp312-cp312-win32.whl", hash = "sha256:911035345932a153c427107397c1518f8ce456f93c618dd1c5b54ebb22e73747"}, + {file = "xxhash-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:f31ce76489f8601cc7b8713201ce94b4bd7b7ce90ba3353dccce7e9e1fee71fa"}, + {file = "xxhash-3.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:b5beb1c6a72fdc7584102f42c4d9df232ee018ddf806e8c90906547dfb43b2da"}, + {file = "xxhash-3.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d42b24d1496deb05dee5a24ed510b16de1d6c866c626c2beb11aebf3be278b9"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b685fab18876b14a8f94813fa2ca80cfb5ab6a85d31d5539b7cd749ce9e3624"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:419ffe34c17ae2df019a4685e8d3934d46b2e0bbe46221ab40b7e04ed9f11137"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0e041ce5714f95251a88670c114b748bca3bf80cc72400e9f23e6d0d59cf2681"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc860d887c5cb2f524899fb8338e1bb3d5789f75fac179101920d9afddef284b"}, + {file = "xxhash-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:312eba88ffe0a05e332e3a6f9788b73883752be63f8588a6dc1261a3eaaaf2b2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:e01226b6b6a1ffe4e6bd6d08cfcb3ca708b16f02eb06dd44f3c6e53285f03e4f"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9f3025a0d5d8cf406a9313cd0d5789c77433ba2004b1c75439b67678e5136537"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:6d3472fd4afef2a567d5f14411d94060099901cd8ce9788b22b8c6f13c606a93"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:43984c0a92f06cac434ad181f329a1445017c33807b7ae4f033878d860a4b0f2"}, + {file = "xxhash-3.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a55e0506fdb09640a82ec4f44171273eeabf6f371a4ec605633adb2837b5d9d5"}, + {file = "xxhash-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:faec30437919555b039a8bdbaba49c013043e8f76c999670aef146d33e05b3a0"}, + {file = "xxhash-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:c9e1b646af61f1fc7083bb7b40536be944f1ac67ef5e360bca2d73430186971a"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:961d948b7b1c1b6c08484bbce3d489cdf153e4122c3dfb07c2039621243d8795"}, + {file = "xxhash-3.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:719a378930504ab159f7b8e20fa2aa1896cde050011af838af7e7e3518dd82de"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74fb5cb9406ccd7c4dd917f16630d2e5e8cbbb02fc2fca4e559b2a47a64f4940"}, + {file = 
"xxhash-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5dab508ac39e0ab988039bc7f962c6ad021acd81fd29145962b068df4148c476"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c59f3e46e7daf4c589e8e853d700ef6607afa037bfad32c390175da28127e8c"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc07256eff0795e0f642df74ad096f8c5d23fe66bc138b83970b50fc7f7f6c5"}, + {file = "xxhash-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9f749999ed80f3955a4af0eb18bb43993f04939350b07b8dd2f44edc98ffee9"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7688d7c02149a90a3d46d55b341ab7ad1b4a3f767be2357e211b4e893efbaaf6"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a8b4977963926f60b0d4f830941c864bed16aa151206c01ad5c531636da5708e"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8106d88da330f6535a58a8195aa463ef5281a9aa23b04af1848ff715c4398fb4"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4c76a77dbd169450b61c06fd2d5d436189fc8ab7c1571d39265d4822da16df22"}, + {file = "xxhash-3.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:11f11357c86d83e53719c592021fd524efa9cf024dc7cb1dfb57bbbd0d8713f2"}, + {file = "xxhash-3.4.1-cp38-cp38-win32.whl", hash = "sha256:0c786a6cd74e8765c6809892a0d45886e7c3dc54de4985b4a5eb8b630f3b8e3b"}, + {file = "xxhash-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:aabf37fb8fa27430d50507deeab2ee7b1bcce89910dd10657c38e71fee835594"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6127813abc1477f3a83529b6bbcfeddc23162cece76fa69aee8f6a8a97720562"}, + {file = "xxhash-3.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef2e194262f5db16075caea7b3f7f49392242c688412f386d3c7b07c7733a70a"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71be94265b6c6590f0018bbf73759d21a41c6bda20409782d8117e76cd0dfa8b"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10e0a619cdd1c0980e25eb04e30fe96cf8f4324758fa497080af9c21a6de573f"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fa122124d2e3bd36581dd78c0efa5f429f5220313479fb1072858188bc2d5ff1"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17032f5a4fea0a074717fe33477cb5ee723a5f428de7563e75af64bfc1b1e10"}, + {file = "xxhash-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca7783b20e3e4f3f52f093538895863f21d18598f9a48211ad757680c3bd006f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d77d09a1113899fad5f354a1eb4f0a9afcf58cefff51082c8ad643ff890e30cf"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:21287bcdd299fdc3328cc0fbbdeaa46838a1c05391264e51ddb38a3f5b09611f"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:dfd7a6cc483e20b4ad90224aeb589e64ec0f31e5610ab9957ff4314270b2bf31"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:543c7fcbc02bbb4840ea9915134e14dc3dc15cbd5a30873a7a5bf66039db97ec"}, + {file = "xxhash-3.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fe0a98d990e433013f41827b62be9ab43e3cf18e08b1483fcc343bda0d691182"}, + {file = 
"xxhash-3.4.1-cp39-cp39-win32.whl", hash = "sha256:b9097af00ebf429cc7c0e7d2fdf28384e4e2e91008130ccda8d5ae653db71e54"}, + {file = "xxhash-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:d699b921af0dcde50ab18be76c0d832f803034d80470703700cb7df0fbec2832"}, + {file = "xxhash-3.4.1-cp39-cp39-win_arm64.whl", hash = "sha256:2be491723405e15cc099ade1280133ccfbf6322d2ef568494fb7d07d280e7eee"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:431625fad7ab5649368c4849d2b49a83dc711b1f20e1f7f04955aab86cd307bc"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6dbd5fc3c9886a9e041848508b7fb65fd82f94cc793253990f81617b61fe49"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3ff8dbd0ec97aec842476cb8ccc3e17dd288cd6ce3c8ef38bff83d6eb927817"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef73a53fe90558a4096e3256752268a8bdc0322f4692ed928b6cd7ce06ad4fe3"}, + {file = "xxhash-3.4.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:450401f42bbd274b519d3d8dcf3c57166913381a3d2664d6609004685039f9d3"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a162840cf4de8a7cd8720ff3b4417fbc10001eefdd2d21541a8226bb5556e3bb"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b736a2a2728ba45017cb67785e03125a79d246462dfa892d023b827007412c52"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d0ae4c2e7698adef58710d6e7a32ff518b66b98854b1c68e70eee504ad061d8"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6322c4291c3ff174dcd104fae41500e75dad12be6f3085d119c2c8a80956c51"}, + {file = "xxhash-3.4.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd59ed668801c3fae282f8f4edadf6dc7784db6d18139b584b6d9677ddde1b6b"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92693c487e39523a80474b0394645b393f0ae781d8db3474ccdcead0559ccf45"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4603a0f642a1e8d7f3ba5c4c25509aca6a9c1cc16f85091004a7028607ead663"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fa45e8cbfbadb40a920fe9ca40c34b393e0b067082d94006f7f64e70c7490a6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:595b252943b3552de491ff51e5bb79660f84f033977f88f6ca1605846637b7c6"}, + {file = "xxhash-3.4.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:562d8b8f783c6af969806aaacf95b6c7b776929ae26c0cd941d54644ea7ef51e"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:41ddeae47cf2828335d8d991f2d2b03b0bdc89289dc64349d712ff8ce59d0647"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c44d584afdf3c4dbb3277e32321d1a7b01d6071c1992524b6543025fb8f4206f"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7bddb3a5b86213cc3f2c61500c16945a1b80ecd572f3078ddbbe68f9dabdfb"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9ecb6c987b62437c2f99c01e97caf8d25660bf541fe79a481d05732e5236719c"}, + {file = "xxhash-3.4.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:696b4e18b7023527d5c50ed0626ac0520edac45a50ec7cf3fc265cd08b1f4c03"}, + {file = "xxhash-3.4.1.tar.gz", hash = "sha256:0379d6cf1ff987cd421609a264ce025e74f346e3e145dd106c0cc2e3ec3f99a9"}, +] + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[extras] +docs = [] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "13837790ac1dd2f2458290c7f582a78ae1eada646b314b3b7fb5e667a8d1b350" diff --git a/pyproject.toml b/pyproject.toml index b0471b7f..bd142da2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,3 +1,65 @@ +[tool.poetry] +name = "frameioclient" +version = "2.0.1a5" +description='Client library for the Frame.io API' +readme = "README.md" +license='MIT' +homepage = "https://github.com/Frameio/python-frameio-client" +authors = ["Frame.io DevRel "] + +classifiers = [ + 'Development Status :: 5 - Production/Stable', + 'Intended Audience :: Developers', + 'Topic :: Multimedia :: Video', + 'Topic :: Software Development :: Libraries', + 'License :: OSI Approved :: MIT License', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9' +] + +[tool.poetry.dependencies] +python = "^3.7" +analytics-python = "^1.4.0" +enlighten = "^1.10.2" +importlib-metadata = "^4.11.3" +requests = "^2.27.1" +token-bucket = "^0.3.0" +urllib3 = "^1.26.9" +xxhash = "^3.0.0" +furl = "^2.1.3" +tqdm = "^4.66.2" + +[tool.poetry.dev-dependencies] +bump2version = "^1.0.1" + +# Optional dependencies +Sphinx = { version = "^4.4.0", optional = true } +sphinx-jekyll-builder = { version = "^0.3.0", optional = true } +sphinxcontrib-restbuilder = { version = "^0.3", optional = true } +sphinx-autobuild = { version = "^2021.3.14", optional = true } +contentful_management = { version = "^2.11.0", optional = true } +python-frontmatter = { version = "^1.0.0", optional = true } +sphinx-autodoc-typehints = { version = "^1.17.0", optional = true } +furo = { version = "^2022.3.4", optional = true } +python-dotenv = "^0.19.2" + +[tool.poetry.extras] +docs = [ + "sphinx", + "sphinx-jekyll-builder", + "sphinxcontrib-restbuilder", + "sphinx-autobuild", + "contentful_management", + "python-frontmatter", + "sphinx-autodoc-typehints", + "furo" +] + +[tool.poetry.scripts] +fiocli = 'frameioclient.fiocli:main' + [build-system] requires = ["setuptools", "wheel"] build-backend = "setuptools.build_meta:__legacy__" \ No newline at end of file From 03f147f1689ba72f1dc2a379b0f4322313d4a8bd Mon Sep 17 00:00:00 2001 From: Jeff Hodges Date: Thu, 6 Jun 2024 09:48:32 -0700 Subject: [PATCH 95/99] Add poetry.toml --- 
 poetry.toml | 3 +++
 1 file changed, 3 insertions(+)
 create mode 100644 poetry.toml

diff --git a/poetry.toml b/poetry.toml
new file mode 100644
index 00000000..53b35d37
--- /dev/null
+++ b/poetry.toml
@@ -0,0 +1,3 @@
+[virtualenvs]
+create = true
+in-project = true

From e7c683c7c1ee191f60b9cfbea07a2fc2651065a1 Mon Sep 17 00:00:00 2001
From: Jeff Hodges
Date: Thu, 6 Jun 2024 09:49:29 -0700
Subject: [PATCH 96/99] Add python 3.10 and 3.11 to the pypi classifiers

---
 pyproject.toml | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index bd142da2..f16ff690 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,9 @@ classifiers = [
     'Programming Language :: Python :: 3.6',
     'Programming Language :: Python :: 3.7',
     'Programming Language :: Python :: 3.8',
-    'Programming Language :: Python :: 3.9'
+    'Programming Language :: Python :: 3.9',
+    'Programming Language :: Python :: 3.10',
+    'Programming Language :: Python :: 3.11'
 ]
 
 [tool.poetry.dependencies]

From 89ad09d262d91deb914436463630a139edbab20e Mon Sep 17 00:00:00 2001
From: Jeff Hodges
Date: Thu, 6 Jun 2024 10:51:35 -0700
Subject: [PATCH 97/99] Remove speedtest entirely

---
 docs/requirements.txt          |  1 -
 frameioclient/lib/bandwidth.py | 57 ----------------------------------
 frameioclient/lib/service.py   |  3 +-
 frameioclient/lib/telemetry.py |  1 -
 setup.py                       |  1 -
 5 files changed, 1 insertion(+), 62 deletions(-)
 delete mode 100644 frameioclient/lib/bandwidth.py

diff --git a/docs/requirements.txt b/docs/requirements.txt
index 63a029d6..f5f3d0a0 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -8,6 +8,5 @@ xxhash
 furo
 analytics-python
 token-bucket
-speedtest-cli
 sphinx-autobuild
 sphinx-autodoc-typehints
\ No newline at end of file
diff --git a/frameioclient/lib/bandwidth.py b/frameioclient/lib/bandwidth.py
deleted file mode 100644
index b1991d53..00000000
--- a/frameioclient/lib/bandwidth.py
+++ /dev/null
@@ -1,57 +0,0 @@
-import speedtest
-
-
-class NetworkBandwidth:
-    # Test the network bandwidth any time we have a new IP address
-    # Persist this information to a config.json file
-
-    def __init__(self):
-        self.results = dict()
-
-    def load_stats(self):
-        # Force an update on these stats before starting download/upload
-        pass
-
-    def persist_stats(self):
-        pass
-
-    def run(self):
-        self.results = self.speed_test()
-
-    @staticmethod
-    def speedtest():
-        """
-        Run a speedtest using Speedtest.net in order to get a 'control' for \
-        bandwidth optimization.
-
-        Example::
-            NetworkBandwidth.speedtest()
-        """
-        st = speedtest.Speedtest()
-        download_speed = round(st.download(threads=10) * (1.192 * 10 ** -7), 2)
-        upload_speed = round(st.upload(threads=10) * (1.192 * 10 ** -7), 2)
-        servernames = []
-        server_names = st.get_servers(servernames)
-        ping = st.results.ping
-
-        return {
-            "ping": ping,
-            "download_speed": download_speed,
-            "upload_speed": upload_speed,
-        }
-
-    def __repr__(self):
-        self.results
-
-
-class DiskBandwidth:
-    # Test the disk speed and write to a config.json file for re-use
-    # Worth re-checking the disk every time a new one is detected (base route)
-
-    def __init__(self, volume):
-        self.volume = volume
-        self.results = dict()
-
-    def __repr__(self):
-        self.results
diff --git a/frameioclient/lib/service.py b/frameioclient/lib/service.py
index dd5c99b1..cf877bf3 100644
--- a/frameioclient/lib/service.py
+++ b/frameioclient/lib/service.py
@@ -9,8 +9,7 @@ def __init__(self, client: FrameioClient):
         self.autoconfigure()
 
     def autoconfigure(self):
-        # self.bandwidth = SpeedTest.speedtest()
-        return
+        pass
 
     def save_config(self):
         pass
diff --git a/frameioclient/lib/telemetry.py b/frameioclient/lib/telemetry.py
index 51248bd7..211e19da 100644
--- a/frameioclient/lib/telemetry.py
+++ b/frameioclient/lib/telemetry.py
@@ -13,7 +13,6 @@ class Telemetry(object):
     def __init__(self, user_id):
         self.user_id = user_id
-        self.speedtest = None
         self.identity = None
         self.context = None
         self.integrations = {"all": False, "Amplitude": True}
diff --git a/setup.py b/setup.py
index b39899c4..8187f56d 100644
--- a/setup.py
+++ b/setup.py
@@ -30,7 +30,6 @@ def run(self):
         'importlib-metadata ~= 1.0 ; python_version < "3.8"',
         'requests',
         'token-bucket',
-        'speedtest-cli',
         'urllib3',
         'xxhash',
     ],

From 20786c75ae50c6bd764e8a9b4320bad083c209be Mon Sep 17 00:00:00 2001
From: Jeff Hodges
Date: Mon, 10 Jun 2024 12:00:23 -0700
Subject: [PATCH 98/99] Don't print part size during upload

---
 frameioclient/lib/upload.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/frameioclient/lib/upload.py b/frameioclient/lib/upload.py
index 50128245..d8babb6f 100644
--- a/frameioclient/lib/upload.py
+++ b/frameioclient/lib/upload.py
@@ -104,7 +104,6 @@ def upload(self):
         for future in concurrent.futures.as_completed(self.futures):
             try:
                 chunk_size = future.result()
-                print(chunk_size)
             except Exception as exc:
                 print(exc)

From 5255b3639c8fd32f5fb5c2d4eafee2b6001e9206 Mon Sep 17 00:00:00 2001
From: Jeff Hodges
Date: Tue, 11 Jun 2024 13:33:24 -0700
Subject: [PATCH 99/99] Format the response for a successful upload via fiocli as JSON

---
 frameioclient/fiocli.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/frameioclient/fiocli.py b/frameioclient/fiocli.py
index 682d5f70..13486ff5 100644
--- a/frameioclient/fiocli.py
+++ b/frameioclient/fiocli.py
@@ -1,3 +1,4 @@
+import json
 import os
 import sys
 import argparse
@@ -83,7 +84,10 @@ def main():
                 args.target[0], args.destination[0]
             )
         else:
-            return client.assets.upload(args.destination[0], args.target[0])
+            try:
+                return json.dumps(client.assets.upload(args.destination[0], args.target[0]))
+            except Exception as e:
+                print(e)
     else:
         print("No destination supplied")
 else: