From 2f869dd4484907aa65f1fcc5700ba4aab7030826 Mon Sep 17 00:00:00 2001 From: Ian Epperson Date: Tue, 13 Aug 2024 18:14:39 -0700 Subject: [PATCH 1/4] Update python versions to test - drop 3.6 and 3.7, add to 3.12 --- .github/workflows/python-package.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 632e7bd..1f3d258 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -16,7 +16,6 @@ jobs: strategy: matrix: python-version: [3.8, 3.9, 3.10, 3.11, 3.12] - steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} From 801ec6d0fe9f3b46d525121467a6923f513b4a3b Mon Sep 17 00:00:00 2001 From: Ian Epperson Date: Tue, 13 Aug 2024 18:22:01 -0700 Subject: [PATCH 2/4] Add proper quotes to Python versions --- .github/workflows/python-package.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index 1f3d258..930735a 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.8, 3.9, 3.10, 3.11, 3.12] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] steps: - uses: actions/checkout@v2 - name: Set up Python ${{ matrix.python-version }} From bef0747859a651a639c23b052636b5ab286acf4a Mon Sep 17 00:00:00 2001 From: Ian Epperson Date: Thu, 15 Aug 2024 11:13:38 -0700 Subject: [PATCH 3/4] Reformatted with recent version of Black --- filestorage/__init__.py | 28 +- filestorage/config_utils.py | 82 +++--- filestorage/config_utils.pyi | 8 +- filestorage/file_item.py | 29 +- filestorage/filter_base.py | 3 +- filestorage/filters/__init__.py | 2 +- filestorage/filters/valid_extensions.py | 4 +- filestorage/handler_base.py | 50 ++-- filestorage/handler_base.pyi | 8 +- filestorage/handlers/__init__.py | 10 +- filestorage/handlers/_template.py | 2 +- filestorage/handlers/dummy.py | 50 +--- filestorage/handlers/file.py | 52 ++-- filestorage/handlers/s3.py | 106 +++----- filestorage/handlers/s3.pyi | 4 +- filestorage/pyramid_config.py | 12 +- filestorage/storage_container.py | 40 ++- filestorage/utils.py | 8 +- filestorage/utils.pyi | 4 +- setup.py | 54 ++-- tests/filters/test_randomize_filename.py | 12 +- tests/filters/test_valid_extensions.py | 8 +- tests/handlers/s3_mock.py | 6 +- tests/handlers/test_local_file.py | 328 ++++++++++------------- tests/handlers/test_s3.py | 102 +++---- tests/test_config_utils.py | 98 ++++--- tests/test_file_item.py | 74 ++--- tests/test_filter_base.py | 36 +-- tests/test_handler_base.py | 142 +++++----- tests/test_pyramid_config.py | 36 ++- tests/test_storage_container.py | 37 ++- 31 files changed, 644 insertions(+), 791 deletions(-) diff --git a/filestorage/__init__.py b/filestorage/__init__.py index 43b5d13..d666adf 100644 --- a/filestorage/__init__.py +++ b/filestorage/__init__.py @@ -11,27 +11,27 @@ def _read(rel_path): here = os.path.abspath(os.path.dirname(__file__)) - with codecs.open(os.path.join(here, rel_path), 'r') as fp: + with codecs.open(os.path.join(here, rel_path), "r") as fp: return fp.read() -__version__ = _read('VERSION').strip() +__version__ = _read("VERSION").strip() # Instantiate the store singleton store = StorageContainer() __all__ = [ - 'store', - 'StorageContainer', - 'StorageHandlerBase', - 'AsyncStorageHandlerBase', - 'FileItem', - 'FilterBase', - 'AsyncFilterBase', - 'exceptions', - 'handlers', 
- 'filters', - 'pyramid_config', - 'config_utils', + "store", + "StorageContainer", + "StorageHandlerBase", + "AsyncStorageHandlerBase", + "FileItem", + "FilterBase", + "AsyncFilterBase", + "exceptions", + "handlers", + "filters", + "pyramid_config", + "config_utils", ] diff --git a/filestorage/config_utils.py b/filestorage/config_utils.py index 8cc4816..73760fd 100644 --- a/filestorage/config_utils.py +++ b/filestorage/config_utils.py @@ -9,15 +9,15 @@ def try_import(default_module: str, model: str): """Attempt to import the given name.""" - module_name, _, cls_name = model.rpartition('.') + module_name, _, cls_name = model.rpartition(".") module_name = module_name or default_module try: module = importlib.import_module(module_name) cls = getattr(module, cls_name) - except (ImportError): - raise ValueError('module not installed') + except ImportError: + raise ValueError("module not installed") except AttributeError: - raise ValueError('bad class name') + raise ValueError("bad class name") return cls @@ -32,14 +32,14 @@ def get_init_properties(cls, to_class=object) -> Set[str]: Returns a set of all parameters found. """ result = set() - init = getattr(cls, '__init__', None) + init = getattr(cls, "__init__", None) if init is not None: for param in inspect.signature(init).parameters.values(): if param.kind == param.VAR_KEYWORD: # Ignore any **kwargs continue - if param.name == 'self': + if param.name == "self": continue result.add(param.name) @@ -53,7 +53,7 @@ def get_init_properties(cls, to_class=object) -> Set[str]: def setup_from_settings( settings: Dict[str, str], store: StorageContainer, - key_prefix: str = 'store', + key_prefix: str = "store", ) -> bool: """Setup the provided store with the settings dictionary. Will only pay attention to keys that start with the key_prefix. @@ -68,7 +68,7 @@ def setup_from_settings( # If there's configuration to be had, setup the store with it. if settings_dict: - setup_store(store, key_prefix, '', settings_dict) + setup_store(store, key_prefix, "", settings_dict) return True else: # Otherwise, assume that the store isn't going to be used now @@ -82,26 +82,26 @@ def setup_store( """Setup a specific store to the given name in the settings_dict. key_prefix denotes where this name came from (for good error messages). 
""" - name = name or '' + name = name or "" try: - handler_class_name = settings_dict['handler'][None] + handler_class_name = settings_dict["handler"][None] except KeyError: raise FilestorageConfigError( - f'Pyramid settings has no key for {key_prefix}{name}.handler' + f"Pyramid settings has no key for {key_prefix}{name}.handler" ) - if handler_class_name.lower() == 'none': + if handler_class_name.lower() == "none": handler = None else: - handler = get_handler(key_prefix + name, settings_dict['handler']) + handler = get_handler(key_prefix + name, settings_dict["handler"]) - settings_dict.pop('handler') + settings_dict.pop("handler") store.handler = handler # Setup any sub-store configuration for key, sub_config in settings_dict.items(): - if key.startswith('[') and key.endswith(']'): - sub_store = key.lstrip('[').rstrip(']').strip('"').strip("'") + if key.startswith("[") and key.endswith("]"): + sub_store = key.lstrip("[").rstrip("]").strip('"').strip("'") setup_store( store=store[sub_store], key_prefix=key_prefix + key, @@ -110,17 +110,17 @@ def setup_store( ) else: raise FilestorageConfigError( - f'Pyramid settings unknown key {key_prefix}.{key}' + f"Pyramid settings unknown key {key_prefix}.{key}" ) def get_handler(key_prefix: str, settings_dict: Dict) -> StorageHandlerBase: - name = f'{key_prefix}.handler' + name = f"{key_prefix}.handler" handler_name = settings_dict.pop(None) try: - handler_cls = try_import('filestorage.handlers', handler_name) + handler_cls = try_import("filestorage.handlers", handler_name) except ValueError: - raise FilestorageConfigError(f'Pyramid settings bad value for {name}') + raise FilestorageConfigError(f"Pyramid settings bad value for {name}") valid_args = get_init_properties(handler_cls, StorageHandlerBase) @@ -128,35 +128,33 @@ def get_handler(key_prefix: str, settings_dict: Dict) -> StorageHandlerBase: for key, value in settings_dict.items(): if key not in valid_args: maybe = difflib.get_close_matches(key, valid_args, 1) - maybe_txt = '' + maybe_txt = "" if maybe: maybe_txt = f' Did you mean "{name}.{maybe[0]}"?' raise FilestorageConfigError( f'Pyramid invalid setting "{name}.{key}". 
{maybe_txt}' ) - if key == 'filters': - kwargs['filters'] = get_all_filters(name, value) + if key == "filters": + kwargs["filters"] = get_all_filters(name, value) else: kwargs[key] = decode_kwarg(value) try: return handler_cls(**kwargs) except Exception as err: - raise FilestorageConfigError( - f'Pyramid settings bad args for {name}: {err}' - ) + raise FilestorageConfigError(f"Pyramid settings bad args for {name}: {err}") def get_all_filters(key_prefix: str, settings_dict: Dict) -> List[FilterBase]: """Get all the filters from within the settings_dict""" filters: List[Tuple[int, FilterBase]] = [] for filter_ref, filter_dict in settings_dict.items(): - filter_prefix = f'{key_prefix}.filters{filter_ref}' + filter_prefix = f"{key_prefix}.filters{filter_ref}" try: - filter_id = int(filter_ref.lstrip('[').rstrip(']')) + filter_id = int(filter_ref.lstrip("[").rstrip("]")) except Exception as err: raise FilestorageConfigError( - f'Pyramid settings bad key {key_prefix}{filter_ref}: {err}' + f"Pyramid settings bad key {key_prefix}{filter_ref}: {err}" ) filters.append((filter_id, get_filter(filter_prefix, filter_dict))) @@ -168,18 +166,16 @@ def get_filter(key_prefix: str, settings_dict: Dict) -> FilterBase: """Get a single filter from within the settings_dict""" filter_name = settings_dict.pop(None) try: - filter_cls = try_import('filestorage.filters', filter_name) + filter_cls = try_import("filestorage.filters", filter_name) except ValueError: - raise FilestorageConfigError( - f'Pyramid settings bad value for {key_prefix}' - ) + raise FilestorageConfigError(f"Pyramid settings bad value for {key_prefix}") kwargs = {key: decode_kwarg(value) for key, value in settings_dict.items()} try: return filter_cls(**kwargs) except Exception as err: raise FilestorageConfigError( - f'Pyramid settings bad args for {key_prefix}: {err}' + f"Pyramid settings bad args for {key_prefix}: {err}" ) @@ -198,21 +194,19 @@ def decode_kwarg(value) -> Any: try: value = value.pop(None) except KeyError: - raise ValueError(f'decode_kwarg got an invalid dict: {value!r}') + raise ValueError(f"decode_kwarg got an invalid dict: {value!r}") return decode_kwarg(value) if not isinstance(value, str): - raise ValueError(f'decode_kwarg expected a str, got: {value!r}') - if (value.startswith('[') and value.endswith(']')) or ( - value.startswith('{') and value.endswith('}') + raise ValueError(f"decode_kwarg expected a str, got: {value!r}") + if (value.startswith("[") and value.endswith("]")) or ( + value.startswith("{") and value.endswith("}") ): # handle lists, sets and dicts try: return eval(value, {}, {}) except Exception as err: - raise FilestorageConfigError( - f'Pyramid settings bad value {value}: {err}' - ) + raise FilestorageConfigError(f"Pyramid settings bad value {value}: {err}") if value.isdigit(): return int(value) @@ -258,7 +252,7 @@ def get_keys_from(prefix: str, settings: Dict) -> Dict: """Get nested dicts from a dictionary of . separated keys""" result: Dict = {} for key, value in settings.items(): - if key.startswith(f'{prefix}.') or key.startswith(f'{prefix}['): + if key.startswith(f"{prefix}.") or key.startswith(f"{prefix}["): set_nested_value(key, value, result) return result.get(prefix, {}) @@ -269,8 +263,8 @@ def set_nested_value(key: str, value: str, result: Dict) -> Dict: sub = result # Add a . 
to each [ to make the parsing delimiter consistent: # 'foo[0][1]' to 'foo.[0].[1]' - key = key.replace('[', '.[') - for part in key.split('.'): + key = key.replace("[", ".[") + for part in key.split("."): sub = sub.setdefault(part, {}) sub[None] = value.strip() return result diff --git a/filestorage/config_utils.pyi b/filestorage/config_utils.pyi index dfa2935..1dcde52 100644 --- a/filestorage/config_utils.pyi +++ b/filestorage/config_utils.pyi @@ -16,12 +16,8 @@ def setup_from_settings( def setup_store( store: StorageContainer, key_prefix: str, name: str, settings_dict: Dict ) -> Any: ... -def get_handler( - key_prefix: str, settings_dict: Dict -) -> StorageHandlerBase: ... -def get_all_filters( - key_prefix: str, settings_dict: Dict -) -> List[FilterBase]: ... +def get_handler(key_prefix: str, settings_dict: Dict) -> StorageHandlerBase: ... +def get_all_filters(key_prefix: str, settings_dict: Dict) -> List[FilterBase]: ... def get_filter(key_prefix: str, settings_dict: Dict) -> FilterBase: ... def unquote(value: str) -> str: ... def decode_kwarg(value: Any) -> Any: ... diff --git a/filestorage/file_item.py b/filestorage/file_item.py index b288613..c0da65e 100644 --- a/filestorage/file_item.py +++ b/filestorage/file_item.py @@ -6,7 +6,7 @@ class SyncReader: - def __init__(self, item: 'FileItem'): + def __init__(self, item: "FileItem"): self.data = item.data self.filename = item.filename if self.data is not None: @@ -20,14 +20,14 @@ def seek(self, offset: int, whence: int = 0) -> int: def read(self, size: int = -1) -> bytes: if self.data is None: - return b'' + return b"" return self._reader(size) closed = False class AsyncReader: - def __init__(self, item: 'FileItem'): + def __init__(self, item: "FileItem"): self.data = item.data self.filename = item.filename if self.data is not None: @@ -41,7 +41,7 @@ async def seek(self, offset: int, whence: int = 0) -> int: async def read(self, size: int = -1) -> bytes: if self.data is None: - return b'' + return b"" return await self._reader(size) closed = False @@ -53,21 +53,18 @@ class FileItem(NamedTuple): data: Optional[BinaryIO] = None media_type: Optional[str] = None # Formerly known as MIME-type - def copy(self, **kwargs) -> 'FileItem': - filename = kwargs.get('filename', self.filename) - path = kwargs.get('path', self.path) - data = kwargs.get('data', self.data) - media_type = kwargs.get('media_type', self.media_type) + def copy(self, **kwargs) -> "FileItem": + filename = kwargs.get("filename", self.filename) + path = kwargs.get("path", self.path) + data = kwargs.get("data", self.data) + media_type = kwargs.get("media_type", self.media_type) - return FileItem( - filename=filename, path=path, data=data, media_type=media_type - ) + return FileItem(filename=filename, path=path, data=data, media_type=media_type) def __repr__(self) -> str: - has_data = 'no data' if self.data is None else 'with data' + has_data = "no data" if self.data is None else "with data" return ( - f'' + f"" ) @property @@ -77,7 +74,7 @@ def has_data(self) -> bool: @property def url_path(self) -> str: """A relative URL path string for this path/filename""" - return '/'.join(self.path + (self.filename,)) + return "/".join(self.path + (self.filename,)) @property def fs_path(self) -> str: diff --git a/filestorage/filter_base.py b/filestorage/filter_base.py index fb63350..f0350f7 100644 --- a/filestorage/filter_base.py +++ b/filestorage/filter_base.py @@ -25,8 +25,7 @@ async def async_call(self, item: FileItem) -> FileItem: """Apply the filter asynchronously""" if not 
self.async_ok: raise FilestorageConfigError( - f'The {self.__class__.__name__} filter cannot be used ' - 'asynchronously' + f"The {self.__class__.__name__} filter cannot be used " "asynchronously" ) if iscoroutinefunction(self._apply): diff --git a/filestorage/filters/__init__.py b/filestorage/filters/__init__.py index 59f7efc..3bb0917 100644 --- a/filestorage/filters/__init__.py +++ b/filestorage/filters/__init__.py @@ -1,4 +1,4 @@ from .randomize_filename import RandomizeFilename from .valid_extensions import ValidateExtension -__all__ = ['RandomizeFilename', 'ValidateExtension'] +__all__ = ["RandomizeFilename", "ValidateExtension"] diff --git a/filestorage/filters/valid_extensions.py b/filestorage/filters/valid_extensions.py index b4cdfad..f52a47e 100644 --- a/filestorage/filters/valid_extensions.py +++ b/filestorage/filters/valid_extensions.py @@ -13,9 +13,7 @@ class ValidateExtension(FilterBase): async_ok = True def __init__(self, extensions: List[str]): - self.extensions = set( - ext.lower().strip(os.path.extsep) for ext in extensions - ) + self.extensions = set(ext.lower().strip(os.path.extsep) for ext in extensions) def extension_allowed(self, ext: str) -> bool: """Determine if the provided file extension is allowed.""" diff --git a/filestorage/handler_base.py b/filestorage/handler_base.py index 5c877eb..cc6e39a 100644 --- a/filestorage/handler_base.py +++ b/filestorage/handler_base.py @@ -50,7 +50,7 @@ def __init__( @property def base_url(self) -> str: - return self._base_url or '' + return self._base_url or "" @property def path(self) -> Tuple[str, ...]: @@ -75,7 +75,7 @@ def validate(self) -> Optional[Awaitable]: if inspect.isclass(filter_): filter_name: str = filter_.__name__ # type: ignore raise FilestorageConfigError( - f'Filter {filter_name} is a class, not an instance. ' + f"Filter {filter_name} is a class, not an instance. " f'Did you mean to use "filters=[{filter_name}()]" instead?' ) result = filter_.validate() @@ -116,15 +116,15 @@ def sanitize_filename(cls, filename: str) -> str: """Perform a quick pass to sanitize the filename""" # Strip out any . prefix - which should eliminate attempts to write # special Unix files - filename = filename.lstrip('.') + filename = filename.lstrip(".") # Strip out any non-alpha, . or _ characters. 
def clean_char(c: str) -> str: - if c.isalnum() or c in ('.', '_'): + if c.isalnum() or c in (".", "_"): return c - return '_' + return "_" - filename = ''.join(clean_char(c) for c in filename) + filename = "".join(clean_char(c) for c in filename) return filename @@ -231,14 +231,12 @@ def save_file(self, filename: str, data: BinaryIO) -> str: return self._save(item) - def save_field(self, field: 'cgi.FieldStorage') -> str: + def save_field(self, field: "cgi.FieldStorage") -> str: """Save a file stored in a CGI field.""" if not field.file: - raise RuntimeError('No file data in the field') + raise RuntimeError("No file data in the field") - return self.save_file( - field.filename or 'file', cast(BinaryIO, field.file) - ) + return self.save_file(field.filename or "file", cast(BinaryIO, field.file)) def save_data(self, filename: str, data: bytes) -> str: """Save a file from the byte data provided.""" @@ -263,8 +261,8 @@ def validate(self) -> Optional[Awaitable]: for filter_ in self.filters: if not filter_.async_ok: raise FilestorageConfigError( - f'Filter {filter_} cannot be used in ' - f'asynchronous storage handler {self}' + f"Filter {filter_} cannot be used in " + f"asynchronous storage handler {self}" ) return super().validate() @@ -275,7 +273,7 @@ async def async_exists(self, filename: str) -> bool: def _exists(self, item: FileItem) -> bool: if not self.allow_sync_methods: - raise RuntimeError('Sync exists method not allowed') + raise RuntimeError("Sync exists method not allowed") return utils.async_to_sync(self._async_exists)(item) @abstractmethod @@ -292,7 +290,7 @@ async def async_get_size(self, filename: str) -> int: def _get_size(self, item: FileItem) -> int: if not self.allow_sync_methods: - raise RuntimeError('Sync get_size method not allowed') + raise RuntimeError("Sync get_size method not allowed") return utils.async_to_sync(self._async_get_size)(item) @abstractmethod @@ -311,7 +309,7 @@ async def async_get_accessed_time(self, filename: str) -> datetime: def _get_accessed_time(self, item: FileItem) -> datetime: if not self.allow_sync_methods: - raise RuntimeError('Sync get_accessed_time method not allowed') + raise RuntimeError("Sync get_accessed_time method not allowed") return utils.async_to_sync(self._async_get_accessed_time)(item) @abstractmethod @@ -330,7 +328,7 @@ async def async_get_created_time(self, filename: str) -> datetime: def _get_created_time(self, item: FileItem) -> datetime: if not self.allow_sync_methods: - raise RuntimeError('Sync get_created_time method not allowed') + raise RuntimeError("Sync get_created_time method not allowed") return utils.async_to_sync(self._async_get_created_time)(item) @abstractmethod @@ -349,7 +347,7 @@ async def async_get_modified_time(self, filename: str) -> datetime: def _get_modified_time(self, item: FileItem) -> datetime: if not self.allow_sync_methods: - raise RuntimeError('Sync get_modified_time method not allowed') + raise RuntimeError("Sync get_modified_time method not allowed") return utils.async_to_sync(self._async_get_modified_time)(item) @abstractmethod @@ -368,7 +366,7 @@ async def async_delete(self, filename: str) -> None: def _delete(self, item: FileItem) -> None: if not self.allow_sync_methods: - raise RuntimeError('Sync delete method not allowed') + raise RuntimeError("Sync delete method not allowed") utils.async_to_sync(self._async_delete)(item) @abstractmethod @@ -380,7 +378,7 @@ async def _async_delete(self, item: FileItem) -> None: def _save(self, item: FileItem) -> str: if not self.allow_sync_methods: - raise 
RuntimeError('Sync save method not allowed') + raise RuntimeError("Sync save method not allowed") return utils.async_to_sync(self._async_save)(item) @abstractmethod @@ -406,13 +404,13 @@ async def async_save_file(self, filename: str, data: BinaryIO) -> str: filename = new_filename return filename - async def async_save_field(self, field: 'cgi.FieldStorage') -> str: + async def async_save_field(self, field: "cgi.FieldStorage") -> str: """Save a file stored in a CGI field.""" if not field.file: - raise RuntimeError('No file data in the field') + raise RuntimeError("No file data in the field") return await self.async_save_file( - field.filename or 'file', cast(BinaryIO, field.file) + field.filename or "file", cast(BinaryIO, field.file) ) async def async_save_data(self, filename: str, data: bytes) -> str: @@ -440,11 +438,11 @@ def filters(self) -> List[FilterBase]: def base_url(self) -> str: return self._store.sync_handler.base_url - def __init__(self, store: 'StorageContainer', path: Tuple[str, ...]): + def __init__(self, store: "StorageContainer", path: Tuple[str, ...]): super().__init__(path=path) self._store = store - def subfolder(self, folder_name: str) -> 'Folder': + def subfolder(self, folder_name: str) -> "Folder": """Get a subfolder for this folder""" return Folder(store=self._store, path=self._path + (folder_name,)) @@ -455,7 +453,7 @@ def __eq__(self, other) -> bool: and (self._path == other._path) ) - def __truediv__(self, other: str) -> 'Folder': + def __truediv__(self, other: str) -> "Folder": """Get a new subfolder when using the divide operator. Allows building a path with path-looking code: diff --git a/filestorage/handler_base.pyi b/filestorage/handler_base.pyi index 15e5acc..68254f8 100644 --- a/filestorage/handler_base.pyi +++ b/filestorage/handler_base.pyi @@ -45,9 +45,7 @@ class StorageHandlerBase(ABC, metaclass=abc.ABCMeta): class AsyncStorageHandlerBase(StorageHandlerBase, ABC, metaclass=abc.ABCMeta): allow_sync_methods: Any = ... - def __init__( - self, allow_sync_methods: bool = ..., **kwargs: Any - ) -> None: ... + def __init__(self, allow_sync_methods: bool = ..., **kwargs: Any) -> None: ... def validate(self) -> Optional[Awaitable]: ... async def async_exists(self, filename: str) -> bool: ... async def async_get_size(self, filename: str) -> int: ... @@ -66,9 +64,7 @@ class Folder(AsyncStorageHandlerBase): def filters(self) -> List[FilterBase]: ... @property def base_url(self) -> str: ... - def __init__( - self, store: StorageContainer, path: Tuple[str, ...] - ) -> None: ... + def __init__(self, store: StorageContainer, path: Tuple[str, ...]) -> None: ... def subfolder(self, folder_name: str) -> Folder: ... def __eq__(self, other: Any) -> bool: ... def __truediv__(self, other: str) -> Folder: ... 
diff --git a/filestorage/handlers/__init__.py b/filestorage/handlers/__init__.py index 70c1353..7b45afb 100644 --- a/filestorage/handlers/__init__.py +++ b/filestorage/handlers/__init__.py @@ -4,9 +4,9 @@ __all__ = [ - 'LocalFileHandler', - 'AsyncLocalFileHandler', - 'DummyHandler', - 'AsyncDummyHandler', - 'S3Handler', + "LocalFileHandler", + "AsyncLocalFileHandler", + "DummyHandler", + "AsyncDummyHandler", + "S3Handler", ] diff --git a/filestorage/handlers/_template.py b/filestorage/handlers/_template.py index 5361e22..98d4d83 100644 --- a/filestorage/handlers/_template.py +++ b/filestorage/handlers/_template.py @@ -25,7 +25,7 @@ def _validate(self) -> Optional[Awaitable]: # If the problem is with the configuration: # raise FilestorageConfigError('describe the problem') # Can be an async method if necessary and will be awaited on properly. - raise FilestorageConfigError('This is a template, not a real handler!') + raise FilestorageConfigError("This is a template, not a real handler!") # async def _async_exists(self, item: FileItem) -> bool: def _exists(self, item: FileItem) -> bool: diff --git a/filestorage/handlers/dummy.py b/filestorage/handlers/dummy.py index 3e22230..fd102d4 100644 --- a/filestorage/handlers/dummy.py +++ b/filestorage/handlers/dummy.py @@ -13,17 +13,17 @@ def __init__(self, **kwargs): # a named tuple containing the contents of the file, the access # time, the creation time, and the time of last modification. self._file = NamedTuple( - 'File', + "File", [ - ('contents', bytes), - ('atime', datetime), - ('ctime', datetime), - ('mtime', datetime), + ("contents", bytes), + ("atime", datetime), + ("ctime", datetime), + ("mtime", datetime), ], ) self.files: Dict[str, NamedTuple] = {} self.last_save: Optional[FileItem] = None - self.last_save_contents: bytes = b'' + self.last_save_contents: bytes = b"" self.last_delete: Optional[FileItem] = None self.validated = False @@ -45,9 +45,7 @@ def _get_size(self, item: FileItem) -> int: """Indicate if the given file size is equal to the anticipated size.""" return len(self.files[item.url_path].contents) - def assert_get_size( - self, filename: str, path: Tuple[str, ...], size: int - ) -> None: + def assert_get_size(self, filename: str, path: Tuple[str, ...], size: int) -> None: """Assert that given file size is equal to the anticipated size.""" assert self._get_size(FileItem(filename=filename, path=path)) == size @@ -59,10 +57,7 @@ def assert_get_accessed_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file access time is equal to the anticipated time.""" - assert ( - self._get_accessed_time(FileItem(filename=filename, path=path)) - == date - ) + assert self._get_accessed_time(FileItem(filename=filename, path=path)) == date def _get_created_time(self, item: FileItem) -> datetime: """Indicate if the given file creation time is equal to the anticipated time.""" @@ -72,10 +67,7 @@ def assert_get_created_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file creation time is equal to the anticipated time.""" - assert ( - self._get_created_time(FileItem(filename=filename, path=path)) - == date - ) + assert self._get_created_time(FileItem(filename=filename, path=path)) == date def _get_modified_time(self, item: FileItem) -> datetime: """Indicate if the given file modification time is equal to the anticipated time.""" @@ -85,10 +77,7 @@ def assert_get_modified_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert 
that given file modification time is equal to the anticipated time.""" - assert ( - self._get_modified_time(FileItem(filename=filename, path=path)) - == date - ) + assert self._get_modified_time(FileItem(filename=filename, path=path)) == date def _save(self, item: FileItem) -> str: """Save the provided file to the given filename in the storage @@ -141,9 +130,7 @@ async def _async_get_size(self, item: FileItem) -> int: """Indicate if the given file size is equal to the anticipated size.""" return len(self.files[item.url_path].contents) - def assert_get_size( - self, filename: str, path: Tuple[str, ...], size: int - ) -> None: + def assert_get_size(self, filename: str, path: Tuple[str, ...], size: int) -> None: """Assert that given file size is equal to the anticipated size.""" assert self._get_size(FileItem(filename=filename, path=path)) == size @@ -155,10 +142,7 @@ def assert_get_accessed_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file access time is equal to the anticipated time.""" - assert ( - self._get_accessed_time(FileItem(filename=filename, path=path)) - == date - ) + assert self._get_accessed_time(FileItem(filename=filename, path=path)) == date async def _async_get_created_time(self, item: FileItem) -> bool: """Indicate if the given file creation time is equal to the anticipated time.""" @@ -168,10 +152,7 @@ def assert_get_created_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file creation time is equal to the anticipated time.""" - assert ( - self._get_created_time(FileItem(filename=filename, path=path)) - == date - ) + assert self._get_created_time(FileItem(filename=filename, path=path)) == date async def _async_get_modified_time(self, item: FileItem) -> bool: """Indicate if the given file modification time is equal to the anticipated time.""" @@ -181,10 +162,7 @@ def assert_get_modified_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file modification time is equal to the anticipated time.""" - assert ( - self._get_modified_time(FileItem(filename=filename, path=path)) - == date - ) + assert self._get_modified_time(FileItem(filename=filename, path=path)) == date async def _async_save(self, item: FileItem) -> str: """Save the provided file to the given filename in the storage diff --git a/filestorage/handlers/file.py b/filestorage/handlers/file.py index b33be82..770700b 100644 --- a/filestorage/handlers/file.py +++ b/filestorage/handlers/file.py @@ -38,9 +38,9 @@ def local_path(self, item: FileItem) -> str: def make_dir(self, item: Optional[FileItem] = None): """Ensures the provided path exists.""" if not item: - item = self.get_item('') + item = self.get_item("") else: - item = item.copy(filename='') + item = item.copy(filename="") local_path = self.local_path(item) if local_path in self._created_dirs: @@ -54,11 +54,11 @@ def validate(self) -> None: if self.auto_make_dir: self.make_dir() else: - item = self.get_item('') + item = self.get_item("") if not self._exists(item): local_path = self.local_path(item) raise FilestorageConfigError( - f'Configured directory {local_path!r} does not exist' + f"Configured directory {local_path!r} does not exist" ) def _exists(self, item: FileItem) -> bool: @@ -88,13 +88,13 @@ def _delete(self, item: FileItem) -> None: def _save(self, item: FileItem) -> str: if item.data is None: - raise RuntimeError('No data for file {item.filename!r}') + raise RuntimeError("No data for file {item.filename!r}") 
if self.auto_make_dir: self.make_dir(item) item = self.resolve_filename(item) - with open(self.local_path(item), 'wb') as destination: + with open(self.local_path(item), "wb") as destination: with item as f: while True: chunk = f.read(self.chunk_size) @@ -111,14 +111,12 @@ def resolve_filename(self, item: FileItem) -> FileItem: basename, ext = os.path.splitext(item.filename) for counter in range(1, 1000000): - filename = f'{basename}-{counter}{ext}' + filename = f"{basename}-{counter}{ext}" item = item.copy(filename=filename) if not self._exists(item): return item else: - raise RuntimeError( - f'Cannot get unique name for file {basename}{ext}' - ) + raise RuntimeError(f"Cannot get unique name for file {basename}{ext}") def os_wrap(fn: utils.SyncCallable) -> utils.AsyncCallable: @@ -129,7 +127,7 @@ def os_wrap(fn: utils.SyncCallable) -> utils.AsyncCallable: def disabled_method(*args, **kwargs): - raise RuntimeError('method not allowed') + raise RuntimeError("method not allowed") class AsyncLocalFileHandler(LocalFileHandler, AsyncStorageHandlerBase): @@ -138,9 +136,9 @@ class AsyncLocalFileHandler(LocalFileHandler, AsyncStorageHandlerBase): async def async_make_dir(self, item: Optional[FileItem] = None): """Ensures the provided path exists.""" if not item: - item = self.get_item('') + item = self.get_item("") else: - item = item.copy(filename='') + item = item.copy(filename="") local_path = self.local_path(item) if local_path in self._created_dirs: @@ -151,8 +149,8 @@ async def async_make_dir(self, item: Optional[FileItem] = None): def validate(self) -> None: if aiofiles is None: raise FilestorageConfigError( - 'The aiofiles library is required for using ' - f'{self.__class__.__name__}' + "The aiofiles library is required for using " + f"{self.__class__.__name__}" ) # Ensure the sync methods can operate while validating @@ -193,13 +191,13 @@ async def _async_delete(self, item: FileItem) -> None: async def _async_save(self, item: FileItem) -> str: if item.data is None: - raise RuntimeError('No data for file {item.filename!r}') + raise RuntimeError("No data for file {item.filename!r}") if self.auto_make_dir: await self.async_make_dir(item) item = await self.async_resolve_filename(item) - open_context = aiofiles.open(self.local_path(item), 'wb') + open_context = aiofiles.open(self.local_path(item), "wb") async with open_context as destination: # type: ignore async with item as f: while True: @@ -217,46 +215,44 @@ async def async_resolve_filename(self, item: FileItem) -> FileItem: basename, ext = os.path.splitext(item.filename) for counter in range(1, 1000000): - filename = f'{basename}-{counter}{ext}' + filename = f"{basename}-{counter}{ext}" item = item.copy(filename=filename) if not await self._async_exists(item): return item else: - raise RuntimeError( - f'Cannot get unique name for file {basename}{ext}' - ) + raise RuntimeError(f"Cannot get unique name for file {basename}{ext}") def _save(self, item: FileItem) -> str: if not self.allow_sync_methods: - raise RuntimeError('Sync save method not allowed') + raise RuntimeError("Sync save method not allowed") return super()._save(item) def _exists(self, item: FileItem) -> bool: if not self.allow_sync_methods: - raise RuntimeError('Sync exists method not allowed') + raise RuntimeError("Sync exists method not allowed") return super()._exists(item) def _get_size(self, item: FileItem) -> int: if not self.allow_sync_methods: - raise RuntimeError('Sync get_size method not allowed') + raise RuntimeError("Sync get_size method not allowed") return 
super()._get_size(item) def _get_accessed_time(self, item: FileItem) -> datetime: if not self.allow_sync_methods: - raise RuntimeError('Sync get_accessed_time method not allowed') + raise RuntimeError("Sync get_accessed_time method not allowed") return super()._get_accessed_time(item) def _get_created_time(self, item: FileItem) -> datetime: if not self.allow_sync_methods: - raise RuntimeError('Sync get_created_time method not allowed') + raise RuntimeError("Sync get_created_time method not allowed") return super()._get_created_time(item) def _get_modified_time(self, item: FileItem) -> datetime: if not self.allow_sync_methods: - raise RuntimeError('Sync get_modified_time method not allowed') + raise RuntimeError("Sync get_modified_time method not allowed") return super()._get_modified_time(item) def _delete(self, item: FileItem) -> None: if not self.allow_sync_methods: - raise RuntimeError('Sync delete method not allowed') + raise RuntimeError("Sync delete method not allowed") super()._delete(item) diff --git a/filestorage/handlers/s3.py b/filestorage/handlers/s3.py index 13f4d66..eb9b864 100644 --- a/filestorage/handlers/s3.py +++ b/filestorage/handlers/s3.py @@ -27,29 +27,27 @@ class AioBotoS3ResourceContext: - async def __aenter__(self): - ... + async def __aenter__(self): ... - async def __aexit__(self, exc_type: str, exc: Exception, tb): - ... + async def __aexit__(self, exc_type: str, exc: Exception, tb): ... if Literal is not None: # Python 3.8 + TypeACL = Literal[ - 'private', - 'public-read', - 'public-read-write', - 'authenticated-read', - 'aws-exec-read', - 'bucket-owner-read', - 'bucket-owner-full-control', - 'log-delivery-write', + "private", + "public-read", + "public-read-write", + "authenticated-read", + "aws-exec-read", + "bucket-owner-read", + "bucket-owner-full-control", + "log-delivery-write", ] # https://boto3.amazonaws.com/v1/documentation/api/1.9.42/guide/s3.html # #changing-the-addressing-style - TypeAddressingStyle = Literal[None, 'auto', 'path', 'virtual'] + TypeAddressingStyle = Literal[None, "auto", "path", "virtual"] else: # Python 3.7 TypeACL = str # type: ignore @@ -62,7 +60,7 @@ class S3Handler(AsyncStorageHandlerBase): def __init__( self, bucket_name: str, - acl: TypeACL = 'public-read', + acl: TypeACL = "public-read", profile_name: Optional[str] = None, aws_access_key_id: Optional[str] = None, aws_secret_access_key: Optional[str] = None, @@ -87,23 +85,21 @@ def __init__( # parameters passed to the AioConfig object self.aio_config_params = { - 'connect_timeout': connect_timeout, - 'read_timeout': read_timeout, - 'connector_args': { - 'keepalive_timeout': keepalive_timeout, + "connect_timeout": connect_timeout, + "read_timeout": read_timeout, + "connector_args": { + "keepalive_timeout": keepalive_timeout, }, - 'retries': { - 'max_attempts': num_retries, + "retries": { + "max_attempts": num_retries, }, } if addressing_style: - self.aio_config_params['s3'] = { - 'addressing_style': addressing_style - } + self.aio_config_params["s3"] = {"addressing_style": addressing_style} if region_name: - self.aio_config_params['region_name'] = region_name + self.aio_config_params["region_name"] = region_name self.__memoized_conn_options = None @@ -113,9 +109,7 @@ def __conn_options(self): if self.__memoized_conn_options: return self.__memoized_conn_options - self.__memoized_conn_options = { - 'config': AioConfig(**self.aio_config_params) - } + self.__memoized_conn_options = {"config": AioConfig(**self.aio_config_params)} # This could be blank if the dev wants to use the 
local auth mechanisms # for AWS - either environment variables: @@ -130,36 +124,32 @@ def __conn_options(self): if self.aws_access_key_id: self.__memoized_conn_options.update( { - 'aws_access_key_id': str(self.aws_access_key_id), - 'aws_secret_access_key': str(self.aws_secret_access_key), + "aws_access_key_id": str(self.aws_access_key_id), + "aws_secret_access_key": str(self.aws_secret_access_key), } ) # Not well hidden, but might as well make it less visible - self.aws_secret_access_key = '(hidden)' - self.aws_access_key_id = '(hidden)' + self.aws_secret_access_key = "(hidden)" + self.aws_access_key_id = "(hidden)" if self.aws_session_token: - self.__memoized_conn_options['aws_session_token'] = str( + self.__memoized_conn_options["aws_session_token"] = str( self.aws_session_token ) - self.aws_session_token = '(hidden)' + self.aws_session_token = "(hidden)" if self.profile_name: - self.__memoized_conn_options['profile_name'] = str( - self.profile_name - ) + self.__memoized_conn_options["profile_name"] = str(self.profile_name) # The endpoint_url isn't part of the configuration. if self.host_url: - self.__memoized_conn_options['endpoint_url'] = str(self.host_url) + self.__memoized_conn_options["endpoint_url"] = str(self.host_url) return self.__memoized_conn_options async def _validate(self) -> Optional[Awaitable]: """Perform any setup or validation.""" if aioboto3 is None: - raise FilestorageConfigError( - 'aioboto3 library required but not installed.' - ) + raise FilestorageConfigError("aioboto3 library required but not installed.") # Call this in order to populate the options self.__conn_options @@ -168,8 +158,8 @@ async def _validate(self) -> Optional[Awaitable]: async def test_credentials(self): """Perform a read, check, delete set of operations on a dummy file.""" item = self.get_item( - filename=f'__delete_me__{uuid.uuid4()}.txt', - data=BytesIO(b'Credential test run from the filestorage library.'), + filename=f"__delete_me__{uuid.uuid4()}.txt", + data=BytesIO(b"Credential test run from the filestorage library."), ) async with self.resource as s3: filename = await self._async_save(item, s3) @@ -178,7 +168,7 @@ async def test_credentials(self): await self._async_delete(item, s3) @property - def resource(self) -> 'AioBotoS3ResourceContext': + def resource(self) -> "AioBotoS3ResourceContext": """Provide a context manager for accessing the S3 resources. 
async with handler.resource as s3: @@ -189,7 +179,7 @@ def resource(self) -> 'AioBotoS3ResourceContext': async with handler.resource as s3: client = s3.meta.client """ - return aioboto3.resource('s3', **self.__conn_options) + return aioboto3.resource("s3", **self.__conn_options) async def get_bucket(self, resource): return await resource.Bucket(self.bucket_name) # type: ignore @@ -202,11 +192,9 @@ async def _async_exists(self, item: FileItem, s3=None) -> bool: return await self._async_exists(item, s3) try: - await s3.meta.client.head_object( - Bucket=self.bucket_name, Key=item.url_path - ) + await s3.meta.client.head_object(Bucket=self.bucket_name, Key=item.url_path) except ClientError as err: - if int(err.response.get('Error', {}).get('Code')) == 404: + if int(err.response.get("Error", {}).get("Code")) == 404: return False raise return True @@ -221,25 +209,19 @@ async def _async_get_size(self, item: FileItem, s3=None) -> int: Bucket=self.bucket_name, Key=item.url_path ) - return int(head['ContentLength']) + return int(head["ContentLength"]) - async def _async_get_accessed_time( - self, item: FileItem, s3=None - ) -> datetime: + async def _async_get_accessed_time(self, item: FileItem, s3=None) -> datetime: raise NotImplementedError( - 'get_accessed_time is not supported with the S3 handler' + "get_accessed_time is not supported with the S3 handler" ) - async def _async_get_created_time( - self, item: FileItem, s3=None - ) -> datetime: + async def _async_get_created_time(self, item: FileItem, s3=None) -> datetime: raise NotImplementedError( - 'get_created_time is not supported with the S3 handler' + "get_created_time is not supported with the S3 handler" ) - async def _async_get_modified_time( - self, item: FileItem, s3=None - ) -> datetime: + async def _async_get_modified_time(self, item: FileItem, s3=None) -> datetime: if s3 is None: # If not called with the s3 context, do it again. async with self.resource as s3: @@ -249,13 +231,13 @@ async def _async_get_modified_time( Bucket=self.bucket_name, Key=item.url_path ) - return head['LastModified'] + return head["LastModified"] async def _async_save(self, item: FileItem, s3=None) -> str: """Save the provided file to the given filename in the storage container. Returns the name of the file saved. """ - extra = {'ACL': self.acl, 'ContentType': item.content_type} + extra = {"ACL": self.acl, "ContentType": item.content_type} if s3 is None: # If not called with the s3 context, do it again. diff --git a/filestorage/handlers/s3.pyi b/filestorage/handlers/s3.pyi index d3b2347..6b71eae 100644 --- a/filestorage/handlers/s3.pyi +++ b/filestorage/handlers/s3.pyi @@ -9,9 +9,7 @@ from typing import Any, Optional class AioBotoS3ResourceContext: async def __aenter__(self) -> None: ... - async def __aexit__( - self, exc_type: str, exc: Exception, tb: Any - ) -> Any: ... + async def __aexit__(self, exc_type: str, exc: Exception, tb: Any) -> Any: ... TypeACL: Any TypeAddressingStyle: Any diff --git a/filestorage/pyramid_config.py b/filestorage/pyramid_config.py index 8edd636..bb524b0 100644 --- a/filestorage/pyramid_config.py +++ b/filestorage/pyramid_config.py @@ -17,7 +17,7 @@ def includeme(config): - store_prefix = 'store' + store_prefix = "store" # Make a copy of the settings so that each valid key can be consumed and # verified, and invalid ones can be complained about. 
settings = { @@ -27,15 +27,15 @@ def includeme(config): } # Check if we should be using the global store or a local pyramid_store - use_global_store = settings.pop('store.use_global', 'true') - if use_global_store.lower() not in ('true', 'false', 'yes', 'no', ''): + use_global_store = settings.pop("store.use_global", "true") + if use_global_store.lower() not in ("true", "false", "yes", "no", ""): raise RuntimeError( 'Unknown setting "store.use_global". ' - f'Expected true/false/yes/no, but got {use_global_store!r}' + f"Expected true/false/yes/no, but got {use_global_store!r}" ) pyramid_store = store - if use_global_store.lower() in ('false', 'no'): + if use_global_store.lower() in ("false", "no"): # If not using the global store, make a new store for get_store to use. pyramid_store = StorageContainer() @@ -43,7 +43,7 @@ def get_store(request): return pyramid_store # Add the store object to every request. - name = settings.pop(f'{store_prefix}.request_property', 'store') + name = settings.pop(f"{store_prefix}.request_property", "store") config.add_request_method(callable=get_store, name=name, property=True) if setup_from_settings(settings, pyramid_store): diff --git a/filestorage/storage_container.py b/filestorage/storage_container.py index 4049c97..a2c855c 100644 --- a/filestorage/storage_container.py +++ b/filestorage/storage_container.py @@ -19,14 +19,14 @@ class StorageContainer(Folder): def __init__( self, name: Optional[str] = None, - parent: Optional['StorageContainer'] = None, + parent: Optional["StorageContainer"] = None, ): # Init the folder superclass super().__init__(store=self, path=tuple()) self._name: Optional[str] = name self._parent = parent - self._children: Dict[str, 'StorageContainer'] = {} + self._children: Dict[str, "StorageContainer"] = {} self._handler: Optional[StorageHandlerBase] = None self._do_not_use = False self._finalized = False @@ -34,12 +34,12 @@ def __init__( @property def name(self) -> str: """Provide a name for this container based on its lineage""" - parent = '' + parent = "" if self._parent is not None: parent = self._parent.name if self._name is None: return parent - return f'{parent}[{repr(self._name)}]' + return f"{parent}[{repr(self._name)}]" @property def finalized(self) -> bool: @@ -53,9 +53,7 @@ def do_not_use(self) -> bool: def sync_handler(self) -> StorageHandlerBase: handler = self.handler if handler is None: - raise FilestorageConfigError( - f'No handler provided for store{self.name}' - ) + raise FilestorageConfigError(f"No handler provided for store{self.name}") return cast(StorageHandlerBase, handler) @property @@ -63,7 +61,7 @@ def async_handler(self) -> AsyncStorageHandlerBase: handler = self.handler if not isinstance(handler, AsyncStorageHandlerBase): raise FilestorageConfigError( - f'No async handler provided for store{self.name}' + f"No async handler provided for store{self.name}" ) return cast(AsyncStorageHandlerBase, handler) @@ -75,9 +73,7 @@ def handler( if self._do_not_use: return None if self._handler is None: - raise FilestorageConfigError( - f'No handler provided for store{self.name}' - ) + raise FilestorageConfigError(f"No handler provided for store{self.name}") return self._handler @handler.setter @@ -85,7 +81,7 @@ def handler(self, handler: Optional[StorageHandlerBase]) -> None: """Set the handler for this store""" if self._finalized: raise FilestorageConfigError( - f'Setting store{self.name}.handler: store already finalized!' + f"Setting store{self.name}.handler: store already finalized!" 
) if handler is None: self._handler = None @@ -94,8 +90,8 @@ def handler(self, handler: Optional[StorageHandlerBase]) -> None: if not isinstance(handler, StorageHandlerBase): raise FilestorageConfigError( - f'Setting store{self.name}.handler: ' - f'{handler!r} is not a StorageHandler' + f"Setting store{self.name}.handler: " + f"{handler!r} is not a StorageHandler" ) self._do_not_use = False # Inject the handler name @@ -111,9 +107,7 @@ async def async_finalize_config(self) -> None: return if self._handler is None: - raise FilestorageConfigError( - f'No handler provided for store{self.name}' - ) + raise FilestorageConfigError(f"No handler provided for store{self.name}") result = self._handler.validate() if iscoroutine(result) or isfuture(result): @@ -128,19 +122,17 @@ def finalize_config(self) -> None: event_loop = get_event_loop() if event_loop.is_running(): raise FilestorageConfigError( - 'Async event loop is already running. ' - 'Must await store.async_finalize_config() instead.' + "Async event loop is already running. " + "Must await store.async_finalize_config() instead." ) event_loop.run_until_complete(self.async_finalize_config()) - def __getitem__(self, key: str) -> 'StorageContainer': + def __getitem__(self, key: str) -> "StorageContainer": """Get or create a storage container as a lookup. The provided container will be lazily configured. """ if self._finalized and key not in self._children: raise FilestorageConfigError( - f'Getting store{self.name}[{key!r}]: store already finalized!' + f"Getting store{self.name}[{key!r}]: store already finalized!" ) - return self._children.setdefault( - key, StorageContainer(name=key, parent=self) - ) + return self._children.setdefault(key, StorageContainer(name=key, parent=self)) diff --git a/filestorage/utils.py b/filestorage/utils.py index dac380e..ad1f153 100644 --- a/filestorage/utils.py +++ b/filestorage/utils.py @@ -11,8 +11,8 @@ # asgiref doesn't yet have type info from asgiref import sync # type: ignore -T = TypeVar('T') -R = TypeVar('R') +T = TypeVar("T") +R = TypeVar("R") AsyncCallable = Callable[[T], Awaitable[R]] SyncCallable = Callable[[T], R] @@ -30,9 +30,7 @@ def async_to_sync(fn: AsyncCallable) -> SyncCallable: return cast(SyncCallable, sync.async_to_sync(fn)) -def any_to_async( - fn: MaybeAsyncCallable, thread_sensitive=True -) -> AsyncCallable: +def any_to_async(fn: MaybeAsyncCallable, thread_sensitive=True) -> AsyncCallable: if iscoroutinefunction(fn): return fn return sync_to_async(fn, thread_sensitive=thread_sensitive) diff --git a/filestorage/utils.pyi b/filestorage/utils.pyi index 326e66b..263843f 100644 --- a/filestorage/utils.pyi +++ b/filestorage/utils.pyi @@ -6,9 +6,7 @@ AsyncCallable = Callable[[T], Awaitable[R]] SyncCallable = Callable[[T], R] MaybeAsyncCallable = Union[SyncCallable, AsyncCallable] -def sync_to_async( - fn: SyncCallable, thread_sensitive: Any = ... -) -> AsyncCallable: ... +def sync_to_async(fn: SyncCallable, thread_sensitive: Any = ...) -> AsyncCallable: ... def async_to_sync(fn: AsyncCallable) -> SyncCallable: ... def any_to_async( fn: MaybeAsyncCallable, thread_sensitive: Any = ... 
diff --git a/setup.py b/setup.py index a1c6107..5cf707c 100644 --- a/setup.py +++ b/setup.py @@ -1,45 +1,45 @@ from setuptools import setup # type: ignore -with open('README.md', 'r', encoding='utf-8') as readme: +with open("README.md", "r", encoding="utf-8") as readme: long_description = readme.read() -with open('filestorage/VERSION', 'r', encoding='utf-8') as version_file: +with open("filestorage/VERSION", "r", encoding="utf-8") as version_file: version = version_file.read().strip() setup( - name='filestorage', + name="filestorage", version=version, - author='Ian Epperson', - author_email='ian@epperson.com', - description='Save uploaded files in configurable stores', + author="Ian Epperson", + author_email="ian@epperson.com", + description="Save uploaded files in configurable stores", long_description=long_description, - long_description_content_type='text/markdown', - url='https://github.com/ianepperson/filestorage', - packages=['filestorage', 'filestorage.filters', 'filestorage.handlers'], + long_description_content_type="text/markdown", + url="https://github.com/ianepperson/filestorage", + packages=["filestorage", "filestorage.filters", "filestorage.handlers"], classifiers=[ - 'Programming Language :: Python :: 3', - 'License :: OSI Approved :: MIT License', - 'Operating System :: OS Independent', + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", ], - python_requires='>=3.6', - platforms='any', + python_requires=">=3.6", + platforms="any", package_data={ - 'filestorage': ['*.pyi', 'py.typed', 'VERSION'], - 'filestorage.filters': ['*.pyi', 'py.typed'], - 'filestorage.handlers': ['*.pyi', 'py.typed'], + "filestorage": ["*.pyi", "py.typed", "VERSION"], + "filestorage.filters": ["*.pyi", "py.typed"], + "filestorage.handlers": ["*.pyi", "py.typed"], }, include_package_data=True, - install_requires=['asgiref'], + install_requires=["asgiref"], extras_require={ - 'aio_file': ['aiofiles'], - 's3': ['aioboto3'], - 'test': [ - 'pytest', - 'mock', - 'pytest-asyncio', - 'pytest-mock', - 'aioboto3', - 'aiofiles', + "aio_file": ["aiofiles"], + "s3": ["aioboto3"], + "test": [ + "pytest", + "mock", + "pytest-asyncio", + "pytest-mock", + "aioboto3", + "aiofiles", ], }, ) diff --git a/tests/filters/test_randomize_filename.py b/tests/filters/test_randomize_filename.py index fac960a..58bf1d7 100644 --- a/tests/filters/test_randomize_filename.py +++ b/tests/filters/test_randomize_filename.py @@ -8,13 +8,11 @@ @pytest.fixture def item(): - return FileItem( - filename='file.txt', path=('folder',), data=BytesIO(b'content') - ) + return FileItem(filename="file.txt", path=("folder",), data=BytesIO(b"content")) def with_spam(old_name): - return 'SPAM-' + old_name + '-SPAM' + return "SPAM-" + old_name + "-SPAM" def test_randomize_filename(item): @@ -23,9 +21,9 @@ def test_randomize_filename(item): result1 = filter._apply(item) result2 = filter._apply(item) - assert result1.filename != 'file.txt' + assert result1.filename != "file.txt" assert result1.filename != result2 - assert result1.filename.endswith('.txt') + assert result1.filename.endswith(".txt") # And nothing else is changed assert item.data == result1.data @@ -37,4 +35,4 @@ def test_custom_randomize_filename(item): result1 = filter._apply(item) - assert result1.filename == 'SPAM-file-SPAM.txt' + assert result1.filename == "SPAM-file-SPAM.txt" diff --git a/tests/filters/test_valid_extensions.py b/tests/filters/test_valid_extensions.py index c589b6b..0fe897d 100644 --- 
a/tests/filters/test_valid_extensions.py +++ b/tests/filters/test_valid_extensions.py @@ -9,13 +9,11 @@ @pytest.fixture def item(): - return FileItem( - filename='file.txt', path=('folder',), data=BytesIO(b'content') - ) + return FileItem(filename="file.txt", path=("folder",), data=BytesIO(b"content")) def test_valid_extension(item): - filter = ValidateExtension(extensions=['txt', 'html']) + filter = ValidateExtension(extensions=["txt", "html"]) result = filter._apply(item) @@ -24,7 +22,7 @@ def test_valid_extension(item): def test_invalid_extension(item): - filter = ValidateExtension(extensions=['png', 'jpg']) + filter = ValidateExtension(extensions=["png", "jpg"]) with pytest.raises(FileExtensionNotAllowed): filter._apply(item) diff --git a/tests/handlers/s3_mock.py b/tests/handlers/s3_mock.py index f3662c4..042d3ba 100644 --- a/tests/handlers/s3_mock.py +++ b/tests/handlers/s3_mock.py @@ -64,13 +64,13 @@ async def head_object(self, **kwargs): self._head_object_kwargs = kwargs if self._make_object_missing: raise botocore.exceptions.ClientError( - operation_name='head_object', + operation_name="head_object", error_response={ - 'Error': {'Code': '404'}, + "Error": {"Code": "404"}, }, ) # Perhaps there is a better way to mock this - return {'ContentLength': '8', 'LastModified': datetime(2015, 1, 1)} + return {"ContentLength": "8", "LastModified": datetime(2015, 1, 1)} class MockAsyncContext: diff --git a/tests/handlers/test_local_file.py b/tests/handlers/test_local_file.py index 7dbd840..2fc0422 100644 --- a/tests/handlers/test_local_file.py +++ b/tests/handlers/test_local_file.py @@ -30,12 +30,12 @@ def exists(directory: str, filename: str) -> bool: def get_contents(directory: str, filename: str) -> bytes: path = os.path.join(directory, filename) - with open(path, 'rb') as f: + with open(path, "rb") as f: return f.read() def test_auto_create_directory(directory): - directory = os.path.join(directory, 'folder', 'subfolder') + directory = os.path.join(directory, "folder", "subfolder") handler = LocalFileHandler(base_path=directory, auto_make_dir=True) assert not os.path.exists(directory) @@ -45,122 +45,116 @@ def test_auto_create_directory(directory): def test_error_when_no_directory(directory): - directory = os.path.join(directory, 'folder', 'subfolder') + directory = os.path.join(directory, "folder", "subfolder") handler = LocalFileHandler(base_path=directory) with pytest.raises(FilestorageConfigError) as err: handler.validate() - assert directory.rstrip('/').rstrip('\\') in str(err.value) - assert 'does not exist' in str(err.value) + assert directory.rstrip("/").rstrip("\\") in str(err.value) + assert "does not exist" in str(err.value) def test_local_file_handler_save(directory): handler = LocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') + handler.save_data(filename="test.txt", data=b"contents") - assert exists(directory, 'test.txt') - assert get_contents(directory, 'test.txt') == b'contents' + assert exists(directory, "test.txt") + assert get_contents(directory, "test.txt") == b"contents" def test_local_file_handler_try_save_subfolder(directory, store): store.handler = LocalFileHandler(base_path=directory, auto_make_dir=True) - handler = store / 'folder' / 'subfolder' + handler = store / "folder" / "subfolder" - handler.save_data(filename='test.txt', data=b'contents') + handler.save_data(filename="test.txt", data=b"contents") - directory = os.path.join(directory, 'folder', 'subfolder') - assert exists(directory, 'test.txt') - assert 
get_contents(directory, 'test.txt') == b'contents' + directory = os.path.join(directory, "folder", "subfolder") + assert exists(directory, "test.txt") + assert get_contents(directory, "test.txt") == b"contents" def test_local_file_save_same_filename(directory): handler = LocalFileHandler(base_path=directory) - first = handler.save_data(filename='test.txt', data=b'contents 1') - second = handler.save_data(filename='test.txt', data=b'contents 2') - third = handler.save_data(filename='test.txt', data=b'contents 3') + first = handler.save_data(filename="test.txt", data=b"contents 1") + second = handler.save_data(filename="test.txt", data=b"contents 2") + third = handler.save_data(filename="test.txt", data=b"contents 3") - assert first == 'test.txt' - assert second == 'test-1.txt' - assert third == 'test-2.txt' + assert first == "test.txt" + assert second == "test-1.txt" + assert third == "test-2.txt" assert exists(directory, first) assert exists(directory, second) assert exists(directory, third) - assert get_contents(directory, first) == b'contents 1' - assert get_contents(directory, second) == b'contents 2' - assert get_contents(directory, third) == b'contents 3' + assert get_contents(directory, first) == b"contents 1" + assert get_contents(directory, second) == b"contents 2" + assert get_contents(directory, third) == b"contents 3" def test_local_file_handler_exists(directory): handler = LocalFileHandler(base_path=directory) - assert not exists(directory, 'test.txt') + assert not exists(directory, "test.txt") - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") def test_local_file_handler_get_size(directory): handler = LocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') - assert handler.get_size('test.txt') == 8 + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") + assert handler.get_size("test.txt") == 8 def test_local_file_handler_get_accessed_time(directory): handler = LocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - atime = handler.get_accessed_time('test.txt') - assert atime == datetime.fromtimestamp( - os.path.getatime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + atime = handler.get_accessed_time("test.txt") + assert atime == datetime.fromtimestamp(os.path.getatime(handler.local_path(item))) def test_local_file_handler_get_created_time(directory): handler = LocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - ctime = handler.get_created_time('test.txt') - assert ctime == datetime.fromtimestamp( - os.path.getctime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + ctime = handler.get_created_time("test.txt") + assert ctime == datetime.fromtimestamp(os.path.getctime(handler.local_path(item))) def test_local_file_handler_get_modified_time(directory): handler = LocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', 
data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - mtime = handler.get_modified_time('test.txt') - assert mtime == datetime.fromtimestamp( - os.path.getmtime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + mtime = handler.get_modified_time("test.txt") + assert mtime == datetime.fromtimestamp(os.path.getmtime(handler.local_path(item))) def test_local_file_handler_delete(directory): handler = LocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - handler.delete(filename='test.txt') + handler.delete(filename="test.txt") - assert not exists(directory, 'test.txt') + assert not exists(directory, "test.txt") # Async tests # def test_async_auto_create_directory(directory): - directory = os.path.join(directory, 'folder', 'subfolder') + directory = os.path.join(directory, "folder", "subfolder") handler = AsyncLocalFileHandler(base_path=directory, auto_make_dir=True) assert not os.path.exists(directory) @@ -170,18 +164,18 @@ def test_async_auto_create_directory(directory): def test_async_error_when_no_directory(directory): - directory = os.path.join(directory, 'folder', 'subfolder') + directory = os.path.join(directory, "folder", "subfolder") handler = AsyncLocalFileHandler(base_path=directory) with pytest.raises(FilestorageConfigError) as err: handler.validate() - assert directory.rstrip('/').rstrip('\\') in str(err.value) - assert 'does not exist' in str(err.value) + assert directory.rstrip("/").rstrip("\\") in str(err.value) + assert "does not exist" in str(err.value) def test_async_validate_when_no_sync(directory): - directory = os.path.join(directory, 'folder', 'subfolder') + directory = os.path.join(directory, "folder", "subfolder") handler = AsyncLocalFileHandler( base_path=directory, allow_sync_methods=False, auto_make_dir=True ) @@ -196,273 +190,239 @@ def test_async_validate_when_no_sync(directory): async def test_async_local_file_handler_save(directory): handler = AsyncLocalFileHandler(base_path=directory) - await handler.async_save_data(filename='test.txt', data=b'contents') + await handler.async_save_data(filename="test.txt", data=b"contents") - assert exists(directory, 'test.txt') - assert get_contents(directory, 'test.txt') == b'contents' + assert exists(directory, "test.txt") + assert get_contents(directory, "test.txt") == b"contents" @pytest.mark.asyncio async def test_async_local_file_handler_exists(directory): handler = AsyncLocalFileHandler(base_path=directory) - assert not exists(directory, 'test.txt') - await handler.async_save_data(filename='test.txt', data=b'contents') + assert not exists(directory, "test.txt") + await handler.async_save_data(filename="test.txt", data=b"contents") - assert exists(directory, 'test.txt') + assert exists(directory, "test.txt") @pytest.mark.asyncio async def test_async_local_file_handler_get_size(directory): handler = AsyncLocalFileHandler(base_path=directory) - await handler.async_save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') - assert await handler.async_get_size('test.txt') == 8 + await handler.async_save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") + assert await handler.async_get_size("test.txt") == 8 @pytest.mark.asyncio 
async def test_async_local_file_handler_get_accessed_time(directory): handler = AsyncLocalFileHandler(base_path=directory) - await handler.async_save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + await handler.async_save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - atime = await handler.async_get_accessed_time('test.txt') - assert atime == datetime.fromtimestamp( - os.path.getatime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + atime = await handler.async_get_accessed_time("test.txt") + assert atime == datetime.fromtimestamp(os.path.getatime(handler.local_path(item))) @pytest.mark.asyncio async def test_async_local_file_handler_get_created_time(directory): handler = AsyncLocalFileHandler(base_path=directory) - await handler.async_save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + await handler.async_save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - ctime = await handler.async_get_created_time('test.txt') - assert ctime == datetime.fromtimestamp( - os.path.getctime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + ctime = await handler.async_get_created_time("test.txt") + assert ctime == datetime.fromtimestamp(os.path.getctime(handler.local_path(item))) @pytest.mark.asyncio async def test_async_local_file_handler_get_modified_time(directory): handler = AsyncLocalFileHandler(base_path=directory) - await handler.async_save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + await handler.async_save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - mtime = await handler.async_get_modified_time('test.txt') - assert mtime == datetime.fromtimestamp( - os.path.getmtime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + mtime = await handler.async_get_modified_time("test.txt") + assert mtime == datetime.fromtimestamp(os.path.getmtime(handler.local_path(item))) @pytest.mark.asyncio async def test_async_local_file_handler_delete(directory): handler = AsyncLocalFileHandler(base_path=directory) - await handler.async_save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + await handler.async_save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - await handler.async_delete(filename='test.txt') + await handler.async_delete(filename="test.txt") - assert not exists(directory, 'test.txt') + assert not exists(directory, "test.txt") @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_save(directory): handler = AsyncLocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') + handler.save_data(filename="test.txt", data=b"contents") - assert exists(directory, 'test.txt') - assert get_contents(directory, 'test.txt') == b'contents' + assert exists(directory, "test.txt") + assert get_contents(directory, "test.txt") == b"contents" @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_exists(directory): handler = AsyncLocalFileHandler(base_path=directory) - assert not exists(directory, 'test.txt') + assert not exists(directory, "test.txt") - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", 
data=b"contents") + assert exists(directory, "test.txt") @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_get_size(directory): handler = AsyncLocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') - assert handler.get_size('test.txt') == 8 + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") + assert handler.get_size("test.txt") == 8 @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_get_accessed_time(directory): handler = AsyncLocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - atime = handler.get_accessed_time('test.txt') - assert atime == datetime.fromtimestamp( - os.path.getatime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + atime = handler.get_accessed_time("test.txt") + assert atime == datetime.fromtimestamp(os.path.getatime(handler.local_path(item))) @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_get_created_time(directory): handler = AsyncLocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - ctime = handler.get_created_time('test.txt') - assert ctime == datetime.fromtimestamp( - os.path.getctime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + ctime = handler.get_created_time("test.txt") + assert ctime == datetime.fromtimestamp(os.path.getctime(handler.local_path(item))) @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_get_modified_time(directory): handler = AsyncLocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - item = handler.get_item('test.txt') - mtime = handler.get_modified_time('test.txt') - assert mtime == datetime.fromtimestamp( - os.path.getmtime(handler.local_path(item)) - ) + item = handler.get_item("test.txt") + mtime = handler.get_modified_time("test.txt") + assert mtime == datetime.fromtimestamp(os.path.getmtime(handler.local_path(item))) @pytest.mark.asyncio async def test_async_to_sync_local_file_handler_delete(directory): handler = AsyncLocalFileHandler(base_path=directory) - handler.save_data(filename='test.txt', data=b'contents') - assert exists(directory, 'test.txt') + handler.save_data(filename="test.txt", data=b"contents") + assert exists(directory, "test.txt") - handler.delete(filename='test.txt') + handler.delete(filename="test.txt") - assert not exists(directory, 'test.txt') + assert not exists(directory, "test.txt") @pytest.mark.asyncio async def test_async_local_file_handler_try_save_subfolder(directory, store): - store.handler = AsyncLocalFileHandler( - base_path=directory, auto_make_dir=True - ) - handler = store / 'folder' / 'subfolder' + store.handler = AsyncLocalFileHandler(base_path=directory, auto_make_dir=True) + handler = store / "folder" / "subfolder" - await handler.async_save_data(filename='test.txt', data=b'contents') + await handler.async_save_data(filename="test.txt", 
data=b"contents") - directory = os.path.join(directory, 'folder', 'subfolder') - assert exists(directory, 'test.txt') - assert get_contents(directory, 'test.txt') == b'contents' + directory = os.path.join(directory, "folder", "subfolder") + assert exists(directory, "test.txt") + assert get_contents(directory, "test.txt") == b"contents" @pytest.mark.asyncio async def test_async_local_file_save_same_filename(directory): handler = AsyncLocalFileHandler(base_path=directory) - first = await handler.async_save_data( - filename='test.txt', data=b'contents 1' - ) - second = await handler.async_save_data( - filename='test.txt', data=b'contents 2' - ) - third = await handler.async_save_data( - filename='test.txt', data=b'contents 3' - ) + first = await handler.async_save_data(filename="test.txt", data=b"contents 1") + second = await handler.async_save_data(filename="test.txt", data=b"contents 2") + third = await handler.async_save_data(filename="test.txt", data=b"contents 3") - assert first == 'test.txt' - assert second == 'test-1.txt' - assert third == 'test-2.txt' + assert first == "test.txt" + assert second == "test-1.txt" + assert third == "test-2.txt" assert exists(directory, first) assert exists(directory, second) assert exists(directory, third) - assert get_contents(directory, first) == b'contents 1' - assert get_contents(directory, second) == b'contents 2' - assert get_contents(directory, third) == b'contents 3' + assert get_contents(directory, first) == b"contents 1" + assert get_contents(directory, second) == b"contents 2" + assert get_contents(directory, third) == b"contents 3" def test_async_only_save(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.save_data(filename='test.txt', data=b'contents') + handler.save_data(filename="test.txt", data=b"contents") - assert str(err.value) == 'Sync save method not allowed' + assert str(err.value) == "Sync save method not allowed" def test_async_only_exists(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.exists(filename='test.txt') + handler.exists(filename="test.txt") - assert str(err.value) == 'Sync exists method not allowed' + assert str(err.value) == "Sync exists method not allowed" @pytest.mark.asyncio async def test_async_only_get_size(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.get_size(filename='test.txt') + handler.get_size(filename="test.txt") - assert str(err.value) == 'Sync get_size method not allowed' + assert str(err.value) == "Sync get_size method not allowed" @pytest.mark.asyncio async def test_async_only_get_accessed_time(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.get_accessed_time(filename='test.txt') + handler.get_accessed_time(filename="test.txt") - assert str(err.value) == 'Sync get_accessed_time method not allowed' + assert str(err.value) == "Sync get_accessed_time method not allowed" 
@pytest.mark.asyncio async def test_async_only_get_created_time(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.get_created_time(filename='test.txt') + handler.get_created_time(filename="test.txt") - assert str(err.value) == 'Sync get_created_time method not allowed' + assert str(err.value) == "Sync get_created_time method not allowed" @pytest.mark.asyncio async def test_async_only_get_modified_time(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.get_modified_time(filename='test.txt') + handler.get_modified_time(filename="test.txt") - assert str(err.value) == 'Sync get_modified_time method not allowed' + assert str(err.value) == "Sync get_modified_time method not allowed" def test_async_only_delete(directory): - handler = AsyncLocalFileHandler( - base_path=directory, allow_sync_methods=False - ) + handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) with pytest.raises(RuntimeError) as err: - handler.delete(filename='test.txt') + handler.delete(filename="test.txt") - assert str(err.value) == 'Sync delete method not allowed' + assert str(err.value) == "Sync delete method not allowed" diff --git a/tests/handlers/test_s3.py b/tests/handlers/test_s3.py index 1cecb1a..9a9626b 100644 --- a/tests/handlers/test_s3.py +++ b/tests/handlers/test_s3.py @@ -11,7 +11,7 @@ def mock_s3_resource(mocker): resource = s3_mock.MockS3AsyncResource() contextualized = s3_mock.MockAsyncContext(resource) mocker.patch( - 'filestorage.handlers.S3Handler.resource', + "filestorage.handlers.S3Handler.resource", new=contextualized, ) return resource @@ -22,7 +22,7 @@ def mock_s3_resource_failure(mocker): resource = s3_mock.MockS3AsyncResource(make_object_missing=True) contextualized = s3_mock.MockAsyncContext(resource) mocker.patch( - 'filestorage.handlers.S3Handler.resource', + "filestorage.handlers.S3Handler.resource", new=contextualized, ) return resource @@ -30,12 +30,12 @@ def mock_s3_resource_failure(mocker): @pytest.fixture def handler(): - return S3Handler(bucket_name='bucket') + return S3Handler(bucket_name="bucket") @pytest.fixture def async_only_handler(): - return S3Handler(bucket_name='bucket', allow_sync_methods=False) + return S3Handler(bucket_name="bucket", allow_sync_methods=False) @pytest.mark.asyncio @@ -45,125 +45,113 @@ async def test_validate(mock_s3_resource, handler): @pytest.mark.asyncio async def test_async_exists(mock_s3_resource, handler): - item = handler.get_item('foo.txt') + item = handler.get_item("foo.txt") assert await handler._async_exists(item) def test_exists(mock_s3_resource, handler): - item = handler.get_item('foo.txt') + item = handler.get_item("foo.txt") assert handler._exists(item) def test_not_exists(mock_s3_resource_failure, handler): - item = handler.get_item('foo.txt') + item = handler.get_item("foo.txt") assert not handler._exists(item) @pytest.mark.asyncio async def test_async_get_size(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) assert await handler._async_get_size(item) == 8 def test_get_size(mock_s3_resource, handler): - item = handler.get_item('foo.txt', 
data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) assert handler._get_size(item) == 8 @pytest.mark.asyncio async def test_async_get_accessed_time(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(NotImplementedError) as err: await handler._async_get_accessed_time(item) - assert ( - str(err.value) - == 'get_accessed_time is not supported with the S3 handler' - ) + assert str(err.value) == "get_accessed_time is not supported with the S3 handler" def test_get_accessed_time(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(NotImplementedError) as err: handler._get_accessed_time(item) - assert ( - str(err.value) - == 'get_accessed_time is not supported with the S3 handler' - ) + assert str(err.value) == "get_accessed_time is not supported with the S3 handler" @pytest.mark.asyncio async def test_async_get_created_time(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(NotImplementedError) as err: await handler._async_get_created_time(item) - assert ( - str(err.value) - == 'get_created_time is not supported with the S3 handler' - ) + assert str(err.value) == "get_created_time is not supported with the S3 handler" def test_get_created_time(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(NotImplementedError) as err: handler._get_created_time(item) - assert ( - str(err.value) - == 'get_created_time is not supported with the S3 handler' - ) + assert str(err.value) == "get_created_time is not supported with the S3 handler" @pytest.mark.asyncio async def test_async_get_modified_time(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) assert await handler._async_get_modified_time(item) == datetime(2015, 1, 1) def test_get_modified_time(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) assert handler._get_modified_time(item) == datetime(2015, 1, 1) @pytest.mark.asyncio async def test_async_save(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) await handler._async_save(item) call_args = mock_s3_resource._bucket._upload_fileobj_call_args assert call_args == { - 'ExtraArgs': {'ACL': 'public-read', 'ContentType': 'text/plain'} + "ExtraArgs": {"ACL": "public-read", "ContentType": "text/plain"} } def test_save(mock_s3_resource, handler): - item = handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = handler.get_item("foo.txt", data=BytesIO(b"contents")) handler._save(item) call_args = mock_s3_resource._bucket._upload_fileobj_call_args assert call_args == { - 'ExtraArgs': {'ACL': 'public-read', 'ContentType': 'text/plain'} + "ExtraArgs": {"ACL": "public-read", "ContentType": "text/plain"} } @pytest.mark.asyncio async def test_async_delete(mock_s3_resource, handler): - item = handler.get_item('foo.txt') + item = handler.get_item("foo.txt") await 
handler._async_delete(item) @@ -171,7 +159,7 @@ async def test_async_delete(mock_s3_resource, handler): def test_delete(mock_s3_resource, handler): - item = handler.get_item('foo.txt') + item = handler.get_item("foo.txt") handler._delete(item) @@ -182,86 +170,82 @@ def test_delete(mock_s3_resource, handler): def test_cant_save(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._save(item) - assert str(err.value) == 'Sync save method not allowed' + assert str(err.value) == "Sync save method not allowed" def test_cant_exists(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._exists(item) - assert str(err.value) == 'Sync exists method not allowed' + assert str(err.value) == "Sync exists method not allowed" def test_cant_get_size(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._get_size(item) - assert str(err.value) == 'Sync get_size method not allowed' + assert str(err.value) == "Sync get_size method not allowed" def test_cant_get_accessed_time(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._get_accessed_time(item) - assert str(err.value) == 'Sync get_accessed_time method not allowed' + assert str(err.value) == "Sync get_accessed_time method not allowed" def test_cant_get_created_time(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._get_created_time(item) - assert str(err.value) == 'Sync get_created_time method not allowed' + assert str(err.value) == "Sync get_created_time method not allowed" def test_cant_get_modified_time(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._get_modified_time(item) - assert str(err.value) == 'Sync get_modified_time method not allowed' + assert str(err.value) == "Sync get_modified_time method not allowed" def test_cant_delete(async_only_handler): - item = async_only_handler.get_item('foo.txt', data=BytesIO(b'contents')) + item = async_only_handler.get_item("foo.txt", data=BytesIO(b"contents")) with pytest.raises(RuntimeError) as err: async_only_handler._delete(item) - assert str(err.value) == 'Sync delete method not allowed' + assert str(err.value) == "Sync delete method not allowed" @pytest.mark.asyncio async def test_async_save_in_folder(mock_s3_resource, handler): - item = handler.get_item( - 'foo.txt', data=BytesIO(b'contents'), subpath=('folder',) - ) + item = handler.get_item("foo.txt", data=BytesIO(b"contents"), subpath=("folder",)) await handler._async_save(item) - assert ( - mock_s3_resource._bucket._upload_fileobj_filename == 'folder/foo.txt' - ) + assert 
mock_s3_resource._bucket._upload_fileobj_filename == "folder/foo.txt" @pytest.mark.asyncio async def test_async_delete_in_folder(mock_s3_resource, handler): - item = handler.get_item('foo.txt', subpath=('folder',)) + item = handler.get_item("foo.txt", subpath=("folder",)) await handler._async_delete(item) assert mock_s3_resource._file_object._deleted - assert mock_s3_resource._file_object._filename == 'folder/foo.txt' + assert mock_s3_resource._file_object._filename == "folder/foo.txt" diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index 0e48db3..b7ba442 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -11,130 +11,128 @@ def store(): def test_set_nested_value(): result = {} - key = 'foo.bar.baz' - value = 'value' + key = "foo.bar.baz" + value = "value" config_utils.set_nested_value(key, value, result) - assert result['foo']['bar']['baz'][None] == value + assert result["foo"]["bar"]["baz"][None] == value def test_set_nested_value_with_bracket(): result = {} - key = 'foo.bar[2].baz' - value = 'value' + key = "foo.bar[2].baz" + value = "value" config_utils.set_nested_value(key, value, result) - assert result['foo']['bar']['[2]']['baz'][None] == value + assert result["foo"]["bar"]["[2]"]["baz"][None] == value def test_get_keys_from(): settings = { - 'foo.bar.baz': 'first', - 'foo.bar.bang': 'second', - 'something.else': 'third', - 'foot.and.mouth': 'nope', - 'foo[1].bar.bang': 'second1', + "foo.bar.baz": "first", + "foo.bar.bang": "second", + "something.else": "third", + "foot.and.mouth": "nope", + "foo[1].bar.bang": "second1", } - key_dict = config_utils.get_keys_from('foo', settings) + key_dict = config_utils.get_keys_from("foo", settings) assert key_dict == { - 'bar': {'baz': {None: 'first'}, 'bang': {None: 'second'}}, - '[1]': {'bar': {'bang': {None: 'second1'}}}, + "bar": {"baz": {None: "first"}, "bang": {None: "second"}}, + "[1]": {"bar": {"bang": {None: "second1"}}}, } def test_setup_handler_with_filters(store): settings = { - 'store.handler': 'DummyHandler', - 'store.handler.filters[0]': 'RandomizeFilename', - 'store.handler.filters[1]': 'ValidateExtension', - 'store.handler.filters[1].extensions': "['jpg', 'png']", + "store.handler": "DummyHandler", + "store.handler.filters[0]": "RandomizeFilename", + "store.handler.filters[1]": "ValidateExtension", + "store.handler.filters[1].extensions": "['jpg', 'png']", } config_utils.setup_from_settings(settings, store) store.finalize_config() - assert store.handler.__class__.__name__ == 'DummyHandler' - assert store.handler.filters[0].__class__.__name__ == 'RandomizeFilename' - assert store.handler.filters[1].__class__.__name__ == 'ValidateExtension' - assert store.handler.filters[1].extensions == {'jpg', 'png'} + assert store.handler.__class__.__name__ == "DummyHandler" + assert store.handler.filters[0].__class__.__name__ == "RandomizeFilename" + assert store.handler.filters[1].__class__.__name__ == "ValidateExtension" + assert store.handler.filters[1].extensions == {"jpg", "png"} def test_setup_two_handlers(store): settings = { - 'store.handler': 'DummyHandler', - 'store.handler.base_url': '//base', - 'store["foo"].handler': 'DummyHandler', - 'store["foo"].handler.base_url': '//base.foo', + "store.handler": "DummyHandler", + "store.handler.base_url": "//base", + 'store["foo"].handler': "DummyHandler", + 'store["foo"].handler.base_url': "//base.foo", } config_utils.setup_from_settings(settings, store) - assert store.handler.base_url == '//base' - assert store['foo'].handler.base_url == 
'//base.foo' + assert store.handler.base_url == "//base" + assert store["foo"].handler.base_url == "//base.foo" def test_setup_nested_handlers(store): settings = { - 'store.handler': 'None', - 'store["foo"].handler': 'DummyHandler', - 'store["foo"].handler.base_url': '//base.foo', - 'store["foo"]["bar"].handler': 'DummyHandler', - 'store["foo"]["bar"].handler.base_url': '//base.foo.bar', + "store.handler": "None", + 'store["foo"].handler': "DummyHandler", + 'store["foo"].handler.base_url': "//base.foo", + 'store["foo"]["bar"].handler': "DummyHandler", + 'store["foo"]["bar"].handler.base_url': "//base.foo.bar", } config_utils.setup_from_settings(settings, store) store.finalize_config() assert store.handler is None - assert store['foo'].handler.base_url == '//base.foo' - assert store['foo']['bar'].handler.base_url == '//base.foo.bar' + assert store["foo"].handler.base_url == "//base.foo" + assert store["foo"]["bar"].handler.base_url == "//base.foo.bar" def test_handler_full_name(store): settings = { - 'store.handler': 'filestorage.handlers.DummyHandler', + "store.handler": "filestorage.handlers.DummyHandler", } config_utils.setup_from_settings(settings, store) store.finalize_config() - assert store.handler.__class__.__name__ == 'DummyHandler' + assert store.handler.__class__.__name__ == "DummyHandler" def test_missing_required_parameter(store): settings = { - 'store.handler': 'DummyHandler', - 'store.handler.filters[0]': 'ValidateExtension', + "store.handler": "DummyHandler", + "store.handler.filters[0]": "ValidateExtension", # 'store.handler.filters[0].extensions': "['jpg', 'png']", } with pytest.raises(FilestorageConfigError) as err: config_utils.setup_from_settings(settings, store) - assert 'store.handler.filters[0]' in str(err.value) - assert "missing 1 required positional argument: 'extensions'" in str( - err.value - ) + assert "store.handler.filters[0]" in str(err.value) + assert "missing 1 required positional argument: 'extensions'" in str(err.value) def test_decode_ints_and_strings(store): settings = { - 'store.handler': 'DummyHandler', + "store.handler": "DummyHandler", # The URL isn't really checked well and could be a number - 'store.handler.base_url': '5', - 'store["six"].handler': 'DummyHandler', + "store.handler.base_url": "5", + 'store["six"].handler': "DummyHandler", # The quotes should force it to be a string 'store["six"].handler.base_url': '"6"', } config_utils.setup_from_settings(settings, store) assert store.handler.base_url == 5 - assert store['six'].handler.base_url == '6' + assert store["six"].handler.base_url == "6" def test_suggest_filters(store): settings = { - 'store.handler': 'DummyHandler', - 'store.handler.filter[0]': 'ValidateExtension', - 'store.handler.filter[0].extensions': "['jpg', 'png']", + "store.handler": "DummyHandler", + "store.handler.filter[0]": "ValidateExtension", + "store.handler.filter[0].extensions": "['jpg', 'png']", } with pytest.raises(FilestorageConfigError) as err: config_utils.setup_from_settings(settings, store) diff --git a/tests/test_file_item.py b/tests/test_file_item.py index f1490bd..f739c62 100644 --- a/tests/test_file_item.py +++ b/tests/test_file_item.py @@ -6,13 +6,13 @@ @pytest.mark.parametrize( - ('filename', 'mediatype'), + ("filename", "mediatype"), [ - ['foo', None], - ['foo.txt', 'text/plain'], - ['foo.html', 'text/html'], - ['foo.jpg', 'image/jpeg'], - ['foo.png', 'image/png'], + ["foo", None], + ["foo.txt", "text/plain"], + ["foo.html", "text/html"], + ["foo.jpg", "image/jpeg"], + ["foo.png", "image/png"], ], ) def 
test_content_type_guess(filename, mediatype): @@ -22,50 +22,50 @@ def test_content_type_guess(filename, mediatype): def text_content_type_fixed(): - item = FileItem(filename='foo.txt', media_type='wacky') + item = FileItem(filename="foo.txt", media_type="wacky") - assert item.content_type == 'wacky' + assert item.content_type == "wacky" def test_fileitem_reader(): - item = FileItem(filename='foo.txt', data=BytesIO(b'contents')) + item = FileItem(filename="foo.txt", data=BytesIO(b"contents")) item.data.seek(3) with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" f.seek(3) - assert f.read() == b'tents' + assert f.read() == b"tents" @pytest.mark.asyncio async def test_async_fileitem_reader(): - item = FileItem(filename='foo.txt', data=BytesIO(b'contents')) + item = FileItem(filename="foo.txt", data=BytesIO(b"contents")) item.data.seek(3) async with item as f: - assert await f.read() == b'contents' + assert await f.read() == b"contents" await f.seek(3) - assert await f.read() == b'tents' + assert await f.read() == b"tents" def test_url_path(): - item = FileItem(filename='foo.txt', path=('folder', 'subfolder')) + item = FileItem(filename="foo.txt", path=("folder", "subfolder")) - assert item.url_path == 'folder/subfolder/foo.txt' + assert item.url_path == "folder/subfolder/foo.txt" def test_fs_path(): - item = FileItem(filename='foo.txt', path=('folder', 'subfolder')) + item = FileItem(filename="foo.txt", path=("folder", "subfolder")) - if os.name == 'nt': - assert item.fs_path == 'folder\\subfolder\\foo.txt' + if os.name == "nt": + assert item.fs_path == "folder\\subfolder\\foo.txt" else: - assert item.fs_path == 'folder/subfolder/foo.txt' + assert item.fs_path == "folder/subfolder/foo.txt" def test_has_data(): - item1 = FileItem(filename='foo.txt') - item2 = FileItem(filename='foo.txt', data=BytesIO(b'')) + item1 = FileItem(filename="foo.txt") + item2 = FileItem(filename="foo.txt", data=BytesIO(b"")) assert not item1.has_data assert item2.has_data @@ -73,10 +73,10 @@ def test_has_data(): def test_copy_all(): item = FileItem( - filename='foo.txt', - path=('folder',), - data=BytesIO(b'contents'), - media_type='stuff', + filename="foo.txt", + path=("folder",), + data=BytesIO(b"contents"), + media_type="stuff", ) new_item = item.copy() @@ -88,13 +88,13 @@ def test_copy_all(): def test_copy_new_data(): item = FileItem( - filename='foo.txt', - path=('folder',), - data=BytesIO(b'contents'), - media_type='stuff', + filename="foo.txt", + path=("folder",), + data=BytesIO(b"contents"), + media_type="stuff", ) - new_item = item.copy(data=BytesIO(b'other')) + new_item = item.copy(data=BytesIO(b"other")) # Tuple is no longer identical as the data is different assert new_item != item @@ -105,17 +105,17 @@ def test_copy_new_data(): def test_copy_new_filename(): item = FileItem( - filename='foo.txt', - path=('folder',), - data=BytesIO(b'contents'), - media_type='stuff', + filename="foo.txt", + path=("folder",), + data=BytesIO(b"contents"), + media_type="stuff", ) - new_item = item.copy(filename='bar.txt') + new_item = item.copy(filename="bar.txt") # Tuple is no longer identical as the data is different assert new_item != item - assert new_item.filename == 'bar.txt' + assert new_item.filename == "bar.txt" assert new_item.data is item.data assert new_item.path == item.path assert new_item.media_type == item.media_type diff --git a/tests/test_filter_base.py b/tests/test_filter_base.py index 09529c6..5b667be 100644 --- a/tests/test_filter_base.py +++ b/tests/test_filter_base.py @@ 
-8,67 +8,67 @@ class SimpleFilter(FilterBase): async_ok = True def _apply(self, item: FileItem) -> FileItem: - return item.copy(filename='filtered_name.txt') + return item.copy(filename="filtered_name.txt") class FailedFilter(FilterBase): async_ok = True def _apply(self, item: FileItem) -> FileItem: - raise RuntimeError('called a FailedFilter') + raise RuntimeError("called a FailedFilter") class AsyncSimpleFilter(AsyncFilterBase): def _apply(self, item: FileItem) -> FileItem: - return item.copy(filename='filtered_name.txt') + return item.copy(filename="filtered_name.txt") class AsyncFailedFilter(AsyncFilterBase): def _apply(self, item: FileItem) -> FileItem: - raise RuntimeError('called a FailedFilter') + raise RuntimeError("called a FailedFilter") -@pytest.mark.parametrize('Filter', [SimpleFilter, AsyncSimpleFilter]) +@pytest.mark.parametrize("Filter", [SimpleFilter, AsyncSimpleFilter]) def test_sync_filter_call(Filter): handler = DummyHandler(filters=[Filter()]) - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") item = handler.last_save - assert item.filename == 'filtered_name.txt' + assert item.filename == "filtered_name.txt" with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" -@pytest.mark.parametrize('Filter', [FailedFilter, AsyncFailedFilter]) +@pytest.mark.parametrize("Filter", [FailedFilter, AsyncFailedFilter]) def test_sync_filter_bad_call(Filter): handler = DummyHandler(filters=[Filter()]) with pytest.raises(RuntimeError) as err: - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") - assert str(err.value) == 'called a FailedFilter' + assert str(err.value) == "called a FailedFilter" -@pytest.mark.parametrize('Filter', [SimpleFilter, AsyncSimpleFilter]) +@pytest.mark.parametrize("Filter", [SimpleFilter, AsyncSimpleFilter]) @pytest.mark.asyncio async def test_async_filter_call(Filter): handler = AsyncDummyHandler(filters=[Filter()]) - await handler.async_save_data(data=b'contents', filename='file.txt') + await handler.async_save_data(data=b"contents", filename="file.txt") item = handler.last_save - assert item.filename == 'filtered_name.txt' + assert item.filename == "filtered_name.txt" with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" -@pytest.mark.parametrize('Filter', [FailedFilter, AsyncFailedFilter]) +@pytest.mark.parametrize("Filter", [FailedFilter, AsyncFailedFilter]) @pytest.mark.asyncio async def test_async_filter_bad_call(Filter): handler = AsyncDummyHandler(filters=[Filter()]) with pytest.raises(RuntimeError) as err: - await handler.async_save_data(data=b'contents', filename='file.txt') + await handler.async_save_data(data=b"contents", filename="file.txt") - assert str(err.value) == 'called a FailedFilter' + assert str(err.value) == "called a FailedFilter" diff --git a/tests/test_handler_base.py b/tests/test_handler_base.py index 4aa8b13..17c1d9c 100644 --- a/tests/test_handler_base.py +++ b/tests/test_handler_base.py @@ -22,14 +22,14 @@ def handler(): def test_different_paths(): handlers = [ - DummyHandler(path=('foo',)), - DummyHandler(path=('foo')), - DummyHandler(path=['foo']), - DummyHandler(path='foo'), + DummyHandler(path=("foo",)), + DummyHandler(path=("foo")), + DummyHandler(path=["foo"]), + DummyHandler(path="foo"), ] for handler in handlers: - assert handler.path == ('foo',) + assert handler.path == ("foo",) def test_validate(): @@ -75,28 +75,28 @@ async def 
test_validate_bad_async_filter(): with pytest.raises(FilestorageConfigError) as err: await handler.validate() - assert 'cannot be used' in str(err.value) + assert "cannot be used" in str(err.value) def test_get_item(): - handler = DummyHandler(path=['foo']) - item = handler.get_item('file.txt') + handler = DummyHandler(path=["foo"]) + item = handler.get_item("file.txt") assert isinstance(item, FileItem) - assert item == FileItem(filename='file.txt', path=('foo',)) + assert item == FileItem(filename="file.txt", path=("foo",)) @pytest.mark.parametrize( - ('dirty', 'clean'), + ("dirty", "clean"), [ - ['..foo', 'foo'], - ['foo..', 'foo..'], - ['../foo', '_foo'], - ['/.foo', '_.foo'], - ['a b c', 'a_b_c'], - ['a/b/c', 'a_b_c'], - ['1/2/3', '1_2_3'], - ['☺', '_'], + ["..foo", "foo"], + ["foo..", "foo.."], + ["../foo", "_foo"], + ["/.foo", "_.foo"], + ["a b c", "a_b_c"], + ["a/b/c", "a_b_c"], + ["1/2/3", "1_2_3"], + ["☺", "_"], ], ) def test_sanitize_filename(handler, dirty, clean): @@ -104,116 +104,116 @@ def test_sanitize_filename(handler, dirty, clean): def test_get_url(): - handler = AsyncDummyHandler(base_url='http://eppx.com') + handler = AsyncDummyHandler(base_url="http://eppx.com") - assert handler.get_url('file.txt') == 'http://eppx.com/file.txt' + assert handler.get_url("file.txt") == "http://eppx.com/file.txt" def test_get_size(handler): - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") item_size = handler.last_save_contents - assert len(item_size) == handler.get_size('file.txt') + assert len(item_size) == handler.get_size("file.txt") def test_get_accessed_time(handler): - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") - atime = handler.files['file.txt'].atime - assert atime == handler.get_accessed_time('file.txt') + atime = handler.files["file.txt"].atime + assert atime == handler.get_accessed_time("file.txt") def test_get_created_time(handler): - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") - ctime = handler.files['file.txt'].ctime - assert ctime == handler.get_created_time('file.txt') + ctime = handler.files["file.txt"].ctime + assert ctime == handler.get_created_time("file.txt") def test_get_modified_time(handler): - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") - mtime = handler.files['file.txt'].mtime - assert mtime == handler.get_modified_time('file.txt') + mtime = handler.files["file.txt"].mtime + assert mtime == handler.get_modified_time("file.txt") def test_save_file(handler): - handler.save_data(data=b'contents', filename='file.txt') + handler.save_data(data=b"contents", filename="file.txt") item = handler.last_save - assert item.filename == 'file.txt' + assert item.filename == "file.txt" with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" @pytest.mark.asyncio async def test_async_save_file(): handler = AsyncDummyHandler() - await handler.async_save_data(data=b'contents', filename='file.txt') + await handler.async_save_data(data=b"contents", filename="file.txt") item = handler.last_save - assert item.filename == 'file.txt' + assert item.filename == "file.txt" with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" def test_save_field(handler): - headers = {'content-disposition': 'attachment; filename=file.txt'} + headers = {"content-disposition": 
"attachment; filename=file.txt"} field = cgi.FieldStorage(headers=headers) - field.file = BytesIO(b'contents') + field.file = BytesIO(b"contents") handler.save_field(field) item = handler.last_save - assert item.filename == 'file.txt' + assert item.filename == "file.txt" with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" def test_delete_file(handler): - assert not handler.exists('file.txt') + assert not handler.exists("file.txt") - handler.save_data(data=b'contents', filename='file.txt') - assert handler.exists('file.txt') + handler.save_data(data=b"contents", filename="file.txt") + assert handler.exists("file.txt") - handler.delete('file.txt') - assert not handler.exists('file.txt') + handler.delete("file.txt") + assert not handler.exists("file.txt") @pytest.mark.asyncio async def test_async_delete_file(): handler = AsyncDummyHandler() - assert not await handler.async_exists('file.txt') + assert not await handler.async_exists("file.txt") - await handler.async_save_data(data=b'contents', filename='file.txt') - assert await handler.async_exists('file.txt') + await handler.async_save_data(data=b"contents", filename="file.txt") + assert await handler.async_exists("file.txt") - await handler.async_delete('file.txt') - assert not await handler.async_exists('file.txt') + await handler.async_delete("file.txt") + assert not await handler.async_exists("file.txt") def test_subfolder_save(store, handler): store.handler = handler - subfolder = store / 'a' / 'b' + subfolder = store / "a" / "b" - subfolder.save_data(data=b'contents', filename='file.txt') + subfolder.save_data(data=b"contents", filename="file.txt") item = handler.last_save - assert item.filename == 'file.txt' - assert item.path == ('a', 'b') + assert item.filename == "file.txt" + assert item.path == ("a", "b") with item as f: - assert f.read() == b'contents' + assert f.read() == b"contents" def test_subfolder_delete_file(store, handler): store.handler = handler - subfolder = store / 'a' / 'b' - assert not subfolder.exists('file.txt') + subfolder = store / "a" / "b" + assert not subfolder.exists("file.txt") - subfolder.save_data(data=b'contents', filename='file.txt') - assert subfolder.exists('file.txt') + subfolder.save_data(data=b"contents", filename="file.txt") + assert subfolder.exists("file.txt") - subfolder.delete('file.txt') - assert not subfolder.exists('file.txt') + subfolder.delete("file.txt") + assert not subfolder.exists("file.txt") class MockFilter(FilterBase): @@ -228,14 +228,14 @@ def _apply(self, item: FileItem) -> FileItem: def test_calls_filter(store): - filter1 = MockFilter('-1') - filter2 = MockFilter('-2') + filter1 = MockFilter("-1") + filter2 = MockFilter("-2") store.handler = DummyHandler(filters=[filter1, filter2]) - result = store.save_data(data=b'contents', filename='file.txt') + result = store.save_data(data=b"contents", filename="file.txt") filter1.mock._apply.assert_called() filter2.mock._apply.assert_called() - assert result == 'file.txt-1-2' + assert result == "file.txt-1-2" def test_filter_class_not_instance(): @@ -248,14 +248,14 @@ def test_filter_class_not_instance(): handler.validate() assert str(err.value) == ( - 'Filter MockFilter is a class, not an instance. ' + "Filter MockFilter is a class, not an instance. " 'Did you mean to use "filters=[MockFilter()]" instead?' 
) def test_subfolder_get_url(store): - store.handler = DummyHandler(base_url='http://foo.bar') - subfolder = store / 'folder' + store.handler = DummyHandler(base_url="http://foo.bar") + subfolder = store / "folder" - assert subfolder.base_url == 'http://foo.bar' - assert subfolder.get_url('test.txt') == 'http://foo.bar/folder/test.txt' + assert subfolder.base_url == "http://foo.bar" + assert subfolder.get_url("test.txt") == "http://foo.bar/folder/test.txt" diff --git a/tests/test_pyramid_config.py b/tests/test_pyramid_config.py index a64d1ee..af2626b 100644 --- a/tests/test_pyramid_config.py +++ b/tests/test_pyramid_config.py @@ -20,9 +20,7 @@ def __init__(self, settings: Dict): def get_settings(self): return self._settings - def add_request_method( - self, callable=None, name=None, property=False, reify=False - ): + def add_request_method(self, callable=None, name=None, property=False, reify=False): self._request_methods[name] = callable def get_request_prop(self, name): @@ -32,33 +30,33 @@ def get_request_prop(self, name): def test_pyramid_includeme(): settings = { - 'store.use_global': 'false', - 'store.handler': 'DummyHandler', - 'store.handler.base_url': 'http://foo.bar', + "store.use_global": "false", + "store.handler": "DummyHandler", + "store.handler.base_url": "http://foo.bar", } config = MockPyramidConfig(settings) pyramid_config.includeme(config) - pyramid_store = config.get_request_prop('store') + pyramid_store = config.get_request_prop("store") assert store is not pyramid_store assert isinstance(pyramid_store, StorageContainer) - assert pyramid_store.base_url == 'http://foo.bar' + assert pyramid_store.base_url == "http://foo.bar" def test_pyramid_different_prop_name(): settings = { - 'store.use_global': 'false', - 'store.request_property': 'my_store', - 'store.handler': 'DummyHandler', - 'store.handler.base_url': 'http://foo.bar', + "store.use_global": "false", + "store.request_property": "my_store", + "store.handler": "DummyHandler", + "store.handler.base_url": "http://foo.bar", } config = MockPyramidConfig(settings) pyramid_config.includeme(config) - pyramid_store = config.get_request_prop('my_store') + pyramid_store = config.get_request_prop("my_store") assert store is not pyramid_store assert isinstance(pyramid_store, StorageContainer) - assert pyramid_store.base_url == 'http://foo.bar' + assert pyramid_store.base_url == "http://foo.bar" def test_pyramid_no_config(): @@ -67,7 +65,7 @@ def test_pyramid_no_config(): config = MockPyramidConfig(settings) pyramid_config.includeme(config) - pyramid_store = config.get_request_prop('store') + pyramid_store = config.get_request_prop("store") assert store is pyramid_store assert pyramid_store.finalized is False @@ -75,16 +73,16 @@ def test_pyramid_no_config(): def test_pyramid_local_store(): # Setup two stores and ensure they're different. 
settings = { - 'store.use_global': 'false', - 'store.handler': 'DummyHandler', + "store.use_global": "false", + "store.handler": "DummyHandler", } config1 = MockPyramidConfig(settings) pyramid_config.includeme(config1) config2 = MockPyramidConfig(settings) pyramid_config.includeme(config2) - pyramid_store1 = config1.get_request_prop('store') - pyramid_store2 = config2.get_request_prop('store') + pyramid_store1 = config1.get_request_prop("store") + pyramid_store2 = config2.get_request_prop("store") assert store is not pyramid_store1 assert store is not pyramid_store2 assert pyramid_store1 is not pyramid_store2 diff --git a/tests/test_storage_container.py b/tests/test_storage_container.py index 2ce387b..3d0c62b 100644 --- a/tests/test_storage_container.py +++ b/tests/test_storage_container.py @@ -15,7 +15,7 @@ def store(): @pytest.fixture def handler(store): - return DummyHandler(base_url='http://eppx.com/', path=('static',)) + return DummyHandler(base_url="http://eppx.com/", path=("static",)) @pytest.fixture @@ -61,7 +61,7 @@ def test_use_after_do_not_use_config(store, handler): def test_validate_async_handler(store, handler, async_handler): store.handler = handler - store['a'].handler = async_handler + store["a"].handler = async_handler store.finalize_config() @@ -93,8 +93,8 @@ async def test_async_validate_error(store, async_handler): def test_child_stores_naming(store): - sub_a = store['a'] - sub_b = store['a']['b'] + sub_a = store["a"] + sub_b = store["a"]["b"] assert not store.name assert sub_a.name == "['a']" @@ -106,45 +106,42 @@ def test_cant_get_children_after_final(store, handler): store.finalize_config() with pytest.raises(FilestorageConfigError) as err: - store['a'] + store["a"] assert str(err.value) == "Getting store['a']: store already finalized!" def test_path_by_div(store, handler): - sub_a = store / 'a' - sub_b = sub_a / 'b' + sub_a = store / "a" + sub_b = sub_a / "b" store.handler = handler - sub_b.save_data(filename='new_file.txt', data=b'As a cucumber.') + sub_b.save_data(filename="new_file.txt", data=b"As a cucumber.") item = store.handler.last_save with item as f: - assert f.read() == b'As a cucumber.' - assert item.url_path == 'static/a/b/new_file.txt' + assert f.read() == b"As a cucumber." + assert item.url_path == "static/a/b/new_file.txt" def test_bad_handler_setting(store): with pytest.raises(FilestorageConfigError) as err: # Handler must be a handler! 
- store.handler = 'foo' + store.handler = "foo" - assert ( - str(err.value) - == "Setting store.handler: 'foo' is not a StorageHandler" - ) + assert str(err.value) == "Setting store.handler: 'foo' is not a StorageHandler" def test_finalized_without_setting(store): with pytest.raises(FilestorageConfigError) as err: store.finalize_config() - assert str(err.value) == 'No handler provided for store' + assert str(err.value) == "No handler provided for store" def test_finalized_without_setting_substore(store, handler): store.handler = handler - store_b = store['b'] # noqa + store_b = store["b"] # noqa with pytest.raises(FilestorageConfigError) as err: store.finalize_config() @@ -157,9 +154,9 @@ def test_finalized_finalizes_all_substores(store, handler): handler_a = DummyHandler() handler_b = DummyHandler() handler_ac = DummyHandler() - store['a'].handler = handler_a - store['b'].handler = handler_b - store['a']['c'].handler = handler_ac + store["a"].handler = handler_a + store["b"].handler = handler_b + store["a"]["c"].handler = handler_ac store.finalize_config() From f67cd22f4edba2f403cf47860eab3cfc5e7b3ccf Mon Sep 17 00:00:00 2001 From: Ian Epperson Date: Thu, 15 Aug 2024 11:18:53 -0700 Subject: [PATCH 4/4] Updated Black to use shorter line length (79) --- filestorage/config_utils.py | 12 +++- filestorage/config_utils.pyi | 8 ++- filestorage/file_item.py | 7 ++- filestorage/filter_base.py | 3 +- filestorage/filters/valid_extensions.py | 4 +- filestorage/handler_base.py | 4 +- filestorage/handler_base.pyi | 8 ++- filestorage/handlers/dummy.py | 38 ++++++++--- filestorage/handlers/file.py | 8 ++- filestorage/handlers/s3.py | 32 +++++++--- filestorage/handlers/s3.pyi | 4 +- filestorage/storage_container.py | 16 +++-- filestorage/utils.py | 4 +- filestorage/utils.pyi | 4 +- setup.py | 2 +- tests/filters/test_randomize_filename.py | 4 +- tests/filters/test_valid_extensions.py | 4 +- tests/handlers/test_local_file.py | 80 ++++++++++++++++++------ tests/handlers/test_s3.py | 28 +++++++-- tests/test_config_utils.py | 4 +- tests/test_pyramid_config.py | 4 +- tests/test_storage_container.py | 5 +- 22 files changed, 214 insertions(+), 69 deletions(-) diff --git a/filestorage/config_utils.py b/filestorage/config_utils.py index 73760fd..be8cfd9 100644 --- a/filestorage/config_utils.py +++ b/filestorage/config_utils.py @@ -142,7 +142,9 @@ def get_handler(key_prefix: str, settings_dict: Dict) -> StorageHandlerBase: try: return handler_cls(**kwargs) except Exception as err: - raise FilestorageConfigError(f"Pyramid settings bad args for {name}: {err}") + raise FilestorageConfigError( + f"Pyramid settings bad args for {name}: {err}" + ) def get_all_filters(key_prefix: str, settings_dict: Dict) -> List[FilterBase]: @@ -168,7 +170,9 @@ def get_filter(key_prefix: str, settings_dict: Dict) -> FilterBase: try: filter_cls = try_import("filestorage.filters", filter_name) except ValueError: - raise FilestorageConfigError(f"Pyramid settings bad value for {key_prefix}") + raise FilestorageConfigError( + f"Pyramid settings bad value for {key_prefix}" + ) kwargs = {key: decode_kwarg(value) for key, value in settings_dict.items()} try: @@ -206,7 +210,9 @@ def decode_kwarg(value) -> Any: try: return eval(value, {}, {}) except Exception as err: - raise FilestorageConfigError(f"Pyramid settings bad value {value}: {err}") + raise FilestorageConfigError( + f"Pyramid settings bad value {value}: {err}" + ) if value.isdigit(): return int(value) diff --git a/filestorage/config_utils.pyi b/filestorage/config_utils.pyi index 
1dcde52..dfa2935 100644 --- a/filestorage/config_utils.pyi +++ b/filestorage/config_utils.pyi @@ -16,8 +16,12 @@ def setup_from_settings( def setup_store( store: StorageContainer, key_prefix: str, name: str, settings_dict: Dict ) -> Any: ... -def get_handler(key_prefix: str, settings_dict: Dict) -> StorageHandlerBase: ... -def get_all_filters(key_prefix: str, settings_dict: Dict) -> List[FilterBase]: ... +def get_handler( + key_prefix: str, settings_dict: Dict +) -> StorageHandlerBase: ... +def get_all_filters( + key_prefix: str, settings_dict: Dict +) -> List[FilterBase]: ... def get_filter(key_prefix: str, settings_dict: Dict) -> FilterBase: ... def unquote(value: str) -> str: ... def decode_kwarg(value: Any) -> Any: ... diff --git a/filestorage/file_item.py b/filestorage/file_item.py index c0da65e..c2bbc5b 100644 --- a/filestorage/file_item.py +++ b/filestorage/file_item.py @@ -59,12 +59,15 @@ def copy(self, **kwargs) -> "FileItem": data = kwargs.get("data", self.data) media_type = kwargs.get("media_type", self.media_type) - return FileItem(filename=filename, path=path, data=data, media_type=media_type) + return FileItem( + filename=filename, path=path, data=data, media_type=media_type + ) def __repr__(self) -> str: has_data = "no data" if self.data is None else "with data" return ( - f"" + f"" ) @property diff --git a/filestorage/filter_base.py b/filestorage/filter_base.py index f0350f7..0f5154c 100644 --- a/filestorage/filter_base.py +++ b/filestorage/filter_base.py @@ -25,7 +25,8 @@ async def async_call(self, item: FileItem) -> FileItem: """Apply the filter asynchronously""" if not self.async_ok: raise FilestorageConfigError( - f"The {self.__class__.__name__} filter cannot be used " "asynchronously" + f"The {self.__class__.__name__} filter cannot be used " + "asynchronously" ) if iscoroutinefunction(self._apply): diff --git a/filestorage/filters/valid_extensions.py b/filestorage/filters/valid_extensions.py index f52a47e..b4cdfad 100644 --- a/filestorage/filters/valid_extensions.py +++ b/filestorage/filters/valid_extensions.py @@ -13,7 +13,9 @@ class ValidateExtension(FilterBase): async_ok = True def __init__(self, extensions: List[str]): - self.extensions = set(ext.lower().strip(os.path.extsep) for ext in extensions) + self.extensions = set( + ext.lower().strip(os.path.extsep) for ext in extensions + ) def extension_allowed(self, ext: str) -> bool: """Determine if the provided file extension is allowed.""" diff --git a/filestorage/handler_base.py b/filestorage/handler_base.py index cc6e39a..216334b 100644 --- a/filestorage/handler_base.py +++ b/filestorage/handler_base.py @@ -236,7 +236,9 @@ def save_field(self, field: "cgi.FieldStorage") -> str: if not field.file: raise RuntimeError("No file data in the field") - return self.save_file(field.filename or "file", cast(BinaryIO, field.file)) + return self.save_file( + field.filename or "file", cast(BinaryIO, field.file) + ) def save_data(self, filename: str, data: bytes) -> str: """Save a file from the byte data provided.""" diff --git a/filestorage/handler_base.pyi b/filestorage/handler_base.pyi index 68254f8..15e5acc 100644 --- a/filestorage/handler_base.pyi +++ b/filestorage/handler_base.pyi @@ -45,7 +45,9 @@ class StorageHandlerBase(ABC, metaclass=abc.ABCMeta): class AsyncStorageHandlerBase(StorageHandlerBase, ABC, metaclass=abc.ABCMeta): allow_sync_methods: Any = ... - def __init__(self, allow_sync_methods: bool = ..., **kwargs: Any) -> None: ... + def __init__( + self, allow_sync_methods: bool = ..., **kwargs: Any + ) -> None: ... 
def validate(self) -> Optional[Awaitable]: ... async def async_exists(self, filename: str) -> bool: ... async def async_get_size(self, filename: str) -> int: ... @@ -64,7 +66,9 @@ class Folder(AsyncStorageHandlerBase): def filters(self) -> List[FilterBase]: ... @property def base_url(self) -> str: ... - def __init__(self, store: StorageContainer, path: Tuple[str, ...]) -> None: ... + def __init__( + self, store: StorageContainer, path: Tuple[str, ...] + ) -> None: ... def subfolder(self, folder_name: str) -> Folder: ... def __eq__(self, other: Any) -> bool: ... def __truediv__(self, other: str) -> Folder: ... diff --git a/filestorage/handlers/dummy.py b/filestorage/handlers/dummy.py index fd102d4..59dd77b 100644 --- a/filestorage/handlers/dummy.py +++ b/filestorage/handlers/dummy.py @@ -45,7 +45,9 @@ def _get_size(self, item: FileItem) -> int: """Indicate if the given file size is equal to the anticipated size.""" return len(self.files[item.url_path].contents) - def assert_get_size(self, filename: str, path: Tuple[str, ...], size: int) -> None: + def assert_get_size( + self, filename: str, path: Tuple[str, ...], size: int + ) -> None: """Assert that given file size is equal to the anticipated size.""" assert self._get_size(FileItem(filename=filename, path=path)) == size @@ -57,7 +59,10 @@ def assert_get_accessed_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file access time is equal to the anticipated time.""" - assert self._get_accessed_time(FileItem(filename=filename, path=path)) == date + assert ( + self._get_accessed_time(FileItem(filename=filename, path=path)) + == date + ) def _get_created_time(self, item: FileItem) -> datetime: """Indicate if the given file creation time is equal to the anticipated time.""" @@ -67,7 +72,10 @@ def assert_get_created_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file creation time is equal to the anticipated time.""" - assert self._get_created_time(FileItem(filename=filename, path=path)) == date + assert ( + self._get_created_time(FileItem(filename=filename, path=path)) + == date + ) def _get_modified_time(self, item: FileItem) -> datetime: """Indicate if the given file modification time is equal to the anticipated time.""" @@ -77,7 +85,10 @@ def assert_get_modified_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file modification time is equal to the anticipated time.""" - assert self._get_modified_time(FileItem(filename=filename, path=path)) == date + assert ( + self._get_modified_time(FileItem(filename=filename, path=path)) + == date + ) def _save(self, item: FileItem) -> str: """Save the provided file to the given filename in the storage @@ -130,7 +141,9 @@ async def _async_get_size(self, item: FileItem) -> int: """Indicate if the given file size is equal to the anticipated size.""" return len(self.files[item.url_path].contents) - def assert_get_size(self, filename: str, path: Tuple[str, ...], size: int) -> None: + def assert_get_size( + self, filename: str, path: Tuple[str, ...], size: int + ) -> None: """Assert that given file size is equal to the anticipated size.""" assert self._get_size(FileItem(filename=filename, path=path)) == size @@ -142,7 +155,10 @@ def assert_get_accessed_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file access time is equal to the anticipated time.""" - assert self._get_accessed_time(FileItem(filename=filename, 
path=path)) == date + assert ( + self._get_accessed_time(FileItem(filename=filename, path=path)) + == date + ) async def _async_get_created_time(self, item: FileItem) -> bool: """Indicate if the given file creation time is equal to the anticipated time.""" @@ -152,7 +168,10 @@ def assert_get_created_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file creation time is equal to the anticipated time.""" - assert self._get_created_time(FileItem(filename=filename, path=path)) == date + assert ( + self._get_created_time(FileItem(filename=filename, path=path)) + == date + ) async def _async_get_modified_time(self, item: FileItem) -> bool: """Indicate if the given file modification time is equal to the anticipated time.""" @@ -162,7 +181,10 @@ def assert_get_modified_time( self, filename: str, path: Tuple[str, ...], date: datetime ) -> None: """Assert that given file modification time is equal to the anticipated time.""" - assert self._get_modified_time(FileItem(filename=filename, path=path)) == date + assert ( + self._get_modified_time(FileItem(filename=filename, path=path)) + == date + ) async def _async_save(self, item: FileItem) -> str: """Save the provided file to the given filename in the storage diff --git a/filestorage/handlers/file.py b/filestorage/handlers/file.py index 770700b..40a40a2 100644 --- a/filestorage/handlers/file.py +++ b/filestorage/handlers/file.py @@ -116,7 +116,9 @@ def resolve_filename(self, item: FileItem) -> FileItem: if not self._exists(item): return item else: - raise RuntimeError(f"Cannot get unique name for file {basename}{ext}") + raise RuntimeError( + f"Cannot get unique name for file {basename}{ext}" + ) def os_wrap(fn: utils.SyncCallable) -> utils.AsyncCallable: @@ -220,7 +222,9 @@ async def async_resolve_filename(self, item: FileItem) -> FileItem: if not await self._async_exists(item): return item else: - raise RuntimeError(f"Cannot get unique name for file {basename}{ext}") + raise RuntimeError( + f"Cannot get unique name for file {basename}{ext}" + ) def _save(self, item: FileItem) -> str: if not self.allow_sync_methods: diff --git a/filestorage/handlers/s3.py b/filestorage/handlers/s3.py index eb9b864..a872120 100644 --- a/filestorage/handlers/s3.py +++ b/filestorage/handlers/s3.py @@ -96,7 +96,9 @@ def __init__( } if addressing_style: - self.aio_config_params["s3"] = {"addressing_style": addressing_style} + self.aio_config_params["s3"] = { + "addressing_style": addressing_style + } if region_name: self.aio_config_params["region_name"] = region_name @@ -109,7 +111,9 @@ def __conn_options(self): if self.__memoized_conn_options: return self.__memoized_conn_options - self.__memoized_conn_options = {"config": AioConfig(**self.aio_config_params)} + self.__memoized_conn_options = { + "config": AioConfig(**self.aio_config_params) + } # This could be blank if the dev wants to use the local auth mechanisms # for AWS - either environment variables: @@ -139,7 +143,9 @@ def __conn_options(self): self.aws_session_token = "(hidden)" if self.profile_name: - self.__memoized_conn_options["profile_name"] = str(self.profile_name) + self.__memoized_conn_options["profile_name"] = str( + self.profile_name + ) # The endpoint_url isn't part of the configuration. 
if self.host_url: @@ -149,7 +155,9 @@ def __conn_options(self): async def _validate(self) -> Optional[Awaitable]: """Perform any setup or validation.""" if aioboto3 is None: - raise FilestorageConfigError("aioboto3 library required but not installed.") + raise FilestorageConfigError( + "aioboto3 library required but not installed." + ) # Call this in order to populate the options self.__conn_options @@ -192,7 +200,9 @@ async def _async_exists(self, item: FileItem, s3=None) -> bool: return await self._async_exists(item, s3) try: - await s3.meta.client.head_object(Bucket=self.bucket_name, Key=item.url_path) + await s3.meta.client.head_object( + Bucket=self.bucket_name, Key=item.url_path + ) except ClientError as err: if int(err.response.get("Error", {}).get("Code")) == 404: return False @@ -211,17 +221,23 @@ async def _async_get_size(self, item: FileItem, s3=None) -> int: return int(head["ContentLength"]) - async def _async_get_accessed_time(self, item: FileItem, s3=None) -> datetime: + async def _async_get_accessed_time( + self, item: FileItem, s3=None + ) -> datetime: raise NotImplementedError( "get_accessed_time is not supported with the S3 handler" ) - async def _async_get_created_time(self, item: FileItem, s3=None) -> datetime: + async def _async_get_created_time( + self, item: FileItem, s3=None + ) -> datetime: raise NotImplementedError( "get_created_time is not supported with the S3 handler" ) - async def _async_get_modified_time(self, item: FileItem, s3=None) -> datetime: + async def _async_get_modified_time( + self, item: FileItem, s3=None + ) -> datetime: if s3 is None: # If not called with the s3 context, do it again. async with self.resource as s3: diff --git a/filestorage/handlers/s3.pyi b/filestorage/handlers/s3.pyi index 6b71eae..d3b2347 100644 --- a/filestorage/handlers/s3.pyi +++ b/filestorage/handlers/s3.pyi @@ -9,7 +9,9 @@ from typing import Any, Optional class AioBotoS3ResourceContext: async def __aenter__(self) -> None: ... - async def __aexit__(self, exc_type: str, exc: Exception, tb: Any) -> Any: ... + async def __aexit__( + self, exc_type: str, exc: Exception, tb: Any + ) -> Any: ... TypeACL: Any TypeAddressingStyle: Any diff --git a/filestorage/storage_container.py b/filestorage/storage_container.py index a2c855c..ad14894 100644 --- a/filestorage/storage_container.py +++ b/filestorage/storage_container.py @@ -53,7 +53,9 @@ def do_not_use(self) -> bool: def sync_handler(self) -> StorageHandlerBase: handler = self.handler if handler is None: - raise FilestorageConfigError(f"No handler provided for store{self.name}") + raise FilestorageConfigError( + f"No handler provided for store{self.name}" + ) return cast(StorageHandlerBase, handler) @property @@ -73,7 +75,9 @@ def handler( if self._do_not_use: return None if self._handler is None: - raise FilestorageConfigError(f"No handler provided for store{self.name}") + raise FilestorageConfigError( + f"No handler provided for store{self.name}" + ) return self._handler @handler.setter @@ -107,7 +111,9 @@ async def async_finalize_config(self) -> None: return if self._handler is None: - raise FilestorageConfigError(f"No handler provided for store{self.name}") + raise FilestorageConfigError( + f"No handler provided for store{self.name}" + ) result = self._handler.validate() if iscoroutine(result) or isfuture(result): @@ -135,4 +141,6 @@ def __getitem__(self, key: str) -> "StorageContainer": raise FilestorageConfigError( f"Getting store{self.name}[{key!r}]: store already finalized!" 
) - return self._children.setdefault(key, StorageContainer(name=key, parent=self)) + return self._children.setdefault( + key, StorageContainer(name=key, parent=self) + ) diff --git a/filestorage/utils.py b/filestorage/utils.py index ad1f153..61bda27 100644 --- a/filestorage/utils.py +++ b/filestorage/utils.py @@ -30,7 +30,9 @@ def async_to_sync(fn: AsyncCallable) -> SyncCallable: return cast(SyncCallable, sync.async_to_sync(fn)) -def any_to_async(fn: MaybeAsyncCallable, thread_sensitive=True) -> AsyncCallable: +def any_to_async( + fn: MaybeAsyncCallable, thread_sensitive=True +) -> AsyncCallable: if iscoroutinefunction(fn): return fn return sync_to_async(fn, thread_sensitive=thread_sensitive) diff --git a/filestorage/utils.pyi b/filestorage/utils.pyi index 263843f..326e66b 100644 --- a/filestorage/utils.pyi +++ b/filestorage/utils.pyi @@ -6,7 +6,9 @@ AsyncCallable = Callable[[T], Awaitable[R]] SyncCallable = Callable[[T], R] MaybeAsyncCallable = Union[SyncCallable, AsyncCallable] -def sync_to_async(fn: SyncCallable, thread_sensitive: Any = ...) -> AsyncCallable: ... +def sync_to_async( + fn: SyncCallable, thread_sensitive: Any = ... +) -> AsyncCallable: ... def async_to_sync(fn: AsyncCallable) -> SyncCallable: ... def any_to_async( fn: MaybeAsyncCallable, thread_sensitive: Any = ... diff --git a/setup.py b/setup.py index 5cf707c..261a54f 100644 --- a/setup.py +++ b/setup.py @@ -21,7 +21,7 @@ "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", ], - python_requires=">=3.6", + python_requires=">=3.8", platforms="any", package_data={ "filestorage": ["*.pyi", "py.typed", "VERSION"], diff --git a/tests/filters/test_randomize_filename.py b/tests/filters/test_randomize_filename.py index 58bf1d7..168b8d2 100644 --- a/tests/filters/test_randomize_filename.py +++ b/tests/filters/test_randomize_filename.py @@ -8,7 +8,9 @@ @pytest.fixture def item(): - return FileItem(filename="file.txt", path=("folder",), data=BytesIO(b"content")) + return FileItem( + filename="file.txt", path=("folder",), data=BytesIO(b"content") + ) def with_spam(old_name): diff --git a/tests/filters/test_valid_extensions.py b/tests/filters/test_valid_extensions.py index 0fe897d..bd7024d 100644 --- a/tests/filters/test_valid_extensions.py +++ b/tests/filters/test_valid_extensions.py @@ -9,7 +9,9 @@ @pytest.fixture def item(): - return FileItem(filename="file.txt", path=("folder",), data=BytesIO(b"content")) + return FileItem( + filename="file.txt", path=("folder",), data=BytesIO(b"content") + ) def test_valid_extension(item): diff --git a/tests/handlers/test_local_file.py b/tests/handlers/test_local_file.py index 2fc0422..4d08b07 100644 --- a/tests/handlers/test_local_file.py +++ b/tests/handlers/test_local_file.py @@ -117,7 +117,9 @@ def test_local_file_handler_get_accessed_time(directory): item = handler.get_item("test.txt") atime = handler.get_accessed_time("test.txt") - assert atime == datetime.fromtimestamp(os.path.getatime(handler.local_path(item))) + assert atime == datetime.fromtimestamp( + os.path.getatime(handler.local_path(item)) + ) def test_local_file_handler_get_created_time(directory): @@ -127,7 +129,9 @@ def test_local_file_handler_get_created_time(directory): item = handler.get_item("test.txt") ctime = handler.get_created_time("test.txt") - assert ctime == datetime.fromtimestamp(os.path.getctime(handler.local_path(item))) + assert ctime == datetime.fromtimestamp( + os.path.getctime(handler.local_path(item)) + ) def test_local_file_handler_get_modified_time(directory): @@ -137,7 +141,9 @@ 
def test_local_file_handler_get_modified_time(directory): item = handler.get_item("test.txt") mtime = handler.get_modified_time("test.txt") - assert mtime == datetime.fromtimestamp(os.path.getmtime(handler.local_path(item))) + assert mtime == datetime.fromtimestamp( + os.path.getmtime(handler.local_path(item)) + ) def test_local_file_handler_delete(directory): @@ -221,7 +227,9 @@ async def test_async_local_file_handler_get_accessed_time(directory): item = handler.get_item("test.txt") atime = await handler.async_get_accessed_time("test.txt") - assert atime == datetime.fromtimestamp(os.path.getatime(handler.local_path(item))) + assert atime == datetime.fromtimestamp( + os.path.getatime(handler.local_path(item)) + ) @pytest.mark.asyncio @@ -232,7 +240,9 @@ async def test_async_local_file_handler_get_created_time(directory): item = handler.get_item("test.txt") ctime = await handler.async_get_created_time("test.txt") - assert ctime == datetime.fromtimestamp(os.path.getctime(handler.local_path(item))) + assert ctime == datetime.fromtimestamp( + os.path.getctime(handler.local_path(item)) + ) @pytest.mark.asyncio @@ -243,7 +253,9 @@ async def test_async_local_file_handler_get_modified_time(directory): item = handler.get_item("test.txt") mtime = await handler.async_get_modified_time("test.txt") - assert mtime == datetime.fromtimestamp(os.path.getmtime(handler.local_path(item))) + assert mtime == datetime.fromtimestamp( + os.path.getmtime(handler.local_path(item)) + ) @pytest.mark.asyncio @@ -292,7 +304,9 @@ async def test_async_to_sync_local_file_handler_get_accessed_time(directory): item = handler.get_item("test.txt") atime = handler.get_accessed_time("test.txt") - assert atime == datetime.fromtimestamp(os.path.getatime(handler.local_path(item))) + assert atime == datetime.fromtimestamp( + os.path.getatime(handler.local_path(item)) + ) @pytest.mark.asyncio @@ -303,7 +317,9 @@ async def test_async_to_sync_local_file_handler_get_created_time(directory): item = handler.get_item("test.txt") ctime = handler.get_created_time("test.txt") - assert ctime == datetime.fromtimestamp(os.path.getctime(handler.local_path(item))) + assert ctime == datetime.fromtimestamp( + os.path.getctime(handler.local_path(item)) + ) @pytest.mark.asyncio @@ -314,7 +330,9 @@ async def test_async_to_sync_local_file_handler_get_modified_time(directory): item = handler.get_item("test.txt") mtime = handler.get_modified_time("test.txt") - assert mtime == datetime.fromtimestamp(os.path.getmtime(handler.local_path(item))) + assert mtime == datetime.fromtimestamp( + os.path.getmtime(handler.local_path(item)) + ) @pytest.mark.asyncio @@ -330,7 +348,9 @@ async def test_async_to_sync_local_file_handler_delete(directory): @pytest.mark.asyncio async def test_async_local_file_handler_try_save_subfolder(directory, store): - store.handler = AsyncLocalFileHandler(base_path=directory, auto_make_dir=True) + store.handler = AsyncLocalFileHandler( + base_path=directory, auto_make_dir=True + ) handler = store / "folder" / "subfolder" await handler.async_save_data(filename="test.txt", data=b"contents") @@ -344,9 +364,15 @@ async def test_async_local_file_handler_try_save_subfolder(directory, store): async def test_async_local_file_save_same_filename(directory): handler = AsyncLocalFileHandler(base_path=directory) - first = await handler.async_save_data(filename="test.txt", data=b"contents 1") - second = await handler.async_save_data(filename="test.txt", data=b"contents 2") - third = await handler.async_save_data(filename="test.txt", data=b"contents 
3") + first = await handler.async_save_data( + filename="test.txt", data=b"contents 1" + ) + second = await handler.async_save_data( + filename="test.txt", data=b"contents 2" + ) + third = await handler.async_save_data( + filename="test.txt", data=b"contents 3" + ) assert first == "test.txt" assert second == "test-1.txt" @@ -362,7 +388,9 @@ async def test_async_local_file_save_same_filename(directory): def test_async_only_save(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.save_data(filename="test.txt", data=b"contents") @@ -371,7 +399,9 @@ def test_async_only_save(directory): def test_async_only_exists(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.exists(filename="test.txt") @@ -381,7 +411,9 @@ def test_async_only_exists(directory): @pytest.mark.asyncio async def test_async_only_get_size(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.get_size(filename="test.txt") @@ -391,7 +423,9 @@ async def test_async_only_get_size(directory): @pytest.mark.asyncio async def test_async_only_get_accessed_time(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.get_accessed_time(filename="test.txt") @@ -401,7 +435,9 @@ async def test_async_only_get_accessed_time(directory): @pytest.mark.asyncio async def test_async_only_get_created_time(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.get_created_time(filename="test.txt") @@ -411,7 +447,9 @@ async def test_async_only_get_created_time(directory): @pytest.mark.asyncio async def test_async_only_get_modified_time(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.get_modified_time(filename="test.txt") @@ -420,7 +458,9 @@ async def test_async_only_get_modified_time(directory): def test_async_only_delete(directory): - handler = AsyncLocalFileHandler(base_path=directory, allow_sync_methods=False) + handler = AsyncLocalFileHandler( + base_path=directory, allow_sync_methods=False + ) with pytest.raises(RuntimeError) as err: handler.delete(filename="test.txt") diff --git a/tests/handlers/test_s3.py b/tests/handlers/test_s3.py index 9a9626b..eb05053 100644 --- a/tests/handlers/test_s3.py +++ b/tests/handlers/test_s3.py @@ -82,7 +82,10 @@ async def test_async_get_accessed_time(mock_s3_resource, handler): with pytest.raises(NotImplementedError) as err: await handler._async_get_accessed_time(item) - assert str(err.value) == "get_accessed_time is not supported with the S3 handler" + assert ( + str(err.value) + == "get_accessed_time is not supported with the S3 handler" + ) def 
test_get_accessed_time(mock_s3_resource, handler): @@ -91,7 +94,10 @@ def test_get_accessed_time(mock_s3_resource, handler): with pytest.raises(NotImplementedError) as err: handler._get_accessed_time(item) - assert str(err.value) == "get_accessed_time is not supported with the S3 handler" + assert ( + str(err.value) + == "get_accessed_time is not supported with the S3 handler" + ) @pytest.mark.asyncio @@ -101,7 +107,10 @@ async def test_async_get_created_time(mock_s3_resource, handler): with pytest.raises(NotImplementedError) as err: await handler._async_get_created_time(item) - assert str(err.value) == "get_created_time is not supported with the S3 handler" + assert ( + str(err.value) + == "get_created_time is not supported with the S3 handler" + ) def test_get_created_time(mock_s3_resource, handler): @@ -110,7 +119,10 @@ def test_get_created_time(mock_s3_resource, handler): with pytest.raises(NotImplementedError) as err: handler._get_created_time(item) - assert str(err.value) == "get_created_time is not supported with the S3 handler" + assert ( + str(err.value) + == "get_created_time is not supported with the S3 handler" + ) @pytest.mark.asyncio @@ -234,11 +246,15 @@ def test_cant_delete(async_only_handler): @pytest.mark.asyncio async def test_async_save_in_folder(mock_s3_resource, handler): - item = handler.get_item("foo.txt", data=BytesIO(b"contents"), subpath=("folder",)) + item = handler.get_item( + "foo.txt", data=BytesIO(b"contents"), subpath=("folder",) + ) await handler._async_save(item) - assert mock_s3_resource._bucket._upload_fileobj_filename == "folder/foo.txt" + assert ( + mock_s3_resource._bucket._upload_fileobj_filename == "folder/foo.txt" + ) @pytest.mark.asyncio diff --git a/tests/test_config_utils.py b/tests/test_config_utils.py index b7ba442..a9207fe 100644 --- a/tests/test_config_utils.py +++ b/tests/test_config_utils.py @@ -110,7 +110,9 @@ def test_missing_required_parameter(store): config_utils.setup_from_settings(settings, store) assert "store.handler.filters[0]" in str(err.value) - assert "missing 1 required positional argument: 'extensions'" in str(err.value) + assert "missing 1 required positional argument: 'extensions'" in str( + err.value + ) def test_decode_ints_and_strings(store): diff --git a/tests/test_pyramid_config.py b/tests/test_pyramid_config.py index af2626b..a2eb384 100644 --- a/tests/test_pyramid_config.py +++ b/tests/test_pyramid_config.py @@ -20,7 +20,9 @@ def __init__(self, settings: Dict): def get_settings(self): return self._settings - def add_request_method(self, callable=None, name=None, property=False, reify=False): + def add_request_method( + self, callable=None, name=None, property=False, reify=False + ): self._request_methods[name] = callable def get_request_prop(self, name): diff --git a/tests/test_storage_container.py b/tests/test_storage_container.py index 3d0c62b..835eca3 100644 --- a/tests/test_storage_container.py +++ b/tests/test_storage_container.py @@ -129,7 +129,10 @@ def test_bad_handler_setting(store): # Handler must be a handler! store.handler = "foo" - assert str(err.value) == "Setting store.handler: 'foo' is not a StorageHandler" + assert ( + str(err.value) + == "Setting store.handler: 'foo' is not a StorageHandler" + ) def test_finalized_without_setting(store):
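
---

The reformatted tests above all exercise the same small API surface: a StorageContainer, a storage handler such as the in-memory DummyHandler, nested folders addressed with the / operator, and save_data(). For readers skimming the diffs, the standalone sketch below mirrors test_path_by_div from tests/test_storage_container.py; the two import lines are assumptions based on the package layout rather than something shown in these patches, so treat this as an illustration of what the tests do, not documented usage.

    # Sketch of the store API exercised by the reformatted tests,
    # mirroring test_path_by_div. Import paths are assumed.
    from filestorage import StorageContainer
    from filestorage.handlers import DummyHandler

    store = StorageContainer()

    # Nested folders can be addressed with the / operator; the handler's
    # path= prefix ends up at the front of the resulting url_path.
    sub_b = store / "a" / "b"
    store.handler = DummyHandler(base_url="http://eppx.com/", path=("static",))

    sub_b.save_data(filename="new_file.txt", data=b"As a cucumber.")

    # DummyHandler keeps saved items in memory and records the last one,
    # which is why the tests can read the stored bytes back.
    item = store.handler.last_save
    with item as f:
        assert f.read() == b"As a cucumber."
    assert item.url_path == "static/a/b/new_file.txt"

Because DummyHandler exposes last_save and the assert_* helpers shown in filestorage/handlers/dummy.py, the test suite can verify saved contents, sizes, and timestamps without touching the filesystem or S3.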