diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index c8654d9..6ce9c4b 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,8 +1,8 @@ # See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.166.1/containers/python-3/.devcontainer/base.Dockerfile -# [Choice] Python version: 3, 3.9, 3.8, 3.7, 3.6 +# [Choice] Python version: 3, 3.11, 3.10, 3.9, 3.8, 3.7, 3.6 ARG VARIANT="3" -FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT} +FROM mcr.microsoft.com/vscode/devcontainers/python:${VARIANT} # [Optional] If your pip requirements rarely change, uncomment this section to add them to the image. COPY test_requirements.txt dev_requirements.txt /tmp/pip-tmp/ diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index f3f0426..0511e10 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,37 +1,41 @@ -// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: -// https://github.com/microsoft/vscode-dev-containers/tree/v0.166.1/containers/python-3 +// For format details, see https://aka.ms/devcontainer.json. For config options, see the +// README at: https://github.com/devcontainers/templates/tree/main/src/python { "name": "Proxmoxer Development", "build": { "dockerfile": "Dockerfile", "context": "..", "args": { - // Update 'VARIANT' to pick a Python version: 3, 3.6, 3.7, 3.8, 3.9 - "VARIANT": "3", + // Update 'VARIANT' to pick a Python version: 3, 3.6, 3.7, 3.8, 3.9, 3.10, 3.11 + "VARIANT": "3.8" } }, // Set *default* container specific settings.json values on container create. - "settings": { - "terminal.integrated.shell.linux": "/bin/bash", - "python.pythonPath": "/usr/local/bin/python", - "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", - "python.formatting.blackPath": "/usr/local/py-utils/bin/black", - "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", - "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", - "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", - "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", - "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", - "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", - "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" + "customizations": { + "vscode": { + "settings": { + "terminal.integrated.shell.linux": "/bin/bash", + "python.pythonPath": "/usr/local/bin/python", + "python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8", + "python.formatting.blackPath": "/usr/local/py-utils/bin/black", + "python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf", + "python.linting.banditPath": "/usr/local/py-utils/bin/bandit", + "python.linting.flake8Path": "/usr/local/py-utils/bin/flake8", + "python.linting.mypyPath": "/usr/local/py-utils/bin/mypy", + "python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle", + "python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle", + "python.linting.pylintPath": "/usr/local/py-utils/bin/pylint" + }, + // Add the IDs of extensions you want installed when the container is created. + "extensions": [ + "mhutchie.git-graph", + "ms-python.python", + "njpwerner.autodocstring", + "ryanluker.vscode-coverage-gutters", + "streetsidesoftware.code-spell-checker" + ] + } }, - // Add the IDs of extensions you want installed when the container is created. 
- "extensions": [ - "mhutchie.git-graph", - "ms-python.python", - "njpwerner.autodocstring", - "ryanluker.vscode-coverage-gutters", - "streetsidesoftware.code-spell-checker" - ], // Use 'forwardPorts' to make a list of ports inside the container available locally. // "forwardPorts": [], // Run commands to prepare the container for use diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 9ef315a..e03614f 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -16,11 +16,11 @@ jobs: fail-fast: false matrix: python-version: - - "3.7" - "3.8" - "3.9" - "3.10" - "3.11" + - "3.12" steps: - name: Checkout diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 56450e9..1ad6def 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,13 +1,13 @@ repos: ###### FORMATTING ###### - - repo: https://github.com/psf/black - rev: 22.6.0 + - repo: https://github.com/psf/black-pre-commit-mirror + rev: 23.11.0 hooks: - id: black language_version: python3 # Should be a command that runs python3.6+ - repo: https://github.com/PyCQA/isort - rev: 5.10.1 + rev: 5.12.0 hooks: - id: isort name: isort (python) @@ -17,13 +17,13 @@ repos: ###### LINTING ###### - repo: https://github.com/PyCQA/bandit - rev: 1.7.4 + rev: 1.7.5 hooks: - id: bandit args: ["--configfile", ".bandit", "--baseline", "tests/known_issues.json"] - repo: https://github.com/PyCQA/flake8 - rev: 4.0.1 + rev: 6.1.0 hooks: - id: flake8 # any flake8 plugins must be included in the hook venv @@ -35,7 +35,7 @@ repos: # - id: pylint - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.3.0 + rev: v4.5.0 hooks: - id: check-case-conflict - id: check-symlinks @@ -46,13 +46,13 @@ repos: args: [--fix=no] - repo: https://github.com/asottile/blacken-docs - rev: v1.12.1 + rev: 1.16.0 hooks: - id: blacken-docs - additional_dependencies: [black==21.5b1] + additional_dependencies: [black==23.11.0] - repo: https://github.com/pre-commit/pygrep-hooks - rev: v1.9.0 + rev: v1.10.0 hooks: - id: python-no-eval - id: rst-backticks diff --git a/.vscode/tasks.json b/.vscode/tasks.json index daeaf7b..3b6faf4 100644 --- a/.vscode/tasks.json +++ b/.vscode/tasks.json @@ -4,7 +4,7 @@ "version": "2.0.0", "tasks": [ { - "label": "Run Tests (with coverage)", + "label": "Run Tests (with coverage file)", "type": "shell", "command": "pytest -v --cov --cov-report xml:coverage.xml tests/", "problemMatcher": [], @@ -28,6 +28,31 @@ "clear": true }, }, + { + "label": "Run Tests (with coverage)", + "type": "shell", + "command": "pytest --cov tests/", + "problemMatcher": [], + "icon": { + "id": "beaker", + "color": "terminal.ansiGreen" + }, + "runOptions": { + "instanceLimit": 1 + }, + "group": { + "kind": "test", + "isDefault": false + }, + "presentation": { + "echo": true, + "reveal": "always", + "focus": false, + "panel": "dedicated", + "showReuseMessage": true, + "clear": true + }, + }, { "label": "Run Tests", "type": "shell", @@ -74,7 +99,7 @@ { "label": "Clean Cache/tmp files", "type": "shell", - "command": "rm -rf ./.mypy_cache/ ./.pytest_cache/ ./.coverage.xml ./.coverage", + "command": "rm -rf ./.mypy_cache/ ./.pytest_cache/ ./coverage.xml ./.coverage", "problemMatcher": [], "group": { "kind": "none" diff --git a/CHANGELOG.md b/CHANGELOG.md index e587eea..f981ab6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,14 @@ +## 2.1.0 (2024-08-10) + +* Improvement (docs): Update Readme with updated example ([Rob Wolinski](https://github.com/trekie86)) +* Addition (tools): Added Files tools ([John 
Hollowell](https://github.com/jhollowe)) +* Improvement (all): Add repr to some classes and add to tests ([John Hollowell](https://github.com/jhollowe)) +* Bugfix (all): Correct metadata to match supported Python versions (3.6+) ([Alexei Znamensky](https://github.com/russoz)) +* Bugfix (https): Fix BytesWarning when logging response status/content ([Walter Doekes](https://github.com/wdoekes)) +* Improvement (meta): Update devcontainer to modern unified schema ([John Hollowell](https://github.com/jhollowe)) +* Improvement (meta): Add 3.12 to CI matrix, remove 3.7 testing ([John Hollowell](https://github.com/jhollowe)) +* Improvement (all): Fix improper splitting of non-exec QEMU commands ([John Hollowell](https://github.com/jhollowe)) + ## 2.0.1 (2022-12-19) * Bugfix (https): properly pass verify_ssl all the way to the backend auth ([Dominik Rimpf](https://github.com/domrim)) diff --git a/README.rst b/README.rst index 4e5688c..93d3a66 100644 --- a/README.rst +++ b/README.rst @@ -76,8 +76,8 @@ API calls using the access methods above. .. code-block:: pycon >>> for node in proxmox.nodes.get(): - ... for vm in proxmox.nodes(node["node"]).openvz.get(): - ... print "{0}. {1} => {2}".format(vm["vmid"], vm["name"], vm["status"]) + ... for vm in proxmox.nodes(node["node"]).qemu.get(): + ... print(f"{vm['vmid']}. {vm['name']} => {vm['status']}") ... 141. puppet-2.london.example.com => running diff --git a/proxmoxer/__init__.py b/proxmoxer/__init__.py index de633e8..4bb2269 100644 --- a/proxmoxer/__init__.py +++ b/proxmoxer/__init__.py @@ -1,6 +1,6 @@ __author__ = "Oleg Butovich" __copyright__ = "(c) Oleg Butovich 2013-2017" -__version__ = "2.0.1" +__version__ = "2.1.0" __license__ = "MIT" from .core import * # noqa diff --git a/proxmoxer/backends/command_base.py b/proxmoxer/backends/command_base.py index c8a38c3..3e2aec0 100644 --- a/proxmoxer/backends/command_base.py +++ b/proxmoxer/backends/command_base.py @@ -145,11 +145,15 @@ def loads_errors(self, response): class CommandBaseBackend: + def __init__(self): + self.session = None + self.target = "" + def get_session(self): return self.session def get_base_url(self): - return "" + return self.target def get_serializer(self): return JsonSimpleSerializer() diff --git a/proxmoxer/backends/https.py b/proxmoxer/backends/https.py index 6d238f5..3bb476a 100644 --- a/proxmoxer/backends/https.py +++ b/proxmoxer/backends/https.py @@ -24,6 +24,9 @@ import requests from requests.auth import AuthBase from requests.cookies import cookiejar_from_dict + + # Disable warnings about using untrusted TLS + requests.packages.urllib3.disable_warnings() except ImportError: logger.error("Chosen backend requires 'requests' module\n") sys.exit(1) @@ -171,7 +174,6 @@ def request( cert=None, serializer=None, ): - a = auth or self.auth c = cookies or self.cookies @@ -192,7 +194,7 @@ def request( total_file_size = 0 for k, v in data.copy().items(): # split qemu exec commands for proper parsing by PVE (issue#89) - if k == "command": + if k == "command" and url.endswith("agent/exec"): if isinstance(v, list): data[k] = v elif "Windows" not in platform.platform(): @@ -201,7 +203,8 @@ def request( total_file_size += get_file_size(v) # add in filename from file pointer (patch for https://github.com/requests/toolbelt/pull/316) - files[k] = (requests.utils.guess_filename(v), v) + # add Content-Type since Proxmox requires it (https://bugzilla.proxmox.com/show_bug.cgi?id=4344) + files[k] = (requests.utils.guess_filename(v), v, "application/octet-stream") del data[k] # if there are any 
large files, send all data and files using streaming multipart encoding @@ -261,7 +264,6 @@ def __init__( path_prefix=None, service="PVE", ): - host_port = "" if len(host.split(":")) > 2: # IPv6 if host.startswith("["): diff --git a/proxmoxer/backends/local.py b/proxmoxer/backends/local.py index 42e36cc..00bb1cd 100644 --- a/proxmoxer/backends/local.py +++ b/proxmoxer/backends/local.py @@ -22,3 +22,4 @@ def upload_file_obj(self, file_obj, remote_path): class Backend(CommandBaseBackend): def __init__(self, *args, **kwargs): self.session = LocalSession(*args, **kwargs) + self.target = "localhost" diff --git a/proxmoxer/backends/openssh.py b/proxmoxer/backends/openssh.py index 4f6b5f0..d9cd7d1 100644 --- a/proxmoxer/backends/openssh.py +++ b/proxmoxer/backends/openssh.py @@ -64,3 +64,4 @@ def upload_file_obj(self, file_obj, remote_path): class Backend(CommandBaseBackend): def __init__(self, *args, **kwargs): self.session = OpenSSHSession(*args, **kwargs) + self.target = self.session.host diff --git a/proxmoxer/backends/ssh_paramiko.py b/proxmoxer/backends/ssh_paramiko.py index 79584c3..0d0aa8c 100644 --- a/proxmoxer/backends/ssh_paramiko.py +++ b/proxmoxer/backends/ssh_paramiko.py @@ -74,3 +74,4 @@ def upload_file_obj(self, file_obj, remote_path): class Backend(CommandBaseBackend): def __init__(self, *args, **kwargs): self.session = SshParamikoSession(*args, **kwargs) + self.target = self.session.host diff --git a/proxmoxer/core.py b/proxmoxer/core.py index d32f17d..fdf42cc 100644 --- a/proxmoxer/core.py +++ b/proxmoxer/core.py @@ -78,18 +78,16 @@ def __init__(self, status_code, status_message, content, errors=None): class AuthenticationError(Exception): - def __init__(self, msg): - super().__init__(msg) - self.msg = msg - - def __str__(self): - return self.msg + pass class ProxmoxResource: def __init__(self, **kwargs): self._store = kwargs + def __repr__(self): + return f"ProxmoxResource ({self._store.get('base_url')})" + def __getattr__(self, item): if item.startswith("_"): raise AttributeError(item) @@ -142,7 +140,7 @@ def _request(self, method, data=None, params=None): del data[key] resp = self._store["session"].request(method, url, data=data, params=params) - logger.debug(f"Status code: {resp.status_code}, output: {resp.content}") + logger.debug(f"Status code: {resp.status_code}, output: {resp.content!r}") if resp.status_code >= 400: if hasattr(resp, "reason"): @@ -218,6 +216,9 @@ def __init__(self, host=None, backend="https", service="PVE", **kwargs): "serializer": self._backend.get_serializer(), } + def __repr__(self): + return f"ProxmoxAPI ({self._backend_name} backend for {self._store['base_url']})" + def get_tokens(self): """Return the auth and csrf tokens. diff --git a/proxmoxer/tools/__init__.py b/proxmoxer/tools/__init__.py index bc215a2..d4f5d0b 100644 --- a/proxmoxer/tools/__init__.py +++ b/proxmoxer/tools/__init__.py @@ -3,4 +3,5 @@ __license__ = "MIT" from . 
import * # noqa: F401 F403 +from .files import * # noqa: F401 F403 +from .tasks import * # noqa: F401 F403 diff --git a/proxmoxer/tools/files.py b/proxmoxer/tools/files.py new file mode 100644 index 0000000..83371c5 --- /dev/null +++ b/proxmoxer/tools/files.py @@ -0,0 +1,279 @@ +__author__ = "John Hollowell" +__copyright__ = "(c) John Hollowell 2023" +__license__ = "MIT" + +import hashlib +import logging +import os +import sys +from enum import Enum +from pathlib import Path +from typing import Optional +from urllib.parse import urljoin, urlparse + +from proxmoxer import ProxmoxResource, ResourceException +from proxmoxer.tools.tasks import Tasks + +CHECKSUM_CHUNK_SIZE = 16384 # read 16k at a time while calculating the checksum for upload + +logger = logging.getLogger(__name__) +logger.setLevel(level=logging.WARNING) + +try: + import requests +except ImportError: + logger.error("Files tools requires 'requests' module\n") + sys.exit(1) + + +class ChecksumInfo: + def __init__(self, name: str, hex_size: int): + self.name = name + self.hex_size = hex_size + + def __str__(self): + return self.name + + def __repr__(self): + return f"{self.name} ({self.hex_size} digits)" + + +class SupportedChecksums(Enum): + """ + An Enum of the checksum types supported by Proxmox + """ + + # ordered by preference, with longer/stronger checksums generally first + SHA512 = ChecksumInfo("sha512", 128) + SHA256 = ChecksumInfo("sha256", 64) + SHA224 = ChecksumInfo("sha224", 56) + SHA384 = ChecksumInfo("sha384", 96) + MD5 = ChecksumInfo("md5", 32) + SHA1 = ChecksumInfo("sha1", 40) + + +class Files: + """ + Ease-of-use tools for uploading and downloading files + in Proxmox VE + """ + + def __init__(self, prox: ProxmoxResource, node: str, storage: str): + self._prox = prox + self._node = node + self._storage = storage + + def __repr__(self): + return f"Files ({self._node}/{self._storage} at {self._prox})" + + def upload_local_file_to_storage( + self, + filename: str, + do_checksum_check: bool = True, + blocking_status: bool = True, + ): + file_path = Path(filename) + + if not file_path.is_file(): + logger.error(f'"{file_path.absolute()}" does not exist or is not a file') + return None + + # initialize here in case errors prevent values from being set + upid: str = "" + checksum: Optional[str] = None + checksum_type: Optional[str] = None + + try: + with open(file_path.absolute(), "rb") as f_obj: + if do_checksum_check: + # iterate through SupportedChecksums and find the first one in hashlib.algorithms_available + for checksum_info in (v.value for v in SupportedChecksums): + if checksum_info.name in hashlib.algorithms_available: + checksum_type = checksum_info.name + break + + if checksum_type is None: + logger.warning( + "There are no Proxmox supported checksums which are supported by hashlib. 
Skipping checksum validation" + ) + else: + h = hashlib.new(checksum_type) + + # Iterate through the file in CHECKSUM_CHUNK_SIZE size + for byte_block in iter(lambda: f_obj.read(CHECKSUM_CHUNK_SIZE), b""): + h.update(byte_block) + checksum = h.hexdigest() + logger.debug( + f"The {checksum_type} checksum of {file_path.absolute()} is {checksum}" + ) + + # reset to the start of the file so the upload can use the same file handle + f_obj.seek(0) + + params = { + "content": "iso" if file_path.absolute().name.endswith("iso") else "vztmpl", + "checksum-algorithm": checksum_type, + "checksum": checksum, + "filename": f_obj, + } + upid = self._prox.nodes(self._node).storage(self._storage).upload.post(**params) + except OSError as e: + logger.error(e) + return None + + if blocking_status: + return Tasks.blocking_status(self._prox, upid) + else: + return self._prox.nodes(self._node).tasks(upid).status.get() + + def download_file_to_storage( + self, + url: str, + checksum: Optional[str] = None, + checksum_type: Optional[str] = None, + blocking_status: bool = True, + ): + file_info = self.get_file_info(url) + filename = None + + if file_info is not None: + filename = file_info.get("filename") + + if checksum is None and checksum_type is None: + checksum, checksum_info = self.get_checksums_from_file_url(url, filename) + checksum_type = checksum_info.name if checksum_info else None + elif checksum is None or checksum_type is None: + logger.error( + "Must pass both checksum and checksum_type or leave both None for auto-discovery" + ) + return None + + if checksum is None or checksum_type is None: + logger.warning("Unable to discover checksum. Will not do checksum validation") + + params = { + "checksum-algorithm": checksum_type, + "url": url, + "checksum": checksum, + "content": "iso" if url.endswith("iso") else "vztmpl", + "filename": filename, + } + upid = self._prox.nodes(self._node).storage(self._storage)("download-url").post(**params) + + if blocking_status: + return Tasks.blocking_status(self._prox, upid) + else: + return self._prox.nodes(self._node).tasks(upid).status.get() + + def get_file_info(self, url: str): + try: + return self._prox.nodes(self._node)("query-url-metadata").get(url=url) + + except ResourceException as e: + logger.warning(f"Unable to get information for {url}: {e}") + return None + + @staticmethod + def get_checksums_from_file_url( + url: str, filename: str = None, preferred_type=SupportedChecksums.SHA512.value + ): + getters_by_quality = [ + Files._get_checksum_from_sibling_file, + Files._get_checksum_from_extension, + Files._get_checksum_from_extension_upper, + ] + + # hacky way to try the preferred_type first while still trying all types with no duplicates + all_types_with_priority = list( + dict.fromkeys([preferred_type, *(map(lambda t: t.value, SupportedChecksums))]) + ) + for c_info in all_types_with_priority: + for getter in getters_by_quality: + checksum: str = getter(url, c_info, filename) + if checksum is not None: + logger.info(f"{getter} found {str(c_info)} checksum {checksum}") + return (checksum, c_info) + else: + logger.debug(f"{getter} found no {str(c_info)} checksum") + + return (None, None) + + @staticmethod + def _get_checksum_from_sibling_file( + url: str, checksum_info: ChecksumInfo, filename: Optional[str] = None + ) -> Optional[str]: + """ + Uses a checksum file in the same path as the target file to discover the checksum + + :param url: the URL string of the target file + :type url: str + :param checksum_info: the type of checksum to search for + :type 
checksum_info: ChecksumInfo + :param filename: the filename to use for finding the checksum. If None, it will be discovered from the url + :type filename: str | None + :return: a string of the checksum if found, else None + :rtype: str | None + """ + sumfile_url = urljoin(url, (checksum_info.name + "SUMS").upper()) + filename = filename or os.path.basename(urlparse(url).path) + + return Files._get_checksum_helper(sumfile_url, filename, checksum_info) + + @staticmethod + def _get_checksum_from_extension( + url: str, checksum_info: ChecksumInfo, filename: Optional[str] = None + ) -> Optional[str]: + """ + Uses a checksum file with a checksum extension added to the target file to discover the checksum + + :param url: the URL string of the target file + :type url: str + :param checksum_info: the type of checksum to search for + :type checksum_info: ChecksumInfo + :param filename: the filename to use for finding the checksum. If None, it will be discovered from the url + :type filename: str | None + :return: a string of the checksum if found, else None + :rtype: str | None + """ + sumfile_url = url + "." + checksum_info.name + filename = filename or os.path.basename(urlparse(url).path) + + return Files._get_checksum_helper(sumfile_url, filename, checksum_info) + + @staticmethod + def _get_checksum_from_extension_upper( + url: str, checksum_info: ChecksumInfo, filename: Optional[str] = None + ) -> Optional[str]: + """ + Uses a checksum file with a checksum extension added to the target file to discover the checksum + + :param url: the URL string of the target file + :type url: str + :param checksum_info: the type of checksum to search for + :type checksum_info: ChecksumInfo + :param filename: the filename to use for finding the checksum. If None, it will be discovered from the url + :type filename: str | None + :return: a string of the checksum if found, else None + :rtype: str | None + """ + sumfile_url = url + "." 
+ checksum_info.name.upper() + filename = filename or os.path.basename(urlparse(url).path) + + return Files._get_checksum_helper(sumfile_url, filename, checksum_info) + + @staticmethod + def _get_checksum_helper(sumfile_url: str, filename: str, checksum_info: ChecksumInfo): + logger.debug(f"getting {sumfile_url}") + try: + resp = requests.get(sumfile_url, timeout=10) + except (requests.exceptions.ConnectionError, requests.exceptions.ReadTimeout): + logger.info(f"Failed when trying to get {sumfile_url}") + return None + + if resp.status_code == 200: + for line in resp.iter_lines(): + line_str = line.decode("utf-8") + logger.debug(f"checking for '{filename}' in '{line_str}'") + if filename in str(line_str): + return line_str[0 : checksum_info.hex_size] + return None diff --git a/proxmoxer/tools/tasks.py b/proxmoxer/tools/tasks.py index 6baea71..9144a6e 100644 --- a/proxmoxer/tools/tasks.py +++ b/proxmoxer/tools/tasks.py @@ -12,7 +12,7 @@ class Tasks: """ @staticmethod - def blocking_status(prox, task_id, timeout=300, polling_interval=0.01): + def blocking_status(prox, task_id, timeout=300, polling_interval=1): """ Turns getting the status of a Proxmox task into a blocking call by polling the API until the task completes @@ -23,7 +23,7 @@ def blocking_status(prox, task_id, timeout=300, polling_interval=0.01): :type task_id: str :param timeout: If the task does not complete in this time (in seconds) return None, defaults to 300 :type timeout: int, optional - :param polling_interval: the time to wait between checking for status updates, defaults to 0.01 + :param polling_interval: the time to wait between checking for status updates, defaults to 1 :type polling_interval: float, optional :return: the status of the task :rtype: dict diff --git a/setup.py b/setup.py index b6dc370..e0537fa 100644 --- a/setup.py +++ b/setup.py @@ -41,12 +41,6 @@ "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 3.4", - "Programming Language :: Python :: 3.5", - "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", diff --git a/tests/api_mock.py b/tests/api_mock.py index 6002d7d..0488545 100644 --- a/tests/api_mock.py +++ b/tests/api_mock.py @@ -4,7 +4,7 @@ import json import re -from urllib.parse import parse_qsl +from urllib.parse import parse_qsl, urlparse import pytest import responses @@ -26,7 +26,6 @@ class PVERegistry(responses.registries.FirstMatchRegistry): "Pragma": "no-cache", "Server": "pve-api-daemon/3.0", "Content-Type": "application/json;charset=UTF-8", - # "Content-Encoding": "gzip", } def __init__(self): @@ -49,6 +48,35 @@ def _generate_static_responses(self): ) ) + resps.append( + responses.Response( + method="POST", + url=re.compile(self.base_url + r"/nodes/[^/]+/storage/[^/]+/download-url"), + # "done" added to UPID so polling will terminate (status checking is tested elsewhere) + json={ + "data": "UPID:node:003094EA:095F1EFE:63E88772:download:file.iso:root@pam:done", + "success": 1, + }, + ) + ) + + resps.append( + responses.Response( + method="POST", + url=re.compile(self.base_url + r"/nodes/[^/]+/storage/storage1/upload"), + # "done" added to UPID so polling will terminate (status checking is tested elsewhere) + json={"data": 
"UPID:node:0017C594:0ADB2769:63EC5455:imgcopy::root@pam:done"}, + ) + ) + resps.append( + responses.Response( + method="POST", + url=re.compile(self.base_url + r"/nodes/[^/]+/storage/missing/upload"), + status=500, + body="storage 'missing' does not exist", + ) + ) + return resps def _generate_dynamic_responses(self): @@ -75,11 +103,35 @@ def _generate_dynamic_responses(self): resps.append( responses.CallbackResponse( method="GET", - url=re.compile(self.base_url + r"/nodes/\w+/tasks/[^/]+/status"), + url=re.compile(self.base_url + r"/nodes/[^/]+/qemu/[^/]+/agent/exec"), + callback=self._cb_echo, + ) + ) + + resps.append( + responses.CallbackResponse( + method="GET", + url=re.compile(self.base_url + r"/nodes/[^/]+/qemu/[^/]+/monitor"), + callback=self._cb_qemu_monitor, + ) + ) + + resps.append( + responses.CallbackResponse( + method="GET", + url=re.compile(self.base_url + r"/nodes/[^/]+/tasks/[^/]+/status"), callback=self._cb_task_status, ) ) + resps.append( + responses.CallbackResponse( + method="GET", + url=re.compile(self.base_url + r"/nodes/[^/]+/query-url-metadata.*"), + callback=self._cb_url_metadata, + ) + ) + return resps ################################### @@ -219,3 +271,90 @@ def _cb_task_status(self, request): } return (200, self.common_headers, json.dumps(resp)) + + def _cb_url_metadata(self, request): + form_data_dict = dict(parse_qsl((urlparse(request.url)).query)) + + if "file.iso" in form_data_dict.get("url", ""): + return ( + 200, + self.common_headers, + json.dumps( + { + "data": { + "size": 123456, + "filename": "file.iso", + "mimetype": "application/x-iso9660-image", + # "mimetype": "application/octet-stream", + }, + "success": 1, + } + ), + ) + elif "invalid.iso" in form_data_dict.get("url", ""): + return ( + 500, + self.common_headers, + json.dumps( + { + "status": 500, + "message": "invalid server response: '500 Can't connect to sub.domain.tld:443 (certificate verify failed)'\n", + "success": 0, + "data": None, + } + ), + ) + elif "missing.iso" in form_data_dict.get("url", ""): + return ( + 500, + self.common_headers, + json.dumps( + { + "status": 500, + "success": 0, + "message": "invalid server response: '404 Not Found'\n", + "data": None, + } + ), + ) + + elif "index.html" in form_data_dict.get("url", ""): + return ( + 200, + self.common_headers, + json.dumps( + { + "success": 1, + "data": {"filename": "index.html", "mimetype": "text/html", "size": 17664}, + } + ), + ) + + def _cb_qemu_monitor(self, request): + body = request.body + if body is not None: + body = body if isinstance(body, str) else str(body, "utf-8") + + # if the command is an array, throw the type error PVE would throw + if "&" in body: + return ( + 400, + self.common_headers, + json.dumps( + { + "data": None, + "errors": {"command": "type check ('string') failed - got ARRAY"}, + } + ), + ) + else: + resp = { + "method": request.method, + "url": request.url, + "headers": dict(request.headers), + "cookies": request._cookies.get_dict(), + "body": body, + # "body_json": dict(parse_qsl(request.body)), + } + print(resp) + return (200, self.common_headers, json.dumps(resp)) diff --git a/tests/files_mock.py b/tests/files_mock.py new file mode 100644 index 0000000..bcdbdae --- /dev/null +++ b/tests/files_mock.py @@ -0,0 +1,127 @@ +__author__ = "John Hollowell" +__copyright__ = "(c) John Hollowell 2022" +__license__ = "MIT" + +import re + +import pytest +import responses +from requests import exceptions + +from .api_mock import PVERegistry + + +@pytest.fixture() +def mock_files(): + with 
responses.RequestsMock( + registry=FilesRegistry, assert_all_requests_are_fired=False + ) as rsps: + yield rsps + + +class FilesRegistry(responses.registries.FirstMatchRegistry): + base_url = "https://sub.domain.tld" + + common_headers = { + "Cache-Control": "max-age=0", + "Connection": "close, Keep-Alive", + "Pragma": "no-cache", + "Server": "pve-api-daemon/3.0", + "Content-Type": "application/json;charset=UTF-8", + } + + def __init__(self): + super().__init__() + for resp in self._generate_static_responses(): + self.add(resp) + + def _generate_static_responses(self): + resps = [] + + # Basic GET requests + resps.append(responses.Response(method="GET", url=self.base_url, body="hello world")) + resps.append( + responses.Response(method="GET", url=self.base_url + "/file.iso", body="CONTENTS") + ) + + # sibling + resps.append( + responses.Response( + method="GET", url=self.base_url + "/sibling/file.iso", body="CONTENTS\n" + ) + ) + resps.append( + responses.Response( + method="GET", + url=self.base_url + "/sibling/TESTINGSUMS", + body="this_is_the_hash file.iso", + ) + ) + + # extension + resps.append( + responses.Response( + method="GET", url=self.base_url + "/extension/file.iso", body="CONTENTS\n" + ) + ) + resps.append( + responses.Response( + method="GET", + url=self.base_url + "/extension/file.iso.testing", + body="this_is_the_hash file.iso", + ) + ) + resps.append( + responses.Response( + method="GET", + url=self.base_url + "/extension/connectionerror.iso.testing", + body=exceptions.ConnectionError(), + ) + ) + resps.append( + responses.Response( + method="GET", + url=self.base_url + "/extension/readtimeout.iso.testing", + body=exceptions.ReadTimeout(), + ) + ) + + # extension upper + resps.append( + responses.Response( + method="GET", url=self.base_url + "/upper/file.iso", body="CONTENTS\n" + ) + ) + resps.append( + responses.Response( + method="GET", + url=self.base_url + "/upper/file.iso.TESTING", + body="this_is_the_hash file.iso", + ) + ) + + resps.append( + responses.Response( + method="GET", + url=re.compile(self.base_url + r"/checksums/file.iso.\w+"), + body="1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890 file.iso", + ) + ) + + return resps + + +@pytest.fixture() +def mock_files_and_pve(): + with responses.RequestsMock(registry=BothRegistry, assert_all_requests_are_fired=False) as rsps: + yield rsps + + +class BothRegistry(responses.registries.FirstMatchRegistry): + def __init__(self): + super().__init__() + registries = [FilesRegistry(), PVERegistry()] + + for reg in registries: + for resp in reg.registered: + self.add(resp) diff --git a/tests/test_command_base.py b/tests/test_command_base.py index 0fd5e3b..707a280 100644 --- a/tests/test_command_base.py +++ b/tests/test_command_base.py @@ -209,6 +209,12 @@ class TestCommandBaseBackend: backend.session = sess + def test_init(self): + b = command_base.CommandBaseBackend() + + assert b.session is None + assert b.target == "" + def test_get_session(self): assert self.backend.get_session() == self.sess @@ -233,7 +239,6 @@ def _exec_echo(_, cmd): @classmethod def _exec_err(_, cmd): - print("\n".join(cmd)) return None, "\n".join(cmd) diff --git a/tests/test_core.py b/tests/test_core.py index 720333a..fd9cfcb 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -15,6 +15,7 @@ PVERegistry, mock_pve, ) +from .test_paramiko import mock_ssh_client # pylint: disable=unused-import # noqa: F401 # pylint: disable=no-self-use,protected-access @@ -30,6 
+31,7 @@ def test_init_none(self): assert e.content is None assert e.errors is None assert str(e) == "None None: None" + assert repr(e) == "ResourceException('None None: None')" def test_init_basic(self): e = core.ResourceException(500, "Internal Error", "Unable to do the thing") @@ -39,6 +41,7 @@ def test_init_basic(self): assert e.content == "Unable to do the thing" assert e.errors is None assert str(e) == "500 Internal Error: Unable to do the thing" + assert repr(e) == "ResourceException('500 Internal Error: Unable to do the thing')" def test_init_error(self): e = core.ResourceException( @@ -50,6 +53,10 @@ def test_init_error(self): assert e.content == "Unable to do the thing" assert e.errors == "functionality not found" assert str(e) == "500 Internal Error: Unable to do the thing - functionality not found" + assert ( + repr(e) + == "ResourceException('500 Internal Error: Unable to do the thing - functionality not found')" + ) class TestProxmoxResource: @@ -72,11 +79,14 @@ def test_url_join_all_segments(self): "https://www.example.com/base#div1?search=query", "path" ) + def test_repr(self): + obj = core.ProxmoxResource(base_url="root") + assert repr(obj.first.second("third")) == "ProxmoxResource (root/first/second/third)" + def test_getattr_private(self): with pytest.raises(AttributeError) as exc_info: self.obj._thing - print(exc_info) assert str(exc_info.value) == "_thing" def test_getattr_single(self): @@ -292,6 +302,26 @@ def test_init_local_with_host(self): assert str(exc_info.value) == "local backend does not support host keyword" + def test_repr_https(self): + prox = core.ProxmoxAPI("host", token_name="name", token_value="value", backend="hTtPs") + + assert repr(prox) == "ProxmoxAPI (https backend for https://host:8006/api2/json)" + + def test_repr_local(self): + prox = core.ProxmoxAPI(backend="local") + + assert repr(prox) == "ProxmoxAPI (local backend for localhost)" + + def test_repr_openssh(self): + prox = core.ProxmoxAPI("host", user="user", backend="openssh") + + assert repr(prox) == "ProxmoxAPI (openssh backend for host)" + + def test_repr_paramiko(self, mock_ssh_client): + prox = core.ProxmoxAPI("host", user="user", backend="ssh_paramiko") + + assert repr(prox) == "ProxmoxAPI (ssh_paramiko backend for host)" + def test_get_tokens_https(self, mock_pve): prox = core.ProxmoxAPI("1.2.3.4:1234", user="user", password="password", backend="https") ticket, csrf = prox.get_tokens() diff --git a/tests/test_https.py b/tests/test_https.py index 69d7d08..53c3bcb 100644 --- a/tests/test_https.py +++ b/tests/test_https.py @@ -103,7 +103,6 @@ def test_get_tokens_api_token(self): assert backend.get_tokens() == (None, None) def test_get_tokens_password(self, mock_pve): - backend = https.Backend("1.2.3.4:1234", password="name") assert ("ticket", "CSRFPreventionToken") == backend.get_tokens() @@ -242,6 +241,10 @@ def test_auth_failure(self, mock_pve): str(exc_info.value) == f"Couldn't authenticate user: bad_auth to {self.base_url}/access/ticket" ) + assert ( + repr(exc_info.value) + == f'AuthenticationError("Couldn\'t authenticate user: bad_auth to {self.base_url}/access/ticket")' + ) def test_auth_otp(self, mock_pve): https.ProxmoxHTTPAuth( @@ -256,6 +259,10 @@ def test_auth_otp_missing(self, mock_pve): str(exc_info.value) == "Couldn't authenticate user: missing Two Factor Authentication (TFA)" ) + assert ( + repr(exc_info.value) + == 'AuthenticationError("Couldn\'t authenticate user: missing Two Factor Authentication (TFA)")' + ) class TestProxmoxHttpSession: @@ -284,25 +291,51 @@ def 
test_request_data(self, mock_pve): assert content["body"] == "key=value" assert content["headers"]["Content-Type"] == "application/x-www-form-urlencoded" - def test_request_command_list(self, mock_pve): + def test_request_monitor_command_list(self, mock_pve): + resp = self._session.request( + "GET", + self.base_url + "/nodes/node_name/qemu/100/monitor", + data={"command": ["info", "block"]}, + ) + + assert resp.status_code == 400 + + def test_request_exec_command_list(self, mock_pve): resp = self._session.request( - "GET", self.base_url + "/fake/echo", data={"command": ["echo", "hello", "world"]} + "GET", + self.base_url + "/nodes/node_name/qemu/100/agent/exec", + data={"command": ["echo", "hello", "world"]}, ) content = resp.json() assert content["method"] == "GET" - assert content["url"] == self.base_url + "/fake/echo" + assert content["url"] == self.base_url + "/nodes/node_name/qemu/100/agent/exec" assert content["body"] == "command=echo&command=hello&command=world" assert content["headers"]["Content-Type"] == "application/x-www-form-urlencoded" - def test_request_command_string(self, mock_pve): + def test_request_monitor_command_string(self, mock_pve): resp = self._session.request( - "GET", self.base_url + "/fake/echo", data={"command": "echo hello world"} + "GET", + self.base_url + "/nodes/node_name/qemu/100/monitor", + data={"command": "echo hello world"}, ) content = resp.json() assert content["method"] == "GET" - assert content["url"] == self.base_url + "/fake/echo" + assert content["url"] == self.base_url + "/nodes/node_name/qemu/100/monitor" + assert content["body"] == "command=echo+hello+world" + assert content["headers"]["Content-Type"] == "application/x-www-form-urlencoded" + + def test_request_exec_command_string(self, mock_pve): + resp = self._session.request( + "GET", + self.base_url + "/nodes/node_name/qemu/100/agent/exec", + data={"command": "echo hello world"}, + ) + content = resp.json() + + assert content["method"] == "GET" + assert content["url"] == self.base_url + "/nodes/node_name/qemu/100/agent/exec" assert content["body"] == "command=echo&command=hello&command=world" assert content["headers"]["Content-Type"] == "application/x-www-form-urlencoded" @@ -316,7 +349,7 @@ def test_request_file(self, mock_pve): content = resp.json() # decode multipart file - body_regex = f'--([0-9a-f]*)\r\nContent-Disposition: form-data; name="iso"\r\n\r\na{{{size}}}\r\n--\\1--\r\n' + body_regex = f'--([0-9a-f]*)\r\nContent-Disposition: form-data; name="iso"\r\nContent-Type: application/octet-stream\r\n\r\na{{{size}}}\r\n--\\1--\r\n' m = re.match(body_regex, content["body"]) assert content["method"] == "GET" @@ -336,7 +369,7 @@ def test_request_streaming(self, toolbelt_on_off, caplog, mock_pve): content = resp.json() # decode multipart file - body_regex = f'--([0-9a-f]*)\r\nContent-Disposition: form-data; name="iso"\r\n\r\na{{{size}}}\r\n--\\1--\r\n' + body_regex = f'--([0-9a-f]*)\r\nContent-Disposition: form-data; name="iso"\r\nContent-Type: application/octet-stream\r\n\r\na{{{size}}}\r\n--\\1--\r\n' m = re.match(body_regex, content["body"]) assert content["method"] == "GET" @@ -354,7 +387,6 @@ def test_request_streaming(self, toolbelt_on_off, caplog, mock_pve): ] def test_request_large_file(self, shrink_thresholds, toolbelt_on_off, caplog, mock_pve): - size = https.SSL_OVERFLOW_THRESHOLD + 1 content = {} with tempfile.TemporaryFile("w+b") as f_obj: @@ -368,7 +400,7 @@ def test_request_large_file(self, shrink_thresholds, toolbelt_on_off, caplog, mo content = resp.json() # decode 
multipart file - body_regex = f'--([0-9a-f]*)\r\nContent-Disposition: form-data; name="iso"\r\n\r\na{{{size}}}\r\n--\\1--\r\n' + body_regex = f'--([0-9a-f]*)\r\nContent-Disposition: form-data; name="iso"\r\nContent-Type: application/octet-stream\r\n\r\na{{{size}}}\r\n--\\1--\r\n' m = re.match(body_regex, content["body"]) assert content["method"] == "GET" diff --git a/tests/test_imports.py b/tests/test_imports.py index b6c4af4..41f930c 100644 --- a/tests/test_imports.py +++ b/tests/test_imports.py @@ -26,6 +26,23 @@ def test_missing_requests(requests_off, caplog): ] +def test_missing_requests_tools_files(requests_off, caplog): + with pytest.raises(SystemExit) as exit_exp: + import proxmoxer.tools.files as test_files + + # force re-importing of the module with `requests` gone so the validation is triggered + reload(test_files) + + assert exit_exp.value.code == 1 + assert caplog.record_tuples == [ + ( + "proxmoxer.tools.files", + logging.ERROR, + "Files tools requires 'requests' module\n", + ) + ] + + def test_missing_openssh_wrapper(openssh_off, caplog): with pytest.raises(SystemExit) as exit_exp: import proxmoxer.backends.openssh as test_openssh diff --git a/tests/test_local.py b/tests/test_local.py index 1ccac39..fd7359c 100644 --- a/tests/test_local.py +++ b/tests/test_local.py @@ -14,10 +14,10 @@ def test_init(self): back = local.Backend() assert isinstance(back.session, local.LocalSession) + assert back.target == "localhost" class TestLocalSession: - _session = local.LocalSession() def test_upload_file_obj(self): diff --git a/tests/test_openssh.py b/tests/test_openssh.py index 0892f4a..9bd3f97 100644 --- a/tests/test_openssh.py +++ b/tests/test_openssh.py @@ -20,6 +20,7 @@ def test_init(self): assert isinstance(back.session, openssh.OpenSSHSession) assert back.session.host == "host" assert back.session.user == "user" + assert back.target == "host" class TestOpenSSHSession: diff --git a/tests/test_paramiko.py b/tests/test_paramiko.py index 269f462..5205089 100644 --- a/tests/test_paramiko.py +++ b/tests/test_paramiko.py @@ -20,6 +20,7 @@ def test_init(self, mock_connect): assert isinstance(back.session, ssh_paramiko.SshParamikoSession) assert back.session.host == "host" assert back.session.user == "user" + assert back.target == "host" class TestSshParamikoSession: diff --git a/tests/tools/test_files.py b/tests/tools/test_files.py new file mode 100644 index 0000000..a2cb3d8 --- /dev/null +++ b/tests/tools/test_files.py @@ -0,0 +1,375 @@ +__author__ = "John Hollowell" +__copyright__ = "(c) John Hollowell 2023" +__license__ = "MIT" + +import logging +import tempfile +from unittest import mock + +import pytest + +from proxmoxer import ProxmoxAPI, core +from proxmoxer.tools import ChecksumInfo, Files, SupportedChecksums + +from ..api_mock import mock_pve # pylint: disable=unused-import # noqa: F401 +from ..files_mock import ( # pylint: disable=unused-import # noqa: F401 + mock_files, + mock_files_and_pve, +) + +MODULE_LOGGER_NAME = "proxmoxer.tools.files" + + +class TestChecksumInfo: + def test_basic(self): + info = ChecksumInfo("name", 123) + + assert info.name == "name" + assert info.hex_size == 123 + + def test_str(self): + info = ChecksumInfo("name", 123) + + assert str(info) == "name" + + def test_repr(self): + info = ChecksumInfo("name", 123) + + assert repr(info) == "name (123 digits)" + + +class TestGetChecksum: + def test_get_checksum_from_sibling_file_success(self, mock_files): + url = "https://sub.domain.tld/sibling/file.iso" + exp_hash = "this_is_the_hash" + info = 
ChecksumInfo("testing", 16) + res1 = Files._get_checksum_from_sibling_file(url, checksum_info=info) + res2 = Files._get_checksum_from_sibling_file(url, checksum_info=info, filename="file.iso") + + assert res1 == exp_hash + assert res2 == exp_hash + + def test_get_checksum_from_sibling_file_fail(self, mock_files): + url = "https://sub.domain.tld/sibling/missing.iso" + info = ChecksumInfo("testing", 16) + res1 = Files._get_checksum_from_sibling_file(url, checksum_info=info) + res2 = Files._get_checksum_from_sibling_file( + url, checksum_info=info, filename="missing.iso" + ) + + assert res1 is None + assert res2 is None + + def test_get_checksum_from_extension_success(self, mock_files): + url = "https://sub.domain.tld/extension/file.iso" + exp_hash = "this_is_the_hash" + info = ChecksumInfo("testing", 16) + res1 = Files._get_checksum_from_extension(url, checksum_info=info) + res2 = Files._get_checksum_from_extension(url, checksum_info=info, filename="file.iso") + + assert res1 == exp_hash + assert res2 == exp_hash + + def test_get_checksum_from_extension_fail(self, mock_files): + url = "https://sub.domain.tld/extension/missing.iso" + + info = ChecksumInfo("testing", 16) + res1 = Files._get_checksum_from_extension(url, checksum_info=info) + res2 = Files._get_checksum_from_extension( + url, checksum_info=info, filename="connectionerror.iso" + ) + res3 = Files._get_checksum_from_extension( + url, checksum_info=info, filename="readtimeout.iso" + ) + + assert res1 is None + assert res2 is None + assert res3 is None + + def test_get_checksum_from_extension_upper_success(self, mock_files): + url = "https://sub.domain.tld/upper/file.iso" + exp_hash = "this_is_the_hash" + info = ChecksumInfo("testing", 16) + res1 = Files._get_checksum_from_extension_upper(url, checksum_info=info) + res2 = Files._get_checksum_from_extension_upper( + url, checksum_info=info, filename="file.iso" + ) + + assert res1 == exp_hash + assert res2 == exp_hash + + def test_get_checksum_from_extension_upper_fail(self, mock_files): + url = "https://sub.domain.tld/upper/missing.iso" + info = ChecksumInfo("testing", 16) + res1 = Files._get_checksum_from_extension_upper(url, checksum_info=info) + res2 = Files._get_checksum_from_extension_upper( + url, checksum_info=info, filename="missing.iso" + ) + + assert res1 is None + assert res2 is None + + def test_get_checksums_from_file_url_all_checksums(self, mock_files): + base_url = "https://sub.domain.tld/checksums/file.iso" + full_checksum_string = "1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890" + for types_enum in SupportedChecksums: + checksum_info = types_enum.value + + data = Files.get_checksums_from_file_url(base_url, preferred_type=checksum_info) + + assert data[0] == full_checksum_string[0 : checksum_info.hex_size] + assert data[1] == checksum_info + + def test_get_checksums_from_file_url_missing(self, mock_files): + url = "https://sub.domain.tld/missing.iso" + + data = Files.get_checksums_from_file_url(url) + + assert data[0] is None + assert data[1] is None + + +class TestFiles: + prox = ProxmoxAPI("1.2.3.4:1234", token_name="name", token_value="value") + + def test_init_basic(self): + f = Files(self.prox, "node1", "storage1") + + assert f._prox == self.prox + assert f._node == "node1" + assert f._storage == "storage1" + + def test_repr(self): + f = Files(self.prox, "node1", "storage1") + assert ( + repr(f) + == "Files (node1/storage1 at ProxmoxAPI (https backend for 
https://1.2.3.4:1234/api2/json))" + ) + + def test_get_file_info_pass(self, mock_pve): + f = Files(self.prox, "node1", "storage1") + info = f.get_file_info("https://sub.domain.tld/file.iso") + + assert info["filename"] == "file.iso" + assert info["mimetype"] == "application/x-iso9660-image" + assert info["size"] == 123456 + + def test_get_file_info_fail(self, mock_pve): + f = Files(self.prox, "node1", "storage1") + info = f.get_file_info("https://sub.domain.tld/invalid.iso") + + assert info is None + + +class TestFilesDownload: + prox = ProxmoxAPI("1.2.3.4:1234", token_name="name", token_value="value") + f = Files(prox, "node1", "storage1") + + def test_download_discover_checksum(self, mock_files_and_pve, caplog): + status = self.f.download_file_to_storage("https://sub.domain.tld/checksums/file.iso") + + # this is the default "done" task mock information + assert status == { + "upid": "UPID:node1:000FF1FD:10F9374C:630D702C:vzdump:110:root@pam:done", + "starttime": 1661825068, + "user": "root@pam", + "type": "vzdump", + "pstart": 284768076, + "status": "stopped", + "exitstatus": "OK", + "pid": 1044989, + "id": "110", + "node": "node1", + } + assert caplog.record_tuples == [] + + def test_download_no_blocking(self, mock_files_and_pve, caplog): + status = self.f.download_file_to_storage( + "https://sub.domain.tld/checksums/file.iso", blocking_status=False + ) + + # this is the default "done" task mock information + assert status == { + "upid": "UPID:node1:000FF1FD:10F9374C:630D702C:vzdump:110:root@pam:done", + "starttime": 1661825068, + "user": "root@pam", + "type": "vzdump", + "pstart": 284768076, + "status": "stopped", + "exitstatus": "OK", + "pid": 1044989, + "id": "110", + "node": "node1", + } + assert caplog.record_tuples == [] + + def test_download_no_discover_checksum(self, mock_files_and_pve, caplog): + caplog.set_level(logging.WARNING, logger=MODULE_LOGGER_NAME) + + status = self.f.download_file_to_storage("https://sub.domain.tld/file.iso") + + # this is the default "done" task mock information + assert status == { + "upid": "UPID:node1:000FF1FD:10F9374C:630D702C:vzdump:110:root@pam:done", + "starttime": 1661825068, + "user": "root@pam", + "type": "vzdump", + "pstart": 284768076, + "status": "stopped", + "exitstatus": "OK", + "pid": 1044989, + "id": "110", + "node": "node1", + } + assert caplog.record_tuples == [ + ( + MODULE_LOGGER_NAME, + logging.WARNING, + "Unable to discover checksum. 
Will not do checksum validation", + ), + ] + + def test_uneven_checksum(self, caplog, mock_files_and_pve): + caplog.set_level(logging.DEBUG, logger=MODULE_LOGGER_NAME) + status = self.f.download_file_to_storage("https://sub.domain.tld/file.iso", checksum="asdf") + + assert status is None + + assert caplog.record_tuples == [ + ( + MODULE_LOGGER_NAME, + logging.ERROR, + "Must pass both checksum and checksum_type or leave both None for auto-discovery", + ), + ] + + def test_uneven_checksum_type(self, caplog, mock_files_and_pve): + caplog.set_level(logging.DEBUG, logger=MODULE_LOGGER_NAME) + status = self.f.download_file_to_storage( + "https://sub.domain.tld/file.iso", checksum_type="asdf" + ) + + assert status is None + + assert caplog.record_tuples == [ + ( + MODULE_LOGGER_NAME, + logging.ERROR, + "Must pass both checksum and checksum_type or leave both None for auto-discovery", + ), + ] + + def test_get_file_info_missing(self, mock_pve): + f = Files(self.prox, "node1", "storage1") + info = f.get_file_info("https://sub.domain.tld/missing.iso") + + assert info is None + + def test_get_file_info_non_iso(self, mock_pve): + f = Files(self.prox, "node1", "storage1") + info = f.get_file_info("https://sub.domain.tld/index.html") + + assert info["filename"] == "index.html" + assert info["mimetype"] == "text/html" + + +class TestFilesUpload: + prox = ProxmoxAPI("1.2.3.4:1234", token_name="name", token_value="value") + f = Files(prox, "node1", "storage1") + + def test_upload_no_file(self, mock_files_and_pve, caplog): + status = self.f.upload_local_file_to_storage("/does-not-exist.iso") + + assert status is None + assert caplog.record_tuples == [ + ( + MODULE_LOGGER_NAME, + logging.ERROR, + '"/does-not-exist.iso" does not exist or is not a file', + ), + ] + + def test_upload_dir(self, mock_files_and_pve, caplog): + with tempfile.TemporaryDirectory() as tmp_dir: + status = self.f.upload_local_file_to_storage(tmp_dir) + + assert status is None + assert caplog.record_tuples == [ + ( + MODULE_LOGGER_NAME, + logging.ERROR, + f'"{tmp_dir}" does not exist or is not a file', + ), + ] + + def test_upload_empty_file(self, mock_files_and_pve, caplog): + with tempfile.NamedTemporaryFile("rb") as f_obj: + status = self.f.upload_local_file_to_storage(filename=f_obj.name) + + assert status is not None + assert caplog.record_tuples == [] + + def test_upload_non_empty_file(self, mock_files_and_pve, caplog): + with tempfile.NamedTemporaryFile("w+b") as f_obj: + f_obj.write(b"a" * 100) + f_obj.seek(0) + status = self.f.upload_local_file_to_storage(filename=f_obj.name) + + assert status is not None + assert caplog.record_tuples == [] + + def test_upload_no_checksum(self, mock_files_and_pve, caplog): + with tempfile.NamedTemporaryFile("rb") as f_obj: + status = self.f.upload_local_file_to_storage( + filename=f_obj.name, do_checksum_check=False + ) + + assert status is not None + assert caplog.record_tuples == [] + + def test_upload_checksum_unavailable(self, mock_files_and_pve, caplog, apply_no_checksums): + with tempfile.NamedTemporaryFile("rb") as f_obj: + status = self.f.upload_local_file_to_storage(filename=f_obj.name) + + assert status is not None + assert caplog.record_tuples == [ + ( + MODULE_LOGGER_NAME, + logging.WARNING, + "There are no Proxmox supported checksums which are supported by hashlib. 
Skipping checksum validation", + ) + ] + + def test_upload_non_blocking(self, mock_files_and_pve, caplog): + with tempfile.NamedTemporaryFile("rb") as f_obj: + status = self.f.upload_local_file_to_storage(filename=f_obj.name, blocking_status=False) + + assert status is not None + assert caplog.record_tuples == [] + + def test_upload_proxmox_error(self, mock_files_and_pve, caplog): + with tempfile.NamedTemporaryFile("rb") as f_obj: + f_copy = Files(self.f._prox, self.f._node, "missing") + + with pytest.raises(core.ResourceException) as exc_info: + f_copy.upload_local_file_to_storage(filename=f_obj.name) + + assert exc_info.value.status_code == 500 + assert exc_info.value.status_message == "Internal Server Error" + # assert exc_info.value.content == "storage 'missing' does not exist" + + def test_upload_io_error(self, mock_files_and_pve, caplog): + with tempfile.NamedTemporaryFile("rb") as f_obj: + mo = mock.mock_open() + mo.side_effect = IOError("ERROR MESSAGE") + with mock.patch("builtins.open", mo): + status = self.f.upload_local_file_to_storage(filename=f_obj.name) + + assert status is None + assert caplog.record_tuples == [(MODULE_LOGGER_NAME, logging.ERROR, "ERROR MESSAGE")] + + +@pytest.fixture +def apply_no_checksums(): + with mock.patch("hashlib.algorithms_available", set()): + yield diff --git a/tests/tools/test_tasks.py b/tests/tools/test_tasks.py index 7a9d44b..fde50c3 100644 --- a/tests/tools/test_tasks.py +++ b/tests/tools/test_tasks.py @@ -115,8 +115,8 @@ def test_timeout(self, mocked_prox, caplog): status = Tasks.blocking_status( mocked_prox, "UPID:node1:000FF1FD:10F9374C:630D702C:vzdump:110:root@pam:keep-running", - 0.021, - 0.01, + timeout=0.021, + polling_interval=0.01, ) assert status is None