From fe5b19dbcac6ab801165861552c03cb90a2b525b Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Tue, 1 Oct 2024 19:20:45 +0200 Subject: [PATCH 1/9] Fix linters (sync) --- src/leonardo_api/leonardo_sync.py | 147 ++++++++++++++++-------------- 1 file changed, 78 insertions(+), 69 deletions(-) diff --git a/src/leonardo_api/leonardo_sync.py b/src/leonardo_api/leonardo_sync.py index 246d6d1..b50f6b6 100644 --- a/src/leonardo_api/leonardo_sync.py +++ b/src/leonardo_api/leonardo_sync.py @@ -5,7 +5,7 @@ Copyright (c) 2023. All rights reserved. Created: 29.08.2023 -Last Modified: 24.11.2023 +Last Modified: 01.10.2024 Description: This file contains synchronous implementation for Leonardo.ai API @@ -40,9 +40,9 @@ def __init__(self, auth_token: str, logger: Optional[logging.Logger] = None) -> :param logger: default logger. Default will be initialized if not provided. :type logger: logging.Logger, optional """ - self.___auth_token = auth_token - self.___logger = logger if logger else setup_logger("Leonardo", "leonardo_async.log") - self.___logger.debug("Leonardo init complete") + self.__auth_token = auth_token + self.__logger = logger if logger else setup_logger("Leonardo", "leonardo_async.log") + self.__logger.debug("Leonardo init complete") def ___get_client_session(self, request_type: str = "get", empty: bool = False) -> requests.Session: """ @@ -57,7 +57,7 @@ def ___get_client_session(self, request_type: str = "get", empty: bool = False) """ headers = {} if not empty: - headers = {"Authorization": f"Bearer {self.___auth_token}"} + headers = {"Authorization": f"Bearer {self.__auth_token}"} if request_type.lower() == "get" or request_type.lower() == "delete": headers.update({"content-type": "application/json"}) if request_type.lower() == "post": @@ -77,20 +77,20 @@ def get_user_info(self) -> dict: Exception: If an error occurs while getting user info. 
""" url = "https://cloud.leonardo.ai/api/rest/v1/me" - self.___logger.debug(f"Requesting user info: GET {url}") + self.__logger.debug("Requesting user info: GET %s", url) session = self.___get_client_session("get") try: response = session.get(url) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"User info: {response_dict}") + self.__logger.debug("User info: %s", response_dict) session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while getting user info: {str(error)}") + self.__logger.error("Error occurred while getting user info: %s", str(error)) raise error - def post_generations( + def post_generations( # pylint: disable=too-many-positional-arguments self, prompt: str, negative_prompt: Optional[str] = None, @@ -180,16 +180,16 @@ def post_generations( "controlNet": control_net, "controlNetType": control_net_type, } - self.___logger.debug(f"Requesting post generations: POST {url} with payload: {payload}") + self.__logger.debug("Requesting post generations: POST %s with payload: %s", url, payload) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Post generations: {response_dict}") + self.__logger.debug("Post generations: %s", response_dict) return response_dict except Exception as error: - self.___logger.error(f"Error occurred while post generations: {str(error)}") + self.__logger.error("Error occurred while post generations: %s", str(error)) raise error def get_single_generation(self, generation_id: str) -> dict: @@ -205,16 +205,16 @@ def get_single_generation(self, generation_id: str) -> dict: Exception: If an error occurs while getting generation info. """ url = f"https://cloud.leonardo.ai/api/rest/v1/generations/{generation_id}" - self.___logger.debug(f"Requested single generations: GET {url} with generation_id={generation_id}") + self.__logger.debug("Requested single generations: GET %s with generation_id=%s", url, generation_id) session = self.___get_client_session("get") try: response = session.get(url) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Single generations: {response_dict}") + self.__logger.debug("Single generations: %s", response_dict) return response_dict except Exception as error: - self.___logger.error(f"Error occurred while get single generations: {str(error)}") + self.__logger.error("Error occurred while get single generations: %s", str(error)) raise error def delete_single_generation(self, generation_id: str) -> requests.Response: @@ -230,15 +230,15 @@ def delete_single_generation(self, generation_id: str) -> requests.Response: Exception: If an error occurs while deleting generation. 
""" url = f"https://cloud.leonardo.ai/api/rest/v1/generations/{generation_id}" - self.___logger.debug(f"Delete generations with generation_id={generation_id}: DELETE {url}") + self.__logger.debug("Delete generations with generation_id=%s: DELETE %s", generation_id, url) session = self.___get_client_session("delete") try: response = session.delete(url) response.raise_for_status() - self.___logger.debug(f"Generations {generation_id} has been deleted: {response}") + self.__logger.debug("Generations %s has been deleted: %s", generation_id, response) return response except Exception as error: - self.___logger.error(f"Error occurred while delete generation: {str(error)}") + self.__logger.error("Error occurred while delete generation: %s", str(error)) raise error def get_generations_by_user(self, user_id: str, offset: int = 0, limit: int = 10) -> dict: @@ -259,16 +259,16 @@ def get_generations_by_user(self, user_id: str, offset: int = 0, limit: int = 10 """ url = f"https://cloud.leonardo.ai/api/rest/v1/generations/user/{user_id}" params = {"offset": offset, "limit": limit} - self.___logger.debug(f"Requested generations for {user_id} with params {params}: GET {url}") + self.__logger.debug("Requested generations for %s with params %s: GET %s", user_id, params, url) session = self.___get_client_session("get") try: response = session.get(url, params=params) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Generations for user {user_id} are: {response}") + self.__logger.debug("Generations for user %s are: %s", user_id, response) return response_dict except Exception as error: - self.___logger.error(f"Error occurred while obtaining user's generations: {str(error)}") + self.__logger.error("Error occurred while obtaining user's generations: %s", str(error)) raise error def upload_init_image(self, file_path: str) -> str: @@ -288,33 +288,33 @@ def upload_init_image(self, file_path: str) -> str: valid_extensions = ["png", "jpg", "jpeg", "webp"] extension = os.path.splitext(file_path)[1].strip(".") if extension not in valid_extensions: - raise ValueError(f"Invalid file extension. Must be one of {valid_extensions}") + raise ValueError("Invalid file extension. 
Must be one of %s", valid_extensions) url = "https://cloud.leonardo.ai/api/rest/v1/init-image" payload = {"extension": extension} - self.___logger.debug(f"Init image {file_path} upload requested with payload = {payload}: POST {url}") + self.__logger.debug(f"Init image %s upload requested with payload = %s: POST %s", file_path, payload, url) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() data = response.json() - self.___logger.debug(f"Init image {file_path} initiated: {data}") + self.__logger.debug("Init image %s initiated: %s", file_path, data) upload_url = data["uploadInitImage"]["url"] fields = json.loads(data["uploadInitImage"]["fields"]) generation_id = data["uploadInitImage"]["id"] session.close() - self.___logger.debug(f"Init image {file_path} uploading as binary: POST {upload_url}") + self.__logger.debug("Init image %s uploading as binary: POST %s", file_path, upload_url) session = self.___get_client_session("post", empty=True) with open(file_path, "rb") as file: file_data = file.read() fields.update({"file": file_data}) response = session.post(upload_url, data=fields) response.raise_for_status() - self.___logger.debug(f"Init image {file_path} has been uploaded with generation_id={generation_id}") + self.__logger.debug("Init image %s has been uploaded with generation_id=%s", file_path, generation_id) return generation_id except Exception as error: - self.___logger.error(f"Error occurred while upload init image: {str(error)}") + self.__logger.error("Error occurred while upload init image: %s", str(error)) raise error def get_single_init_image(self, image_id: str) -> dict: @@ -327,16 +327,16 @@ def get_single_init_image(self, image_id: str) -> dict: :rtype: dict """ url = f"https://cloud.leonardo.ai/api/rest/v1/init-image/{image_id}" - self.___logger.debug(f"Requested single image with image_id={image_id}: GET {url}") + self.__logger.debug(f"Requested single image with image_id=%s: GET %s", image_id, url) session = self.___get_client_session("get") try: response = session.get(url) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Single image provided: {response_dict}") + self.__logger.debug("Single image provided: %s", response_dict) return response_dict except Exception as error: - self.___logger.error(f"Error occurred while obtain single init image: {str(error)}") + self.__logger.error("Error occurred while obtain single init image: %s", str(error)) raise error def delete_init_image(self, image_id: str) -> requests.Response: @@ -352,16 +352,16 @@ def delete_init_image(self, image_id: str) -> requests.Response: Exception: If an error occurs while deleting init image. 
""" url = f"https://cloud.leonardo.ai/api/rest/v1/init-image/{image_id}" - self.___logger.debug(f"Requested to delete single image with image_id={image_id}: DELETE {url}") + self.__logger.debug("Requested to delete single image with image_id=%s: DELETE %s", image_id, url) session = self.___get_client_session("delete") try: response = session.delete(url) response.raise_for_status() - self.___logger.debug(f"Single image deleted: {response}") + self.__logger.debug("Single image deleted: %s", response) session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while deleting init image: {str(error)}") + self.__logger.error("Error occurred while deleting init image: %s", str(error)) raise error def create_upscale(self, image_id: str) -> requests.Response: @@ -378,16 +378,16 @@ def create_upscale(self, image_id: str) -> requests.Response: """ url = "https://cloud.leonardo.ai/api/rest/v1/variations/upscale" payload = {"id": image_id} - self.___logger.debug(f"Requested to upscale image with payload {payload}: POST {url}") + self.__logger.debug("Requested to upscale image with payload %s: POST %s", payload, url) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() - self.___logger.debug(f"Upscale created: {response}") + self.__logger.debug("Upscale created: %s", response) session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while up-scaling image: {str(error)}") + self.__logger.error("Error occurred while up-scaling image: %s", str(error)) raise error def get_variation_by_id(self, generation_id: str) -> requests.Response: @@ -403,16 +403,16 @@ def get_variation_by_id(self, generation_id: str) -> requests.Response: Exception: If an error occurs while getting variation. 
""" url = f"https://cloud.leonardo.ai/api/rest/v1/variations/{generation_id}" - self.___logger.debug(f"Requested to obtain variation by id {generation_id}: GET {url}") + self.__logger.debug("Requested to obtain variation by id %s: GET %s", generation_id, url) session = self.___get_client_session("get") try: response = session.get(url) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Get variation by ID: {response_dict}") + self.__logger.debug("Get variation by ID: %s", response_dict) return response_dict except Exception as error: - self.___logger.error(f"Error occurred while get variation by id: {str(error)}") + self.__logger.error("Error occurred while get variation by id: %s", str(error)) raise error def create_dataset(self, name: str, description: Optional[str] = None) -> requests.Response: @@ -431,16 +431,16 @@ def create_dataset(self, name: str, description: Optional[str] = None) -> reques """ url = "https://cloud.leonardo.ai/api/rest/v1/datasets" payload = {"name": name, "description": description} - self.___logger.debug(f"Requested to create dataset with payload {payload}: POST {url}") + self.__logger.debug("Requested to create dataset with payload %s: POST %s", payload, url) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() - self.___logger.debug(f"Dataset has been created: {response}") + self.__logger.debug("Dataset has been created: %s", response) session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while create dataset: {str(error)}") + self.__logger.error("Error occurred while create dataset: %s", str(error)) raise error def get_dataset_by_id(self, dataset_id: str) -> dict: @@ -456,16 +456,16 @@ def get_dataset_by_id(self, dataset_id: str) -> dict: Exception: If an error occurs while getting dataset. """ url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}" - self.___logger.debug(f"Requested to obtain dataset dataset_id={dataset_id}: GET {url}") + self.__logger.debug("Requested to obtain dataset dataset_id=%s: GET %s", dataset_id, url) session = self.___get_client_session("get") try: response = session.get(url) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Dataset with dataset_id={dataset_id} provided: {response_dict}") + self.__logger.debug("Dataset with dataset_id=%s provided: %s", dataset_id, response_dict) return response_dict except Exception as error: - self.___logger.error(f"Error occurred while get dataset: {str(error)}") + self.__logger.error("Error occurred while get dataset: %s", str(error)) raise error def delete_dataset_by_id(self, dataset_id: str) -> requests.Response: @@ -481,15 +481,15 @@ def delete_dataset_by_id(self, dataset_id: str) -> requests.Response: Exception: If an error occurs while deleting dataset. 
""" url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}" - self.___logger.debug(f"Requested to delete dataset dataset_id={dataset_id}: DELETE {url}") + self.__logger.debug("Requested to delete dataset dataset_id=%s: DELETE %s", dataset_id, url) session = self.___get_client_session("delete") try: response = session.delete(url) response.raise_for_status() - self.___logger.debug(f"Dataset with dataset_id={dataset_id} has been deleted: {response}") + self.__logger.debug("Dataset with dataset_id=%s has been deleted: %s", dataset_id, response) return response except Exception as error: - self.___logger.error(f"Error occurred while delete dataset: {str(error)}") + self.__logger.error("Error occurred while delete dataset: %s", str(error)) raise error def upload_dataset_image(self, dataset_id: str, file_path: str) -> requests.Response: @@ -515,31 +515,34 @@ def upload_dataset_image(self, dataset_id: str, file_path: str) -> requests.Resp url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}/upload" payload = {"extension": extension} - self.___logger.debug(f"Requested to upload dataset_id={dataset_id} from {file_path}: POST {url}") + self.__logger.debug("Requested to upload dataset_id=%s from %s: POST %s", dataset_id, file_path, url) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() data = response.json() - self.___logger.debug( - f"Dataset with dataset_id={dataset_id} started to upload from {file_path}:" f" {response}" + self.__logger.debug( + "Dataset with dataset_id=%s started to upload from %s: %s", + dataset_id, + file_path, + response ) upload_url = data["uploadDatasetImage"]["url"] fields = json.loads(data["uploadDatasetImage"]["fields"]) dataset_id = data["uploadDatasetImage"]["datasetId"] - self.___logger.debug(f"Uploading dataset_id={dataset_id} from {file_path}: POST {url}") + self.__logger.debug("Uploading dataset_id=%s from %s: POST %s", dataset_id, file_path, url) session = self.___get_client_session("post", empty=True) with open(file_path, "rb") as file: file_data = file.read() fields.update({"file": file_data}) response = session.post(upload_url, data=fields) response.raise_for_status() - self.___logger.debug(f"Dataset with dataset_id={dataset_id} uploaded using {file_path}") + self.__logger.debug("Dataset with dataset_id=%s uploaded using %s", dataset_id, file_path) session.close() return response except Exception as error: - self.___logger.error(f"Error occurred uploading dataset: {str(error)}") + self.__logger.error("Error occurred uploading dataset: %s", str(error)) raise error def upload_generated_image_to_dataset(self, dataset_id: str, generated_image_id: str) -> requests.Response: @@ -558,20 +561,26 @@ def upload_generated_image_to_dataset(self, dataset_id: str, generated_image_id: """ url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}/upload/gen" payload = {"generatedImageId": generated_image_id} - self.___logger.debug( - f"Requested to upload generated_image_id={generated_image_id} " f"to dataset_id={dataset_id}: POST {url}" + self.__logger.debug( + "Requested to upload generated_image_id=%s to dataset_id=%s: POST %s", + generated_image_id, + dataset_id, + url ) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() - self.___logger.debug( - f"Image with image_id={generated_image_id} has been uploaded to " f"dataset_id={dataset_id}: {response}" + self.__logger.debug( + "Image with image_id=%s 
has been uploaded to dataset_id=%s: %s", + generated_image_id, + dataset_id, + response ) session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while upload generated image to dataset: {str(error)}") + self.__logger.error("Error occurred while upload generated image to dataset: %s", str(error)) raise error def train_custom_model( @@ -623,16 +632,16 @@ def train_custom_model( "sd_Version": sd_version, "strength": strength, } - self.___logger.debug(f"Requested to train custom model with payload {payload}: POST {url}") + self.__logger.debug("Requested to train custom model with payload %s: POST %s", payload, url) session = self.___get_client_session("post") try: response = session.post(url, json=payload) response.raise_for_status() - self.___logger.debug(f"Custom modal has been trained: {response}") + self.__logger.debug("Custom modal has been trained: %s", response) session.close() return response except Exception as error: - self.___logger.error(f"Error training custom model: {str(error)}") + self.__logger.error("Error training custom model: %s", str(error)) raise error def get_custom_model_by_id(self, model_id: str) -> dict: @@ -648,17 +657,17 @@ def get_custom_model_by_id(self, model_id: str) -> dict: Exception: If an error occurs while getting custom model. """ url = f"https://cloud.leonardo.ai/api/rest/v1/models/{model_id}" - self.___logger.debug(f"Requested to obtain custom model by model_id={model_id}: GET {url}") + self.__logger.debug("Requested to obtain custom model by model_id=%s: GET %s", model_id, url) session = self.___get_client_session("get") try: response = session.get(url) response.raise_for_status() response_dict = response.json() - self.___logger.debug(f"Custom modal has been trained: {response_dict}") + self.__logger.debug("Custom modal has been trained: %s", response_dict) session.close() return response_dict except Exception as error: - self.___logger.error(f"Error obtaining custom model: {str(error)}") + self.__logger.error("Error obtaining custom model: %s", str(error)) raise error def delete_custom_model_by_id(self, model_id: str) -> requests.Response: @@ -674,15 +683,15 @@ def delete_custom_model_by_id(self, model_id: str) -> requests.Response: Exception: If an error occurs while deleting custom model. 
""" url = f"https://cloud.leonardo.ai/api/rest/v1/models/{model_id}" - self.___logger.debug(f"Requested to delete custom model by model_id={model_id}: GET {url}") + self.__logger.debug("Requested to delete custom model by model_id=%s: GET %s", model_id, url) session = self.___get_client_session("delete") try: response = session.delete(url) response.raise_for_status() - self.___logger.debug(f"Custom modal has been deleted: {response}") + self.__logger.debug("Custom modal has been deleted: %s", response) return response except Exception as error: - self.___logger.error(f"Error delete custom model: {str(error)}") + self.__logger.error("Error delete custom model: %s", str(error)) raise error def wait_for_image_generation( @@ -725,6 +734,6 @@ def wait_for_image_generation( time.sleep(poll_interval) if timeout_counter >= (timeout / poll_interval): - raise TimeoutError(f"Image has not been generated in {timeout} seconds") + raise TimeoutError("Image has not been generated in %s seconds", timeout) timeout_counter += 1 From eddde38e7d55736f7eb8e2f6372233d3adf74e32 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Tue, 1 Oct 2024 19:24:21 +0200 Subject: [PATCH 2/9] Fix linters (sync) --- src/leonardo_api/leonardo_sync.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/leonardo_api/leonardo_sync.py b/src/leonardo_api/leonardo_sync.py index b50f6b6..0c47b8f 100644 --- a/src/leonardo_api/leonardo_sync.py +++ b/src/leonardo_api/leonardo_sync.py @@ -288,11 +288,11 @@ def upload_init_image(self, file_path: str) -> str: valid_extensions = ["png", "jpg", "jpeg", "webp"] extension = os.path.splitext(file_path)[1].strip(".") if extension not in valid_extensions: - raise ValueError("Invalid file extension. Must be one of %s", valid_extensions) + raise ValueError(f"Invalid file extension. 
Must be one of {valid_extensions}") url = "https://cloud.leonardo.ai/api/rest/v1/init-image" payload = {"extension": extension} - self.__logger.debug(f"Init image %s upload requested with payload = %s: POST %s", file_path, payload, url) + self.__logger.debug("Init image %s upload requested with payload = %s: POST %s", file_path, payload, url) session = self.___get_client_session("post") try: response = session.post(url, json=payload) @@ -327,7 +327,7 @@ def get_single_init_image(self, image_id: str) -> dict: :rtype: dict """ url = f"https://cloud.leonardo.ai/api/rest/v1/init-image/{image_id}" - self.__logger.debug(f"Requested single image with image_id=%s: GET %s", image_id, url) + self.__logger.debug("Requested single image with image_id=%s: GET %s", image_id, url) session = self.___get_client_session("get") try: response = session.get(url) @@ -583,7 +583,7 @@ def upload_generated_image_to_dataset(self, dataset_id: str, generated_image_id: self.__logger.error("Error occurred while upload generated image to dataset: %s", str(error)) raise error - def train_custom_model( + def train_custom_model( # pylint: disable=too-many-positional-arguments self, name: str, dataset_id: str, @@ -734,6 +734,6 @@ def wait_for_image_generation( time.sleep(poll_interval) if timeout_counter >= (timeout / poll_interval): - raise TimeoutError("Image has not been generated in %s seconds", timeout) + raise TimeoutError(f"Image has not been generated in {timeout} seconds") timeout_counter += 1 From 7beab1d05cba26472b4e9a1fbbe4e645dc8ec357 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Tue, 1 Oct 2024 19:26:36 +0200 Subject: [PATCH 3/9] Fix linters (sync) --- src/leonardo_api/leonardo_sync.py | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/src/leonardo_api/leonardo_sync.py b/src/leonardo_api/leonardo_sync.py index 0c47b8f..99fcc91 100644 --- a/src/leonardo_api/leonardo_sync.py +++ b/src/leonardo_api/leonardo_sync.py @@ -522,10 +522,7 @@ def upload_dataset_image(self, dataset_id: str, file_path: str) -> requests.Resp response.raise_for_status() data = response.json() self.__logger.debug( - "Dataset with dataset_id=%s started to upload from %s: %s", - dataset_id, - file_path, - response + "Dataset with dataset_id=%s started to upload from %s: %s", dataset_id, file_path, response ) upload_url = data["uploadDatasetImage"]["url"] fields = json.loads(data["uploadDatasetImage"]["fields"]) @@ -562,10 +559,7 @@ def upload_generated_image_to_dataset(self, dataset_id: str, generated_image_id: url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}/upload/gen" payload = {"generatedImageId": generated_image_id} self.__logger.debug( - "Requested to upload generated_image_id=%s to dataset_id=%s: POST %s", - generated_image_id, - dataset_id, - url + "Requested to upload generated_image_id=%s to dataset_id=%s: POST %s", generated_image_id, dataset_id, url ) session = self.___get_client_session("post") try: @@ -575,7 +569,7 @@ def upload_generated_image_to_dataset(self, dataset_id: str, generated_image_id: "Image with image_id=%s has been uploaded to dataset_id=%s: %s", generated_image_id, dataset_id, - response + response, ) session.close() return response From bf91d1c440ea3357e3f3d79250d157d92c7ec4ac Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Tue, 1 Oct 2024 19:44:55 +0200 Subject: [PATCH 4/9] Remove support of python 3.8 --- .github/workflows/linters.yml | 2 +- .github/workflows/master-linters.yml | 2 +- pyproject.toml | 3 +-- setup.cfg | 1 - 4 files changed, 3 insertions(+), 5 
deletions(-) diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 15f0239..d2175e8 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.github/workflows/master-linters.yml b/.github/workflows/master-linters.yml index 63d4dd1..cf21bc4 100644 --- a/.github/workflows/master-linters.yml +++ b/.github/workflows/master-linters.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12"] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/pyproject.toml b/pyproject.toml index fdd543e..0e62419 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -15,7 +15,7 @@ keywords = ["leonardo", "ai", "image generation", "artificial intelligence", "ap description = "Leonardo.ai Python API" readme = "README.md" license = { file="LICENSE" } -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ 'requests', 'aiohttp', @@ -29,7 +29,6 @@ dependencies = [ 'frozenlist' ] classifiers = [ - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", diff --git a/setup.cfg b/setup.cfg index a801492..1c25666 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,7 +13,6 @@ keywords = leonardo, leonardo.ai, image generation, stablediffusion, api, llm, a license = MIT License python_requires = >=3.9 classifiers = - Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 From 3ea866907ee573a88dd23d44e769c67b8a2ca7d5 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Fri, 8 Nov 2024 10:06:42 +0100 Subject: [PATCH 5/9] Fix async --- requirements.txt | 12 +-- src/leonardo_api/leonardo_async.py | 146 +++++++++++++++-------------- src/leonardo_api/leonardo_sync.py | 2 +- 3 files changed, 84 insertions(+), 76 deletions(-) diff --git a/requirements.txt b/requirements.txt index 6dc1dae..569ad32 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,9 +1,9 @@ -aiohttp==3.10.4 +aiohttp==3.10.10 aiofiles==24.1.0 asyncio==3.4.3 requests==2.32.3 -urllib3==2.2.2 -async-timeout==4.0.3 -certifi==2024.7.4 -charset-normalizer==3.3.2 -frozenlist==1.4.1 +urllib3==2.2.3 +async-timeout==5.0.1 +certifi==2024.8.30 +charset-normalizer==3.4.0 +frozenlist==1.5.0 diff --git a/src/leonardo_api/leonardo_async.py b/src/leonardo_api/leonardo_async.py index 1cca270..22a18de 100644 --- a/src/leonardo_api/leonardo_async.py +++ b/src/leonardo_api/leonardo_async.py @@ -2,10 +2,10 @@ """ Filename: leonardo_async.py Author: Iliya Vereshchagin -Copyright (c) 2023. All rights reserved. +Copyright (c) 2024. All rights reserved. 
Created: 28.08.2023 -Last Modified: 24.11.2023 +Last Modified: 08.10.2024 Description: This file contains asynchronous implementation for Leonardo.ai API @@ -44,8 +44,8 @@ def __init__(self, auth_token: str, logger: Optional[logging.Logger] = None) -> :type logger: logging.Logger, optional """ self.___auth_token = auth_token - self.___logger = logger if logger else setup_logger("Leonardo", "leonardo_async.log") - self.___logger.debug("Leonardo init complete") + self.__logger = logger if logger else setup_logger("Leonardo", "leonardo_async.log") + self.__logger.debug("Leonardo init complete") async def ___get_client_session(self, request_type: str = "get", empty: bool = False) -> aiohttp.ClientSession: """ @@ -79,22 +79,22 @@ async def get_user_info(self) -> dict: """ url = "https://cloud.leonardo.ai/api/rest/v1/me" - self.___logger.debug(f"Requesting user info: GET {url}") + self.__logger.debug("Requesting user info: GET %s", url) session = await self.___get_client_session("get") try: async with session.get(url) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"User info: {response_dict}") + self.__logger.debug("User info: %s", response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while getting user info: {str(error)}") + self.__logger.error("Error occurred while getting user info: %s", str(error)) if not session.closed: await session.close() raise error - async def post_generations( + async def post_generations( # pylint: disable=too-many-positional-arguments self, prompt: str, negative_prompt: Optional[str] = None, @@ -187,17 +187,17 @@ async def post_generations( "controlNet": control_net, "controlNetType": control_net_type, } - self.___logger.debug(f"Requesting post generations: POST {url} with payload: {payload}") + self.__logger.debug("Requesting post generations: POST %s with payload: %s", url, payload) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Post generations: {response_dict}") + self.__logger.debug("Post generations: %s", response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while post generations: {str(error)}") + self.__logger.error("Error occurred while post generations: %s", str(error)) if not session.closed: await session.close() raise error @@ -215,17 +215,17 @@ async def get_single_generation(self, generation_id: str) -> dict: Exception: if error occurred while get single generation """ url = f"https://cloud.leonardo.ai/api/rest/v1/generations/{generation_id}" - self.___logger.debug(f"Requested single generations: GET {url} with generation_id={generation_id}") + self.__logger.debug("Requested single generations: GET %s with generation_id=%s", url, generation_id) session = await self.___get_client_session("get") try: async with session.get(url) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Single generations: {response}") + self.__logger.debug("Single generations: %s", response) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while get single generations: {str(error)}") + self.__logger.error("Error occurred while get single generations: %s", str(error)) if not session.closed: await session.close() raise @@ 
-243,16 +243,16 @@ async def delete_single_generation(self, generation_id: str) -> aiohttp.ClientRe Exception: if error occurred while delete single generation """ url = f"https://cloud.leonardo.ai/api/rest/v1/generations/{generation_id}" - self.___logger.debug(f"Delete generations with generation_id={generation_id}: DELETE {url}") + self.__logger.debug("Delete generations with generation_id=%s: DELETE %s", generation_id, url) session = await self.___get_client_session("delete") try: async with session.delete(url) as response: response.raise_for_status() - self.___logger.debug(f"Generations {generation_id} has been deleted: {response}") + self.__logger.debug("Generations %s has been deleted: %s", generation_id, response) await session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while delete generation: {str(error)}") + self.__logger.error("Error occurred while delete generation: %s", str(error)) if not session.closed: await session.close() raise error @@ -275,17 +275,17 @@ async def get_generations_by_user(self, user_id: str, offset: int = 0, limit: in """ url = f"https://cloud.leonardo.ai/api/rest/v1/generations/user/{user_id}" params = {"offset": offset, "limit": limit} - self.___logger.debug(f"Requested generations for {user_id} with params {params}: GET {url}") + self.__logger.debug("Requested generations for %s with params %s: GET %s", user_id, params, url) session = await self.___get_client_session("get") try: async with session.get(url, params=params) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Generations for user {user_id} are: {response_dict}") + self.__logger.debug("Generations for user %s are: %s", user_id, response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while obtaining user's generations: {str(error)}") + self.__logger.error("Error occurred while obtaining user's generations: %s", str(error)) if not session.closed: await session.close() raise error @@ -310,19 +310,19 @@ async def upload_init_image(self, file_path: str) -> str: # pylint: disable=too url = "https://cloud.leonardo.ai/api/rest/v1/init-image" payload = {"extension": extension} - self.___logger.debug(f"Init image {file_path} upload requested with payload = {payload}: POST {url}") + self.__logger.debug("Init image %s upload requested with payload = %s: POST %s", file_path, payload, url) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() data = await response.json() await session.close() - self.___logger.debug(f"Init image {file_path} initiated as: {data['uploadInitImage']['url']}") + self.__logger.debug("Init image %s initiated as: %s", file_path, data["uploadInitImage"]["url"]) generation_id = data["uploadInitImage"]["id"] upload_url = data["uploadInitImage"]["url"] fields = json.loads(data["uploadInitImage"]["fields"]) - self.___logger.debug(f"Init image {file_path} uploading with as binary: POST {upload_url}") + self.__logger.debug("Init image %s uploading with as binary: POST %s", file_path, upload_url) async with aiofiles.open(file_path, "rb") as file: file_data = await file.read() data = aiohttp.FormData() @@ -332,11 +332,13 @@ async def upload_init_image(self, file_path: str) -> str: # pylint: disable=too session = await self.___get_client_session("post", empty=True) async with session.post(upload_url, data=data) as response: 
response.raise_for_status() - self.___logger.debug(f"Init image {file_path} has been uploaded, generation_id is: {generation_id}") + self.__logger.debug( + "Init image %s has been uploaded, generation_id is: %s", file_path, generation_id + ) await session.close() return generation_id except Exception as error: - self.___logger.error(f"Error occurred while upload init image: {str(error)}") + self.__logger.error("Error occurred while upload init image: %s", str(error)) if not session.closed: await session.close() raise error @@ -354,17 +356,17 @@ async def get_single_init_image(self, image_id: str) -> dict: Exception: if error occurred while get single init image """ url = f"https://cloud.leonardo.ai/api/rest/v1/init-image/{image_id}" - self.___logger.debug(f"Requested single image with image_id={image_id}: GET {url}") + self.__logger.debug("Requested single image with image_id=%s: GET %s", image_id, url) session = await self.___get_client_session("get") try: async with session.get(url) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Single image provided: {response_dict}") + self.__logger.debug("Single image provided: %s", response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while obtain single init image: {str(error)}") + self.__logger.error("Error occurred while obtain single init image: %s", str(error)) if not session.closed: await session.close() raise @@ -382,16 +384,16 @@ async def delete_init_image(self, image_id: str) -> aiohttp.ClientResponse: Exception: if error occurred while delete init image """ url = f"https://cloud.leonardo.ai/api/rest/v1/init-image/{image_id}" - self.___logger.debug(f"Requested to delete single image with image_id={image_id}: DELETE {url}") + self.__logger.debug("Requested to delete single image with image_id=%s: DELETE %s", image_id, url) session = await self.___get_client_session("delete") try: async with session.delete(url) as response: response.raise_for_status() - self.___logger.debug(f"Single image deleted: {response}") + self.__logger.debug("Single image deleted: %s", response) await session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while deleting init image: {str(error)}") + self.__logger.error("Error occurred while deleting init image: %s", str(error)) if not session.closed: await session.close() raise error @@ -410,17 +412,17 @@ async def create_upscale(self, image_id: str) -> aiohttp.ClientResponse: """ url = "https://cloud.leonardo.ai/api/rest/v1/variations/upscale" payload = {"id": image_id} - self.___logger.debug(f"Requested to upscale image with payload {payload}: POST {url}") + self.__logger.debug("Requested to upscale image with payload %s: POST %s", payload, url) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Upscale created: {response_dict}") + self.__logger.debug("Upscale created: %s", response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while up-scaling image: {str(error)}") + self.__logger.error("Error occurred while up-scaling image: %s", str(error)) if not session.closed: await session.close() raise error @@ -438,17 +440,17 @@ async def get_variation_by_id(self, generation_id: str) -> dict: Exception: if error occurred while get 
variation by id """ url = f"https://cloud.leonardo.ai/api/rest/v1/variations/{generation_id}" - self.___logger.debug(f"Requested to obtain variation by id {generation_id}: GET {url}") + self.__logger.debug("Requested to obtain variation by id %s: GET %s", generation_id, url) session = await self.___get_client_session("get") try: async with session.get(url) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Get variation by ID: {response_dict}") + self.__logger.debug("Get variation by ID: %s", response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while get variation by id: {str(error)}") + self.__logger.error("Error occurred while get variation by id: %s", str(error)) if not session.closed: await session.close() raise error @@ -469,16 +471,16 @@ async def create_dataset(self, name: str, description: Optional[str] = None) -> """ url = "https://cloud.leonardo.ai/api/rest/v1/datasets" payload = {"name": name, "description": description} - self.___logger.debug(f"Requested to create dataset with payload {payload}: POST {url}") + self.__logger.debug("Requested to create dataset with payload %s: POST %s", payload, url) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() - self.___logger.debug(f"Dataset has been created: {response}") + self.__logger.debug("Dataset has been created: %s", response) await session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while create dataset: {str(error)}") + self.__logger.error("Error occurred while create dataset: %s", str(error)) if not session.closed: await session.close() raise error @@ -496,17 +498,17 @@ async def get_dataset_by_id(self, dataset_id: str) -> dict: Exception: if error occurred while get dataset by id """ url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}" - self.___logger.debug(f"Requested to obtain dataset dataset_id={dataset_id}: GET {url}") + self.__logger.debug("Requested to obtain dataset dataset_id=%s: GET %s", dataset_id, url) session = await self.___get_client_session("get") try: async with session.get(url) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Dataset with dataset_id={dataset_id} provided: {response_dict}") + self.__logger.debug("Dataset with dataset_id=%s, provided: %s", dataset_id, response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error occurred while get dataset: {str(error)}") + self.__logger.error("Error occurred while get dataset: %s", str(error)) if not session.closed: await session.close() raise @@ -524,16 +526,16 @@ async def delete_dataset_by_id(self, dataset_id: str) -> aiohttp.ClientResponse: Exception: if error occurred while delete dataset by id """ url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}" - self.___logger.debug(f"Requested to delete dataset dataset_id={dataset_id}: DELETE {url}") + self.__logger.debug("Requested to delete dataset dataset_id=%s: DELETE %s", dataset_id, url) session = await self.___get_client_session("delete") try: async with session.delete(url) as response: response.raise_for_status() - self.___logger.debug(f"Dataset with dataset_id={dataset_id} has been deleted: {response}") + self.__logger.debug("Dataset with dataset_id=%s has been deleted: %s", dataset_id, response) await 
session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while delete dataset: {str(error)}") + self.__logger.error("Error occurred while delete dataset: %s", str(error)) if not session.closed: await session.close() raise error @@ -561,21 +563,23 @@ async def upload_dataset_image(self, dataset_id: str, file_path: str) -> aiohttp url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}/upload" payload = {"extension": extension} - self.___logger.debug(f"Requested to upload dataset_id={dataset_id} from {file_path}: POST {url}") + self.__logger.debug("Requested to upload dataset_id=%s from %s: POST %s", dataset_id, file_path, url) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() data = await response.json() await session.close() - self.___logger.debug( - f"Dataset with dataset_id={dataset_id} started to upload from {file_path} as " - f"{data['uploadDatasetImage']['url']}" + self.__logger.debug( + "Dataset with dataset_id=%s started to upload from %s as %s", + dataset_id, + file_path, + data["uploadDatasetImage"]["url"], ) upload_url = data["uploadDatasetImage"]["url"] fields = json.loads(data["uploadDatasetImage"]["fields"]) - self.___logger.debug(f"Uploading dataset_id={dataset_id} from {file_path}: POST {url}") + self.__logger.debug("Uploading dataset_id=%s from %s: POST %s", dataset_id, file_path, url) async with aiofiles.open(file_path, "rb") as file: file_data = await file.read() data = aiohttp.FormData() @@ -585,11 +589,13 @@ async def upload_dataset_image(self, dataset_id: str, file_path: str) -> aiohttp session = await self.___get_client_session("post", empty=True) async with session.post(upload_url, data=fields) as response: response.raise_for_status() - self.___logger.debug(f"Dataset with dataset_id={dataset_id} uploaded using {file_path}: {response}") + self.__logger.debug( + "Dataset with dataset_id=%s uploaded using %s: %s", dataset_id, file_path, response + ) await session.close() return response except Exception as error: - self.___logger.error(f"Error occurred uploading dataset: {str(error)}") + self.__logger.error("Error occurred uploading dataset: %s", str(error)) if not session.closed: await session.close() raise @@ -612,26 +618,28 @@ async def upload_generated_image_to_dataset( """ url = f"https://cloud.leonardo.ai/api/rest/v1/datasets/{dataset_id}/upload/gen" payload = {"generatedImageId": generated_image_id} - self.___logger.debug( - f"Requested to upload generated_image_id={generated_image_id} " f"to dataset_id={dataset_id}: POST {url}" + self.__logger.debug( + "Requested to upload generated_image_id=%s to dataset_id=%s: POST %s", generated_image_id, dataset_id, url ) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() - self.___logger.debug( - f"Image with image_id={generated_image_id} has been uploaded to " - f"dataset_id={dataset_id}: {response}" + self.__logger.debug( + "Image with image_id=%s has been uploaded to dataset_id=%s: %s", + generated_image_id, + dataset_id, + response, ) await session.close() return response except Exception as error: - self.___logger.error(f"Error occurred while upload generated image to dataset: {str(error)}") + self.__logger.error("Error occurred while upload generated image to dataset: %s", str(error)) if not session.closed: await session.close() raise - async def train_custom_model( + async def 
train_custom_model( # pylint: disable=too-many-positional-arguments self, name: str, dataset_id: str, @@ -683,16 +691,16 @@ async def train_custom_model( "sd_Version": sd_version, "strength": strength, } - self.___logger.debug(f"Requested to train custom model with payload {payload}: POST {url}") + self.__logger.debug("Requested to train custom model with payload %s: POST %s", payload, url) session = await self.___get_client_session("post") try: async with session.post(url, json=payload) as response: response.raise_for_status() - self.___logger.debug(f"Custom modal has been trained: {response}") + self.__logger.debug("Custom modal has been trained: %s", response) await session.close() return response except Exception as error: - self.___logger.error(f"Error training custom model: {str(error)}") + self.__logger.error("Error training custom model: %s", str(error)) if not session.closed: await session.close() raise error @@ -710,17 +718,17 @@ async def get_custom_model_by_id(self, model_id: str) -> dict: Exception: if error occurred while get custom model by id """ url = f"https://cloud.leonardo.ai/api/rest/v1/models/{model_id}" - self.___logger.debug(f"Requested to obtain custom model by model_id={model_id}: GET {url}") + self.__logger.debug("Requested to obtain custom model by model_id=%s: GET %s", model_id, url) session = await self.___get_client_session("get") try: async with session.get(url) as response: response.raise_for_status() response_dict = await response.json() - self.___logger.debug(f"Custom modal has been trained: {response_dict}") + self.__logger.debug("Custom modal has been trained: %s", response_dict) await session.close() return response_dict except Exception as error: - self.___logger.error(f"Error obtaining custom model: {str(error)}") + self.__logger.error("Error obtaining custom model: %s", str(error)) if not session.closed: await session.close() raise @@ -738,16 +746,16 @@ async def delete_custom_model_by_id(self, model_id: str) -> aiohttp.ClientRespon Exception: if error occurred while delete custom model by id """ url = f"https://cloud.leonardo.ai/api/rest/v1/models/{model_id}" - self.___logger.debug(f"Requested to delete custom model by model_id={model_id}: GET {url}") + self.__logger.debug("Requested to delete custom model by model_id=%s: GET %s", model_id, url) session = await self.___get_client_session("delete") try: async with session.delete(url) as response: response.raise_for_status() - self.___logger.debug(f"Custom modal has been deleted: {response}") + self.__logger.debug("Custom modal has been deleted: %s", response) await session.close() return response except Exception as error: - self.___logger.error(f"Error delete custom model: {str(error)}") + self.__logger.error("Error delete custom model: %s", str(error)) if not session.closed: await session.close() raise error diff --git a/src/leonardo_api/leonardo_sync.py b/src/leonardo_api/leonardo_sync.py index 99fcc91..f485b9c 100644 --- a/src/leonardo_api/leonardo_sync.py +++ b/src/leonardo_api/leonardo_sync.py @@ -2,7 +2,7 @@ """ Filename: leonardo_sync.py Author: Iliya Vereshchagin -Copyright (c) 2023. All rights reserved. +Copyright (c) 2024. All rights reserved. 
Created: 29.08.2023 Last Modified: 01.10.2024 From 1a9de0d89271d609711a9efc1d50ed9056419262 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Fri, 8 Nov 2024 10:11:10 +0100 Subject: [PATCH 6/9] Update README.md --- CHANGELOG.md | 6 ++++++ README.md | 7 ++++++- src/README.md | 7 ++++++- 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb7d956..e7463b9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -63,3 +63,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ### Fixed - Fixed image upload methods (headers should be purged before poking s3) - Fixed session headers update to much more generic + +## [0.0.11] - 2024-11-14 + +### Fixed +- Fixed linters problems (logging) +- Bumped requirements & badges \ No newline at end of file diff --git a/README.md b/README.md index b01f29a..dae5e69 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,11 @@ ## This is Leonardo.ai API. -[![PyPI version](https://badge.fury.io/py/leonardo-api.svg)](https://badge.fury.io/py/leonardo-api) [![Linters](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml/badge.svg?branch=master)](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml) +[![PyPI version](https://badge.fury.io/py/leonardo-api.svg)](https://badge.fury.io/py/leonardo-api) +[![Linters](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml/badge.svg?branch=master)](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml) +![PyPI - License](https://img.shields.io/pypi/l/leonardo-api) +![PyPI - Python Version](https://img.shields.io/pypi/pyversions/leonardo-api) +[![Downloads](https://static.pepy.tech/badge/leonardo-api)](https://pepy.tech/project/leonardo-api) +[![Downloads](https://static.pepy.tech/badge/leonardo-api/month)](https://pepy.tech/project/leonardo-api) This package contains Python API for [Leonardo.ai](https://leonardo.ai/) based on official [API documentation](https://docs.leonardo.ai/reference). diff --git a/src/README.md b/src/README.md index b01f29a..dae5e69 100644 --- a/src/README.md +++ b/src/README.md @@ -1,6 +1,11 @@ ## This is Leonardo.ai API. -[![PyPI version](https://badge.fury.io/py/leonardo-api.svg)](https://badge.fury.io/py/leonardo-api) [![Linters](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml/badge.svg?branch=master)](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml) +[![PyPI version](https://badge.fury.io/py/leonardo-api.svg)](https://badge.fury.io/py/leonardo-api) +[![Linters](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml/badge.svg?branch=master)](https://github.com/wwakabobik/leonardo_api/actions/workflows/master-linters.yml) +![PyPI - License](https://img.shields.io/pypi/l/leonardo-api) +![PyPI - Python Version](https://img.shields.io/pypi/pyversions/leonardo-api) +[![Downloads](https://static.pepy.tech/badge/leonardo-api)](https://pepy.tech/project/leonardo-api) +[![Downloads](https://static.pepy.tech/badge/leonardo-api/month)](https://pepy.tech/project/leonardo-api) This package contains Python API for [Leonardo.ai](https://leonardo.ai/) based on official [API documentation](https://docs.leonardo.ai/reference). 
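
The linter fixes in patches 1-3 and 5 follow a single pattern: logging calls stop building f-strings eagerly and instead pass lazy %-style arguments, which is what pylint's logging-fstring-interpolation (W1203) check asks for. Exception messages keep f-strings, since exception constructors do no %-formatting of extra arguments (patch 2 reverts ValueError and TimeoutError accordingly). A minimal, self-contained sketch of that pattern; the logger name and URL below are placeholders rather than the package's setup_logger():

import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("example")  # placeholder, not the package's setup_logger()

url = "https://cloud.leonardo.ai/api/rest/v1/me"

# Eager: the f-string is rendered even when DEBUG is disabled, and pylint
# reports W1203 (logging-fstring-interpolation).
logger.debug(f"Requesting user info: GET {url}")

# Lazy: %-style arguments are interpolated only if the record is emitted;
# this is the form every logger call in the diffs switches to.
logger.debug("Requesting user info: GET %s", url)

# Exception constructors do not apply %-formatting to extra arguments,
# so f-strings remain correct here (hence the revert in patch 2).
valid_extensions = ["png", "jpg", "jpeg", "webp"]
try:
    raise ValueError(f"Invalid file extension. Must be one of {valid_extensions}")
except ValueError as err:
    logger.error("Error occurred while validating extension: %s", err)
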
From f27fcaada85f1de17092a78dc6cd2ffd4d9e27c6 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Fri, 8 Nov 2024 10:14:53 +0100 Subject: [PATCH 7/9] Bump requirements.txt --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index 569ad32..0b2cfeb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ aiofiles==24.1.0 asyncio==3.4.3 requests==2.32.3 urllib3==2.2.3 -async-timeout==5.0.1 +async-timeout>=4.0.3 certifi==2024.8.30 charset-normalizer==3.4.0 frozenlist==1.5.0 From 9b82070499440c4864a8b7947c13351860b9b4c7 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Fri, 8 Nov 2024 10:16:54 +0100 Subject: [PATCH 8/9] Add support of python 3.13 --- .github/workflows/linters.yml | 2 +- .github/workflows/master-linters.yml | 2 +- pyproject.toml | 1 + setup.cfg | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index d2175e8..a3d341a 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.github/workflows/master-linters.yml b/.github/workflows/master-linters.yml index cf21bc4..37e26e1 100644 --- a/.github/workflows/master-linters.yml +++ b/.github/workflows/master-linters.yml @@ -8,7 +8,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.9", "3.10", "3.11", "3.12"] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/pyproject.toml b/pyproject.toml index 0e62419..8292da7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Intended Audience :: Developers", diff --git a/setup.cfg b/setup.cfg index 1c25666..d4fdd96 100644 --- a/setup.cfg +++ b/setup.cfg @@ -17,6 +17,7 @@ classifiers = Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 Programming Language :: Python :: 3.12 + Programming Language :: Python :: 3.13 Operating System :: OS Independent Intended Audience :: Developers Intended Audience :: Information Technology From f521fdc90937270aef45197ebddb5a26dd5f1f69 Mon Sep 17 00:00:00 2001 From: wwakabobik Date: Fri, 8 Nov 2024 10:24:42 +0100 Subject: [PATCH 9/9] Prepare to publish 0.0.11 --- LICENSE | 2 +- pyproject.toml | 2 +- setup.cfg | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/LICENSE b/LICENSE index 0bec452..6ad26ee 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ MIT License -Copyright (c) 2023 Ilya Vereshchagin +Copyright (c) 2024 Ilya Vereshchagin Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/pyproject.toml b/pyproject.toml index 8292da7..e62c1ef 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "hatchling.build" [project] name = "leonardo_api" -version = "0.0.10" +version = "0.0.11" authors = [ { name="Iliya Vereshchagin", email="i.vereshchagin@gmail.com" }, ] diff --git a/setup.cfg b/setup.cfg index 
d4fdd96..4e6fca3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,6 +1,6 @@ [metadata] name = leonardo_api -version = attr: leonardo_api.0.0.10 +version = attr: leonardo_api.0.0.11 author = Iliya Vereshchagin author_email = i.vereshchagin@gmail.com maintainer = Iliya Vereshchagin
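
For orientation, the README diff in patch 6 describes the package as a Python wrapper around the Leonardo.ai REST API. Below is a rough usage sketch of the synchronous client assembled only from the method signatures visible in this series; the import path, the Leonardo class name, and the argument values are assumptions, not confirmed by the diffs:

# Hypothetical usage sketch: the class name, import path and argument values
# below are assumptions inferred from src/leonardo_api/leonardo_sync.py.
from leonardo_api.leonardo_sync import Leonardo  # assumed export name

client = Leonardo(auth_token="YOUR_LEONARDO_API_TOKEN")

# Authenticated account details, as returned by GET /api/rest/v1/me.
user_info = client.get_user_info()
print(user_info)

# Queue an image generation; the response carries the generation id that
# get_single_generation() / wait_for_image_generation() can poll later.
generation = client.post_generations(prompt="a watercolor fox in a misty forest")
print(generation)
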