diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index b365839c..033da80c 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,5 +1,5 @@
-# Contributing
-
-Welcome to the Vertex Templates Project, and thank you for your interest in contributing!
-
-This guide is chiefly for users wishing to contribute to the open-source version. Those who want to edit the templates to suit their own purposes should look at [USAGE](USAGE.md), but may still find some sections of this guide useful.
-
-## Links to Important Resources
-- [pytest](https://docs.pytest.org)
-- [unittest.mock](https://docs.python.org/3/library/unittest.mock.html)
-- [pipenv](https://pipenv-fork.readthedocs.io/en/latest/index.html)
-- [Kubeflow Pipelines](https://www.kubeflow.org/docs/components/pipelines/overview)
-- [Vertex AI](https://cloud.google.com/vertex-ai/docs)
-- [AI Platform SDK](https://googleapis.dev/python/aiplatform/latest/index.html)
-
-## Python code guidelines
-
-### Code linting and formatting
-We use linting to highlight problems in our Python code which could later produce errors or affect efficiency. For example, linting detects things such as uninitialised or undefined variables, unused imported modules, and missing parentheses. To detect and fix these problems, we use the Python linter [Flake8](https://flake8.pycqa.org/en/latest/) and the formatter [Black](https://black.readthedocs.io/en/stable/). These are run as part of our [pre-commit checks](#Pre-commit-checks). We have some configurations for Flake8 that you can find [here](.flake8).
-### Docstring format
-Please include docstrings that are compliant with the [Google Style](https://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html).
-
-In brief, the docstring is a triple-quoted string placed immediately after the function definition and should contain:
-- A brief description of the function
-- A section `Args:` where you list the function arguments
-  - For each argument, include the argument type in brackets
-- A section `Returns:` where you list what the function returns
-  - For each returned item, include the type
-## Testing
-Testing is an important way for us to check that our code works as expected and forms a key part of our CI/CD strategy. For example, pull requests will not pass our checks if any unit tests fail. You should therefore ensure that your pull request passes all tests before merging.
-
-### Unit Tests
-
-We use unit tests to test small sections of logically isolated code in our pipeline components.
-
-#### Mocking & patching
-When we test a function that makes an external API call (for example to a service on GCP), we mock the associated module or class and its functions and attributes. The rationale behind this is that we want to test our own logic and not the API call(s) themselves. Indeed, API calls can be broken, computationally expensive, slow, or limited, and we do not want our unit tests to fail because of any of these.
-
-We do mocking/patching in two different ways:
-1. [`monkeypatch`](https://docs.pytest.org/en/6.2.x/monkeypatch.html): this built-in `pytest` fixture allows you to modify an attribute (such as an instance method in a class). We use `monkeypatch` only in the relevant `conftest.py` file for the fixtures that are applied before every unit test. We have used `monkeypatch` in the following fixtures:
-  - `mock_kfp_artifact`: used to mock the `Artifact` object (and thus any derived classes such as `Dataset`, `Model`, etc.) in `kfp.v2.dsl` to return the URI as the path. This lets us create mock `Artifact` objects locally for our unit tests.
-2. `unittest.mock.patch`: this object in the `unittest` library enables us to mock classes (and their associated attributes and methods) within a context manager. We use `mock.patch` inside the individual test scripts to mock object(s) that are used in the function being tested. This allows us to replace the target class/object with a Mock object, ultimately allowing us to make assertions on how this Mock object has been used. For example, the `assert_called_once_with` method allows us to check that a specific method of a Mock object was called once with specific arguments. Alternatively, we can set the attributes of our Mock objects to specific values, and assert that the component logic being tested handles these cases correctly (e.g. by raising a `ValueError`). An example of using the `mock.patch` context manager is in [`test_lookup_model.py`](tests/kfp_components/aiplatform/test_lookup_model.py) for [`lookup_model.py`](./pipeline_components/aiplatform/aiplatform/lookup_model/component.py), where there is an API call to list models (in Vertex AI) based on a filter, namely `google.cloud.aiplatform.Model.list`. When we test this KFP component, we are not interested in actually making this API call, so instead we mock it. We do this by mocking the `google.cloud.aiplatform.Model` class:
-```
-with mock.patch("google.cloud.aiplatform.Model") as mock_model:
-
-    # Mock attribute and method
-    mock_model.resource_name = "my-model-resource-name"
-    mock_model.list.return_value = [mock_model]
-
-    # Invoke the model look up
-    found_model_resource_name = lookup_model(
-        model_name="my-model",
-        project_location="europe-west4",
-        project_id="my-project-id",
-        order_models_by="create_time desc",
-        fail_on_model_not_found=False,
-    )
-
-    assert found_model_resource_name == "my-model-resource-name"
-```
-
-#### `pytest` fixtures & `conftest.py`
-- `tmpdir`: a built-in `pytest` fixture that we use to create temporary paths for our unit tests.
-- `monkeypatch`: see above.
-- The file `conftest.py` contains our custom `pytest` fixtures which we apply for each testing session.
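For illustration, a minimal sketch of what the `mock_kfp_artifact` fixture in `conftest.py` could look like (the details here are assumptions, not the exact implementation):

```python
import pytest
from kfp.v2.dsl import Artifact


@pytest.fixture(autouse=True)
def mock_kfp_artifact(monkeypatch):
    """Patch Artifact.path to simply return the artifact's URI, so unit
    tests can use local file paths as artifact URIs."""

    def _path(self) -> str:
        return self.uri

    monkeypatch.setattr(Artifact, "path", property(_path))
```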
-
-#### How to write unit tests
-Some things to consider testing for in your code:
-- **Do you have any logical conditions?** Consider writing tests that assert the desired outcome occurs for each possible outcome. In particular, you can assert that a certain error is raised in your function under certain conditions. For example, in [`lookup_model.py`](./pipeline_components/aiplatform/aiplatform/lookup_model/component.py), the function `lookup_model` raises a `RuntimeError` if no models are found:
-```
-if fail_on_model_not_found:
-    raise RuntimeError("Failed as model not found")
-```
-Now in [`test_lookup_model.py`](./pipeline_components/aiplatform/tests/test_lookup_model.py), in the unit test `test_lookup_model_when_no_models_fail` you can use `pytest.raises` to check that `lookup_model` actually raises a `RuntimeError`:
-```
-with pytest.raises(RuntimeError):
-    lookup_model(
-        model_name="my-model",
-        project_location="europe-west4",
-        project_id="my-project-id",
-        order_models_by="create_time desc",
-        fail_on_model_not_found=True,
-    )
-```
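Beyond checking raised errors, you can also assert on how a mocked dependency was used. A hedged sketch extending the `lookup_model` example above (whether and how the component passes arguments to `Model.list` is an assumption about its internals, so only the call count is asserted here):

```python
from unittest import mock

with mock.patch("google.cloud.aiplatform.Model") as mock_model:
    mock_model.list.return_value = []

    lookup_model(
        model_name="my-model",
        project_location="europe-west4",
        project_id="my-project-id",
        order_models_by="create_time desc",
        fail_on_model_not_found=False,
    )

    # Verify the component hit the (mocked) Vertex AI list API exactly once
    mock_model.list.assert_called_once()
```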
-
-#### How to run unit tests
-Unit tests for pipeline components are run automatically on each pull request. You can also run them on your local machine:
-```
-make test-all-components
-```
-
-Or to just run the unit tests for a given component group (e.g. `vertex-components`):
-```
-make test-components GROUP=vertex-components
-```
-
-### End-to-end (E2E) pipeline tests
-We use end-to-end (E2E) pipeline tests to ensure that our pipelines are running as expected. Our E2E tests ensure:
-- That the pipeline is successfully triggered locally, and that the pipeline run is completed
-- That common tasks (components), which are stored in a dictionary object (`common_tasks`), occurred in the pipeline
-- That if any task in a conditional tasks dictionary object occurred in the pipeline, the remaining tasks based on that condition all occurred as well
-- That these pipeline tasks output the correct artifacts, by checking whether they have been saved to a GCS URI or have been generated successfully in Vertex AI.
-
-Note:
-These dictionary objects (`common_tasks`, `conditional_tasks`) are defined in `test_e2e.py` in each pipeline folder, e.g. `./pipelines/tests/xgboost/training/test_e2e.py`.
-The E2E test allows only one common task group, but the number of conditional task groups is not limited. To define the correct task groups,
-inspect the pipeline job in Vertex AI.
-For example, in the XGBoost training pipeline, there are two conditional task groups (bounded by the dashed frames in the Vertex AI pipeline graph).
-Thus, in `./pipelines/tests/xgboost/training/test_e2e.py`, there are two dictionaries, one for each conditional task group.
-
-- Optionally check for executed tasks and created output artifacts.
-
-#### How to run end-to-end (E2E) pipeline tests
-E2E tests are run on each PR that is merged to the main branch. You can also run them on your local machine:
-```
-make e2e-tests pipeline=<training|prediction>
-```
-
-### How to adapt the end-to-end (E2E) pipeline tests for your own pipeline
-As described above, we provide our E2E tests with a dictionary of expected outputs for the pipeline components, and confirm that these outputs are stored in a GCS URI or generated successfully in Vertex AI.
-For information on how tasks and outputs are stored in your pipeline, we recommend looking at these [AI Platform documents](https://googleapis.dev/python/aiplatform/latest/aiplatform_v1beta1/types.html#google.cloud.aiplatform_v1beta1.types.PipelineJob). The following briefly describes how we created this dictionary, and you can use this to create your own dictionary of expected tasks:
-1. Trigger a pipeline (using the default pipeline input parameters), and collect the pipeline tasks and their details. For example:
-```
-from trigger.main import trigger_pipeline_from_payload
-
-...
-
-    payload = {
-        "attributes": {
-            "template_path": template_path,
-            "enable_caching": False
-        }
-    }
-    pl = trigger_pipeline_from_payload(payload)
-    pl.wait()
-    details = pl.to_dict()
-    tasks = details["jobDetail"]["taskDetails"]
-
-```
-2. Check for missing tasks by comparing the actual tasks produced in the pipeline with the expected tasks defined in each pipeline test file (e.g. `./pipelines/tests/xgboost/training/test_e2e.py`):
-```
-    missing_tasks = [
-        task_name
-        for task_name in expected_tasks.keys()
-        if task_name not in actual_tasks.keys()
-    ]
-```
-3. Check that all task outputs are as expected and accessible:
-```
-    storage_client = storage.Client()
-    for task_name, expected_output in expected_tasks.items():
-        actual_outputs = actual_tasks[task_name]
-        # check the output artifacts are as expected
-        diff = set(expected_output).symmetric_difference(actual_outputs.keys())
-        assert (
-            len(diff) == 0
-        ), f"task: {task_name}, expected_output {expected_output}, actual_outputs: {actual_outputs.keys()}"
-        for output_artifact in expected_output:
-            output_uri = actual_outputs[output_artifact]
-
-            # check the output was generated successfully:
-            # for a GCS URI, check its file size
-            if output_uri.startswith("gs://"):
-                file_size = test_gcs_uri(output_uri, storage_client)
-                assert (
-                    file_size > 0
-                ), f"{output_artifact} in task {task_name} is not accessible"
-            # for a Vertex resource, check that the resource exists
-            else:
-                # all Vertex AI resource URIs follow this pattern:
-                # 'projects/<project>/locations/<location>/<resource type>/<resource ID>'
-                artifact_type = output_uri.split('/')[-2]
-                object_existence = test_functions[artifact_type](output_uri.split('/')[-1])
-                assert (
-                    object_existence
-                ), f"{output_artifact} in task {task_name} is not accessible"
-
-```
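The `test_gcs_uri` helper used above is not shown in the snippet; a minimal sketch of what it could look like (an assumption, not the exact implementation):

```python
from google.cloud import storage


def test_gcs_uri(output_uri: str, storage_client: storage.Client) -> int:
    """Return the size in bytes of the object at the given gs:// URI (0 if absent)."""
    bucket_name, blob_path = output_uri[len("gs://"):].split("/", 1)
    blob = storage_client.bucket(bucket_name).get_blob(blob_path)
    return blob.size if blob is not None else 0
```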
-Notes:
-This template only provides three Vertex AI resource check functions: `test_vertex_model_uri`, `test_vertex_endpoint_uri`, `test_batch_prediction_job_uri`.
-For other Vertex artifacts, you can create a new one following this code:
-```
-def test_vertex_endpoint_uri(output_uri: str):
-    from google.cloud.aiplatform import Endpoint
-    try:
-        Endpoint(
-            endpoint_name=output_uri,
-            project=project_id,
-            location=project_location,
-        )
-        return True
-    except Exception:
-        return False
-
-```
-
-## Adding or changing python dependencies
-We use [pipenv](https://pipenv-fork.readthedocs.io/en/latest/index.html) to handle our packages and their dependencies. Each group of pipeline components (e.g. [aiplatform](./pipeline_components/aiplatform/)) contains its own pipenv environment, and there is a [separate pipenv environment](./pipelines/) for the ML pipelines themselves and the pipeline trigger code.
-
-### Adding python dependencies
-You may need to add new packages for your own use cases. To do this, run the following from the relevant directory ([pipelines](./pipelines) for the main ML pipeline dependencies or the directory of the relevant component group e.g. [aiplatform](./pipeline_components/aiplatform/)):
-```
-pipenv install <package name>
-```
-
-## Committing Changes
-
-### Add changed files to staging area
-```
-git add <files>
-```
-### Commit messages
-To allow others (and yourself!) to understand your changes easily, we encourage writing detailed commit messages. We use [Commitizen](https://commitizen-tools.github.io/commitizen/) as a guide. In brief, all commit messages should start with one of the following prefixes:
-- **feat**: A new feature
-- **fix**: A bug fix
-- **docs**: Documentation only changes
-- **style**: Changes that do not affect the meaning of the code (white-space, formatting, missing semi-colons etc.)
-- **refactor**: A change that neither fixes a bug nor adds a feature
-- **perf**: A change that improves performance
-- **test**: Add new tests or correct existing tests
-- **build**: Changes that affect the build system or external dependencies (e.g. pip, docker, npm)
-- **ci**: Changes to CI configuration files and scripts
-
-After the prefix, you can specify the scope (e.g. the class or file name) of the change in brackets. Finally, you include the details of the change. For example:
-```
-git commit -s -m "prefix(scope): message here"
-```
-
-### Things to avoid in your commit messages:
-- Too brief or non-specific messages (e.g. `bug fix`, `small changes`)
-- Messages that do not explain why the changes were made
-
-
-### Pre-commit checks
-We use pre-commit hooks to automatically identify and correct issues in code. You can find the details of the hooks we use [here](.pre-commit-config.yaml). If any of these fail, the commit will be unsuccessful and you will be able to see which hook failed, along with some additional details, in your terminal.
-
-To run the pre-commit hooks over the entire repo, run:
-```
-make pre-commit
-```
-
-#### What to do if pre-commit checks fail:
-- **Checks fail and the pre-commit hook edits a file to fix the error.** Sometimes the pre-commit hook will identify an error (e.g. the absence of a blank line at the end of a file), and will correct this automatically by changing your file. You will then need to re-add these files to the staging area before trying to commit again.
-- **Checks fail and display an error message.** Some errors cannot be automatically fixed by pre-commit hooks; instead they will display the error number and the file and line which failed. For more details beyond the error message, you can look up the error number online. The most common errors are caused by lines which exceed the character limit. Once you identify the cause of the error, you will need to fix this in your code, add the edited file to the staging area, and then commit again.
-
-### Commit changes to Python packages and dependencies
-If you have changes to `Pipfile` and `Pipfile.lock`, please make sure you commit these files!
-
-## Makefile
-This project contains a [Makefile](Makefile) which defines "rules" describing the commands to be executed by the system. These allow you to quickly and easily run commands for specific purposes, for example running all of the unit tests, or compiling a pipeline. You can find the full set of available `make` rules by running:
-```
-make help
-```
-
-Some of these rules use the environment variables specified in [`env.sh`](env.sh).
-
-**It is not expected that you will need to change the Makefile or create a new one.**
-
-## Assets folder
-
-For a brief description of the Assets folder, please refer to our [general documentation](README.md#Assets).
-To make sure that assets are available while running the ML pipelines, `make run` ensures that these are uploaded automatically to the respective Google Cloud Storage locations.
-
-### Common assets
-
-Within the [assets](./assets/) folder, there are common files stored which need to be uploaded to Google Cloud Storage so that the pipelines running on Vertex AI can consume such assets, namely:
diff --git a/Makefile b/Makefile
index 05a3c1e8..2d9a3a96 100644
--- a/Makefile
+++ b/Makefile
@@ -1,4 +1,4 @@
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,79 +15,110 @@ -include env.sh export -help: ## Display this help screen - @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' - -pre-commit: ## Runs the pre-commit checks over entire repo - @cd pipelines && \ - pipenv run pre-commit run --all-files - -setup: ## Set up local environment for Python development on pipelines - @pip install pipenv && \ - cd pipelines && \ - pipenv install --dev - -test-trigger: ## Runs unit tests for the pipeline trigger code - @cd pipelines && \ - pipenv run python -m pytest tests/trigger - -compile-pipeline: ## Compile the pipeline to training.json or prediction.json. Must specify pipeline= - @cd pipelines/src && \ - pipenv run python -m pipelines.${PIPELINE_TEMPLATE}.${pipeline}.pipeline - -setup-components: ## Run unit tests for a component group - @cd "components/${GROUP}" && \ - pipenv install --dev - -setup-all-components: ## Run unit tests for all pipeline components - @set -e && \ - for component_group in components/*/ ; do \ - echo "Setup components under $$component_group" && \ - $(MAKE) setup-components GROUP=$$(basename $$component_group) ; \ - done - -test-components: ## Run unit tests for a component group - @cd "components/${GROUP}" && \ - pipenv run pytest - -test-all-components: ## Run unit tests for all pipeline components - @set -e && \ - for component_group in components/*/ ; do \ - echo "Test components under $$component_group" && \ - $(MAKE) test-components GROUP=$$(basename $$component_group) ; \ - done - -sync-assets: ## Sync assets folder to GCS. Must specify pipeline= - @if [ -d "./pipelines/src/pipelines/${PIPELINE_TEMPLATE}/$(pipeline)/assets/" ] ; then \ - echo "Syncing assets to GCS" && \ - gsutil -m rsync -r -d ./pipelines/src/pipelines/${PIPELINE_TEMPLATE}/$(pipeline)/assets ${PIPELINE_FILES_GCS_PATH}/$(pipeline)/assets ; \ - else \ - echo "No assets folder found for pipeline $(pipeline)" ; \ - fi ; - -run: ## Compile pipeline, copy assets to GCS, and run pipeline in sandbox environment. Must specify pipeline=. Optionally specify enable_pipeline_caching= (defaults to default Vertex caching behaviour) - @ $(MAKE) compile-pipeline && \ - $(MAKE) sync-assets && \ - cd pipelines/src && \ - pipenv run python -m pipelines.trigger --template_path=./$(pipeline).json --enable_caching=$(enable_pipeline_caching) - -sync_assets ?= true -e2e-tests: ## (Optionally) copy assets to GCS, and perform end-to-end (E2E) pipeline tests. Must specify pipeline=. Optionally specify enable_pipeline_caching= (defaults to default Vertex caching behaviour). Optionally specify sync_assets= (defaults to true) - @if [ $$sync_assets = true ] ; then \ - $(MAKE) sync-assets; \ - else \ - echo "Skipping syncing assets to GCS"; \ - fi && \ - cd pipelines && \ - pipenv run pytest --log-cli-level=INFO tests/${PIPELINE_TEMPLATE}/$(pipeline) --enable_caching=$(enable_pipeline_caching) +help: ## Display this help screen. + @grep -h -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | \ + awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' env ?= dev -deploy-infra: ## Deploy the Terraform infrastructure to your project. Requires VERTEX_PROJECT_ID and VERTEX_LOCATION env variables to be set in env.sh. Optionally specify env= (default = dev) - @ cd terraform/envs/$(env) && \ +AUTO_APPROVE_FLAG := +deploy: ## Deploy infrastructure to your project. Optionally set env= (default=dev). 
+	@echo "################################################################################" && \
+	echo "# Deploy $$env environment" && \
+	echo "################################################################################" && \
+	if [ "$(auto-approve)" = "true" ]; then \
+		AUTO_APPROVE_FLAG="-auto-approve"; \
+	fi; \
+	cd terraform/envs/$(env) && \
 	terraform init -backend-config='bucket=${VERTEX_PROJECT_ID}-tfstate' && \
-	terraform apply -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}'
-
-destroy-infra: ## DESTROY the Terraform infrastructure in your project. Requires VERTEX_PROJECT_ID and VERTEX_LOCATION env variables to be set in env.sh. Optionally specify env= (default = dev)
-	@ cd terraform/envs/$(env) && \
+	terraform apply -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}' $$AUTO_APPROVE_FLAG
+
+undeploy: ## Destroy the infrastructure in your project. Optionally set env= (default=dev).
+	@echo "################################################################################" && \
+	echo "# Destroy $$env environment" && \
+	echo "################################################################################" && \
+	if [ "$(auto-approve)" = "true" ]; then \
+		AUTO_APPROVE_FLAG="-auto-approve"; \
+	fi; \
+	cd terraform/envs/$(env) && \
 	terraform init -backend-config='bucket=${VERTEX_PROJECT_ID}-tfstate' && \
-	terraform destroy -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}'
+	terraform destroy -var 'project_id=${VERTEX_PROJECT_ID}' -var 'region=${VERTEX_LOCATION}' $$AUTO_APPROVE_FLAG
+
+install: ## Set up local Python environment for development.
+	@echo "################################################################################" && \
+	echo "# Install Python dependencies" && \
+	echo "################################################################################" && \
+	cd model && \
+	poetry install --no-root && \
+	cd ../pipelines && \
+	poetry install --with dev && \
+	cd ../components && \
+	poetry install --with dev
+
+compile: ## Compile pipeline. Set pipeline=<training|prediction>.
+	@echo "################################################################################" && \
+	echo "# Compile $$pipeline pipeline" && \
+	echo "################################################################################" && \
+	cd pipelines/src && \
+	poetry run kfp dsl compile --py pipelines/${pipeline}.py --output pipelines/${pipeline}.yaml --function pipeline
+
+images ?= training prediction
+build: ## Build and push container(s). Optionally set images=<images> (default="training prediction").
+	@echo "################################################################################" && \
+	echo "# Build $$images image(s)" && \
+	echo "################################################################################" && \
+	cd model && \
+	for image in $$images ; do \
+		echo "Build $$image image" && \
+		gcloud builds submit . \
+		--region=${VERTEX_LOCATION} \
+		--project=${VERTEX_PROJECT_ID} \
+		--gcs-source-staging-dir=gs://${VERTEX_PROJECT_ID}-staging/source \
+		--substitutions=_DOCKER_TARGET=$$image,_DESTINATION_IMAGE_URI=${CONTAINER_IMAGE_REGISTRY}/$$image:${RESOURCE_SUFFIX} \
+		--suppress-logs ; \
+	done
+
+compile ?= true
+build ?= true
+cache ?= true
+wait ?= false
+run: ## Run a pipeline. Set pipeline=<training|prediction>. Optionally set compile= (default=true), build= (default=true), cache= (default=true), wait= (default=false).
+	@if [ $(compile) = "true" ]; then \
+		$(MAKE) compile ; \
+	elif [ $(compile) != "false" ]; then \
+		echo "ValueError: compile must be either true or false" ; \
+		exit 1 ; \
+	fi && \
+	if [ $(build) = "true" ]; then \
+		$(MAKE) build ; \
+	elif [ $(build) != "false" ]; then \
+		echo "ValueError: build must be either true or false" ; \
+		exit 1 ; \
+	fi && \
+	echo "################################################################################" && \
+	echo "# Run $$pipeline pipeline" && \
+	echo "################################################################################" && \
+	cd pipelines/src && \
+	ENABLE_PIPELINE_CACHING=$$cache poetry run python -m pipelines.utils.trigger_pipeline \
+		--template_path=pipelines/${pipeline}.yaml --display_name=${pipeline} --wait=${wait}
+
+training: ## Run training pipeline. Rebuilds training and prediction images. Supports the same options as run.
+	@$(MAKE) run pipeline=training
+
+prediction: ## Run prediction pipeline. Doesn't rebuild images. Supports the same options as run.
+	@$(MAKE) run pipeline=prediction build=false
+
+packages ?= pipelines components
+test: ## Run unit tests. Optionally set packages=<packages> (default="pipelines components").
+	@echo "################################################################################" && \
+	echo "# Test $$packages package(s)" && \
+	echo "################################################################################" && \
+	for package in $$packages ; do \
+		echo "Testing $$package package" && \
+		cd $$package && \
+		poetry run pytest && \
+		cd .. ; \
+	done
+
+pre-commit: ## Run pre-commit checks for pipelines.
+	@cd pipelines && \
+	poetry run pre-commit run --all-files
diff --git a/README.md b/README.md
index 09940d13..da853fc5 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,5 @@
-# Vertex Pipelines End-to-end Samples
-## Introduction
-
-This repository provides a reference implementation of [Vertex Pipelines](https://cloud.google.com/vertex-ai/docs/pipelines/) for creating a production-ready MLOps solution on Google Cloud.
-
-### Why does this matter?
-
-It is hard to productionize data science use cases, especially because the journey from experimentation to production lacks standardisation.
-Thus, this project aims at boosting scalability, productivity and standardisation of data science use cases amongst data science teams.
-As a result, this will free up time for data scientists so that they can focus on data science with minimal engineering overhead.
-
-This project bundles reusable code and provides the creation of an MLOps platform via a template-driven approach, allowing you to:
-
-- **Create a new use case from a template.** Create a new ML training pipeline and batch prediction pipeline based on a template.
-- **Execute a one-off pipeline run in a sandbox environment.** Try out a pipeline during the development cycle in a sandbox (development) environment.
-- **Deploy a pipeline to a production environment.** Deploy a new or updated pipeline to a production environment allowing for orchestration, schedules and triggers.
-- **Publish a new template.** Customize or extend the existing templates to create a new pipeline template that can be used by the data scientists on the team to support new use cases.
-
-As such, this project includes the infrastructure on Google Cloud, a CI/CD integration and existing templates to support training and prediction pipelines for common ML frameworks such as TensorFlow and XGBoost.
-To showcase the pipelines in action, the [Public Chicago Taxi Trips Dataset](https://console.cloud.google.com/bigquery?p=bigquery-public-data&d=chicago_taxi_trips&page=dataset) is used to predict the total fare of a taxi trip in Chicago.
+# Vertex Pipelines End-to-End Samples
-### Cloud Architecture
+_AKA "Vertex AI Turbo Templates"_
-Vertex AI Pipelines is a serverless orchestrator for running ML pipelines, using either the KFP SDK or TFX. However, unlike Kubeflow Pipelines, it does not have a built-in mechanism for saving Pipelines so that they can be run later, either on a schedule or via an external trigger. Instead, every time you want to run an ML pipeline in Vertex AI, you must make the API call to Vertex AI Pipelines, including the full pipeline definition, and Vertex Pipelines will run the pipeline there and then.
+![Shell](https://github.com/teamdatatonic/vertex-pipelines-end-to-end-samples/wiki/images/shell.gif)
-In a production MLOps solution, your ML pipelines need to be repeatable. So, we have created a Cloud Function to trigger the execution of ML pipelines on Vertex AI. This can be done either using a schedule (via Cloud Scheduler), or from an external system using Pub/Sub. We use Cloud Build to compile the pipelines using the KFP SDK, and publish them to a GCS bucket. The Cloud Function retrieves the pipeline definition from the bucket, and triggers an execution of the pipeline in Vertex AI.
-
-![Using a Cloud Function to trigger Vertex Pipelines](docs/images/cf_view.png)
-
-## Getting started
-
-### Prerequisites
-
-- [pyenv](https://github.com/pyenv/pyenv#installation) for managing Python versions
-- [Cloud SDK](https://cloud.google.com/sdk/docs/quickstart)
-- Make
-
-### Local setup
-
-1. Clone the repository locally
-1. Install Python: `pyenv install`
-1. Install pipenv and pipenv dependencies: `make setup`
-1. Install pre-commit hooks: `cd pipelines && pipenv run pre-commit install`
-1. Copy `env.sh.example` to `env.sh`, and update the environment variables in `env.sh`
-1. Load the environment variables in `env.sh` by running `source env.sh`
+## Introduction
-### Deploying Cloud Infrastructure
+This repository provides a reference implementation of [Vertex Pipelines](https://cloud.google.com/vertex-ai/docs/pipelines/) for creating a production-ready MLOps solution on Google Cloud.
+You can take this repository as a starting point for your own ML use cases.
+The implementation includes:
-The cloud infrastructure is managed using Terraform and is defined in the [`terraform`](terraform) directory. There are three Terraform modules defined in [`terraform/modules`](terraform/modules):
+* **Infrastructure-as-Code** using Terraform for a typical dev/test/prod setup of Vertex AI and other relevant services
+* **ML training and prediction pipelines** using the Kubeflow Pipelines SDK
+* **Reusable Kubeflow components** that can be used in common ML pipelines
+* **CI/CD** using Google Cloud Build for linting, testing, and deploying ML pipelines
+* **Developer scripts** (Makefile, Python scripts etc.)
-- `cloudfunction` - deploys a (Pub/Sub-triggered) Cloud Function from local source code
-- `scheduled_pipelines` - deploys Cloud Scheduler jobs that will trigger Vertex Pipeline runs (via the above Cloud Function)
-- `vertex_deployment` - deploys Cloud infrastructure required for running Vertex Pipelines, including enabling APIs, creating buckets, service accounts, and IAM permissions.
+**Get started today by following [this step-by-step notebook tutorial](./docs/notebooks)! 🚀**
+In this three-part notebook series you'll deploy a Google Cloud project and run production-ready ML pipelines using Vertex AI without writing a single line of code.
-There is a Terraform configuration for each environment (dev/test/prod) under [`terraform/envs`](terraform/envs/).
+## Cloud Architecture
-#### Deploying only the dev environment
+The diagram below shows the cloud architecture for this repository.
-We recommend that you set up CI/CD to deploy your environments. However, if you would prefer to deploy the dev environment manually (for example to try out the repo), you can do so as follows:
+![Cloud Architecture diagram](./docs/images/architecture.png)
-(Assuming you have an empty Google Cloud project where you are an Owner)
+There are four different Google Cloud projects in use:
-1. Install Terraform on your local machine. We recommend using [`tfswitch`](https://tfswitch.warrensbox.com/) to automatically choose and download an appropriate version for you (run `tfswitch` from the [`terraform/envs/dev`](terraform/envs/dev/) directory).
-2. Using the `gsutil` command line tool, create a Cloud Storage bucket for the Terraform state:
+* `dev` - a shared sandbox environment for use during development
+* `test` - environment for testing new changes before they are promoted to production. This environment should be treated as much as possible like a production environment.
+* `prod` - production environment
+* `admin` - separate Google Cloud project for setting up CI/CD in Cloud Build (since the CI/CD pipelines operate across the different environments)
-```
-gsutil mb -l ${VERTEX_LOCATION} -p ${VERTEX_PROJECT_ID} --pap=enforced gs://${VERTEX_PROJECT_ID}-tfstate && gsutil ubla set on gs://${VERTEX_PROJECT_ID}-tfstate
-```
+Vertex Pipelines are scheduled using Google Cloud Scheduler.
+Cloud Scheduler emits a Pub/Sub message that triggers a Cloud Function, which in turn triggers the Vertex Pipeline to run.
+_In future, this will be replaced with the Vertex Pipelines Scheduler (once there is a Terraform resource for it)._
-3. Deploy the cloud infrastructure by running the `make deploy-infra` command from the root of the repository.
+## Setup
-#### Full deployment of dev/test/prod using CI/CD
+**Prerequisites:**
-You will need four Google Cloud projects:
+- [Terraform](https://www.terraform.io/) for managing cloud infrastructure
+- [tfswitch](https://tfswitch.warrensbox.com/) to automatically choose and download an appropriate Terraform version (recommended)
+- [Pyenv](https://github.com/pyenv/pyenv#installation) for managing Python versions
+- [Poetry](https://python-poetry.org/) for managing Python dependencies
+- [Google Cloud SDK (gcloud)](https://cloud.google.com/sdk/docs/quickstart)
+- Make
+- Cloned repo
-- dev
-- test
-- prod
-- admin
+**Deploy infrastructure:**
+You will need four Google Cloud projects: dev, test, prod, and admin. The Cloud Build pipelines will run in the _admin_ project, and deploy resources into the dev/test/prod projects.
-Before your CI/CD pipelines can deploy the infrastructure, you will need to set up a Terraform state bucket for each environment:
```bash
-gsutil mb -l <location> -p <dev-project-id> --pap=enforced gs://<dev-project-id>-tfstate && gsutil ubla set on gs://<dev-project-id>-tfstate
-
-gsutil mb -l <location> -p <test-project-id> --pap=enforced gs://<test-project-id>-tfstate && gsutil ubla set on gs://<test-project-id>-tfstate
-
-gsutil mb -l <location> -p <prod-project-id> --pap=enforced gs://<prod-project-id>-tfstate && gsutil ubla set on gs://<prod-project-id>-tfstate
+export DEV_PROJECT_ID=my-dev-gcp-project
+export DEV_LOCATION=europe-west2
+gsutil mb -l $DEV_LOCATION -p $DEV_PROJECT_ID --pap=enforced gs://$DEV_PROJECT_ID-tfstate && \
+  gsutil ubla set on gs://$DEV_PROJECT_ID-tfstate
```
-You will also need to manually enable the Cloud Resource Manager and Service Usage APIs for your _admin_ project:
+Enable APIs in admin project:
```bash
-gcloud services enable cloudresourcemanager.googleapis.com --project=<admin-project-id>
-gcloud services enable serviceusage.googleapis.com --project=<admin-project-id>
+export ADMIN_PROJECT_ID=my-admin-gcp-project
+gcloud services enable cloudresourcemanager.googleapis.com serviceusage.googleapis.com --project=$ADMIN_PROJECT_ID
```
-Now that you have created a Terraform state bucket for each environment, you can set up the CI/CD pipelines. You can find instructions for this [here](cloudbuild/README.md).
-
-#### Tearing down infrastructure
-
-To tear down the infrastructure you have created with Terraform, run `make destroy-infra`.
-
-### Example ML pipelines
-
-This repository contains example ML training and prediction pipelines for two popular frameworks (XGBoost/sklearn and Tensorflow) using the popular [Chicago Taxi Dataset](https://console.cloud.google.com/marketplace/details/city-of-chicago-public-data/chicago-taxi-trips). The details of these can be found in the [separate README](pipelines/README.md).
-
-#### Pre-requisites
-
-Before you can run these example pipelines successfully there are a few additional things you will need to deploy (they have not been included in the Terraform code as they are specific to these pipelines):
-1. Create a new BigQuery dataset for the Chicago Taxi data:
```bash
-bq --location=${VERTEX_LOCATION} mk --dataset "${VERTEX_PROJECT_ID}:chicago_taxi_trips"
+make deploy env=dev
```
-2. Create a new BigQuery dataset for data processing during the pipelines:
-
-```
-bq --location=${VERTEX_LOCATION} mk --dataset "${VERTEX_PROJECT_ID}:preprocessing"
-```
+More details about the infrastructure are explained in [this guide](docs/Infrastructure.md).
+It describes the scheduling of pipelines and how to tear down infrastructure.
-3. Set up a BigQuery transfer job to mirror the Chicago Taxi dataset to your project
+**Install dependencies:**
-```
-bq mk --transfer_config \
-  --project_id=${VERTEX_PROJECT_ID} \
-  --data_source="cross_region_copy" \
-  --target_dataset="chicago_taxi_trips" \
-  --display_name="Chicago taxi trip mirror" \
-  --params='{"source_dataset_id":"'"chicago_taxi_trips"'","source_project_id":"'"bigquery-public-data"'"}'
+```bash
+pyenv install --skip-existing                     # install Python
+poetry config virtualenvs.prefer-active-python true  # configure Poetry
+make install                                      # install Python dependencies
+cd pipelines && poetry run pre-commit install     # install pre-commit hooks
+cp env.sh.example env.sh
```
-#### Running Pipelines
+Update the environment variables for your dev environment in `env.sh`.
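As a rough sketch, `env.sh` sets environment variables along these lines (the variable names are taken from elsewhere in this repository, but the example values and exact set of variables are assumptions; `env.sh.example` is the authoritative template):

```bash
#!/bin/bash
# Hypothetical example values - see env.sh.example for the real template
export VERTEX_PROJECT_ID=my-dev-gcp-project
export VERTEX_LOCATION=europe-west2
export BQ_LOCATION=${VERTEX_LOCATION}
export VERTEX_SA_EMAIL=vertex-pipelines@${VERTEX_PROJECT_ID}.iam.gserviceaccount.com
export VERTEX_CMEK_IDENTIFIER=  # optional
export VERTEX_NETWORK=          # optional
export VERTEX_PIPELINE_ROOT=gs://${VERTEX_PROJECT_ID}-pl-root  # assumed bucket name
export CONTAINER_IMAGE_REGISTRY=${VERTEX_LOCATION}-docker.pkg.dev/${VERTEX_PROJECT_ID}/vertex-images
export RESOURCE_SUFFIX=default
```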
-You can run the XGBoost training pipeline (for example) with:
+**Authenticate to Google Cloud:**
```bash
-make run PIPELINE_TEMPLATE=xgboost pipeline=training
+gcloud auth login
+gcloud auth application-default login
```
-Alternatively, add the environment variable `PIPELINE_TEMPLATE=xgboost` and/or `pipeline=training` to `env.sh`, then:
+## Run
-```bash
-make run pipeline=<training|prediction>
-```
+This repository contains example ML training and prediction pipelines which are explained in [this guide](docs/Pipelines.md).
-This will execute the pipeline using the chosen template on Vertex AI, namely it will:
+**Build containers:** The [model/](/model/) directory contains the code for custom training and prediction container images, including the model training script at [model/training/train.py](model/training/train.py).
+You can modify this to suit your own use case.
+Build the training and prediction container images and push them to Artifact Registry with:
-1. Compile the pipeline using the Kubeflow Pipelines SDK
-1. Copy the `assets` folders to Cloud Storage
-1. Trigger the pipeline with the help of `pipelines/trigger/main.py`
+```bash
+make build [ images="training prediction" ]
+```
-#### Pipeline input parameters
+Optionally specify the `images` variable to only build one of the images.
-The ML pipelines have input parameters. As you can see in the pipeline definition files (`pipelines/pipelines/<template>/<pipeline>/pipeline.py`), they have default values, and some of these default values are derived from environment variables (which in turn are defined in `env.sh`).
+**Execute pipelines:** Vertex AI Pipelines uses Kubeflow to orchestrate your training steps, so you'll need to:
-When triggering ad hoc runs in your dev/sandbox environment, or when running the E2E tests in CI, these default values are used. For the test and production deployments, the pipeline parameters are defined in the Terraform code for the Cloud Scheduler jobs (`terraform/envs/<env>/variables.auto.tfvars`).
+1. Compile the pipeline
+1. Build dependent Docker containers
+1. Run the pipeline in Vertex AI
-### Assets
+Execute the following command to run through steps 1-3:
-In each pipeline folder, there is an `assets` directory (`pipelines/pipelines/<template>/<pipeline>/assets/`).
-This can be used for any additional files that may be needed during execution of the pipelines.
-This directory is rsync'd to Google Cloud Storage when running a pipeline in the sandbox environment or as part of the CD pipeline (see [CI/CD setup](cloudbuild/README.md)).
+```bash
+make run pipeline=training [ build=<true|false> ] [ compile=<true|false> ] [ cache=<true|false> ] [ wait=<true|false> ]
+```
-## Testing
+The command has the following true/false flags:
-Unit tests and end-to-end (E2E) pipeline tests are performed using [pytest](https://docs.pytest.org).
-The unit tests for custom KFP components are run on each pull request, and the E2E tests are run on merge to the main branch.
-To run them on your local machine:
+- `build` - re-build containers for training & prediction code (limit by setting images=training to build only one of the containers)
+- `compile` - re-compile the pipeline to YAML
+- `cache` - cache pipeline steps
+- `wait` - wait for the pipeline run to finish before returning (synchronous) or return immediately (asynchronous)
```
-make setup-all-components
-make test-all-components
```
+**Shortcuts:** Use these commands which support the same options as `run` to run the training or prediction pipeline:
-Alternatively, only set up and install one of the component groups by running:
-```
-make setup-components GROUP=vertex-components
-make test-components GROUP=vertex-components
+```bash
+make training
+make prediction
```
-To run end-to-end tests of a single pipeline, you can use:
-
-```
-make e2e-tests pipeline=<training|prediction> [ enable_caching=<true|false> ] [ sync_assets=<true|false> ]
-```
+## Test
-There are also unit tests for the pipeline triggering code.
-These are not run as part of a CI/CD pipeline, as we don't expect this to be changed for each use case. To run them on your local machine:
+Unit tests are performed using [pytest](https://docs.pytest.org).
+The unit tests are run on each pull request.
+To run them locally, execute the following command, optionally choosing which packages to test:
```
-make test-trigger
+make test [ packages=<packages> ]
```
-## Customize pipelines
-
-### Update existing pipelines
-
-See the existing [XGBoost](pipelines/src/pipelines/xgboost) and [Tensorflow](pipelines/src/pipelines/tensorflow) pipelines as part of this template.
-Update `PIPELINE_TEMPLATE` to `xgboost` or `tensorflow` in [env.sh](env.sh.example) to specify whether to run the XGBoost pipelines or TensorFlow pipelines.
-Make changes to the ML pipelines and their associated tests.
-Refer to the [contribution instructions](CONTRIBUTING.md) for more information on committing changes.
-
-### Add new pipelines
+## Automation
-See [USAGE](USAGE.md) for guidelines on how to add new pipelines (e.g. other than XGBoost and TensorFlow).
+For details on setting up CI/CD, see [this guide](./docs/Automation.md).
-### Scheduling pipelines
+## Issues with Vertex AI Custom Code Service Agent
-Terraform is used to deploy Cloud Scheduler jobs that trigger the Vertex Pipeline runs. This is done by the CI/CD pipelines (see section below on CI/CD).
+If you run custom training code to train a custom-trained model, then the [Vertex AI Custom Code Service Agent](https://cloud.google.com/vertex-ai/docs/general/access-control) will be used.
+In those cases, the agent is created only when you first run custom training code, which means you cannot assign permissions to the agent (such as Artifact Registry reader) from the very beginning.
+To tackle this, you can use [this guide](https://github.com/teamdatatonic/terraform-google-vertex-cc-service-agent).
+That repository uses `curl` to create a simple custom training job, which triggers the creation of the service agent.
+You can also edit that code to use `gcloud ai custom-jobs create` to create the job if you want.
-To schedule pipelines into an environment, you will need to provide the `cloud_schedulers_config` variable to the Terraform configuration for the relevant environment. You can find an example of this configuration in [`terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example`](terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example). Copy this example file into the relevant directory for your environment (e.g. `terraform/envs/dev` for the dev environment) and remove the `.example` suffix. Adjust the configuration file as appropriate.
-
-### CI/CD
-
-There are five CI/CD pipelines located under the [cloudbuild](cloudbuild) directory:
-
-1. `pr-checks.yaml` - runs pre-commit checks and unit tests on the custom KFP components, and checks that the ML pipelines (training and prediction) can compile.
-2. `e2e-test.yaml` - copies the "assets" folders to the chosen GCS destination (versioned by git commit hash - see below) and runs end-to-end tests of the training and prediction pipeline.
-3. `release.yaml` - compiles the training and prediction pipelines, and copies the compiled pipelines, along with their respective `assets` directories, to Google Cloud Storage in the build / CI/CD environment. The Google Cloud Storage destination is namespaced using the git tag (see below). Following this, the E2E tests are run on the newly compiled pipelines.
-
-Below is a diagram of how the files are published in each environment in the `e2e-test.yaml` and `release.yaml` pipelines:
-
-```
-.                               <-- GCS directory set by _PIPELINE_PUBLISH_GCS_PATH
-└── TAG_NAME or GIT COMMIT HASH <-- Git tag used for the release (release.yaml) OR git commit hash (e2e-test.yaml)
-    ├── prediction
-    │   ├── assets
-    │   │   └── some_useful_file.json
-    │   └── prediction.json     <-- compiled prediction pipeline
-    └── training
-        ├── assets
-        │   └── training_task.py
-        └── training.json       <-- compiled training pipeline
-```
+Alternatively, Google has another method of triggering the creation of service agents, currently in pre-GA, that can be used instead of the above solution.
+You can read more about it [here](https://cloud.google.com/iam/docs/create-service-agents#create).
-4. `terraform-plan.yaml` - checks the Terraform configuration under `terraform/envs/<env>` (e.g. `terraform/envs/test`), and produces a summary of any proposed changes that will be applied on merge to the main branch.
-5. `terraform-apply.yaml` - applies the Terraform configuration under `terraform/envs/<env>` (e.g. `terraform/envs/test`).
+## Putting it all together
-For more details on setting up CI/CD, see the [separate README](cloudbuild/README.md).
+For a full walkthrough of the journey from changing the ML pipeline code to having it scheduled and running in production, please see the guide [here](./docs/Production.md).
-For a full walkthrough of the journey from changing the ML pipeline code to having it scheduled and running in production, please see the guide [here](docs/PRODUCTION.md).
+We value your contributions; see [this guide](./docs/Contribution.md) for contributing to this project.
diff --git a/USAGE.md b/USAGE.md
deleted file mode 100644
index 12688e02..00000000
--- a/USAGE.md
+++ /dev/null
@@ -1,72 +0,0 @@
-# USAGE.md
-
-## Introduction
-This document is for users who are editing these templates for their own purposes and would like to add their own pipelines. If you are looking to contribute to the open source templates themselves, please refer to [`CONTRIBUTING`](CONTRIBUTING.md).
-
-## How to add a new pipeline
-
-### Folder structure
-Add your new pipeline as a folder under the `pipelines` directory. Within your new pipeline folder you should have two separate folders: `training` and `prediction`. For both `training` and `prediction`, you should include the appropriate `pipeline.py` which describes the structure of the pipeline built from the components under `pipelines/kfp_components/`.
-Your pipeline may make use of supporting SQL scripts, and you can store these under a separate folder named `queries`.
-
-See below for an example folder structure:
-
-```
-kfp-template-0
-│
-├── pipelines
-│   │
-│   ├── new-pipeline
-│   │   │
-│   │   ├── training
-│   │   │   ├── queries
-│   │   │   └── pipeline.py
-│   │   │
-│   │   └── prediction
-│   │       ├── queries
-│   │       └── pipeline.py
-
-```
-
-## Testing
-Please refer to [`CONTRIBUTING.md`](CONTRIBUTING.md#Testing) for information on how to write both unit and end-to-end (E2E) pipeline tests for your pipeline. It is advisable to check that your pipelines pass all tests before being shared with others.
-
-## Compiling and running your pipeline using the Makefile
-To use the same rules listed in the Makefile, you will only need to update the environment variables. In [`env.sh`](env.sh), change the value of `PIPELINE_TEMPLATE` to the name of your new pipeline (this should be the same name as the new folder you have created for the pipeline).
-
-If you have organised your pipeline as described [above](#Folder-structure), you should then be able to use the Makefile as usual.
-
-Before compiling your pipeline, make sure to re-compile your pipeline components if you have made any changes:
-
-```
-make compile-components GROUP=<component group>
-```
-
-Or to re-compile all pipeline components to YAML:
-```
-make compile-all-components
-```
-
-You can compile your pipeline to `training.json` or `prediction.json` with the following command:
-```
-make compile pipeline=<training|prediction>
-```
-
-Or you can compile your pipeline, and then immediately run it with the following command:
-```
-make run pipeline=<training|prediction>
-```
diff --git a/cloudbuild/e2e-test.yaml b/cloudbuild/e2e-test.yaml
index 3aa9a513..dcfb1211 100644
--- a/cloudbuild/e2e-test.yaml
+++ b/cloudbuild/e2e-test.yaml
@@ -14,39 +14,53 @@
 ---
 steps:
-  # Copy assets files to a new directory in GCS
-  # (via a new local directory)
-  # Directory name = git commit hash
-  - name: gcr.io/cloud-builders/gsutil
-    entrypoint: bash
+  - id: build-training-image
+    name: gcr.io/kaniko-project/executor:latest
     args:
-      - -c
-      - |
-        mkdir -p ${COMMIT_SHA}/training/assets && \
-        mkdir -p ${COMMIT_SHA}/prediction/assets && \
-        cp -r pipelines/src/pipelines/${_PIPELINE_TEMPLATE}/training/assets ${COMMIT_SHA}/training/ && \
-        cp -r pipelines/src/pipelines/${_PIPELINE_TEMPLATE}/prediction/assets ${COMMIT_SHA}/prediction/ && \
-        gsutil cp -r ${COMMIT_SHA} ${_PIPELINE_PUBLISH_GCS_PATH}/${COMMIT_SHA}
+      - --context=dir:///workspace/model
+      - --destination=${_TEST_VERTEX_LOCATION}-docker.pkg.dev/${_TEST_VERTEX_PROJECT_ID}/vertex-images/training:${COMMIT_SHA}
+      - --target=training
+      - --cache=true
+      - --compressed-caching=false
+    waitFor: ['-']
+
+  - id: build-prediction-image
+    name: gcr.io/kaniko-project/executor:latest
+    args:
+      - --context=dir:///workspace/model
+      - --destination=${_TEST_VERTEX_LOCATION}-docker.pkg.dev/${_TEST_VERTEX_PROJECT_ID}/vertex-images/prediction:${COMMIT_SHA}
+      - --target=prediction
+      - --cache=true
+      - --compressed-caching=false
+    waitFor: ['-']
 
   # Install Python deps
   # Run end-to-end (E2E) pipeline tests on both pipelines
-  - name: python:3.7
+  - id: e2e-tests
+    name: python:3.9
    entrypoint: /bin/sh
    args:
    - -c
    - |
-      make setup && \
-      make e2e-tests pipeline=training enable_pipeline_caching=False sync_assets=false && \
-      make e2e-tests pipeline=prediction enable_pipeline_caching=False sync_assets=false
+      curl -sSL https://install.python-poetry.org | python3 - && \
+      export
PATH="/builder/home/.local/bin:$$PATH" && \ + make install && \ + make run pipeline=training build=false wait=true && \ + make run pipeline=prediction build=false wait=true env: + - ENABLE_PIPELINE_CACHING=${_TEST_ENABLE_PIPELINE_CACHING} - VERTEX_LOCATION=${_TEST_VERTEX_LOCATION} - VERTEX_PROJECT_ID=${_TEST_VERTEX_PROJECT_ID} + - BQ_LOCATION=${_TEST_BQ_LOCATION} - VERTEX_SA_EMAIL=${_TEST_VERTEX_SA_EMAIL} - VERTEX_CMEK_IDENTIFIER=${_TEST_VERTEX_CMEK_IDENTIFIER} - VERTEX_NETWORK=${_TEST_VERTEX_NETWORK} - - PIPELINE_TEMPLATE=${_PIPELINE_TEMPLATE} - VERTEX_PIPELINE_ROOT=${_TEST_VERTEX_PIPELINE_ROOT} - - PIPELINE_FILES_GCS_PATH=${_PIPELINE_PUBLISH_GCS_PATH}/${COMMIT_SHA} + - RESOURCE_SUFFIX=${COMMIT_SHA} + - CONTAINER_IMAGE_REGISTRY=${_TEST_VERTEX_LOCATION}-docker.pkg.dev/${_TEST_VERTEX_PROJECT_ID}/vertex-images + waitFor: + - build-training-image + - build-prediction-image options: logging: CLOUD_LOGGING_ONLY diff --git a/cloudbuild/pr-checks.yaml b/cloudbuild/pr-checks.yaml index 5e132a7a..a11710ff 100644 --- a/cloudbuild/pr-checks.yaml +++ b/cloudbuild/pr-checks.yaml @@ -14,28 +14,29 @@ --- steps: - # Install pipenv and deps, run pre-commit and unit tests + # Install poetry and deps, run pre-commit and unit tests # Then compile pipelines (to make sure they can compile) # need to run "git init" for pre-commit checks to work - - name: python:3.7 + - name: python:3.9 entrypoint: /bin/sh args: - -c - | - make setup && \ + curl -sSL https://install.python-poetry.org | python3 - && \ + export PATH="/builder/home/.local/bin:$$PATH" && \ + make install && \ git init && \ git add . && \ - make pre-commit && \ - make compile-pipeline pipeline=training && \ - make compile-pipeline pipeline=prediction && \ - make setup-all-components && \ - make test-all-components + make compile pipeline=training && \ + make compile pipeline=prediction && \ + make test env: - SKIP=terraform-fmt,git-dirty - - PIPELINE_TEMPLATE=${_PIPELINE_TEMPLATE} + - CONTAINER_IMAGE_REGISTRY=dummy_value + - RESOURCE_SUFFIX=default options: logging: CLOUD_LOGGING_ONLY -# Increase timeout to allow pipenv to resolve dependencies +# Increase timeout to allow poetry to resolve dependencies timeout: 3600s diff --git a/cloudbuild/release.yaml b/cloudbuild/release.yaml index 63e60115..a0e72668 100644 --- a/cloudbuild/release.yaml +++ b/cloudbuild/release.yaml @@ -14,36 +14,50 @@ --- steps: - # Install pipenv, install deps, compile pipelines - - name: python:3.7 - entrypoint: /bin/sh + - id: build-container-images + name: gcr.io/cloud-builders/docker + dir: /workspace/model + entrypoint: sh args: - -c - | - make setup && \ - make compile-all-components && \ - make compile-pipeline pipeline=training && \ - make compile-pipeline pipeline=prediction - env: - - PIPELINE_TEMPLATE=${_PIPELINE_TEMPLATE} + docker build --target=training -t training:latest . && \ + docker build --target=prediction -t prediction:latest . 
&& \ + for proj in ${_DESTINATION_PROJECTS} ; do \ + docker tag training:latest ${_VERTEX_LOCATION}-docker.pkg.dev/$$proj/vertex-images/training:${TAG_NAME} && \ + docker tag prediction:latest ${_VERTEX_LOCATION}-docker.pkg.dev/$$proj/vertex-images/prediction:${TAG_NAME} && \ + docker push ${_VERTEX_LOCATION}-docker.pkg.dev/$$proj/vertex-images/training:${TAG_NAME} && \ + docker push ${_VERTEX_LOCATION}-docker.pkg.dev/$$proj/vertex-images/prediction:${TAG_NAME}; \ + done - # Copy pipelines and files to a new directory in GCS - # (via a new local directory) - # Directory name = git commit hash - - name: gcr.io/cloud-builders/gsutil - entrypoint: bash + # Install poetry, install deps, compile pipelines + - id: compile-and-publish-pipelines + name: python:3.9 + entrypoint: /bin/sh args: - -c - | - mkdir -p ${TAG_NAME}/training/assets && \ - mkdir -p ${TAG_NAME}/prediction/assets && \ - cp pipelines/training.json ${TAG_NAME}/training/training.json && \ - cp pipelines/prediction.json ${TAG_NAME}/prediction/prediction.json && \ - cp -r pipelines/pipelines/${_PIPELINE_TEMPLATE}/training/assets ${TAG_NAME}/training/ && \ - cp -r pipelines/pipelines/${_PIPELINE_TEMPLATE}/prediction/assets ${TAG_NAME}/prediction/ && \ - for dest in ${_PIPELINE_PUBLISH_GCS_PATHS} ; do \ - gsutil cp -r ${TAG_NAME} $$dest ; \ + curl -sSL https://install.python-poetry.org | python3 - && \ + export PATH="/builder/home/.local/bin:$$PATH" && \ + make install && \ + for proj in ${_DESTINATION_PROJECTS} ; do \ + CONTAINER_IMAGE_REGISTRY=${_VERTEX_LOCATION}-docker.pkg.dev/$$proj/vertex-images \ + make compile pipeline=training && \ + make compile pipeline=prediction && \ + cd pipelines && \ + poetry run python -m pipelines.utils.upload_pipeline \ + --dest=https://${_VERTEX_LOCATION}-kfp.pkg.dev/$$proj/vertex-pipelines \ + --yaml=src/pipelines/training.yaml \ + --tag=latest \ + --tag=${TAG_NAME} && \ + poetry run python -m pipelines.utils.upload_pipeline \ + --dest=https://${_VERTEX_LOCATION}-kfp.pkg.dev/$$proj/vertex-pipelines \ + --yaml=src/pipelines/prediction.yaml \ + --tag=latest \ + --tag=${TAG_NAME}; \ done + env: + - RESOURCE_SUFFIX=default options: logging: CLOUD_LOGGING_ONLY diff --git a/cloudbuild/trigger-tests.yaml b/cloudbuild/trigger-tests.yaml index a3387438..07073738 100644 --- a/cloudbuild/trigger-tests.yaml +++ b/cloudbuild/trigger-tests.yaml @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,13 +13,17 @@ # limitations under the License. --- steps: - - name: 'python:3.7' + - name: 'python:3.9' + dir: /workspace/terraform/modules/cloudfunction + entrypoint: /bin/sh args: - '-c' - | - make setup && \ - make test-trigger - entrypoint: /bin/sh + pip install -r src/requirements.txt && \ + pip install -r src/requirements-dev.txt && \ + pytest + env: + - PYTHONPATH=. options: logging: CLOUD_LOGGING_ONLY diff --git a/components/.python-version b/components/.python-version new file mode 100644 index 00000000..9f3d4c17 --- /dev/null +++ b/components/.python-version @@ -0,0 +1 @@ +3.9.16 diff --git a/components/README.md b/components/README.md deleted file mode 100644 index e1b5fac9..00000000 --- a/components/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# Kubeflow Pipelines Components - -This directory contains multiple Python packages that are used to define pipeline components with the Kubeflow Pipelines SDK. 
Each subdirectory is a package with its own set of Python dependencies. Components with the same Python dependencies can reside in the same package, but components with different Python dependencies should be split into different Python packages.
-
-## Creating a new pipeline components package
-
-To create a new set of components (with different Python dependencies), copy one of the existing subdirectories and rename the different files and directories as appropriate (e.g. `bigquery-components` -> `my-new-components`). You will also need to update any references in the Python files themselves, as well as the `Pipfile` and `pyproject.toml`.
-
-Your Python dependencies should be defined in `Pipfile`, `pyproject.toml`, and in `packages_to_install` (in the `@component` decorator):
-
-- In `Pipfile`, add `kfp` to the `[packages]` section (pinned to a specific version), and add any dependencies that your component uses under `[dev-packages]` (each pinned to a specific version)
-- In `pyproject.toml`, add `kfp` to the `[dependencies]` section (pinned to a specific version), and add any dependencies that your component uses under `[project.optional-dependencies]` -> `tests` (each pinned to a specific version)
-- In `packages_to_install` (in the `@component` decorator used to define your component), add any dependencies that your component uses (each pinned to a specific version)
-
-Define your pipeline components using the `@component` decorator in Python files under `my-new-components/src/my-new-components`. You will need to update the `__init__.py` file to provide tests - see the [Kubeflow Pipelines documentation](https://www.kubeflow.org/docs/components/pipelines/v1/sdk-v2/python-function-components/#building-python-function-based-components) for more information about writing pipeline components.
-
-Finally, you will need to install this new components package into the [`pipelines`](../pipelines) package. In [`pipelines/Pipfile`](../pipelines/Pipfile), add the following line to the `packages` section:
-```ini
-my-new-components = {editable = true, path = "./../components/my-new-components"}
-```
-
-Once you have added this line to [`pipelines/Pipfile`](../pipelines/Pipfile), run `make setup` from the root of the repository to install the new components package into the `pipelines` package.
-
-## Testing components
-
-Unit tests for components are defined using pytest and should be created under `my-new-components/tests`. Take a look at the existing components to see examples of how you can write these tests and perform mocking/patching of KFP Artifact types.
-
-To run the unit tests, you will first need to set up the virtual environment for the new components by running `make setup-components GROUP=my-new-components` from the root of the repository. Once you have done this, you can run the unit tests using `make test-components GROUP=my-new-components`.
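To illustrate the pattern described above, a hedged sketch of a component defined with the `@component` decorator (the component name, base image, pinned package version, and logic are hypothetical; the version shown matches the `google-cloud-bigquery` pin used elsewhere in this repository):

```python
from kfp.v2.dsl import component


@component(
    base_image="python:3.9",
    packages_to_install=["google-cloud-bigquery==2.30.0"],  # pin versions, as above
)
def my_new_component(query: str, project_id: str) -> str:
    """Hypothetical component: run a BigQuery query and return the job ID."""
    from google.cloud import bigquery

    client = bigquery.Client(project=project_id)
    job = client.query(query)
    job.result()  # wait for the query to finish
    return job.job_id
```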
diff --git a/components/bigquery-components/.python-version b/components/bigquery-components/.python-version deleted file mode 100644 index f7e5aa84..00000000 --- a/components/bigquery-components/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.7.12 diff --git a/components/bigquery-components/Pipfile b/components/bigquery-components/Pipfile deleted file mode 100644 index cfe725a8..00000000 --- a/components/bigquery-components/Pipfile +++ /dev/null @@ -1,14 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] -kfp = "==1.8.21" - -[dev-packages] -google-cloud-bigquery = "==2.30.0" -pytest = ">=7.3.1,<8.0.0" - -[requires] -python_version = "3.7" diff --git a/components/bigquery-components/Pipfile.lock b/components/bigquery-components/Pipfile.lock deleted file mode 100644 index 602d5cba..00000000 --- a/components/bigquery-components/Pipfile.lock +++ /dev/null @@ -1,1220 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "5d62fbd9c31800d2f58a5c338a68119081148c391a4437bdc919329e2d2b3029" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "absl-py": { - "hashes": [ - "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47", - "sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d" - ], - "markers": "python_version >= '3.6'", - "version": "==1.4.0" - }, - "attrs": { - "hashes": [ - "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", - "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1.0" - }, - "cachetools": { - "hashes": [ - "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14", - "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4" - ], - "markers": "python_version ~= '3.7'", - "version": "==5.3.0" - }, - "certifi": { - "hashes": [ - "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", - "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" - ], - "markers": "python_version >= '3.6'", - "version": "==2023.5.7" - }, - "charset-normalizer": { - "hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - 
"sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", - "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - 
"sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.1.0" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - "cloudpickle": { - "hashes": [ - "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f", - "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5" - ], - "markers": "python_version >= '3.6'", - "version": "==2.2.1" - }, - "deprecated": { - "hashes": [ - "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d", - "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.13" - }, - "docstring-parser": { - "hashes": [ - "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682", - "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9" - ], - "markers": "python_version >= '3.6' and python_version < '4.0'", - "version": "==0.15" - }, - "fire": { - "hashes": [ - "sha256:a6b0d49e98c8963910021f92bba66f65ab440da2982b78eb1bbf95a0a34aacc6" - ], - "version": "==0.5.0" - }, - "google-api-core": { - "hashes": [ - "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22", - "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e" - ], - "markers": "python_version >= '3.7'", - "version": "==2.11.0" - }, - "google-api-python-client": { - "hashes": [ - "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9", - "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.12.11" - }, - "google-auth": { - "hashes": [ - "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc", - "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==2.17.3" - }, - "google-auth-httplib2": { - "hashes": [ - "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10", - "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac" - ], - "version": "==0.1.0" - }, - "google-cloud-core": { - "hashes": [ - 
"sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe", - "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.2" - }, - "google-cloud-storage": { - "hashes": [ - "sha256:83a90447f23d5edd045e0037982c270302e3aeb45fc1288d2c2ca713d27bad94", - "sha256:9b6ae7b509fc294bdacb84d0f3ea8e20e2c54a8b4bbe39c5707635fec214eff3" - ], - "markers": "python_version >= '3.7'", - "version": "==2.9.0" - }, - "google-crc32c": { - "hashes": [ - "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", - "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", - "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", - "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", - "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", - "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", - "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", - "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", - "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", - "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", - "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", - "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", - "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", - "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", - "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", - "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", - "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", - "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", - "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", - "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", - "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", - "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", - "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", - "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", - "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", - "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", - "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", - "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", - "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", - "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", - "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", - "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", - "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", - "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", - "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", - "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", - "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", - "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", - "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", - 
"sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", - "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", - "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", - "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", - "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", - "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", - "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", - "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", - "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", - "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", - "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", - "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", - "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", - "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", - "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", - "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", - "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", - "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", - "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", - "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", - "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", - "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", - "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", - "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", - "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", - "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", - "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", - "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", - "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.0" - }, - "google-resumable-media": { - "hashes": [ - "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93", - "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec" - ], - "markers": "python_version >= '3.7'", - "version": "==2.5.0" - }, - "googleapis-common-protos": { - "hashes": [ - "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44", - "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f" - ], - "markers": "python_version >= '3.7'", - "version": "==1.59.0" - }, - "httplib2": { - "hashes": [ - "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", - "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.22.0" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "markers": "python_version >= '3.5'", - "version": "==3.4" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", - 
"sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" - ], - "markers": "python_version < '3.8'", - "version": "==6.6.0" - }, - "importlib-resources": { - "hashes": [ - "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", - "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" - ], - "markers": "python_version < '3.9'", - "version": "==5.12.0" - }, - "jsonschema": { - "hashes": [ - "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d", - "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6" - ], - "markers": "python_version >= '3.7'", - "version": "==4.17.3" - }, - "kfp": { - "hashes": [ - "sha256:038d77ec9145ccfade95ab3b4b53c32668ae498fede06647ed0425d093819b1c" - ], - "index": "pypi", - "version": "==1.8.21" - }, - "kfp-pipeline-spec": { - "hashes": [ - "sha256:4cefae00ac50145cf862127202a8b8a783ed7504c773d7d7c517bd115283be25" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==0.1.16" - }, - "kfp-server-api": { - "hashes": [ - "sha256:482d71765ba57c003164dbb980a8cb1a18d234b578d064dc88dbeb3e4c7ab6de" - ], - "version": "==1.8.5" - }, - "kubernetes": { - "hashes": [ - "sha256:213befbb4e5aed95f94950c7eed0c2322fc5a2f8f40932e58d28fdd42d90836c", - "sha256:eb42333dad0bb5caf4e66460c6a4a1a36f0f057a040f35018f6c05a699baed86" - ], - "markers": "python_version >= '3.6'", - "version": "==25.3.0" - }, - "oauthlib": { - "hashes": [ - "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", - "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918" - ], - "markers": "python_version >= '3.6'", - "version": "==3.2.2" - }, - "pkgutil-resolve-name": { - "hashes": [ - "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174", - "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e" - ], - "markers": "python_version < '3.9'", - "version": "==1.3.10" - }, - "protobuf": { - "hashes": [ - "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7", - "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c", - "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2", - "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b", - "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050", - "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9", - "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7", - "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454", - "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480", - "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469", - "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c", - "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e", - "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db", - "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905", - "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b", - "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86", - "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4", - "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402", - "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7", - "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4", - 
"sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99", - "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee" - ], - "markers": "python_version >= '3.7'", - "version": "==3.20.3" - }, - "pyasn1": { - "hashes": [ - "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", - "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.5.0" - }, - "pyasn1-modules": { - "hashes": [ - "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c", - "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.3.0" - }, - "pydantic": { - "hashes": [ - "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e", - "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6", - "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd", - "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca", - "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b", - "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a", - "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245", - "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d", - "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee", - "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1", - "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3", - "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d", - "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5", - "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914", - "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd", - "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1", - "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e", - "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e", - "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a", - "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd", - "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f", - "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209", - "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d", - "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a", - "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143", - "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918", - "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52", - "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e", - "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f", - "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e", - "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb", - "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe", - "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe", - "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d", - 
"sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209", - "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af" - ], - "markers": "python_version >= '3.7'", - "version": "==1.10.7" - }, - "pyparsing": { - "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" - ], - "markers": "python_version >= '3.1'", - "version": "==3.0.9" - }, - "pyrsistent": { - "hashes": [ - "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8", - "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440", - "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a", - "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c", - "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3", - "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393", - "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9", - "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da", - "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf", - "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64", - "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a", - "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3", - "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98", - "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2", - "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8", - "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf", - "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc", - "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7", - "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28", - "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2", - "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b", - "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a", - "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64", - "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19", - "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1", - "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9", - "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.19.3" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.2" - }, - "pyyaml": { - "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - 
"sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", - "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", - "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", - "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==5.4.1" - }, - "requests": { - "hashes": [ - "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294", - "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4" - ], - "markers": "python_version >= '3.7'", - "version": "==2.30.0" - }, - "requests-oauthlib": { - "hashes": [ - "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5", - "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.3.1" - }, - "requests-toolbelt": { - "hashes": [ - "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", - "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.1" - }, - "rsa": { - "hashes": [ - "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", - "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" - ], - "markers": "python_version >= '3.6'", - "version": "==4.9" - }, - "setuptools": { - "hashes": [ - "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b", - "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990" - ], - "markers": "python_version >= '3.7'", - "version": "==67.7.2" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "strip-hints": { - "hashes": [ - 
"sha256:307c2bd147cd35997c8ed2e9a3bdca48ad9c9617e04ea46599095201b4ce998f" - ], - "version": "==0.1.10" - }, - "tabulate": { - "hashes": [ - "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", - "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f" - ], - "markers": "python_version >= '3.7'", - "version": "==0.9.0" - }, - "termcolor": { - "hashes": [ - "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475", - "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.0" - }, - "typer": { - "hashes": [ - "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2", - "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee" - ], - "markers": "python_version >= '3.6'", - "version": "==0.9.0" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version < '3.9'", - "version": "==4.5.0" - }, - "uritemplate": { - "hashes": [ - "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f", - "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==3.0.1" - }, - "urllib3": { - "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" - }, - "websocket-client": { - "hashes": [ - "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40", - "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.1" - }, - "wheel": { - "hashes": [ - "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873", - "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247" - ], - "markers": "python_version >= '3.7'", - "version": "==0.40.0" - }, - "wrapt": { - "hashes": [ - "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", - "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", - "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", - "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", - "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", - "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", - "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", - "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", - "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", - "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", - "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", - "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", - "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", - "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", - "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", - "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", - 
"sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", - "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", - "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", - "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", - "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", - "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", - "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", - "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", - "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", - "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", - "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", - "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", - "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", - "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", - "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", - "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", - "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", - "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", - "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", - "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", - "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", - "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", - "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", - "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", - "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", - "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", - "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", - "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", - "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", - "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", - "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", - "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", - "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", - "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", - "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", - "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", - "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", - "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", - "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", - "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", - "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", - "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", - "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", - "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", - "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", - "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", - 
"sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", - "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", - "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", - "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", - "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", - "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", - "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", - "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", - "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", - "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", - "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", - "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", - "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.15.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - }, - "develop": { - "cachetools": { - "hashes": [ - "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14", - "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4" - ], - "markers": "python_version ~= '3.7'", - "version": "==5.3.0" - }, - "certifi": { - "hashes": [ - "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", - "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" - ], - "markers": "python_version >= '3.6'", - "version": "==2023.5.7" - }, - "charset-normalizer": { - "hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - 
"sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", - "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - 
"sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.1.0" - }, - "exceptiongroup": { - "hashes": [ - "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" - }, - "google-api-core": { - "hashes": [ - "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22", - "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e" - ], - "markers": "python_version >= '3.7'", - "version": "==2.11.0" - }, - "google-auth": { - "hashes": [ - "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc", - "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==2.17.3" - }, - "google-cloud-bigquery": { - "hashes": [ - "sha256:f6546ee96d57f45eaac8c4b24b52228a7f21a33669730efc9e69569bc88a04ad", - "sha256:fdb57aa5e8af7d692eb5835739d9339dc1b5e89836430fe88dd1ddc1b0047639" - ], - "index": "pypi", - "version": "==2.30.0" - }, - "google-cloud-core": { - "hashes": [ - "sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe", - "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.2" - }, - "google-crc32c": { - "hashes": [ - "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", - "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", - "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", - "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", - "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", - "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", - "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", - "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", - "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", - "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", - "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", - "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", - "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", - "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", - "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", - "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", - "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", - "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", - 
"sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", - "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", - "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", - "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", - "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", - "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", - "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", - "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", - "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", - "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", - "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", - "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", - "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", - "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", - "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", - "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", - "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", - "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", - "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", - "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", - "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", - "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", - "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", - "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", - "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", - "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", - "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", - "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", - "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", - "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", - "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", - "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", - "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", - "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", - "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", - "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", - "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", - "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", - "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", - "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", - "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", - "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", - "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", - "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", - "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", - "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", - 
"sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", - "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", - "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", - "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.0" - }, - "google-resumable-media": { - "hashes": [ - "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93", - "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec" - ], - "markers": "python_version >= '3.7'", - "version": "==2.5.0" - }, - "googleapis-common-protos": { - "hashes": [ - "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44", - "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f" - ], - "markers": "python_version >= '3.7'", - "version": "==1.59.0" - }, - "grpcio": { - "hashes": [ - "sha256:02000b005bc8b72ff50c477b6431e8886b29961159e8b8d03c00b3dd9139baed", - "sha256:031bbd26656e0739e4b2c81c172155fb26e274b8d0312d67aefc730bcba915b6", - "sha256:1209d6b002b26e939e4c8ea37a3d5b4028eb9555394ea69fb1adbd4b61a10bb8", - "sha256:125ed35aa3868efa82eabffece6264bf638cfdc9f0cd58ddb17936684aafd0f8", - "sha256:1382bc499af92901c2240c4d540c74eae8a671e4fe9839bfeefdfcc3a106b5e2", - "sha256:16bca8092dd994f2864fdab278ae052fad4913f36f35238b2dd11af2d55a87db", - "sha256:1c59d899ee7160638613a452f9a4931de22623e7ba17897d8e3e348c2e9d8d0b", - "sha256:1d109df30641d050e009105f9c9ca5a35d01e34d2ee2a4e9c0984d392fd6d704", - "sha256:1fa7d6ddd33abbd3c8b3d7d07c56c40ea3d1891ce3cd2aa9fa73105ed5331866", - "sha256:21c4a1aae861748d6393a3ff7867473996c139a77f90326d9f4104bebb22d8b8", - "sha256:224166f06ccdaf884bf35690bf4272997c1405de3035d61384ccb5b25a4c1ca8", - "sha256:2262bd3512ba9e9f0e91d287393df6f33c18999317de45629b7bd46c40f16ba9", - "sha256:2585b3c294631a39b33f9f967a59b0fad23b1a71a212eba6bc1e3ca6e6eec9ee", - "sha256:27fb030a4589d2536daec5ff5ba2a128f4f155149efab578fe2de2cb21596d3d", - "sha256:30fbbce11ffeb4f9f91c13fe04899aaf3e9a81708bedf267bf447596b95df26b", - "sha256:3930669c9e6f08a2eed824738c3d5699d11cd47a0ecc13b68ed11595710b1133", - "sha256:3b170e441e91e4f321e46d3cc95a01cb307a4596da54aca59eb78ab0fc03754d", - "sha256:3db71c6f1ab688d8dfc102271cedc9828beac335a3a4372ec54b8bf11b43fd29", - "sha256:48cb7af77238ba16c77879009003f6b22c23425e5ee59cb2c4c103ec040638a5", - "sha256:49eace8ea55fbc42c733defbda1e4feb6d3844ecd875b01bb8b923709e0f5ec8", - "sha256:533eaf5b2a79a3c6f35cbd6a095ae99cac7f4f9c0e08bdcf86c130efd3c32adf", - "sha256:5942a3e05630e1ef5b7b5752e5da6582460a2e4431dae603de89fc45f9ec5aa9", - "sha256:62117486460c83acd3b5d85c12edd5fe20a374630475388cfc89829831d3eb79", - "sha256:650f5f2c9ab1275b4006707411bb6d6bc927886874a287661c3c6f332d4c068b", - "sha256:6dc1e2c9ac292c9a484ef900c568ccb2d6b4dfe26dfa0163d5bc815bb836c78d", - "sha256:73c238ef6e4b64272df7eec976bb016c73d3ab5a6c7e9cd906ab700523d312f3", - "sha256:775a2f70501370e5ba54e1ee3464413bff9bd85bd9a0b25c989698c44a6fb52f", - "sha256:860fcd6db7dce80d0a673a1cc898ce6bc3d4783d195bbe0e911bf8a62c93ff3f", - "sha256:87f47bf9520bba4083d65ab911f8f4c0ac3efa8241993edd74c8dd08ae87552f", - "sha256:960b176e0bb2b4afeaa1cd2002db1e82ae54c9b6e27ea93570a42316524e77cf", - "sha256:a7caf553ccaf715ec05b28c9b2ab2ee3fdb4036626d779aa09cf7cbf54b71445", - "sha256:a947d5298a0bbdd4d15671024bf33e2b7da79a70de600ed29ba7e0fef0539ebb", - "sha256:a97b0d01ae595c997c1d9d8249e2d2da829c2d8a4bdc29bb8f76c11a94915c9a", - 
"sha256:b7655f809e3420f80ce3bf89737169a9dce73238af594049754a1128132c0da4", - "sha256:c33744d0d1a7322da445c0fe726ea6d4e3ef2dfb0539eadf23dce366f52f546c", - "sha256:c55a9cf5cba80fb88c850915c865b8ed78d5e46e1f2ec1b27692f3eaaf0dca7e", - "sha256:d2f62fb1c914a038921677cfa536d645cb80e3dd07dc4859a3c92d75407b90a5", - "sha256:d8ae6e0df3a608e99ee1acafaafd7db0830106394d54571c1ece57f650124ce9", - "sha256:e355ee9da9c1c03f174efea59292b17a95e0b7b4d7d2a389265f731a9887d5a9", - "sha256:e3e526062c690517b42bba66ffe38aaf8bc99a180a78212e7b22baa86902f690", - "sha256:eb0807323572642ab73fd86fe53d88d843ce617dd1ddf430351ad0759809a0ae", - "sha256:ebff0738be0499d7db74d20dca9f22a7b27deae31e1bf92ea44924fd69eb6251", - "sha256:ed36e854449ff6c2f8ee145f94851fe171298e1e793f44d4f672c4a0d78064e7", - "sha256:ed3d458ded32ff3a58f157b60cc140c88f7ac8c506a1c567b2a9ee8a2fd2ce54", - "sha256:f4a7dca8ccd8023d916b900aa3c626f1bd181bd5b70159479b142f957ff420e4" - ], - "markers": "python_version >= '3.7'", - "version": "==1.54.0" - }, - "grpcio-status": { - "hashes": [ - "sha256:96968314e0c8576b2b631be3917c665964c8018900cb980d58a736fbff828578", - "sha256:b50305d52c0df6169493cca5f2e39b9b4d773b3f30d4a7a6b6dd7c18cb89007c" - ], - "version": "==1.54.0" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "markers": "python_version >= '3.5'", - "version": "==3.4" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", - "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" - ], - "markers": "python_version < '3.8'", - "version": "==6.6.0" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "packaging": { - "hashes": [ - "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", - "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1" - }, - "pluggy": { - "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" - }, - "proto-plus": { - "hashes": [ - "sha256:0e8cda3d5a634d9895b75c573c9352c16486cb75deb0e078b5fda34db4243165", - "sha256:de34e52d6c9c6fcd704192f09767cb561bb4ee64e70eede20b0834d841f0be4d" - ], - "markers": "python_version >= '3.6'", - "version": "==1.22.2" - }, - "protobuf": { - "hashes": [ - "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7", - "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c", - "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2", - "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b", - "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050", - "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9", - "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7", - "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454", - "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480", - 
"sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469", - "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c", - "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e", - "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db", - "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905", - "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b", - "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86", - "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4", - "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402", - "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7", - "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4", - "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99", - "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee" - ], - "markers": "python_version >= '3.7'", - "version": "==3.20.3" - }, - "pyasn1": { - "hashes": [ - "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", - "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.5.0" - }, - "pyasn1-modules": { - "hashes": [ - "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c", - "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.3.0" - }, - "pytest": { - "hashes": [ - "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", - "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" - ], - "index": "pypi", - "version": "==7.3.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.2" - }, - "requests": { - "hashes": [ - "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294", - "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4" - ], - "markers": "python_version >= '3.7'", - "version": "==2.30.0" - }, - "rsa": { - "hashes": [ - "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", - "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" - ], - "markers": "python_version >= '3.6'", - "version": "==4.9" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version < '3.9'", - "version": 
"==4.5.0" - }, - "urllib3": { - "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - } -} diff --git a/components/bigquery-components/README.md b/components/bigquery-components/README.md deleted file mode 100644 index 2fce84dd..00000000 --- a/components/bigquery-components/README.md +++ /dev/null @@ -1,25 +0,0 @@ - - -# BigQuery Components - -A python package which provides common BigQuery components for interacting with BigQuery. -Currently, the following components are implemented: - -- `bq_query_to_table`: Execute a SQL query and persist results in a table. -- `extract_bq_to_dataset`: Export a table to a KubeFlow dataset on Cloud Storage. - -These components either augment, extend, or add new functionalities that aren't found in [Google Cloud Pipeline Components list](https://cloud.google.com/vertex-ai/docs/pipelines/gcpc-list). diff --git a/components/bigquery-components/pyproject.toml b/components/bigquery-components/pyproject.toml deleted file mode 100644 index 0ea3c6dc..00000000 --- a/components/bigquery-components/pyproject.toml +++ /dev/null @@ -1,45 +0,0 @@ -[project] -name = "bigquery-components" -authors = [ - {name = "Example User", email = "user@example.com"}, -] -description = "BigQuery components" -readme = "README.md" -classifiers = [ - "Development Status :: 3 - Alpha", - "Intended Audience :: Developers", - "Programming Language :: Python :: 3.7", -] -requires-python = ">=3.7" -dynamic = ["version"] -dependencies = [ - "kfp == 1.8.21", -] - -[project.optional-dependencies] -tests = [ - "google-cloud-bigquery == 2.30.0", - "pytest >= 7.3.1,<8.0.0", -] - -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta:__legacy__" - -[tool.flake8] -max-line-length = 88 -per-file-ignores = [ - "E203", - "F841" -] -ignore = [ - "E203", - "F841", -] - -[tool.pytest.ini_options] -pythonpath = [ - "src" -] -testpaths = "tests" -junit_family = "xunit2" diff --git a/components/bigquery-components/src/bigquery_components/__init__.py b/components/bigquery-components/src/bigquery_components/__init__.py deleted file mode 100644 index 2e3870ac..00000000 --- a/components/bigquery-components/src/bigquery_components/__init__.py +++ /dev/null @@ -1,9 +0,0 @@ -from .bq_query_to_table import bq_query_to_table -from .extract_bq_to_dataset import extract_bq_to_dataset - - -__version__ = "0.0.1" -__all__ = [ - "bq_query_to_table", - "extract_bq_to_dataset", -] diff --git a/components/bigquery-components/src/bigquery_components/bq_query_to_table.py b/components/bigquery-components/src/bigquery_components/bq_query_to_table.py deleted file mode 100644 index 9530bd92..00000000 --- a/components/bigquery-components/src/bigquery_components/bq_query_to_table.py +++ /dev/null @@ -1,73 +0,0 @@ -# Copyright 2022 Google LLC - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp.v2.dsl import component - - -@component( - base_image="python:3.7", - packages_to_install=["google-cloud-bigquery==2.30.0"], -) -def bq_query_to_table( - query: str, - bq_client_project_id: str, - destination_project_id: str, - dataset_id: str = None, - table_id: str = None, - dataset_location: str = "EU", - query_job_config: dict = None, -) -> None: - """ - Run a SQL query and persist the results in a new BigQuery table. - Args: - query (str): SQL query to execute; results are saved in a BigQuery table - bq_client_project_id (str): project id that will be used by the bq client - destination_project_id (str): project id where the BQ table will be created - dataset_id (str): dataset id where the BQ table will be created - table_id (str): table name (without project id and dataset id) - dataset_location (str): bq dataset location - query_job_config (dict): dict containing optional parameters - for the bq query job; there is no need to specify the destination param. - See available parameters here - https://googleapis.dev/python/bigquery/latest/generated/google.cloud.bigquery.job.QueryJobConfig.html - Returns: - None - """ - from google.cloud.exceptions import GoogleCloudError - from google.cloud import bigquery - import logging - - logging.getLogger().setLevel(logging.INFO) - - if (dataset_id is not None) and (table_id is not None): - dest_table_ref = f"{destination_project_id}.{dataset_id}.{table_id}" - else: - dest_table_ref = None - if query_job_config is None: - query_job_config = {} - job_config = bigquery.QueryJobConfig(destination=dest_table_ref, **query_job_config) - - bq_client = bigquery.client.Client( - project=bq_client_project_id, location=dataset_location - ) - query_job = bq_client.query(query, job_config=job_config) - - try: - query_job.result()  # wait for the query job to complete - logging.info(f"BQ table {dest_table_ref} created") - except GoogleCloudError as e: - logging.error(e) - logging.error(query_job.error_result) - logging.error(query_job.errors) - raise e diff --git a/components/bigquery-components/src/bigquery_components/extract_bq_to_dataset.py b/components/bigquery-components/src/bigquery_components/extract_bq_to_dataset.py deleted file mode 100644 index 3effe09a..00000000 --- a/components/bigquery-components/src/bigquery_components/extract_bq_to_dataset.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright 2022 Google LLC - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
- -from kfp.v2.dsl import Dataset, Output, component - - -@component( - base_image="python:3.7", - packages_to_install=["google-cloud-bigquery==2.30.0"], -) -def extract_bq_to_dataset( - bq_client_project_id: str, - source_project_id: str, - dataset_id: str, - table_name: str, - dataset: Output[Dataset], - destination_gcs_uri: str = None, - dataset_location: str = "EU", - extract_job_config: dict = None, - skip_if_exists: bool = True, -): - """ - Extract a BQ table to a dataset on GCS. - Args: - bq_client_project_id (str): project id that will be used by the bq client - source_project_id (str): project id from which the BQ table will be extracted - dataset_id (str): dataset id from which the BQ table will be extracted - table_name (str): table name (without project id and dataset id) - dataset (Output[Dataset]): output dataset artifact generated by the operation, - this parameter will be passed automatically by the orchestrator - destination_gcs_uri (str): GCS URI to use for saving query results (optional). - dataset_location (str): bq dataset location. Defaults to "EU". - extract_job_config (dict): dict containing optional parameters - required by the bq extract operation. Defaults to None. - See available parameters here - https://googleapis.dev/python/bigquery/latest/generated/google.cloud.bigquery.job.ExtractJobConfig.html # noqa - skip_if_exists (bool): if True, skip the extraction when the destination - path already exists. Defaults to True. - - Returns: - None - """ - - import logging - from pathlib import Path - from google.cloud.exceptions import GoogleCloudError - from google.cloud import bigquery - - # set uri of output dataset if destination_gcs_uri is provided - if destination_gcs_uri: - dataset.uri = destination_gcs_uri - - logging.info(f"Checking if destination exists: {dataset.path}") - if Path(dataset.path).exists() and skip_if_exists: - logging.info("Destination already exists, skipping table extraction!") - return - - full_table_id = f"{source_project_id}.{dataset_id}.{table_name}" - table = bigquery.table.Table(table_ref=full_table_id) - - if extract_job_config is None: - extract_job_config = {} - job_config = bigquery.job.ExtractJobConfig(**extract_job_config) - - logging.info(f"Extracting table {table} to {dataset.uri}") - client = bigquery.client.Client( - project=bq_client_project_id, location=dataset_location - ) - extract_job = client.extract_table( - table, - dataset.uri, - job_config=job_config, - ) - - try: - result = extract_job.result() - logging.info(f"Table extracted, result: {result}") - except GoogleCloudError as e: - logging.error(e) - logging.error(extract_job.error_result) - logging.error(extract_job.errors) - raise e diff --git a/components/bigquery-components/tests/test_dummy.py b/components/bigquery-components/tests/test_dummy.py deleted file mode 100644 index 10cf3ad0..00000000 --- a/components/bigquery-components/tests/test_dummy.py +++ /dev/null @@ -1,2 +0,0 @@ -def test_dummy(): - pass diff --git a/components/poetry.lock b/components/poetry.lock new file mode 100644 index 00000000..29592fb5 --- /dev/null +++ b/components/poetry.lock @@ -0,0 +1,1219 @@ +# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand.
+ +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = 
"sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = 
"charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "docstring-parser" +version = "0.15" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.15-py3-none-any.whl", hash = "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"}, + {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "google-api-core" +version = "2.15.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.25.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.25.2.tar.gz", hash = "sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40"}, + {file = "google_auth-2.25.2-py2.py3-none-any.whl", hash = "sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.38.1" +description = "Vertex AI API client library" +optional = false +python-versions = ">=3.8" +files = [ + {file = 
"google-cloud-aiplatform-1.38.1.tar.gz", hash = "sha256:30439d914bb028443c0506cc0e6dd825cff5401aeb8233e13d8cfd77c3c87da1"}, + {file = "google_cloud_aiplatform-1.38.1-py2.py3-none-any.whl", hash = "sha256:5e1fcd1068dd2c4f0fc89aa616e34a8b9434eaa72ea6216f5036ef26f08bd448"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.32.0,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} +google-cloud-bigquery = ">=1.15.0,<4.0.0dev" +google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" +google-cloud-storage = ">=1.32.0,<3.0.0dev" +packaging = ">=14.3" +proto-plus = ">=1.22.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +shapely = "<3.0.0dev" + +[package.extras] +autologging = ["mlflow (>=1.27.0,<=2.1.1)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)"] +endpoint = ["requests (>=2.28.1)"] +full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] +metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] +pipelines = ["pyyaml (==5.3.1)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<0.103.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)"] +tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"] +testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "ipython", "kfp", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<=2.12.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "urllib3 
(>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] +vizier = ["google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] + +[[package]] +name = "google-cloud-bigquery" +version = "3.14.1" +description = "Google BigQuery API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-bigquery-3.14.1.tar.gz", hash = "sha256:aa15bd86f79ea76824c7d710f5ae532323c4b3ba01ef4abff42d4ee7a2e9b142"}, + {file = "google_cloud_bigquery-3.14.1-py2.py3-none-any.whl", hash = "sha256:a8ded18455da71508db222b7c06197bc12b6dbc6ed5b0b64e7007b76d7016957"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-cloud-core = ">=1.6.0,<3.0.0dev" +google-resumable-media = ">=0.6.0,<3.0dev" +packaging = ">=20.0.0" +python-dateutil = ">=2.7.2,<3.0dev" +requests = ">=2.21.0,<3.0.0dev" + +[package.extras] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] +ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] +ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] +opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-pipeline-components" +version = "2.8.0" +description = "This SDK enables a set of First Party (Google owned) pipeline components that allow users to take their experience from Vertex AI SDK and other Google Cloud services and create a corresponding pipeline using KFP or Managed Pipelines." 
+optional = false +python-versions = ">=3.7.0,<3.12.0" +files = [ + {file = "google_cloud_pipeline_components-2.8.0-py3-none-any.whl", hash = "sha256:2b21bc6f65e4c47f21a5c5de472a770fe8ca7c639252031fe2f4c054a6095f4a"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-cloud-aiplatform = ">=1.14.0,<2" +Jinja2 = "3.1.2" +kfp = ">=2.0.0b10,<=2.4.0" + +[package.extras] +docs = ["autodocsumm (==0.2.9)", "commonmark (==0.9.1)", "grpcio-status (<=1.47.0)", "m2r2 (==0.3.2)", "protobuf (>=3.19.0,<4.0.0dev)", "sphinx (==5.0.2)", "sphinx-immaterial (==0.9.0)", "sphinx-notfound-page (==0.8.3)", "sphinx-rtd-theme (==1.0.0)"] +tests = ["flake8 (>=3.0.0)", "mock (>=4.0.0)", "pytest (>=6.0.0)"] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.11.0" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-resource-manager-1.11.0.tar.gz", hash = "sha256:a64ba6bb595634ecd2472b8b0322e8f012a76327756659a2dde9f392d7fa1af2"}, + {file = "google_cloud_resource_manager-1.11.0-py2.py3-none-any.whl", hash = "sha256:bafde909b1d434a620eefcd144b14fcccb72f268afcf158c5bcfcdce5e04a72b"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-storage" +version = "2.14.0" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, + {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=2.23.3,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<5.0.0dev)"] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = 
"google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = 
"google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-resumable-media" +version = "2.7.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = 
"googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.60.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, + {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, + {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, + {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, + {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, + {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, + {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, + {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, + {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, + {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, + {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, + {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, + {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, + {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, + {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, + {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, + {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = 
"sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, + {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, + {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, + {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, + {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, + {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, + {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, + {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.60.0)"] + +[[package]] +name = "grpcio-status" +version = "1.48.2" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, + {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.48.2" +protobuf = ">=3.12.0" + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + 
+[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "kfp" +version = "2.4.0" +description = "Kubeflow Pipelines SDK" +optional = false +python-versions = ">=3.7.0,<3.12.0" +files = [ + {file = "kfp-2.4.0.tar.gz", hash = "sha256:44c25ce80b948629bf0fb0815b49d8d97039a825597a4378078bbaa0610aef69"}, +] + +[package.dependencies] +click = ">=8.0.0,<9" +docstring-parser = ">=0.7.3,<1" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.6.1,<3" +google-cloud-storage = ">=2.2.1,<3" +kfp-pipeline-spec = "0.2.2" +kfp-server-api = ">=2.0.0,<2.1.0" +kubernetes = ">=8.0.0,<27" +protobuf = ">=3.13.0,<4" +PyYAML = ">=5.3,<7" +requests-toolbelt = ">=0.8.0,<1" +tabulate = ">=0.8.6,<1" +urllib3 = "<2.0.0" + +[package.extras] +all = ["docker", "kfp-kubernetes (<2)"] +kubernetes = ["kfp-kubernetes (<2)"] + +[[package]] +name = "kfp-pipeline-spec" +version = "0.2.2" +description = "Kubeflow Pipelines pipeline spec" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "kfp_pipeline_spec-0.2.2-py3-none-any.whl", hash = "sha256:e83b58ffed3f7ca154d62ab20e14dc9a1a3c9dad589e856dd2b421e226f3b8d0"}, +] + +[package.dependencies] +protobuf = ">=3.13.0,<4" + +[[package]] +name = "kfp-server-api" +version = "2.0.5" +description = "Kubeflow Pipelines API" +optional = false +python-versions = "*" +files = [ + {file = "kfp-server-api-2.0.5.tar.gz", hash = "sha256:c9cfbf0e87271d3bfe96e5ecc9ffbdd6ab566bc1c9a9ddc2a39d7698a16e26ff"}, +] + +[package.dependencies] +certifi = "*" +python-dateutil = "*" +six = ">=1.10" +urllib3 = ">=1.15" + +[[package]] +name = "kubernetes" +version = "26.1.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-26.1.0-py2.py3-none-any.whl", hash = "sha256:e3db6800abf7e36c38d2629b5cb6b74d10988ee0cba6fba45595a7cbe60c0042"}, + {file = "kubernetes-26.1.0.tar.gz", hash = "sha256:5854b0c508e8d217ca205591384ab58389abdae608576f9c9afc35a3c76a366c"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +setuptools = ">=21.0.0" +six = ">=1.9.0" +urllib3 = ">=1.24.2" +websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = 
"MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "numpy" +version = "1.26.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = 
"sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." +optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "3.20.3" +description = "Protocol Buffers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = 
"sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = "protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, +] + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.1" 
+description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "requests-toolbelt" +version = "0.10.1" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-0.10.1.tar.gz", hash = "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d"}, + {file = "requests_toolbelt-0.10.1-py2.py3-none-any.whl", hash = "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "69.0.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shapely" +version = "2.0.2" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6ca8cffbe84ddde8f52b297b53f8e0687bd31141abb2c373fd8a9f032df415d6"}, + {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:baa14fc27771e180c06b499a0a7ba697c7988c7b2b6cba9a929a19a4d2762de3"}, + {file = "shapely-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36480e32c434d168cdf2f5e9862c84aaf4d714a43a8465ae3ce8ff327f0affb7"}, + 
{file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef753200cbffd4f652efb2c528c5474e5a14341a473994d90ad0606522a46a2"}, + {file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9a41ff4323fc9d6257759c26eb1cf3a61ebc7e611e024e6091f42977303fd3a"}, + {file = "shapely-2.0.2-cp310-cp310-win32.whl", hash = "sha256:72b5997272ae8c25f0fd5b3b967b3237e87fab7978b8d6cd5fa748770f0c5d68"}, + {file = "shapely-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:34eac2337cbd67650248761b140d2535855d21b969d76d76123317882d3a0c1a"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b0c052709c8a257c93b0d4943b0b7a3035f87e2d6a8ac9407b6a992d206422f"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d217e56ae067e87b4e1731d0dc62eebe887ced729ba5c2d4590e9e3e9fdbd88"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94ac128ae2ab4edd0bffcd4e566411ea7bdc738aeaf92c32a8a836abad725f9f"}, + {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3ee28f5e63a130ec5af4dc3c4cb9c21c5788bb13c15e89190d163b14f9fb89"}, + {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:737dba15011e5a9b54a8302f1748b62daa207c9bc06f820cd0ad32a041f1c6f2"}, + {file = "shapely-2.0.2-cp311-cp311-win32.whl", hash = "sha256:45ac6906cff0765455a7b49c1670af6e230c419507c13e2f75db638c8fc6f3bd"}, + {file = "shapely-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dc9342fc82e374130db86a955c3c4525bfbf315a248af8277a913f30911bed9e"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:06f193091a7c6112fc08dfd195a1e3846a64306f890b151fa8c63b3e3624202c"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eebe544df5c018134f3c23b6515877f7e4cd72851f88a8d0c18464f414d141a2"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e92e7c255f89f5cdf777690313311f422aa8ada9a3205b187113274e0135cd8"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be46d5509b9251dd9087768eaf35a71360de6afac82ce87c636990a0871aa18b"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5533a925d8e211d07636ffc2fdd9a7f9f13d54686d00577eeb11d16f00be9c4"}, + {file = "shapely-2.0.2-cp312-cp312-win32.whl", hash = "sha256:084b023dae8ad3d5b98acee9d3bf098fdf688eb0bb9b1401e8b075f6a627b611"}, + {file = "shapely-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:ea84d1cdbcf31e619d672b53c4532f06253894185ee7acb8ceb78f5f33cbe033"}, + {file = "shapely-2.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed1e99702125e7baccf401830a3b94d810d5c70b329b765fe93451fe14cf565b"}, + {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7d897e6bdc6bc64f7f65155dbbb30e49acaabbd0d9266b9b4041f87d6e52b3a"}, + {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0521d76d1e8af01e712db71da9096b484f081e539d4f4a8c97342e7971d5e1b4"}, + {file = "shapely-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:5324be299d4c533ecfcfd43424dfd12f9428fd6f12cda38a4316da001d6ef0ea"}, + {file = "shapely-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:78128357a0cee573257a0c2c388d4b7bf13cb7dbe5b3fe5d26d45ebbe2a39e25"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:87dc2be34ac3a3a4a319b963c507ac06682978a5e6c93d71917618b14f13066e"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42997ac806e4583dad51c80a32d38570fd9a3d4778f5e2c98f9090aa7db0fe91"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ccfd5fa10a37e67dbafc601c1ddbcbbfef70d34c3f6b0efc866ddbdb55893a6c"}, + {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7c95d3379ae3abb74058938a9fcbc478c6b2e28d20dace38f8b5c587dde90aa"}, + {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a21353d28209fb0d8cc083e08ca53c52666e0d8a1f9bbe23b6063967d89ed24"}, + {file = "shapely-2.0.2-cp38-cp38-win32.whl", hash = "sha256:03e63a99dfe6bd3beb8d5f41ec2086585bb969991d603f9aeac335ad396a06d4"}, + {file = "shapely-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:c6fd29fbd9cd76350bd5cc14c49de394a31770aed02d74203e23b928f3d2f1aa"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f217d28ecb48e593beae20a0082a95bd9898d82d14b8fcb497edf6bff9a44d7"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:394e5085b49334fd5b94fa89c086edfb39c3ecab7f669e8b2a4298b9d523b3a5"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd3ad17b64466a033848c26cb5b509625c87d07dcf39a1541461cacdb8f7e91c"}, + {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d41a116fcad58048d7143ddb01285e1a8780df6dc1f56c3b1e1b7f12ed296651"}, + {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dea9a0651333cf96ef5bb2035044e3ad6a54f87d90e50fe4c2636debf1b77abc"}, + {file = "shapely-2.0.2-cp39-cp39-win32.whl", hash = "sha256:b8eb0a92f7b8c74f9d8fdd1b40d395113f59bd8132ca1348ebcc1f5aece94b96"}, + {file = "shapely-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:794affd80ca0f2c536fc948a3afa90bd8fb61ebe37fe873483ae818e7f21def4"}, + {file = "shapely-2.0.2.tar.gz", hash = "sha256:1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7"}, +] + +[package.dependencies] +numpy = ">=1.14" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9,<3.11" +content-hash = "ba3460b4c978535d1cddd62e9530c1997537e408cd4df6375bc913b62b717aa2" diff --git a/components/pyproject.toml b/components/pyproject.toml new file mode 100644 index 00000000..b5b546ce --- /dev/null +++ b/components/pyproject.toml @@ -0,0 +1,43 @@ +[tool.poetry] +name = "components" +version = "0.1.0" +authors = ["Example User "] +description = "KubeFlow components for Google Cloud" +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "Programming Language :: Python :: 3.9", +] + +[tool.poetry.dependencies] +python = ">=3.9,<3.11" +kfp = ">=2.0.1,<3.0.0" +pyyaml = "6.0.1" + +[tool.poetry.group.dev.dependencies] +google-cloud-aiplatform = ">=1.30.1" +google-cloud-bigquery = ">=2.30.0" +google-cloud-pipeline-components = "^2.1.0" +pytest = ">=7.3.1,<8.0.0" + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" + +[tool.flake8] +max-line-length = 88 +per-file-ignores = [ + "E203", + "F841" +] +ignore = [ + "E203", + "F841", +] + +[tool.pytest.ini_options] +pythonpath = [ + "src" +] +testpaths = "tests" +junit_family = "xunit2" diff --git a/pipelines/tests/conftest.py b/components/src/components/__init__.py similarity index 60% rename from pipelines/tests/conftest.py rename to components/src/components/__init__.py index 4d1ece88..9313d29a 100644 --- a/pipelines/tests/conftest.py +++ b/components/src/components/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -12,18 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-import pytest +from .extract_table import extract_table +from .lookup_model import lookup_model +from .model_batch_predict import model_batch_predict +from .upload_model import upload_model -def pytest_addoption(parser): - parser.addoption( - "--enable_caching", - type=str, - help="Whether to enable or disable caching for all pipeline steps", - default=None, - ) - - -@pytest.fixture(scope="session") -def enable_caching(pytestconfig): - return pytestconfig.getoption("enable_caching") +__version__ = "0.1.0" +__all__ = [ + "extract_table", + "lookup_model", + "model_batch_predict", + "upload_model", +] diff --git a/components/src/components/extract_table.py b/components/src/components/extract_table.py new file mode 100644 index 00000000..5fe413f4 --- /dev/null +++ b/components/src/components/extract_table.py @@ -0,0 +1,42 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +from kfp.dsl import Dataset, Output, ContainerSpec, container_component + + +@container_component +def extract_table( + project: str, + location: str, + table: str, + data: Output[Dataset], + destination_format: str = "CSV", + compression: str = "NONE", + field_delimiter: str = ",", + print_header: str = "true", +): + return ContainerSpec( + image="google/cloud-sdk:alpine", + command=["bq"], + args=[ + "extract", + f"--project_id={project}", + f"--location={location}", + f"--destination_format={destination_format}", + f"--compression={compression}", + f"--field_delimiter={field_delimiter}", + f"--print_header={print_header}", + table, + data.uri, + ], + ) diff --git a/components/vertex-components/src/vertex_components/lookup_model.py b/components/src/components/lookup_model.py similarity index 68% rename from components/vertex-components/src/vertex_components/lookup_model.py rename to components/src/components/lookup_model.py index c235b373..1f00035b 100644 --- a/components/vertex-components/src/vertex_components/lookup_model.py +++ b/components/src/components/lookup_model.py @@ -1,31 +1,30 @@ -# Copyright 2022 Google LLC - +# Copyright 2023 Google LLC +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - +# +# http://www.apache.org/licenses/LICENSE-2.0 +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
-from kfp.v2.dsl import component, Output, Model +from kfp.dsl import component, Output, Model from typing import NamedTuple @component( - base_image="python:3.7", - packages_to_install=["google-cloud-aiplatform==1.24.1"], + base_image="python:3.9", + packages_to_install=["google-cloud-aiplatform==1.30.1"], ) def lookup_model( model_name: str, - project_location: str, - project_id: str, + location: str, + project: str, model: Output[Model], - order_models_by: str = "create_time desc", fail_on_model_not_found: bool = False, ) -> NamedTuple("Outputs", [("model_resource_name", str), ("training_dataset", dict)]): """ @@ -33,15 +32,9 @@ def lookup_model( Args: model_name (str): display name of the model - project_location (str): location of the Google Cloud project - project_id (str): project id of the Google Cloud project + location (str): location of the Google Cloud project + project (str): project id of the Google Cloud project model (Output[Model]): a Vertex AI model - order_models_by (str): if multiple models are found based on the display name, - use a filter clause: - A comma-separated list of fields to order by, sorted in - ascending order. Use "desc" after a field name for descending. - Supported fields: `display_name`, `create_time`, `update_time` - Defaults to "create_time desc". fail_on_model_not_found (bool): if set to True, raise runtime error if model is not found @@ -60,25 +53,23 @@ def lookup_model( logging.info(f"listing models with display name {model_name}") models = Model.list( filter=f'display_name="{model_name}"', - order_by=order_models_by, - location=project_location, - project=project_id, + location=location, + project=project, ) - logging.info(f"found {len(models)} models") + logging.info(f"found {len(models)} model(s)") training_dataset = {} model_resource_name = "" if len(models) == 0: logging.error( - f"No model found with name {model_name}" - + f"(project: {project_id} location: {project_location})" + f"No model found with name {model_name} " + + f"(project: {project} location: {location})" ) if fail_on_model_not_found: raise RuntimeError(f"Failed as model was not found") elif len(models) == 1: target_model = models[0] model_resource_name = target_model.resource_name - logging.info(f"choosing model by order ({order_models_by})") logging.info(f"model display name: {target_model.display_name}") logging.info(f"model resource name: {target_model.resource_name}") logging.info(f"model uri: {target_model.uri}") diff --git a/components/vertex-components/src/vertex_components/model_batch_predict.py b/components/src/components/model_batch_predict.py similarity index 87% rename from components/vertex-components/src/vertex_components/model_batch_predict.py rename to components/src/components/model_batch_predict.py index 849ebd28..b99687e8 100644 --- a/components/vertex-components/src/vertex_components/model_batch_predict.py +++ b/components/src/components/model_batch_predict.py @@ -1,33 +1,34 @@ -# Copyright 2022 Google LLC - +# Copyright 2023 Google LLC +# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - +# +# http://www.apache.org/licenses/LICENSE-2.0 +# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from kfp.v2.dsl import Input, Model, component
-from typing import List, NamedTuple
+from kfp.dsl import Input, Model, component, OutputPath
+from typing import List
 
 
 @component(
-    base_image="python:3.7",
+    base_image="python:3.9",
     packages_to_install=[
-        "google-cloud-aiplatform==1.24.1",
+        "google-cloud-aiplatform==1.30.1",
         "google-cloud-pipeline-components==1.0.33",
     ],
 )
 def model_batch_predict(
     model: Input[Model],
+    gcp_resources: OutputPath(str),
     job_display_name: str,
-    project_location: str,
-    project_id: str,
+    location: str,
+    project: str,
     source_uri: str,
     destination_uri: str,
     source_format: str,
@@ -37,17 +38,18 @@
     max_replica_count: int = 1,
     monitoring_training_dataset: dict = None,
     monitoring_alert_email_addresses: List[str] = None,
+    notification_channels: List[str] = [],
     monitoring_skew_config: dict = None,
     instance_config: dict = None,
-) -> NamedTuple("Outputs", [("gcp_resources", str)]):
+):
     """
     Trigger a batch prediction job and enable monitoring.
 
     Args:
         model (Input[Model]): Input model to use for calculating predictions.
         job_display_name: Name of the batch prediction job.
-        project_location (str): location of the Google Cloud project. Defaults to None.
-        project_id (str): project id of the Google Cloud project. Defaults to None.
+        location (str): location of the Google Cloud project.
+        project (str): project id of the Google Cloud project.
         source_uri (str): bq:// URI or a list of gcs:// URIs to read input instances.
         destination_uri (str): bq:// or gs:// URI to store output predictions.
         source_format (str): E.g. "bigquery", "jsonl", "csv". See:
@@ -61,13 +63,16 @@
             https://cloud.google.com/python/docs/reference/aiplatform/latest/google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.TrainingPredictionSkewDetectionConfig
         monitoring_alert_email_addresses (List[str]):
             Email addresses to send alerts to (optional).
+        notification_channels (List[str]):
+            Notification channels to send alerts to (optional).
+            Format: projects/<project-id>/notificationChannels/<channel-id>
         monitoring_training_dataset (dict): Metadata of training dataset. See:
             https://cloud.google.com/python/docs/reference/aiplatform/latest/google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.TrainingDataset
         instance_config (dict): Configuration defining how to transform batch
            prediction input instances to the instances that the Model accepts. See:
            https://cloud.google.com/vertex-ai/docs/reference/rest/v1beta1/projects.locations.batchPredictionJobs#instanceconfig
     Returns:
-        NamedTuple: gcp_resources for Vertex AI UI integration.
+        OutputPath: gcp_resources for Vertex AI UI integration.
""" import logging @@ -118,7 +123,7 @@ def is_job_successful(job_state: JobState) -> bool: _POLLING_INTERVAL_IN_SECONDS = 20 _CONNECTION_ERROR_RETRY_LIMIT = 5 - api_endpoint = f"{project_location}-aiplatform.googleapis.com" + api_endpoint = f"{location}-aiplatform.googleapis.com" input_config = {"instancesFormat": source_format} output_config = {"predictionsFormat": destination_format} @@ -151,7 +156,9 @@ def is_job_successful(job_state: JobState) -> bool: message["modelMonitoringConfig"] = { "alertConfig": { - "emailAlertConfig": {"userEmails": monitoring_alert_email_addresses} + "emailAlertConfig": {"userEmails": monitoring_alert_email_addresses}, + "notificationChannels": notification_channels, + "enableLogging": True, }, "objectiveConfigs": [ { @@ -167,11 +174,19 @@ def is_job_successful(job_state: JobState) -> bool: logging.info(request) client = JobServiceClient(client_options={"api_endpoint": api_endpoint}) response = client.create_batch_prediction_job( - parent=f"projects/{project_id}/locations/{project_location}", + parent=f"projects/{project}/locations/{location}", batch_prediction_job=request, ) logging.info(f"Submitted batch prediction job: {response.name}") + # output GCP resource for Vertex AI UI integration + batch_job_resources = GcpResources() + dr = batch_job_resources.resources.add() + dr.resource_type = "BatchPredictionJob" + dr.resource_uri = response.name + with open(gcp_resources, "w") as f: + f.write(MessageToJson(batch_job_resources)) + with execution_context.ExecutionContext( on_cancel=partial( send_cancel_request, @@ -206,12 +221,3 @@ def is_job_successful(job_state: JobState) -> bool: f"Waiting for {_POLLING_INTERVAL_IN_SECONDS} seconds for next poll." ) time.sleep(_POLLING_INTERVAL_IN_SECONDS) - - # return GCP resource for Vertex AI UI integration - batch_job_resources = GcpResources() - dr = batch_job_resources.resources.add() - dr.resource_type = "BatchPredictionJob" - dr.resource_uri = response.name - gcp_resources = MessageToJson(batch_job_resources) - - return (gcp_resources,) diff --git a/components/src/components/upload_model.py b/components/src/components/upload_model.py new file mode 100644 index 00000000..ec5c20a4 --- /dev/null +++ b/components/src/components/upload_model.py @@ -0,0 +1,205 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+
+from kfp.dsl import Dataset, Input, Metrics, Model, Output, component
+from google_cloud_pipeline_components.types.artifact_types import VertexModel
+
+
+@component(
+    base_image="python:3.9",
+    packages_to_install=[
+        "google-cloud-aiplatform==1.30.1",
+        "google-cloud-pipeline-components==2.1.0",
+    ],
+)
+def upload_model(
+    model: Input[Model],
+    test_data: Input[Dataset],
+    model_evaluation: Input[Metrics],
+    vertex_model: Output[VertexModel],
+    project: str,
+    location: str,
+    model_name: str,
+    eval_metric: str,
+    eval_lower_is_better: bool,
+    pipeline_job_id: str,
+    serving_container_image: str,
+    model_description: str = None,
+    evaluation_name: str = "Imported evaluation",
+) -> None:
+    """
+    Args:
+        model (Model): Input challenger model.
+        test_data (Dataset): Test dataset used for evaluating challenger model.
+        vertex_model (VertexModel): Output model uploaded to Vertex AI Model Registry.
+        model_evaluation (Metrics): Evaluation metrics of challenger model.
+        project (str): project id of the Google Cloud project.
+        location (str): location of the Google Cloud project.
+        pipeline_job_id (str): ID of the pipeline job, stored in the metadata
+            of the imported model evaluation.
+        model_name (str): Name of champion and challenger model in
+            Vertex AI Model Registry.
+        eval_metric (str): Metric name to compare champion and challenger on.
+        eval_lower_is_better (bool): Usually True for losses and
+            False for classification metrics.
+        serving_container_image (str): Container URI for serving the model.
+        model_description (str): Optional. Description of model.
+        evaluation_name (str): Optional. Name of evaluation results which are
+            displayed in the Vertex AI UI of the challenger model.
+    """
+
+    import json
+    import logging
+    import google.cloud.aiplatform as aip
+    from google.protobuf.json_format import MessageToDict
+    from google.cloud.aiplatform_v1 import ModelEvaluation, ModelServiceClient
+    from google.protobuf.json_format import ParseDict
+
+    def lookup_model(model_name: str) -> aip.Model:
+        """Look up model in model registry."""
+        logging.info(f"listing models with display name {model_name}")
+        models = aip.Model.list(
+            filter=f'display_name="{model_name}"',
+            location=location,
+            project=project,
+        )
+        logging.info(f"found {len(models)} models")
+
+        if len(models) == 0:
+            logging.info(
+                f"No model found with name {model_name} "
+                + f"(project: {project} location: {location})"
+            )
+            return None
+        elif len(models) == 1:
+            return models[0]
+        else:
+            raise RuntimeError(f"Multiple models with name {model_name} were found.")
+
+    def compare_models(
+        champion_metrics: dict,
+        challenger_metrics: dict,
+        eval_lower_is_better: bool,
+    ) -> bool:
+        """Compare models by evaluating a primary metric."""
+        logging.info(f"Comparing {eval_metric} of models")
+        logging.debug(f"Champion metrics: {champion_metrics}")
+        logging.debug(f"Challenger metrics: {challenger_metrics}")
+
+        m_champ = champion_metrics[eval_metric]
+        m_chall = challenger_metrics[eval_metric]
+        logging.info(f"Champion={m_champ} Challenger={m_chall}")
+
+        challenger_wins = (
+            (m_chall < m_champ) if eval_lower_is_better else (m_chall > m_champ)
+        )
+        logging.info(f"{'Challenger' if challenger_wins else 'Champion'} wins!")
+
+        return challenger_wins
+
+    def upload_model_to_registry(
+        is_default_version: bool, parent_model_uri: str = None
+    ) -> aip.Model:
+        """Upload model to registry."""
+        logging.info(f"Uploading model {model_name} (default: {is_default_version})")
+        uploaded_model = aip.Model.upload(
+            display_name=model_name,
+            description=model_description,
+            artifact_uri=model.uri,
+
serving_container_image_uri=serving_container_image, + serving_container_predict_route="/predict", + serving_container_health_route="/health", + parent_model=parent_model_uri, + is_default_version=is_default_version, + ) + logging.info(f"Uploaded model {uploaded_model}") + + # Output google.VertexModel artifact + vertex_model.uri = ( + f"https://{location}-aiplatform.googleapis.com/v1/" + f"{uploaded_model.versioned_resource_name}" + ) + vertex_model.metadata["resourceName"] = uploaded_model.versioned_resource_name + + return uploaded_model + + def import_evaluation( + parsed_metrics: dict, + challenger_model: aip.Model, + evaluation_name: str, + ) -> str: + """Import model evaluation.""" + logging.info(f"Evaluation metrics: {parsed_metrics}") + problem_type = parsed_metrics.pop("problemType") + schema = ( + f"gs://google-cloud-aiplatform/schema/modelevaluation/" + f"{problem_type}_metrics_1.0.0.yaml" + ) + evaluation = { + "displayName": evaluation_name, + "metricsSchemaUri": schema, + "metrics": parsed_metrics, + "metadata": { + "pipeline_job_id": pipeline_job_id, + "evaluation_dataset_type": "gcs", + "evaluation_dataset_path": [test_data.uri], + }, + } + + request = ParseDict(evaluation, ModelEvaluation()._pb) + logging.debug(f"Request: {request}") + challenger_name = challenger_model.versioned_resource_name + client = ModelServiceClient( + client_options={"api_endpoint": location + "-aiplatform.googleapis.com"} + ) + logging.info(f"Uploading model evaluation for {challenger_name}") + response = client.import_model_evaluation( + parent=challenger_name, + model_evaluation=request, + ) + logging.debug(f"Response: {response}") + return response.name + + # Parse metrics to dict + with open(model_evaluation.path, "r") as f: + challenger_metrics = json.load(f) + + champion_model = lookup_model(model_name=model_name) + + challenger_wins = True + parent_model_uri = None + if champion_model is None: + logging.info("No champion model found, uploading new model.") + else: + # Compare models + logging.info( + f"Model default version {champion_model.version_id} " + "is being challenged by new model." + ) + # Look up Vertex model evaluation for champion model + champion_eval = champion_model.get_model_evaluation() + champion_metrics = MessageToDict(champion_eval._gca_resource._pb)["metrics"] + + challenger_wins = compare_models( + champion_metrics=champion_metrics, + challenger_metrics=challenger_metrics, + eval_lower_is_better=eval_lower_is_better, + ) + parent_model_uri = champion_model.resource_name + + model = upload_model_to_registry(challenger_wins, parent_model_uri) + import_evaluation( + parsed_metrics=challenger_metrics, + challenger_model=model, + evaluation_name=evaluation_name, + ) diff --git a/components/bigquery-components/tests/conftest.py b/components/tests/conftest.py similarity index 91% rename from components/bigquery-components/tests/conftest.py rename to components/tests/conftest.py index 898c2590..eb9014b4 100644 --- a/components/bigquery-components/tests/conftest.py +++ b/components/tests/conftest.py @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -13,7 +13,7 @@ # limitations under the License. import pytest -import kfp.v2.dsl +import kfp.dsl @pytest.fixture(autouse=True) @@ -31,7 +31,7 @@ def mock_kfp_artifact(monkeypatch): Args: monkeypatch: Used to patch the decorator `@component` in `kfp.v2.dsl`. 
This prevents KFP from changing the Python functions when applying
-            pytests.
+            pytest.
 
     Returns:
         None
@@ -47,4 +47,4 @@ def _get_path(self):
         return self.uri
 
     # mock the _get_path method of Artifact which is used by the property path
-    monkeypatch.setattr(kfp.v2.dsl.Artifact, "_get_path", _get_path)
+    monkeypatch.setattr(kfp.dsl.Artifact, "_get_path", _get_path)
diff --git a/components/tests/test_lookup_model.py b/components/tests/test_lookup_model.py
new file mode 100644
index 00000000..58003203
--- /dev/null
+++ b/components/tests/test_lookup_model.py
@@ -0,0 +1,90 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from kfp.dsl import Model
+from unittest import mock
+import pytest
+
+import components
+
+lookup_model = components.lookup_model.python_func
+
+
+@mock.patch("google.cloud.aiplatform.Model")
+def test_lookup_model(mock_model, tmp_path):
+    """
+    Assert lookup_model produces the expected resource name, and that the list
+    method is called with the correct arguments.
+    """
+
+    # Mock attribute and method
+    mock_path = str(tmp_path / "model")
+    mock_model.resource_name = "my-model-resource-name"
+    mock_model.uri = mock_path
+    mock_model.list.return_value = [mock_model]
+
+    # Invoke the model look up
+    found_model_resource_name, _ = lookup_model(
+        model_name="my-model",
+        location="europe-west4",
+        project="my-project-id",
+        fail_on_model_not_found=False,
+        model=Model(uri=mock_path),
+    )
+
+    assert found_model_resource_name == "my-model-resource-name"
+
+    # Check the list method was called once with the correct arguments
+    mock_model.list.assert_called_once_with(
+        filter='display_name="my-model"',
+        location="europe-west4",
+        project="my-project-id",
+    )
+
+
+@mock.patch("google.cloud.aiplatform.Model")
+def test_lookup_model_when_no_models(mock_model, tmp_path):
+    """
+    Checks that when there are no models and fail_on_model_not_found = False,
+    lookup_model returns an empty string.
+    """
+    mock_model.list.return_value = []
+    exported_model_resource_name, _ = lookup_model(
+        model_name="my-model",
+        location="europe-west4",
+        project="my-project-id",
+        fail_on_model_not_found=False,
+        model=Model(uri=str(tmp_path / "model")),
+    )
+
+    assert exported_model_resource_name == ""
+
+
+@mock.patch("google.cloud.aiplatform.Model")
+def test_lookup_model_when_no_models_fail(mock_model, tmp_path):
+    """
+    Checks that when there are no models and fail_on_model_not_found = True,
+    lookup_model raises a RuntimeError.
+    """
+    mock_model.list.return_value = []
+
+    # Verify that a RuntimeError is raised
+    with pytest.raises(RuntimeError):
+        lookup_model(
+            model_name="my-model",
+            location="europe-west4",
+            project="my-project-id",
+            fail_on_model_not_found=True,
+            model=Model(uri=str(tmp_path / "model")),
+        )
diff --git a/components/vertex-components/tests/test_model_batch_predict.py b/components/tests/test_model_batch_predict.py
similarity index 65%
rename from components/vertex-components/tests/test_model_batch_predict.py
rename to components/tests/test_model_batch_predict.py
index 10d9e808..aec99fad 100644
--- a/components/vertex-components/tests/test_model_batch_predict.py
+++ b/components/tests/test_model_batch_predict.py
@@ -1,4 +1,4 @@
-# Copyright 2022 Google LLC
+# Copyright 2023 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,16 +11,15 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import json
 import pytest
-from unittest.mock import Mock, patch
-from kfp.v2.dsl import Model
+from unittest import mock
+from kfp.dsl import Model
 from google.cloud.aiplatform_v1beta1.types.job_state import JobState
 
-import vertex_components
+import components
 
-model_batch_predict = vertex_components.model_batch_predict.python_func
+model_batch_predict = components.model_batch_predict.python_func
 
 SKEW_THRESHOLD = {"defaultSkewThreshold": {"value": 0.001}}
@@ -30,7 +29,19 @@
     "targetField": "col",
 }
 
+mock_job1 = mock.Mock()
+mock_job1.name = "mock-batch-job"
+mock_job1.state = JobState.JOB_STATE_SUCCEEDED
+
+@mock.patch(
+    "google.cloud.aiplatform_v1beta1.services.job_service.JobServiceClient.create_batch_prediction_job",  # noqa : E501
+    return_value=mock_job1,
+)
+@mock.patch(
+    "google.cloud.aiplatform_v1beta1.services.job_service.JobServiceClient.get_batch_prediction_job",  # noqa : E501
+    return_value=mock_job1,
+)
 @pytest.mark.parametrize(
     (
         "source_format,destination_format,source_uri,monitoring_training_dataset,"
         "monitoring_alert_email_addresses,monitoring_skew_config"
     ),
     [
     ],
 )
 def test_model_batch_predict(
-    tmpdir,
+    create_job,
+    get_job,
+    tmp_path,
     source_format,
     destination_format,
     source_uri,
@@ -44,7 +55,9 @@
     monitoring_training_dataset,
     monitoring_alert_email_addresses,
     monitoring_skew_config,
 ):
     """
     Asserts model_batch_predict successfully creates requests given different
     arguments.
""" - mock_resource_name = "mock-batch-job" - - mock_job1 = Mock() - mock_job1.name = mock_resource_name - mock_job1.state = JobState.JOB_STATE_SUCCEEDED - - mock_model = Model(uri=tmpdir, metadata={"resourceName": ""}) + mock_model = Model(uri=str(tmp_path / "model"), metadata={"resourceName": ""}) + gcp_resources_path = tmp_path / "gcp_resources.json" - with patch( - "google.cloud.aiplatform_v1beta1.services.job_service.JobServiceClient.create_batch_prediction_job", # noqa: E501 - return_value=mock_job1, - ) as create_job, patch( - "google.cloud.aiplatform_v1beta1.services.job_service.JobServiceClient.get_batch_prediction_job", # noqa: E501 - return_value=mock_job1, - ) as get_job: - (gcp_resources,) = model_batch_predict( + try: + model_batch_predict( model=mock_model, job_display_name="", - project_location="", - project_id="", + location="", + project="", source_uri=source_uri, destination_uri=destination_format, source_format=source_format, @@ -82,10 +84,11 @@ def test_model_batch_predict( monitoring_training_dataset=monitoring_training_dataset, monitoring_alert_email_addresses=monitoring_alert_email_addresses, monitoring_skew_config=monitoring_skew_config, + gcp_resources=str(gcp_resources_path), ) - create_job.assert_called_once() - get_job.assert_called_once() - assert ( - json.loads(gcp_resources)["resources"][0]["resourceUri"] == mock_resource_name - ) + create_job.assert_called_once() + get_job.assert_called_once() + assert gcp_resources_path.exists() + finally: + gcp_resources_path.unlink(missing_ok=True) diff --git a/components/tests/test_upload_model.py b/components/tests/test_upload_model.py new file mode 100644 index 00000000..d4b96e4c --- /dev/null +++ b/components/tests/test_upload_model.py @@ -0,0 +1,340 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import json +import logging +from unittest import mock +from kfp.dsl import Dataset, Metrics, Model +from google.protobuf.json_format import ParseDict +from google.cloud.aiplatform_v1 import ModelEvaluation +from google_cloud_pipeline_components.types.artifact_types import VertexModel + +import components + +upload_model = components.upload_model.python_func + + +@mock.patch("google.cloud.aiplatform_v1.ModelServiceClient") +@mock.patch("google.cloud.aiplatform.Model") +def test_model_upload_no_champion( + mock_model_class, mock_model_service_client, caplog, tmp_path +): + + caplog.set_level(logging.INFO) + + metrics_json = json.dumps( + { + "problemType": "regression", + "auc": 0.4, + "accuracy": 0.80, + } + ) + metrics_file_path = tmp_path / "metrics.json" + metrics_file_path.write_text(metrics_json) + + mock_model_class.list.return_value = [] + + model = Model(uri="dummy-model-uri") + serving_container_image = "dummy_image:latest" + model_name = "dummy-model-name" + model_description = "dummy model_description" + vertex_model = VertexModel.create( + name=model_name, uri="chall_uri", model_resource_name="chall_resource_name" + ) + project = "dummy-project" + location = "dummy-location" + model_evaluation = Metrics(uri="") + model_evaluation.path = str(metrics_file_path) + eval_metric = "auc" + eval_lower_is_better = False + + pipeline_job_id = "dummy-pipeline-job-id" + test_dataset = Dataset(uri="test-dataset-uri") + evaluation_name = "dummy evaluation name" + + upload_model( + model=model, + model_description=model_description, + serving_container_image=serving_container_image, + vertex_model=vertex_model, + project=project, + location=location, + model_evaluation=model_evaluation, + eval_metric=eval_metric, + eval_lower_is_better=eval_lower_is_better, + model_name=model_name, + pipeline_job_id=pipeline_job_id, + test_data=test_dataset, + evaluation_name=evaluation_name, + ) + + # Check that model lookup is performed, and no existing model is found + mock_model_class.list.assert_called_once_with( + filter=f'display_name="{model_name}"', + location=location, + project=project, + ) + assert "found 0 models" in caplog.text + + # Check no model comparison occurs + assert "wins" not in caplog.text + + # Check model upload call + mock_model_class.upload.assert_called_once_with( + display_name=model_name, + description=model_description, + artifact_uri="dummy-model-uri", + serving_container_image_uri=serving_container_image, + serving_container_predict_route="/predict", + serving_container_health_route="/health", + parent_model=None, + is_default_version=True, + ) + + # check model output URI + assert vertex_model.uri == f"https://{location}-aiplatform.googleapis.com/v1/" + ( + str(mock_model_class.upload.return_value.versioned_resource_name) + ) + + # check evaluation import + mock_model_service_client.return_value.import_model_evaluation.assert_called_once_with( # noqa + parent=mock_model_class.upload.return_value.versioned_resource_name, + model_evaluation=mock.ANY, + ) + + +@mock.patch("google.cloud.aiplatform_v1.ModelServiceClient") +@mock.patch("google.cloud.aiplatform.Model") +def test_model_upload_challenger_wins( + mock_model_class, mock_model_service_client, caplog, tmp_path +): + + caplog.set_level(logging.INFO) + + metrics_json = json.dumps( + { + "problemType": "regression", + "auc": 0.4, + "accuracy": 0.80, + } + ) + metrics_file_path = tmp_path / "metrics.json" + metrics_file_path.write_text(metrics_json) + + # create mock champion model + mock_champion_model = mock.Mock() + 
mock_champion_model.version_id = "123"
+    dummy_champion_eval = ModelEvaluation()
+    dummy_champion_metrics = {
+        "auc": 0.2,
+        "f1": 0.7,
+    }
+    message_dict = {
+        "displayName": "Previously imported evaluation",
+        "metricsSchemaUri": "gs://google-cloud-aiplatform/schema/modelevaluation/regression_metrics_1.0.0.yaml",  # noqa
+        "metrics": dummy_champion_metrics,
+        "metadata": {
+            "pipeline_job_id": "dummy-pipeline-id",
+            "evaluation_dataset_type": "gcs",
+            "evaluation_dataset_path": ["dummy-gcs-uri"],
+        },
+    }
+    ParseDict(message_dict, dummy_champion_eval._pb)
+    mock_champion_model.get_model_evaluation.return_value._gca_resource = (
+        dummy_champion_eval
+    )
+    mock_champion_model.resource_name = "dummy-champion-resource-name"
+    mock_model_class.list.return_value = [mock_champion_model]
+
+    # create mock challenger model
+    model = Model(uri="dummy-model-uri")
+    serving_container_image = "dummy_image:latest"
+    model_name = "dummy-model-name"
+    model_description = "dummy model_description"
+    vertex_model = VertexModel.create(
+        name=model_name, uri="chall_uri", model_resource_name="chall_resource_name"
+    )
+    project = "dummy-project"
+    location = "dummy-location"
+    model_evaluation = Metrics(uri="")
+    model_evaluation.path = str(metrics_file_path)
+    eval_metric = "auc"
+    eval_lower_is_better = False
+
+    pipeline_job_id = "dummy-pipeline-job-id"
+    test_dataset = Dataset(uri="test-dataset-uri")
+    evaluation_name = "dummy evaluation name"
+
+    upload_model(
+        model=model,
+        model_description=model_description,
+        serving_container_image=serving_container_image,
+        vertex_model=vertex_model,
+        project=project,
+        location=location,
+        model_evaluation=model_evaluation,
+        eval_metric=eval_metric,
+        eval_lower_is_better=eval_lower_is_better,
+        model_name=model_name,
+        pipeline_job_id=pipeline_job_id,
+        test_data=test_dataset,
+        evaluation_name=evaluation_name,
+    )
+
+    # Check that model lookup is performed and the champion model is found
+    mock_model_class.list.assert_called_once_with(
+        filter=f'display_name="{model_name}"',
+        location=location,
+        project=project,
+    )
+    assert "is being challenged by new model" in caplog.text
+
+    # Check challenger wins in model comparison
+    assert "Challenger wins!" in caplog.text
+
+    # Check model upload call
+    mock_model_class.upload.assert_called_once_with(
+        display_name=model_name,
+        description=model_description,
+        artifact_uri="dummy-model-uri",
+        serving_container_image_uri=serving_container_image,
+        serving_container_predict_route="/predict",
+        serving_container_health_route="/health",
+        parent_model=mock_champion_model.resource_name,
+        is_default_version=True,
+    )
+
+    # check model output URI
+    assert vertex_model.uri == f"https://{location}-aiplatform.googleapis.com/v1/" + (
+        str(mock_model_class.upload.return_value.versioned_resource_name)
+    )
+
+    # check evaluation import
+    mock_model_service_client.return_value.import_model_evaluation.assert_called_once_with(  # noqa
+        parent=mock_model_class.upload.return_value.versioned_resource_name,
+        model_evaluation=mock.ANY,
+    )
+
+
+@mock.patch("google.cloud.aiplatform_v1.ModelServiceClient")
+@mock.patch("google.cloud.aiplatform.Model")
+def test_model_upload_champion_wins(
+    mock_model_class, mock_model_service_client, caplog, tmp_path
+):
+
+    caplog.set_level(logging.INFO)
+
+    metrics_json = json.dumps(
+        {
+            "problemType": "regression",
+            "auc": 0.4,
+            "accuracy": 0.80,
+        }
+    )
+    metrics_file_path = tmp_path / "metrics.json"
+    metrics_file_path.write_text(metrics_json)
+
+    # Create mock champion model
+    mock_champion_model = mock.Mock()
+    mock_champion_model.version_id = "123"
+    dummy_champion_eval = ModelEvaluation()
+    dummy_champion_metrics = {
+        "auc": 0.8,
+        "f1": 0.7,
+    }
+    message_dict = {
+        "displayName": "Previously imported evaluation",
+        "metricsSchemaUri": "gs://google-cloud-aiplatform/schema/modelevaluation/regression_metrics_1.0.0.yaml",  # noqa
+        "metrics": dummy_champion_metrics,
+        "metadata": {
+            "pipeline_job_id": "dummy-pipeline-id",
+            "evaluation_dataset_type": "gcs",
+            "evaluation_dataset_path": ["dummy-gcs-uri"],
+        },
+    }
+    ParseDict(message_dict, dummy_champion_eval._pb)
+    mock_champion_model.get_model_evaluation.return_value._gca_resource = (
+        dummy_champion_eval
+    )
+    mock_champion_model.resource_name = "dummy-champion-resource-name"
+    mock_model_class.list.return_value = [mock_champion_model]
+
+    # create mock challenger model
+    model = Model(uri="dummy-model-uri")
+    serving_container_image = "dummy_image:latest"
+    model_name = "dummy-model-name"
+    model_description = "dummy model_description"
+    vertex_model = VertexModel.create(
+        name=model_name, uri="chall_uri", model_resource_name="chall_resource_name"
+    )
+    project = "dummy-project"
+    location = "dummy-location"
+    model_evaluation = Metrics(uri="")
+    model_evaluation.path = str(metrics_file_path)
+    eval_metric = "auc"
+    eval_lower_is_better = False
+
+    pipeline_job_id = "dummy-pipeline-job-id"
+    test_dataset = Dataset(uri="test-dataset-uri")
+    evaluation_name = "dummy evaluation name"
+
+    upload_model(
+        model=model,
+        model_description=model_description,
+        serving_container_image=serving_container_image,
+        vertex_model=vertex_model,
+        project=project,
+        location=location,
+        model_evaluation=model_evaluation,
+        eval_metric=eval_metric,
+        eval_lower_is_better=eval_lower_is_better,
+        model_name=model_name,
+        pipeline_job_id=pipeline_job_id,
+        test_data=test_dataset,
+        evaluation_name=evaluation_name,
+    )
+
+    # Check that model lookup is performed and the champion model is found
+    mock_model_class.list.assert_called_once_with(
+        filter=f'display_name="{model_name}"',
+        location=location,
+        project=project,
+    )
+    assert "is being challenged by new model" in caplog.text
+
+    # Check champion wins in model comparison
+
assert "Champion wins!" in caplog.text + + # Check model upload call + mock_model_class.upload.assert_called_once_with( + display_name=model_name, + description=model_description, + artifact_uri="dummy-model-uri", + serving_container_image_uri=serving_container_image, + serving_container_predict_route="/predict", + serving_container_health_route="/health", + parent_model=mock_champion_model.resource_name, + is_default_version=False, + ) + + # check model output URI + assert vertex_model.uri == f"https://{location}-aiplatform.googleapis.com/v1/" + ( + str(mock_model_class.upload.return_value.versioned_resource_name) + ) + + # check evaluation import + mock_model_service_client.return_value.import_model_evaluation.assert_called_once_with( # noqa + parent=mock_model_class.upload.return_value.versioned_resource_name, + model_evaluation=mock.ANY, + ) diff --git a/components/vertex-components/.python-version b/components/vertex-components/.python-version deleted file mode 100644 index f7e5aa84..00000000 --- a/components/vertex-components/.python-version +++ /dev/null @@ -1 +0,0 @@ -3.7.12 diff --git a/components/vertex-components/Pipfile b/components/vertex-components/Pipfile deleted file mode 100644 index 6de5c5ef..00000000 --- a/components/vertex-components/Pipfile +++ /dev/null @@ -1,16 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] -kfp = "==1.8.21" - -[dev-packages] -google-cloud-aiplatform = "==1.24.1" -google-cloud-pipeline-components = "==1.0.42" -pytest = ">=7.3.1,<8.0.0" -pre-commit = ">=2.14.1,<3.0.0" - -[requires] -python_version = "3.7" diff --git a/components/vertex-components/Pipfile.lock b/components/vertex-components/Pipfile.lock deleted file mode 100644 index e275cc15..00000000 --- a/components/vertex-components/Pipfile.lock +++ /dev/null @@ -1,1792 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "42000dc26266b6808ea2b6a7261983f236b59a8966b57835907a9b134fbef804" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "absl-py": { - "hashes": [ - "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47", - "sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d" - ], - "markers": "python_version >= '3.6'", - "version": "==1.4.0" - }, - "attrs": { - "hashes": [ - "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", - "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1.0" - }, - "cachetools": { - "hashes": [ - "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14", - "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4" - ], - "markers": "python_version ~= '3.7'", - "version": "==5.3.0" - }, - "certifi": { - "hashes": [ - "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", - "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" - ], - "markers": "python_version >= '3.6'", - "version": "==2023.5.7" - }, - "charset-normalizer": { - "hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - 
"sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", - "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - 
"sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.1.0" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - "cloudpickle": { - "hashes": [ - "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f", - "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5" - ], - "markers": "python_version >= '3.6'", - "version": "==2.2.1" - }, - "deprecated": { - "hashes": [ - "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d", - "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.13" - }, - "docstring-parser": { - "hashes": [ - "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682", - "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9" - ], - "markers": "python_version >= '3.6' and python_version < '4.0'", - "version": "==0.15" - }, - "fire": { - "hashes": [ - "sha256:a6b0d49e98c8963910021f92bba66f65ab440da2982b78eb1bbf95a0a34aacc6" - ], - "version": "==0.5.0" - }, - "google-api-core": { - "hashes": [ - "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22", - "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e" - ], - "markers": "python_version >= '3.7'", - "version": "==2.11.0" - }, - "google-api-python-client": { - "hashes": [ 
- "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9", - "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.12.11" - }, - "google-auth": { - "hashes": [ - "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc", - "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==2.17.3" - }, - "google-auth-httplib2": { - "hashes": [ - "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10", - "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac" - ], - "version": "==0.1.0" - }, - "google-cloud-core": { - "hashes": [ - "sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe", - "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.2" - }, - "google-cloud-storage": { - "hashes": [ - "sha256:83a90447f23d5edd045e0037982c270302e3aeb45fc1288d2c2ca713d27bad94", - "sha256:9b6ae7b509fc294bdacb84d0f3ea8e20e2c54a8b4bbe39c5707635fec214eff3" - ], - "markers": "python_version >= '3.7'", - "version": "==2.9.0" - }, - "google-crc32c": { - "hashes": [ - "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", - "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", - "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", - "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", - "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", - "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", - "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", - "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", - "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", - "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", - "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", - "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", - "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", - "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", - "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", - "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", - "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", - "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", - "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", - "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", - "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", - "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", - "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", - "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", - "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", - "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", - "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", - "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", - 
"sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", - "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", - "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", - "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", - "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", - "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", - "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", - "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", - "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", - "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", - "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", - "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", - "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", - "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", - "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", - "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", - "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", - "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", - "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", - "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", - "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", - "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", - "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", - "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", - "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", - "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", - "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", - "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", - "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", - "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", - "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", - "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", - "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", - "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", - "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", - "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", - "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", - "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", - "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", - "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.0" - }, - "google-resumable-media": { - "hashes": [ - "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93", - "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec" - ], - "markers": "python_version >= '3.7'", - "version": "==2.5.0" - }, - "googleapis-common-protos": { - "hashes": [ - "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44", - 
"sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f" - ], - "markers": "python_version >= '3.7'", - "version": "==1.59.0" - }, - "httplib2": { - "hashes": [ - "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", - "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.22.0" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "markers": "python_version >= '3.5'", - "version": "==3.4" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", - "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" - ], - "markers": "python_version < '3.8'", - "version": "==6.6.0" - }, - "importlib-resources": { - "hashes": [ - "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", - "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" - ], - "markers": "python_version < '3.9'", - "version": "==5.12.0" - }, - "jsonschema": { - "hashes": [ - "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d", - "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6" - ], - "markers": "python_version >= '3.7'", - "version": "==4.17.3" - }, - "kfp": { - "hashes": [ - "sha256:038d77ec9145ccfade95ab3b4b53c32668ae498fede06647ed0425d093819b1c" - ], - "index": "pypi", - "version": "==1.8.21" - }, - "kfp-pipeline-spec": { - "hashes": [ - "sha256:4cefae00ac50145cf862127202a8b8a783ed7504c773d7d7c517bd115283be25" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==0.1.16" - }, - "kfp-server-api": { - "hashes": [ - "sha256:482d71765ba57c003164dbb980a8cb1a18d234b578d064dc88dbeb3e4c7ab6de" - ], - "version": "==1.8.5" - }, - "kubernetes": { - "hashes": [ - "sha256:213befbb4e5aed95f94950c7eed0c2322fc5a2f8f40932e58d28fdd42d90836c", - "sha256:eb42333dad0bb5caf4e66460c6a4a1a36f0f057a040f35018f6c05a699baed86" - ], - "markers": "python_version >= '3.6'", - "version": "==25.3.0" - }, - "oauthlib": { - "hashes": [ - "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", - "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918" - ], - "markers": "python_version >= '3.6'", - "version": "==3.2.2" - }, - "pkgutil-resolve-name": { - "hashes": [ - "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174", - "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e" - ], - "markers": "python_version < '3.9'", - "version": "==1.3.10" - }, - "protobuf": { - "hashes": [ - "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7", - "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c", - "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2", - "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b", - "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050", - "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9", - "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7", - "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454", - "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480", - 
"sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469", - "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c", - "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e", - "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db", - "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905", - "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b", - "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86", - "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4", - "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402", - "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7", - "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4", - "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99", - "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee" - ], - "markers": "python_version >= '3.7'", - "version": "==3.20.3" - }, - "pyasn1": { - "hashes": [ - "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", - "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.5.0" - }, - "pyasn1-modules": { - "hashes": [ - "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c", - "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.3.0" - }, - "pydantic": { - "hashes": [ - "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e", - "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6", - "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd", - "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca", - "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b", - "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a", - "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245", - "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d", - "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee", - "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1", - "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3", - "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d", - "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5", - "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914", - "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd", - "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1", - "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e", - "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e", - "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a", - "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd", - "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f", - "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209", - "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d", - 
"sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a", - "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143", - "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918", - "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52", - "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e", - "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f", - "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e", - "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb", - "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe", - "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe", - "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d", - "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209", - "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af" - ], - "markers": "python_version >= '3.7'", - "version": "==1.10.7" - }, - "pyparsing": { - "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" - ], - "markers": "python_version >= '3.1'", - "version": "==3.0.9" - }, - "pyrsistent": { - "hashes": [ - "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8", - "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440", - "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a", - "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c", - "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3", - "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393", - "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9", - "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da", - "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf", - "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64", - "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a", - "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3", - "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98", - "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2", - "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8", - "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf", - "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc", - "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7", - "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28", - "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2", - "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b", - "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a", - "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64", - "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19", - "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1", - "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9", - "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.19.3" - }, - "python-dateutil": { - "hashes": [ - 
"sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.2" - }, - "pyyaml": { - "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", - "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", - "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", - "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==5.4.1" - }, - "requests": { - "hashes": [ - "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294", - "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4" - ], - "markers": "python_version >= '3.7'", - "version": "==2.30.0" - }, - "requests-oauthlib": { - "hashes": [ - "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5", - "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.3.1" - }, - "requests-toolbelt": { - "hashes": [ - "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", - "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.1" - }, - "rsa": { 
- "hashes": [ - "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", - "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" - ], - "markers": "python_version >= '3.6'", - "version": "==4.9" - }, - "setuptools": { - "hashes": [ - "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b", - "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990" - ], - "markers": "python_version >= '3.7'", - "version": "==67.7.2" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "strip-hints": { - "hashes": [ - "sha256:307c2bd147cd35997c8ed2e9a3bdca48ad9c9617e04ea46599095201b4ce998f" - ], - "version": "==0.1.10" - }, - "tabulate": { - "hashes": [ - "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", - "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f" - ], - "markers": "python_version >= '3.7'", - "version": "==0.9.0" - }, - "termcolor": { - "hashes": [ - "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475", - "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.0" - }, - "typer": { - "hashes": [ - "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2", - "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee" - ], - "markers": "python_version >= '3.6'", - "version": "==0.9.0" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version < '3.9'", - "version": "==4.5.0" - }, - "uritemplate": { - "hashes": [ - "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f", - "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==3.0.1" - }, - "urllib3": { - "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" - }, - "websocket-client": { - "hashes": [ - "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40", - "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.1" - }, - "wheel": { - "hashes": [ - "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873", - "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247" - ], - "markers": "python_version >= '3.7'", - "version": "==0.40.0" - }, - "wrapt": { - "hashes": [ - "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", - "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", - "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", - "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", - "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", - 
"sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", - "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", - "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", - "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", - "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", - "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", - "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", - "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", - "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", - "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", - "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", - "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", - "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", - "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", - "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", - "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", - "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", - "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", - "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", - "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", - "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", - "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", - "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", - "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", - "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", - "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", - "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", - "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", - "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", - "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", - "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", - "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", - "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", - "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", - "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", - "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", - "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", - "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", - "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", - "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", - "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", - "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", - "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", - "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", - "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", - "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", - 
"sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", - "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", - "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", - "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", - "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", - "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", - "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", - "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", - "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", - "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", - "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", - "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", - "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", - "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", - "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", - "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", - "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", - "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", - "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", - "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", - "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", - "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", - "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", - "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.15.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - }, - "develop": { - "absl-py": { - "hashes": [ - "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47", - "sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d" - ], - "markers": "python_version >= '3.6'", - "version": "==1.4.0" - }, - "attrs": { - "hashes": [ - "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", - "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1.0" - }, - "cachetools": { - "hashes": [ - "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14", - "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4" - ], - "markers": "python_version ~= '3.7'", - "version": "==5.3.0" - }, - "certifi": { - "hashes": [ - "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", - "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" - ], - "markers": "python_version >= '3.6'", - "version": "==2023.5.7" - }, - "cfgv": { - "hashes": [ - "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426", - "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736" - ], - "markers": "python_full_version >= '3.6.1'", - "version": "==3.3.1" - }, - "charset-normalizer": { - 
"hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", 
- "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.1.0" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - "cloudpickle": { - "hashes": [ - "sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f", - "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5" - ], - "markers": "python_version >= '3.6'", - "version": "==2.2.1" - }, - "deprecated": { - "hashes": [ - "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d", - "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.13" - }, - "distlib": { - "hashes": [ - "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46", - "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e" - ], - "version": "==0.3.6" - }, - "docstring-parser": { - "hashes": [ - "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682", - "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9" - ], - "markers": 
"python_version >= '3.6' and python_version < '4.0'", - "version": "==0.15" - }, - "exceptiongroup": { - "hashes": [ - "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" - }, - "filelock": { - "hashes": [ - "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9", - "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718" - ], - "markers": "python_version >= '3.7'", - "version": "==3.12.0" - }, - "fire": { - "hashes": [ - "sha256:a6b0d49e98c8963910021f92bba66f65ab440da2982b78eb1bbf95a0a34aacc6" - ], - "version": "==0.5.0" - }, - "google-api-core": { - "hashes": [ - "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22", - "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e" - ], - "markers": "python_version >= '3.7'", - "version": "==2.11.0" - }, - "google-api-python-client": { - "hashes": [ - "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9", - "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.12.11" - }, - "google-auth": { - "hashes": [ - "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc", - "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==2.17.3" - }, - "google-auth-httplib2": { - "hashes": [ - "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10", - "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac" - ], - "version": "==0.1.0" - }, - "google-cloud-aiplatform": { - "hashes": [ - "sha256:0ce9e97bf5c977397e52b3b7c4dc78610c135fbde11a60a6c0b77a4fdf776400", - "sha256:942765a6bad97e46e262dd6599dc5f171663ce952130e0b0b2eb97e0b1b98bfd" - ], - "index": "pypi", - "version": "==1.24.1" - }, - "google-cloud-bigquery": { - "hashes": [ - "sha256:4b02def076e2db8cec66f65fb627d13904a9fc3cf4fee315ede43dcb7038a8df", - "sha256:848a3cbce0ba7d4f1e9551400a7c99aa0eab72290d5a1bbbe69f18a24a10bd3a" - ], - "markers": "python_version >= '3.7'", - "version": "==3.10.0" - }, - "google-cloud-core": { - "hashes": [ - "sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe", - "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.2" - }, - "google-cloud-notebooks": { - "hashes": [ - "sha256:8fbffb7ba535fc02c61f135d8863324a5a2d20dd58cafaae592f0b0172d6bdab", - "sha256:dac73a5cd983a4344d1fa96f9a8e5849b0ff75d7a5fdde921023a2ef4566e75e" - ], - "markers": "python_version >= '3.7'", - "version": "==1.7.0" - }, - "google-cloud-pipeline-components": { - "hashes": [ - "sha256:bf833f325d1b4a89f1db9627d3f3e75d1bffc7b7725d4eb739488a68808fa623" - ], - "index": "pypi", - "version": "==1.0.42" - }, - "google-cloud-resource-manager": { - "hashes": [ - "sha256:26beb595b957972df50173f1d0fd51c00d280551eac73566017ebdda62b1616a", - "sha256:bfc3e60eb92e25ac562a9248bb8fc17e9bef04c3dc9f031ffbe0dfe28d919287" - ], - "markers": "python_version >= '3.7'", - "version": "==1.10.0" - }, - "google-cloud-storage": { - "hashes": [ - "sha256:83a90447f23d5edd045e0037982c270302e3aeb45fc1288d2c2ca713d27bad94", - 
"sha256:9b6ae7b509fc294bdacb84d0f3ea8e20e2c54a8b4bbe39c5707635fec214eff3" - ], - "markers": "python_version >= '3.7'", - "version": "==2.9.0" - }, - "google-crc32c": { - "hashes": [ - "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", - "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", - "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", - "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", - "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", - "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", - "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", - "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", - "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", - "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", - "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", - "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", - "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", - "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", - "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", - "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", - "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", - "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", - "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", - "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", - "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", - "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", - "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", - "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", - "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", - "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", - "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", - "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", - "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", - "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", - "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", - "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", - "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", - "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", - "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", - "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", - "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", - "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", - "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", - "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", - "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", - "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", - "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", - 
"sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", - "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", - "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", - "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", - "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", - "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", - "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", - "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", - "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", - "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", - "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", - "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", - "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", - "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", - "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", - "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", - "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", - "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", - "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", - "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", - "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", - "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", - "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", - "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", - "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.0" - }, - "google-resumable-media": { - "hashes": [ - "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93", - "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec" - ], - "markers": "python_version >= '3.7'", - "version": "==2.5.0" - }, - "googleapis-common-protos": { - "hashes": [ - "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44", - "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f" - ], - "markers": "python_version >= '3.7'", - "version": "==1.59.0" - }, - "grpc-google-iam-v1": { - "hashes": [ - "sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f", - "sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.12.6" - }, - "grpcio": { - "hashes": [ - "sha256:02000b005bc8b72ff50c477b6431e8886b29961159e8b8d03c00b3dd9139baed", - "sha256:031bbd26656e0739e4b2c81c172155fb26e274b8d0312d67aefc730bcba915b6", - "sha256:1209d6b002b26e939e4c8ea37a3d5b4028eb9555394ea69fb1adbd4b61a10bb8", - "sha256:125ed35aa3868efa82eabffece6264bf638cfdc9f0cd58ddb17936684aafd0f8", - "sha256:1382bc499af92901c2240c4d540c74eae8a671e4fe9839bfeefdfcc3a106b5e2", - "sha256:16bca8092dd994f2864fdab278ae052fad4913f36f35238b2dd11af2d55a87db", - "sha256:1c59d899ee7160638613a452f9a4931de22623e7ba17897d8e3e348c2e9d8d0b", - "sha256:1d109df30641d050e009105f9c9ca5a35d01e34d2ee2a4e9c0984d392fd6d704", - "sha256:1fa7d6ddd33abbd3c8b3d7d07c56c40ea3d1891ce3cd2aa9fa73105ed5331866", - 
"sha256:21c4a1aae861748d6393a3ff7867473996c139a77f90326d9f4104bebb22d8b8", - "sha256:224166f06ccdaf884bf35690bf4272997c1405de3035d61384ccb5b25a4c1ca8", - "sha256:2262bd3512ba9e9f0e91d287393df6f33c18999317de45629b7bd46c40f16ba9", - "sha256:2585b3c294631a39b33f9f967a59b0fad23b1a71a212eba6bc1e3ca6e6eec9ee", - "sha256:27fb030a4589d2536daec5ff5ba2a128f4f155149efab578fe2de2cb21596d3d", - "sha256:30fbbce11ffeb4f9f91c13fe04899aaf3e9a81708bedf267bf447596b95df26b", - "sha256:3930669c9e6f08a2eed824738c3d5699d11cd47a0ecc13b68ed11595710b1133", - "sha256:3b170e441e91e4f321e46d3cc95a01cb307a4596da54aca59eb78ab0fc03754d", - "sha256:3db71c6f1ab688d8dfc102271cedc9828beac335a3a4372ec54b8bf11b43fd29", - "sha256:48cb7af77238ba16c77879009003f6b22c23425e5ee59cb2c4c103ec040638a5", - "sha256:49eace8ea55fbc42c733defbda1e4feb6d3844ecd875b01bb8b923709e0f5ec8", - "sha256:533eaf5b2a79a3c6f35cbd6a095ae99cac7f4f9c0e08bdcf86c130efd3c32adf", - "sha256:5942a3e05630e1ef5b7b5752e5da6582460a2e4431dae603de89fc45f9ec5aa9", - "sha256:62117486460c83acd3b5d85c12edd5fe20a374630475388cfc89829831d3eb79", - "sha256:650f5f2c9ab1275b4006707411bb6d6bc927886874a287661c3c6f332d4c068b", - "sha256:6dc1e2c9ac292c9a484ef900c568ccb2d6b4dfe26dfa0163d5bc815bb836c78d", - "sha256:73c238ef6e4b64272df7eec976bb016c73d3ab5a6c7e9cd906ab700523d312f3", - "sha256:775a2f70501370e5ba54e1ee3464413bff9bd85bd9a0b25c989698c44a6fb52f", - "sha256:860fcd6db7dce80d0a673a1cc898ce6bc3d4783d195bbe0e911bf8a62c93ff3f", - "sha256:87f47bf9520bba4083d65ab911f8f4c0ac3efa8241993edd74c8dd08ae87552f", - "sha256:960b176e0bb2b4afeaa1cd2002db1e82ae54c9b6e27ea93570a42316524e77cf", - "sha256:a7caf553ccaf715ec05b28c9b2ab2ee3fdb4036626d779aa09cf7cbf54b71445", - "sha256:a947d5298a0bbdd4d15671024bf33e2b7da79a70de600ed29ba7e0fef0539ebb", - "sha256:a97b0d01ae595c997c1d9d8249e2d2da829c2d8a4bdc29bb8f76c11a94915c9a", - "sha256:b7655f809e3420f80ce3bf89737169a9dce73238af594049754a1128132c0da4", - "sha256:c33744d0d1a7322da445c0fe726ea6d4e3ef2dfb0539eadf23dce366f52f546c", - "sha256:c55a9cf5cba80fb88c850915c865b8ed78d5e46e1f2ec1b27692f3eaaf0dca7e", - "sha256:d2f62fb1c914a038921677cfa536d645cb80e3dd07dc4859a3c92d75407b90a5", - "sha256:d8ae6e0df3a608e99ee1acafaafd7db0830106394d54571c1ece57f650124ce9", - "sha256:e355ee9da9c1c03f174efea59292b17a95e0b7b4d7d2a389265f731a9887d5a9", - "sha256:e3e526062c690517b42bba66ffe38aaf8bc99a180a78212e7b22baa86902f690", - "sha256:eb0807323572642ab73fd86fe53d88d843ce617dd1ddf430351ad0759809a0ae", - "sha256:ebff0738be0499d7db74d20dca9f22a7b27deae31e1bf92ea44924fd69eb6251", - "sha256:ed36e854449ff6c2f8ee145f94851fe171298e1e793f44d4f672c4a0d78064e7", - "sha256:ed3d458ded32ff3a58f157b60cc140c88f7ac8c506a1c567b2a9ee8a2fd2ce54", - "sha256:f4a7dca8ccd8023d916b900aa3c626f1bd181bd5b70159479b142f957ff420e4" - ], - "version": "==1.54.0" - }, - "grpcio-status": { - "hashes": [ - "sha256:2154fdb8aad20452488712be6879657b508115ca06139fde8897ea8e9bc79367", - "sha256:c9ce3213e84c6fd8801c31aca3ea4a6b3453eaa40b93a6c0a23ea8999808fa00" - ], - "markers": "python_version >= '3.6'", - "version": "==1.47.0" - }, - "httplib2": { - "hashes": [ - "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", - "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.22.0" - }, - "identify": { - "hashes": [ - "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4", - "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d" - 
], - "markers": "python_version >= '3.7'", - "version": "==2.5.24" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "markers": "python_version >= '3.5'", - "version": "==3.4" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", - "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" - ], - "markers": "python_version < '3.8'", - "version": "==6.6.0" - }, - "importlib-resources": { - "hashes": [ - "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", - "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" - ], - "markers": "python_version < '3.9'", - "version": "==5.12.0" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "jsonschema": { - "hashes": [ - "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d", - "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6" - ], - "markers": "python_version >= '3.7'", - "version": "==4.17.3" - }, - "kfp": { - "hashes": [ - "sha256:038d77ec9145ccfade95ab3b4b53c32668ae498fede06647ed0425d093819b1c" - ], - "index": "pypi", - "version": "==1.8.21" - }, - "kfp-pipeline-spec": { - "hashes": [ - "sha256:4cefae00ac50145cf862127202a8b8a783ed7504c773d7d7c517bd115283be25" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==0.1.16" - }, - "kfp-server-api": { - "hashes": [ - "sha256:482d71765ba57c003164dbb980a8cb1a18d234b578d064dc88dbeb3e4c7ab6de" - ], - "version": "==1.8.5" - }, - "kubernetes": { - "hashes": [ - "sha256:213befbb4e5aed95f94950c7eed0c2322fc5a2f8f40932e58d28fdd42d90836c", - "sha256:eb42333dad0bb5caf4e66460c6a4a1a36f0f057a040f35018f6c05a699baed86" - ], - "markers": "python_version >= '3.6'", - "version": "==25.3.0" - }, - "nodeenv": { - "hashes": [ - "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e", - "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", - "version": "==1.7.0" - }, - "oauthlib": { - "hashes": [ - "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", - "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918" - ], - "markers": "python_version >= '3.6'", - "version": "==3.2.2" - }, - "packaging": { - "hashes": [ - "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", - "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1" - }, - "pkgutil-resolve-name": { - "hashes": [ - "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174", - "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e" - ], - "markers": "python_version < '3.9'", - "version": "==1.3.10" - }, - "platformdirs": { - "hashes": [ - "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4", - "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335" - ], - "markers": "python_version >= '3.7'", - "version": "==3.5.0" - }, - "pluggy": { - "hashes": [ - 
"sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" - }, - "pre-commit": { - "hashes": [ - "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658", - "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad" - ], - "index": "pypi", - "version": "==2.21.0" - }, - "proto-plus": { - "hashes": [ - "sha256:0e8cda3d5a634d9895b75c573c9352c16486cb75deb0e078b5fda34db4243165", - "sha256:de34e52d6c9c6fcd704192f09767cb561bb4ee64e70eede20b0834d841f0be4d" - ], - "markers": "python_version >= '3.6'", - "version": "==1.22.2" - }, - "protobuf": { - "hashes": [ - "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7", - "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c", - "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2", - "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b", - "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050", - "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9", - "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7", - "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454", - "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480", - "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469", - "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c", - "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e", - "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db", - "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905", - "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b", - "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86", - "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4", - "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402", - "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7", - "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4", - "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99", - "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee" - ], - "markers": "python_version >= '3.7'", - "version": "==3.20.3" - }, - "pyasn1": { - "hashes": [ - "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", - "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.5.0" - }, - "pyasn1-modules": { - "hashes": [ - "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c", - "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.3.0" - }, - "pydantic": { - "hashes": [ - "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e", - "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6", - "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd", - "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca", - 
"sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b", - "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a", - "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245", - "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d", - "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee", - "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1", - "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3", - "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d", - "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5", - "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914", - "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd", - "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1", - "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e", - "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e", - "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a", - "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd", - "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f", - "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209", - "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d", - "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a", - "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143", - "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918", - "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52", - "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e", - "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f", - "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e", - "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb", - "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe", - "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe", - "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d", - "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209", - "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af" - ], - "markers": "python_version >= '3.7'", - "version": "==1.10.7" - }, - "pyparsing": { - "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" - ], - "markers": "python_version >= '3.1'", - "version": "==3.0.9" - }, - "pyrsistent": { - "hashes": [ - "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8", - "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440", - "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a", - "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c", - "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3", - "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393", - "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9", - "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da", - "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf", - 
"sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64", - "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a", - "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3", - "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98", - "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2", - "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8", - "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf", - "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc", - "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7", - "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28", - "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2", - "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b", - "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a", - "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64", - "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19", - "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1", - "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9", - "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.19.3" - }, - "pytest": { - "hashes": [ - "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", - "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" - ], - "index": "pypi", - "version": "==7.3.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.2" - }, - "pyyaml": { - "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - 
"sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", - "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", - "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", - "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==5.4.1" - }, - "requests": { - "hashes": [ - "sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294", - "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4" - ], - "markers": "python_version >= '3.7'", - "version": "==2.30.0" - }, - "requests-oauthlib": { - "hashes": [ - "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5", - "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.3.1" - }, - "requests-toolbelt": { - "hashes": [ - "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", - "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.1" - }, - "rsa": { - "hashes": [ - "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", - "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" - ], - "markers": "python_version >= '3.6'", - "version": "==4.9" - }, - "setuptools": { - "hashes": [ - "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b", - "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990" - ], - "markers": "python_version >= '3.7'", - "version": "==67.7.2" - }, - "shapely": { - "hashes": [ - "sha256:02dd5d7dc6e46515d88874134dc8fcdc65826bca93c3eecee59d1910c42c1b17", - "sha256:0b4ee3132ee90f07d63db3aea316c4c065ed7a26231458dda0874414a09d6ba3", - "sha256:0d885cb0cf670c1c834df3f371de8726efdf711f18e2a75da5cfa82843a7ab65", - "sha256:147066da0be41b147a61f8eb805dea3b13709dbc873a431ccd7306e24d712bc0", - "sha256:21776184516a16bf82a0c3d6d6a312b3cd15a4cabafc61ee01cf2714a82e8396", - "sha256:2e0a8c2e55f1be1312b51c92b06462ea89e6bb703fab4b114e7a846d941cfc40", - "sha256:2fd15397638df291c427a53d641d3e6fd60458128029c8c4f487190473a69a91", - "sha256:3480657460e939f45a7d359ef0e172a081f249312557fe9aa78c4fd3a362d993", - "sha256:370b574c78dc5af3a198a6da5d9b3d7c04654bd2ef7e80e80a3a0992dfb2d9cd", - "sha256:38f0fbbcb8ca20c16451c966c1f527cc43968e121c8a048af19ed3e339a921cd", - "sha256:4728666fff8cccc65a07448cae72c75a8773fea061c3f4f139c44adc429b18c3", - "sha256:48dcfffb9e225c0481120f4bdf622131c8c95f342b00b158cdbe220edbbe20b6", - "sha256:4b47bb6f9369e8bf3e6dbd33e6a25a47ee02b2874792a529fe04a49bf8bc0df6", - "sha256:532a55ee2a6c52d23d6f7d1567c8f0473635f3b270262c44e1b0c88096827e22", - "sha256:5d7f85c2d35d39ff53c9216bc76b7641c52326f7e09aaad1789a3611a0f812f2", - "sha256:65b21243d8f6bcd421210daf1fabb9de84de2c04353c5b026173b88d17c1a581", - 
"sha256:66bdac74fbd1d3458fa787191a90fa0ae610f09e2a5ec398c36f968cc0ed743f", - "sha256:6d388c0c1bd878ed1af4583695690aa52234b02ed35f93a1c8486ff52a555838", - "sha256:6fe855e7d45685926b6ba00aaeb5eba5862611f7465775dacd527e081a8ced6d", - "sha256:753ed0e21ab108bd4282405b9b659f2e985e8502b1a72b978eaa51d3496dee19", - "sha256:783bad5f48e2708a0e2f695a34ed382e4162c795cb2f0368b39528ac1d6db7ed", - "sha256:78fb9d929b8ee15cfd424b6c10879ce1907f24e05fb83310fc47d2cd27088e40", - "sha256:84010db15eb364a52b74ea8804ef92a6a930dfc1981d17a369444b6ddec66efd", - "sha256:89164e7a9776a19e29f01369a98529321994e2e4d852b92b7e01d4d9804c55bf", - "sha256:8d086591f744be483b34628b391d741e46f2645fe37594319e0a673cc2c26bcf", - "sha256:8e59817b0fe63d34baedaabba8c393c0090f061917d18fc0bcc2f621937a8f73", - "sha256:99a2f0da0109e81e0c101a2b4cd8412f73f5f299e7b5b2deaf64cd2a100ac118", - "sha256:99ab0ddc05e44acabdbe657c599fdb9b2d82e86c5493bdae216c0c4018a82dee", - "sha256:a23ef3882d6aa203dd3623a3d55d698f59bfbd9f8a3bfed52c2da05a7f0f8640", - "sha256:a354199219c8d836f280b88f2c5102c81bb044ccea45bd361dc38a79f3873714", - "sha256:a74631e511153366c6dbe3229fa93f877e3c87ea8369cd00f1d38c76b0ed9ace", - "sha256:ab38f7b5196ace05725e407cb8cab9ff66edb8e6f7bb36a398e8f73f52a7aaa2", - "sha256:adcf8a11b98af9375e32bff91de184f33a68dc48b9cb9becad4f132fa25cfa3c", - "sha256:b65f5d530ba91e49ffc7c589255e878d2506a8b96ffce69d3b7c4500a9a9eaf8", - "sha256:be9423d5a3577ac2e92c7e758bd8a2b205f5e51a012177a590bc46fc51eb4834", - "sha256:c2822111ddc5bcfb116e6c663e403579d0fe3f147d2a97426011a191c43a7458", - "sha256:c6a9a4a31cd6e86d0fbe8473ceed83d4fe760b19d949fb557ef668defafea0f6", - "sha256:d048f93e42ba578b82758c15d8ae037d08e69d91d9872bca5a1895b118f4e2b0", - "sha256:d8a2b2a65fa7f97115c1cd989fe9d6f39281ca2a8a014f1d4904c1a6e34d7f25", - "sha256:e9c30b311de2513555ab02464ebb76115d242842b29c412f5a9aa0cac57be9f6", - "sha256:ec14ceca36f67cb48b34d02d7f65a9acae15cd72b48e303531893ba4a960f3ea", - "sha256:ef3be705c3eac282a28058e6c6e5503419b250f482320df2172abcbea642c831" - ], - "markers": "python_version >= '3.6'", - "version": "==1.8.5.post1" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "strip-hints": { - "hashes": [ - "sha256:307c2bd147cd35997c8ed2e9a3bdca48ad9c9617e04ea46599095201b4ce998f" - ], - "version": "==0.1.10" - }, - "tabulate": { - "hashes": [ - "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", - "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f" - ], - "markers": "python_version >= '3.7'", - "version": "==0.9.0" - }, - "termcolor": { - "hashes": [ - "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475", - "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.0" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" - }, - "typer": { - "hashes": [ - "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2", - "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee" - ], - "markers": "python_version >= '3.6'", - "version": "==0.9.0" - }, - 
"typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version < '3.9'", - "version": "==4.5.0" - }, - "uritemplate": { - "hashes": [ - "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f", - "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==3.0.1" - }, - "urllib3": { - "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" - }, - "virtualenv": { - "hashes": [ - "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e", - "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924" - ], - "markers": "python_version >= '3.7'", - "version": "==20.23.0" - }, - "websocket-client": { - "hashes": [ - "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40", - "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.1" - }, - "wheel": { - "hashes": [ - "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873", - "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247" - ], - "markers": "python_version >= '3.7'", - "version": "==0.40.0" - }, - "wrapt": { - "hashes": [ - "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", - "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", - "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", - "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", - "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", - "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", - "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", - "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", - "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", - "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", - "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", - "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", - "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", - "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", - "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", - "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", - "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", - "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", - "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", - "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", - "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", - "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", - "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", - "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", - 
"sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", - "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", - "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", - "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", - "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", - "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", - "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", - "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", - "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", - "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", - "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", - "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", - "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", - "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", - "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", - "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", - "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", - "sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", - "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", - "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", - "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", - "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", - "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", - "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", - "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", - "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", - "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", - "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", - "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", - "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", - "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", - "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", - "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", - "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", - "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", - "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", - "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", - "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", - "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", - "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", - "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", - "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", - "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", - "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", - "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", - "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", - 
"sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", - "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", - "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", - "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", - "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.15.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - } -} diff --git a/components/vertex-components/README.md b/components/vertex-components/README.md deleted file mode 100644 index bd674ccc..00000000 --- a/components/vertex-components/README.md +++ /dev/null @@ -1,28 +0,0 @@ - - -# Vertex Components - -A python package which provides common KubeFlow components for interacting with Vertex AI. -Currently, the following components are implemented: - -- `custom_train_job`: Train a model in a [Custom Training Job](https://cloud.google.com/vertex-ai/docs/training/create-custom-job). -- `import_model_evaluation`: Import model evaluation results to a model in the model registry. -- `lookup_model`: Look up a model which was previously uploaded to the model registry. -- `model_batch_predict`: Run a [Batch Prediction Job](https://cloud.google.com/ai-platform/prediction/docs/batch-predict). -- `update_best_model`: Using two model evaluations and a comparison metric, update the better model to the default model. - -These components either augment, extend, or add new functionalities that aren't found in [Google Cloud Pipeline Components list](https://cloud.google.com/vertex-ai/docs/pipelines/gcpc-list). 
diff --git a/components/vertex-components/pyproject.toml b/components/vertex-components/pyproject.toml deleted file mode 100644 index bc16a237..00000000 --- a/components/vertex-components/pyproject.toml +++ /dev/null @@ -1,47 +0,0 @@ -[project] -name = "vertex-components" -authors = [ - {name = "Example User", email = "user@example.com"}, -] -description = "Vertex AI components" -readme = "README.md" -classifiers = [ - "Development Status :: 3 - Alpha", - "Intended Audience :: Developers", - "Programming Language :: Python :: 3.7", -] -requires-python = ">=3.7" -dynamic = ["version"] -dependencies = [ - "kfp == 1.8.21", -] - -[project.optional-dependencies] -tests = [ - "google-cloud-aiplatform == 1.24.1", - "google-cloud-pipeline-components == 1.0.42", - "pytest >= 7.3.1,<8.0.0", - "pre-commit >= 2.14.1,<3.0.0", -] - -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta:__legacy__" - -[tool.flake8] -max-line-length = 88 -per-file-ignores = [ - "E203", - "F841" -] -ignore = [ - "E203", - "F841", -] - -[tool.pytest.ini_options] -pythonpath = [ - "src" -] -testpaths = "tests" -junit_family = "xunit2" diff --git a/components/vertex-components/src/vertex_components/__init__.py b/components/vertex-components/src/vertex_components/__init__.py deleted file mode 100644 index 22ca7d04..00000000 --- a/components/vertex-components/src/vertex_components/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -from .custom_train_job import custom_train_job -from .import_model_evaluation import import_model_evaluation -from .lookup_model import lookup_model -from .model_batch_predict import model_batch_predict -from .update_best_model import update_best_model - - -__version__ = "0.0.1" -__all__ = [ - "custom_train_job", - "import_model_evaluation", - "lookup_model", - "model_batch_predict", - "update_best_model", -] diff --git a/components/vertex-components/src/vertex_components/custom_train_job.py b/components/vertex-components/src/vertex_components/custom_train_job.py deleted file mode 100644 index 0a2773bd..00000000 --- a/components/vertex-components/src/vertex_components/custom_train_job.py +++ /dev/null @@ -1,145 +0,0 @@ -# Copyright 2022 Google LLC -from typing import List, Dict - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from kfp.v2.dsl import Input, component, Metrics, Output, Artifact, Dataset - - -@component( - base_image="python:3.7", - packages_to_install=["google-cloud-aiplatform==1.24.1"], -) -def custom_train_job( - train_script_uri: str, - train_data: Input[Dataset], - valid_data: Input[Dataset], - test_data: Input[Dataset], - project_id: str, - project_location: str, - model_display_name: str, - train_container_uri: str, - serving_container_uri: str, - model: Output[Artifact], - metrics: Output[Metrics], - staging_bucket: str, - requirements: List[str] = None, - job_name: str = None, - hparams: Dict[str, str] = None, - replica_count: int = 1, - machine_type: str = "n1-standard-4", - accelerator_type: str = "ACCELERATOR_TYPE_UNSPECIFIED", - accelerator_count: int = 0, - parent_model: str = None, -): - """Run a custom training job using a training script. - - The provided script will be invoked by passing the following command-line arguments: - - ``` - train.py \ - --train_data \ - --valid_data \ - --test_data \ - --metrics \ - --hparams json.dumps() - ``` - - Ensure that your train script can read these arguments and outputs metrics - to the provided path and the model to the correct path based on: - https://cloud.google.com/vertex-ai/docs/training/code-requirements. - - Args: - train_script_uri (str): gs:// uri to python train script. See: - https://cloud.google.com/vertex-ai/docs/training/code-requirements. - train_data (Dataset): Training data (passed as an argument to train script) - valid_data (Dataset): Validation data (passed as an argument to train script) - test_data (Dataset): Test data (passed as an argument to train script). - staging_bucket (str): Staging bucket for CustomTrainingJob. - project_location (str): location of the Google Cloud project. - project_id (str): project id of the Google Cloud project. - model_display_name (str): Name of the new trained model version. - train_container_uri (str): Container URI for running train script. - serving_container_uri (str): Container URI for deploying the output model. - model (Model): Trained model output. - metrics (Metrics): Output metrics of trained model. - requirements (List[str]): Additional python dependencies for training script. - job_name (str): Name of training job. - hparams (Dict[str, str]): Hyperparameters (passed as a JSON serialised argument - to train script) - replica_count (int): Number of replicas (increase for distributed training). - machine_type (str): Machine type of compute. - accelerator_type (str): Accelerator type (change for GPU support). - accelerator_count (str): Accelerator count (increase for GPU cores). - parent_model (str): Resource URI of existing parent model (optional). If `None`, - a new model will be uploaded. Otherwise, a new model version for the parent - model will be uploaded. - Returns: - parent_model (str): Resource URI of the parent model (empty string if the - trained model is the first model version of its kind). - """ - import json - import logging - import os.path - import time - import google.cloud.aiplatform as aip - - logging.info(f"Using train script: {train_script_uri}") - script_path = "/gcs/" + train_script_uri[5:] - if not os.path.exists(script_path): - raise ValueError( - "Train script was not found. 
" - f"Check if the path is correct: {train_script_uri}" - ) - - job = aip.CustomTrainingJob( - project=project_id, - location=project_location, - staging_bucket=staging_bucket, - display_name=job_name if job_name else f"Custom job {int(time.time())}", - script_path=script_path, - container_uri=train_container_uri, - requirements=requirements, - model_serving_container_image_uri=serving_container_uri, - ) - cmd_args = [ - f"--train_data={train_data.path}", - f"--valid_data={valid_data.path}", - f"--test_data={test_data.path}", - f"--metrics={metrics.path}", - f"--hparams={json.dumps(hparams if hparams else {})}", - ] - uploaded_model = job.run( - model_display_name=model_display_name, - parent_model=parent_model, - is_default_version=(not parent_model), - args=cmd_args, - replica_count=replica_count, - machine_type=machine_type, - accelerator_type=accelerator_type, - accelerator_count=accelerator_count, - ) - - resource_name = f"{uploaded_model.resource_name}@{uploaded_model.version_id}" - model.metadata["resourceName"] = resource_name - model.metadata["containerSpec"] = {"imageUri": serving_container_uri} - model.uri = uploaded_model.uri - model.TYPE_NAME = "google.VertexModel" - - with open(metrics.path, "r") as fp: - parsed_metrics = json.load(fp) - - logging.info(parsed_metrics) - for k, v in parsed_metrics.items(): - if type(v) is float: - metrics.log_metric(k, v) diff --git a/components/vertex-components/src/vertex_components/import_model_evaluation.py b/components/vertex-components/src/vertex_components/import_model_evaluation.py deleted file mode 100644 index b7a2913e..00000000 --- a/components/vertex-components/src/vertex_components/import_model_evaluation.py +++ /dev/null @@ -1,86 +0,0 @@ -# Copyright 2022 Google LLC - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp.v2.dsl import Input, Model, Metrics, component, Dataset -from typing import NamedTuple - - -@component( - base_image="python:3.7", - packages_to_install=[ - "google-cloud-aiplatform==1.24.1", - ], -) -def import_model_evaluation( - model: Input[Model], - metrics: Input[Metrics], - test_dataset: Input[Dataset], - pipeline_job_id: str, - project_location: str, - evaluation_name: str = "Imported evaluation", -) -> NamedTuple("Outputs", [("model_evaluation", str)]): - """Import an evaluation result for a model version. - - Args: - model (Model): Input model version. - metrics (Metrics): Input metrics. The contents of the artifact are expected - to follow the evaluation schema linked in the docs: - https://cloud.google.com/vertex-ai/docs/evaluation/introduction#features. - test_dataset (Dataset): Input test dataset which will be linked to evaluation. - pipeline_job_id (str): Pipeline job id which will be linked to evaluation. - project_location (str): Location of the Google Cloud project. - evaluation_name (str): Display name of model evaluation. - Returns: - model_evaluation (str): Resource URI of imported model evaluation. 
- """ - import json - import logging - from google.cloud.aiplatform_v1 import ModelEvaluation, ModelServiceClient - from google.protobuf.json_format import ParseDict - - logging.info(f"Read metrics from: {metrics.path}") - with open(metrics.path) as fp: - parsed_metrics = json.load(fp) - logging.info(f"Parsed metrics: {parsed_metrics}") - - schema_template = ( - "gs://google-cloud-aiplatform/schema/modelevaluation/%s_metrics_1.0.0.yaml" - ) - schema = schema_template % parsed_metrics.pop("problemType") - evaluation = { - "displayName": evaluation_name, - "metricsSchemaUri": schema, - "metrics": parsed_metrics, - "metadata": { - "pipeline_job_id": pipeline_job_id, - "evaluation_dataset_type": "gcs", - "evaluation_dataset_path": [test_dataset.uri], - }, - } - - request = ParseDict(evaluation, ModelEvaluation()._pb) - logging.info(f"Request: {request}") - - model_name = model.metadata["resourceName"] - logging.info(model_name) - - client = ModelServiceClient( - client_options={"api_endpoint": project_location + "-aiplatform.googleapis.com"} - ) - response = client.import_model_evaluation( - parent=model_name, - model_evaluation=request, - ) - logging.info(f"Response: {response}") - return (response.name,) diff --git a/components/vertex-components/src/vertex_components/update_best_model.py b/components/vertex-components/src/vertex_components/update_best_model.py deleted file mode 100644 index eeb45461..00000000 --- a/components/vertex-components/src/vertex_components/update_best_model.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright 2022 Google LLC -from typing import NamedTuple - -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at - -# https://www.apache.org/licenses/LICENSE-2.0 - -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from kfp.v2.dsl import Input, Model, component - - -@component( - base_image="python:3.7", - packages_to_install=["google-cloud-aiplatform==1.24.1"], -) -def update_best_model( - challenger: Input[Model], - challenger_evaluation: str, - parent_model: str, - project_id: str, - project_location: str, - eval_metric: str, - eval_lower_is_better: bool, - model_alias: str = "default", -) -> NamedTuple("Outputs", [("challenger_wins", bool)]): - """ - Args: - challenger (Model): Challenger model. - challenger_evaluation (str): Resource URI of challenger model evaluation e.g. - `projects/.../locations/.../models/.../evaluations/...` - parent_model (str): Resource URI of parent model. - eval_metric (str): Metric to compare champion and challenger on. - eval_lower_is_better (bool): Usually True for losses and - False for classification metrics. - project_id (str): project id of the Google Cloud project. - project_location (str): location of the Google Cloud project. - model_alias (str): alias of the parent model. 
- """ - - import logging - import google.cloud.aiplatform as aip - from google.cloud.aiplatform.models import ModelRegistry - from google.protobuf.json_format import MessageToDict - - logging.info("Get models...") - if model_alias: - parent_model += "@" + model_alias - champion = aip.Model(parent_model) - challenger = aip.Model(challenger.metadata["resourceName"]) - logging.info( - f"Model default version {champion.version_id} " - f"is being challenged by version {challenger.version_id}!" - ) - - eval_champion = challenger.get_model_evaluation() - eval_challenger = aip.model_evaluation.ModelEvaluation(challenger_evaluation) - metrics_champion = MessageToDict(eval_champion._gca_resource._pb)["metrics"] - metrics_challenger = MessageToDict(eval_challenger._gca_resource._pb)["metrics"] - - logging.info(f"Comparing {eval_metric} of models") - logging.debug(f"Champion metrics: {metrics_champion}") - logging.debug(f"Challenger metrics: {metrics_challenger}") - - m_champ = metrics_champion[eval_metric] - m_chall = metrics_challenger[eval_metric] - logging.info(f"Champion={m_champ} Challenger={m_chall}") - - challenger_wins = ( - (m_chall < m_champ) if eval_lower_is_better else (m_chall > m_champ) - ) - if challenger_wins: - logging.info(f"Updating champion to version: {challenger.version_id}") - model_registry = ModelRegistry( - model=champion, project=project_id, location=project_location - ) - model_registry.add_version_aliases(["default"], challenger.version_id) - return (True,) - - logging.info(f"Keeping current champion!") - return (False,) diff --git a/components/vertex-components/tests/conftest.py b/components/vertex-components/tests/conftest.py deleted file mode 100644 index 898c2590..00000000 --- a/components/vertex-components/tests/conftest.py +++ /dev/null @@ -1,50 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import pytest -import kfp.v2.dsl - - -@pytest.fixture(autouse=True) -def mock_kfp_artifact(monkeypatch): - """ - This fixture runs once after all tests are collected. It mocks the Artifact object - (and thus any derived classes such as Dataset, Model, etc.) to return the URI as - the path. - - Unit tests set the URI of artifacts, however, KFP components use Artifact.path to - retrieve paths to files. If a URI doesn't start with gs:// or minio:// or s3://, - the path with be None. This behaviour is avoided by mocking the Artifact._get_path - method. - - Args: - monkeypatch: Used to patch the decorator `@component` in `kfp.v2.dsl`. - This prevents KFP from changing the Python functions when applying - pytests. 
- - Returns: - None - - """ - - def _get_path(self): - """ - Returns: - str: The URI path of the Artifact - """ - # simply return the URI - return self.uri - - # mock the _get_path method of Artifact which is used by the property path - monkeypatch.setattr(kfp.v2.dsl.Artifact, "_get_path", _get_path) diff --git a/components/vertex-components/tests/test_lookup_model.py b/components/vertex-components/tests/test_lookup_model.py deleted file mode 100644 index 1ba31f81..00000000 --- a/components/vertex-components/tests/test_lookup_model.py +++ /dev/null @@ -1,114 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import google.cloud.aiplatform # noqa -from kfp.v2.dsl import Model -from unittest import mock -import pytest - -import vertex_components - -lookup_model = vertex_components.lookup_model.python_func - - -def test_lookup_model(tmpdir): - """ - Assert lookup_model produces expected resource name, and that list method is - called with the correct arguments - - Args: - tmpdir: built-in pytest tmpdir fixture - - Returns: - None - """ - with mock.patch("google.cloud.aiplatform.Model") as mock_model: - - # Mock attribute and method - - mock_path = tmpdir - mock_model.resource_name = "my-model-resource-name" - mock_model.uri = mock_path - mock_model.list.return_value = [mock_model] - - # Invoke the model look up - found_model_resource_name, _ = lookup_model( - model_name="my-model", - project_location="europe-west4", - project_id="my-project-id", - order_models_by="create_time desc", - fail_on_model_not_found=False, - model=Model(uri=mock_path), - ) - - assert found_model_resource_name == "my-model-resource-name" - - # Check the list method was called once with the correct arguments - mock_model.list.assert_called_once_with( - filter='display_name="my-model"', - order_by="create_time desc", - location="europe-west4", - project="my-project-id", - ) - - -def test_lookup_model_when_no_models(tmpdir): - """ - Checks that when there are no models and fail_on_model_not_found = False, - lookup_model returns an empty string. - - Args: - tmpdir: built-in pytest tmpdir fixture - - Returns: - None - """ - with mock.patch("google.cloud.aiplatform.Model") as mock_model: - mock_model.list.return_value = [] - exported_model_resource_name, _ = lookup_model( - model_name="my-model", - project_location="europe-west4", - project_id="my-project-id", - order_models_by="create_time desc", - fail_on_model_not_found=False, - model=Model(uri=str(tmpdir)), - ) - print(exported_model_resource_name) - assert exported_model_resource_name == "" - - -def test_lookup_model_when_no_models_fail(tmpdir): - """ - Checks that when there are no models and fail_on_model_not_found = True, - lookup_model raises a RuntimeError. 
- - Args: - tmpdir: built-in pytest tmpdir fixture - - Returns: - None - - """ - with mock.patch("google.cloud.aiplatform.Model") as mock_model: - mock_model.list.return_value = [] - - # Verify that a RuntimeError is raised - with pytest.raises(RuntimeError): - lookup_model( - model_name="my-model", - project_location="europe-west4", - project_id="my-project-id", - order_models_by="create_time desc", - fail_on_model_not_found=True, - model=Model(uri=str(tmpdir)), - ) diff --git a/components/vertex-components/tests/test_update_best_model.py b/components/vertex-components/tests/test_update_best_model.py deleted file mode 100644 index bce9a9df..00000000 --- a/components/vertex-components/tests/test_update_best_model.py +++ /dev/null @@ -1,46 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from unittest.mock import patch

-from kfp.v2.dsl import Model - - -import vertex_components - -update_best_model = vertex_components.update_best_model.python_func - - -def test_update_best_model(tmpdir): - """ - Asserts update_best_model keeps the champion when the challenger does not - beat it on the comparison metric. - """ - mock_model = Model(uri=tmpdir, metadata={"resourceName": ""}) - mock_message = {"metrics": {"rmse": 0.01}} - - with patch("google.cloud.aiplatform.Model",), patch( - "google.cloud.aiplatform.model_evaluation.ModelEvaluation", - ), patch("google.cloud.aiplatform.models.ModelRegistry",), patch( - "google.protobuf.json_format.MessageToDict", return_value=mock_message - ): - - (challenger_wins,) = update_best_model( - challenger=mock_model, - challenger_evaluation="", - parent_model="", - project_id="", - project_location="", - eval_metric="rmse", - eval_lower_is_better=True, - ) - assert not challenger_wins diff --git a/cloudbuild/README.md b/docs/Automation.md similarity index 60% rename from cloudbuild/README.md rename to docs/Automation.md index 97eb2a4a..b9c4adc2 100644 --- a/cloudbuild/README.md +++ b/docs/Automation.md @@ -1,5 +1,5 @@ +# Kubeflow Pipelines Components + +The package `components` contains Kubeflow pipeline components for interacting with Google Cloud. +The following components are implemented: + +- `extract_table`: Extract a table from BigQuery to Cloud Storage. +- `lookup_model`: Look up a model in the Vertex AI Model Registry given a name (and optionally a version). +- `model_batch_predict`: Run a [Batch Prediction Job](https://cloud.google.com/ai-platform/prediction/docs/batch-predict). +- `upload_model`: Upload a new model version to the Vertex AI Model Registry, import its model evaluation, and update the "default" alias on the model if the new version (challenger) is superior to the previous (champion) model. + +These components either augment, extend, or add new functionalities that aren't found in [Google Cloud Pipeline Components list](https://cloud.google.com/vertex-ai/docs/pipelines/gcpc-list). 
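+For illustration, here is a minimal sketch of what such a component looks like. This is not one of the components above - the function name, base image, and pinned pandas version are hypothetical, and it assumes the `kfp.v2.dsl` SDK used elsewhere in this repository:
+
+```python
+from kfp.v2.dsl import Dataset, Input, component
+
+
+@component(base_image="python:3.7", packages_to_install=["pandas==1.3.5"])
+def count_rows(data: Input[Dataset]) -> int:
+    """Count the rows in a CSV dataset artifact (illustrative only)."""
+    # Imports live inside the function body so that they are available
+    # in the container that runs the component at pipeline runtime.
+    import pandas as pd
+
+    return len(pd.read_csv(data.path))
+```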
+ +## Creating a new pipeline components package + +Update Python dependencies in `poetry.lock`, `pyproject.toml`, and in `packages_to_install` (in the `@component` decorator): + +- In `pyproject.toml`, add any dependencies that your component uses under `[tool.poetry.dependencies]` (each pinned to a specific version) +- In `packages_to_install` (in the `@component` decorator used to define your component), add any dependencies that your component uses (each pinned to a specific version) + +Define your pipeline components using the `@component` decorator in Python files under `components/src/components`. +You will need to update the `__init__.py` file to export your new components so that they can be imported (and picked up by the unit tests). +See the [Kubeflow Pipelines documentation](https://www.kubeflow.org/docs/components/pipelines/v1/sdk-v2/python-function-components/#building-python-function-based-components) for more information about writing pipeline components. + +Finally, you will need to install this new components package into the [`pipelines`](../pipelines) package. +Run `make install` from the root of the repository to install the new components. + +## Testing components + +Unit tests for components are defined using pytest and should be created under `components/tests`. +Take a look at the existing components to see examples of how you can write these tests and perform mocking/patching of KFP Artifact types. + +To run the unit tests, run `make test` from the root of the repository. + diff --git a/docs/TESTING_SETUP.md b/docs/Contribution.md similarity index 62% rename from docs/TESTING_SETUP.md rename to docs/Contribution.md index cdf9ca00..5bfe996d 100644 --- a/docs/TESTING_SETUP.md +++ b/docs/Contribution.md @@ -1,6 +1,22 @@ + # Automated testing for Vertex Pipelines E2E samples -This document details the steps required to set up automated testing with Cloud Build for the [E2E samples repo](https://github.com/GoogleCloudPlatform/vertex-pipelines-end-to-end-samples) (rather than setting up automated testing for your own repo derived from it). It is focused on the Vertex Pipelines components and pipelines themselves, not on the other elements of the codebase such as infrastructure setup. +This document details the steps required to set up automated testing with Cloud Build for the [E2E samples repo](https://github.com/GoogleCloudPlatform/vertex-pipelines-end-to-end-samples) (rather than setting up automated testing for your own repo derived from it). +It is focused on the Vertex Pipelines components and pipelines themselves, not on the other elements of the codebase such as infrastructure setup. This guide assumes that you are working in a brand-new Google Cloud project, and that you have Owner permission for this project. @@ -47,38 +63,22 @@ storage.googleapis.com \ --project $GCP_PROJECT_ID ``` -### Google Cloud Storage buckets +### Google Cloud Storage -Two buckets will need to be created - one for publishing the compiled JSON pipelines (and any other files required for running the pipelines), and one for the pipeline root. +Create a bucket to use for the Vertex Pipelines pipeline root. 
``` -gsutil mb -l ${GCP_REGION} -p ${GCP_PROJECT_ID} gs://${GCP_PROJECT_ID}-pl-root -gsutil mb -l ${GCP_REGION} -p ${GCP_PROJECT_ID} gs://${GCP_PROJECT_ID}-assets +gsutil mb -l ${GCP_REGION} -p ${GCP_PROJECT_ID} --pap=enforced gs://${GCP_PROJECT_ID}-pl-root && gsutil ubla set on gs://${GCP_PROJECT_ID}-pl-root ``` -### BigQuery - -Create a new BigQuery dataset for the Chicago Taxi data: - -``` -bq --location=${GCP_REGION} mk --dataset "${GCP_PROJECT_ID}:chicago_taxi_trips" -``` +### Artifact Registry -Create a new BigQuery dataset for data processing during the pipelines: +Create a new container image registry in Artifact Registry. This is where training and serving container images will be stored before they can be used in the E2E tests. ``` -bq --location=${GCP_REGION} mk --dataset "${GCP_PROJECT_ID}:preprocessing" -``` - -Set up a BigQuery transfer job to mirror the Chicago Taxi dataset to your project - -``` -bq mk --transfer_config \ - --project_id=${GCP_PROJECT_ID} \ - --data_source="cross_region_copy" \ - --target_dataset="chicago_taxi_trips" \ - --display_name="Chicago taxi trip mirror" \ - --params='{"source_dataset_id":"'"chicago_taxi_trips"'","source_project_id":"'"bigquery-public-data"'"}' +gcloud artifacts repositories create vertex-images \ + --repository-format=docker \ + --location=${GCP_REGION} ``` ### Service Accounts @@ -109,11 +109,13 @@ The service account we have created for Cloud Build requires the following proje - roles/logging.logWriter - roles/storage.admin - roles/aiplatform.user +- roles/artifactregistry.writer ``` gcloud projects add-iam-policy-binding $GCP_PROJECT_ID --member="serviceAccount:cloud-build@${GCP_PROJECT_ID}.iam.gserviceaccount.com" --role="roles/logging.logWriter" --condition=None gcloud projects add-iam-policy-binding $GCP_PROJECT_ID --member="serviceAccount:cloud-build@${GCP_PROJECT_ID}.iam.gserviceaccount.com" --role="roles/storage.admin" --condition=None gcloud projects add-iam-policy-binding $GCP_PROJECT_ID --member="serviceAccount:cloud-build@${GCP_PROJECT_ID}.iam.gserviceaccount.com" --role="roles/aiplatform.user" --condition=None +gcloud projects add-iam-policy-binding $GCP_PROJECT_ID --member="serviceAccount:cloud-build@${GCP_PROJECT_ID}.iam.gserviceaccount.com" --role="roles/artifactregistry.writer" --condition=None ``` It also requires the "Service Account User" role for the Vertex Pipelines service account ([docs here](https://cloud.google.com/iam/docs/impersonating-service-accounts#impersonate-sa-level)): @@ -137,15 +139,11 @@ gcloud projects add-iam-policy-binding $GCP_PROJECT_ID --member="serviceAccount: gcloud projects add-iam-policy-binding $GCP_PROJECT_ID --member="serviceAccount:vertex-pipelines@${GCP_PROJECT_ID}.iam.gserviceaccount.com" --role="roles/bigquery.jobUser" --condition=None ``` -The Vertex Pipelines service account requires read access to the assets bucket, read/write access to the pipeline root bucket, and access to list both buckets: +The Vertex Pipelines service account requires read/write/list access to the pipeline root bucket: ``` -gsutil iam ch serviceAccount:vertex-pipelines@${GCP_PROJECT_ID}.iam.gserviceaccount.com:objectViewer gs://${GCP_PROJECT_ID}-assets - gsutil iam ch serviceAccount:vertex-pipelines@${GCP_PROJECT_ID}.iam.gserviceaccount.com:objectAdmin gs://${GCP_PROJECT_ID}-pl-root -gsutil iam ch serviceAccount:vertex-pipelines@${GCP_PROJECT_ID}.iam.gserviceaccount.com:legacyBucketReader gs://${GCP_PROJECT_ID}-assets - gsutil iam ch 
serviceAccount:vertex-pipelines@${GCP_PROJECT_ID}.iam.gserviceaccount.com:legacyBucketReader gs://${GCP_PROJECT_ID}-pl-root ``` @@ -157,13 +155,11 @@ Follow the [Google Cloud documentation](https://cloud.google.com/build/docs/auto ### Set up Cloud Build Triggers -There are five Cloud Build triggers to set up. +There are three Cloud Build triggers to set up. -1. `pr-checks.yaml` (tensorflow) -2. `pr-checks.yaml` (xgboost) -3. `trigger-tests.yaml` -4. `e2e-test.yaml` (tensorflow) -5. `e2e-test.yaml` (xgboost) +1. `pr-checks.yaml` +2. `trigger-tests.yaml` +3. `e2e-test.yaml` For each of the above, create a Cloud Build trigger with the following settings: @@ -177,8 +173,6 @@ For each of the above, create a Cloud Build trigger with the following settings: | Cloud Build Trigger | Substitution variables | |-------------------------------|-------------------------------------| -| `pr-checks.yaml` (tensorflow) | _PIPELINE_TEMPLATE = `tensorflow` | -| `pr-checks.yaml` (xgboost) | _PIPELINE_TEMPLATE = `xgboost` | +| `pr-checks.yaml` | | | `trigger-tests.yaml` | | -| `e2e-test.yaml` (tensorflow) | _PIPELINE_TEMPLATE = `tensorflow`
_PIPELINE_PUBLISH_GCS_PATH = `gs://-assets/e2e-test-tensorflow`
_TEST_ENABLE_PIPELINE_CACHING = `False`
_TEST_TRAIN_STATS_GCS_PATH = `gs://-pl-root/e2e-test-tensorflow/train-stats/train.stats`
_TEST_VERTEX_LOCATION = ``
_TEST_VERTEX_PIPELINE_ROOT = `gs://-pl-root`
_TEST_VERTEX_PROJECT_ID = ``
_TEST_VERTEX_SA_EMAIL = `vertex-pipelines@.iam.gserviceaccount.com` | -| `e2e-test.yaml` (xgboost) | _PIPELINE_TEMPLATE = `xgboost`
_PIPELINE_PUBLISH_GCS_PATH = `gs://-assets/e2e-test-xgboost`
_TEST_ENABLE_PIPELINE_CACHING = `False`
_TEST_TRAIN_STATS_GCS_PATH = `gs://-pl-root/e2e-test-xgboost/train-stats/train.stats`
_TEST_VERTEX_LOCATION = ``
_TEST_VERTEX_PIPELINE_ROOT = `gs://-pl-root`
_TEST_VERTEX_PROJECT_ID = ``
_TEST_VERTEX_SA_EMAIL = `vertex-pipelines@.iam.gserviceaccount.com` | +| `e2e-test.yaml` | _TEST_ENABLE_PIPELINE_CACHING = `False`
_TEST_VERTEX_LOCATION = ``
_TEST_VERTEX_PIPELINE_ROOT = `gs://-pl-root`
_TEST_VERTEX_PROJECT_ID = ``
_TEST_VERTEX_SA_EMAIL = `vertex-pipelines@.iam.gserviceaccount.com` | diff --git a/docs/Infrastructure.md b/docs/Infrastructure.md new file mode 100644 index 00000000..08593d75 --- /dev/null +++ b/docs/Infrastructure.md @@ -0,0 +1,43 @@ + +# Infrastructure + +We recommend using [`tfswitch`](https://tfswitch.warrensbox.com/) to automatically choose and download an appropriate Terraform version for you (run `tfswitch` from the [`terraform/envs/dev`](terraform/envs/dev/) directory). + +The cloud infrastructure is managed using Terraform and is defined in the [`terraform`](terraform) directory. There are three Terraform modules defined in [`terraform/modules`](terraform/modules): + +- `cloudfunction` - deploys a (Pub/Sub-triggered) Cloud Function from local source code +- `scheduled_pipelines` - deploys Cloud Scheduler jobs that will trigger Vertex Pipeline runs (via the above Cloud Function) +- `vertex_deployment` - deploys Cloud infrastructure required for running Vertex Pipelines, including enabling APIs, creating buckets, Artifact Registry repos, service accounts, and IAM permissions. + +There is a Terraform configuration for each environment (dev/test/prod) under [`terraform/envs`](terraform/envs/). + +## Schedule pipelines + +Terraform is used to deploy Cloud Scheduler jobs that trigger the Vertex Pipeline runs. This is done by the CI/CD pipelines (see section below on CI/CD). + +To schedule pipelines into an environment, you will need to provide the `cloud_schedulers_config` variable to the Terraform configuration for the relevant environment. +You can find an example of this configuration in [`terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example`](terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example). +Copy this example file into the relevant directory for your environment (e.g. `terraform/envs/dev` for the dev environment) and remove the `.example` suffix. +Adjust the configuration file as appropriate. + +## Tear down infrastructure + +To tear down the infrastructure you have created with Terraform, run this command: + +```bash +make undeploy env=dev VERTEX_PROJECT_ID= +``` diff --git a/docs/Pipelines.md b/docs/Pipelines.md new file mode 100644 index 00000000..55b25b58 --- /dev/null +++ b/docs/Pipelines.md @@ -0,0 +1,163 @@ + +# ML Pipelines + +There are two ML pipelines defined in this repository: +- training pipeline (located in `pipelines/src/pipelines/training.py`) +- batch prediction pipeline (located in `pipelines/src/pipelines/prediction.py`) + +## Pipeline input parameters + +The ML pipelines have input parameters. +As you can see in the pipeline definition files (`pipelines/src/pipelines//pipeline.py`), they have default values, and some of these default values are derived from environment variables (which in turn are defined in `env.sh`). + +When triggering ad hoc runs in your dev (or sandbox) environment, or when running the E2E tests in CI, these default values are used. +For the test and production deployments, the pipeline parameters are defined in the Terraform code for the Cloud Scheduler jobs (`terraform/envs//scheduled_jobs.auto.tfvars`) - see the section on [Scheduling pipelines](#scheduling-pipelines). + +## Customize pipelines + +### Adding a new pipeline + +This repository contains a training and a (batch) prediction pipeline. To add another ML pipeline (e.g. for continuous evaluation), create a new directory under the `pipelines/src/pipelines` directory. 
Within your new pipeline folder, create a `pipeline.py` file - this is where you should provide your pipeline definition using the KFP DSL (in a function named `pipeline`). + +Alternatively, you can just copy and paste the `training` or `prediction` directory. + +See below for an example folder structure: + +``` +vertex-pipelines-end-to-end-samples +| +β”œβ”€β”€ pipelines +β”‚ β”œβ”€β”€ src +β”‚ β”‚ β”œβ”€β”€ pipelines +β”‚ β”‚ β”‚ β”œβ”€β”€ new_pipeline +β”‚ β”‚ β”‚ β”‚ β”œβ”€β”€ pipeline.py +β”‚ β”‚ β”‚ β”‚ └── queries +β”‚ β”‚ β”‚ β”‚ └── my_query.sql +``` + +Make sure that you give the ML pipeline a unique name in the `@pipeline` decorator. + +To run your pipeline, use `make run` as before (optionally adding a parameter to wait until the pipeline has finished before returning - this defaults to false): + +```bash +make run pipeline=your_new_pipeline +``` + +Some of the scripts (e.g. the CI/CD pipelines) assume only a training and a prediction pipeline. You will need to adapt these to add in the compile, run and upload steps for your new pipeline in [cloudbuild/pr-checks.yaml](/cloudbuild/pr-checks.yaml), [cloudbuild/e2e-test.yaml](/cloudbuild/e2e-test.yaml) and [cloudbuild/release.yaml](/cloudbuild/release.yaml). + +## Training pipeline + +A screenshot of the completed ML pipeline is shown below. + +![Screenshot of the training pipeline in Vertex Pipelines](./images/training_pipeline.png) + +In the next sections we will walk through the different pipeline steps. + +### Ingestion / preprocessing step + +The first pipeline step runs a SQL script in BigQuery to extract data from the source table and load it into tables according to a train/test/validation split. + +The SQL query for this can be found in `pipelines/src/pipelines/queries/preprocessing.sql`. + +As you can see in this SQL query, there are some placeholder values (marked by the curly brace syntax `{{ }}`). When the pipeline runs, these are replaced with values provided from the ML pipeline. + +In the pipeline definition, the `generate_query` function is run at pipeline compile time to generate a SQL query (as a string) from the template file (`preprocessing.sql`). The placeholders (`{{ }}` values) are replaced with KFP placeholders that represent pipeline parameters, or values passed from other pipeline components at runtime. In turn, these placeholders are automatically replaced with the actual values at runtime by Vertex Pipelines. + +The `preprocessing` step in the pipeline uses this string (`preprocessing_query`) in the `BigqueryQueryJobOp` component (provided by [Google Cloud Pipeline Components](https://cloud.google.com/vertex-ai/docs/pipelines/components-introduction)). + +### Extraction steps + +Once the data has been split into three tables (for train/test/validation split), each table is downloaded to Google Cloud Storage as a CSV file. This is done so that you have a record of the train/test/validation data used for each pipeline run. + +(Alternatively, you could choose to omit this step, leave the data in BigQuery and consume the data directly from BigQuery for your training step). + +This step is performed using a custom KFP component located in [components/bigquery-components/src/bigquery_components/extract_bq_to_dataset.py](/vertex_components/extract_bq_to_dataset.py). + +### Training step + +The training step is defined as a [KFP container component](https://www.kubeflow.org/docs/components/pipelines/v2/components/container-components/) in the [pipeline.py](/pipelines/src/pipelines/training.py) file. 
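+For orientation, a minimal sketch of a KFP container component follows. It assumes the KFP v2 SDK; the image URI, module path and command-line flags below are placeholders for illustration, not the repository's actual values:
+
+```python
+from kfp import dsl
+
+
+@dsl.container_component
+def train(train_data: dsl.Input[dsl.Dataset], model: dsl.Output[dsl.Model]):
+    # In this repo the training image is built by CI/CD (or `make build`)
+    # and pushed to Artifact Registry; the URI below is illustrative.
+    return dsl.ContainerSpec(
+        image="europe-west2-docker.pkg.dev/my-project/vertex-images/training:latest",
+        command=["python", "-m", "training.train"],
+        args=["--train_data", train_data.path, "--model", model.path],
+    )
+```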
+ +The container image used for this component is built using CI/CD (or the `make build target=training` command if you want to build it during development). + +The source code for this container image (and the prediction container image) can be found in the [model](/model/) directory. Dependencies are managed using Poetry. The model training script can be found at [model/training/train.py](/model/training/train.py) and can be modified to suit your use case. + +The training script trains a simple XGBoost model wrapped in a scikit-learn pipeline, and saves it as `model.joblib`. + +![Architecture of the XGBoost model](./images/xgboost_architecture.png) + +The model is evaluated and metrics are saved as a JSON file. In the Vertex pipeline, the model appears as a KFP Model artifact, and the JSON file appears as a KFP Metrics artifact. + +### Upload model step + +The upload model step uploads the model to the Vertex model registry. This step uses a custom KFP component that can be +found in [components/vertex-components/src/vertex_components/upload_model.py](//vertex_components/upload_model.py). It does the following: + +1. Check if there is an existing "champion" model with the same name in the Vertex Model Registry +1. If there is, fetch its latest model evaluation and compare it with the model evaluation of the newly trained "challenger" model +1. If the new model performs better, or if there is no existing champion model, upload the newly trained "challenger" model and tag it with the `default` alias to designate it as the new champion model +1. If the new model performs worse than the existing "champion" model, upload the new model to the registry, but don't tag it with the `default` alias +1. Import the model evaluation of the newly-trained model and attach it to the newly-uploaded model in the Vertex Registry + +| :bulb: Quick note on Champion-Challenger comparisons | +|:-------------------| +| In practice, this comparison is only meaningful between comparable models. Once a new model is not comparable with the previous models, give it a specific name related to the ML project you are working on. +For example, when you want to train a new model using different features, the best practice is to change your model name in the pipeline input parameters. | + +## Batch prediction pipeline + +A screenshot of the completed ML pipeline is shown below. + +![Screenshot of the prediction pipeline in Vertex Pipelines](./images/prediction_pipeline.png) + +In the next sections we will walk through the different pipeline steps. + +### Ingestion / preprocessing step + +The first pipeline step runs a SQL script in BigQuery to extract data from the source table and load it into a different BigQuery table, ready for predictions to be generated. + +The SQL query for this can be found in [pipelines/src/pipelines/prediction/queries/preprocessing.sql](/pipelines/src/pipelines/prediction/queries/preprocessing.sql). + +As you can see in this SQL query, there are some placeholder values (marked by the curly brace syntax `{{ }}`). When the pipeline runs, these are replaced with values provided from the ML pipeline. + +In the pipeline definition, the `generate_query` function is run at pipeline compile time to generate a SQL query (as a string) from the template file (`preprocessing.sql`). The placeholders (`{{ }}` values) are replaced with KFP placeholders that represent pipeline parameters, or values passed from other pipeline components at runtime. 
In turn, these placeholders are automatically replaced with the actual values at runtime by Vertex Pipelines. + +The `preprocessing` step in the pipeline uses this string (`preprocessing_query`) in the `BigqueryQueryJobOp` component (provided by [Google Cloud Pipeline Components](https://cloud.google.com/vertex-ai/docs/pipelines/components-introduction)). + +### Look up model + +This step looks up the "champion" model from the Vertex Model Registry. +It uses a custom KFP component that can be found in [components/vertex-components/src/vertex_components/lookup_model.py](//vertex_components/lookup_model.py). It uses the Vertex AI Python SDK to list models with a given model name and retrieve the model version that uses the `default` alias, indicating that it is the "champion" model. + +### Batch Prediction + +This step submits a Vertex Batch Prediction job that generates predictions from the BigQuery table from the ingestion/preprocessing step. It uses a custom KFP component that can be found in [components/vertex-components/src/vertex_components/model_batch_predict.py](//vertex_components/model_batch_predict.py). It uses Vertex Model Monitoring for batch prediction to monitor the data for drift. + +## Pipeline input parameters + +The ML pipelines have input parameters. As you can see in the pipeline definition files (`src/pipelines//pipeline.py`), they have default values, and some of these default values are derived from environment variables (which in turn are defined in `env.sh` as described above). + +When triggering ad hoc runs in your dev/sandbox environment, or when running the E2E tests in CI, these default values are used. For the test and production deployments, the pipeline parameters are defined in the Terraform code for the Cloud Scheduler jobs (`envs//scheduled_jobs.auto.tfvars`). + +### Cache Usage in pipeline + +When Vertex AI Pipelines runs a pipeline, it checks to see whether or not an execution exists in Vertex ML Metadata with the interface (cache key) of each pipeline step (component). +If the component is exactly the same and the arguments are exactly the same as in some previous execution, then the task can be skipped and the outputs of the old step can be used. +Since most ML pipelines take a long time to run and use expensive compute resources, it is cost-effective to use caching when you are sure that the outputs of the components are still correct. +You can [control cache reuse behavior](https://cloud.google.com/vertex-ai/docs/pipelines/configure-caching) for either an individual component or the entire pipeline (all components). +If you want to control caching behavior for individual components, add `.set_caching_options()` after each component when building a pipeline. +To change the caching behaviour of ALL components within a pipeline, you can specify this when you trigger the pipeline like so: `make run pipeline=` +We suggest disabling caching of components during development, until you have a good idea of how the caching behaviour works, as it can lead to unexpected results. diff --git a/docs/PRODUCTION.md b/docs/Production.md similarity index 50% rename from docs/PRODUCTION.md rename to docs/Production.md index db4a0f5d..2c0b0719 100644 --- a/docs/PRODUCTION.md +++ b/docs/Production.md @@ -1,5 +1,5 @@ - -# Troubleshooting - -List of common issues during project setup: - -```bash -$ make run pipeline= - -google.auth.exceptions.DefaultCredentialsError: Could not automatically determine credentials. 
-Please set GOOGLE_APPLICATION_CREDENTIALS or explicitly create credentials and re-run the application. -``` - -**Solution:** `gcloud auth application-default login` - -```bash -$ make run pipeline= - -google.api_core.exceptions.InvalidArgument: 400 You do not have permission to act as service_account: -<...>@<...>.iam.gserviceaccount.com. (or it may not exist). -``` - -**Solution:** Ensure the service account mentioned in `.env.sh` (`VERTEX_SA_EMAIL`) -exists and you have permission to act as the service account. - -```bash -$ make test-components GROUP=_xgboost - -FAILED tests/test_train.py::test_xgboost_train - xgboost.core.XGBoostError: XGBoost Library (libxgboost.dylib) could not be loaded. -``` - -**Solution:** if you are using macOS, try to also install XGBoost using Homebrew: `brew install xgboost` diff --git a/docs/images/architecture.png b/docs/images/architecture.png new file mode 100644 index 00000000..a6b58745 Binary files /dev/null and b/docs/images/architecture.png differ diff --git a/docs/images/cf_view.png b/docs/images/cf_view.png deleted file mode 100644 index 7a81234d..00000000 Binary files a/docs/images/cf_view.png and /dev/null differ diff --git a/docs/images/hparam_tuning.png b/docs/images/hparam_tuning.png new file mode 100644 index 00000000..d0fab044 Binary files /dev/null and b/docs/images/hparam_tuning.png differ diff --git a/docs/images/prediction_pipeline.png b/docs/images/prediction_pipeline.png new file mode 100644 index 00000000..cdefc060 Binary files /dev/null and b/docs/images/prediction_pipeline.png differ diff --git a/docs/images/tf_model_architecture.png b/docs/images/tf_model_architecture.png deleted file mode 100644 index 84cba221..00000000 Binary files a/docs/images/tf_model_architecture.png and /dev/null differ diff --git a/docs/images/training_pipeline.png b/docs/images/training_pipeline.png new file mode 100644 index 00000000..f84f6d14 Binary files /dev/null and b/docs/images/training_pipeline.png differ diff --git a/docs/images/use_template.png b/docs/images/use_template.png deleted file mode 100644 index fd7063c8..00000000 Binary files a/docs/images/use_template.png and /dev/null differ diff --git a/docs/notebooks/01_infrastructure_setup.ipynb b/docs/notebooks/01_infrastructure_setup.ipynb new file mode 100644 index 00000000..b8a4c4b2 --- /dev/null +++ b/docs/notebooks/01_infrastructure_setup.ipynb @@ -0,0 +1,293 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ur8xi4C7S06n" + }, + "outputs": [], + "source": [ + "# Copyright 2023 Google LLC\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JAPoU8Sm5E6e", + "tags": [] + }, + "source": [ + "# Getting Started with Vertex AI Turbo Templates\n", + "\n", + "This notebook sets up infrastructure to run production-ready pipelines on Google Cloud. 
Follow this three-part notebook series to get started in a local Jupyter notebook or in [Vertex AI Workbench](https://cloud.google.com/vertex-ai-notebooks):\n", + "\n", + "1. **[Infrastructure Setup](./01_infrastructure_setup.ipynb) - this notebook**\n", + "1. [Run Pipelines](./02_run_pipelines.ipynb)\n", + "1. [Infrastructure Clean Up](./02_run_pipelines.ipynb)\n", + "\n", + "\n", + "**Prerequisites:**\n", + "\n", + "- [Google Cloud SDK (gcloud)](https://cloud.google.com/sdk/docs/quickstart)\n", + "- Make\n", + "- [Terraform](https://www.terraform.io)\n", + "\n", + "**For Vertex AI Workbench users**: \n", + "Uncomment and execute the following cell to install Terraform.\n", + "Restart the notebook kernel or the Workbench instance to ensure `terraform` is available in the `PATH`.\n", + "Then return to this notebook and continue with the next section." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# ! bash ./scripts/install_terraform.sh" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Authenticate\n", + "\n", + "Set your project ID and authenticate using your Google Account:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "VERTEX_PROJECT_ID = \"my-project-id\"\n", + "GOOGLE_ACCOUNT = \"user@company.com\"\n", + "! gcloud config set project {VERTEX_PROJECT_ID}\n", + "! gcloud config set account {GOOGLE_ACCOUNT}\n", + "! gcloud auth login" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Clone Code\n", + "\n", + "**If you haven't cloned the template yet:** Uncomment and execute the following cell to clone the code." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "#! git clone -b develop https://github.com/teamdatatonic/vertex-pipelines-end-to-end-samples" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Switch to the folder into which the template code has been cloned:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%cd vertex-pipelines-end-to-end-samples/" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Configure your code by setting the variables:\n", + "- `VERTEX_PROJECT_ID` - as set above\n", + "- `VERTEX_LOCATION` - location of the cloud project\n", + "- `BQ_LOCATION` - location of the BigQuery dataset, for this notebook example you can leave this as-is\n", + "- `RESOURCE_SUFFIX` - suffix (e.g. ``) to facilitate running concurrent pipelines in the same Google Cloud project. Change if working in a team to avoid overwriting resources during development " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "%%writefile env.sh\n", + "#!/bin/bash\n", + "VERTEX_PROJECT_ID=my-project-id\n", + "VERTEX_LOCATION=europe-west2\n", + "BQ_LOCATION=US\n", + "RESOURCE_SUFFIX=default" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For most use cases you won't need to change the following variables unless you've modified the Terraform code."
+ { + "cell_type": "markdown", + "metadata": { + "id": "zgPO1eR3CYjk", + "tags": [] + }, + "source": [ + "## Deploy Infrastructure\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The cloud infrastructure is managed using Terraform and is defined in the [`terraform`](terraform) directory. There are three Terraform modules defined in [`terraform/modules`](terraform/modules):\n", + "\n", + "- `cloudfunction` - deploys a (Pub/Sub-triggered) Cloud Function from local source code\n", + "- `scheduled_pipelines` - deploys Cloud Scheduler jobs that will trigger Vertex Pipeline runs (via the above Cloud Function)\n", + "- `vertex_deployment` - deploys the cloud infrastructure required for running Vertex Pipelines, including enabling APIs, creating buckets, Artifact Registry repos, service accounts, and IAM permissions." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Enable APIs**:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "! gcloud services enable cloudresourcemanager.googleapis.com serviceusage.googleapis.com" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Create Cloud Storage bucket:**\n", + "\n", + "Store the [Terraform state files](https://developer.hashicorp.com/terraform/language/state/remote) in the bucket `[project-id]-tfstate`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "NIq7R4HZCfIc", + "tags": [] + }, + "outputs": [], + "source": [ + "! source env.sh && gsutil mb -l $VERTEX_LOCATION -p $VERTEX_PROJECT_ID gs://$VERTEX_PROJECT_ID-tfstate" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "**Deploy:**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "! make deploy auto-approve=true" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You've successfully deployed a `dev` environment! πŸŽ‰ \n", + "Continue with [this notebook](./02_run_pipelines.ipynb) to run your first Vertex AI Pipelines in the deployed project.\n", + "\n", + "**Note:** If you'd like to deploy separate cloud environments, try out `make deploy env=dev`, replacing `dev` with `test` or `prod` as needed.\n", + "\n", + "**Troubleshooting:** If enabling the APIs or the deployment fails, check whether your Google user account has the appropriate permissions." + ] + }
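For readers curious what `make deploy` wraps, the following is a rough sketch only. The Terraform working directory and variable names here are assumptions for illustration; the `Makefile` and the `terraform` directory hold the authoritative invocation:

```bash
# Rough, illustrative equivalent of `make deploy auto-approve=true`
# (actual target, paths, and variables live in the Makefile and terraform/).
source env.sh
cd terraform/envs/dev   # assumed environment directory
terraform init -backend-config="bucket=${VERTEX_PROJECT_ID}-tfstate"
terraform apply -auto-approve \
  -var "project_id=${VERTEX_PROJECT_ID}" \
  -var "region=${VERTEX_LOCATION}"   # assumed variable names
```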
+ ], + "metadata": { + "colab": { + "collapsed_sections": [], + "name": "notebook_template.ipynb", + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "vscode": { + "interpreter": { + "hash": "bb5c7b0035bb37e2e2e56e6840dfdd8f7fa070884ae8e041fbcae450545b1006" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/notebooks/02_run_pipelines.ipynb b/docs/notebooks/02_run_pipelines.ipynb new file mode 100644 index 00000000..045babe0 --- /dev/null +++ b/docs/notebooks/02_run_pipelines.ipynb @@ -0,0 +1,255 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ur8xi4C7S06n" + }, + "outputs": [], + "source": [ + "# Copyright 2023 Google LLC\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JAPoU8Sm5E6e" + }, + "source": [ + "# Getting Started with Vertex AI Turbo Templates\n", + "\n", + "In this notebook you'll run your first production-ready training and prediction pipelines on Google Cloud. Follow this three-part notebook series to get started in a local Jupyter notebook or in [Vertex AI Workbench](https://cloud.google.com/vertex-ai-notebooks):\n", + "\n", + "1. [Infrastructure Setup](./01_infrastructure_setup.ipynb)\n", + "1. **[Run Pipelines](./02_run_pipelines.ipynb) - this notebook**\n", + "1. [Infrastructure Clean Up](./03_infrastructure_cleanup.ipynb)\n", + "\n", + "\n", + "**Prerequisites:**\n", + "\n", + "- Deployed `dev` project\n", + "- [pyenv](https://github.com/pyenv/pyenv#installation) for managing Python versions\n", + "- [Google Cloud SDK (gcloud)](https://cloud.google.com/sdk/docs/quickstart)\n", + "- Make\n", + "- [Poetry](https://python-poetry.org)\n", + "\n", + "**For Vertex AI Workbench users**: \n", + "Uncomment and execute the following cell to install pyenv and Poetry.\n", + "Restart the notebook kernel or the Workbench instance to ensure `pyenv` and `poetry` are available in the `PATH`.\n", + "Then return to this notebook and continue with the next section." + ] + },
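After the kernel or instance restart, you can optionally confirm that the tools really are on the `PATH` before continuing. This quick check is illustrative and not part of the template:

```bash
# Optional (not part of the template): confirm the prerequisite tools resolve.
which pyenv poetry gcloud make
pyenv --version && poetry --version
```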
+ { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# ! curl https://pyenv.run | bash\n", + "# ! bash ./scripts/install_poetry.sh" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Authenticate\n", + "\n", + "Set your project ID and authenticate using your Google Account:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "VERTEX_PROJECT_ID = \"my-project-id\"\n", + "GOOGLE_ACCOUNT = \"user@company.com\"\n", + "! gcloud config set project {VERTEX_PROJECT_ID}\n", + "! gcloud config set account {GOOGLE_ACCOUNT}\n", + "! gcloud auth login" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "i7EUnXsZhAGF", + "tags": [] + }, + "source": [ + "## Install Dependencies\n", + "\n", + "Install Python and Poetry dependencies:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%cd vertex-pipelines-end-to-end-samples/" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "2b4ef9b72d43", + "tags": [] + }, + "outputs": [], + "source": [ + "! pyenv install --skip-existing\n", + "! poetry config virtualenvs.prefer-active-python true" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `Makefile` uses Poetry to install virtual Python environments for the Vertex AI pipelines and the components within:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! make install" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run Training Pipeline\n", + "\n", + "Vertex AI Pipelines uses Kubeflow to orchestrate your training steps, so you'll need to:\n", + "\n", + "1. Compile the pipeline\n", + "1. Build dependent Docker containers\n", + "1. Run the pipeline in Vertex AI\n", + "\n", + "The templated training pipeline will execute in Vertex AI, similar to the image below:\n", + "\n", + "![Training Pipeline](../images/training_pipeline.png)\n", + "\n", + "Don't worry about executing steps 1-3 manually (and each time you run your pipeline!). Simply run the following command:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! make training wait=true" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "While the pipeline executes, here's a more detailed explanation of what's happening:\n", + "\n", + "**1. Compile the pipeline:** By using the Kubeflow SDK, you've compiled the training pipeline in `pipelines/training` to YAML.\n", + "\n", + "**2. Build dependent Docker containers:** In `model` you can maintain your training (and prediction) code, which is containerised and pushed to [Artifact Registry](https://cloud.google.com/artifact-registry). \n", + "In this way, your training pipeline can execute your training code in the `Train model` pipeline step.\n", + "\n", + "**3. Run the pipeline in Vertex AI:** By using the Vertex AI Python SDK and the pipeline YAML file, you execute your training pipeline." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run Prediction Pipeline\n", + "\n", + "After running a successful training pipeline job, run the prediction pipeline, which will look similar to:\n", + "\n", + "![Prediction Pipeline](../images/prediction_pipeline.png)" + ] + },
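The `make prediction` command below accepts the true/false flags described in the note that follows; on repeat runs you might skip rebuilding or recompiling. These variations are illustrative, so check the `Makefile` for the exact semantics:

```bash
# Illustrative variations of the command below (flag names from the note):
make prediction                              # default behaviour
make prediction build=false                  # skip re-building the containers
make prediction build=false compile=false    # only trigger the pipeline run
```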
+ { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! make prediction" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "**Note:** The command has the following true/false flags:\n", + "\n", + "- `build` - re-build containers for training & prediction code (limit by setting `images=training` to build only one of the containers)\n", + "- `compile` - re-compile the pipeline to YAML\n", + "- `wait` - wait for the pipeline run to finish (synchronous) instead of returning immediately (asynchronous)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "You've successfully run your first training and prediction pipeline in Vertex AI! πŸŽ‰\n", + "Continue with [this notebook](./03_infrastructure_cleanup.ipynb) to clean up the infrastructure." + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [], + "name": "notebook_template.ipynb", + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "vscode": { + "interpreter": { + "hash": "bb5c7b0035bb37e2e2e56e6840dfdd8f7fa070884ae8e041fbcae450545b1006" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/notebooks/03_infrastructure_cleanup.ipynb b/docs/notebooks/03_infrastructure_cleanup.ipynb new file mode 100644 index 00000000..d2f7c2dd --- /dev/null +++ b/docs/notebooks/03_infrastructure_cleanup.ipynb @@ -0,0 +1,196 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ur8xi4C7S06n" + }, + "outputs": [], + "source": [ + "# Copyright 2023 Google LLC\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "JAPoU8Sm5E6e" + }, + "source": [ + "# Getting Started with Vertex AI Turbo Templates\n", + "\n", + "This notebook cleans up any infrastructure used to run production-ready pipelines on Google Cloud. Follow this three-part notebook series to get started in a local Jupyter notebook or in [Vertex AI Workbench](https://cloud.google.com/vertex-ai-notebooks):\n", + "\n", + "1. [Infrastructure Setup](./01_infrastructure_setup.ipynb)\n", + "1. [Run Pipelines](./02_run_pipelines.ipynb)\n", + "1. 
**[Infrastructure Clean Up](./03_infrastructure_cleanup.ipynb) - this notebook**\n", + "\n", + "\n", + "**Prerequisites:**\n", + "\n", + "- Deployed `dev` project\n", + "- [Google Cloud SDK (gcloud)](https://cloud.google.com/sdk/docs/quickstart)\n", + "- Make\n", + "- [Terraform](https://www.terraform.io)" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "## Authenticate\n", + "\n", + "Set your project ID and authenticate using your Google Account:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "VERTEX_PROJECT_ID = \"my-project-id\"\n", + "GOOGLE_ACCOUNT = \"user@company.com\"\n", + "! gcloud config set project {VERTEX_PROJECT_ID}\n", + "! gcloud config set account {GOOGLE_ACCOUNT}\n", + "! gcloud auth login" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "TpV-iwP9qw9c" + }, + "source": [ + "## Clean Up Infrastructure\n", + "\n", + "To clean up all Google Cloud resources used in this project, you can [delete the Google Cloud\n", + "project](https://cloud.google.com/resource-manager/docs/creating-managing-projects#shutting_down_projects) you used for the tutorial.\n", + "\n", + "Otherwise, you can delete the individual resources you created in this tutorial:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%cd vertex-pipelines-end-to-end-samples/" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Empty the buckets to enable their deletion:**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "%%bash\n", + "source env.sh  # load the VERTEX_* variables into this shell\n", + "gsutil rm -a gs://${VERTEX_PROJECT_ID}-staging/**\n", + "gsutil rm -a gs://${VERTEX_PROJECT_ID}-pl-root/**" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "tags": [] + }, + "source": [ + "**Delete the BigQuery dataset used for data processing during the pipelines:**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "tags": [] + }, + "outputs": [], + "source": [ + "! source env.sh && bq --location=${VERTEX_LOCATION} rm -f -r --dataset \"${VERTEX_PROJECT_ID}:turbo_templates\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Destroy the infrastructure by executing the undeploy command (`terraform destroy`):**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! make undeploy auto-approve=true" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You've successfully cleaned your dev environment! πŸŽ‰ \n", + "This is the end of the three-part notebook series.\n", + "\n", + "**Now it's time to adapt the template to your own use case.** \n", + "You can start by updating the training and prediction code in the `model` folder." + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [], + "name": "notebook_template.ipynb", + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "vscode": { + "interpreter": { + "hash": "fd2d2b37aae7c0649f55b7e5d509b0b6c853821eea0be4d0f279f57c5531e1e5" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/docs/notebooks/extras/01_hyperparameter_tuning.ipynb b/docs/notebooks/extras/01_hyperparameter_tuning.ipynb new file mode 100644 index 00000000..b49bd952 --- /dev/null +++ b/docs/notebooks/extras/01_hyperparameter_tuning.ipynb @@ -0,0 +1,447 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "id": "ur8xi4C7S06n" + }, + "outputs": [], + "source": [ + "# Copyright 2023 Google LLC\n", + "#\n", + "# Licensed under the Apache License, Version 2.0 (the \"License\");\n", + "# you may not use this file except in compliance with the License.\n", + "# You may obtain a copy of the License at\n", + "#\n", + "# https://www.apache.org/licenses/LICENSE-2.0\n", + "#\n", + "# Unless required by applicable law or agreed to in writing, software\n", + "# distributed under the License is distributed on an \"AS IS\" BASIS,\n", + "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n", + "# See the License for the specific language governing permissions and\n", + "# limitations under the License." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# Hyperparameter Tuning with Vertex AI Turbo Templates\n", + "\n", + "This notebook guides you through enabling hyperparameter tuning in the training pipeline of your production-ready pipelines on Google Cloud. \n", + "If you're new to the Vertex AI Turbo Template, start with the [three-part notebook series](../01_infrastructure_setup.ipynb) first. \n", + "\n", + "**Prerequisites:**\n", + "\n", + "- deployed cloud project\n", + "- cloned template\n", + "- `env.sh` configured\n", + "- Python dependencies installed\n", + "\n", + "Let's change into the root of the repository:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "%cd ../../" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Model training\n", + "\n", + "Start by updating the model training code and its dependencies by:\n", + "\n", + "- updating your Python dependencies\n", + "- extending your training code\n", + "- reporting evaluation metrics for each training trial\n", + "\n", + "**Update Python dependencies:** Run the following command to add [cloudml-hypertune](https://github.com/GoogleCloudPlatform/cloudml-hypertune) to `model/pyproject.toml` so that it's installed as part of your model training code:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! cd model && poetry add cloudml-hypertune" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Extend training code:** Add hyperparameters which require tuning to the expected command-line arguments in `model/training/train.py`.\n", + "In this example we're adding the `learning_rate` hyperparameter:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "parser.add_argument(\n", + " \"--learning_rate\", type=float, default=0.3, help=\"Learning rate (default: 0.3)\"\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Report evaluation metric:** Don't forget to report the primary evaluation metric `rootMeanSquaredError` once the model is trained and evaluated:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import hypertune\n", + "\n", + "hpt = hypertune.HyperTune()\n", + "hpt.report_hyperparameter_tuning_metric(\n", + " hyperparameter_metric_tag=\"rootMeanSquaredError\",\n", + " metric_value=calculated_metric,\n", + ")" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Training pipeline\n", + "\n", + "Now that your training code is updated, let's navigate to the training pipeline and adapt it. \n", + "You'll train multiple models concurrently with different hyperparameters while optimising for your primary evaluation metric. \n", + "This is done by:\n", + "\n", + "- adding new Python imports\n", + "- implementing new pipeline components\n", + "- replacing the training operation with a new set of component operations" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Add Python imports:** Add the `HyperparameterTuningJobRunOp` to your training pipeline in `pipelines/src/pipelines/training.py`:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "from google_cloud_pipeline_components.v1.hyperparameter_tuning_job import HyperparameterTuningJobRunOp as hpt_job_op\n", + "from google.cloud.aiplatform import hyperparameter_tuning as hpt\n", + "from google_cloud_pipeline_components.v1 import hyperparameter_tuning_job as hpt_job" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Before adapting the training pipeline, we'll need to implement three new components (the tuning job operation itself comes ready-made):\n", + "\n", + "- **get_worker_pool_specs:** Generate a specification for worker pools as a necessary input for performing hyperparameter tuning.\n", + "- **hpt_job_op:** This runs the actual tuning job. This component doesn't need to be implemented, as it's already provided by [Google Cloud Pipeline Components](https://cloud.google.com/vertex-ai/docs/pipelines/components-introduction).\n", + "- **get_trials:** Retrieve all trials and their evaluation metrics after a successful hyperparameter tuning job. \n", + "- **get_best_trial:** Get the best trial from all trials. This allows you to understand which hyperparameters have led to the best model and also to retrieve the trained model artifact.\n", + "\n", + "Your updated training pipeline will look similar to this Vertex AI pipeline once implemented:\n", + "
![Hyperparameter Tuning Pipeline](../images/hparam_tuning.png)
\n", + "\n", + "Now add the following components to your training pipeline code:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@dsl.component(base_image=\"python:3.9\")\n", + "def get_worker_pool_specs(\n", + " input_data: Input[Dataset],\n", + " arguments: dict,\n", + ") -> list:\n", + " \"\"\"\n", + " Generate a specification for worker pools to perform hyperparameter tuning.\n", + "\n", + " Args:\n", + " input_data (Input[Dataset]): Input data for training\n", + " arguments (dict): Arguments (including hyperparameters) to pass to\n", + " the training job as command-line flags\n", + "\n", + " Returns:\n", + " list: A list of worker pool specifications, each specifying the machine type,\n", + " Docker image, and command arguments for a worker pool.\n", + "\n", + " \"\"\"\n", + "\n", + " COMMAND = [\"python\"]\n", + " ARGS = [\n", + " \"-m\", \"training\",\n", + " \"--input_path\", input_data.path,\n", + " ]\n", + " for key, value in arguments.items():\n", + " ARGS.extend([\"--\" + str(key), str(value)])\n", + "\n", + " return [{\n", + " \"machine_spec\": {\n", + " \"machine_type\": \"n1-standard-4\",\n", + " },\n", + " \"replica_count\": 1,\n", + " \"container_spec\": {\n", + " \"image_uri\": TRAINING_IMAGE,\n", + " \"command\": COMMAND,\n", + " \"args\": ARGS,\n", + " },\n", + " }]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@dsl.component(\n", + " packages_to_install=[\n", + " \"google-cloud-aiplatform\",\n", + " \"google-cloud-pipeline-components\",\n", + " \"protobuf\",\n", + " ],\n", + " base_image=\"python:3.9\",\n", + ")\n", + "def get_trials(gcp_resources: str) -> list:\n", + " \"\"\"Retrieve all trials after a successful hyperparameter tuning job.\n", + "\n", + " Args:\n", + " gcp_resources (str): Proto tracking the hyperparameter tuning job.\n", + "\n", + " Returns:\n", + " List of strings representing the intermediate JSON representation of the\n", + " trials from the hyperparameter tuning job.\n", + " \"\"\"\n", + " from google.cloud import aiplatform\n", + " from google_cloud_pipeline_components.proto.gcp_resources_pb2 import GcpResources\n", + " from google.protobuf.json_format import Parse\n", + " from google.cloud.aiplatform_v1.types import study\n", + "\n", + " api_endpoint_suffix = \"-aiplatform.googleapis.com\"\n", + " gcp_resources_proto = Parse(gcp_resources, GcpResources())\n", + " gcp_resources_split = gcp_resources_proto.resources[0].resource_uri.partition(\n", + " \"projects\"\n", + " )\n", + " resource_name = gcp_resources_split[1] + gcp_resources_split[2]\n", + " prefix_str = gcp_resources_split[0]\n", + " prefix_str = prefix_str[: prefix_str.find(api_endpoint_suffix)]\n", + " api_endpoint = prefix_str[(prefix_str.rfind(\"//\") + 2) :] + api_endpoint_suffix\n", + "\n", + " client_options = {\"api_endpoint\": api_endpoint}\n", + " job_client = aiplatform.gapic.JobServiceClient(client_options=client_options)\n", + " response = job_client.get_hyperparameter_tuning_job(name=resource_name)\n", + "\n", + " return [study.Trial.to_json(trial) for trial in response.trials]" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@dsl.component(packages_to_install=[\"google-cloud-aiplatform\"], base_image=\"python:3.9\")\n", + "def get_best_trial(trials: list, study_spec_metrics: list) -> str:\n", + " \"\"\"Retrieves the best trial from the trials.\n", + "\n", + " Args:\n", + " trials (list): Required. 
List representing the intermediate\n", + " JSON representation of the trials from the hyperparameter tuning job.\n", + " study_spec_metrics (list): Required. List serialized from dictionary\n", + " representing the metrics to optimize.\n", + " The dictionary key is the metric_id, which is reported by your training\n", + " job, and the dictionary value is the optimization goal of the metric\n", + " ('minimize' or 'maximize'). Example:\n", + " metrics = hyperparameter_tuning_job.serialize_metrics(\n", + " {'loss': 'minimize', 'accuracy': 'maximize'})\n", + "\n", + " Returns:\n", + " String representing the intermediate JSON representation of the best\n", + " trial from the list of trials.\n", + "\n", + " Raises:\n", + " RuntimeError: If there are multiple metrics.\n", + " \"\"\"\n", + " from google.cloud.aiplatform_v1.types import study\n", + "\n", + " if len(study_spec_metrics) > 1:\n", + " raise RuntimeError(\n", + " \"Unable to determine best parameters for multi-objective\"\n", + " \" hyperparameter tuning.\"\n", + " )\n", + " trials_list = [study.Trial.from_json(trial) for trial in trials]\n", + " best_trial = None\n", + " goal = study_spec_metrics[0][\"goal\"]\n", + " best_fn = None\n", + " if goal == study.StudySpec.MetricSpec.GoalType.MAXIMIZE:\n", + " best_fn = max\n", + " elif goal == study.StudySpec.MetricSpec.GoalType.MINIMIZE:\n", + " best_fn = min\n", + " best_trial = best_fn(\n", + " trials_list, key=lambda trial: trial.final_measurement.metrics[0].value\n", + " )\n", + "\n", + " return study.Trial.to_json(best_trial)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Training pipeline:** In the training pipeline, define new variables and replace the original training component with the newly implemented components." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "@dsl.pipeline(name=\"...\")\n", + "def pipeline():\n", + "\n", + " # ...\n", + "\n", + " # define static arguments which will be used for all tuning jobs\n", + " # `learning_rate` is excluded as we'll optimise this hyperparameter\n", + " static_arguments = dict(\n", + " n_estimators=200,\n", + " label=label,\n", + " # ...\n", + " )\n", + " # the metric(s) we want to optimise for\n", + " metrics = hpt_job.serialize_metrics({\"rootMeanSquaredError\": \"minimize\"})\n", + " \n", + " # prepare your data ...\n", + " \n", + " data_op = ... # ...\n", + " \n", + " # now use the new components ...\n", + " \n", + " specs_op = get_worker_pool_specs(\n", + " input_data=data_op.outputs[\"data\"],\n", + " arguments=static_arguments,\n", + " )\n", + " \n", + " tuning_op = hpt_job_op(\n", + " display_name=\"tuning-job\",\n", + " project=project,\n", + " location=location,\n", + " worker_pool_specs=specs_op.output,\n", + " \n", + " # TODO: adjust to your use case\n", + " base_output_directory=\"...\", # e.g. a folder in your pipeline bucket root\n", + " max_trial_count=3, # e.g. run up to 3 trials\n", + " parallel_trial_count=3, # e.g. 
run all of them in parallel\n", + " study_spec_algorithm=\"ALGORITHM_UNSPECIFIED\",\n", + " study_spec_measurement_selection_type=\"BEST_MEASUREMENT\",\n", + " study_spec_metrics=metrics,\n", + " \n", + " # TODO: specify the hyperparameters to be tuned\n", + " study_spec_parameters=hpt_job.serialize_parameters({\n", + " \"learning_rate\": hpt.DoubleParameterSpec(min=0.001, max=1, scale=\"log\"),\n", + " }),\n", + " )\n", + " \n", + " trials_op = get_trials(gcp_resources=tuning_op.outputs[\"gcp_resources\"])\n", + " \n", + " best_trial_op = get_best_trial(\n", + " trials=trials_op.output, \n", + " study_spec_metrics=metrics,\n", + " )\n", + " \n", + " # TODO: upload the model with the best trial ..." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Run pipeline\n", + "\n", + "Vertex AI Pipelines uses Kubeflow to orchestrate your training steps, so you'll need to:\n", + "\n", + "1. Compile the pipeline\n", + "2. Re-build the training container (as we've updated the dependencies and code)\n", + "3. Run the pipeline in Vertex AI\n", + "\n", + "Don't worry about executing steps 1-3 manually (and each time you run your pipeline!). Simply run the following command:" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "! make training" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "You've successfully updated your training pipeline to support hyperparameter tuning! πŸŽ‰ \n", + "\n", + "Continue by reading more about [hyperparameter tuning in Vertex AI](https://cloud.google.com/vertex-ai/docs/training/hyperparameter-tuning-overview) or by improving your new training pipeline so that it uploads the model from the best trial." + ] + } + ], + "metadata": { + "colab": { + "collapsed_sections": [], + "name": "notebook_template.ipynb", + "toc_visible": true + }, + "kernelspec": { + "display_name": "Python 3 (ipykernel)", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.9.16" + }, + "vscode": { + "interpreter": { + "hash": "bb5c7b0035bb37e2e2e56e6840dfdd8f7fa070884ae8e041fbcae450545b1006" + } + } + }, + "nbformat": 4, + "nbformat_minor": 4 +}
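Once the updated pipeline runs, you can also inspect the tuning job and its trials from the command line. This optional check is illustrative and not part of the template:

```bash
# Optional (not part of the template): list hyperparameter tuning jobs, then
# describe one using a JOB_ID taken from the list output.
source env.sh
gcloud ai hp-tuning-jobs list --region="${VERTEX_LOCATION}" --project="${VERTEX_PROJECT_ID}"
# gcloud ai hp-tuning-jobs describe JOB_ID --region="${VERTEX_LOCATION}"
```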
diff --git a/docs/notebooks/scripts/install_poetry.sh b/docs/notebooks/scripts/install_poetry.sh new file mode 100644 index 00000000..ff31ce95 --- /dev/null +++ b/docs/notebooks/scripts/install_poetry.sh @@ -0,0 +1,7 @@ +#!/bin/bash +# install poetry +curl -sSL https://install.python-poetry.org | POETRY_HOME=$HOME/.poetry python3 - +# add poetry to PATH +echo "export PATH=$HOME/.poetry/bin:$PATH" >> $HOME/.bash_profile +source $HOME/.bash_profile +poetry --version diff --git a/docs/notebooks/scripts/install_terraform.sh b/docs/notebooks/scripts/install_terraform.sh new file mode 100644 index 00000000..afe01620 --- /dev/null +++ b/docs/notebooks/scripts/install_terraform.sh @@ -0,0 +1,11 @@ +#!/bin/bash +# create temporary folder +mkdir tmp && cd tmp || exit +# download terraform and unzip +curl -so terraform.zip https://releases.hashicorp.com/terraform/1.5.7/terraform_1.5.7_linux_amd64.zip && unzip terraform.zip > /dev/null +# move binaries to ~/.bin +mkdir -p $HOME/.bin && mv terraform $HOME/.bin > /dev/null +# clean up temporary folder +cd .. && rm -r tmp || exit +echo "export PATH=$HOME/.bin:$PATH" >> $HOME/.bash_profile +terraform -version diff --git a/env.sh.example b/env.sh.example index 59b518f6..1a00f38d 100644 --- a/env.sh.example +++ b/env.sh.example @@ -13,15 +13,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -# pipeline template - update to any pipelines under the pipelines folder -# tensorflow or xgboost -export PIPELINE_TEMPLATE=tensorflow -export VERTEX_CMEK_IDENTIFIER= # optional -export VERTEX_LOCATION=europe-west2 -export VERTEX_NETWORK= # optional -export VERTEX_PROJECT_ID=my-gcp-project +VERTEX_PROJECT_ID=my-gcp-project +VERTEX_LOCATION=europe-west2 +BQ_LOCATION=US +VERTEX_NETWORK= +VERTEX_CMEK_IDENTIFIER= + +# Suffix (e.g. '') to facilitate running concurrent pipelines in the same Google Cloud project. Change if working in a team to avoid overwriting resources during development +RESOURCE_SUFFIX=default # Leave as-is -export VERTEX_SA_EMAIL=vertex-pipelines@${VERTEX_PROJECT_ID}.iam.gserviceaccount.com -export PIPELINE_FILES_GCS_PATH=gs://${VERTEX_PROJECT_ID}-pl-assets -export VERTEX_PIPELINE_ROOT=gs://${VERTEX_PROJECT_ID}-pl-root +VERTEX_SA_EMAIL=vertex-pipelines@${VERTEX_PROJECT_ID}.iam.gserviceaccount.com +VERTEX_PIPELINE_ROOT=gs://${VERTEX_PROJECT_ID}-pl-root +CONTAINER_IMAGE_REGISTRY=${VERTEX_LOCATION}-docker.pkg.dev/${VERTEX_PROJECT_ID}/vertex-images diff --git a/model/.gcloudignore b/model/.gcloudignore new file mode 100644 index 00000000..510bf9ce --- /dev/null +++ b/model/.gcloudignore @@ -0,0 +1,2 @@ +.venv +.DS_Store diff --git a/model/Dockerfile b/model/Dockerfile new file mode 100644 index 00000000..11e889b9 --- /dev/null +++ b/model/Dockerfile @@ -0,0 +1,37 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +FROM python:3.9.16-slim AS builder + +ENV PIP_NO_CACHE_DIR=off \ + PIP_DISABLE_PIP_VERSION_CHECK=on \ + PIP_DEFAULT_TIMEOUT=100 \ + POETRY_VIRTUALENVS_CREATE=false + +ARG POETRY_VERSION=1.5.1 +RUN pip install poetry==${POETRY_VERSION} +COPY pyproject.toml poetry.lock ./ + +FROM builder AS training + +RUN poetry install --no-cache --no-interaction --without prediction +COPY training training + +CMD python -m training + +FROM builder AS prediction + +RUN poetry install --no-cache --no-interaction --with prediction +COPY prediction prediction + +CMD exec uvicorn prediction.main:app --host "0.0.0.0" --port "$AIP_HTTP_PORT" diff --git a/model/cloudbuild.yaml b/model/cloudbuild.yaml new file mode 100644 index 00000000..43998648 --- /dev/null +++ b/model/cloudbuild.yaml @@ -0,0 +1,20 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +--- +steps: + - name: 'gcr.io/kaniko-project/executor:latest' + args: + - --destination=${_DESTINATION_IMAGE_URI} + - --target=${_DOCKER_TARGET} + - --cache=true diff --git a/model/poetry.lock b/model/poetry.lock new file mode 100644 index 00000000..6bf6a81f --- /dev/null +++ b/model/poetry.lock @@ -0,0 +1,1028 @@ +# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand. + +[[package]] +name = "annotated-types" +version = "0.6.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anyio" +version = "4.1.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f"}, + {file = "anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["Sphinx (>=7)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.17)"] +trio = ["trio (>=0.23)"] + +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastapi" +version = "0.100.1" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.7" +files = [ + {file = "fastapi-0.100.1-py3-none-any.whl", hash = "sha256:ec6dd52bfc4eff3063cfcd0713b43c87640fefb2687bbbe3d8a08d94049cdf32"}, + {file = "fastapi-0.100.1.tar.gz", hash = "sha256:522700d7a469e4a973d92321ab93312448fbe20fca9c8da97effc7e7bc56df23"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<3.0.0" +starlette = ">=0.27.0,<0.28.0" +typing-extensions = ">=4.5.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=2.11.2)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.5)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "google-api-core" +version = "2.15.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.15.0.tar.gz", hash = 
"sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.25.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.25.2.tar.gz", hash = "sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40"}, + {file = "google_auth-2.25.2-py2.py3-none-any.whl", hash = "sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-storage" +version = "2.14.0" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.14.0.tar.gz", hash = "sha256:2d23fcf59b55e7b45336729c148bb1c464468c69d5efbaee30f7201dd90eb97e"}, + {file = "google_cloud_storage-2.14.0-py2.py3-none-any.whl", hash = "sha256:8641243bbf2a2042c16a6399551fbb13f062cbc9a2de38d6c0bb5426962e9dbd"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=2.23.3,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<5.0.0dev)"] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = 
"google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = 
"google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + 
+[[package]] +name = "google-resumable-media" +version = "2.7.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = "googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "joblib" +version = "1.3.2" +description = "Lightweight pipelining with Python functions" +optional = false +python-versions = ">=3.7" +files = [ + {file = "joblib-1.3.2-py3-none-any.whl", hash = "sha256:ef4331c65f239985f3f2220ecc87db222f08fd22097a3dd5698f693875f8cbb9"}, + {file = "joblib-1.3.2.tar.gz", hash = "sha256:92f865e621e17784e7955080b6d042489e3b8e294949cc44c6eac304f59772b1"}, +] + +[[package]] +name = "numpy" +version = "1.26.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, +] + +[[package]] +name = "pandas" +version = "2.1.4" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pandas-2.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bdec823dc6ec53f7a6339a0e34c68b144a7a1fd28d80c260534c39c62c5bf8c9"}, + {file = "pandas-2.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:294d96cfaf28d688f30c918a765ea2ae2e0e71d3536754f4b6de0ea4a496d034"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b728fb8deba8905b319f96447a27033969f3ea1fea09d07d296c9030ab2ed1d"}, + {file = "pandas-2.1.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00028e6737c594feac3c2df15636d73ace46b8314d236100b57ed7e4b9ebe8d9"}, + {file = "pandas-2.1.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:426dc0f1b187523c4db06f96fb5c8d1a845e259c99bda74f7de97bd8a3bb3139"}, + {file = "pandas-2.1.4-cp310-cp310-win_amd64.whl", hash = "sha256:f237e6ca6421265643608813ce9793610ad09b40154a3344a088159590469e46"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b7d852d16c270e4331f6f59b3e9aa23f935f5c4b0ed2d0bc77637a8890a5d092"}, + {file = "pandas-2.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7d5f2f54f78164b3d7a40f33bf79a74cdee72c31affec86bfcabe7e0789821"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0aa6e92e639da0d6e2017d9ccff563222f4eb31e4b2c3cf32a2a392fc3103c0d"}, + {file = "pandas-2.1.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d797591b6846b9db79e65dc2d0d48e61f7db8d10b2a9480b4e3faaddc421a171"}, + {file = "pandas-2.1.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d2d3e7b00f703aea3945995ee63375c61b2e6aa5aa7871c5d622870e5e137623"}, + {file = "pandas-2.1.4-cp311-cp311-win_amd64.whl", hash = "sha256:dc9bf7ade01143cddc0074aa6995edd05323974e6e40d9dbde081021ded8510e"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:482d5076e1791777e1571f2e2d789e940dedd927325cc3cb6d0800c6304082f6"}, + {file = "pandas-2.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8a706cfe7955c4ca59af8c7a0517370eafbd98593155b48f10f9811da440248b"}, + {file = "pandas-2.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0513a132a15977b4a5b89aabd304647919bc2169eac4c8536afb29c07c23540"}, + {file = 
"pandas-2.1.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9f17f2b6fc076b2a0078862547595d66244db0f41bf79fc5f64a5c4d635bead"}, + {file = "pandas-2.1.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:45d63d2a9b1b37fa6c84a68ba2422dc9ed018bdaa668c7f47566a01188ceeec1"}, + {file = "pandas-2.1.4-cp312-cp312-win_amd64.whl", hash = "sha256:f69b0c9bb174a2342818d3e2778584e18c740d56857fc5cdb944ec8bbe4082cf"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3f06bda01a143020bad20f7a85dd5f4a1600112145f126bc9e3e42077c24ef34"}, + {file = "pandas-2.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab5796839eb1fd62a39eec2916d3e979ec3130509930fea17fe6f81e18108f6a"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edbaf9e8d3a63a9276d707b4d25930a262341bca9874fcb22eff5e3da5394732"}, + {file = "pandas-2.1.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ebfd771110b50055712b3b711b51bee5d50135429364d0498e1213a7adc2be8"}, + {file = "pandas-2.1.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8ea107e0be2aba1da619cc6ba3f999b2bfc9669a83554b1904ce3dd9507f0860"}, + {file = "pandas-2.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:d65148b14788b3758daf57bf42725caa536575da2b64df9964c563b015230984"}, + {file = "pandas-2.1.4.tar.gz", hash = "sha256:fcb68203c833cc735321512e13861358079a96c174a61f5116a1de89c58c0ef7"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4,<2", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2,<2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0,<2", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = 
["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] + +[[package]] +name = "protobuf" +version = "4.25.1" +description = "" +optional = false +python-versions = ">=3.8" +files = [ + {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, + {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, + {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, + {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, + {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, + {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = "sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, + {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, + {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, + {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, + {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, + {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, +] + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pydantic" +version = "2.5.2" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-2.5.2-py3-none-any.whl", hash = "sha256:80c50fb8e3dcecfddae1adbcc00ec5822918490c99ab31f6cf6140ca1c1429f0"}, + {file = "pydantic-2.5.2.tar.gz", hash = "sha256:ff177ba64c6faf73d7afa2e8cad38fd456c0dbe01c9954e71038001cd15a6edd"}, +] + +[package.dependencies] +annotated-types = ">=0.4.0" +pydantic-core = "2.14.5" +typing-extensions = ">=4.6.1" + +[package.extras] +email = ["email-validator (>=2.0.0)"] + +[[package]] +name = "pydantic-core" +version = "2.14.5" +description = "" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_10_7_x86_64.whl", 
hash = "sha256:7e88f5696153dc516ba6e79f82cc4747e87027205f0e02390c21f7cb3bd8abfd"}, + {file = "pydantic_core-2.14.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4641e8ad4efb697f38a9b64ca0523b557c7931c5f84e0fd377a9a3b05121f0de"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:774de879d212db5ce02dfbf5b0da9a0ea386aeba12b0b95674a4ce0593df3d07"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ebb4e035e28f49b6f1a7032920bb9a0c064aedbbabe52c543343d39341a5b2a3"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b53e9ad053cd064f7e473a5f29b37fc4cc9dc6d35f341e6afc0155ea257fc911"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8aa1768c151cf562a9992462239dfc356b3d1037cc5a3ac829bb7f3bda7cc1f9"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac5c82fc632c599f4639a5886f96867ffced74458c7db61bc9a66ccb8ee3113"}, + {file = "pydantic_core-2.14.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2ae91f50ccc5810b2f1b6b858257c9ad2e08da70bf890dee02de1775a387c66"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:6b9ff467ffbab9110e80e8c8de3bcfce8e8b0fd5661ac44a09ae5901668ba997"}, + {file = "pydantic_core-2.14.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:61ea96a78378e3bd5a0be99b0e5ed00057b71f66115f5404d0dae4819f495093"}, + {file = "pydantic_core-2.14.5-cp310-none-win32.whl", hash = "sha256:bb4c2eda937a5e74c38a41b33d8c77220380a388d689bcdb9b187cf6224c9720"}, + {file = "pydantic_core-2.14.5-cp310-none-win_amd64.whl", hash = "sha256:b7851992faf25eac90bfcb7bfd19e1f5ffa00afd57daec8a0042e63c74a4551b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:4e40f2bd0d57dac3feb3a3aed50f17d83436c9e6b09b16af271b6230a2915459"}, + {file = "pydantic_core-2.14.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ab1cdb0f14dc161ebc268c09db04d2c9e6f70027f3b42446fa11c153521c0e88"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aae7ea3a1c5bb40c93cad361b3e869b180ac174656120c42b9fadebf685d121b"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:60b7607753ba62cf0739177913b858140f11b8af72f22860c28eabb2f0a61937"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2248485b0322c75aee7565d95ad0e16f1c67403a470d02f94da7344184be770f"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:823fcc638f67035137a5cd3f1584a4542d35a951c3cc68c6ead1df7dac825c26"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96581cfefa9123accc465a5fd0cc833ac4d75d55cc30b633b402e00e7ced00a6"}, + {file = "pydantic_core-2.14.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a33324437018bf6ba1bb0f921788788641439e0ed654b233285b9c69704c27b4"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9bd18fee0923ca10f9a3ff67d4851c9d3e22b7bc63d1eddc12f439f436f2aada"}, + {file = "pydantic_core-2.14.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:853a2295c00f1d4429db4c0fb9475958543ee80cfd310814b5c0ef502de24dda"}, + {file = 
"pydantic_core-2.14.5-cp311-none-win32.whl", hash = "sha256:cb774298da62aea5c80a89bd58c40205ab4c2abf4834453b5de207d59d2e1651"}, + {file = "pydantic_core-2.14.5-cp311-none-win_amd64.whl", hash = "sha256:e87fc540c6cac7f29ede02e0f989d4233f88ad439c5cdee56f693cc9c1c78077"}, + {file = "pydantic_core-2.14.5-cp311-none-win_arm64.whl", hash = "sha256:57d52fa717ff445cb0a5ab5237db502e6be50809b43a596fb569630c665abddf"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:e60f112ac88db9261ad3a52032ea46388378034f3279c643499edb982536a093"}, + {file = "pydantic_core-2.14.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6e227c40c02fd873c2a73a98c1280c10315cbebe26734c196ef4514776120aeb"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0cbc7fff06a90bbd875cc201f94ef0ee3929dfbd5c55a06674b60857b8b85ed"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:103ef8d5b58596a731b690112819501ba1db7a36f4ee99f7892c40da02c3e189"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c949f04ecad823f81b1ba94e7d189d9dfb81edbb94ed3f8acfce41e682e48cef"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1452a1acdf914d194159439eb21e56b89aa903f2e1c65c60b9d874f9b950e5d"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb4679d4c2b089e5ef89756bc73e1926745e995d76e11925e3e96a76d5fa51fc"}, + {file = "pydantic_core-2.14.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf9d3fe53b1ee360e2421be95e62ca9b3296bf3f2fb2d3b83ca49ad3f925835e"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:70f4b4851dbb500129681d04cc955be2a90b2248d69273a787dda120d5cf1f69"}, + {file = "pydantic_core-2.14.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:59986de5710ad9613ff61dd9b02bdd2f615f1a7052304b79cc8fa2eb4e336d2d"}, + {file = "pydantic_core-2.14.5-cp312-none-win32.whl", hash = "sha256:699156034181e2ce106c89ddb4b6504c30db8caa86e0c30de47b3e0654543260"}, + {file = "pydantic_core-2.14.5-cp312-none-win_amd64.whl", hash = "sha256:5baab5455c7a538ac7e8bf1feec4278a66436197592a9bed538160a2e7d11e36"}, + {file = "pydantic_core-2.14.5-cp312-none-win_arm64.whl", hash = "sha256:e47e9a08bcc04d20975b6434cc50bf82665fbc751bcce739d04a3120428f3e27"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_10_7_x86_64.whl", hash = "sha256:af36f36538418f3806048f3b242a1777e2540ff9efaa667c27da63d2749dbce0"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-macosx_11_0_arm64.whl", hash = "sha256:45e95333b8418ded64745f14574aa9bfc212cb4fbeed7a687b0c6e53b5e188cd"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e47a76848f92529879ecfc417ff88a2806438f57be4a6a8bf2961e8f9ca9ec7"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d81e6987b27bc7d101c8597e1cd2bcaa2fee5e8e0f356735c7ed34368c471550"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:34708cc82c330e303f4ce87758828ef6e457681b58ce0e921b6e97937dd1e2a3"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c1988019752138b974c28f43751528116bcceadad85f33a258869e641d753"}, + {file = 
"pydantic_core-2.14.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e4d090e73e0725b2904fdbdd8d73b8802ddd691ef9254577b708d413bf3006e"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5c7d5b5005f177764e96bd584d7bf28d6e26e96f2a541fdddb934c486e36fd59"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a71891847f0a73b1b9eb86d089baee301477abef45f7eaf303495cd1473613e4"}, + {file = "pydantic_core-2.14.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a717aef6971208f0851a2420b075338e33083111d92041157bbe0e2713b37325"}, + {file = "pydantic_core-2.14.5-cp37-none-win32.whl", hash = "sha256:de790a3b5aa2124b8b78ae5faa033937a72da8efe74b9231698b5a1dd9be3405"}, + {file = "pydantic_core-2.14.5-cp37-none-win_amd64.whl", hash = "sha256:6c327e9cd849b564b234da821236e6bcbe4f359a42ee05050dc79d8ed2a91588"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:ef98ca7d5995a82f43ec0ab39c4caf6a9b994cb0b53648ff61716370eadc43cf"}, + {file = "pydantic_core-2.14.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c6eae413494a1c3f89055da7a5515f32e05ebc1a234c27674a6956755fb2236f"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcf4e6d85614f7a4956c2de5a56531f44efb973d2fe4a444d7251df5d5c4dcfd"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6637560562134b0e17de333d18e69e312e0458ee4455bdad12c37100b7cad706"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:77fa384d8e118b3077cccfcaf91bf83c31fe4dc850b5e6ee3dc14dc3d61bdba1"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16e29bad40bcf97aac682a58861249ca9dcc57c3f6be22f506501833ddb8939c"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:531f4b4252fac6ca476fbe0e6f60f16f5b65d3e6b583bc4d87645e4e5ddde331"}, + {file = "pydantic_core-2.14.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:074f3d86f081ce61414d2dc44901f4f83617329c6f3ab49d2bc6c96948b2c26b"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c2adbe22ab4babbca99c75c5d07aaf74f43c3195384ec07ccbd2f9e3bddaecec"}, + {file = "pydantic_core-2.14.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0f6116a558fd06d1b7c2902d1c4cf64a5bd49d67c3540e61eccca93f41418124"}, + {file = "pydantic_core-2.14.5-cp38-none-win32.whl", hash = "sha256:fe0a5a1025eb797752136ac8b4fa21aa891e3d74fd340f864ff982d649691867"}, + {file = "pydantic_core-2.14.5-cp38-none-win_amd64.whl", hash = "sha256:079206491c435b60778cf2b0ee5fd645e61ffd6e70c47806c9ed51fc75af078d"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:a6a16f4a527aae4f49c875da3cdc9508ac7eef26e7977952608610104244e1b7"}, + {file = "pydantic_core-2.14.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:abf058be9517dc877227ec3223f0300034bd0e9f53aebd63cf4456c8cb1e0863"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:49b08aae5013640a3bfa25a8eebbd95638ec3f4b2eaf6ed82cf0c7047133f03b"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c2d97e906b4ff36eb464d52a3bc7d720bd6261f64bc4bcdbcd2c557c02081ed2"}, + {file = 
"pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3128e0bbc8c091ec4375a1828d6118bc20404883169ac95ffa8d983b293611e6"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88e74ab0cdd84ad0614e2750f903bb0d610cc8af2cc17f72c28163acfcf372a4"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c339dabd8ee15f8259ee0f202679b6324926e5bc9e9a40bf981ce77c038553db"}, + {file = "pydantic_core-2.14.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3387277f1bf659caf1724e1afe8ee7dbc9952a82d90f858ebb931880216ea955"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ba6b6b3846cfc10fdb4c971980a954e49d447cd215ed5a77ec8190bc93dd7bc5"}, + {file = "pydantic_core-2.14.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ca61d858e4107ce5e1330a74724fe757fc7135190eb5ce5c9d0191729f033209"}, + {file = "pydantic_core-2.14.5-cp39-none-win32.whl", hash = "sha256:ec1e72d6412f7126eb7b2e3bfca42b15e6e389e1bc88ea0069d0cc1742f477c6"}, + {file = "pydantic_core-2.14.5-cp39-none-win_amd64.whl", hash = "sha256:c0b97ec434041827935044bbbe52b03d6018c2897349670ff8fe11ed24d1d4ab"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:79e0a2cdbdc7af3f4aee3210b1172ab53d7ddb6a2d8c24119b5706e622b346d0"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:678265f7b14e138d9a541ddabbe033012a2953315739f8cfa6d754cc8063e8ca"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b15e855ae44f0c6341ceb74df61b606e11f1087e87dcb7482377374aac6abe"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09b0e985fbaf13e6b06a56d21694d12ebca6ce5414b9211edf6f17738d82b0f8"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3ad873900297bb36e4b6b3f7029d88ff9829ecdc15d5cf20161775ce12306f8a"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:2d0ae0d8670164e10accbeb31d5ad45adb71292032d0fdb9079912907f0085f4"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:d37f8ec982ead9ba0a22a996129594938138a1503237b87318392a48882d50b7"}, + {file = "pydantic_core-2.14.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:35613015f0ba7e14c29ac6c2483a657ec740e5ac5758d993fdd5870b07a61d8b"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ab4ea451082e684198636565224bbb179575efc1658c48281b2c866bfd4ddf04"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ce601907e99ea5b4adb807ded3570ea62186b17f88e271569144e8cca4409c7"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb2ed8b3fe4bf4506d6dab3b93b83bbc22237e230cba03866d561c3577517d18"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:70f947628e074bb2526ba1b151cee10e4c3b9670af4dbb4d73bc8a89445916b5"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:4bc536201426451f06f044dfbf341c09f540b4ebdb9fd8d2c6164d733de5e634"}, + {file = "pydantic_core-2.14.5-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:f4791cf0f8c3104ac668797d8c514afb3431bc3305f5638add0ba1a5a37e0d88"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:038c9f763e650712b899f983076ce783175397c848da04985658e7628cbe873b"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:27548e16c79702f1e03f5628589c6057c9ae17c95b4c449de3c66b589ead0520"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97bee68898f3f4344eb02fec316db93d9700fb1e6a5b760ffa20d71d9a46ce3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9b759b77f5337b4ea024f03abc6464c9f35d9718de01cfe6bae9f2e139c397e"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:439c9afe34638ace43a49bf72d201e0ffc1a800295bed8420c2a9ca8d5e3dbb3"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:ba39688799094c75ea8a16a6b544eb57b5b0f3328697084f3f2790892510d144"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ccd4d5702bb90b84df13bd491be8d900b92016c5a455b7e14630ad7449eb03f8"}, + {file = "pydantic_core-2.14.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:81982d78a45d1e5396819bbb4ece1fadfe5f079335dd28c4ab3427cd95389944"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:7f8210297b04e53bc3da35db08b7302a6a1f4889c79173af69b72ec9754796b8"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8c8a8812fe6f43a3a5b054af6ac2d7b8605c7bcab2804a8a7d68b53f3cd86e00"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:206ed23aecd67c71daf5c02c3cd19c0501b01ef3cbf7782db9e4e051426b3d0d"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2027d05c8aebe61d898d4cffd774840a9cb82ed356ba47a90d99ad768f39789"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:40180930807ce806aa71eda5a5a5447abb6b6a3c0b4b3b1b1962651906484d68"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:615a0a4bff11c45eb3c1996ceed5bdaa2f7b432425253a7c2eed33bb86d80abc"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5e412d717366e0677ef767eac93566582518fe8be923361a5c204c1a62eaafe"}, + {file = "pydantic_core-2.14.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:513b07e99c0a267b1d954243845d8a833758a6726a3b5d8948306e3fe14675e3"}, + {file = "pydantic_core-2.14.5.tar.gz", hash = "sha256:6d30226dfc816dd0fdf120cae611dd2215117e4f9b124af8c60ab9093b6e8e71"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pytz" +version = "2023.3.post1" +description = "World timezone definitions, modern and historical" 
+optional = false +python-versions = "*" +files = [ + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "scikit-learn" +version = "1.3.2" +description = "A set of python modules for machine learning and data mining" +optional = false +python-versions = ">=3.8" +files = [ + {file = "scikit-learn-1.3.2.tar.gz", hash = "sha256:a2f54c76accc15a34bfb9066e6c7a56c1e7235dda5762b990792330b52ccfb05"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e326c0eb5cf4d6ba40f93776a20e9a7a69524c4db0757e7ce24ba222471ee8a1"}, + {file = "scikit_learn-1.3.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:535805c2a01ccb40ca4ab7d081d771aea67e535153e35a1fd99418fcedd1648a"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1215e5e58e9880b554b01187b8c9390bf4dc4692eedeaf542d3273f4785e342c"}, + {file = "scikit_learn-1.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ee107923a623b9f517754ea2f69ea3b62fc898a3641766cb7deb2f2ce450161"}, + {file = "scikit_learn-1.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:35a22e8015048c628ad099da9df5ab3004cdbf81edc75b396fd0cff8699ac58c"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6fb6bc98f234fda43163ddbe36df8bcde1d13ee176c6dc9b92bb7d3fc842eb66"}, + {file = "scikit_learn-1.3.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:18424efee518a1cde7b0b53a422cde2f6625197de6af36da0b57ec502f126157"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3271552a5eb16f208a6f7f617b8cc6d1f137b52c8a1ef8edf547db0259b2c9fb"}, + {file = "scikit_learn-1.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc4144a5004a676d5022b798d9e573b05139e77f271253a4703eed295bde0433"}, + {file = "scikit_learn-1.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:67f37d708f042a9b8d59551cf94d30431e01374e00dc2645fa186059c6c5d78b"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8db94cd8a2e038b37a80a04df8783e09caac77cbe052146432e67800e430c028"}, + {file = "scikit_learn-1.3.2-cp312-cp312-macosx_12_0_arm64.whl", hash = 
"sha256:61a6efd384258789aa89415a410dcdb39a50e19d3d8410bd29be365bcdd512d5"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb06f8dce3f5ddc5dee1715a9b9f19f20d295bed8e3cd4fa51e1d050347de525"}, + {file = "scikit_learn-1.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b2de18d86f630d68fe1f87af690d451388bb186480afc719e5f770590c2ef6c"}, + {file = "scikit_learn-1.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:0402638c9a7c219ee52c94cbebc8fcb5eb9fe9c773717965c1f4185588ad3107"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a19f90f95ba93c1a7f7924906d0576a84da7f3b2282ac3bfb7a08a32801add93"}, + {file = "scikit_learn-1.3.2-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:b8692e395a03a60cd927125eef3a8e3424d86dde9b2370d544f0ea35f78a8073"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e1e94cc23d04d39da797ee34236ce2375ddea158b10bee3c343647d615581d"}, + {file = "scikit_learn-1.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:785a2213086b7b1abf037aeadbbd6d67159feb3e30263434139c98425e3dcfcf"}, + {file = "scikit_learn-1.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:64381066f8aa63c2710e6b56edc9f0894cc7bf59bd71b8ce5613a4559b6145e0"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6c43290337f7a4b969d207e620658372ba3c1ffb611f8bc2b6f031dc5c6d1d03"}, + {file = "scikit_learn-1.3.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:dc9002fc200bed597d5d34e90c752b74df516d592db162f756cc52836b38fe0e"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d08ada33e955c54355d909b9c06a4789a729977f165b8bae6f225ff0a60ec4a"}, + {file = "scikit_learn-1.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763f0ae4b79b0ff9cca0bf3716bcc9915bdacff3cebea15ec79652d1cc4fa5c9"}, + {file = "scikit_learn-1.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:ed932ea780517b00dae7431e031faae6b49b20eb6950918eb83bd043237950e0"}, +] + +[package.dependencies] +joblib = ">=1.1.1" +numpy = ">=1.17.3,<2.0" +scipy = ">=1.5.0" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.3)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)", "sphinx (>=6.0.0)", "sphinx-copybutton (>=0.5.2)", "sphinx-gallery (>=0.10.1)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.1.3)", "pandas (>=1.0.5)", "plotly (>=5.14.0)", "pooch (>=1.6.0)", "scikit-image (>=0.16.2)", "seaborn (>=0.9.0)"] +tests = ["black (>=23.3.0)", "matplotlib (>=3.1.3)", "mypy (>=1.3)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pooch (>=1.6.0)", "pyamg (>=4.0.0)", "pytest (>=7.1.2)", "pytest-cov (>=2.9.0)", "ruff (>=0.0.272)", "scikit-image (>=0.16.2)"] + +[[package]] +name = "scipy" +version = "1.11.4" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "scipy-1.11.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bc9a714581f561af0848e6b69947fda0614915f072dfd14142ed1bfe1b806710"}, + {file = "scipy-1.11.4-cp310-cp310-macosx_12_0_arm64.whl", hash = 
"sha256:cf00bd2b1b0211888d4dc75656c0412213a8b25e80d73898083f402b50f47e41"}, + {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9999c008ccf00e8fbcce1236f85ade5c569d13144f77a1946bef8863e8f6eb4"}, + {file = "scipy-1.11.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:933baf588daa8dc9a92c20a0be32f56d43faf3d1a60ab11b3f08c356430f6e56"}, + {file = "scipy-1.11.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8fce70f39076a5aa62e92e69a7f62349f9574d8405c0a5de6ed3ef72de07f446"}, + {file = "scipy-1.11.4-cp310-cp310-win_amd64.whl", hash = "sha256:6550466fbeec7453d7465e74d4f4b19f905642c89a7525571ee91dd7adabb5a3"}, + {file = "scipy-1.11.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f313b39a7e94f296025e3cffc2c567618174c0b1dde173960cf23808f9fae4be"}, + {file = "scipy-1.11.4-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:1b7c3dca977f30a739e0409fb001056484661cb2541a01aba0bb0029f7b68db8"}, + {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00150c5eae7b610c32589dda259eacc7c4f1665aedf25d921907f4d08a951b1c"}, + {file = "scipy-1.11.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:530f9ad26440e85766509dbf78edcfe13ffd0ab7fec2560ee5c36ff74d6269ff"}, + {file = "scipy-1.11.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e347b14fe01003d3b78e196e84bd3f48ffe4c8a7b8a1afbcb8f5505cb710993"}, + {file = "scipy-1.11.4-cp311-cp311-win_amd64.whl", hash = "sha256:acf8ed278cc03f5aff035e69cb511741e0418681d25fbbb86ca65429c4f4d9cd"}, + {file = "scipy-1.11.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:028eccd22e654b3ea01ee63705681ee79933652b2d8f873e7949898dda6d11b6"}, + {file = "scipy-1.11.4-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:2c6ff6ef9cc27f9b3db93a6f8b38f97387e6e0591600369a297a50a8e96e835d"}, + {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b030c6674b9230d37c5c60ab456e2cf12f6784596d15ce8da9365e70896effc4"}, + {file = "scipy-1.11.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad669df80528aeca5f557712102538f4f37e503f0c5b9541655016dd0932ca79"}, + {file = "scipy-1.11.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce7fff2e23ab2cc81ff452a9444c215c28e6305f396b2ba88343a567feec9660"}, + {file = "scipy-1.11.4-cp312-cp312-win_amd64.whl", hash = "sha256:36750b7733d960d7994888f0d148d31ea3017ac15eef664194b4ef68d36a4a97"}, + {file = "scipy-1.11.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6e619aba2df228a9b34718efb023966da781e89dd3d21637b27f2e54db0410d7"}, + {file = "scipy-1.11.4-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:f3cd9e7b3c2c1ec26364856f9fbe78695fe631150f94cd1c22228456404cf1ec"}, + {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d10e45a6c50211fe256da61a11c34927c68f277e03138777bdebedd933712fea"}, + {file = "scipy-1.11.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91af76a68eeae0064887a48e25c4e616fa519fa0d38602eda7e0f97d65d57937"}, + {file = "scipy-1.11.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6df1468153a31cf55ed5ed39647279beb9cfb5d3f84369453b49e4b8502394fd"}, + {file = "scipy-1.11.4-cp39-cp39-win_amd64.whl", hash = "sha256:ee410e6de8f88fd5cf6eadd73c135020bfbbbdfcd0f6162c36a7638a1ea8cc65"}, + {file = "scipy-1.11.4.tar.gz", hash = "sha256:90a2b78e7f5733b9de748f589f09225013685f9b218275257f8a8168ededaeaa"}, +] + +[package.dependencies] +numpy = 
">=1.21.6,<1.28.0" + +[package.extras] +dev = ["click", "cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pydevtool", "rich-click", "ruff", "types-psutil", "typing_extensions"] +doc = ["jupytext", "matplotlib (>2)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + +[[package]] +name = "starlette" +version = "0.27.0" +description = "The little ASGI library that shines." +optional = false +python-versions = ">=3.7" +files = [ + {file = "starlette-0.27.0-py3-none-any.whl", hash = "sha256:918416370e846586541235ccd38a474c08b80443ed31c578a418e2209b3eef91"}, + {file = "starlette-0.27.0.tar.gz", hash = "sha256:6a6b0d042acb8d469a01eba54e9cda6cbd24ac602c4cd016723117d6a7e73b75"}, +] + +[package.dependencies] +anyio = ">=3.4.0,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart", "pyyaml"] + +[[package]] +name = "threadpoolctl" +version = "3.2.0" +description = "threadpoolctl" +optional = false +python-versions = ">=3.8" +files = [ + {file = "threadpoolctl-3.2.0-py3-none-any.whl", hash = "sha256:2b7818516e423bdaebb97c723f86a7c6b0a83d3f3b0970328d66f4d9104dc032"}, + {file = "threadpoolctl-3.2.0.tar.gz", hash = "sha256:c96a0ba3bdddeaca37dc4cc7344aafad41cdb8c313f74fdfe387a867bba93355"}, +] + +[[package]] +name = "typing-extensions" +version = "4.9.0" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +files = [ + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, +] + +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "urllib3" +version = "2.1.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, + {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uvicorn" +version = "0.23.2" +description = "The lightning-fast ASGI server." +optional = false +python-versions = ">=3.8" +files = [ + {file = "uvicorn-0.23.2-py3-none-any.whl", hash = "sha256:1f9be6558f01239d4fdf22ef8126c39cb1ad0addf76c40e760549d2c2f43ab53"}, + {file = "uvicorn-0.23.2.tar.gz", hash = "sha256:4d3cc12d7727ba72b64d12d3cc7743124074c0a69f7b201512fc50c3e3f1569a"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +standard = ["colorama (>=0.4)", "httptools (>=0.5.0)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1)", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "xgboost" +version = "1.7.6" +description = "XGBoost Python Package" +optional = false +python-versions = ">=3.8" +files = [ + {file = "xgboost-1.7.6-py3-none-macosx_10_15_x86_64.macosx_11_0_x86_64.macosx_12_0_x86_64.whl", hash = "sha256:4c34675b4d2678c624ddde5d45361e7e16046923e362e4e609b88353e6b87124"}, + {file = "xgboost-1.7.6-py3-none-macosx_12_0_arm64.whl", hash = "sha256:59b4b366d2cafc7f645e87d897983a5b59be02876194b1d213bd8d8b811d8ce8"}, + {file = "xgboost-1.7.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:281c3c6f4fbed2d36bf95cd02a641afa95e72e9abde70064056da5e76233e8df"}, + {file = "xgboost-1.7.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:b1d5db49b199152d62bd9217c98760207d3de86d2b9d243260c573ffe638f80a"}, + {file = "xgboost-1.7.6-py3-none-win_amd64.whl", hash = "sha256:127cf1f5e2ec25cd41429394c6719b87af1456ce583e89f0bffd35d02ad18bcb"}, + {file = "xgboost-1.7.6.tar.gz", hash = "sha256:1c527554a400445e0c38186039ba1a00425dcdb4e40b37eed0e74cb39a159c47"}, +] + +[package.dependencies] +numpy = "*" +scipy = "*" + +[package.extras] +dask = ["dask", "distributed", "pandas"] +datatable = ["datatable"] +pandas = ["pandas"] +plotting = ["graphviz", "matplotlib"] +pyspark = ["cloudpickle", "pyspark", "scikit-learn"] +scikit-learn = ["scikit-learn"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.9" +content-hash = "1906c721cd1ef436f516e02f8a63b97fd918852b1c9e194fcf2f90c16cce4ad9" diff --git a/model/prediction/main.py b/model/prediction/main.py new file mode 100644 index 00000000..32f76450 --- /dev/null +++ b/model/prediction/main.py @@ -0,0 +1,42 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
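+# Vertex AI custom prediction container contract: the model artifact is +# downloaded once at startup from AIP_STORAGE_URI; health checks are served on +# AIP_HEALTH_ROUTE (default /healthz) and predictions on AIP_PREDICT_ROUTE +# (default /predict) with a JSON body such as (feature values illustrative): +# {"instances": [{"payment_type": "Cash", "trip_distance": 1.2, ...}]}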
+import joblib +import os + +import pandas as pd +from fastapi import FastAPI, Request +from google.cloud import storage + +app = FastAPI() +client = storage.Client() + +with open("model.joblib", "wb") as f: + client.download_blob_to_file(f"{os.environ['AIP_STORAGE_URI']}/model.joblib", f) +_model = joblib.load("model.joblib") + + +@app.get(os.environ.get("AIP_HEALTH_ROUTE", "/healthz")) +def health(): + return {} + + +@app.post(os.environ.get("AIP_PREDICT_ROUTE", "/predict")) +async def predict(request: Request): + body = await request.json() + + instances = body["instances"] + inputs_df = pd.DataFrame(instances) + outputs = _model.predict(inputs_df).tolist() + + return {"predictions": outputs} diff --git a/model/pyproject.toml b/model/pyproject.toml new file mode 100644 index 00000000..4a62162b --- /dev/null +++ b/model/pyproject.toml @@ -0,0 +1,21 @@ +[tool.poetry] +name = "model" +version = "0.1.0" +description = "" +authors = ["Your Name "] +readme = "README.md" + +[tool.poetry.dependencies] +python = "^3.9" +scikit-learn = "^1.3.0" +xgboost = "^1.7.6" +pandas = "^2.0.3" + +[tool.poetry.group.prediction.dependencies] +fastapi = {extras = ["uvicorn"], version = "^0.100.0"} +uvicorn = "^0.23.1" +google-cloud-storage = "^2.10.0" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/pipelines/tests/__init__.py b/model/training/__init__.py similarity index 95% rename from pipelines/tests/__init__.py rename to model/training/__init__.py index 6d6d1266..7ba50f93 100644 --- a/pipelines/tests/__init__.py +++ b/model/training/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/model/training/__main__.py b/model/training/__main__.py new file mode 100644 index 00000000..60e48174 --- /dev/null +++ b/model/training/__main__.py @@ -0,0 +1,35 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
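+# Command-line entrypoint for the training step, run as a module from the +# model/ directory. An illustrative local invocation (paths and label are +# examples only): +#   python -m training --input_path=train_data.csv \ +#     --output_train_path=out/train.csv --output_valid_path=out/valid.csv \ +#     --output_test_path=out/test.csv --output_metrics=out/metrics.json \ +#     --hparams='{"label": "total_fare"}' +# --output_model defaults to AIP_MODEL_DIR, which Vertex AI injects in custom +# training jobs; --hparams is parsed with json.loads and must include "label".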
+ +import argparse +import json +import os +import logging + +from .train import train + +logging.basicConfig(level=logging.DEBUG) + +parser = argparse.ArgumentParser() +parser.add_argument("--input_path", type=str, required=True) +parser.add_argument("--input_test_path", type=str, required=False) +parser.add_argument("--output_train_path", type=str, required=True) +parser.add_argument("--output_valid_path", type=str, required=True) +parser.add_argument("--output_test_path", type=str, required=True) +parser.add_argument("--output_model", default=os.getenv("AIP_MODEL_DIR"), type=str) +parser.add_argument("--output_metrics", type=str, required=True) +parser.add_argument("--hparams", default={}, type=json.loads) +args = vars(parser.parse_args()) + +train(**args) diff --git a/model/training/train.py b/model/training/train.py new file mode 100644 index 00000000..978dae30 --- /dev/null +++ b/model/training/train.py @@ -0,0 +1,137 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pathlib import Path + +import joblib +import logging + +import pandas as pd +from sklearn.compose import ColumnTransformer +from sklearn.model_selection import train_test_split +from sklearn.pipeline import Pipeline +from sklearn.preprocessing import StandardScaler, OrdinalEncoder, OneHotEncoder +from xgboost import XGBRegressor + +from .utils import indices_in_list, save_metrics, save_monitoring_info, split_xy + + +# used for monitoring during prediction time +TRAINING_DATASET_INFO = "training_dataset.json" +# numeric/categorical features in Chicago trips dataset to be preprocessed +NUM_COLS = ["dayofweek", "hourofday", "trip_distance", "trip_miles", "trip_seconds"] +ORD_COLS = ["company"] +OHE_COLS = ["payment_type"] + + +def train( + input_path: str, + input_test_path: str, + output_train_path: str, + output_valid_path: str, + output_test_path: str, + output_model: str, + output_metrics: str, + hparams: dict, +): + + logging.info("Read csv files into dataframes") + df = pd.read_csv(input_path) + + logging.info("Split dataframes") + label = hparams.pop("label") + + if input_test_path: + # if static test data is used, only split into train & valid dataframes + if input_test_path.startswith("gs://"): + input_test_path = "/gcs/" + input_test_path[5:] + df_train, df_valid = train_test_split(df, test_size=0.2, random_state=1) + df_test = pd.read_csv(input_test_path) + else: + # otherwise, split into train, valid, and test dataframes + df_train, df_test = train_test_split(df, test_size=0.2, random_state=1) + df_train, df_valid = train_test_split(df_train, test_size=0.25, random_state=1) + + # create output folders + for x in [output_metrics, output_train_path, output_valid_path, output_test_path]: + Path(x).parent.mkdir(parents=True, exist_ok=True) + Path(output_model).mkdir(parents=True, exist_ok=True) + + df_train.to_csv(output_train_path, index=False) + df_valid.to_csv(output_valid_path, index=False) + df_test.to_csv(output_test_path, index=False) + + X_train, y_train = split_xy(df_train, 
label) + X_valid, y_valid = split_xy(df_valid, label) + X_test, y_test = split_xy(df_test, label) + + logging.info("Get the number of unique categories for ordinal encoded columns") + ordinal_columns = X_train[ORD_COLS] + n_unique_cat = ordinal_columns.nunique() + + logging.info("Get indices of columns in base data") + col_list = X_train.columns.tolist() + num_indices = indices_in_list(NUM_COLS, col_list) + cat_indices_onehot = indices_in_list(OHE_COLS, col_list) + cat_indices_ordinal = indices_in_list(ORD_COLS, col_list) + + ordinal_transformers = [ + ( + f"ordinal encoding for {ord_col}", + OrdinalEncoder( + handle_unknown="use_encoded_value", unknown_value=n_unique_cat[ord_col] + ), + [ord_index], + ) + for ord_col, ord_index in zip(ORD_COLS, cat_indices_ordinal) + ] + all_transformers = [ + ("numeric_scaling", StandardScaler(), num_indices), + ( + "one_hot_encoding", + OneHotEncoder(handle_unknown="ignore"), + cat_indices_onehot, + ), + ] + ordinal_transformers + + logging.info("Build sklearn preprocessing steps") + preprocessor = ColumnTransformer(transformers=all_transformers) + logging.info("Build sklearn pipeline with XGBoost model") + xgb_model = XGBRegressor(**hparams) + + pipeline = Pipeline( + steps=[("feature_engineering", preprocessor), ("train_model", xgb_model)] + ) + + logging.info("Transform validation data") + valid_preprocessor = preprocessor.fit(X_train) + X_valid_transformed = valid_preprocessor.transform(X_valid) + + logging.info("Fit model") + pipeline.fit( + X_train, y_train, train_model__eval_set=[(X_valid_transformed, y_valid)] + ) + + logging.info("Predict test data") + y_pred = pipeline.predict(X_test) + y_pred = y_pred.clip(0) + + logging.info(f"Save model to: {output_model}") + joblib.dump(pipeline, f"{output_model}/model.joblib") + + save_metrics(y_test, y_pred, output_metrics) + save_monitoring_info( + output_train_path, label, f"{output_model}/{TRAINING_DATASET_INFO}" + ) diff --git a/model/training/utils.py b/model/training/utils.py new file mode 100644 index 00000000..56b9b8a0 --- /dev/null +++ b/model/training/utils.py @@ -0,0 +1,66 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
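+# Helpers shared by train.py: split_xy separates feature columns from the +# label, indices_in_list maps column names to positional indices for use with +# ColumnTransformer (e.g. indices_in_list(["b"], ["a", "b", "c"]) returns [1]), +# and the save_* helpers persist evaluation metrics and the training-dataset +# info consumed by model monitoring.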
+import logging +import json + +import numpy as np +import pandas as pd +from sklearn import metrics + + +def split_xy(df: pd.DataFrame, label: str) -> tuple[pd.DataFrame, pd.Series]: + """Split dataframe into X and y.""" + return df.drop(columns=[label]), df[label] + + +def indices_in_list(elements: list, base_list: list) -> list: + """Get indices of specific elements in a base list.""" + return [idx for idx, elem in enumerate(base_list) if elem in elements] + + +def save_metrics(y_test: pd.DataFrame, y_pred: pd.DataFrame, output_path: str): + """Save metrics in JSON format for Vertex AI Evaluation.""" + data = { + "problemType": "regression", + "rootMeanSquaredError": np.sqrt(metrics.mean_squared_error(y_test, y_pred)), + "meanAbsoluteError": metrics.mean_absolute_error(y_test, y_pred), + "meanAbsolutePercentageError": metrics.mean_absolute_percentage_error( + y_test, y_pred + ), + "rSquared": metrics.r2_score(y_test, y_pred), + "rootMeanSquaredLogError": np.sqrt( + metrics.mean_squared_log_error(y_test, y_pred) + ), + } + + logging.info(f"Metrics: {data}") + with open(output_path, "w") as fp: + json.dump(data, fp) + + +def save_monitoring_info(train_path: str, label: str, output_path: str): + """Persist URIs of training file(s) for model monitoring in batch predictions. + For the expected schema see: + https://cloud.google.com/python/docs/reference/aiplatform/latest/google.cloud.aiplatform_v1beta1.types.ModelMonitoringObjectiveConfig.TrainingDataset # noqa: E501 + """ + training_dataset_for_monitoring = { + "gcsSource": {"uris": [train_path]}, + "dataFormat": "csv", + "targetField": label, + } + logging.info(f"Training dataset info: {training_dataset_for_monitoring}") + + with open(output_path, "w") as fp: + logging.info(f"Save training dataset info for model monitoring: {output_path}") + json.dump(training_dataset_for_monitoring, fp) diff --git a/pipelines/.python-version b/pipelines/.python-version index f7e5aa84..9f3d4c17 100644 --- a/pipelines/.python-version +++ b/pipelines/.python-version @@ -1 +1 @@ -3.7.12 +3.9.16 diff --git a/pipelines/Pipfile b/pipelines/Pipfile deleted file mode 100644 index 507b4e6d..00000000 --- a/pipelines/Pipfile +++ /dev/null @@ -1,19 +0,0 @@ -[[source]] -url = "https://pypi.org/simple" -verify_ssl = true -name = "pypi" - -[packages] -kfp = "==1.8.21" -google-cloud-aiplatform = "==1.24.1" -google-cloud-pipeline-components = "==1.0.42" -Jinja2 = ">=3.0.1,<4.0.0" -vertex-components = {editable = true, path = "./../components/vertex-components"} -bigquery-components = {editable = true, path = "./../components/bigquery-components"} - -[dev-packages] -pytest = ">=7.3.1,<8.0.0" -pre-commit = ">=2.14.1,<3.0.0" - -[requires] -python_version = "3.7" diff --git a/pipelines/Pipfile.lock b/pipelines/Pipfile.lock deleted file mode 100644 index 92d8d064..00000000 --- a/pipelines/Pipfile.lock +++ /dev/null @@ -1,1193 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "c889b828537547fe3e76c0ca6f812f331bffdf13bfd8c72104145db94eb555e0" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "absl-py": { - "hashes": [ - "sha256:0d3fe606adfa4f7db64792dd4c7aee4ee0c38ab75dfd353b7a83ed3e957fcb47", - "sha256:d2c244d01048ba476e7c080bd2c6df5e141d211de80223460d5b3b8a2a58433d" - ], - "markers": "python_version >= '3.6'", - "version": "==1.4.0" - }, - "attrs": { - "hashes": [ - 
"sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04", - "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1.0" - }, - "bigquery-components": { - "editable": true, - "path": "./../components/bigquery-components" - }, - "cachetools": { - "hashes": [ - "sha256:13dfddc7b8df938c21a940dfa6557ce6e94a2f1cdfa58eb90c805721d58f2c14", - "sha256:429e1a1e845c008ea6c85aa35d4b98b65d6a9763eeef3e37e92728a12d1de9d4" - ], - "markers": "python_version ~= '3.7'", - "version": "==5.3.0" - }, - "certifi": { - "hashes": [ - "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7", - "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716" - ], - "markers": "python_version >= '3.6'", - "version": "==2023.5.7" - }, - "charset-normalizer": { - "hashes": [ - "sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6", - "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1", - "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e", - "sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373", - "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62", - "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230", - "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be", - "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c", - "sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0", - "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448", - "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f", - "sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649", - "sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d", - "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0", - "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706", - "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a", - "sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59", - "sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23", - "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5", - "sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb", - "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e", - "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e", - "sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c", - "sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28", - "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d", - "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41", - "sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974", - "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce", - "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f", - "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1", - "sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d", - "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8", - "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017", - "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31", - 
"sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7", - "sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8", - "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e", - "sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14", - "sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd", - "sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d", - "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795", - "sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b", - "sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b", - "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b", - "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203", - "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f", - "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19", - "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1", - "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a", - "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac", - "sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9", - "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0", - "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137", - "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f", - "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6", - "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5", - "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909", - "sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f", - "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0", - "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324", - "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755", - "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb", - "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854", - "sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c", - "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60", - "sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84", - "sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0", - "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b", - "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1", - "sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531", - "sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1", - "sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11", - "sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326", - "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df", - "sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==3.1.0" - }, - "click": { - "hashes": [ - "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", - "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" - ], - "markers": "python_version >= '3.7'", - "version": "==8.1.3" - }, - "cloudpickle": { - "hashes": [ - 
"sha256:61f594d1f4c295fa5cd9014ceb3a1fc4a70b0de1164b94fbc2d854ccba056f9f", - "sha256:d89684b8de9e34a2a43b3460fbca07d09d6e25ce858df4d5a44240403b6178f5" - ], - "markers": "python_version >= '3.6'", - "version": "==2.2.1" - }, - "deprecated": { - "hashes": [ - "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d", - "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.13" - }, - "docstring-parser": { - "hashes": [ - "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682", - "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9" - ], - "markers": "python_version >= '3.6' and python_version < '4.0'", - "version": "==0.15" - }, - "fire": { - "hashes": [ - "sha256:a6b0d49e98c8963910021f92bba66f65ab440da2982b78eb1bbf95a0a34aacc6" - ], - "version": "==0.5.0" - }, - "google-api-core": { - "extras": [ - "grpc" - ], - "hashes": [ - "sha256:4b9bb5d5a380a0befa0573b302651b8a9a89262c1730e37bf423cec511804c22", - "sha256:ce222e27b0de0d7bc63eb043b956996d6dccab14cc3b690aaea91c9cc99dc16e" - ], - "markers": "python_version >= '3.7'", - "version": "==2.11.0" - }, - "google-api-python-client": { - "hashes": [ - "sha256:1b4bd42a46321e13c0542a9e4d96fa05d73626f07b39f83a73a947d70ca706a9", - "sha256:7e0a1a265c8d3088ee1987778c72683fcb376e32bada8d7767162bd9c503fd9b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.12.11" - }, - "google-auth": { - "hashes": [ - "sha256:ce311e2bc58b130fddf316df57c9b3943c2a7b4f6ec31de9663a9333e4064efc", - "sha256:f586b274d3eb7bd932ea424b1c702a30e0393a2e2bc4ca3eae8263ffd8be229f" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==2.17.3" - }, - "google-auth-httplib2": { - "hashes": [ - "sha256:31e49c36c6b5643b57e82617cb3e021e3e1d2df9da63af67252c02fa9c1f4a10", - "sha256:a07c39fd632becacd3f07718dfd6021bf396978f03ad3ce4321d060015cc30ac" - ], - "version": "==0.1.0" - }, - "google-cloud-aiplatform": { - "hashes": [ - "sha256:0ce9e97bf5c977397e52b3b7c4dc78610c135fbde11a60a6c0b77a4fdf776400", - "sha256:942765a6bad97e46e262dd6599dc5f171663ce952130e0b0b2eb97e0b1b98bfd" - ], - "index": "pypi", - "version": "==1.24.1" - }, - "google-cloud-bigquery": { - "hashes": [ - "sha256:4b02def076e2db8cec66f65fb627d13904a9fc3cf4fee315ede43dcb7038a8df", - "sha256:848a3cbce0ba7d4f1e9551400a7c99aa0eab72290d5a1bbbe69f18a24a10bd3a" - ], - "markers": "python_version >= '3.7'", - "version": "==3.10.0" - }, - "google-cloud-core": { - "hashes": [ - "sha256:8417acf6466be2fa85123441696c4badda48db314c607cf1e5d543fa8bdc22fe", - "sha256:b9529ee7047fd8d4bf4a2182de619154240df17fbe60ead399078c1ae152af9a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.2" - }, - "google-cloud-notebooks": { - "hashes": [ - "sha256:8fbffb7ba535fc02c61f135d8863324a5a2d20dd58cafaae592f0b0172d6bdab", - "sha256:dac73a5cd983a4344d1fa96f9a8e5849b0ff75d7a5fdde921023a2ef4566e75e" - ], - "markers": "python_version >= '3.7'", - "version": "==1.7.0" - }, - "google-cloud-pipeline-components": { - "hashes": [ - "sha256:bf833f325d1b4a89f1db9627d3f3e75d1bffc7b7725d4eb739488a68808fa623" - ], - "index": "pypi", - "version": "==1.0.42" - }, - "google-cloud-resource-manager": { - "hashes": [ - "sha256:26beb595b957972df50173f1d0fd51c00d280551eac73566017ebdda62b1616a", - 
"sha256:bfc3e60eb92e25ac562a9248bb8fc17e9bef04c3dc9f031ffbe0dfe28d919287" - ], - "markers": "python_version >= '3.7'", - "version": "==1.10.0" - }, - "google-cloud-storage": { - "hashes": [ - "sha256:83a90447f23d5edd045e0037982c270302e3aeb45fc1288d2c2ca713d27bad94", - "sha256:9b6ae7b509fc294bdacb84d0f3ea8e20e2c54a8b4bbe39c5707635fec214eff3" - ], - "markers": "python_version >= '3.7'", - "version": "==2.9.0" - }, - "google-crc32c": { - "hashes": [ - "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a", - "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876", - "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c", - "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289", - "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298", - "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02", - "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f", - "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2", - "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a", - "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb", - "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210", - "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5", - "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee", - "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c", - "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a", - "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314", - "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd", - "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65", - "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37", - "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4", - "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13", - "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894", - "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31", - "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e", - "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709", - "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740", - "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc", - "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d", - "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c", - "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c", - "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d", - "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906", - "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61", - "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57", - "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c", - "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a", - "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438", - "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946", - "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7", - "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96", - 
"sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091", - "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae", - "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d", - "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88", - "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2", - "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd", - "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541", - "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728", - "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178", - "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968", - "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346", - "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8", - "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93", - "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7", - "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273", - "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462", - "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94", - "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd", - "sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e", - "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57", - "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b", - "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9", - "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a", - "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100", - "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325", - "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183", - "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556", - "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4" - ], - "markers": "python_version >= '3.7'", - "version": "==1.5.0" - }, - "google-resumable-media": { - "hashes": [ - "sha256:218931e8e2b2a73a58eb354a288e03a0fd5fb1c4583261ac6e4c078666468c93", - "sha256:da1bd943e2e114a56d85d6848497ebf9be6a14d3db23e9fc57581e7c3e8170ec" - ], - "markers": "python_version >= '3.7'", - "version": "==2.5.0" - }, - "googleapis-common-protos": { - "hashes": [ - "sha256:4168fcb568a826a52f23510412da405abd93f4d23ba544bb68d943b14ba3cb44", - "sha256:b287dc48449d1d41af0c69f4ea26242b5ae4c3d7249a38b0984c86a4caffff1f" - ], - "markers": "python_version >= '3.7'", - "version": "==1.59.0" - }, - "grpc-google-iam-v1": { - "hashes": [ - "sha256:2bc4b8fdf22115a65d751c9317329322602c39b7c86a289c9b72d228d960ef5f", - "sha256:5c10f3d8dc2d88678ab1a9b0cb5482735c5efee71e6c0cd59f872eef22913f5c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.12.6" - }, - "grpcio": { - "hashes": [ - "sha256:02000b005bc8b72ff50c477b6431e8886b29961159e8b8d03c00b3dd9139baed", - "sha256:031bbd26656e0739e4b2c81c172155fb26e274b8d0312d67aefc730bcba915b6", - "sha256:1209d6b002b26e939e4c8ea37a3d5b4028eb9555394ea69fb1adbd4b61a10bb8", - "sha256:125ed35aa3868efa82eabffece6264bf638cfdc9f0cd58ddb17936684aafd0f8", - "sha256:1382bc499af92901c2240c4d540c74eae8a671e4fe9839bfeefdfcc3a106b5e2", - "sha256:16bca8092dd994f2864fdab278ae052fad4913f36f35238b2dd11af2d55a87db", - 
"sha256:1c59d899ee7160638613a452f9a4931de22623e7ba17897d8e3e348c2e9d8d0b", - "sha256:1d109df30641d050e009105f9c9ca5a35d01e34d2ee2a4e9c0984d392fd6d704", - "sha256:1fa7d6ddd33abbd3c8b3d7d07c56c40ea3d1891ce3cd2aa9fa73105ed5331866", - "sha256:21c4a1aae861748d6393a3ff7867473996c139a77f90326d9f4104bebb22d8b8", - "sha256:224166f06ccdaf884bf35690bf4272997c1405de3035d61384ccb5b25a4c1ca8", - "sha256:2262bd3512ba9e9f0e91d287393df6f33c18999317de45629b7bd46c40f16ba9", - "sha256:2585b3c294631a39b33f9f967a59b0fad23b1a71a212eba6bc1e3ca6e6eec9ee", - "sha256:27fb030a4589d2536daec5ff5ba2a128f4f155149efab578fe2de2cb21596d3d", - "sha256:30fbbce11ffeb4f9f91c13fe04899aaf3e9a81708bedf267bf447596b95df26b", - "sha256:3930669c9e6f08a2eed824738c3d5699d11cd47a0ecc13b68ed11595710b1133", - "sha256:3b170e441e91e4f321e46d3cc95a01cb307a4596da54aca59eb78ab0fc03754d", - "sha256:3db71c6f1ab688d8dfc102271cedc9828beac335a3a4372ec54b8bf11b43fd29", - "sha256:48cb7af77238ba16c77879009003f6b22c23425e5ee59cb2c4c103ec040638a5", - "sha256:49eace8ea55fbc42c733defbda1e4feb6d3844ecd875b01bb8b923709e0f5ec8", - "sha256:533eaf5b2a79a3c6f35cbd6a095ae99cac7f4f9c0e08bdcf86c130efd3c32adf", - "sha256:5942a3e05630e1ef5b7b5752e5da6582460a2e4431dae603de89fc45f9ec5aa9", - "sha256:62117486460c83acd3b5d85c12edd5fe20a374630475388cfc89829831d3eb79", - "sha256:650f5f2c9ab1275b4006707411bb6d6bc927886874a287661c3c6f332d4c068b", - "sha256:6dc1e2c9ac292c9a484ef900c568ccb2d6b4dfe26dfa0163d5bc815bb836c78d", - "sha256:73c238ef6e4b64272df7eec976bb016c73d3ab5a6c7e9cd906ab700523d312f3", - "sha256:775a2f70501370e5ba54e1ee3464413bff9bd85bd9a0b25c989698c44a6fb52f", - "sha256:860fcd6db7dce80d0a673a1cc898ce6bc3d4783d195bbe0e911bf8a62c93ff3f", - "sha256:87f47bf9520bba4083d65ab911f8f4c0ac3efa8241993edd74c8dd08ae87552f", - "sha256:960b176e0bb2b4afeaa1cd2002db1e82ae54c9b6e27ea93570a42316524e77cf", - "sha256:a7caf553ccaf715ec05b28c9b2ab2ee3fdb4036626d779aa09cf7cbf54b71445", - "sha256:a947d5298a0bbdd4d15671024bf33e2b7da79a70de600ed29ba7e0fef0539ebb", - "sha256:a97b0d01ae595c997c1d9d8249e2d2da829c2d8a4bdc29bb8f76c11a94915c9a", - "sha256:b7655f809e3420f80ce3bf89737169a9dce73238af594049754a1128132c0da4", - "sha256:c33744d0d1a7322da445c0fe726ea6d4e3ef2dfb0539eadf23dce366f52f546c", - "sha256:c55a9cf5cba80fb88c850915c865b8ed78d5e46e1f2ec1b27692f3eaaf0dca7e", - "sha256:d2f62fb1c914a038921677cfa536d645cb80e3dd07dc4859a3c92d75407b90a5", - "sha256:d8ae6e0df3a608e99ee1acafaafd7db0830106394d54571c1ece57f650124ce9", - "sha256:e355ee9da9c1c03f174efea59292b17a95e0b7b4d7d2a389265f731a9887d5a9", - "sha256:e3e526062c690517b42bba66ffe38aaf8bc99a180a78212e7b22baa86902f690", - "sha256:eb0807323572642ab73fd86fe53d88d843ce617dd1ddf430351ad0759809a0ae", - "sha256:ebff0738be0499d7db74d20dca9f22a7b27deae31e1bf92ea44924fd69eb6251", - "sha256:ed36e854449ff6c2f8ee145f94851fe171298e1e793f44d4f672c4a0d78064e7", - "sha256:ed3d458ded32ff3a58f157b60cc140c88f7ac8c506a1c567b2a9ee8a2fd2ce54", - "sha256:f4a7dca8ccd8023d916b900aa3c626f1bd181bd5b70159479b142f957ff420e4" - ], - "version": "==1.54.0" - }, - "grpcio-status": { - "hashes": [ - "sha256:2154fdb8aad20452488712be6879657b508115ca06139fde8897ea8e9bc79367", - "sha256:c9ce3213e84c6fd8801c31aca3ea4a6b3453eaa40b93a6c0a23ea8999808fa00" - ], - "markers": "python_version >= '3.6'", - "version": "==1.47.0" - }, - "httplib2": { - "hashes": [ - "sha256:14ae0a53c1ba8f3d37e9e27cf37eabb0fb9980f435ba405d546948b009dd64dc", - "sha256:d7a10bc5ef5ab08322488bde8c726eeee5c8618723fdb399597ec58f3d82df81" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 
3.1, 3.2, 3.3'", - "version": "==0.22.0" - }, - "idna": { - "hashes": [ - "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", - "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" - ], - "markers": "python_version >= '3.5'", - "version": "==3.4" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", - "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" - ], - "markers": "python_version < '3.8'", - "version": "==6.6.0" - }, - "importlib-resources": { - "hashes": [ - "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6", - "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a" - ], - "markers": "python_version < '3.9'", - "version": "==5.12.0" - }, - "jinja2": { - "hashes": [ - "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", - "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" - ], - "index": "pypi", - "version": "==3.1.2" - }, - "jsonschema": { - "hashes": [ - "sha256:0f864437ab8b6076ba6707453ef8f98a6a0d512a80e93f8abdb676f737ecb60d", - "sha256:a870ad254da1a8ca84b6a2905cac29d265f805acc57af304784962a2aa6508f6" - ], - "markers": "python_version >= '3.7'", - "version": "==4.17.3" - }, - "kfp": { - "hashes": [ - "sha256:038d77ec9145ccfade95ab3b4b53c32668ae498fede06647ed0425d093819b1c" - ], - "index": "pypi", - "version": "==1.8.21" - }, - "kfp-pipeline-spec": { - "hashes": [ - "sha256:4cefae00ac50145cf862127202a8b8a783ed7504c773d7d7c517bd115283be25" - ], - "markers": "python_full_version >= '3.7.0'", - "version": "==0.1.16" - }, - "kfp-server-api": { - "hashes": [ - "sha256:482d71765ba57c003164dbb980a8cb1a18d234b578d064dc88dbeb3e4c7ab6de" - ], - "version": "==1.8.5" - }, - "kubernetes": { - "hashes": [ - "sha256:213befbb4e5aed95f94950c7eed0c2322fc5a2f8f40932e58d28fdd42d90836c", - "sha256:eb42333dad0bb5caf4e66460c6a4a1a36f0f057a040f35018f6c05a699baed86" - ], - "markers": "python_version >= '3.6'", - "version": "==25.3.0" - }, - "markupsafe": { - "hashes": [ - "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed", - "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc", - "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2", - "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460", - "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7", - "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0", - "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1", - "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa", - "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03", - "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323", - "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65", - "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013", - "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036", - "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f", - "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4", - "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419", - "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2", - "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619", - 
"sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a", - "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a", - "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd", - "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7", - "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666", - "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65", - "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859", - "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625", - "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff", - "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156", - "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd", - "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba", - "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f", - "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1", - "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094", - "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a", - "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513", - "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed", - "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d", - "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3", - "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147", - "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c", - "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603", - "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601", - "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a", - "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1", - "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d", - "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3", - "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54", - "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2", - "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6", - "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58" - ], - "markers": "python_version >= '3.7'", - "version": "==2.1.2" - }, - "oauthlib": { - "hashes": [ - "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca", - "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918" - ], - "markers": "python_version >= '3.6'", - "version": "==3.2.2" - }, - "packaging": { - "hashes": [ - "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", - "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1" - }, - "pkgutil-resolve-name": { - "hashes": [ - "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174", - "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e" - ], - "markers": "python_version < '3.9'", - "version": "==1.3.10" - }, - "proto-plus": { - "hashes": [ - "sha256:0e8cda3d5a634d9895b75c573c9352c16486cb75deb0e078b5fda34db4243165", - "sha256:de34e52d6c9c6fcd704192f09767cb561bb4ee64e70eede20b0834d841f0be4d" - ], - "markers": "python_version >= '3.6'", - 
"version": "==1.22.2" - }, - "protobuf": { - "hashes": [ - "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7", - "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c", - "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2", - "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b", - "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050", - "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9", - "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7", - "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454", - "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480", - "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469", - "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c", - "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e", - "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db", - "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905", - "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b", - "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86", - "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4", - "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402", - "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7", - "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4", - "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99", - "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee" - ], - "markers": "python_version >= '3.7'", - "version": "==3.20.3" - }, - "pyasn1": { - "hashes": [ - "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57", - "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.5.0" - }, - "pyasn1-modules": { - "hashes": [ - "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c", - "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==0.3.0" - }, - "pydantic": { - "hashes": [ - "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e", - "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6", - "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd", - "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca", - "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b", - "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a", - "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245", - "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d", - "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee", - "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1", - "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3", - "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d", - "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5", - 
"sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914", - "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd", - "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1", - "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e", - "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e", - "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a", - "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd", - "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f", - "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209", - "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d", - "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a", - "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143", - "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918", - "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52", - "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e", - "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f", - "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e", - "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb", - "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe", - "sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe", - "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d", - "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209", - "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af" - ], - "markers": "python_version >= '3.7'", - "version": "==1.10.7" - }, - "pyparsing": { - "hashes": [ - "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb", - "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc" - ], - "markers": "python_version >= '3.1'", - "version": "==3.0.9" - }, - "pyrsistent": { - "hashes": [ - "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8", - "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440", - "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a", - "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c", - "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3", - "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393", - "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9", - "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da", - "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf", - "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64", - "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a", - "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3", - "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98", - "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2", - "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8", - "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf", - "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc", - "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7", - 
"sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28", - "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2", - "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b", - "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a", - "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64", - "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19", - "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1", - "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9", - "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c" - ], - "markers": "python_version >= '3.7'", - "version": "==0.19.3" - }, - "python-dateutil": { - "hashes": [ - "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", - "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.2" - }, - "pyyaml": { - "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", - "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", - "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", - "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==5.4.1" - }, - "requests": { - "hashes": [ - 
"sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294", - "sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4" - ], - "markers": "python_version >= '3.7'", - "version": "==2.30.0" - }, - "requests-oauthlib": { - "hashes": [ - "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5", - "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.3.1" - }, - "requests-toolbelt": { - "hashes": [ - "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", - "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.10.1" - }, - "rsa": { - "hashes": [ - "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7", - "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21" - ], - "markers": "python_version >= '3.6'", - "version": "==4.9" - }, - "setuptools": { - "hashes": [ - "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b", - "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990" - ], - "markers": "python_version >= '3.7'", - "version": "==67.7.2" - }, - "shapely": { - "hashes": [ - "sha256:02dd5d7dc6e46515d88874134dc8fcdc65826bca93c3eecee59d1910c42c1b17", - "sha256:0b4ee3132ee90f07d63db3aea316c4c065ed7a26231458dda0874414a09d6ba3", - "sha256:0d885cb0cf670c1c834df3f371de8726efdf711f18e2a75da5cfa82843a7ab65", - "sha256:147066da0be41b147a61f8eb805dea3b13709dbc873a431ccd7306e24d712bc0", - "sha256:21776184516a16bf82a0c3d6d6a312b3cd15a4cabafc61ee01cf2714a82e8396", - "sha256:2e0a8c2e55f1be1312b51c92b06462ea89e6bb703fab4b114e7a846d941cfc40", - "sha256:2fd15397638df291c427a53d641d3e6fd60458128029c8c4f487190473a69a91", - "sha256:3480657460e939f45a7d359ef0e172a081f249312557fe9aa78c4fd3a362d993", - "sha256:370b574c78dc5af3a198a6da5d9b3d7c04654bd2ef7e80e80a3a0992dfb2d9cd", - "sha256:38f0fbbcb8ca20c16451c966c1f527cc43968e121c8a048af19ed3e339a921cd", - "sha256:4728666fff8cccc65a07448cae72c75a8773fea061c3f4f139c44adc429b18c3", - "sha256:48dcfffb9e225c0481120f4bdf622131c8c95f342b00b158cdbe220edbbe20b6", - "sha256:4b47bb6f9369e8bf3e6dbd33e6a25a47ee02b2874792a529fe04a49bf8bc0df6", - "sha256:532a55ee2a6c52d23d6f7d1567c8f0473635f3b270262c44e1b0c88096827e22", - "sha256:5d7f85c2d35d39ff53c9216bc76b7641c52326f7e09aaad1789a3611a0f812f2", - "sha256:65b21243d8f6bcd421210daf1fabb9de84de2c04353c5b026173b88d17c1a581", - "sha256:66bdac74fbd1d3458fa787191a90fa0ae610f09e2a5ec398c36f968cc0ed743f", - "sha256:6d388c0c1bd878ed1af4583695690aa52234b02ed35f93a1c8486ff52a555838", - "sha256:6fe855e7d45685926b6ba00aaeb5eba5862611f7465775dacd527e081a8ced6d", - "sha256:753ed0e21ab108bd4282405b9b659f2e985e8502b1a72b978eaa51d3496dee19", - "sha256:783bad5f48e2708a0e2f695a34ed382e4162c795cb2f0368b39528ac1d6db7ed", - "sha256:78fb9d929b8ee15cfd424b6c10879ce1907f24e05fb83310fc47d2cd27088e40", - "sha256:84010db15eb364a52b74ea8804ef92a6a930dfc1981d17a369444b6ddec66efd", - "sha256:89164e7a9776a19e29f01369a98529321994e2e4d852b92b7e01d4d9804c55bf", - "sha256:8d086591f744be483b34628b391d741e46f2645fe37594319e0a673cc2c26bcf", - "sha256:8e59817b0fe63d34baedaabba8c393c0090f061917d18fc0bcc2f621937a8f73", - "sha256:99a2f0da0109e81e0c101a2b4cd8412f73f5f299e7b5b2deaf64cd2a100ac118", - "sha256:99ab0ddc05e44acabdbe657c599fdb9b2d82e86c5493bdae216c0c4018a82dee", - 
"sha256:a23ef3882d6aa203dd3623a3d55d698f59bfbd9f8a3bfed52c2da05a7f0f8640", - "sha256:a354199219c8d836f280b88f2c5102c81bb044ccea45bd361dc38a79f3873714", - "sha256:a74631e511153366c6dbe3229fa93f877e3c87ea8369cd00f1d38c76b0ed9ace", - "sha256:ab38f7b5196ace05725e407cb8cab9ff66edb8e6f7bb36a398e8f73f52a7aaa2", - "sha256:adcf8a11b98af9375e32bff91de184f33a68dc48b9cb9becad4f132fa25cfa3c", - "sha256:b65f5d530ba91e49ffc7c589255e878d2506a8b96ffce69d3b7c4500a9a9eaf8", - "sha256:be9423d5a3577ac2e92c7e758bd8a2b205f5e51a012177a590bc46fc51eb4834", - "sha256:c2822111ddc5bcfb116e6c663e403579d0fe3f147d2a97426011a191c43a7458", - "sha256:c6a9a4a31cd6e86d0fbe8473ceed83d4fe760b19d949fb557ef668defafea0f6", - "sha256:d048f93e42ba578b82758c15d8ae037d08e69d91d9872bca5a1895b118f4e2b0", - "sha256:d8a2b2a65fa7f97115c1cd989fe9d6f39281ca2a8a014f1d4904c1a6e34d7f25", - "sha256:e9c30b311de2513555ab02464ebb76115d242842b29c412f5a9aa0cac57be9f6", - "sha256:ec14ceca36f67cb48b34d02d7f65a9acae15cd72b48e303531893ba4a960f3ea", - "sha256:ef3be705c3eac282a28058e6c6e5503419b250f482320df2172abcbea642c831" - ], - "markers": "python_version >= '3.6'", - "version": "==1.8.5.post1" - }, - "six": { - "hashes": [ - "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", - "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.16.0" - }, - "strip-hints": { - "hashes": [ - "sha256:307c2bd147cd35997c8ed2e9a3bdca48ad9c9617e04ea46599095201b4ce998f" - ], - "version": "==0.1.10" - }, - "tabulate": { - "hashes": [ - "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c", - "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f" - ], - "markers": "python_version >= '3.7'", - "version": "==0.9.0" - }, - "termcolor": { - "hashes": [ - "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475", - "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a" - ], - "markers": "python_version >= '3.7'", - "version": "==2.3.0" - }, - "typer": { - "hashes": [ - "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2", - "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee" - ], - "markers": "python_version >= '3.6'", - "version": "==0.9.0" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version < '3.9'", - "version": "==4.5.0" - }, - "uritemplate": { - "hashes": [ - "sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f", - "sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==3.0.1" - }, - "urllib3": { - "hashes": [ - "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305", - "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==1.26.15" - }, - "vertex-components": { - "editable": true, - "path": "./../components/vertex-components" - }, - "websocket-client": { - "hashes": [ - "sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40", - "sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e" - ], - "markers": 
"python_version >= '3.7'", - "version": "==1.5.1" - }, - "wheel": { - "hashes": [ - "sha256:cd1196f3faee2b31968d626e1731c94f99cbdb67cf5a46e4f5656cbee7738873", - "sha256:d236b20e7cb522daf2390fa84c55eea81c5c30190f90f29ae2ca1ad8355bf247" - ], - "markers": "python_version >= '3.7'", - "version": "==0.40.0" - }, - "wrapt": { - "hashes": [ - "sha256:02fce1852f755f44f95af51f69d22e45080102e9d00258053b79367d07af39c0", - "sha256:077ff0d1f9d9e4ce6476c1a924a3332452c1406e59d90a2cf24aeb29eeac9420", - "sha256:078e2a1a86544e644a68422f881c48b84fef6d18f8c7a957ffd3f2e0a74a0d4a", - "sha256:0970ddb69bba00670e58955f8019bec4a42d1785db3faa043c33d81de2bf843c", - "sha256:1286eb30261894e4c70d124d44b7fd07825340869945c79d05bda53a40caa079", - "sha256:21f6d9a0d5b3a207cdf7acf8e58d7d13d463e639f0c7e01d82cdb671e6cb7923", - "sha256:230ae493696a371f1dbffaad3dafbb742a4d27a0afd2b1aecebe52b740167e7f", - "sha256:26458da5653aa5b3d8dc8b24192f574a58984c749401f98fff994d41d3f08da1", - "sha256:2cf56d0e237280baed46f0b5316661da892565ff58309d4d2ed7dba763d984b8", - "sha256:2e51de54d4fb8fb50d6ee8327f9828306a959ae394d3e01a1ba8b2f937747d86", - "sha256:2fbfbca668dd15b744418265a9607baa970c347eefd0db6a518aaf0cfbd153c0", - "sha256:38adf7198f8f154502883242f9fe7333ab05a5b02de7d83aa2d88ea621f13364", - "sha256:3a8564f283394634a7a7054b7983e47dbf39c07712d7b177b37e03f2467a024e", - "sha256:3abbe948c3cbde2689370a262a8d04e32ec2dd4f27103669a45c6929bcdbfe7c", - "sha256:3bbe623731d03b186b3d6b0d6f51865bf598587c38d6f7b0be2e27414f7f214e", - "sha256:40737a081d7497efea35ab9304b829b857f21558acfc7b3272f908d33b0d9d4c", - "sha256:41d07d029dd4157ae27beab04d22b8e261eddfc6ecd64ff7000b10dc8b3a5727", - "sha256:46ed616d5fb42f98630ed70c3529541408166c22cdfd4540b88d5f21006b0eff", - "sha256:493d389a2b63c88ad56cdc35d0fa5752daac56ca755805b1b0c530f785767d5e", - "sha256:4ff0d20f2e670800d3ed2b220d40984162089a6e2c9646fdb09b85e6f9a8fc29", - "sha256:54accd4b8bc202966bafafd16e69da9d5640ff92389d33d28555c5fd4f25ccb7", - "sha256:56374914b132c702aa9aa9959c550004b8847148f95e1b824772d453ac204a72", - "sha256:578383d740457fa790fdf85e6d346fda1416a40549fe8db08e5e9bd281c6a475", - "sha256:58d7a75d731e8c63614222bcb21dd992b4ab01a399f1f09dd82af17bbfc2368a", - "sha256:5c5aa28df055697d7c37d2099a7bc09f559d5053c3349b1ad0c39000e611d317", - "sha256:5fc8e02f5984a55d2c653f5fea93531e9836abbd84342c1d1e17abc4a15084c2", - "sha256:63424c681923b9f3bfbc5e3205aafe790904053d42ddcc08542181a30a7a51bd", - "sha256:64b1df0f83706b4ef4cfb4fb0e4c2669100fd7ecacfb59e091fad300d4e04640", - "sha256:74934ebd71950e3db69960a7da29204f89624dde411afbfb3b4858c1409b1e98", - "sha256:75669d77bb2c071333417617a235324a1618dba66f82a750362eccbe5b61d248", - "sha256:75760a47c06b5974aa5e01949bf7e66d2af4d08cb8c1d6516af5e39595397f5e", - "sha256:76407ab327158c510f44ded207e2f76b657303e17cb7a572ffe2f5a8a48aa04d", - "sha256:76e9c727a874b4856d11a32fb0b389afc61ce8aaf281ada613713ddeadd1cfec", - "sha256:77d4c1b881076c3ba173484dfa53d3582c1c8ff1f914c6461ab70c8428b796c1", - "sha256:780c82a41dc493b62fc5884fb1d3a3b81106642c5c5c78d6a0d4cbe96d62ba7e", - "sha256:7dc0713bf81287a00516ef43137273b23ee414fe41a3c14be10dd95ed98a2df9", - "sha256:7eebcdbe3677e58dd4c0e03b4f2cfa346ed4049687d839adad68cc38bb559c92", - "sha256:896689fddba4f23ef7c718279e42f8834041a21342d95e56922e1c10c0cc7afb", - "sha256:96177eb5645b1c6985f5c11d03fc2dbda9ad24ec0f3a46dcce91445747e15094", - "sha256:96e25c8603a155559231c19c0349245eeb4ac0096fe3c1d0be5c47e075bd4f46", - "sha256:9d37ac69edc5614b90516807de32d08cb8e7b12260a285ee330955604ed9dd29", - 
"sha256:9ed6aa0726b9b60911f4aed8ec5b8dd7bf3491476015819f56473ffaef8959bd", - "sha256:a487f72a25904e2b4bbc0817ce7a8de94363bd7e79890510174da9d901c38705", - "sha256:a4cbb9ff5795cd66f0066bdf5947f170f5d63a9274f99bdbca02fd973adcf2a8", - "sha256:a74d56552ddbde46c246b5b89199cb3fd182f9c346c784e1a93e4dc3f5ec9975", - "sha256:a89ce3fd220ff144bd9d54da333ec0de0399b52c9ac3d2ce34b569cf1a5748fb", - "sha256:abd52a09d03adf9c763d706df707c343293d5d106aea53483e0ec8d9e310ad5e", - "sha256:abd8f36c99512755b8456047b7be10372fca271bf1467a1caa88db991e7c421b", - "sha256:af5bd9ccb188f6a5fdda9f1f09d9f4c86cc8a539bd48a0bfdc97723970348418", - "sha256:b02f21c1e2074943312d03d243ac4388319f2456576b2c6023041c4d57cd7019", - "sha256:b06fa97478a5f478fb05e1980980a7cdf2712015493b44d0c87606c1513ed5b1", - "sha256:b0724f05c396b0a4c36a3226c31648385deb6a65d8992644c12a4963c70326ba", - "sha256:b130fe77361d6771ecf5a219d8e0817d61b236b7d8b37cc045172e574ed219e6", - "sha256:b56d5519e470d3f2fe4aa7585f0632b060d532d0696c5bdfb5e8319e1d0f69a2", - "sha256:b67b819628e3b748fd3c2192c15fb951f549d0f47c0449af0764d7647302fda3", - "sha256:ba1711cda2d30634a7e452fc79eabcadaffedf241ff206db2ee93dd2c89a60e7", - "sha256:bbeccb1aa40ab88cd29e6c7d8585582c99548f55f9b2581dfc5ba68c59a85752", - "sha256:bd84395aab8e4d36263cd1b9308cd504f6cf713b7d6d3ce25ea55670baec5416", - "sha256:c99f4309f5145b93eca6e35ac1a988f0dc0a7ccf9ccdcd78d3c0adf57224e62f", - "sha256:ca1cccf838cd28d5a0883b342474c630ac48cac5df0ee6eacc9c7290f76b11c1", - "sha256:cd525e0e52a5ff16653a3fc9e3dd827981917d34996600bbc34c05d048ca35cc", - "sha256:cdb4f085756c96a3af04e6eca7f08b1345e94b53af8921b25c72f096e704e145", - "sha256:ce42618f67741d4697684e501ef02f29e758a123aa2d669e2d964ff734ee00ee", - "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a", - "sha256:d5fe3e099cf07d0fb5a1e23d399e5d4d1ca3e6dfcbe5c8570ccff3e9208274f7", - "sha256:d6bcbfc99f55655c3d93feb7ef3800bd5bbe963a755687cbf1f490a71fb7794b", - "sha256:d787272ed958a05b2c86311d3a4135d3c2aeea4fc655705f074130aa57d71653", - "sha256:e169e957c33576f47e21864cf3fc9ff47c223a4ebca8960079b8bd36cb014fd0", - "sha256:e20076a211cd6f9b44a6be58f7eeafa7ab5720eb796975d0c03f05b47d89eb90", - "sha256:e826aadda3cae59295b95343db8f3d965fb31059da7de01ee8d1c40a60398b29", - "sha256:eef4d64c650f33347c1f9266fa5ae001440b232ad9b98f1f43dfe7a79435c0a6", - "sha256:f2e69b3ed24544b0d3dbe2c5c0ba5153ce50dcebb576fdc4696d52aa22db6034", - "sha256:f87ec75864c37c4c6cb908d282e1969e79763e0d9becdfe9fe5473b7bb1e5f09", - "sha256:fbec11614dba0424ca72f4e8ba3c420dba07b4a7c206c8c8e4e73f2e98f4c559", - "sha256:fd69666217b62fa5d7c6aa88e507493a34dec4fa20c5bd925e4bc12fce586639" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.15.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - }, - "develop": { - "cfgv": { - "hashes": [ - "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426", - "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736" - ], - "markers": "python_full_version >= '3.6.1'", - "version": "==3.3.1" - }, - "distlib": { - "hashes": [ - "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46", - "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e" - ], - "version": "==0.3.6" - }, - "exceptiongroup": { - "hashes": [ - 
"sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e", - "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785" - ], - "markers": "python_version < '3.11'", - "version": "==1.1.1" - }, - "filelock": { - "hashes": [ - "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9", - "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718" - ], - "markers": "python_version >= '3.7'", - "version": "==3.12.0" - }, - "identify": { - "hashes": [ - "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4", - "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d" - ], - "markers": "python_version >= '3.7'", - "version": "==2.5.24" - }, - "importlib-metadata": { - "hashes": [ - "sha256:43dd286a2cd8995d5eaef7fee2066340423b818ed3fd70adf0bad5f1fac53fed", - "sha256:92501cdf9cc66ebd3e612f1b4f0c0765dfa42f0fa38ffb319b6bd84dd675d705" - ], - "markers": "python_version < '3.8'", - "version": "==6.6.0" - }, - "iniconfig": { - "hashes": [ - "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", - "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" - ], - "markers": "python_version >= '3.7'", - "version": "==2.0.0" - }, - "nodeenv": { - "hashes": [ - "sha256:27083a7b96a25f2f5e1d8cb4b6317ee8aeda3bdd121394e5ac54e498028a042e", - "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", - "version": "==1.7.0" - }, - "packaging": { - "hashes": [ - "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61", - "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f" - ], - "markers": "python_version >= '3.7'", - "version": "==23.1" - }, - "platformdirs": { - "hashes": [ - "sha256:47692bc24c1958e8b0f13dd727307cff1db103fca36399f457da8e05f222fdc4", - "sha256:7954a68d0ba23558d753f73437c55f89027cf8f5108c19844d4b82e5af396335" - ], - "markers": "python_version >= '3.7'", - "version": "==3.5.0" - }, - "pluggy": { - "hashes": [ - "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159", - "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3" - ], - "markers": "python_version >= '3.6'", - "version": "==1.0.0" - }, - "pre-commit": { - "hashes": [ - "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658", - "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad" - ], - "index": "pypi", - "version": "==2.21.0" - }, - "pytest": { - "hashes": [ - "sha256:3799fa815351fea3a5e96ac7e503a96fa51cc9942c3753cda7651b93c1cfa362", - "sha256:434afafd78b1d78ed0addf160ad2b77a30d35d4bdf8af234fe621919d9ed15e3" - ], - "index": "pypi", - "version": "==7.3.1" - }, - "pyyaml": { - "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - 
"sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", - "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", - "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", - "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" - ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", - "version": "==5.4.1" - }, - "setuptools": { - "hashes": [ - "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b", - "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990" - ], - "markers": "python_version >= '3.7'", - "version": "==67.7.2" - }, - "tomli": { - "hashes": [ - "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", - "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" - ], - "markers": "python_version < '3.11'", - "version": "==2.0.1" - }, - "typing-extensions": { - "hashes": [ - "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb", - "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4" - ], - "markers": "python_version < '3.9'", - "version": "==4.5.0" - }, - "virtualenv": { - "hashes": [ - "sha256:6abec7670e5802a528357fdc75b26b9f57d5d92f29c5462ba0fbe45feacc685e", - "sha256:a85caa554ced0c0afbd0d638e7e2d7b5f92d23478d05d17a76daeac8f279f924" - ], - "markers": "python_version >= '3.7'", - "version": "==20.23.0" - }, - "zipp": { - "hashes": [ - "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b", - "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556" - ], - "markers": "python_version < '3.10'", - "version": "==3.15.0" - } - } -} diff --git a/pipelines/README.md b/pipelines/README.md deleted file mode 100644 index 1453995d..00000000 --- a/pipelines/README.md +++ /dev/null @@ -1,133 +0,0 @@ - - # Pipelines - -## Introduction - -This repository provides templates and reference implementations of [Vertex AI Pipelines](https://cloud.google.com/vertex-ai/docs/pipelines/) for production-grade training and batch prediction pipelines on GCP for: -- [TensorFlow](https://www.tensorflow.org/api_docs) -- [XGBoost](https://xgboost.readthedocs.io/en/stable/) (using the [Scikit-Learn wrapper interface for 
XGBoost](https://xgboost.readthedocs.io/en/latest/python/python_api.html#module-xgboost.sklearn)) - -Further useful documentation: -- [KubeFlow Pipelines SDK](https://www.kubeflow.org/docs/components/pipelines/sdk/sdk-overview/) -- [Google Cloud Pipeline Components - Vertex Training Wrapper](https://github.com/kubeflow/pipelines/blob/master/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/utils.py) -- [Vertex AI](https://cloud.google.com/vertex-ai) - -The sections below provide a general description of the ML pipelines (training and prediction) for both the TensorFlow template and the XGBoost template. These two templates are similar in most ways, and a complete overview of their key differences is given in their own README files: -- [TensorFlow pipelines README](src/pipelines/tensorflow/README.md) -- [XGBoost pipelines README](src/pipelines/xgboost/README.md) - -## Training Pipeline -### Prerequisites for training pipeline - -Optional: an existing champion model - -### Components in training pipeline - -The training pipeline can be broken down into the following sequence of components at a high level: - -| Step No | Step Name | Description | Input(s) | Output(s) | -|:-------:| :---: | :--- | :--- | :--- | -| 1 | Generate Queries | Generate base preprocessing & train-test-validation split queries for Google BigQuery. This component only needs a `.sql` file and all parametrized values in that `.sql` file (`source_dataset`, `source_table`, `filter_column`, and so on) | | | -| 2 | BQ Query to Table | This component takes the generated query from the previous component & runs it on Google BigQuery to create the required table (preprocessed data / train-test-validation data). | Output from **Generate Queries** | New Google BigQuery table created | -| 3 | Extract BQ to Dataset | Creates CSV file(s) in Google Cloud Storage from the Google BigQuery tables created in the previous component | BigQuery table created from **BQ Query to Table** | BigQuery table converted to Google Cloud Storage objects as CSV/JSONL files, and the corresponding file directory and path | -| 4 | Vertex Training | Run a Vertex Training job with train-validation data using a training component wrapped in a ContainerOp from `google-cloud-pipeline-components` |
  • *Train/Validation data* - Google Cloud Storage CSV files for `train_data` + `valid_data` from **Extract BQ to Dataset**
  • *Model Parameters* - Specific model parameters for model training
| Trained challenger model object(s) or binaries stored in Google Cloud Storage | -| 5 | Challenger Model Predictions | Use the trained model to get challenger predictions for evaluation purposes |
  • *Test data* - Google Cloud Storage CSV files for `test_data` from **Extract BQ to Dataset**
  • *Trained Model* - Trained challenger model binaries stored in Google Cloud Storage from **Vertex Training**
| Challenger predictions on test data stored as CSV files in Google Cloud Storage | -| 6 | Calculate Evaluation Metrics | Use predictions from the previous component to compute user-defined evaluation metrics. This component uses TFMA. |
  • Test data predictions stored as CSV files in Google Cloud Storage from **Challenger Model Predictions**
  • Data slices if required
| Evaluation metrics stored in Google Cloud Storage. Plots for all evaluation metrics and slices stored as HTML files in Google Cloud Storage | -| 7 | Lookup Model | Fetch the required model and its resource name if a previous champion model exists in Vertex AI | Base model name to check if it exists in Vertex AI | Model resource name as a string if a champion model exists, else an empty string | -| 8 | Champion-Challenger Comparison | All model training activities until this point were for a new challenger model. The basic idea is to compare this newly trained challenger model with an existing champion model & decide whether to deploy the challenger based on this performance comparison.
      • *If champion model does not exist* - Upload Challenger model as the new Champion model to Vertex AI
      • *If champion model exists* -
        • **Calculate evaluation metrics** - Compute user-defined evaluation metrics on the same `test_data`
        • **Compare Models** - Compare the computed evaluation metrics of the champion and challenger models
          • *If challenger model performance is better* - Upload Challenger model as the new Champion model to Vertex AI
          • *If champion model performance is better* - End pipeline (Champion model remains as is in Vertex AI)
      | | | - -Each of these components can be connected either explicitly, by calling `.after({component_name})`, or implicitly, when the output of a preceding component is used as an input to that component. -Every component can be given a display name using `.set_display_name({display_name})`. Note that this applies to both the training pipeline and the prediction pipeline; see the sketch below.
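A minimal sketch of both kinds of connection, assuming the KFP v2 SDK (`from kfp import dsl`); the components here are hypothetical stand-ins, not the real components under `pipeline_components/`:
```
from kfp import dsl


@dsl.component(base_image="python:3.9")
def generate_query() -> str:
    # Hypothetical stand-in for the Generate Queries step
    return "SELECT * FROM `my-project.my_dataset.my_table`"


@dsl.component(base_image="python:3.9")
def bq_query_to_table(query: str) -> str:
    # Hypothetical stand-in for the BQ Query to Table step
    return "my-project.my_dataset.preprocessed"


@dsl.pipeline(name="connection-sketch")
def sketch_pipeline():
    query_task = generate_query().set_display_name("Generate Queries")

    # Implicit connection: consuming query_task.output creates the dependency
    table_task = bq_query_to_table(query=query_task.output)
    table_task.set_display_name("BQ Query to Table")

    # Explicit connection: run after table_task despite having no data dependency on it
    audit_task = generate_query().set_display_name("Audit Queries")
    audit_task.after(table_task)
```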
## Prediction Pipeline -### Prerequisites for prediction pipeline - -- A champion model -- A successful run of the training pipeline - -### Components in prediction pipeline - -The prediction pipeline can be broken down into the following sequence of components at a high level: - -| Step No | Name | Description | Input(s) | Output(s) | -|:-------:| :---: | :--- | :--- | :--- | -| 1 | Generate Queries | Generate base preprocessing & prediction data creation queries for Google BigQuery. This component only needs a `.sql` file & all parametrized values in that `.sql` file (`source_dataset`, `source_table`, `filter_column`, etc.) | | | -| 2 | BQ Query to Table | This component takes the generated query from the previous component & runs it on Google BigQuery to create the required table (preprocessed data / prediction data). | Output from **Generate Queries** | New Google BigQuery table created | -| 3 | Extract BQ to Dataset | Creates CSV/JSONL file(s) in Google Cloud Storage from the Google BigQuery tables created in the previous component | BigQuery table created from **BQ Query to Table**. Full table name required, i.e. `{project_id}.{dataset_id}.{table_id}` | BigQuery table converted to Google Cloud Storage objects as CSV/JSONL files, and the corresponding file directory and path | -| 4 | Lookup Model | Fetch the required model resource name for a champion model in Vertex AI. Since the prediction pipeline always runs after the training pipeline, a champion model will always exist | Base champion model name as a string | Champion model resource name as a string | -| 5 | Vertex Batch Predictions from Google Cloud Storage | Run a Vertex Batch Prediction job with prediction data as input in the TensorFlow prediction pipeline |
      • *Prediction data* - The URIs of Google Cloud Storage CSV/JSONL files for `prediction_data` from **Extract BQ to Dataset**
      • *Model* - Champion model as per Vertex AI
      | A batch prediction job artifact with metadata: resourceName (the batch prediction job ID) and gcsOutputDirectory (the output JSONL files in Google Cloud Storage) | -| 6 | Vertex Batch Predictions from BigQuery | Run a Vertex Batch Prediction job with prediction data as input in the XGBoost prediction pipeline |
      • *Prediction data* - Google BigQuery table of `prediction_data` from **Extract BQ to Dataset**
      • *Model resource name* - Champion model as per Vertex AI
      | A batch prediction job artifact with metadata: resourceName (the batch prediction job ID) and bigqueryOutputTable (the output BigQuery tables) | -| 7 | Load Dataset to BQ | Upload the batch predictions stored in Google Cloud Storage to a BigQuery table in the TensorFlow prediction pipeline | Batch predictions stored as JSONL files in Google Cloud Storage from **Vertex Batch Predictions**. The specific URI can be read from the metadata of the batch prediction job | Google BigQuery table with the batch predictions & input instances/features | - -Each of these components can be connected either explicitly, by calling `.after({component_name})`, or implicitly, when the output of a preceding component is used as an input to that component. -Every component can be given a display name using `.set_display_name({display_name})`. Note that this applies to both the training pipeline and the prediction pipeline. - -The final loading stage has an optional argument `dataset_location`, which is the location in which to run the loading job; it must match the location of the destination table. It defaults to `"EU"` in the [component definition](pipeline_components/bigquery/bigquery/upload_prediction/component.py). - -## Trigger - -- The trigger script can be found [here](src/pipelines/trigger) - -## Pipeline configuration - -In order to orchestrate machine learning (ML) pipelines on Vertex AI, you need to configure a few things. - -### env.sh - -The first thing to do is to copy `env.sh.example` to `env.sh`, and fill in `env.sh` with the values relevant to your development/sandbox environment. These environment variables will be used when you trigger pipeline runs in Vertex AI. - -### Pipeline input parameters - -The ML pipelines have input parameters. As you can see in the pipeline definition files (`pipelines/<tensorflow|xgboost>/<training|prediction>/pipeline.py`), they have default values, and some of these default values are derived from environment variables (which in turn are defined in `env.sh` as described above). - -When triggering ad hoc runs in your dev/sandbox environment, or when running the E2E tests in CI, these default values are used. For the test and production deployments, the pipeline parameters are defined in the Terraform code for the Cloud Scheduler jobs (`envs/<env>/variables.auto.tfvars`). - -### Python packages and Docker images - -You can specify the Python base image and the packages required for a KFP component in the `@component` decorator, using the `base_image` and `packages_to_install` arguments respectively, as in the sketch below.
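For example, a sketch of a hypothetical component (the package pin and the BigQuery logic are illustrative assumptions, not components from this repository), again assuming the KFP v2 SDK:
```
from kfp import dsl


# base_image and packages_to_install below are example values
@dsl.component(
    base_image="python:3.9",
    packages_to_install=["google-cloud-bigquery==3.11.4"],
)
def count_rows(table_id: str) -> int:
    """Hypothetical component: count the rows of a BigQuery table."""
    # Imports go inside the function body, since it runs in its own container
    from google.cloud import bigquery

    client = bigquery.Client()
    result = client.query(f"SELECT COUNT(*) AS n FROM `{table_id}`").result()
    return next(iter(result)).n
```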
### Compute resources configuration in pipeline -In general, there are two ways to configure compute resources in each pipeline. -Firstly, you can set the `machine_type` variable in the [XGBoost training pipeline](src/pipelines/xgboost/training/pipeline.py), [XGBoost prediction pipeline](src/pipelines/xgboost/prediction/pipeline.py), [TensorFlow training pipeline](src/pipelines/tensorflow/training/pipeline.py), and [TensorFlow prediction pipeline](src/pipelines/tensorflow/prediction/pipeline.py). The default value is `n1-standard-4`, with 4 vCPUs and 15GB of memory. -Secondly, you can configure resources on individual pipeline steps, since some steps may need more computational resources than others. -You can increase the CPU and memory limits by applying `.set_cpu_limit({CPU_LIMIT})` and `.set_memory_limit({MEMORY_LIMIT})` to any component. -- CPU_LIMIT: The maximum CPU limit for this operator. This string value can be a number (an integer number of CPUs), or a number followed by "m", meaning 1/1000 of a CPU. You can specify at most 96 CPUs. -- MEMORY_LIMIT: The maximum memory limit for this operator. This string value can be a number, or a number followed by "K" (kilobyte), "M" (megabyte), or "G" (gigabyte). At most 624GB is supported. - -For more information, please refer to the guide on [specifying machine types for a pipeline step](https://cloud.google.com/vertex-ai/docs/pipelines/machine-types). - -### Cache Usage in pipeline -When Vertex AI Pipelines runs a pipeline, it checks whether an execution with the same interface (cache key) already exists in Vertex ML Metadata for each pipeline step (component). -If the component and its arguments are exactly the same as in some previous execution, the task can be skipped and the outputs of that old execution can be reused. -Since most ML projects are long-running and computationally expensive, using the cache is cost-effective when you are sure that the output of a component is correct. -You can [control cache reuse behaviour](https://cloud.google.com/vertex-ai/docs/pipelines/configure-caching) either for a single component or for the entire pipeline (all components). -If you want to control the caching behaviour of individual components, add `.set_caching_options()` to each component when building the pipeline (see the sketch at the end of this README). -To change the caching behaviour of ALL components within a pipeline, you can specify this when you trigger the pipeline, like so: `make run pipeline=<pipeline_name> enable_caching=<true|false>` -It is suggested to start by disabling the caching of components during development, until you have a good idea of how the caching behaviour works, as it can lead to unexpected results. - -Note: -Disable caching if you want to use new data: -When caching is enabled for the pipeline, changing the `timestamp` or the source of the data (such as `ingestion_dataset_id`) will only change the output of the corresponding step, for example `Ingest data`. The other components, which take the same arguments as before, will use their caches instead of the new data. - -### Champion / Challenger evaluation - -In the training pipelines, a Champion-Challenger evaluation is conducted among the models with the same name pattern in the same project. Explore [`lookup_model.py`](../pipeline_components/aiplatform/aiplatform/lookup_model/component.py) for more detailed information. -In practice, you should be aware of this and give the model a new, specific name related to the ML project you are working on once the new model is no longer comparable with the previous models. -For example, when you want to train a new model using different features, the best practice is to change your model name in the pipeline input parameters.
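As a closing illustration of the two configuration sections above (compute resources and caching), here is a minimal sketch of applying the per-step settings to a pipeline task, again assuming the KFP v2 SDK with a hypothetical component:
```
from kfp import dsl


@dsl.component(base_image="python:3.9")
def train_model(learning_rate: float) -> float:
    # Hypothetical stand-in for the real Vertex Training step
    return learning_rate * 2


@dsl.pipeline(name="per-step-settings-sketch")
def sketch_pipeline(learning_rate: float = 0.01):
    train_task = train_model(learning_rate=learning_rate)
    train_task.set_display_name("Vertex Training")
    train_task.set_cpu_limit("8")       # at most 8 vCPUs for this step
    train_task.set_memory_limit("16G")  # at most 16GB of memory for this step
    train_task.set_caching_options(False)  # never reuse a cached execution of this step
```
diff --git a/pipelines/poetry.lock b/pipelines/poetry.lock new file mode 100644 index 00000000..2080b2f7 --- /dev/null +++ b/pipelines/poetry.lock @@ -0,0 +1,1418 @@ +# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand.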
+ +[[package]] +name = "cachetools" +version = "5.3.2" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.2-py3-none-any.whl", hash = "sha256:861f35a13a451f94e301ce2bec7cac63e881232ccce7ed67fab9b5df4d3beaa1"}, + {file = "cachetools-5.3.2.tar.gz", hash = "sha256:086ee420196f7b2ab9ca2db2520aca326318b68fe5ba8bc4d49cca91add450f2"}, +] + +[[package]] +name = "certifi" +version = "2023.11.17" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, +] + +[[package]] +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.8" +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file 
= "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + 
{file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = 
"sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "components" +version = "0.1.0" +description = "KubeFlow components for Google Cloud" +optional = false +python-versions = ">=3.9,<3.11" +files = [] +develop = true + +[package.dependencies] +kfp = ">=2.0.1,<3.0.0" +pyyaml = "6.0.1" + +[package.source] +type = "directory" +url = "../components" + +[[package]] +name = "coverage" +version = "7.2.5" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:883123d0bbe1c136f76b56276074b0c79b5817dd4238097ffa64ac67257f4b6c"}, + {file = "coverage-7.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d2fbc2a127e857d2f8898aaabcc34c37771bf78a4d5e17d3e1f5c30cd0cbc62a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f3671662dc4b422b15776cdca89c041a6349b4864a43aa2350b6b0b03bbcc7f"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:780551e47d62095e088f251f5db428473c26db7829884323e56d9c0c3118791a"}, + {file = "coverage-7.2.5-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:066b44897c493e0dcbc9e6a6d9f8bbb6607ef82367cf6810d387c09f0cd4fe9a"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9a4ee55174b04f6af539218f9f8083140f61a46eabcaa4234f3c2a452c4ed11"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:706ec567267c96717ab9363904d846ec009a48d5f832140b6ad08aad3791b1f5"}, + {file = "coverage-7.2.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ae453f655640157d76209f42c62c64c4d4f2c7f97256d3567e3b439bd5c9b06c"}, + {file = "coverage-7.2.5-cp310-cp310-win32.whl", hash = "sha256:f81c9b4bd8aa747d417407a7f6f0b1469a43b36a85748145e144ac4e8d303cb5"}, + {file = "coverage-7.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:dc945064a8783b86fcce9a0a705abd7db2117d95e340df8a4333f00be5efb64c"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:40cc0f91c6cde033da493227797be2826cbf8f388eaa36a0271a97a332bfd7ce"}, + {file = "coverage-7.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a66e055254a26c82aead7ff420d9fa8dc2da10c82679ea850d8feebf11074d88"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c10fbc8a64aa0f3ed136b0b086b6b577bc64d67d5581acd7cc129af52654384e"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a22cbb5ede6fade0482111fa7f01115ff04039795d7092ed0db43522431b4f2"}, + {file = "coverage-7.2.5-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:292300f76440651529b8ceec283a9370532f4ecba9ad67d120617021bb5ef139"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7ff8f3fb38233035028dbc93715551d81eadc110199e14bbbfa01c5c4a43f8d8"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_i686.whl", hash = 
"sha256:a08c7401d0b24e8c2982f4e307124b671c6736d40d1c39e09d7a8687bddf83ed"}, + {file = "coverage-7.2.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ef9659d1cda9ce9ac9585c045aaa1e59223b143f2407db0eaee0b61a4f266fb6"}, + {file = "coverage-7.2.5-cp311-cp311-win32.whl", hash = "sha256:30dcaf05adfa69c2a7b9f7dfd9f60bc8e36b282d7ed25c308ef9e114de7fc23b"}, + {file = "coverage-7.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:97072cc90f1009386c8a5b7de9d4fc1a9f91ba5ef2146c55c1f005e7b5c5e068"}, + {file = "coverage-7.2.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:bebea5f5ed41f618797ce3ffb4606c64a5de92e9c3f26d26c2e0aae292f015c1"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:828189fcdda99aae0d6bf718ea766b2e715eabc1868670a0a07bf8404bf58c33"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e8a95f243d01ba572341c52f89f3acb98a3b6d1d5d830efba86033dd3687ade"}, + {file = "coverage-7.2.5-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8834e5f17d89e05697c3c043d3e58a8b19682bf365048837383abfe39adaed5"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1f25ee9de21a39b3a8516f2c5feb8de248f17da7eead089c2e04aa097936b47"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1637253b11a18f453e34013c665d8bf15904c9e3c44fbda34c643fbdc9d452cd"}, + {file = "coverage-7.2.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8e575a59315a91ccd00c7757127f6b2488c2f914096077c745c2f1ba5b8c0969"}, + {file = "coverage-7.2.5-cp37-cp37m-win32.whl", hash = "sha256:509ecd8334c380000d259dc66feb191dd0a93b21f2453faa75f7f9cdcefc0718"}, + {file = "coverage-7.2.5-cp37-cp37m-win_amd64.whl", hash = "sha256:12580845917b1e59f8a1c2ffa6af6d0908cb39220f3019e36c110c943dc875b0"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5016e331b75310610c2cf955d9f58a9749943ed5f7b8cfc0bb89c6134ab0a84"}, + {file = "coverage-7.2.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:373ea34dca98f2fdb3e5cb33d83b6d801007a8074f992b80311fc589d3e6b790"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a063aad9f7b4c9f9da7b2550eae0a582ffc7623dca1c925e50c3fbde7a579771"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:38c0a497a000d50491055805313ed83ddba069353d102ece8aef5d11b5faf045"}, + {file = "coverage-7.2.5-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b3b05e22a77bb0ae1a3125126a4e08535961c946b62f30985535ed40e26614"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0342a28617e63ad15d96dca0f7ae9479a37b7d8a295f749c14f3436ea59fdcb3"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf97ed82ca986e5c637ea286ba2793c85325b30f869bf64d3009ccc1a31ae3fd"}, + {file = "coverage-7.2.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c2c41c1b1866b670573657d584de413df701f482574bad7e28214a2362cb1fd1"}, + {file = "coverage-7.2.5-cp38-cp38-win32.whl", hash = "sha256:10b15394c13544fce02382360cab54e51a9e0fd1bd61ae9ce012c0d1e103c813"}, + {file = "coverage-7.2.5-cp38-cp38-win_amd64.whl", hash = "sha256:a0b273fe6dc655b110e8dc89b8ec7f1a778d78c9fd9b4bda7c384c8906072212"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_10_9_x86_64.whl", 
hash = "sha256:5c587f52c81211d4530fa6857884d37f514bcf9453bdeee0ff93eaaf906a5c1b"}, + {file = "coverage-7.2.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4436cc9ba5414c2c998eaedee5343f49c02ca93b21769c5fdfa4f9d799e84200"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6599bf92f33ab041e36e06d25890afbdf12078aacfe1f1d08c713906e49a3fe5"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:857abe2fa6a4973f8663e039ead8d22215d31db613ace76e4a98f52ec919068e"}, + {file = "coverage-7.2.5-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6f5cab2d7f0c12f8187a376cc6582c477d2df91d63f75341307fcdcb5d60303"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aa387bd7489f3e1787ff82068b295bcaafbf6f79c3dad3cbc82ef88ce3f48ad3"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:156192e5fd3dbbcb11cd777cc469cf010a294f4c736a2b2c891c77618cb1379a"}, + {file = "coverage-7.2.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd3b4b8175c1db502adf209d06136c000df4d245105c8839e9d0be71c94aefe1"}, + {file = "coverage-7.2.5-cp39-cp39-win32.whl", hash = "sha256:ddc5a54edb653e9e215f75de377354e2455376f416c4378e1d43b08ec50acc31"}, + {file = "coverage-7.2.5-cp39-cp39-win_amd64.whl", hash = "sha256:338aa9d9883aaaad53695cb14ccdeb36d4060485bb9388446330bef9c361c252"}, + {file = "coverage-7.2.5-pp37.pp38.pp39-none-any.whl", hash = "sha256:8877d9b437b35a85c18e3c6499b23674684bf690f5d96c1006a1ef61f9fdf0f3"}, + {file = "coverage-7.2.5.tar.gz", hash = "sha256:f99ef080288f09ffc687423b8d60978cf3a465d3f404a18d1a05474bd8575a47"}, +] + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.8" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, + {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, +] + +[[package]] +name = "docstring-parser" +version = "0.15" +description = "Parse Python docstrings in reST, Google and Numpydoc format" +optional = false +python-versions = ">=3.6,<4.0" +files = [ + {file = "docstring_parser-0.15-py3-none-any.whl", hash = "sha256:d1679b86250d269d06a99670924d6bce45adc00b08069dae8c47d98e89b667a9"}, + {file = "docstring_parser-0.15.tar.gz", hash = "sha256:48ddc093e8b1865899956fcc03b03e66bb7240c310fac5af81814580c55bf682"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, + {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.13.1" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "filelock-3.13.1-py3-none-any.whl", hash = "sha256:57dbda9b35157b05fb3e58ee91448612eb674172fab98ee235ccb0b5bee19a1c"}, + {file = "filelock-3.13.1.tar.gz", hash = "sha256:521f5f56c50f8426f5e03ad3b281b490a87ef15bc6c526f168290f0c7148d44e"}, +] + +[package.extras] +docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)"] +typing = ["typing-extensions (>=4.8)"] + +[[package]] +name = "google-api-core" +version = "2.15.0" +description = "Google API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-api-core-2.15.0.tar.gz", hash = "sha256:abc978a72658f14a2df1e5e12532effe40f94f868f6e23d95133bd6abcca35ca"}, + {file = "google_api_core-2.15.0-py3-none-any.whl", hash = "sha256:2aa56d2be495551e66bbff7f729b790546f87d5c90e74781aa77233bcb395a8a"}, +] + +[package.dependencies] +google-auth = ">=2.14.1,<3.0.dev0" +googleapis-common-protos = ">=1.56.2,<2.0.dev0" +grpcio = {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""} +grpcio-status = {version = ">=1.33.2,<2.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" +requests = ">=2.18.0,<3.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.33.2,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "grpcio-status (>=1.33.2,<2.0.dev0)", "grpcio-status (>=1.49.1,<2.0.dev0)"] +grpcgcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] +grpcio-gcp = ["grpcio-gcp (>=0.2.2,<1.0.dev0)"] + +[[package]] +name = "google-auth" +version = "2.25.2" +description = "Google Authentication Library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-auth-2.25.2.tar.gz", hash = "sha256:42f707937feb4f5e5a39e6c4f343a17300a459aaf03141457ba505812841cc40"}, + {file = "google_auth-2.25.2-py2.py3-none-any.whl", hash = "sha256:473a8dfd0135f75bb79d878436e568f2695dce456764bf3a02b6f8c540b1d256"}, +] + +[package.dependencies] +cachetools = ">=2.0.0,<6.0" +pyasn1-modules = ">=0.2.1" +rsa = ">=3.1.4,<5" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0.dev0)", "requests (>=2.20.0,<3.0.0.dev0)"] +enterprise-cert = ["cryptography (==36.0.2)", "pyopenssl (==22.0.0)"] +pyopenssl = ["cryptography (>=38.0.3)", "pyopenssl (>=20.0.0)"] +reauth = ["pyu2f (>=0.1.5)"] +requests = ["requests (>=2.20.0,<3.0.0.dev0)"] + +[[package]] +name = "google-cloud-aiplatform" +version = "1.38.1" +description = "Vertex AI API client library" +optional = false +python-versions = ">=3.8" +files = [ + {file = "google-cloud-aiplatform-1.38.1.tar.gz", hash = "sha256:30439d914bb028443c0506cc0e6dd825cff5401aeb8233e13d8cfd77c3c87da1"}, + {file = "google_cloud_aiplatform-1.38.1-py2.py3-none-any.whl", hash = "sha256:5e1fcd1068dd2c4f0fc89aa616e34a8b9434eaa72ea6216f5036ef26f08bd448"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.32.0,<2.0.dev0 || >=2.8.dev0,<3.0.0dev", extras = ["grpc"]} +google-cloud-bigquery = ">=1.15.0,<4.0.0dev" +google-cloud-resource-manager = ">=1.3.3,<3.0.0dev" +google-cloud-storage = ">=1.32.0,<3.0.0dev" +packaging = ">=14.3" +proto-plus = ">=1.22.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 
|| >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" +shapely = "<3.0.0dev" + +[package.extras] +autologging = ["mlflow (>=1.27.0,<=2.1.1)"] +cloud-profiler = ["tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.4.0,<3.0.0dev)", "werkzeug (>=2.0.0,<2.1.0dev)"] +datasets = ["pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)"] +endpoint = ["requests (>=2.28.1)"] +full = ["cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)", "httpx (>=0.23.0,<0.25.0)", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "starlette (>=0.17.1)", "tensorflow (>=2.3.0,<3.0.0dev)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)"] +lit = ["explainable-ai-sdk (>=1.0.0)", "lit-nlp (==0.4.0)", "pandas (>=1.0.0)", "tensorflow (>=2.3.0,<3.0.0dev)"] +metadata = ["numpy (>=1.15.0)", "pandas (>=1.0.0)"] +pipelines = ["pyyaml (==5.3.1)"] +prediction = ["docker (>=5.0.3)", "fastapi (>=0.71.0,<0.103.1)", "httpx (>=0.23.0,<0.25.0)", "starlette (>=0.17.1)", "uvicorn[standard] (>=0.16.0)"] +preview = ["cloudpickle (<3.0)", "google-cloud-logging (<4.0)"] +private-endpoints = ["requests (>=2.28.1)", "urllib3 (>=1.21.1,<1.27)"] +ray = ["google-cloud-bigquery", "google-cloud-bigquery-storage", "pandas (>=1.0.0)", "pyarrow (>=6.0.1)", "pydantic (<2)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)"] +tensorboard = ["tensorflow (>=2.3.0,<3.0.0dev)"] +testing = ["bigframes", "cloudpickle (<3.0)", "docker (>=5.0.3)", "explainable-ai-sdk (>=1.0.0)", "fastapi (>=0.71.0,<0.103.1)", "google-cloud-bigquery", "google-cloud-bigquery-storage", "google-cloud-logging (<4.0)", "google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)", "grpcio-testing", "httpx (>=0.23.0,<0.25.0)", "ipython", "kfp", "lit-nlp (==0.4.0)", "mlflow (>=1.27.0,<=2.1.1)", "numpy (>=1.15.0)", "pandas (>=1.0.0)", "pyarrow (>=10.0.1)", "pyarrow (>=3.0.0,<8.0dev)", "pyarrow (>=6.0.1)", "pydantic (<2)", "pyfakefs", "pytest-asyncio", "pytest-xdist", "pyyaml (==5.3.1)", "ray[default] (>=2.4,<2.5)", "ray[default] (>=2.5,<2.5.1)", "requests (>=2.28.1)", "requests-toolbelt (<1.0.0)", "scikit-learn", "starlette (>=0.17.1)", "tensorboard-plugin-profile (>=2.4.0,<3.0.0dev)", "tensorflow (>=2.3.0,<3.0.0dev)", "tensorflow (>=2.3.0,<=2.12.0)", "tensorflow (>=2.4.0,<3.0.0dev)", "torch (>=2.0.0,<2.1.0)", "urllib3 (>=1.21.1,<1.27)", "uvicorn[standard] (>=0.16.0)", "werkzeug (>=2.0.0,<2.1.0dev)", "xgboost", "xgboost-ray"] +vizier = ["google-vizier (==0.0.11)", "google-vizier (==0.0.4)", "google-vizier (>=0.0.14)", "google-vizier (>=0.1.6)"] +xai = ["tensorflow (>=2.3.0,<3.0.0dev)"] + +[[package]] +name = "google-cloud-bigquery" +version = "3.14.1" +description = "Google BigQuery API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-bigquery-3.14.1.tar.gz", hash = "sha256:aa15bd86f79ea76824c7d710f5ae532323c4b3ba01ef4abff42d4ee7a2e9b142"}, + {file = "google_cloud_bigquery-3.14.1-py2.py3-none-any.whl", hash = 
"sha256:a8ded18455da71508db222b7c06197bc12b6dbc6ed5b0b64e7007b76d7016957"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-cloud-core = ">=1.6.0,<3.0.0dev" +google-resumable-media = ">=0.6.0,<3.0dev" +packaging = ">=20.0.0" +python-dateutil = ">=2.7.2,<3.0dev" +requests = ">=2.21.0,<3.0.0dev" + +[package.extras] +all = ["Shapely (>=1.8.4,<3.0.0dev)", "db-dtypes (>=0.3.0,<2.0.0dev)", "geopandas (>=0.9.0,<1.0dev)", "google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "importlib-metadata (>=1.0.0)", "ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)", "ipywidgets (>=7.7.0)", "opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)", "pandas (>=1.1.0)", "proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)", "pyarrow (>=3.0.0)", "tqdm (>=4.7.4,<5.0.0dev)"] +bigquery-v2 = ["proto-plus (>=1.15.0,<2.0.0dev)", "protobuf (>=3.19.5,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5,<5.0.0dev)"] +bqstorage = ["google-cloud-bigquery-storage (>=2.6.0,<3.0.0dev)", "grpcio (>=1.47.0,<2.0dev)", "grpcio (>=1.49.1,<2.0dev)", "pyarrow (>=3.0.0)"] +geopandas = ["Shapely (>=1.8.4,<3.0.0dev)", "geopandas (>=0.9.0,<1.0dev)"] +ipython = ["ipykernel (>=6.0.0)", "ipython (>=7.23.1,!=8.1.0)"] +ipywidgets = ["ipykernel (>=6.0.0)", "ipywidgets (>=7.7.0)"] +opentelemetry = ["opentelemetry-api (>=1.1.0)", "opentelemetry-instrumentation (>=0.20b0)", "opentelemetry-sdk (>=1.1.0)"] +pandas = ["db-dtypes (>=0.3.0,<2.0.0dev)", "importlib-metadata (>=1.0.0)", "pandas (>=1.1.0)", "pyarrow (>=3.0.0)"] +tqdm = ["tqdm (>=4.7.4,<5.0.0dev)"] + +[[package]] +name = "google-cloud-core" +version = "2.4.1" +description = "Google Cloud API client core library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-core-2.4.1.tar.gz", hash = "sha256:9b7749272a812bde58fff28868d0c5e2f585b82f37e09a1f6ed2d4d10f134073"}, + {file = "google_cloud_core-2.4.1-py2.py3-none-any.whl", hash = "sha256:a9e6a4422b9ac5c29f79a0ede9485473338e2ce78d91f2370c01e730eab22e61"}, +] + +[package.dependencies] +google-api-core = ">=1.31.6,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.25.0,<3.0dev" + +[package.extras] +grpc = ["grpcio (>=1.38.0,<2.0dev)", "grpcio-status (>=1.38.0,<2.0.dev0)"] + +[[package]] +name = "google-cloud-pipeline-components" +version = "2.8.0" +description = "This SDK enables a set of First Party (Google owned) pipeline components that allow users to take their experience from Vertex AI SDK and other Google Cloud services and create a corresponding pipeline using KFP or Managed Pipelines." 
+optional = false +python-versions = ">=3.7.0,<3.12.0" +files = [ + {file = "google_cloud_pipeline_components-2.8.0-py3-none-any.whl", hash = "sha256:2b21bc6f65e4c47f21a5c5de472a770fe8ca7c639252031fe2f4c054a6095f4a"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-cloud-aiplatform = ">=1.14.0,<2" +Jinja2 = "3.1.2" +kfp = ">=2.0.0b10,<=2.4.0" + +[package.extras] +docs = ["autodocsumm (==0.2.9)", "commonmark (==0.9.1)", "grpcio-status (<=1.47.0)", "m2r2 (==0.3.2)", "protobuf (>=3.19.0,<4.0.0dev)", "sphinx (==5.0.2)", "sphinx-immaterial (==0.9.0)", "sphinx-notfound-page (==0.8.3)", "sphinx-rtd-theme (==1.0.0)"] +tests = ["flake8 (>=3.0.0)", "mock (>=4.0.0)", "pytest (>=6.0.0)"] + +[[package]] +name = "google-cloud-resource-manager" +version = "1.11.0" +description = "Google Cloud Resource Manager API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-resource-manager-1.11.0.tar.gz", hash = "sha256:a64ba6bb595634ecd2472b8b0322e8f012a76327756659a2dde9f392d7fa1af2"}, + {file = "google_cloud_resource_manager-1.11.0-py2.py3-none-any.whl", hash = "sha256:bafde909b1d434a620eefcd144b14fcccb72f268afcf158c5bcfcdce5e04a72b"}, +] + +[package.dependencies] +google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} +grpc-google-iam-v1 = ">=0.12.4,<1.0.0dev" +proto-plus = ">=1.22.3,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "google-cloud-storage" +version = "2.12.0" +description = "Google Cloud Storage API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-cloud-storage-2.12.0.tar.gz", hash = "sha256:57c0bcda2f5e11f008a155d8636d8381d5abab46b58e0cae0e46dd5e595e6b46"}, + {file = "google_cloud_storage-2.12.0-py2.py3-none-any.whl", hash = "sha256:bc52563439d42981b6e21b071a76da2791672776eda3ba99d13a8061ebbd6e5e"}, +] + +[package.dependencies] +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=2.23.3,<3.0dev" +google-cloud-core = ">=2.3.0,<3.0dev" +google-crc32c = ">=1.0,<2.0dev" +google-resumable-media = ">=2.6.0" +requests = ">=2.18.0,<3.0.0dev" + +[package.extras] +protobuf = ["protobuf (<5.0.0dev)"] + +[[package]] +name = "google-crc32c" +version = "1.5.0" +description = "A python wrapper of the C library 'Google CRC32C'" +optional = false +python-versions = ">=3.7" +files = [ + {file = "google-crc32c-1.5.0.tar.gz", hash = "sha256:89284716bc6a5a415d4eaa11b1726d2d60a0cd12aadf5439828353662ede9dd7"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:596d1f98fc70232fcb6590c439f43b350cb762fb5d61ce7b0e9db4539654cc13"}, + {file = "google_crc32c-1.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:be82c3c8cfb15b30f36768797a640e800513793d6ae1724aaaafe5bf86f8f346"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:461665ff58895f508e2866824a47bdee72497b091c730071f2b7575d5762ab65"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2096eddb4e7c7bdae4bd69ad364e55e07b8316653234a56552d9c988bd2d61b"}, + {file = "google_crc32c-1.5.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:116a7c3c616dd14a3de8c64a965828b197e5f2d121fedd2f8c5585c547e87b02"}, + {file = 
"google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5829b792bf5822fd0a6f6eb34c5f81dd074f01d570ed7f36aa101d6fc7a0a6e4"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:64e52e2b3970bd891309c113b54cf0e4384762c934d5ae56e283f9a0afcd953e"}, + {file = "google_crc32c-1.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:02ebb8bf46c13e36998aeaad1de9b48f4caf545e91d14041270d9dca767b780c"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win32.whl", hash = "sha256:2e920d506ec85eb4ba50cd4228c2bec05642894d4c73c59b3a2fe20346bd00ee"}, + {file = "google_crc32c-1.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:07eb3c611ce363c51a933bf6bd7f8e3878a51d124acfc89452a75120bc436289"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cae0274952c079886567f3f4f685bcaf5708f0a23a5f5216fdab71f81a6c0273"}, + {file = "google_crc32c-1.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1034d91442ead5a95b5aaef90dbfaca8633b0247d1e41621d1e9f9db88c36298"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c42c70cd1d362284289c6273adda4c6af8039a8ae12dc451dcd61cdabb8ab57"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8485b340a6a9e76c62a7dce3c98e5f102c9219f4cfbf896a00cf48caf078d438"}, + {file = "google_crc32c-1.5.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77e2fd3057c9d78e225fa0a2160f96b64a824de17840351b26825b0848022906"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f583edb943cf2e09c60441b910d6a20b4d9d626c75a36c8fcac01a6c96c01183"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:a1fd716e7a01f8e717490fbe2e431d2905ab8aa598b9b12f8d10abebb36b04dd"}, + {file = "google_crc32c-1.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72218785ce41b9cfd2fc1d6a017dc1ff7acfc4c17d01053265c41a2c0cc39b8c"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win32.whl", hash = "sha256:66741ef4ee08ea0b2cc3c86916ab66b6aef03768525627fd6a1b34968b4e3709"}, + {file = "google_crc32c-1.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba1eb1843304b1e5537e1fca632fa894d6f6deca8d6389636ee5b4797affb968"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:98cb4d057f285bd80d8778ebc4fde6b4d509ac3f331758fb1528b733215443ae"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd8536e902db7e365f49e7d9029283403974ccf29b13fc7028b97e2295b33556"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19e0a019d2c4dcc5e598cd4a4bc7b008546b0358bd322537c74ad47a5386884f"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02c65b9817512edc6a4ae7c7e987fea799d2e0ee40c53ec573a692bee24de876"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ac08d24c1f16bd2bf5eca8eaf8304812f44af5cfe5062006ec676e7e1d50afc"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3359fc442a743e870f4588fcf5dcbc1bf929df1fad8fb9905cd94e5edb02e84c"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1e986b206dae4476f41bcec1faa057851f3889503a70e1bdb2378d406223994a"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:de06adc872bcd8c2a4e0dc51250e9e65ef2ca91be023b9d13ebd67c2ba552e1e"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win32.whl", hash = "sha256:d3515f198eaa2f0ed49f8819d5732d70698c3fa37384146079b3799b97667a94"}, + {file = "google_crc32c-1.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:67b741654b851abafb7bc625b6d1cdd520a379074e64b6a128e3b688c3c04740"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c02ec1c5856179f171e032a31d6f8bf84e5a75c45c33b2e20a3de353b266ebd8"}, + {file = "google_crc32c-1.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edfedb64740750e1a3b16152620220f51d58ff1b4abceb339ca92e934775c27a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:84e6e8cd997930fc66d5bb4fde61e2b62ba19d62b7abd7a69920406f9ecca946"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:024894d9d3cfbc5943f8f230e23950cd4906b2fe004c72e29b209420a1e6b05a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:998679bf62b7fb599d2878aa3ed06b9ce688b8974893e7223c60db155f26bd8d"}, + {file = "google_crc32c-1.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:83c681c526a3439b5cf94f7420471705bbf96262f49a6fe546a6db5f687a3d4a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:4c6fdd4fccbec90cc8a01fc00773fcd5fa28db683c116ee3cb35cd5da9ef6c37"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5ae44e10a8e3407dbe138984f21e536583f2bba1be9491239f942c2464ac0894"}, + {file = "google_crc32c-1.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:37933ec6e693e51a5b07505bd05de57eee12f3e8c32b07da7e73669398e6630a"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win32.whl", hash = "sha256:fe70e325aa68fa4b5edf7d1a4b6f691eb04bbccac0ace68e34820d283b5f80d4"}, + {file = "google_crc32c-1.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:74dea7751d98034887dbd821b7aae3e1d36eda111d6ca36c206c44478035709c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c6c777a480337ac14f38564ac88ae82d4cd238bf293f0a22295b66eb89ffced7"}, + {file = "google_crc32c-1.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:759ce4851a4bb15ecabae28f4d2e18983c244eddd767f560165563bf9aefbc8d"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f13cae8cc389a440def0c8c52057f37359014ccbc9dc1f0827936bcd367c6100"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e560628513ed34759456a416bf86b54b2476c59144a9138165c9a1575801d0d9"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1674e4307fa3024fc897ca774e9c7562c957af85df55efe2988ed9056dc4e57"}, + {file = "google_crc32c-1.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:278d2ed7c16cfc075c91378c4f47924c0625f5fc84b2d50d921b18b7975bd210"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5280312b9af0976231f9e317c20e4a61cd2f9629b7bfea6a693d1878a264ebd"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:8b87e1a59c38f275c0e3676fc2ab6d59eccecfd460be267ac360cc31f7bcde96"}, + {file = "google_crc32c-1.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7c074fece789b5034b9b1404a1f8208fc2d4c6ce9decdd16e8220c5a793e6f61"}, + {file = 
"google_crc32c-1.5.0-cp39-cp39-win32.whl", hash = "sha256:7f57f14606cd1dd0f0de396e1e53824c371e9544a822648cd76c034d209b559c"}, + {file = "google_crc32c-1.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:a2355cba1f4ad8b6988a4ca3feed5bff33f6af2d7f134852cf279c2aebfde541"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f314013e7dcd5cf45ab1945d92e713eec788166262ae8deb2cfacd53def27325"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b747a674c20a67343cb61d43fdd9207ce5da6a99f629c6e2541aa0e89215bcd"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8f24ed114432de109aa9fd317278518a5af2d31ac2ea6b952b2f7782b43da091"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8667b48e7a7ef66afba2c81e1094ef526388d35b873966d8a9a447974ed9178"}, + {file = "google_crc32c-1.5.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:1c7abdac90433b09bad6c43a43af253e688c9cfc1c86d332aed13f9a7c7f65e2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6f998db4e71b645350b9ac28a2167e6632c239963ca9da411523bb439c5c514d"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c99616c853bb585301df6de07ca2cadad344fd1ada6d62bb30aec05219c45d2"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ad40e31093a4af319dadf503b2467ccdc8f67c72e4bcba97f8c10cb078207b5"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cd67cf24a553339d5062eff51013780a00d6f97a39ca062781d06b3a73b15462"}, + {file = "google_crc32c-1.5.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:398af5e3ba9cf768787eef45c803ff9614cc3e22a5b2f7d7ae116df8b11e3314"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b1f8133c9a275df5613a451e73f36c2aea4fe13c5c8997e22cf355ebd7bd0728"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ba053c5f50430a3fcfd36f75aff9caeba0440b2d076afdb79a318d6ca245f88"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:272d3892a1e1a2dbc39cc5cde96834c236d5327e2122d3aaa19f6614531bb6eb"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:635f5d4dd18758a1fbd1049a8e8d2fee4ffed124462d837d1a02a0e009c3ab31"}, + {file = "google_crc32c-1.5.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c672d99a345849301784604bfeaeba4db0c7aae50b95be04dd651fd2a7310b93"}, +] + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "google-resumable-media" +version = "2.7.0" +description = "Utilities for Google Media Downloads and Resumable Uploads" +optional = false +python-versions = ">= 3.7" +files = [ + {file = "google-resumable-media-2.7.0.tar.gz", hash = "sha256:5f18f5fa9836f4b083162064a1c2c98c17239bfda9ca50ad970ccf905f3e625b"}, + {file = "google_resumable_media-2.7.0-py2.py3-none-any.whl", hash = "sha256:79543cfe433b63fd81c0844b7803aba1bb8950b47bedf7d980c38fa123937e08"}, +] + +[package.dependencies] +google-crc32c = ">=1.0,<2.0dev" + +[package.extras] +aiohttp = ["aiohttp (>=3.6.2,<4.0.0dev)", "google-auth (>=1.22.0,<2.0dev)"] +requests = ["requests (>=2.18.0,<3.0.0dev)"] + +[[package]] +name = 
"googleapis-common-protos" +version = "1.62.0" +description = "Common protobufs used in Google APIs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "googleapis-common-protos-1.62.0.tar.gz", hash = "sha256:83f0ece9f94e5672cced82f592d2a5edf527a96ed1794f0bab36d5735c996277"}, + {file = "googleapis_common_protos-1.62.0-py2.py3-none-any.whl", hash = "sha256:4750113612205514f9f6aa4cb00d523a94f3e8c06c5ad2fee466387dc4875f07"}, +] + +[package.dependencies] +grpcio = {version = ">=1.44.0,<2.0.0.dev0", optional = true, markers = "extra == \"grpc\""} +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0.dev0" + +[package.extras] +grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] + +[[package]] +name = "grpc-google-iam-v1" +version = "0.13.0" +description = "IAM API client library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpc-google-iam-v1-0.13.0.tar.gz", hash = "sha256:fad318608b9e093258fbf12529180f400d1c44453698a33509cc6ecf005b294e"}, + {file = "grpc_google_iam_v1-0.13.0-py2.py3-none-any.whl", hash = "sha256:53902e2af7de8df8c1bd91373d9be55b0743ec267a7428ea638db3775becae89"}, +] + +[package.dependencies] +googleapis-common-protos = {version = ">=1.56.0,<2.0.0dev", extras = ["grpc"]} +grpcio = ">=1.44.0,<2.0.0dev" +protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" + +[[package]] +name = "grpcio" +version = "1.60.0" +description = "HTTP/2-based RPC framework" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpcio-1.60.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:d020cfa595d1f8f5c6b343530cd3ca16ae5aefdd1e832b777f9f0eb105f5b139"}, + {file = "grpcio-1.60.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:b98f43fcdb16172dec5f4b49f2fece4b16a99fd284d81c6bbac1b3b69fcbe0ff"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:20e7a4f7ded59097c84059d28230907cd97130fa74f4a8bfd1d8e5ba18c81491"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452ca5b4afed30e7274445dd9b441a35ece656ec1600b77fff8c216fdf07df43"}, + {file = "grpcio-1.60.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43e636dc2ce9ece583b3e2ca41df5c983f4302eabc6d5f9cd04f0562ee8ec1ae"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6e306b97966369b889985a562ede9d99180def39ad42c8014628dd3cc343f508"}, + {file = "grpcio-1.60.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f897c3b127532e6befdcf961c415c97f320d45614daf84deba0a54e64ea2457b"}, + {file = "grpcio-1.60.0-cp310-cp310-win32.whl", hash = "sha256:b87efe4a380887425bb15f220079aa8336276398dc33fce38c64d278164f963d"}, + {file = "grpcio-1.60.0-cp310-cp310-win_amd64.whl", hash = "sha256:a9c7b71211f066908e518a2ef7a5e211670761651039f0d6a80d8d40054047df"}, + {file = "grpcio-1.60.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:fb464479934778d7cc5baf463d959d361954d6533ad34c3a4f1d267e86ee25fd"}, + {file = "grpcio-1.60.0-cp311-cp311-macosx_10_10_universal2.whl", hash = "sha256:4b44d7e39964e808b071714666a812049765b26b3ea48c4434a3b317bac82f14"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:90bdd76b3f04bdb21de5398b8a7c629676c81dfac290f5f19883857e9371d28c"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:91229d7203f1ef0ab420c9b53fe2ca5c1fbeb34f69b3bc1b5089466237a4a134"}, + {file = "grpcio-1.60.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b36a2c6d4920ba88fa98075fdd58ff94ebeb8acc1215ae07d01a418af4c0253"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:297eef542156d6b15174a1231c2493ea9ea54af8d016b8ca7d5d9cc65cfcc444"}, + {file = "grpcio-1.60.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:87c9224acba0ad8bacddf427a1c2772e17ce50b3042a789547af27099c5f751d"}, + {file = "grpcio-1.60.0-cp311-cp311-win32.whl", hash = "sha256:95ae3e8e2c1b9bf671817f86f155c5da7d49a2289c5cf27a319458c3e025c320"}, + {file = "grpcio-1.60.0-cp311-cp311-win_amd64.whl", hash = "sha256:467a7d31554892eed2aa6c2d47ded1079fc40ea0b9601d9f79204afa8902274b"}, + {file = "grpcio-1.60.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:a7152fa6e597c20cb97923407cf0934e14224af42c2b8d915f48bc3ad2d9ac18"}, + {file = "grpcio-1.60.0-cp312-cp312-macosx_10_10_universal2.whl", hash = "sha256:7db16dd4ea1b05ada504f08d0dca1cd9b926bed3770f50e715d087c6f00ad748"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:b0571a5aef36ba9177e262dc88a9240c866d903a62799e44fd4aae3f9a2ec17e"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fd9584bf1bccdfff1512719316efa77be235469e1e3295dce64538c4773840b"}, + {file = "grpcio-1.60.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6a478581b1a1a8fdf3318ecb5f4d0cda41cacdffe2b527c23707c9c1b8fdb55"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:77c8a317f0fd5a0a2be8ed5cbe5341537d5c00bb79b3bb27ba7c5378ba77dbca"}, + {file = "grpcio-1.60.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1c30bb23a41df95109db130a6cc1b974844300ae2e5d68dd4947aacba5985aa5"}, + {file = "grpcio-1.60.0-cp312-cp312-win32.whl", hash = "sha256:2aef56e85901c2397bd557c5ba514f84de1f0ae5dd132f5d5fed042858115951"}, + {file = "grpcio-1.60.0-cp312-cp312-win_amd64.whl", hash = "sha256:e381fe0c2aa6c03b056ad8f52f8efca7be29fb4d9ae2f8873520843b6039612a"}, + {file = "grpcio-1.60.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:92f88ca1b956eb8427a11bb8b4a0c0b2b03377235fc5102cb05e533b8693a415"}, + {file = "grpcio-1.60.0-cp37-cp37m-macosx_10_10_universal2.whl", hash = "sha256:e278eafb406f7e1b1b637c2cf51d3ad45883bb5bd1ca56bc05e4fc135dfdaa65"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:a48edde788b99214613e440fce495bbe2b1e142a7f214cce9e0832146c41e324"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de2ad69c9a094bf37c1102b5744c9aec6cf74d2b635558b779085d0263166454"}, + {file = "grpcio-1.60.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:073f959c6f570797272f4ee9464a9997eaf1e98c27cb680225b82b53390d61e6"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c826f93050c73e7769806f92e601e0efdb83ec8d7c76ddf45d514fee54e8e619"}, + {file = "grpcio-1.60.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9e30be89a75ee66aec7f9e60086fadb37ff8c0ba49a022887c28c134341f7179"}, + {file = "grpcio-1.60.0-cp37-cp37m-win_amd64.whl", hash = "sha256:b0fb2d4801546598ac5cd18e3ec79c1a9af8b8f2a86283c55a5337c5aeca4b1b"}, + {file = "grpcio-1.60.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:9073513ec380434eb8d21970e1ab3161041de121f4018bbed3146839451a6d8e"}, + {file = "grpcio-1.60.0-cp38-cp38-macosx_10_10_universal2.whl", hash = 
"sha256:74d7d9fa97809c5b892449b28a65ec2bfa458a4735ddad46074f9f7d9550ad13"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:1434ca77d6fed4ea312901122dc8da6c4389738bf5788f43efb19a838ac03ead"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e61e76020e0c332a98290323ecfec721c9544f5b739fab925b6e8cbe1944cf19"}, + {file = "grpcio-1.60.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675997222f2e2f22928fbba640824aebd43791116034f62006e19730715166c0"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5208a57eae445ae84a219dfd8b56e04313445d146873117b5fa75f3245bc1390"}, + {file = "grpcio-1.60.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:428d699c8553c27e98f4d29fdc0f0edc50e9a8a7590bfd294d2edb0da7be3629"}, + {file = "grpcio-1.60.0-cp38-cp38-win32.whl", hash = "sha256:83f2292ae292ed5a47cdcb9821039ca8e88902923198f2193f13959360c01860"}, + {file = "grpcio-1.60.0-cp38-cp38-win_amd64.whl", hash = "sha256:705a68a973c4c76db5d369ed573fec3367d7d196673fa86614b33d8c8e9ebb08"}, + {file = "grpcio-1.60.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c193109ca4070cdcaa6eff00fdb5a56233dc7610216d58fb81638f89f02e4968"}, + {file = "grpcio-1.60.0-cp39-cp39-macosx_10_10_universal2.whl", hash = "sha256:676e4a44e740deaba0f4d95ba1d8c5c89a2fcc43d02c39f69450b1fa19d39590"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5ff21e000ff2f658430bde5288cb1ac440ff15c0d7d18b5fb222f941b46cb0d2"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c86343cf9ff7b2514dd229bdd88ebba760bd8973dac192ae687ff75e39ebfab"}, + {file = "grpcio-1.60.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fd3b3968ffe7643144580f260f04d39d869fcc2cddb745deef078b09fd2b328"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:30943b9530fe3620e3b195c03130396cd0ee3a0d10a66c1bee715d1819001eaf"}, + {file = "grpcio-1.60.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b10241250cb77657ab315270b064a6c7f1add58af94befa20687e7c8d8603ae6"}, + {file = "grpcio-1.60.0-cp39-cp39-win32.whl", hash = "sha256:79a050889eb8d57a93ed21d9585bb63fca881666fc709f5d9f7f9372f5e7fd03"}, + {file = "grpcio-1.60.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a97a681e82bc11a42d4372fe57898d270a2707f36c45c6676e49ce0d5c41353"}, + {file = "grpcio-1.60.0.tar.gz", hash = "sha256:2199165a1affb666aa24adf0c97436686d0a61bc5fc113c037701fb7c7fceb96"}, +] + +[package.extras] +protobuf = ["grpcio-tools (>=1.60.0)"] + +[[package]] +name = "grpcio-status" +version = "1.48.2" +description = "Status proto mapping for gRPC" +optional = false +python-versions = ">=3.6" +files = [ + {file = "grpcio-status-1.48.2.tar.gz", hash = "sha256:53695f45da07437b7c344ee4ef60d370fd2850179f5a28bb26d8e2aa1102ec11"}, + {file = "grpcio_status-1.48.2-py3-none-any.whl", hash = "sha256:2c33bbdbe20188b2953f46f31af669263b6ee2a9b2d38fa0d36ee091532e21bf"}, +] + +[package.dependencies] +googleapis-common-protos = ">=1.5.5" +grpcio = ">=1.48.2" +protobuf = ">=3.12.0" + +[[package]] +name = "identify" +version = "2.5.33" +description = "File identification library for Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "identify-2.5.33-py2.py3-none-any.whl", hash = "sha256:d40ce5fcd762817627670da8a7d8d8e65f24342d14539c59488dc603bf662e34"}, + {file = "identify-2.5.33.tar.gz", hash = "sha256:161558f9fe4559e1557e1bff323e8631f6a0e4837f7497767c1782832f16b62d"}, +] + 
+[package.extras] +license = ["ukkonen"] + +[[package]] +name = "idna" +version = "3.6" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, + {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, +] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "kfp" +version = "2.4.0" +description = "Kubeflow Pipelines SDK" +optional = false +python-versions = ">=3.7.0,<3.12.0" +files = [ + {file = "kfp-2.4.0.tar.gz", hash = "sha256:44c25ce80b948629bf0fb0815b49d8d97039a825597a4378078bbaa0610aef69"}, +] + +[package.dependencies] +click = ">=8.0.0,<9" +docstring-parser = ">=0.7.3,<1" +google-api-core = ">=1.31.5,<2.0.dev0 || >2.3.0,<3.0.0dev" +google-auth = ">=1.6.1,<3" +google-cloud-storage = ">=2.2.1,<3" +kfp-pipeline-spec = "0.2.2" +kfp-server-api = ">=2.0.0,<2.1.0" +kubernetes = ">=8.0.0,<27" +protobuf = ">=3.13.0,<4" +PyYAML = ">=5.3,<7" +requests-toolbelt = ">=0.8.0,<1" +tabulate = ">=0.8.6,<1" +urllib3 = "<2.0.0" + +[package.extras] +all = ["docker", "kfp-kubernetes (<2)"] +kubernetes = ["kfp-kubernetes (<2)"] + +[[package]] +name = "kfp-pipeline-spec" +version = "0.2.2" +description = "Kubeflow Pipelines pipeline spec" +optional = false +python-versions = ">=3.7.0" +files = [ + {file = "kfp_pipeline_spec-0.2.2-py3-none-any.whl", hash = "sha256:e83b58ffed3f7ca154d62ab20e14dc9a1a3c9dad589e856dd2b421e226f3b8d0"}, +] + +[package.dependencies] +protobuf = ">=3.13.0,<4" + +[[package]] +name = "kfp-server-api" +version = "2.0.5" +description = "Kubeflow Pipelines API" +optional = false +python-versions = "*" +files = [ + {file = "kfp-server-api-2.0.5.tar.gz", hash = "sha256:c9cfbf0e87271d3bfe96e5ecc9ffbdd6ab566bc1c9a9ddc2a39d7698a16e26ff"}, +] + +[package.dependencies] +certifi = "*" +python-dateutil = "*" +six = ">=1.10" +urllib3 = ">=1.15" + +[[package]] +name = "kubernetes" +version = "26.1.0" +description = "Kubernetes python client" +optional = false +python-versions = ">=3.6" +files = [ + {file = "kubernetes-26.1.0-py2.py3-none-any.whl", hash = "sha256:e3db6800abf7e36c38d2629b5cb6b74d10988ee0cba6fba45595a7cbe60c0042"}, + {file = "kubernetes-26.1.0.tar.gz", hash = "sha256:5854b0c508e8d217ca205591384ab58389abdae608576f9c9afc35a3c76a366c"}, +] + +[package.dependencies] +certifi = ">=14.05.14" +google-auth = ">=1.0.1" +python-dateutil = ">=2.5.3" +pyyaml = ">=5.4.1" +requests = "*" +requests-oauthlib = "*" +setuptools = ">=21.0.0" +six = ">=1.9.0" +urllib3 = ">=1.24.2" 
+websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" + +[package.extras] +adal = ["adal (>=1.0.2)"] + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = 
"MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "numpy" +version = "1.26.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f"}, + {file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75"}, + {file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe"}, + {file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523"}, + {file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9"}, + {file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841"}, + {file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a"}, + {file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b"}, + {file = 
"numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7"}, + {file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8"}, + {file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186"}, + {file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0"}, + {file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7"}, + {file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6"}, + {file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec"}, + {file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167"}, + {file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef"}, + {file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3"}, + {file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210"}, + {file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36"}, + {file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80"}, + {file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d"}, + {file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841"}, + {file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea"}, +] + +[[package]] +name = "oauthlib" +version = "3.2.2" +description = "A 
generic, spec-compliant, thorough implementation of the OAuth request-signing logic" +optional = false +python-versions = ">=3.6" +files = [ + {file = "oauthlib-3.2.2-py3-none-any.whl", hash = "sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, + {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, +] + +[package.extras] +rsa = ["cryptography (>=3.0.0)"] +signals = ["blinker (>=1.4.0)"] +signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] + +[[package]] +name = "packaging" +version = "23.2" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, + {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, +] + +[[package]] +name = "platformdirs" +version = "4.1.0" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +optional = false +python-versions = ">=3.8" +files = [ + {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, + {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, +] + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] + +[[package]] +name = "pluggy" +version = "1.3.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.21.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "proto-plus" +version = "1.23.0" +description = "Beautiful, Pythonic protocol buffers." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "proto-plus-1.23.0.tar.gz", hash = "sha256:89075171ef11988b3fa157f5dbd8b9cf09d65fffee97e29ce403cd8defba19d2"}, + {file = "proto_plus-1.23.0-py3-none-any.whl", hash = "sha256:a829c79e619e1cf632de091013a4173deed13a55f326ef84f05af6f50ff4c82c"}, +] + +[package.dependencies] +protobuf = ">=3.19.0,<5.0.0dev" + +[package.extras] +testing = ["google-api-core[grpc] (>=1.31.5)"] + +[[package]] +name = "protobuf" +version = "3.20.3" +description = "Protocol Buffers" +optional = false +python-versions = ">=3.7" +files = [ + {file = "protobuf-3.20.3-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:f4bd856d702e5b0d96a00ec6b307b0f51c1982c2bf9c0052cf9019e9a544ba99"}, + {file = "protobuf-3.20.3-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:9aae4406ea63d825636cc11ffb34ad3379335803216ee3a856787bcf5ccc751e"}, + {file = "protobuf-3.20.3-cp310-cp310-win32.whl", hash = "sha256:28545383d61f55b57cf4df63eebd9827754fd2dc25f80c5253f9184235db242c"}, + {file = "protobuf-3.20.3-cp310-cp310-win_amd64.whl", hash = "sha256:67a3598f0a2dcbc58d02dd1928544e7d88f764b47d4a286202913f0b2801c2e7"}, + {file = "protobuf-3.20.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:899dc660cd599d7352d6f10d83c95df430a38b410c1b66b407a6b29265d66469"}, + {file = "protobuf-3.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e64857f395505ebf3d2569935506ae0dfc4a15cb80dc25261176c784662cdcc4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:d9e4432ff660d67d775c66ac42a67cf2453c27cb4d738fc22cb53b5d84c135d4"}, + {file = "protobuf-3.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:74480f79a023f90dc6e18febbf7b8bac7508420f2006fabd512013c0c238f454"}, + {file = "protobuf-3.20.3-cp37-cp37m-win32.whl", hash = "sha256:b6cc7ba72a8850621bfec987cb72623e703b7fe2b9127a161ce61e61558ad905"}, + {file = "protobuf-3.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:8c0c984a1b8fef4086329ff8dd19ac77576b384079247c770f29cc8ce3afa06c"}, + {file = "protobuf-3.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:de78575669dddf6099a8a0f46a27e82a1783c557ccc38ee620ed8cc96d3be7d7"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f4c42102bc82a51108e449cbb32b19b180022941c727bac0cfd50170341f16ee"}, + {file = "protobuf-3.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44246bab5dd4b7fbd3c0c80b6f16686808fab0e4aca819ade6e8d294a29c7050"}, + {file = "protobuf-3.20.3-cp38-cp38-win32.whl", hash = "sha256:c02ce36ec760252242a33967d51c289fd0e1c0e6e5cc9397e2279177716add86"}, + {file = "protobuf-3.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:447d43819997825d4e71bf5769d869b968ce96848b6479397e29fc24c4a5dfe9"}, + {file = "protobuf-3.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:398a9e0c3eaceb34ec1aee71894ca3299605fa8e761544934378bbc6c97de23b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bf01b5720be110540be4286e791db73f84a2b721072a3711efff6c324cdf074b"}, + {file = "protobuf-3.20.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:daa564862dd0d39c00f8086f88700fdbe8bc717e993a21e90711acfed02f2402"}, + {file = "protobuf-3.20.3-cp39-cp39-win32.whl", hash = "sha256:819559cafa1a373b7096a482b504ae8a857c89593cf3a25af743ac9ecbd23480"}, + {file = "protobuf-3.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:03038ac1cfbc41aa21f6afcbcd357281d7521b4157926f30ebecc8d4ea59dcb7"}, + {file = 
"protobuf-3.20.3-py2.py3-none-any.whl", hash = "sha256:a7ca6d488aa8ff7f329d4c545b2dbad8ac31464f1d8b1c87ad1346717731e4db"}, + {file = "protobuf-3.20.3.tar.gz", hash = "sha256:2e3427429c9cffebf259491be0af70189607f365c2f41c7c3764af6f337105f2"}, +] + +[[package]] +name = "pyasn1" +version = "0.5.1" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, + {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, +] + +[[package]] +name = "pyasn1-modules" +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, +] + +[package.dependencies] +pyasn1 = ">=0.4.6,<0.6.0" + +[[package]] +name = "pytest" +version = "7.4.3" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +files = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = 
"PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "requests" +version = "2.31.0" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.7" +files = [ + {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, + {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "requests-oauthlib" +version = "1.3.1" +description = "OAuthlib authentication support for Requests." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-oauthlib-1.3.1.tar.gz", hash = "sha256:75beac4a47881eeb94d5ea5d6ad31ef88856affe2332b9aafb52c6452ccf0d7a"}, + {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, +] + +[package.dependencies] +oauthlib = ">=3.0.0" +requests = ">=2.0.0" + +[package.extras] +rsa = ["oauthlib[signedtoken] (>=3.0.0)"] + +[[package]] +name = "requests-toolbelt" +version = "0.10.1" +description = "A utility belt for advanced users of python-requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "requests-toolbelt-0.10.1.tar.gz", hash = "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d"}, + {file = "requests_toolbelt-0.10.1-py2.py3-none-any.whl", hash = "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7"}, +] + +[package.dependencies] +requests = ">=2.0.1,<3.0.0" + +[[package]] +name = "rsa" +version = "4.9" +description = "Pure-Python RSA implementation" +optional = false +python-versions = ">=3.6,<4" +files = [ + {file = "rsa-4.9-py3-none-any.whl", hash = "sha256:90260d9058e514786967344d0ef75fa8727eed8a7d2e43ce9f4bcf1b536174f7"}, + {file = "rsa-4.9.tar.gz", hash = "sha256:e38464a49c6c85d7f1351b0126661487a7e0a14a50f1675ec50eb34d4f20ef21"}, +] + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "setuptools" +version = "69.0.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-69.0.2-py3-none-any.whl", hash = "sha256:1e8fdff6797d3865f37397be788a4e3cba233608e9b509382a2777d25ebde7f2"}, + {file = "setuptools-69.0.2.tar.gz", hash = "sha256:735896e78a4742605974de002ac60562d286fa8051a7e2299445e8e8fbb01aa6"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "shapely" +version = "2.0.2" +description = "Manipulation and analysis of geometric objects" +optional = false +python-versions = ">=3.7" +files = [ + {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6ca8cffbe84ddde8f52b297b53f8e0687bd31141abb2c373fd8a9f032df415d6"}, + {file = "shapely-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:baa14fc27771e180c06b499a0a7ba697c7988c7b2b6cba9a929a19a4d2762de3"}, + {file = "shapely-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:36480e32c434d168cdf2f5e9862c84aaf4d714a43a8465ae3ce8ff327f0affb7"}, + 
{file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ef753200cbffd4f652efb2c528c5474e5a14341a473994d90ad0606522a46a2"}, + {file = "shapely-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9a41ff4323fc9d6257759c26eb1cf3a61ebc7e611e024e6091f42977303fd3a"}, + {file = "shapely-2.0.2-cp310-cp310-win32.whl", hash = "sha256:72b5997272ae8c25f0fd5b3b967b3237e87fab7978b8d6cd5fa748770f0c5d68"}, + {file = "shapely-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:34eac2337cbd67650248761b140d2535855d21b969d76d76123317882d3a0c1a"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b0c052709c8a257c93b0d4943b0b7a3035f87e2d6a8ac9407b6a992d206422f"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2d217e56ae067e87b4e1731d0dc62eebe887ced729ba5c2d4590e9e3e9fdbd88"}, + {file = "shapely-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:94ac128ae2ab4edd0bffcd4e566411ea7bdc738aeaf92c32a8a836abad725f9f"}, + {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa3ee28f5e63a130ec5af4dc3c4cb9c21c5788bb13c15e89190d163b14f9fb89"}, + {file = "shapely-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:737dba15011e5a9b54a8302f1748b62daa207c9bc06f820cd0ad32a041f1c6f2"}, + {file = "shapely-2.0.2-cp311-cp311-win32.whl", hash = "sha256:45ac6906cff0765455a7b49c1670af6e230c419507c13e2f75db638c8fc6f3bd"}, + {file = "shapely-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:dc9342fc82e374130db86a955c3c4525bfbf315a248af8277a913f30911bed9e"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:06f193091a7c6112fc08dfd195a1e3846a64306f890b151fa8c63b3e3624202c"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:eebe544df5c018134f3c23b6515877f7e4cd72851f88a8d0c18464f414d141a2"}, + {file = "shapely-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7e92e7c255f89f5cdf777690313311f422aa8ada9a3205b187113274e0135cd8"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be46d5509b9251dd9087768eaf35a71360de6afac82ce87c636990a0871aa18b"}, + {file = "shapely-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5533a925d8e211d07636ffc2fdd9a7f9f13d54686d00577eeb11d16f00be9c4"}, + {file = "shapely-2.0.2-cp312-cp312-win32.whl", hash = "sha256:084b023dae8ad3d5b98acee9d3bf098fdf688eb0bb9b1401e8b075f6a627b611"}, + {file = "shapely-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:ea84d1cdbcf31e619d672b53c4532f06253894185ee7acb8ceb78f5f33cbe033"}, + {file = "shapely-2.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed1e99702125e7baccf401830a3b94d810d5c70b329b765fe93451fe14cf565b"}, + {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7d897e6bdc6bc64f7f65155dbbb30e49acaabbd0d9266b9b4041f87d6e52b3a"}, + {file = "shapely-2.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0521d76d1e8af01e712db71da9096b484f081e539d4f4a8c97342e7971d5e1b4"}, + {file = "shapely-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:5324be299d4c533ecfcfd43424dfd12f9428fd6f12cda38a4316da001d6ef0ea"}, + {file = "shapely-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:78128357a0cee573257a0c2c388d4b7bf13cb7dbe5b3fe5d26d45ebbe2a39e25"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:87dc2be34ac3a3a4a319b963c507ac06682978a5e6c93d71917618b14f13066e"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:42997ac806e4583dad51c80a32d38570fd9a3d4778f5e2c98f9090aa7db0fe91"}, + {file = "shapely-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ccfd5fa10a37e67dbafc601c1ddbcbbfef70d34c3f6b0efc866ddbdb55893a6c"}, + {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7c95d3379ae3abb74058938a9fcbc478c6b2e28d20dace38f8b5c587dde90aa"}, + {file = "shapely-2.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a21353d28209fb0d8cc083e08ca53c52666e0d8a1f9bbe23b6063967d89ed24"}, + {file = "shapely-2.0.2-cp38-cp38-win32.whl", hash = "sha256:03e63a99dfe6bd3beb8d5f41ec2086585bb969991d603f9aeac335ad396a06d4"}, + {file = "shapely-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:c6fd29fbd9cd76350bd5cc14c49de394a31770aed02d74203e23b928f3d2f1aa"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1f217d28ecb48e593beae20a0082a95bd9898d82d14b8fcb497edf6bff9a44d7"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:394e5085b49334fd5b94fa89c086edfb39c3ecab7f669e8b2a4298b9d523b3a5"}, + {file = "shapely-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd3ad17b64466a033848c26cb5b509625c87d07dcf39a1541461cacdb8f7e91c"}, + {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d41a116fcad58048d7143ddb01285e1a8780df6dc1f56c3b1e1b7f12ed296651"}, + {file = "shapely-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dea9a0651333cf96ef5bb2035044e3ad6a54f87d90e50fe4c2636debf1b77abc"}, + {file = "shapely-2.0.2-cp39-cp39-win32.whl", hash = "sha256:b8eb0a92f7b8c74f9d8fdd1b40d395113f59bd8132ca1348ebcc1f5aece94b96"}, + {file = "shapely-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:794affd80ca0f2c536fc948a3afa90bd8fb61ebe37fe873483ae818e7f21def4"}, + {file = "shapely-2.0.2.tar.gz", hash = "sha256:1713cc04c171baffc5b259ba8531c58acc2a301707b7f021d88a15ed090649e7"}, +] + +[package.dependencies] +numpy = ">=1.14" + +[package.extras] +docs = ["matplotlib", "numpydoc (==1.1.*)", "sphinx", "sphinx-book-theme", "sphinx-remove-toctrees"] +test = ["pytest", "pytest-cov"] + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +files = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] + +[[package]] +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = 
"sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "urllib3" +version = "1.26.18" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +files = [ + {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"}, + {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"}, +] + +[package.extras] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "virtualenv" +version = "20.25.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.7" +files = [ + {file = "virtualenv-20.25.0-py3-none-any.whl", hash = "sha256:4238949c5ffe6876362d9c0180fc6c3a824a7b12b80604eeb8085f2ed7460de3"}, + {file = "virtualenv-20.25.0.tar.gz", hash = "sha256:bf51c0d9c7dd63ea8e44086fa1e4fb1093a31e963b86959257378aef020e1f1b"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + +[[package]] +name = "websocket-client" +version = "1.7.0" +description = "WebSocket client for Python with low level API options" +optional = false +python-versions = ">=3.8" +files = [ + {file = "websocket-client-1.7.0.tar.gz", hash = "sha256:10e511ea3a8c744631d3bd77e61eb17ed09304c413ad42cf6ddfa4c7787e8fe6"}, + {file = "websocket_client-1.7.0-py3-none-any.whl", hash = "sha256:f4c3d22fec12a2461427a29957ff07d35098ee2d976d3ba244e688b8b4057588"}, +] + +[package.extras] +docs = ["Sphinx (>=6.0)", "sphinx-rtd-theme (>=1.1.0)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[metadata] +lock-version = "2.0" +python-versions = ">=3.9,<3.11" +content-hash = "0be210fbd2f02c00c34f9c03fb76646129ec4a80cb8dee6705e2f06ff6a36639" diff --git a/pipelines/pyproject.toml b/pipelines/pyproject.toml index a5e2cbb7..dd288fb8 100644 --- a/pipelines/pyproject.toml +++ b/pipelines/pyproject.toml @@ -1,30 +1,35 @@ -[project] -name = "turbo-template" -authors = [ - {name = "Example User", email = "user@example.com"}, -] -description = "Turbo Template" -readme = "README.md" +[tool.poetry] +name = "pipelines" +version = "0.1.0" +authors = ["Example User "] +description = "Vertex AI Pipelines end-to-end sample" classifiers = [ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", - "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.9", ] -requires-python = ">=3.7" -dynamic = ["version"] -dependencies = [ - "kfp == 1.8.21", +packages = [ + { include = "pipelines", from = "src" }, ] -[project.optional-dependencies] -tests = [ - "google-cloud-bigquery == 2.30.0", - "pytest >= 
7.3.1,<8.0.0", -] +[tool.poetry.dependencies] +python = ">=3.9,<3.11" +Jinja2 = ">=3.0.1,<4.0.0" +google-cloud-aiplatform = ">=1.30.1" +google-cloud-storage = "<2.13.0" +google-cloud-pipeline-components = "^2.1.0" +components = { path = "../components", develop = true } +kfp = "^2.0.1" +pyyaml = "6.0.1" + +[tool.poetry.group.dev.dependencies] +pytest = ">=7.3.1,<8.0.0" +pre-commit = ">=2.14.1,<3.0.0" +coverage = "==7.2.5" [build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta:__legacy__" +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" [tool.flake8] max-line-length = 88 diff --git a/pipelines/src/pipelines/__init__.py b/pipelines/src/pipelines/__init__.py index 113da917..7ba50f93 100644 --- a/pipelines/src/pipelines/__init__.py +++ b/pipelines/src/pipelines/__init__.py @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,23 +11,3 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from pathlib import Path -from jinja2 import Template - - -def generate_query(input_file: Path, **replacements) -> str: - """ - Read input file and replace placeholder using Jinja. - - Args: - input_file (Path): input file to read - replacements: keyword arguments to use to replace placeholders - Returns: - str: replaced content of input file - """ - - with open(input_file, "r") as f: - query_template = f.read() - - return Template(query_template).render(**replacements) diff --git a/pipelines/src/pipelines/prediction.py b/pipelines/src/pipelines/prediction.py new file mode 100644 index 00000000..62dd739d --- /dev/null +++ b/pipelines/src/pipelines/prediction.py @@ -0,0 +1,118 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import pathlib +from os import environ as env + +from google_cloud_pipeline_components.v1.bigquery import BigqueryQueryJobOp +from kfp import dsl + +from pipelines.utils.query import generate_query +from components import lookup_model, model_batch_predict + + +RESOURCE_SUFFIX = env.get("RESOURCE_SUFFIX", "default") +# set training-serving skew thresholds and emails to receive alerts: +ALERT_EMAILS = [] +NOTIFICATION_CHANNELS = [] +SKEW_THRESHOLDS = {"defaultSkewThreshold": {"value": 0.001}} +# or set different thresholds per feature: +# SKEW_THRESHOLDS = {"skewThresholds": {"payment_type": {"value": 0.001}}, ... 
} + + +@dsl.pipeline(name="turbo-prediction-pipeline") +def pipeline( + project: str = env.get("VERTEX_PROJECT_ID"), + location: str = env.get("VERTEX_LOCATION"), + bq_location: str = env.get("BQ_LOCATION"), + bq_source_uri: str = "bigquery-public-data.chicago_taxi_trips.taxi_trips", + model_name: str = "xgb_regressor", + dataset: str = "turbo_templates", + timestamp: str = "2022-12-01 00:00:00", + machine_type: str = "n2-standard-4", + min_replicas: int = 3, + max_replicas: int = 10, +): + """ + Prediction pipeline which: + 1. Looks up the default model version (champion). + 2. Runs a batch prediction job with BigQuery as input and output. + 3. Optionally monitors training-serving skew. + + Args: + project (str): project id of the Google Cloud project + location (str): location of the Google Cloud project + bq_location (str): location of dataset in BigQuery + bq_source_uri (str): `<project>.<dataset>.<table>` of ingestion data in BigQuery + model_name (str): name of model + dataset (str): dataset id to store staging data & predictions in BigQuery + timestamp (str): Optional. Empty or a specific timestamp in ISO 8601 format + (YYYY-MM-DDThh:mm:ss.sss±hh:mm or YYYY-MM-DDThh:mm:ss). + If any time part is missing, it will be regarded as zero. + machine_type (str): Machine type to be used for Vertex Batch + Prediction. Example machine types: n1-standard-4, n1-standard-16, etc. + min_replicas (int): Minimum number of machines to distribute the + Vertex Batch Prediction job for horizontal scalability. + max_replicas (int): Maximum number of machines to distribute the + Vertex Batch Prediction job for horizontal scalability. + """ + + queries_folder = pathlib.Path(__file__).parent / "queries" + table = f"prep_prediction_{RESOURCE_SUFFIX}" + + prep_query = generate_query( + queries_folder / "preprocessing.sql", + source=bq_source_uri, + location=bq_location, + dataset=f"{project}.{dataset}", + table=table, + start_timestamp=timestamp, + ) + + prep_op = BigqueryQueryJobOp( + project=project, + location=bq_location, + query=prep_query, + ).set_display_name("Ingest & preprocess data") + + lookup_op = lookup_model( + model_name=model_name, + location=location, + project=project, + fail_on_model_not_found=True, + ).set_display_name("Look up champion model") + + ( + model_batch_predict( + model=lookup_op.outputs["model"], + job_display_name="turbo-template-predict-job", + location=location, + project=project, + source_uri=f"bq://{project}.{dataset}.{table}", + destination_uri=f"bq://{project}.{dataset}", + source_format="bigquery", + destination_format="bigquery", + instance_config={ + "instanceType": "object", + }, + machine_type=machine_type, + starting_replica_count=min_replicas, + max_replica_count=max_replicas, + monitoring_training_dataset=lookup_op.outputs["training_dataset"], + monitoring_alert_email_addresses=ALERT_EMAILS, + notification_channels=NOTIFICATION_CHANNELS, + monitoring_skew_config=SKEW_THRESHOLDS, + ) + .after(prep_op) + .set_display_name("Run prediction job") + ) diff --git a/pipelines/src/pipelines/queries/preprocessing.sql b/pipelines/src/pipelines/queries/preprocessing.sql new file mode 100644 index 00000000..0cd8f953 --- /dev/null +++ b/pipelines/src/pipelines/queries/preprocessing.sql @@ -0,0 +1,56 @@ +-- Create dataset if it doesn't exist +CREATE SCHEMA IF NOT EXISTS `{{ dataset }}` + OPTIONS ( + description = 'Chicago Taxi Trips with Turbo Template', + location = '{{ location }}'); + +-- Create (or replace) table with preprocessed data +DROP TABLE IF EXISTS `{{ dataset }}.{{ table }}`; +CREATE TABLE `{{
dataset }}.{{ table }}` AS ( +WITH start_timestamps AS ( +SELECT + IF('{{ start_timestamp }}' = '', + CURRENT_DATETIME(), + CAST('{{ start_timestamp }}' AS DATETIME)) AS start_timestamp +) +-- Ingest data between 2 and 3 months ago +,filtered_data AS ( + SELECT + * + FROM `{{ source }}`, start_timestamps + WHERE + DATE(trip_start_timestamp) BETWEEN + DATE_SUB(DATE(CAST(start_timestamps.start_timestamp AS DATETIME)), INTERVAL 3 MONTH) AND + DATE_SUB(DATE(start_timestamp), INTERVAL 2 MONTH) +) +-- Use the average trip_seconds as a replacement for NULL or 0 values +,mean_time AS ( + SELECT CAST(avg(trip_seconds) AS INT64) as avg_trip_seconds + FROM filtered_data +) + +SELECT + CAST(EXTRACT(DAYOFWEEK FROM trip_start_timestamp) AS FLOAT64) AS dayofweek, + CAST(EXTRACT(HOUR FROM trip_start_timestamp) AS FLOAT64) AS hourofday, + ST_DISTANCE( + ST_GEOGPOINT(pickup_longitude, pickup_latitude), + ST_GEOGPOINT(dropoff_longitude, dropoff_latitude)) AS trip_distance, + trip_miles, + CAST( CASE WHEN trip_seconds is NULL then m.avg_trip_seconds + WHEN trip_seconds <= 0 then m.avg_trip_seconds + ELSE trip_seconds + END AS FLOAT64) AS trip_seconds, + payment_type, + company, + {% if label %} + (fare + tips + tolls + extras) AS `{{ label }}`, + {% endif %} +FROM filtered_data AS t, mean_time AS m +WHERE + trip_miles > 0 AND fare > 0 AND fare < 1500 + {% for field in [ + 'fare', 'trip_start_timestamp', 'pickup_longitude', 'pickup_latitude', + 'dropoff_longitude', 'dropoff_latitude','payment_type','company' ] %} + AND `{{ field }}` IS NOT NULL + {% endfor %} +); diff --git a/pipelines/src/pipelines/tensorflow/README.md b/pipelines/src/pipelines/tensorflow/README.md deleted file mode 100644 index a5a6bfec..00000000 --- a/pipelines/src/pipelines/tensorflow/README.md +++ /dev/null @@ -1,89 +0,0 @@ - -# TensorFlow Pipelines - -## Training pipeline -The TensorFlow training pipeline can be found in [`training/pipeline.py`](training/pipeline.py). The training component [`train_tensorflow_model`](../../../pipeline_components/_tensorflow/_tensorflow/train/component.py) is the main training component which contains the implementation of a TensorFlow Keras model. -This component can then be wrapped in a custom kfp ContainerOp from [`google-cloud-pipeline-components`](https://github.com/kubeflow/pipelines/blob/master/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/utils.py) which submits a Vertex Training job with added flexibility for `machine_type`, `replica_count`, `accelerator_type` among other machine configurations. - -### Data -The input data is split into three parts in BigQuery and stored in Google Cloud Storage: -- 80% of the input data is used for model training -- 10% of the input data is used for model validation -- 10% of the input data is used for model testing/evaluation - -### Model Architecture -The architecture of the example TensorFlow Keras model is shown below: - -![TensorFlow Model Architecture](../../docs/images/tf_model_architecture.png) - -- **Input layer**: there is one input node for each of the 7 features used in the example: - - `dayofweek` - - `hourofday` - - `trip_distance` - - `trip_miles` - - `trip_seconds` - - `payment_type` - - `company` -- **Pre-processing layers** - - Categorical encoding for categorical features is done using Tensorflow's `StringLookup` layer. New/unknown values are handled using this layer's default parameters. (https://www.tensorflow.org/api_docs/python/tf/keras/layers/StringLookup). 
- - The feature `payment_type` is one-hot encoded. New/unknown categories are assigned to a one-hot encoded array with zeroes everywhere. - - The feature `company` is ordinal encoded. New/unknown categories are assigned to zero. - - Normalization for the numerical features (`dayofweek`, `hourofday`, `trip_distance`, `trip_miles`, `trip_seconds`) -- **Dense layers** - - One `Dense` layer with 64 units whose activation function is ReLU. - - One `Dense` layer with 32 units whose activation function is ReLU. -- **Output layer** - - One `Dense` layer with 1 unit where no activation is applied (this is because the example is a regression problem) - - -### Model hyperparameters -You can specify different hyperparameters through the `model_params` argument of `train_tensorflow_model`, including: -- Batch size -- No. of epochs to check for early stopping -- Learning rate -- Number of hidden units and type of activation function in each layer -- Loss function -- Optimization method -- Evaluation metrics -- Whether you want early stopping - -For a comprehensive list of options for the above hyperparameters, see the docstring in [`train.py`](../../../pipeline_components/_tensorflow/_tensorflow/train/component.py). - -### Model artifacts -A number of different model artifacts/objects are created by the training of the TensorFlow model. With these files, you can load the model into a new script (without any of the original training code) and run it or resume training from exactly where you left off. For more information, see [the TensorFlow documentation](https://www.tensorflow.org/api_docs/python/tf/keras/models/save_model). - - -![tensorflow_component_model&metrics_artifact](../../docs/images/tensorflow_component_model&metrics_artifact.png) -### Model test/evaluation -Once the model is trained, it will be used to get challenger predictions for evaluation purposes. In general, the component [`predict_tensorflow_model`](../kfp_components/tensorflow/predict.py), -which expects a single CSV file from which to create predictions for test data, is implemented in the pipeline. However, if you are working with larger test data, it is more efficient to -replace it with a prebuilt component provided by Google, [`ModelBatchPredictOp`](https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-0.2.1/google_cloud_pipeline_components.aiplatform.html), -to avoid crashes caused by insufficient memory. - -### Distribution strategy -In deep learning, it is common to use GPUs, which utilise a large number of simple cores allowing parallel computing through thousands of threads at a time, to train complicated neural networks fed by massive datasets. -For optimisation tasks, it is often better to use CPUs. - - There is a variable, `distribute_strategy`, in the TensorFlow training pipeline that allows you to set the distribution strategy. You have three options: -| Value | Description | -|---|---| -| `single` | This strategy uses a GPU if a GPU device of the requested kind is available; otherwise, it uses the CPU. | -| `mirror` | This strategy is typically used for training on one machine with multiple GPUs. | -| `multi` | This strategy implements synchronous distributed training across multiple machines, each with potentially multiple GPUs. | - -## Prediction pipeline -The TensorFlow prediction pipeline can be found in [prediction/pipeline.py](prediction/pipeline.py).
diff --git a/pipelines/src/pipelines/tensorflow/prediction/assets/.gitkeep b/pipelines/src/pipelines/tensorflow/prediction/assets/.gitkeep deleted index e69de29b..00000000 diff --git a/pipelines/src/pipelines/tensorflow/prediction/pipeline.py b/pipelines/src/pipelines/tensorflow/prediction/pipeline.py deleted file mode 100644 index 5121aea3..00000000 --- a/pipelines/src/pipelines/tensorflow/prediction/pipeline.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import json -import os -import pathlib - -from kfp.v2 import compiler, dsl - -from pipelines import generate_query -from bigquery_components import bq_query_to_table -from vertex_components import lookup_model, model_batch_predict - - -@dsl.pipeline(name="tensorflow-prediction-pipeline") -def tensorflow_pipeline( - project_id: str = os.environ.get("VERTEX_PROJECT_ID"), - project_location: str = os.environ.get("VERTEX_LOCATION"), - ingestion_project_id: str = os.environ.get("VERTEX_PROJECT_ID"), - model_name: str = "simple_tensorflow", - dataset_id: str = "preprocessing", - dataset_location: str = os.environ.get("VERTEX_LOCATION"), - ingestion_dataset_id: str = "chicago_taxi_trips", - timestamp: str = "2022-12-01 00:00:00", - batch_prediction_machine_type: str = "n1-standard-4", - batch_prediction_min_replicas: int = 3, - batch_prediction_max_replicas: int = 10, -): - """ - TensorFlow prediction pipeline which: - 1. Extracts a dataset from BQ - 2. Looks up the default model version (champion) and - the dataset which was used to train the model. - 3. Runs a BatchPredictionJob with optional training-serving skew detection. - 4. Post-processes predictions - 5. Loads predictions into BQ - - Args: - project_id (str): project id of the Google Cloud project - project_location (str): location of the Google Cloud project - pipeline_files_gcs_path (str): GCS path where the pipeline files are located - ingestion_project_id (str): project id containing the source bigquery data - for ingestion. This can be the same as `project_id` if the source data is - in the same project where the ML pipeline is executed. - model_name (str): name of model - model_label (str): label of model - dataset_id (str): id of BQ dataset used to store all staging data & predictions - dataset_location (str): location of dataset - ingestion_dataset_id (str): dataset id of ingestion data - timestamp (str): Optional. Empty or a specific timestamp in ISO 8601 format - (YYYY-MM-DDThh:mm:ss.sss±hh:mm or YYYY-MM-DDThh:mm:ss). - If any time part is missing, it will be regarded as zero. - batch_prediction_machine_type (str): Machine type to be used for Vertex Batch - Prediction. Example machine types: n1-standard-4, n1-standard-16, etc. - batch_prediction_min_replicas (int): Minimum number of machines to distribute the - Vertex Batch Prediction job for horizontal scalability - batch_prediction_max_replicas (int): Maximum number of machines to distribute the - Vertex Batch Prediction job for horizontal scalability.
- - Returns: - None - - """ - - # Create variables to ensure the same arguments are passed - # into different components of the pipeline - file_pattern = "" # e.g. "files-*.csv", used as file pattern on storage - time_column = "trip_start_timestamp" - ingestion_table = "taxi_trips" - table_suffix = "_tf_prediction" # suffix to table names - ingested_table = "ingested_data" + table_suffix - monitoring_alert_email_addresses = [] - monitoring_skew_config = {"defaultSkewThreshold": {"value": 0.001}} - - # generate sql queries which are used in ingestion and preprocessing - # operations - queries_folder = pathlib.Path(__file__).parent / "queries" - - ingest_query = generate_query( - queries_folder / "ingest.sql", - source_dataset=f"{ingestion_project_id}.{ingestion_dataset_id}", - source_table=ingestion_table, - filter_column=time_column, - filter_start_value=timestamp, - ) - - # data ingestion and preprocessing operations - kwargs = dict( - bq_client_project_id=project_id, - destination_project_id=project_id, - dataset_id=dataset_id, - dataset_location=dataset_location, - query_job_config=json.dumps(dict(write_disposition="WRITE_TRUNCATE")), - ) - ingest = bq_query_to_table( - query=ingest_query, table_id=ingested_table, **kwargs - ).set_display_name("Ingest data") - - # lookup champion model - champion_model = lookup_model( - model_name=model_name, - project_location=project_location, - project_id=project_id, - fail_on_model_not_found=True, - ).set_display_name("Look up champion model") - - # batch predict from BigQuery to BigQuery - bigquery_source_input_uri = f"bq://{project_id}.{dataset_id}.{ingested_table}" - bigquery_destination_output_uri = f"bq://{project_id}.{dataset_id}" - instance_config = {"instanceType": "object"} - - # predict data - batch_prediction = ( - model_batch_predict( - model=champion_model.outputs["model"], - job_display_name="my-tensorflow-batch-prediction-job", - project_location=project_location, - project_id=project_id, - source_uri=bigquery_source_input_uri, - destination_uri=bigquery_destination_output_uri, - source_format="bigquery", - destination_format="bigquery", - machine_type=batch_prediction_machine_type, - starting_replica_count=batch_prediction_min_replicas, - max_replica_count=batch_prediction_max_replicas, - monitoring_training_dataset=champion_model.outputs["training_dataset"], - monitoring_alert_email_addresses=monitoring_alert_email_addresses, - monitoring_skew_config=monitoring_skew_config, - instance_config=instance_config, - ) - .after(ingest) - .set_display_name("Batch prediction job") - ) - - -if __name__ == "__main__": - compiler.Compiler().compile( - pipeline_func=tensorflow_pipeline, - package_path="prediction.json", - type_check=False, - ) diff --git a/pipelines/src/pipelines/tensorflow/prediction/queries/ingest.sql b/pipelines/src/pipelines/tensorflow/prediction/queries/ingest.sql deleted file mode 100644 index e01a7c57..00000000 --- a/pipelines/src/pipelines/tensorflow/prediction/queries/ingest.sql +++ /dev/null @@ -1,56 +0,0 @@ --- Copyright 2022 Google LLC - --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at - --- https://www.apache.org/licenses/LICENSE-2.0 - --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
--- See the License for the specific language governing permissions and --- limitations under the License. - --- Treat "filter_start_value" as the current time, unless it is empty then use CURRENT_DATETIME() instead --- This allows us to set the filter_start_value to a specific time for testing or for backfill -with filter_start_values as ( - SELECT - IF("{{ filter_start_value }}" = '', CURRENT_DATETIME(), CAST("{{ filter_start_value }}" AS DATETIME)) as filter_start_value -) --- Ingest data between 2 and 3 months ago -,filtered_data as ( - SELECT - * - FROM `{{ source_dataset }}.{{ source_table }}`, filter_start_values - WHERE - DATE({{ filter_column }}) BETWEEN - DATE_SUB(DATE(CAST(filter_start_values.filter_start_value as DATETIME)), INTERVAL 3 MONTH) AND - DATE_SUB(DATE(filter_start_value), INTERVAL 2 MONTH) -) --- Use the average trip_seconds as a replacement for NULL or 0 values -,mean_time as ( - SELECT CAST(avg(trip_seconds) AS INT64) as avg_trip_seconds - FROM filtered_data -) - -SELECT - CAST(EXTRACT(DAYOFWEEK FROM trip_start_timestamp) AS FLOAT64) AS dayofweek, - CAST(EXTRACT(HOUR FROM trip_start_timestamp) AS FLOAT64) AS hourofday, - ST_DISTANCE( - ST_GEOGPOINT(pickup_longitude, pickup_latitude), - ST_GEOGPOINT(dropoff_longitude, dropoff_latitude)) AS trip_distance, - trip_miles, - CAST( CASE WHEN trip_seconds is NULL then m.avg_trip_seconds - WHEN trip_seconds <= 0 then m.avg_trip_seconds - ELSE trip_seconds - END AS FLOAT64) AS trip_seconds, - payment_type, - company, -FROM filtered_data as t, mean_time as m -WHERE - trip_miles > 0 AND fare > 0 AND fare < 1500 - {% for field in ["fare", "trip_start_timestamp", "pickup_longitude", - "pickup_latitude", "dropoff_longitude", "dropoff_latitude","payment_type","company"] %} - AND `{{ field }}` IS NOT NULL - {% endfor %} diff --git a/pipelines/src/pipelines/tensorflow/training/assets/train_tf_model.py b/pipelines/src/pipelines/tensorflow/training/assets/train_tf_model.py deleted file mode 100644 index c89a7880..00000000 --- a/pipelines/src/pipelines/tensorflow/training/assets/train_tf_model.py +++ /dev/null @@ -1,294 +0,0 @@ -import argparse -import os -import json -import logging -import sys - -import tensorflow as tf -from pathlib import Path -from tensorflow.data import Dataset -from tensorflow.keras import Input, Model, optimizers -from tensorflow.keras.layers import Dense, Normalization, StringLookup, Concatenate - -# used for monitoring during prediction time -TRAINING_DATASET_INFO = "training_dataset.json" -# numeric/categorical features in Chicago trips dataset to be preprocessed -NUM_COLS = ["dayofweek", "hourofday", "trip_distance", "trip_miles", "trip_seconds"] -ORD_COLS = ["company"] -OHE_COLS = ["payment_type"] -DEFAULT_HPARAMS = dict( - batch_size=100, - epochs=1, - loss_fn="MeanSquaredError", - optimizer="Adam", - learning_rate=0.001, - metrics=[ - "RootMeanSquaredError", - "MeanAbsoluteError", - "MeanAbsolutePercentageError", - "MeanSquaredLogarithmicError", - ], - hidden_units=[(10, "relu")], - distribute_strategy="single", - early_stopping_epochs=5, - label="total_fare", -) - -logging.getLogger().setLevel(logging.INFO) - - -def create_dataset(input_data: Path, label_name: str, model_params: dict) -> Dataset: - """Create a TF Dataset from input csv files. - Args: - input_data (Input[Dataset]): Train/Valid data in CSV format - label_name (str): Name of column containing the labels - model_params (dict): model parameters - file_pattern (str): Read data from one or more files. 
If empty, then - training and validation data is read from single file respectively. - For multiple files, use a pattern e.g. "files-*.csv". - Returns: - dataset (TF Dataset): TF dataset where each element is a (features, labels) - tuple that corresponds to a batch of CSV rows - """ - - # shuffle & shuffle_buffer_size added to rearrange input data - # passed into model training - # num_rows_for_inference is for auto detection of datatypes - # while creating the dataset. - # If a float column has a high proportion of integer values (0/1 etc), - # the method wrongly detects it as a tf.int32 which fails during training time, - # hence the high hardcoded value (default is 100) - - # Apply data sharding: Sharded elements are produced by the dataset - # Each worker will process the whole dataset and discard the portion that is - # not for itself. Note that for this mode to correctly partitions the dataset - # elements, the dataset needs to produce elements in a deterministic order. - data_options = tf.data.Options() - data_options.experimental_distribute.auto_shard_policy = ( - tf.data.experimental.AutoShardPolicy.DATA - ) - - logging.info(f"Creating dataset from CSV file(s) at {input_data}...") - created_dataset = tf.data.experimental.make_csv_dataset( - file_pattern=str(input_data), - batch_size=model_params["batch_size"], - label_name=label_name, - num_epochs=model_params["epochs"], - shuffle=True, - shuffle_buffer_size=1000, - num_rows_for_inference=20000, - ) - return created_dataset.with_options(data_options) - - -def get_distribution_strategy(distribute_strategy: str) -> tf.distribute.Strategy: - """Set distribute strategy based on input string. - Args: - distribute_strategy (str): single, mirror or multi - Returns: - strategy (tf.distribute.Strategy): distribution strategy - """ - logging.info(f"Distribution strategy: {distribute_strategy}") - - # Single machine, single compute device - if distribute_strategy == "single": - if len(tf.config.list_physical_devices("GPU")): - strategy = tf.distribute.OneDeviceStrategy(device="/gpu:0") - else: - strategy = tf.distribute.OneDeviceStrategy(device="/cpu:0") - # Single machine, multiple compute device - elif distribute_strategy == "mirror": - strategy = tf.distribute.MirroredStrategy() - # Multiple machine, multiple compute device - elif distribute_strategy == "multi": - strategy = tf.distribute.MultiWorkerMirroredStrategy() - else: - raise RuntimeError(f"Distribute strategy: {distribute_strategy} not supported") - return strategy - - -def normalization(name: str, dataset: Dataset) -> Normalization: - """Create a Normalization layer for a feature. - Args: - name (str): name of feature to be normalized - dataset (Dataset): dataset to adapt layer - Returns: - normalization layer (Normalization): adapted normalization layer - of shape (?,1) - """ - logging.info(f"Normalizing numerical input '{name}'...") - x = Normalization(axis=None, name=f"normalize_{name}") - x.adapt(dataset.map(lambda y, _: y[name])) - return x - - -def str_lookup(name: str, dataset: Dataset, output_mode: str) -> StringLookup: - """Create a StringLookup layer for a feature. - Args: - name (str): name of feature to be encoded - dataset (Dataset): dataset to adapt layer - output_mode (str): argument for StringLookup layer (e.g. 
'one_hot', 'int') - Returns: - StringLookup layer (StringLookup): adapted StringLookup layer of shape (?,X) - """ - logging.info(f"Encoding categorical input '{name}' ({output_mode})...") - x = StringLookup(output_mode=output_mode, name=f"str_lookup_{output_mode}_{name}") - x.adapt(dataset.map(lambda y, _: y[name])) - logging.info(f"Vocabulary: {x.get_vocabulary()}") - return x - - -def build_and_compile_model(dataset: Dataset, model_params: dict) -> Model: - """Build and compile model. - Args: - dataset (Dataset): training dataset - model_params (dict): model parameters - Returns: - model (Model): built and compiled model - """ - - # create inputs (scalars with shape `()`) - num_ins = {name: Input(shape=(), name=name, dtype=tf.float32) for name in NUM_COLS} - ord_ins = {name: Input(shape=(), name=name, dtype=tf.string) for name in ORD_COLS} - cat_ins = {name: Input(shape=(), name=name, dtype=tf.string) for name in OHE_COLS} - - # join all inputs and expand by 1 dimension. NOTE: this is useful when passing - # in scalar inputs to a model in Vertex AI batch predictions or endpoints e.g. - # `{"instances": {"input1": 1.0, "input2": "str"}}` instead of - # `{"instances": {"input1": [1.0], "input2": ["str"]}` - all_ins = {**num_ins, **ord_ins, **cat_ins} - exp_ins = {n: tf.expand_dims(i, axis=-1) for n, i in all_ins.items()} - - # preprocess expanded inputs - num_encoded = [normalization(n, dataset)(exp_ins[n]) for n in NUM_COLS] - ord_encoded = [str_lookup(n, dataset, "int")(exp_ins[n]) for n in ORD_COLS] - ohe_encoded = [str_lookup(n, dataset, "one_hot")(exp_ins[n]) for n in OHE_COLS] - - # ensure ordinal encoded layers is of type float32 (like the other layers) - ord_encoded = [tf.cast(x, tf.float32) for x in ord_encoded] - - # concat encoded inputs and add dense layers including output layer - x = num_encoded + ord_encoded + ohe_encoded - x = Concatenate()(x) - for units, activation in model_params["hidden_units"]: - x = Dense(units, activation=activation)(x) - x = Dense(1, name="output", activation="linear")(x) - - model = Model(inputs=all_ins, outputs=x, name="nn_model") - model.summary() - - logging.info(f"Use optimizer {model_params['optimizer']}") - optimizer = optimizers.get(model_params["optimizer"]) - optimizer.learning_rate = model_params["learning_rate"] - - model.compile( - loss=model_params["loss_fn"], - optimizer=optimizer, - metrics=model_params["metrics"], - ) - - return model - - -def _is_chief(strategy: tf.distribute.Strategy) -> bool: - """Determine whether current worker is the chief (master). 
See more info: - - https://www.tensorflow.org/tutorials/distribute/multi_worker_with_keras - - https://www.tensorflow.org/api_docs/python/tf/distribute/cluster_resolver/ClusterResolver # noqa: E501 - Args: - strategy (tf.distribute.Strategy): strategy - Returns: - is_chief (bool): True if worker is chief, otherwise False - """ - cr = strategy.cluster_resolver - return (cr is None) or (cr.task_type == "chief" and cr.task_id == 0) - - -def _get_temp_dir(dirpath, task_id): - base_dirpath = "workertemp_" + str(task_id) - temp_dir = os.path.join(dirpath, base_dirpath) - tf.io.gfile.makedirs(temp_dir) - return temp_dir - - -parser = argparse.ArgumentParser() -parser.add_argument("--train_data", type=str, required=True) -parser.add_argument("--valid_data", type=str, required=True) -parser.add_argument("--test_data", type=str, required=True) -parser.add_argument("--model", default=os.getenv("AIP_MODEL_DIR"), type=str, help="") -parser.add_argument("--metrics", type=str, required=True) -parser.add_argument("--hparams", default={}, type=json.loads) -args = parser.parse_args() - -# merge dictionaries by overwriting default_model_params if provided in model_params -hparams = {**DEFAULT_HPARAMS, **args.hparams} -logging.info(f"Using model hyper-parameters: {hparams}") -label = hparams["label"] - -# Set distribute strategy before any TF operations -strategy = get_distribution_strategy(hparams["distribute_strategy"]) - -train_ds = create_dataset(Path(args.train_data), label, hparams) -valid_ds = create_dataset(Path(args.valid_data), label, hparams) -test_ds = create_dataset(Path(args.test_data), label, hparams) - -train_features = list(train_ds.element_spec[0].keys()) -valid_features = list(valid_ds.element_spec[0].keys()) -logging.info(f"Training feature names: {train_features}") -logging.info(f"Validation feature names: {valid_features}") - -if len(train_features) != len(valid_features): - raise RuntimeError(f"No. 
of training features != # validation features") - -with strategy.scope(): - tf_model = build_and_compile_model(train_ds, hparams) - -logging.info("Use early stopping") -callback = tf.keras.callbacks.EarlyStopping( - monitor="loss", mode="min", patience=hparams["early_stopping_epochs"] -) - -logging.info("Fit model...") -history = tf_model.fit( - train_ds, - batch_size=hparams["batch_size"], - epochs=hparams["epochs"], - validation_data=valid_ds, - callbacks=[callback], -) - -# only persist output files if current worker is chief -if not _is_chief(strategy): - logging.info("not chief node, exiting now") - sys.exit() - -os.makedirs(args.model, exist_ok=True) -logging.info(f"Save model to: {args.model}") -tf_model.save(args.model, save_format="tf") - -logging.info(f"Save metrics to: {args.metrics}") -eval_metrics = dict(zip(tf_model.metrics_names, tf_model.evaluate(test_ds))) - -metrics = { - "problemType": "regression", - "rootMeanSquaredError": eval_metrics["root_mean_squared_error"], - "meanAbsoluteError": eval_metrics["mean_absolute_error"], - "meanAbsolutePercentageError": eval_metrics["mean_absolute_percentage_error"], - "rSquared": None, - "rootMeanSquaredLogError": eval_metrics["mean_squared_logarithmic_error"], -} - -with open(args.metrics, "w") as fp: - json.dump(metrics, fp) - -# Persist URIs of training file(s) for model monitoring in batch predictions -path = Path(args.model) / TRAINING_DATASET_INFO -training_dataset_for_monitoring = { - "gcsSource": {"uris": [args.train_data]}, - "dataFormat": "csv", - "targetField": hparams["label"], -} -logging.info(f"Save training dataset info for model monitoring: {path}") -logging.info(f"Training dataset: {training_dataset_for_monitoring}") - -with open(path, "w") as fp: - json.dump(training_dataset_for_monitoring, fp) diff --git a/pipelines/src/pipelines/tensorflow/training/pipeline.py b/pipelines/src/pipelines/tensorflow/training/pipeline.py deleted file mode 100644 index cb99e091..00000000 --- a/pipelines/src/pipelines/tensorflow/training/pipeline.py +++ /dev/null @@ -1,261 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
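A note on the metrics handling in the training script above: Keras `Model.evaluate` returns a plain list of scalars ordered like `metrics_names`, which is why the script builds its metrics dictionary with `dict(zip(...))`. A minimal sketch of that mapping, with made-up values:

```
# Illustration only: the values are invented, but the mapping mirrors the
# `dict(zip(tf_model.metrics_names, tf_model.evaluate(test_ds)))` line above.
metrics_names = ["loss", "root_mean_squared_error", "mean_absolute_error"]
evaluate_output = [0.25, 0.5, 0.1]  # what evaluate(test_ds) might return

eval_metrics = dict(zip(metrics_names, evaluate_output))
assert eval_metrics["root_mean_squared_error"] == 0.5
```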
-
-import json
-import os
-import pathlib
-
-from kfp.v2 import compiler, dsl
-from pipelines import generate_query
-from bigquery_components import bq_query_to_table, extract_bq_to_dataset
-from vertex_components import (
-    lookup_model,
-    custom_train_job,
-    import_model_evaluation,
-    update_best_model,
-)
-
-
-@dsl.pipeline(name="tensorflow-train-pipeline")
-def tensorflow_pipeline(
-    project_id: str = os.environ.get("VERTEX_PROJECT_ID"),
-    project_location: str = os.environ.get("VERTEX_LOCATION"),
-    ingestion_project_id: str = os.environ.get("VERTEX_PROJECT_ID"),
-    model_name: str = "simple_tensorflow",
-    dataset_id: str = "preprocessing",
-    dataset_location: str = os.environ.get("VERTEX_LOCATION"),
-    ingestion_dataset_id: str = "chicago_taxi_trips",
-    timestamp: str = "2022-12-01 00:00:00",
-    staging_bucket: str = os.environ.get("VERTEX_PIPELINE_ROOT"),
-    pipeline_files_gcs_path: str = os.environ.get("PIPELINE_FILES_GCS_PATH"),
-    test_dataset_uri: str = "",
-):
-    """
-    TensorFlow Keras training pipeline which:
-    1. Splits and extracts a dataset from BQ to GCS
-    2. Trains a model via Vertex AI CustomTrainingJob
-    3. Evaluates the model against the current champion model
-    4. If better, the model becomes the new default model
-
-    Args:
-        project_id (str): project id of the Google Cloud project
-        project_location (str): location of the Google Cloud project
-        pipeline_files_gcs_path (str): GCS path where the pipeline files are located
-        ingestion_project_id (str): project id containing the source bigquery data
-            for ingestion. This can be the same as `project_id` if the source data is
-            in the same project where the ML pipeline is executed.
-        model_name (str): name of model
-        dataset_id (str): id of BQ dataset used to store all staging data & predictions
-        dataset_location (str): location of dataset
-        ingestion_dataset_id (str): dataset id of ingestion data
-        timestamp (str): Optional. Empty or a specific timestamp in ISO 8601 format
-            (YYYY-MM-DDThh:mm:ss.sss±hh:mm or YYYY-MM-DDThh:mm:ss).
-            If any time part is missing, it will be regarded as zero.
-        staging_bucket (str): Staging bucket for pipeline artifacts.
-        test_dataset_uri (str): Optional. GCS URI of static held-out test dataset.
- """ - - # Create variables to ensure the same arguments are passed - # into different components of the pipeline - label_column_name = "total_fare" - time_column = "trip_start_timestamp" - ingestion_table = "taxi_trips" - table_suffix = "_tf_training" # suffix to table names - ingested_table = "ingested_data" + table_suffix - preprocessed_table = "preprocessed_data" + table_suffix - train_table = "train_data" + table_suffix - valid_table = "valid_data" + table_suffix - test_table = "test_data" + table_suffix - primary_metric = "rootMeanSquaredError" - train_script_uri = f"{pipeline_files_gcs_path}/training/assets/train_tf_model.py" - hparams = dict( - batch_size=100, - epochs=5, - loss_fn="MeanSquaredError", - optimizer="Adam", - learning_rate=0.01, - hidden_units=[(64, "relu"), (32, "relu")], - distribute_strategy="single", - early_stopping_epochs=5, - ) - - # generate sql queries which are used in ingestion and preprocessing - # operations - - queries_folder = pathlib.Path(__file__).parent / "queries" - - ingest_query = generate_query( - queries_folder / "ingest.sql", - source_dataset=f"{ingestion_project_id}.{ingestion_dataset_id}", - source_table=ingestion_table, - filter_column=time_column, - target_column=label_column_name, - filter_start_value=timestamp, - ) - split_train_query = generate_query( - queries_folder / "sample.sql", - source_dataset=dataset_id, - source_table=ingested_table, - num_lots=10, - lots=tuple(range(8)), - ) - split_valid_query = generate_query( - queries_folder / "sample.sql", - source_dataset=dataset_id, - source_table=ingested_table, - num_lots=10, - lots="(8)", - ) - split_test_query = generate_query( - queries_folder / "sample.sql", - source_dataset=dataset_id, - source_table=ingested_table, - num_lots=10, - lots="(9)", - ) - data_cleaning_query = generate_query( - queries_folder / "engineer_features.sql", - source_dataset=dataset_id, - source_table=train_table, - ) - - # data ingestion and preprocessing operations - - kwargs = dict( - bq_client_project_id=project_id, - destination_project_id=project_id, - dataset_id=dataset_id, - dataset_location=dataset_location, - query_job_config=json.dumps(dict(write_disposition="WRITE_TRUNCATE")), - ) - ingest = bq_query_to_table( - query=ingest_query, table_id=ingested_table, **kwargs - ).set_display_name("Ingest data") - - # exporting data to GCS from BQ - split_train_data = ( - bq_query_to_table(query=split_train_query, table_id=train_table, **kwargs) - .after(ingest) - .set_display_name("Split train data") - ) - split_valid_data = ( - bq_query_to_table(query=split_valid_query, table_id=valid_table, **kwargs) - .after(ingest) - .set_display_name("Split validation data") - ) - split_test_data = ( - bq_query_to_table(query=split_test_query, table_id=test_table, **kwargs) - .after(ingest) - .set_display_name("Split test data") - ) - data_cleaning = ( - bq_query_to_table( - query=data_cleaning_query, table_id=preprocessed_table, **kwargs - ) - .after(split_train_data) - .set_display_name("Clean data") - ) - - # data extraction to gcs - - train_dataset = ( - extract_bq_to_dataset( - bq_client_project_id=project_id, - source_project_id=project_id, - dataset_id=dataset_id, - table_name=preprocessed_table, - dataset_location=dataset_location, - ) - .after(data_cleaning) - .set_display_name("Extract train data to storage") - ).outputs["dataset"] - valid_dataset = ( - extract_bq_to_dataset( - bq_client_project_id=project_id, - source_project_id=project_id, - dataset_id=dataset_id, - table_name=valid_table, - 
dataset_location=dataset_location, - ) - .after(split_valid_data) - .set_display_name("Extract validation data to storage") - ).outputs["dataset"] - test_dataset = ( - extract_bq_to_dataset( - bq_client_project_id=project_id, - source_project_id=project_id, - dataset_id=dataset_id, - table_name=test_table, - dataset_location=dataset_location, - destination_gcs_uri=test_dataset_uri, - ) - .after(split_test_data) - .set_display_name("Extract test data to storage") - .set_caching_options(False) - ).outputs["dataset"] - - existing_model = ( - lookup_model( - model_name=model_name, - project_location=project_location, - project_id=project_id, - fail_on_model_not_found=False, - ) - .set_display_name("Lookup past model") - .outputs["model_resource_name"] - ) - - train_model = custom_train_job( - train_script_uri=train_script_uri, - train_data=train_dataset, - valid_data=valid_dataset, - test_data=test_dataset, - project_id=project_id, - project_location=project_location, - model_display_name=model_name, - train_container_uri="europe-docker.pkg.dev/vertex-ai/training/tf-cpu.2-6:latest", # noqa: E501 - serving_container_uri="europe-docker.pkg.dev/vertex-ai/prediction/tf2-cpu.2-6:latest", # noqa: E501 - hparams=hparams, - staging_bucket=staging_bucket, - parent_model=existing_model, - ).set_display_name("Train model") - - evaluation = import_model_evaluation( - model=train_model.outputs["model"], - metrics=train_model.outputs["metrics"], - test_dataset=test_dataset, - pipeline_job_id="{{$.pipeline_job_name}}", - project_location=project_location, - ).set_display_name("Import evaluation") - - with dsl.Condition(existing_model != "", "champion-exists"): - update_best_model( - challenger=train_model.outputs["model"], - challenger_evaluation=evaluation.outputs["model_evaluation"], - parent_model=existing_model, - eval_metric=primary_metric, - eval_lower_is_better=True, - project_id=project_id, - project_location=project_location, - ).set_display_name("Update best model") - - -if __name__ == "__main__": - compiler.Compiler().compile( - pipeline_func=tensorflow_pipeline, - package_path="training.json", - type_check=False, - ) diff --git a/pipelines/src/pipelines/tensorflow/training/queries/engineer_features.sql b/pipelines/src/pipelines/tensorflow/training/queries/engineer_features.sql deleted file mode 100644 index b8094a35..00000000 --- a/pipelines/src/pipelines/tensorflow/training/queries/engineer_features.sql +++ /dev/null @@ -1,18 +0,0 @@ --- Copyright 2022 Google LLC - --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at - --- https://www.apache.org/licenses/LICENSE-2.0 - --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
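For context on the `num_lots`/`lots` arguments passed to the split queries above: `sample.sql` (included later in this diff) hashes each row into one of `num_lots` buckets, and the three split queries select disjoint bucket sets, giving an 80/10/10 train/validation/test split. A rough Python sketch of the same assignment, with a plain integer standing in for BigQuery's `FARM_FINGERPRINT`:

```
# Sketch of the lot assignment performed by sample.sql.
num_lots = 10
train_lots = tuple(range(8))  # lots 0-7 -> ~80% of rows
valid_lots = (8,)             # lot 8    -> ~10%
test_lots = (9,)              # lot 9    -> ~10%

def assign_lot(row_fingerprint: int) -> int:
    # BigQuery equivalent: MOD(ABS(FARM_FINGERPRINT(TO_JSON_STRING(t))), num_lots)
    return abs(row_fingerprint) % num_lots

assert assign_lot(-1234567) in train_lots + valid_lots + test_lots
```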
- -/* The Purpose of this query is to clean the data before the training*/ -SELECT - * -FROM `{{ source_dataset }}.{{ source_table }}` diff --git a/pipelines/src/pipelines/tensorflow/training/queries/ingest.sql b/pipelines/src/pipelines/tensorflow/training/queries/ingest.sql deleted file mode 100644 index de9459fc..00000000 --- a/pipelines/src/pipelines/tensorflow/training/queries/ingest.sql +++ /dev/null @@ -1,57 +0,0 @@ --- Copyright 2022 Google LLC - --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at - --- https://www.apache.org/licenses/LICENSE-2.0 - --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. - --- Treat "filter_start_value" as the current time, unless it is empty then use CURRENT_DATETIME() instead --- This allows us to set the filter_start_value to a specific time for testing or for backfill -with filter_start_values as ( - SELECT - IF("{{ filter_start_value }}" = '', CURRENT_DATETIME(), CAST("{{ filter_start_value }}" AS DATETIME)) as filter_start_value -) --- Ingest data between 2 and 3 months ago -,filtered_data as ( - SELECT - * - FROM `{{ source_dataset }}.{{ source_table }}`, filter_start_values - WHERE - DATE({{ filter_column }}) BETWEEN - DATE_SUB(DATE(CAST(filter_start_values.filter_start_value as DATETIME)), INTERVAL 3 MONTH) AND - DATE_SUB(DATE(filter_start_value), INTERVAL 2 MONTH) -) --- Use the average trip_seconds as a replacement for NULL or 0 values -,mean_time as ( - SELECT CAST(avg(trip_seconds) AS INT64) as avg_trip_seconds - FROM filtered_data -) - -SELECT - CAST(EXTRACT(DAYOFWEEK FROM trip_start_timestamp) AS FLOAT64) AS dayofweek, - CAST(EXTRACT(HOUR FROM trip_start_timestamp) AS FLOAT64) AS hourofday, - ST_DISTANCE( - ST_GEOGPOINT(pickup_longitude, pickup_latitude), - ST_GEOGPOINT(dropoff_longitude, dropoff_latitude)) AS trip_distance, - trip_miles, - CAST( CASE WHEN trip_seconds is NULL then m.avg_trip_seconds - WHEN trip_seconds <= 0 then m.avg_trip_seconds - ELSE trip_seconds - END AS FLOAT64) AS trip_seconds, - payment_type, - company, - (fare + tips + tolls + extras) AS `{{ target_column }}`, -FROM filtered_data as t, mean_time as m -WHERE - trip_miles > 0 AND fare > 0 AND fare < 1500 - {% for field in ["fare", "trip_start_timestamp", "pickup_longitude", - "pickup_latitude", "dropoff_longitude", "dropoff_latitude","payment_type","company"] %} - AND `{{ field }}` IS NOT NULL - {% endfor %} diff --git a/pipelines/src/pipelines/tensorflow/training/queries/sample.sql b/pipelines/src/pipelines/tensorflow/training/queries/sample.sql deleted file mode 100644 index bf47bbf6..00000000 --- a/pipelines/src/pipelines/tensorflow/training/queries/sample.sql +++ /dev/null @@ -1,20 +0,0 @@ --- Copyright 2022 Google LLC - --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at - --- https://www.apache.org/licenses/LICENSE-2.0 - --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
--- See the License for the specific language governing permissions and --- limitations under the License. - -SELECT * -FROM - `{{ source_dataset }}.{{ source_table }}` AS t -WHERE - MOD(ABS(FARM_FINGERPRINT(TO_JSON_STRING(t))), - {{ num_lots }}) IN {{ lots }} diff --git a/pipelines/src/pipelines/training.py b/pipelines/src/pipelines/training.py new file mode 100644 index 00000000..2041fe42 --- /dev/null +++ b/pipelines/src/pipelines/training.py @@ -0,0 +1,160 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import pathlib +from os import environ as env + +from google_cloud_pipeline_components.v1.bigquery import BigqueryQueryJobOp +from kfp import dsl +from kfp.dsl import Dataset, Input, Metrics, Model, Output + +from pipelines.utils.query import generate_query +from components import extract_table, upload_model + + +LABEL = "total_fare" +PRIMARY_METRIC = "rootMeanSquaredError" +HPARAMS = dict( + n_estimators=200, + early_stopping_rounds=10, + objective="reg:squarederror", + booster="gbtree", + learning_rate=0.3, + min_split_loss=0, + max_depth=6, + label=LABEL, +) +RESOURCE_SUFFIX = env.get("RESOURCE_SUFFIX", "default") +TRAINING_IMAGE = f"{env['CONTAINER_IMAGE_REGISTRY']}/training:{RESOURCE_SUFFIX}" +PREDICTION_IMAGE = f"{env['CONTAINER_IMAGE_REGISTRY']}/prediction:{RESOURCE_SUFFIX}" + + +@dsl.container_component +def train( + input_data: Input[Dataset], + input_test_path: str, + hparams: dict, + train_data: Output[Dataset], + valid_data: Output[Dataset], + test_data: Output[Dataset], + model: Output[Model], + metrics: Output[Metrics], +): + return dsl.ContainerSpec( + image=TRAINING_IMAGE, + command=["python"], + args=[ + "-m" "training", + "--input_path", + input_data.path, + dsl.IfPresentPlaceholder( + input_name="input_test_path", + then=["--input_test_path", input_test_path], + ), + "--hparams", + hparams, + "--output_train_path", + train_data.path, + "--output_valid_path", + valid_data.path, + "--output_test_path", + test_data.path, + "--output_model", + model.path, + "--output_metrics", + metrics.path, + ], + ) + + +@dsl.pipeline(name="turbo-training-pipeline") +def pipeline( + project: str = env.get("VERTEX_PROJECT_ID"), + location: str = env.get("VERTEX_LOCATION"), + bq_location: str = env.get("BQ_LOCATION"), + bq_source_uri: str = "bigquery-public-data.chicago_taxi_trips.taxi_trips", + model_name: str = "xgb_regressor", + dataset: str = "turbo_templates", + timestamp: str = "2022-12-01 00:00:00", + test_data_gcs_uri: str = "", +): + """ + Training pipeline which: + 1. Preprocesses data in BigQuery + 2. Extracts data to Cloud Storage + 3. Trains a model using a custom prebuilt container + 4. Uploads the model to Model Registry + 5. Evaluates the model against a champion model + 6. Selects a new champion based on the primary metrics + + Args: + project (str): project id of the Google Cloud project + location (str): location of the Google Cloud project + bq_location (str): location of dataset in BigQuery + bq_source_uri (str): `..
      ` of ingestion data in BigQuery + model_name (str): name of model + dataset (str): dataset id to store staging data & predictions in BigQuery + timestamp (str): Optional. Empty or a specific timestamp in ISO 8601 format + (YYYY-MM-DDThh:mm:ss.sssΒ±hh:mm or YYYY-MM-DDThh:mm:ss). + If any time part is missing, it will be regarded as zero. + test_data_gcs_uri (str): Optional. GCS URI of static held-out test dataset. + """ + + table = f"prep_training_{RESOURCE_SUFFIX}" + queries_folder = pathlib.Path(__file__).parent / "queries" + + prep_query = generate_query( + queries_folder / "preprocessing.sql", + source=bq_source_uri, + location=bq_location, + dataset=f"{project}.{dataset}", + table=table, + label=LABEL, + start_timestamp=timestamp, + ) + + prep_op = BigqueryQueryJobOp( + project=project, + location=bq_location, + query=prep_query, + ).set_display_name("Ingest & preprocess data") + + data_op = ( + extract_table( + project=project, + location=bq_location, + table=f"{project}:{dataset}.{table}", + ) + .after(prep_op) + .set_display_name("Extract data") + ) + + train_op = train( + input_data=data_op.outputs["data"], + input_test_path=test_data_gcs_uri, + hparams=HPARAMS, + ).set_display_name("Train model") + + upload_model( + project=project, + location=location, + model=train_op.outputs["model"], + model_evaluation=train_op.outputs["metrics"], + test_data=train_op.outputs["test_data"], + eval_metric=PRIMARY_METRIC, + eval_lower_is_better=True, + serving_container_image=PREDICTION_IMAGE, + model_name=model_name, + model_description="Predict price of a taxi trip.", + pipeline_job_id="{{$.pipeline_job_name}}", + ).set_display_name("Upload model") diff --git a/pipelines/src/pipelines/trigger/README.md b/pipelines/src/pipelines/trigger/README.md deleted file mode 100644 index 215b0caf..00000000 --- a/pipelines/src/pipelines/trigger/README.md +++ /dev/null @@ -1,64 +0,0 @@ - -# Trigger pipeline runs in the sandbox environment - -The simplest way to trigger a pipeline run in the sandbox environment is to use the following make command: -``` -make run pipeline= [ enable_caching= ] -``` -This command compiles the pipeline, copies assets to GCS, and then triggers the pipeline. It relies on several environment variables which you must specify beforehand in `env.sh`. These variables are: - -- `VERTEX_PROJECT_ID`: your project id -- `VERTEX_LOCATION`: your project location -- `VERTEX_PIPELINE_ROOT`: URI for root directory -- `VERTEX_SA_EMAIL`: service account -- `VERTEX_CMEK_IDENTIFIER`: customer-managed encryption key (can be `""` or `None`) -- `VERTEX_NETWORK`: private network (can be `""` or `None`) - -Before you run the pipeline, if you have made any changes to pipeline components, make sure to re-compile the pipeline components to their YAML format with: - -```bash -make compile-components GROUP= -``` - -Or to re-compile all pipeline components to YAML: -```bash -make compile-all-components -``` - -### Deploy as a Cloud Function - -This directory can also be deployed as a Cloud Function to trigger pipeline runs from a Pub/Sub message. The format of the Pub/Sub message is as follows: - -#### Pub/Sub attributes - -- `template_path` - the GCS path to the compiled KFP pipeline definition (JSON) -- `enable_caching` (optional) - True or False. This can be used to override the default Vertex Pipelines caching behaviour. - -#### Pub/Sub data - -The data field contains the pipeline input parameters as a JSON string, encoded using base64 encoding. 
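To make that message format concrete, here is a hedged sketch of publishing a trigger message from Python; the project, topic, and GCS path are placeholders, not values from this repo:

```
import json

from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
topic = publisher.topic_path("my-project", "my-topic")  # hypothetical names

# Pipeline parameters go in the message body as JSON; Pub/Sub delivers them
# to the Cloud Function base64-encoded in event["data"].
data = json.dumps({"model_name": "simple_xgboost"}).encode("utf-8")

publisher.publish(
    topic,
    data,
    template_path="gs://my-bucket/training.json",  # hypothetical GCS path
    enable_caching="False",
)
```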
- -**NB**: Please note that the Cloud Function service account will need permission to: -- read from the bucket where the compiled pipeline is stored -- have "service account user" permission on the service account being used to run the Vertex pipelines -- subscribe to the pub/sub topic) - -## Testing the trigger -To run the unit tests developed for the trigger code, use the following make command: -``` -make test-trigger -``` diff --git a/pipelines/src/pipelines/trigger/__main__.py b/pipelines/src/pipelines/trigger/__main__.py deleted file mode 100644 index 93b22bb0..00000000 --- a/pipelines/src/pipelines/trigger/__main__.py +++ /dev/null @@ -1,5 +0,0 @@ -from .main import sandbox_run - - -if __name__ == "__main__": - sandbox_run() diff --git a/pipelines/src/pipelines/trigger/main.py b/pipelines/src/pipelines/trigger/main.py deleted file mode 100644 index 8aae023f..00000000 --- a/pipelines/src/pipelines/trigger/main.py +++ /dev/null @@ -1,222 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -import argparse -import base64 -import json -import logging -import os -import distutils.util -from typing import Optional, List - -from google.cloud import aiplatform - - -def cf_handler(event, context) -> aiplatform.PipelineJob: - """Handle the Pub/Sub message and make the call to trigger_pipeline_from_payload() - to trigger the ML pipeline on Vertex. Returns the PipelineJob object. - - Args: - event (dict): The dictionary with data specific to this type of - event. The `@type` field maps to - `type.googleapis.com/google.pubsub.v1.PubsubMessage`. - The `data` field maps to the PubsubMessage data - in a base64-encoded string. The `attributes` field maps - to the PubsubMessage attributes if any is present. - context (google.cloud.functions.Context): Metadata of triggering event - including `event_id` which maps to the PubsubMessage - messageId, `timestamp` which maps to the PubsubMessage - publishTime, `event_type` which maps to - `google.pubsub.topic.publish`, and `resource` which is - a dictionary that describes the service API endpoint - pubsub.googleapis.com, the triggering topic's name, and - the triggering event type - `type.googleapis.com/google.pubsub.v1.PubsubMessage`. - Returns: - aiplatform.PipelineJob - """ - - # take event["data"], base64-decode it and JSON decode to dict - event["data"] = base64.b64decode(event["data"]).decode("utf-8") - event["data"] = json.loads(event["data"]) - - return trigger_pipeline_from_payload(event) - - -def trigger_pipeline_from_payload(payload: dict) -> aiplatform.PipelineJob: - """Triggers a pipeline run from a payload dict, JSON pipeline definition, - and env variables. - Args: - payload (dict): payload containing attributes and data. - template_path (str): File path (local or GCS) of compiled pipeline definition. 
- """ - - payload = convert_payload(payload) - env = get_env() - - return trigger_pipeline( - project_id=env["project_id"], - location=env["location"], - template_path=payload["attributes"]["template_path"], - parameter_values=payload["data"], - pipeline_root=env["pipeline_root"], - service_account=env["service_account"], - encryption_spec_key_name=env["encryption_spec_key_name"], - network=env["network"], - enable_caching=payload["attributes"]["enable_caching"], - ) - - -def trigger_pipeline( - project_id: str, - location: str, - template_path: str, - pipeline_root: str, - service_account: str, - parameter_values: dict = {}, - encryption_spec_key_name: Optional[str] = None, - network: Optional[str] = None, - enable_caching: Optional[bool] = None, -) -> aiplatform.PipelineJob: - """Trigger the Vertex Pipeline run. - Args: - project_id (str): GCP Project ID in which to run the Vertex Pipeline - location (str): GCP region in which to run the Vertex Pipeline - template_path (str): local or GCS path containing the (JSON) KFP - pipeline definition - pipeline_root (str): GCS path to use as the pipeline root (for passing - metadata/artifacts within the pipeline) - parameter_values (dict): dictionary containing the input parameters - for the KFP pipeline - service_account (str): email address of the service account that - should be used to execute the ML pipeline in Vertex - encryption_spec_key_name (Optional[str]): Cloud KMS resource ID - of the customer managed encryption key (CMEK) that will protect the job - network (Optional[str]): name of Compute Engine network to - which the job should be visible - enable_caching (Optional[bool]): Whether to enable caching of pipeline - component results if component+inputs are the same. Defaults to None - (enable caching, except where disabled at a component level) - """ - - # Initialise API client - aiplatform.init(project=project_id, location=location) - - # Instantiate PipelineJob object - pl = aiplatform.pipeline_jobs.PipelineJob( - # Display name is required but seemingly not used - # see - # https://github.com/googleapis/python-aiplatform/blob/9dcf6fb0bc8144d819938a97edf4339fe6f2e1e6/google/cloud/aiplatform/pipeline_jobs.py#L260 # noqa - display_name=template_path, - enable_caching=enable_caching, - template_path=template_path, - parameter_values=parameter_values, - pipeline_root=pipeline_root, - encryption_spec_key_name=encryption_spec_key_name, - ) - - # Execute pipeline in Vertex - pl.submit( - service_account=service_account, - network=network, - ) - - return pl - - -def convert_payload(payload: dict) -> dict: - """ - Processes the payload dictionary. - Converts enable_caching and adds their defaults if they are missing. - - Args: - payload (dict): Cloud Function event payload, - or the contents of a payload JSON file - """ - - # make a copy of the payload so we are not modifying the original - payload = payload.copy() - - # if payload["data"] is missing, add it as empty dict - payload["data"] = payload.get("data", {}) - - # if enable_caching value is present and not None, convert from str to bool - # otherwise, it needs to be None - if payload["attributes"].get("enable_caching") is not None: - payload["attributes"]["enable_caching"] = bool( - distutils.util.strtobool(payload["attributes"]["enable_caching"]) - ) - else: - payload["attributes"]["enable_caching"] = None - - return payload - - -def get_env() -> dict: - """Get the necessary environment variables for pipeline runs, - and return them as a dictionary. 
- """ - - project_id = os.environ["VERTEX_PROJECT_ID"] - location = os.environ["VERTEX_LOCATION"] - pipeline_root = os.environ["VERTEX_PIPELINE_ROOT"] - service_account = os.environ["VERTEX_SA_EMAIL"] - # For CMEK and network, we want an empty string to become None, so we add "or None" - encryption_spec_key_name = os.environ.get("VERTEX_CMEK_IDENTIFIER") or None - network = os.environ.get("VERTEX_NETWORK") or None - - return { - "project_id": project_id, - "location": location, - "pipeline_root": pipeline_root, - "service_account": service_account, - "encryption_spec_key_name": encryption_spec_key_name, - "network": network, - } - - -def sandbox_run(args: List[str] = None) -> aiplatform.PipelineJob: - """Trigger a Vertex Pipeline run from a (local) compiled pipeline definition. - Returns the PipelineJob object of the triggered pipeline run. - Usage: python main.py --template_path=pipeline.json --enable_caching=true - """ - logging.basicConfig(level=logging.DEBUG) - - parser = argparse.ArgumentParser() - parser.add_argument( - "--template_path", help="Path to the compiled pipeline (JSON)", type=str - ) - parser.add_argument("--enable_caching", type=str, default=None) - - # Get commandline args - args = parser.parse_args(args) - - # If empty value for enable_caching provided on commandline default to None - if args.enable_caching == "": - args.enable_caching = None - - payload = { - "attributes": { - "template_path": args.template_path, - "enable_caching": args.enable_caching, - } - # "data" omitted as pipeline params are taken from the default args - # in compiled JSON pipeline - } - - return trigger_pipeline_from_payload(payload) - - -if __name__ == "__main__": - sandbox_run() diff --git a/pipelines/tests/xgboost/training/test_e2e.py b/pipelines/src/pipelines/utils/query.py similarity index 50% rename from pipelines/tests/xgboost/training/test_e2e.py rename to pipelines/src/pipelines/utils/query.py index b494fe9e..c25ddb39 100644 --- a/pipelines/tests/xgboost/training/test_e2e.py +++ b/pipelines/src/pipelines/utils/query.py @@ -1,4 +1,4 @@ -# Copyright 2022 Google LLC +# Copyright 2023 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -11,27 +11,22 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from pathlib import Path +from jinja2 import Template -from pipelines.xgboost.training.pipeline import xgboost_pipeline -from tests.e2e.test_e2e import pipeline_e2e_test - -def test_pipeline_run(enable_caching) -> None: +def generate_query(input_file: Path, **replacements) -> str: """ - Tests if pipeline is run successfully - Triggers pipeline synchronously. - Tests will fail if: - - Any errors are thrown during execution - - Any of the expected component outputs are empty (size == 0kb) - - Arguments: - None + Read input file and replace placeholder using Jinja. 
+ Args: + input_file (Path): input file to read + replacements: keyword arguments to use to replace placeholders Returns: - None + str: replaced content of input file """ - pipeline_e2e_test( - xgboost_pipeline, - enable_caching=enable_caching, - common_tasks={}, - ) + + with open(input_file, "r") as f: + query_template = f.read() + + return Template(query_template).render(**replacements) diff --git a/pipelines/src/pipelines/utils/trigger_pipeline.py b/pipelines/src/pipelines/utils/trigger_pipeline.py new file mode 100644 index 00000000..6d217020 --- /dev/null +++ b/pipelines/src/pipelines/utils/trigger_pipeline.py @@ -0,0 +1,112 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +import os +from google.cloud import aiplatform + + +def trigger_pipeline( + template_path: str, + display_name: str, + wait: bool = False, +) -> aiplatform.PipelineJob: + """Trigger a Vertex Pipeline run from a (local) compiled pipeline definition. + + Args: + template_path (str): file path to the compiled YAML pipeline + display_name (str): Display name to use for the PipelineJob + wait (bool): Wait for the pipeline to finish running + + Returns: + aiplatform.PipelineJob: the Vertex PipelineJob object + """ + project_id = os.environ["VERTEX_PROJECT_ID"] + location = os.environ["VERTEX_LOCATION"] + pipeline_root = os.environ["VERTEX_PIPELINE_ROOT"] + service_account = os.environ["VERTEX_SA_EMAIL"] + + enable_caching = os.environ.get("ENABLE_PIPELINE_CACHING") + if enable_caching is not None: + true_ = ["1", "true"] + false_ = ["0", "false"] + if enable_caching.lower() not in true_ + false_: + raise ValueError( + "ENABLE_PIPELINE_CACHING env variable must be " + "'true', 'false', '1' or '0', or not set." 
+ ) + enable_caching = enable_caching.lower() in true_ + + # For below options, we want an empty string to become None, so we add "or None" + encryption_spec_key_name = os.environ.get("VERTEX_CMEK_IDENTIFIER") or None + network = os.environ.get("VERTEX_NETWORK") or None + + # Instantiate PipelineJob object + pl = aiplatform.PipelineJob( + project=project_id, + location=location, + display_name=display_name, + enable_caching=enable_caching, + template_path=template_path, + pipeline_root=pipeline_root, + encryption_spec_key_name=encryption_spec_key_name, + ) + + # Execute pipeline in Vertex + pl.submit( + service_account=service_account, + network=network, + ) + + if wait: + # Wait for pipeline to finish running before returning + pl.wait() + + return pl + + +if __name__ == "__main__": + + parser = argparse.ArgumentParser() + parser.add_argument( + "--template_path", + help="Path to the compiled pipeline (YAML)", + type=str, + ) + parser.add_argument( + "--display_name", + help="Display name for the PipelineJob", + type=str, + ) + + parser.add_argument( + "--wait", + help="Wait for the pipeline to finish running", + type=str, + ) + # Get commandline args + args = parser.parse_args() + + if args.wait == "true": + wait = True + elif args.wait == "false": + wait = False + else: + raise ValueError("wait variable must be 'true' or 'false'") + + trigger_pipeline( + template_path=args.template_path, + display_name=args.display_name, + wait=wait, + ) diff --git a/pipelines/src/pipelines/utils/upload_pipeline.py b/pipelines/src/pipelines/utils/upload_pipeline.py new file mode 100644 index 00000000..35cc7d53 --- /dev/null +++ b/pipelines/src/pipelines/utils/upload_pipeline.py @@ -0,0 +1,58 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import argparse +from typing import List +from kfp.registry import RegistryClient + + +def upload_pipeline(host: str, file_name: str, tags: List[str]) -> tuple[str, str]: + """Upload a compiled YAML pipeline to Artifact Registry + + Args: + host (str): URL of the Artifact Registry repository + file_name (str): File path to the compiled YAML pipeline + tags (List[str]): List of tags to use for the uploaded pipeline + + Returns: + A tuple of the package name and the version. 
+    """
+
+    client = RegistryClient(
+        host=host,
+    )
+
+    return client.upload_pipeline(
+        file_name=file_name,
+        tags=tags,
+    )
+
+
+def main(args: List[str] = None):
+    """CLI entrypoint for the upload_pipeline.py script"""
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dest", type=str, required=True)
+    parser.add_argument("--yaml", type=str, required=True)
+    parser.add_argument("--tag", type=str, action="append")
+    parsed_args = parser.parse_args(args)
+
+    upload_pipeline(
+        host=parsed_args.dest,
+        file_name=parsed_args.yaml,
+        tags=parsed_args.tag,
+    )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/pipelines/src/pipelines/xgboost/README.md b/pipelines/src/pipelines/xgboost/README.md
deleted file mode 100644
index b40ec332..00000000
--- a/pipelines/src/pipelines/xgboost/README.md
+++ /dev/null
@@ -1,49 +0,0 @@
-# XGBoost Pipelines
-
-## Training pipeline
-
-The XGBoost training pipeline can be found in [`training/pipeline.py`](training/pipeline.py). Within the Kubeflow pipeline, [`train_xgboost_model`](../kfp_components/xgboost/train.py) is the main training component which contains the implementation of an XGB model with `scikit-learn` preprocessing. This component can then be wrapped in a custom kfp ContainerOp from [`google-cloud-pipeline-components`](https://github.com/kubeflow/pipelines/blob/master/components/google-cloud/google_cloud_pipeline_components/experimental/custom_job/utils.py) which submits a Vertex Training job with added flexibility for `machine_type`, `replica_count`, `accelerator_type` among other machine configurations.
-
-The training phase is preceded by a preprocessing phase where different transformations are applied to the training and evaluation data using scikit-learn preprocessing functions. The **preprocessing** step and the **training** step define the two components of the scikit-learn pipeline as shown in the diagram below.
-
-![Training process](../../docs/images/xgboost_architecture.png)
-
-## Preprocessing with Scikit-learn
-The 3 data transformation steps considered in the `train.py` script are:
-
-|Encoder|Description|Features|
-|:----|:----|:----|
-|[StandardScaler()](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.StandardScaler.html)|Centering and scaling numerical values|`dayofweek`, `hourofday`, `trip_distance`, `trip_miles`, `trip_seconds`|
-|[OneHotEncoder()](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.OneHotEncoder.html)|Encoding a chosen subset of categorical features as a one-hot numeric array|`payment_type`; new/unknown values in categorical features are represented as zeroes everywhere in the one-hot numeric array|
-|[OrdinalEncoder()](https://scikit-learn.org/stable/modules/generated/sklearn.preprocessing.OrdinalEncoder.html)|Encoding a chosen subset of categorical features as an integer array|`company`; new/unknown values in categorical features are assigned to an integer equal to the number of categories for that feature in the training set|
-
-More processing steps can be added to the pipeline. For more details, see the [official documentation](https://scikit-learn.org/stable/modules/preprocessing.html). Ensure that these additional pre-processing steps can handle new/unknown values in test data.
-
-## The XGBoost Model
-
-In our example implementation, we have a regression problem of predicting the total fare of a taxi trip in Chicago.
Thus, we use XGBRegressor, whose hyperparameters are defined in the variable `model_params` in the file [training/pipeline.py](training/pipeline.py).
-
-### Model Hyperparameters
-
-You can specify different hyperparameters through the `model_params` argument of `train_xgboost_model`, including:
- - `booster`: the type of booster (`gbtree`, a tree-based booster, is used by default).
- - `max_depth`: the depth of each tree.
- - `objective`: equivalent to the loss function (squared loss, `reg:squarederror`, is the default).
- - `min_split_loss`: the minimum loss reduction required to make a further partition on a leaf node of the tree.
-
-More hyperparameters can be used to customize your training. For more details, consult the [XGBoost documentation](https://xgboost.readthedocs.io/en/stable/parameter.html).
-
-### Model artifacts
-Two model artifacts are generated when we run the training job:
- - `model.joblib`: the model is exported to a GCS file as a [joblib](https://joblib.readthedocs.io/en/latest/why.html#benefits-of-pipelines) object.
- - `Eval_result`: the evaluation metrics are exported to GCS as a JSON file.
-
-![xgboost_component_model&metrics_artifact](../../docs/images/xgboost_component_model&metrics_artifact.png)
-
-### Model test/evaluation
-Once the model is trained, it is used to get challenger predictions for evaluation purposes. By default, the pipeline uses the component [`predict_tensorflow_model`](../kfp_components/tensorflow/predict.py), which expects a single CSV file from which to create predictions for the test data. However, if you are working with larger test data, it is more efficient to replace it with a Google prebuilt component, [`ModelBatchPredictOp`](https://google-cloud-pipeline-components.readthedocs.io/en/google-cloud-pipeline-components-0.2.1/google_cloud_pipeline_components.aiplatform.html), to avoid a crash caused by memory overload.
-
-## Prediction pipeline
-The XGBoost prediction pipeline can be found in [prediction/pipeline.py](prediction/pipeline.py).
diff --git a/pipelines/src/pipelines/xgboost/prediction/assets/.gitkeep b/pipelines/src/pipelines/xgboost/prediction/assets/.gitkeep
deleted file mode 100644
index e69de29b..00000000
diff --git a/pipelines/src/pipelines/xgboost/prediction/pipeline.py b/pipelines/src/pipelines/xgboost/prediction/pipeline.py
deleted file mode 100644
index 0bd1ec91..00000000
--- a/pipelines/src/pipelines/xgboost/prediction/pipeline.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
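Before the prediction pipeline code below, a short sketch of the `OrdinalEncoder` behaviour described in the README above: a category unseen at training time is encoded as an integer equal to the number of known categories (the company names here are invented):

```
import numpy as np
from sklearn.preprocessing import OrdinalEncoder

train = np.array([["city service"], ["flash cab"], ["sun taxi"]])
encoder = OrdinalEncoder(handle_unknown="use_encoded_value", unknown_value=3)
encoder.fit(train)  # three known categories -> codes 0, 1, 2

# A known company and an unseen one: the unseen company gets code 3
print(encoder.transform(np.array([["flash cab"], ["brand new co"]])))
# [[1.], [3.]]
```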
-
-import json
-import os
-import pathlib
-
-from kfp.v2 import compiler, dsl
-
-from pipelines import generate_query
-from bigquery_components import bq_query_to_table
-from vertex_components import lookup_model, model_batch_predict
-
-
-@dsl.pipeline(name="xgboost-prediction-pipeline")
-def xgboost_pipeline(
-    project_id: str = os.environ.get("VERTEX_PROJECT_ID"),
-    project_location: str = os.environ.get("VERTEX_LOCATION"),
-    ingestion_project_id: str = os.environ.get("VERTEX_PROJECT_ID"),
-    model_name: str = "simple_xgboost",
-    dataset_id: str = "preprocessing",
-    dataset_location: str = os.environ.get("VERTEX_LOCATION"),
-    ingestion_dataset_id: str = "chicago_taxi_trips",
-    timestamp: str = "2022-12-01 00:00:00",
-    batch_prediction_machine_type: str = "n1-standard-4",
-    batch_prediction_min_replicas: int = 3,
-    batch_prediction_max_replicas: int = 10,
-):
-    """
-    XGB prediction pipeline which:
-    1. Looks up the default model version (champion) and the
-        dataset which was used to train the model.
-    2. Runs a BatchPredictionJob with optional training-serving skew detection.
-
-    Args:
-        project_id (str): project id of the Google Cloud project
-        project_location (str): location of the Google Cloud project
-        ingestion_project_id (str): project id containing the source bigquery data
-            for ingestion. This can be the same as `project_id` if the source data is
-            in the same project where the ML pipeline is executed.
-        model_name (str): name of model
-        dataset_id (str): id of BQ dataset used to store all staging data & predictions
-        dataset_location (str): location of dataset
-        ingestion_dataset_id (str): dataset id of ingestion data
-        timestamp (str): Optional. Empty or a specific timestamp in ISO 8601 format
-            (YYYY-MM-DDThh:mm:ss.sss±hh:mm or YYYY-MM-DDThh:mm:ss).
-            If any time part is missing, it will be regarded as zero.
-        batch_prediction_machine_type (str): Machine type to be used for Vertex Batch
-            Prediction, e.g. n1-standard-4 or n1-standard-16
-        batch_prediction_min_replicas (int): Minimum number of machines to distribute
-            the Vertex Batch Prediction job over for horizontal scalability
-        batch_prediction_max_replicas (int): Maximum number of machines to distribute
-            the Vertex Batch Prediction job over for horizontal scalability.
- - Returns: - None - - """ - - # Create variables to ensure the same arguments are passed - # into different components of the pipeline - time_column = "trip_start_timestamp" - ingestion_table = "taxi_trips" - table_suffix = "_xgb_prediction" # suffix to table names - ingested_table = "ingested_data" + table_suffix - monitoring_alert_email_addresses = [] - monitoring_skew_config = {"defaultSkewThreshold": {"value": 0.001}} - - # generate sql queries which are used in ingestion and preprocessing - # operations - queries_folder = pathlib.Path(__file__).parent / "queries" - - ingest_query = generate_query( - queries_folder / "ingest.sql", - source_dataset=f"{ingestion_project_id}.{ingestion_dataset_id}", - source_table=ingestion_table, - filter_column=time_column, - filter_start_value=timestamp, - ) - - # data ingestion and preprocessing operations - kwargs = dict( - bq_client_project_id=project_id, - destination_project_id=project_id, - dataset_id=dataset_id, - dataset_location=dataset_location, - query_job_config=json.dumps(dict(write_disposition="WRITE_TRUNCATE")), - ) - ingest = bq_query_to_table( - query=ingest_query, table_id=ingested_table, **kwargs - ).set_display_name("Ingest data") - - # lookup champion model - champion_model = lookup_model( - model_name=model_name, - project_location=project_location, - project_id=project_id, - fail_on_model_not_found=True, - ).set_display_name("Look up champion model") - - # batch predict from BigQuery to BigQuery - bigquery_source_input_uri = f"bq://{project_id}.{dataset_id}.{ingested_table}" - bigquery_destination_output_uri = f"bq://{project_id}.{dataset_id}" - - batch_prediction = ( - model_batch_predict( - model=champion_model.outputs["model"], - job_display_name="my-xgboost-batch-prediction-job", - project_location=project_location, - project_id=project_id, - source_uri=bigquery_source_input_uri, - destination_uri=bigquery_destination_output_uri, - source_format="bigquery", - destination_format="bigquery", - machine_type=batch_prediction_machine_type, - starting_replica_count=batch_prediction_min_replicas, - max_replica_count=batch_prediction_max_replicas, - monitoring_training_dataset=champion_model.outputs["training_dataset"], - monitoring_alert_email_addresses=monitoring_alert_email_addresses, - monitoring_skew_config=monitoring_skew_config, - ) - .after(ingest) - .set_display_name("Batch prediction job") - ) - - -if __name__ == "__main__": - compiler.Compiler().compile( - pipeline_func=xgboost_pipeline, - package_path="prediction.json", - type_check=False, - ) diff --git a/pipelines/src/pipelines/xgboost/prediction/queries/ingest.sql b/pipelines/src/pipelines/xgboost/prediction/queries/ingest.sql deleted file mode 100644 index e01a7c57..00000000 --- a/pipelines/src/pipelines/xgboost/prediction/queries/ingest.sql +++ /dev/null @@ -1,56 +0,0 @@ --- Copyright 2022 Google LLC - --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at - --- https://www.apache.org/licenses/LICENSE-2.0 - --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
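On the `monitoring_skew_config` used in the batch prediction step above: Vertex AI skew detection takes a default threshold that applies to every feature and, as an assumption based on the Vertex AI skew-detection config rather than on code in this repo, an optional per-feature `skewThresholds` map that overrides it:

```
monitoring_skew_config = {
    # applies to all features without an explicit threshold
    "defaultSkewThreshold": {"value": 0.001},
    # assumed per-feature override, mirroring the Vertex AI API shape:
    # "skewThresholds": {"trip_distance": {"value": 0.01}},
}
```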
- --- Treat "filter_start_value" as the current time, unless it is empty then use CURRENT_DATETIME() instead --- This allows us to set the filter_start_value to a specific time for testing or for backfill -with filter_start_values as ( - SELECT - IF("{{ filter_start_value }}" = '', CURRENT_DATETIME(), CAST("{{ filter_start_value }}" AS DATETIME)) as filter_start_value -) --- Ingest data between 2 and 3 months ago -,filtered_data as ( - SELECT - * - FROM `{{ source_dataset }}.{{ source_table }}`, filter_start_values - WHERE - DATE({{ filter_column }}) BETWEEN - DATE_SUB(DATE(CAST(filter_start_values.filter_start_value as DATETIME)), INTERVAL 3 MONTH) AND - DATE_SUB(DATE(filter_start_value), INTERVAL 2 MONTH) -) --- Use the average trip_seconds as a replacement for NULL or 0 values -,mean_time as ( - SELECT CAST(avg(trip_seconds) AS INT64) as avg_trip_seconds - FROM filtered_data -) - -SELECT - CAST(EXTRACT(DAYOFWEEK FROM trip_start_timestamp) AS FLOAT64) AS dayofweek, - CAST(EXTRACT(HOUR FROM trip_start_timestamp) AS FLOAT64) AS hourofday, - ST_DISTANCE( - ST_GEOGPOINT(pickup_longitude, pickup_latitude), - ST_GEOGPOINT(dropoff_longitude, dropoff_latitude)) AS trip_distance, - trip_miles, - CAST( CASE WHEN trip_seconds is NULL then m.avg_trip_seconds - WHEN trip_seconds <= 0 then m.avg_trip_seconds - ELSE trip_seconds - END AS FLOAT64) AS trip_seconds, - payment_type, - company, -FROM filtered_data as t, mean_time as m -WHERE - trip_miles > 0 AND fare > 0 AND fare < 1500 - {% for field in ["fare", "trip_start_timestamp", "pickup_longitude", - "pickup_latitude", "dropoff_longitude", "dropoff_latitude","payment_type","company"] %} - AND `{{ field }}` IS NOT NULL - {% endfor %} diff --git a/pipelines/src/pipelines/xgboost/training/assets/.gitkeep b/pipelines/src/pipelines/xgboost/training/assets/.gitkeep deleted file mode 100644 index e69de29b..00000000 diff --git a/pipelines/src/pipelines/xgboost/training/assets/train_xgb_model.py b/pipelines/src/pipelines/xgboost/training/assets/train_xgb_model.py deleted file mode 100644 index 31d95247..00000000 --- a/pipelines/src/pipelines/xgboost/training/assets/train_xgb_model.py +++ /dev/null @@ -1,125 +0,0 @@ -import argparse -import joblib -import json -import os -import logging - -import numpy as np -import pandas as pd -from sklearn import metrics -from sklearn.compose import ColumnTransformer -from sklearn.pipeline import Pipeline -from sklearn.preprocessing import StandardScaler, OrdinalEncoder, OneHotEncoder -from xgboost import XGBRegressor - -logging.basicConfig(level=logging.DEBUG) - - -NUM_COLS = ["dayofweek", "hourofday", "trip_distance", "trip_miles", "trip_seconds"] -ORD_COLS = ["company"] -OHE_COLS = ["payment_type"] - - -def split_xy(df: pd.DataFrame, label: str) -> (pd.DataFrame, pd.Series): - """Split dataframe into X and y.""" - return df.drop(columns=[label]), df[label] - - -def indices_in_list(elements: list, base_list: list) -> list: - """Get indices of specific elements in a base list""" - return [idx for idx, elem in enumerate(base_list) if elem in elements] - - -parser = argparse.ArgumentParser() -parser.add_argument("--train_data", type=str, required=True) -parser.add_argument("--valid_data", type=str, required=True) -parser.add_argument("--test_data", type=str, required=True) -parser.add_argument("--model", default=os.getenv("AIP_MODEL_DIR"), type=str, help="") -parser.add_argument("--metrics", type=str, required=True) -parser.add_argument("--hparams", default={}, type=json.loads) -args = parser.parse_args() - 
-logging.info("Read csv files into dataframes") -df_train = pd.read_csv(args.train_data) -df_valid = pd.read_csv(args.valid_data) -df_test = pd.read_csv(args.test_data) - -logging.info("Split dataframes") -label = args.hparams["label"] -X_train, y_train = split_xy(df_train, label) -X_valid, y_valid = split_xy(df_valid, label) -X_test, y_test = split_xy(df_test, label) - -logging.info("Get the number of unique categories for ordinal encoded columns") -ordinal_columns = X_train[ORD_COLS] -n_unique_cat = ordinal_columns.nunique() - -logging.info("Get indices of columns in base data") -col_list = X_train.columns.tolist() -num_indices = indices_in_list(NUM_COLS, col_list) -cat_indices_onehot = indices_in_list(OHE_COLS, col_list) -cat_indices_ordinal = indices_in_list(ORD_COLS, col_list) - -ordinal_transformers = [ - ( - f"ordinal encoding for {ord_col}", - OrdinalEncoder( - handle_unknown="use_encoded_value", unknown_value=n_unique_cat[ord_col] - ), - [ord_index], - ) - for ord_col in ORD_COLS - for ord_index in cat_indices_ordinal -] -all_transformers = [ - ("numeric_scaling", StandardScaler(), num_indices), - ( - "one_hot_encoding", - OneHotEncoder(handle_unknown="ignore"), - cat_indices_onehot, - ), -] + ordinal_transformers - -logging.info("Build sklearn preprocessing steps") -preprocesser = ColumnTransformer(transformers=all_transformers) -logging.info("Build sklearn pipeline with XGBoost model") -xgb_model = XGBRegressor(**args.hparams) - -pipeline = Pipeline( - steps=[("feature_engineering", preprocesser), ("train_model", xgb_model)] -) - -logging.info("Transform validation data") -valid_preprocesser = preprocesser.fit(X_train) -X_valid_transformed = valid_preprocesser.transform(X_valid) - -logging.info("Fit model") -pipeline.fit(X_train, y_train, train_model__eval_set=[(X_valid_transformed, y_valid)]) - -logging.info("Predict test data") -y_pred = pipeline.predict(X_test) -y_pred = y_pred.clip(0) - -metrics = { - "problemType": "regression", - "rootMeanSquaredError": np.sqrt(metrics.mean_squared_error(y_test, y_pred)), - "meanAbsoluteError": metrics.mean_absolute_error(y_test, y_pred), - "meanAbsolutePercentageError": metrics.mean_absolute_percentage_error( - y_test, y_pred - ), - "rSquared": metrics.r2_score(y_test, y_pred), - "rootMeanSquaredLogError": np.sqrt(metrics.mean_squared_log_error(y_test, y_pred)), -} - -try: - model_path = args.model.replace("gs://", "/gcs/") - logging.info(f"Save model to: {model_path}") - os.makedirs(model_path, exist_ok=True) - joblib.dump(pipeline, model_path + "model.joblib") -except Exception as e: - print(e) - raise e - -logging.info(f"Metrics: {metrics}") -with open(args.metrics, "w") as fp: - json.dump(metrics, fp) diff --git a/pipelines/src/pipelines/xgboost/training/pipeline.py b/pipelines/src/pipelines/xgboost/training/pipeline.py deleted file mode 100644 index 1db2023c..00000000 --- a/pipelines/src/pipelines/xgboost/training/pipeline.py +++ /dev/null @@ -1,259 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
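One detail worth calling out in the training script above: predictions are clipped at zero before the metrics are computed, because `mean_squared_log_error` is undefined for negative values (and a negative fare is meaningless anyway). A minimal illustration with invented numbers:

```
import numpy as np
from sklearn import metrics

y_test = np.array([10.0, 20.0])
y_pred = np.array([-0.5, 19.0]).clip(0)  # negative predictions would break MSLE

rmsle = np.sqrt(metrics.mean_squared_log_error(y_test, y_pred))
print(f"RMSLE: {rmsle:.3f}")
```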
-
-import json
-import os
-import pathlib
-
-from kfp.v2 import compiler, dsl
-from pipelines import generate_query
-from bigquery_components import bq_query_to_table, extract_bq_to_dataset
-from vertex_components import (
-    lookup_model,
-    custom_train_job,
-    import_model_evaluation,
-    update_best_model,
-)
-
-
-@dsl.pipeline(name="xgboost-train-pipeline")
-def xgboost_pipeline(
-    project_id: str = os.environ.get("VERTEX_PROJECT_ID"),
-    project_location: str = os.environ.get("VERTEX_LOCATION"),
-    ingestion_project_id: str = os.environ.get("VERTEX_PROJECT_ID"),
-    model_name: str = "simple_xgboost",
-    dataset_id: str = "preprocessing",
-    dataset_location: str = os.environ.get("VERTEX_LOCATION"),
-    ingestion_dataset_id: str = "chicago_taxi_trips",
-    timestamp: str = "2022-12-01 00:00:00",
-    staging_bucket: str = os.environ.get("VERTEX_PIPELINE_ROOT"),
-    pipeline_files_gcs_path: str = os.environ.get("PIPELINE_FILES_GCS_PATH"),
-    test_dataset_uri: str = "",
-):
-    """
-    XGB training pipeline which:
-    1. Splits and extracts a dataset from BQ to GCS
-    2. Trains a model via Vertex AI CustomTrainingJob
-    3. Evaluates the model against the current champion model
-    4. If better, the model becomes the new default model
-
-    Args:
-        project_id (str): project id of the Google Cloud project
-        project_location (str): location of the Google Cloud project
-        ingestion_project_id (str): project id containing the source bigquery data
-            for ingestion. This can be the same as `project_id` if the source data is
-            in the same project where the ML pipeline is executed.
-        model_name (str): name of model
-        dataset_id (str): id of BQ dataset used to store all staging data & predictions
-        dataset_location (str): location of dataset
-        ingestion_dataset_id (str): dataset id of ingestion data
-        timestamp (str): Optional. Empty or a specific timestamp in ISO 8601 format
-            (YYYY-MM-DDThh:mm:ss.sss±hh:mm or YYYY-MM-DDThh:mm:ss).
-            If any time part is missing, it will be regarded as zero.
-        staging_bucket (str): Staging bucket for pipeline artifacts.
-        pipeline_files_gcs_path (str): GCS path where the pipeline files are located.
-        test_dataset_uri (str): Optional. GCS URI of static held-out test dataset.
- """ - - # Create variables to ensure the same arguments are passed - # into different components of the pipeline - label_column_name = "total_fare" - time_column = "trip_start_timestamp" - ingestion_table = "taxi_trips" - table_suffix = "_xgb_training" # suffix to table names - ingested_table = "ingested_data" + table_suffix - preprocessed_table = "preprocessed_data" + table_suffix - train_table = "train_data" + table_suffix - valid_table = "valid_data" + table_suffix - test_table = "test_data" + table_suffix - primary_metric = "rootMeanSquaredError" - train_script_uri = f"{pipeline_files_gcs_path}/training/assets/train_xgb_model.py" - hparams = dict( - n_estimators=200, - early_stopping_rounds=10, - objective="reg:squarederror", - booster="gbtree", - learning_rate=0.3, - min_split_loss=0, - max_depth=6, - label=label_column_name, - ) - - # generate sql queries which are used in ingestion and preprocessing - # operations - - queries_folder = pathlib.Path(__file__).parent / "queries" - - ingest_query = generate_query( - queries_folder / "ingest.sql", - source_dataset=f"{ingestion_project_id}.{ingestion_dataset_id}", - source_table=ingestion_table, - filter_column=time_column, - target_column=label_column_name, - filter_start_value=timestamp, - ) - split_train_query = generate_query( - queries_folder / "sample.sql", - source_dataset=dataset_id, - source_table=ingested_table, - num_lots=10, - lots=tuple(range(8)), - ) - split_valid_query = generate_query( - queries_folder / "sample.sql", - source_dataset=dataset_id, - source_table=ingested_table, - num_lots=10, - lots="(8)", - ) - data_cleaning_query = generate_query( - queries_folder / "engineer_features.sql", - source_dataset=dataset_id, - source_table=train_table, - ) - split_test_query = generate_query( - queries_folder / "sample.sql", - source_dataset=dataset_id, - source_table=ingested_table, - num_lots=10, - lots="(9)", - ) - - # data ingestion and preprocessing operations - - kwargs = dict( - bq_client_project_id=project_id, - destination_project_id=project_id, - dataset_id=dataset_id, - dataset_location=dataset_location, - query_job_config=json.dumps(dict(write_disposition="WRITE_TRUNCATE")), - ) - ingest = bq_query_to_table( - query=ingest_query, table_id=ingested_table, **kwargs - ).set_display_name("Ingest data") - - split_train_data = ( - bq_query_to_table(query=split_train_query, table_id=train_table, **kwargs) - .after(ingest) - .set_display_name("Split train data") - ) - split_valid_data = ( - bq_query_to_table(query=split_valid_query, table_id=valid_table, **kwargs) - .after(ingest) - .set_display_name("Split validation data") - ) - split_test_data = ( - bq_query_to_table(query=split_test_query, table_id=test_table, **kwargs) - .after(ingest) - .set_display_name("Split test data") - ) - data_cleaning = ( - bq_query_to_table( - query=data_cleaning_query, table_id=preprocessed_table, **kwargs - ) - .after(split_train_data) - .set_display_name("Clean data") - ) - - # data extraction to gcs - - train_dataset = ( - extract_bq_to_dataset( - bq_client_project_id=project_id, - source_project_id=project_id, - dataset_id=dataset_id, - table_name=preprocessed_table, - dataset_location=dataset_location, - ) - .after(data_cleaning) - .set_display_name("Extract train data to storage") - ).outputs["dataset"] - valid_dataset = ( - extract_bq_to_dataset( - bq_client_project_id=project_id, - source_project_id=project_id, - dataset_id=dataset_id, - table_name=valid_table, - dataset_location=dataset_location, - ) - .after(split_valid_data) - 
.set_display_name("Extract validation data to storage") - ).outputs["dataset"] - test_dataset = ( - extract_bq_to_dataset( - bq_client_project_id=project_id, - source_project_id=project_id, - dataset_id=dataset_id, - table_name=test_table, - dataset_location=dataset_location, - destination_gcs_uri=test_dataset_uri, - ) - .after(split_test_data) - .set_display_name("Extract test data to storage") - .set_caching_options(False) - ).outputs["dataset"] - - existing_model = ( - lookup_model( - model_name=model_name, - project_location=project_location, - project_id=project_id, - fail_on_model_not_found=False, - ) - .set_display_name("Lookup past model") - .outputs["model_resource_name"] - ) - - train_model = custom_train_job( - train_script_uri=train_script_uri, - train_data=train_dataset, - valid_data=valid_dataset, - test_data=test_dataset, - project_id=project_id, - project_location=project_location, - model_display_name=model_name, - train_container_uri="europe-docker.pkg.dev/vertex-ai/training/scikit-learn-cpu.0-23:latest", # noqa: E501 - serving_container_uri="europe-docker.pkg.dev/vertex-ai/prediction/sklearn-cpu.0-24:latest", # noqa: E501 - hparams=hparams, - requirements=["scikit-learn==0.24.0"], - staging_bucket=staging_bucket, - parent_model=existing_model, - ).set_display_name("Train model") - - evaluation = import_model_evaluation( - model=train_model.outputs["model"], - metrics=train_model.outputs["metrics"], - test_dataset=test_dataset, - pipeline_job_id="{{$.pipeline_job_name}}", - project_location=project_location, - ).set_display_name("Import evaluation") - - with dsl.Condition(existing_model != "", "champion-exists"): - update_best_model( - challenger=train_model.outputs["model"], - challenger_evaluation=evaluation.outputs["model_evaluation"], - parent_model=existing_model, - eval_metric=primary_metric, - eval_lower_is_better=True, - project_id=project_id, - project_location=project_location, - ).set_display_name("Update best model") - - -if __name__ == "__main__": - compiler.Compiler().compile( - pipeline_func=xgboost_pipeline, - package_path="training.json", - type_check=False, - ) diff --git a/pipelines/src/pipelines/xgboost/training/queries/engineer_features.sql b/pipelines/src/pipelines/xgboost/training/queries/engineer_features.sql deleted file mode 100644 index 148d9ce7..00000000 --- a/pipelines/src/pipelines/xgboost/training/queries/engineer_features.sql +++ /dev/null @@ -1,4 +0,0 @@ -/* The Purpose of this query is to clean the data before the training*/ -SELECT - * -FROM `{{ source_dataset }}.{{ source_table }}` diff --git a/pipelines/src/pipelines/xgboost/training/queries/ingest.sql b/pipelines/src/pipelines/xgboost/training/queries/ingest.sql deleted file mode 100644 index de9459fc..00000000 --- a/pipelines/src/pipelines/xgboost/training/queries/ingest.sql +++ /dev/null @@ -1,57 +0,0 @@ --- Copyright 2022 Google LLC - --- Licensed under the Apache License, Version 2.0 (the "License"); --- you may not use this file except in compliance with the License. --- You may obtain a copy of the License at - --- https://www.apache.org/licenses/LICENSE-2.0 - --- Unless required by applicable law or agreed to in writing, software --- distributed under the License is distributed on an "AS IS" BASIS, --- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. --- See the License for the specific language governing permissions and --- limitations under the License. 
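The removed `queries/*.sql` files below are Jinja templates, rendered by `generate_query` with the keyword arguments shown in the pipeline above. That helper's implementation is not part of this diff; a plausible sketch of the rendering step using `jinja2`, with placeholder parameter values:

```python
# Plausible sketch of how a query template like ingest.sql (below) gets rendered.
# The helper name and all parameter values here are illustrative assumptions.
from pathlib import Path

import jinja2


def render_query(template_path: Path, **params) -> str:
    """Render a Jinja-templated SQL file with the given parameters."""
    return jinja2.Template(template_path.read_text()).render(**params)


sql = render_query(
    Path("queries/ingest.sql"),
    source_dataset="my-project.chicago_taxi_trips",
    source_table="taxi_trips",
    filter_column="trip_start_timestamp",
    target_column="total_fare",
    filter_start_value="2022-12-01 00:00:00",
)
```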
- --- Treat "filter_start_value" as the current time, unless it is empty then use CURRENT_DATETIME() instead --- This allows us to set the filter_start_value to a specific time for testing or for backfill -with filter_start_values as ( - SELECT - IF("{{ filter_start_value }}" = '', CURRENT_DATETIME(), CAST("{{ filter_start_value }}" AS DATETIME)) as filter_start_value -) --- Ingest data between 2 and 3 months ago -,filtered_data as ( - SELECT - * - FROM `{{ source_dataset }}.{{ source_table }}`, filter_start_values - WHERE - DATE({{ filter_column }}) BETWEEN - DATE_SUB(DATE(CAST(filter_start_values.filter_start_value as DATETIME)), INTERVAL 3 MONTH) AND - DATE_SUB(DATE(filter_start_value), INTERVAL 2 MONTH) -) --- Use the average trip_seconds as a replacement for NULL or 0 values -,mean_time as ( - SELECT CAST(avg(trip_seconds) AS INT64) as avg_trip_seconds - FROM filtered_data -) - -SELECT - CAST(EXTRACT(DAYOFWEEK FROM trip_start_timestamp) AS FLOAT64) AS dayofweek, - CAST(EXTRACT(HOUR FROM trip_start_timestamp) AS FLOAT64) AS hourofday, - ST_DISTANCE( - ST_GEOGPOINT(pickup_longitude, pickup_latitude), - ST_GEOGPOINT(dropoff_longitude, dropoff_latitude)) AS trip_distance, - trip_miles, - CAST( CASE WHEN trip_seconds is NULL then m.avg_trip_seconds - WHEN trip_seconds <= 0 then m.avg_trip_seconds - ELSE trip_seconds - END AS FLOAT64) AS trip_seconds, - payment_type, - company, - (fare + tips + tolls + extras) AS `{{ target_column }}`, -FROM filtered_data as t, mean_time as m -WHERE - trip_miles > 0 AND fare > 0 AND fare < 1500 - {% for field in ["fare", "trip_start_timestamp", "pickup_longitude", - "pickup_latitude", "dropoff_longitude", "dropoff_latitude","payment_type","company"] %} - AND `{{ field }}` IS NOT NULL - {% endfor %} diff --git a/pipelines/src/pipelines/xgboost/training/queries/sample.sql b/pipelines/src/pipelines/xgboost/training/queries/sample.sql deleted file mode 100644 index bb2dc9ce..00000000 --- a/pipelines/src/pipelines/xgboost/training/queries/sample.sql +++ /dev/null @@ -1,6 +0,0 @@ -SELECT * -FROM - `{{ source_dataset }}.{{ source_table }}` AS t -WHERE - MOD(ABS(FARM_FINGERPRINT(TO_JSON_STRING(t))), - {{ num_lots }}) IN {{ lots }} diff --git a/pipelines/tests/e2e/test_e2e.py b/pipelines/tests/e2e/test_e2e.py deleted file mode 100644 index d4c70393..00000000 --- a/pipelines/tests/e2e/test_e2e.py +++ /dev/null @@ -1,311 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
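`sample.sql` above produces a deterministic train/validation/test split by hashing each whole row and bucketing the hash into `num_lots` lots. A rough Python analogue of the idea, with `hashlib.sha256` standing in for BigQuery's `FARM_FINGERPRINT` (so the actual lot assignments will differ from BigQuery's):

```python
# Rough analogue of MOD(ABS(FARM_FINGERPRINT(TO_JSON_STRING(t))), num_lots).
# hashlib is not FarmHash, so this won't reproduce BigQuery's assignments.
import hashlib
import json


def assign_lot(row: dict, num_lots: int = 10) -> int:
    """Deterministically map a row to one of num_lots lots."""
    row_json = json.dumps(row, sort_keys=True)
    digest = hashlib.sha256(row_json.encode("utf-8")).digest()
    return int.from_bytes(digest[:8], "big") % num_lots


rows = [{"trip_miles": float(m)} for m in range(100)]
train = [r for r in rows if assign_lot(r) in range(8)]  # lots 0-7, ~80%
valid = [r for r in rows if assign_lot(r) == 8]         # lot 8, ~10%
test = [r for r in rows if assign_lot(r) == 9]          # lot 9, ~10%
```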
-
-import logging
-from typing import Callable
-
-import pytest
-import os
-from google.cloud import storage
-from kfp.v2 import compiler
-
-from pipelines.trigger.main import trigger_pipeline_from_payload
-
-project_id = os.environ["VERTEX_PROJECT_ID"]
-project_location = os.environ["VERTEX_LOCATION"]
-
-
-def split_output_uri(output_uri: str):
-    """
-    Splits an output uri into its bucket name and blob path
-    Args: output_uri: (str) The output uri to split
-    Returns: A tuple of (bucket_name, blob_path)
-    """
-    output_uri_noprefix = output_uri[5:]  # remove the gs://
-    output_uri_noprefix_split = output_uri_noprefix.split("/")
-    bucket_name = output_uri_noprefix_split[0]
-    blob_path = "/".join(output_uri_noprefix_split[1:])
-    return (bucket_name, blob_path)
-
-
-def test_gcs_uri(output_uri: str, storage_client):
-    """
-    Tests whether a uri exists on GCS. If not, check whether it is a folder path.
-    Args:
-        output_uri: (str) The output_uri to test existence of
-        storage_client: (GCS client object) The GCS client for the project
-    Returns: (int)
-        None if the output_uri is not a file or a folder and therefore does not exist,
-        the URI's size otherwise (folder size if it is a folder)
-    """
-    bucket_name, blob_path = split_output_uri(output_uri)
-    bucket = storage_client.get_bucket(bucket_name)
-    blob = bucket.get_blob(blob_path)
-    # If not a file, assume it's a folder and sum the size of the blobs inside it
-    if not blob:
-        # All files in the folder would have a prefix of the folder path
-        blob_contents = storage_client.list_blobs(bucket_name, prefix=blob_path)
-        if not blob_contents:
-            return None
-        else:
-            folder_size = 0
-            for blob in blob_contents:
-                folder_size += blob.size
-            return folder_size
-    return blob.size
-
-
-def test_vertex_model_uri(
-    model_id: str,
-    model_location: str,
-) -> bool:
-    """
-    Tests whether a model artifact exists in Vertex AI.
-
-    Args:
-        model_id (str): Vertex model id
-        model_location (str): Vertex model location
-
-    Returns:
-        bool: True, if the model exists in Vertex AI.
-        Otherwise, False
-    """
-    from google.cloud.aiplatform import Model
-
-    try:
-        Model(
-            model_name=model_id,
-            project=project_id,
-            location=model_location,
-        )
-        return True
-    except Exception:
-        return False
-
-
-def test_vertex_endpoint_uri(
-    endpoint_id: str,
-    endpoint_location: str,
-) -> bool:
-    """
-    Tests whether an endpoint artifact exists in Vertex AI.
-
-    Args:
-        endpoint_id (str): Vertex endpoint id
-        endpoint_location (str): Vertex endpoint location
-
-    Returns:
-        bool: True, if the endpoint exists in Vertex AI.
-        Otherwise, False
-    """
-    from google.cloud.aiplatform import Endpoint
-
-    try:
-        Endpoint(
-            endpoint_name=endpoint_id,
-            project=project_id,
-            location=endpoint_location,
-        )
-        return True
-    except Exception:
-        return False
-
-
-def test_batch_prediction_job_uri(
-    job_id: str,
-    job_location: str,
-) -> bool:
-    """
-    Tests whether a batch prediction job exists in Vertex AI.
-
-    Args:
-        job_id (str): Vertex batch prediction job id
-        job_location (str): Vertex batch prediction location
-
-    Returns:
-        bool: True, if the prediction job exists in Vertex AI.
- if (len(actual_tasks_expected) == 0) and allow_tasks_missing: - logging.info(f"task: {expected_tasks} are not executed") - return None - else: - missing_tasks = [ - task_name - for task_name in expected_tasks.keys() - if task_name not in actual_tasks_expected.keys() - ] - assert len(missing_tasks) == 0, f"expected but missing tasks: {missing_tasks}" - - # test functions mappiing - test_functions = { - "models": test_vertex_model_uri, - "endpoints": test_vertex_endpoint_uri, - "batchPredictionJobs": test_batch_prediction_job_uri, - } - - # initialise GCS client for the project - storage_client = storage.Client(project=project_id) - - # 2. Outputs check - for task_name, expected_output in expected_tasks.items(): - actual_outputs = actual_tasks_expected[task_name] - # 2-1. if the output artifact are as expected - diff = set(expected_output).symmetric_difference(actual_outputs.keys()) - assert ( - len(diff) == 0 - ), f"task: {task_name}, \ - expected_output {expected_output}, \ - actual_outputs: {actual_outputs.keys()}" - for output_artifact in expected_output: - output_uri = actual_outputs[output_artifact] - - # 2-2. if output is generated successfully - # if there is no output uri, skip - if output_uri is None: - continue - # if the output uri is a gcs path, fetch the file - elif output_uri.startswith("gs://"): - file_size = test_gcs_uri(output_uri, storage_client) - assert ( - file_size > 0 - ), f"{output_artifact} in task {task_name} is not accessible" - # for Vertex resource check if the resource exists - else: - # all Vertex AI resource uri following this pattern: - # β€˜projects//locations///’ - artifact_type = output_uri.split("/")[-2] - object_existence = test_functions[artifact_type]( - output_uri.split("/")[-1], # id - output_uri.split("/")[-3], # location - ) - assert ( - object_existence - ), f"{output_artifact} in task {task_name} is not accessible \ - with {output_uri.split('/')[-1]}" diff --git a/pipelines/tests/tensorflow/prediction/test_e2e.py b/pipelines/tests/tensorflow/prediction/test_e2e.py deleted file mode 100644 index c0e56e8e..00000000 --- a/pipelines/tests/tensorflow/prediction/test_e2e.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pipelines.tensorflow.prediction.pipeline import tensorflow_pipeline -from tests.e2e.test_e2e import pipeline_e2e_test - - -def test_pipeline_run(enable_caching) -> None: - """ - Tests if pipeline is run successfully - Triggers pipeline synchronously. 
- Tests will fail if: - - Any errors are thrown during execution - - Any of the expected component outputs are empty (size == 0kb) - - Arguments: - None - - Returns: - None - """ - pipeline_e2e_test( - tensorflow_pipeline, - enable_caching=enable_caching, - common_tasks={}, - ) diff --git a/pipelines/tests/tensorflow/training/test_e2e.py b/pipelines/tests/tensorflow/training/test_e2e.py deleted file mode 100644 index fa306ec7..00000000 --- a/pipelines/tests/tensorflow/training/test_e2e.py +++ /dev/null @@ -1,37 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pipelines.tensorflow.training.pipeline import tensorflow_pipeline -from tests.e2e.test_e2e import pipeline_e2e_test - - -def test_pipeline_run(enable_caching) -> None: - """ - Tests if pipeline is run successfully - Triggers pipeline synchronously. - Tests will fail if: - - Any errors are thrown during execution - - Any of the expected component outputs are empty (size == 0kb) - - Arguments: - None - - Returns: - None - """ - pipeline_e2e_test( - tensorflow_pipeline, - common_tasks={}, - enable_caching=enable_caching, - ) diff --git a/pipelines/tests/trigger/test_main.py b/pipelines/tests/trigger/test_main.py deleted file mode 100644 index 90e7dbe7..00000000 --- a/pipelines/tests/trigger/test_main.py +++ /dev/null @@ -1,244 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -from unittest import mock -import pytest -import base64 -import json -import os - -from pipelines.trigger.main import cf_handler, convert_payload, get_env - - -def test_cf_handler(): - - test_parameter_values = {"key1": "val1", "key2": "val2"} - - encoded_param_values = str( - base64.b64encode(json.dumps(test_parameter_values).encode("utf-8")), "utf-8" - ) - - payload = { - "attributes": {"template_path": "gs://my-bucket/my-template-path.json"}, - "data": encoded_param_values, - } - - with mock.patch( - "pipelines.trigger.main.trigger_pipeline_from_payload" - ) as mock_trigger_pipeline_from_payload: - - cf_handler(payload, {}) - - mock_trigger_pipeline_from_payload.assert_called_with( - payload, - ) - - -def test_trigger_pipeline(): - - project_id = "my-test-project" - location = "europe-west4" - template_path = "gs://my-bucket/pipeline.json" - parameter_values = {"key1": "val1", "key2": "val2"} - pipeline_root = "gs://my-bucket/pipeline_root" - service_account = "my_service_account@my-test-project.iam.gserviceaccount.com" - encryption_spec_key_name = "my-cmek" - network = "my-network" - enable_caching = True - - with mock.patch("pipelines.trigger.main.aiplatform") as mock_aiplatform: - - from pipelines.trigger.main import trigger_pipeline - - pl = trigger_pipeline( - project_id=project_id, - location=location, - template_path=template_path, - parameter_values=parameter_values, - pipeline_root=pipeline_root, - service_account=service_account, - encryption_spec_key_name=encryption_spec_key_name, - network=network, - enable_caching=enable_caching, - ) - - mock_aiplatform.init.assert_called_with(project=project_id, location=location) - - mock_aiplatform.pipeline_jobs.PipelineJob.assert_called_with( - display_name=template_path, - enable_caching=enable_caching, - template_path=template_path, - parameter_values=parameter_values, - pipeline_root=pipeline_root, - encryption_spec_key_name=encryption_spec_key_name, - ) - - pl.submit.assert_called_with( - service_account=service_account, - network=network, - ) - - -@pytest.mark.parametrize( - "env_vars,test_input,expected", - [ - # enable_caching - ( - {}, - {"attributes": {"template_path": "pipeline.json"}}, - { - "attributes": { - "template_path": "pipeline.json", - "enable_caching": None, - }, - "data": {}, - }, - ), - # enable_caching true - ( - {}, - { - "attributes": { - "template_path": "pipeline.json", - "enable_caching": "true", - } - }, - { - "attributes": { - "template_path": "pipeline.json", - "enable_caching": True, - }, - "data": {}, - }, - ), - ], -) -def test_convert_payload(env_vars, test_input, expected): - - with mock.patch.dict(os.environ, env_vars, clear=True): - assert convert_payload(test_input) == expected - - -@pytest.mark.parametrize( - "env_vars,expected", - [ - # encryption_spec_key_name and network present - ( - { - "VERTEX_PROJECT_ID": "my-project-id", - "VERTEX_LOCATION": "europe-west4", - "VERTEX_PIPELINE_ROOT": "gs://my-pipeline-root/folder", - "VERTEX_SA_EMAIL": "my-sa@my-project-id.iam.gserviceaccount.com", - "VERTEX_CMEK_IDENTIFIER": "my-cmek", - "VERTEX_NETWORK": "my-network", - }, - { - "project_id": "my-project-id", - "location": "europe-west4", - "pipeline_root": "gs://my-pipeline-root/folder", - "service_account": "my-sa@my-project-id.iam.gserviceaccount.com", - "encryption_spec_key_name": "my-cmek", - "network": "my-network", - }, - ), - # encryption_spec_key_name and network are empty string - ( - { - "VERTEX_PROJECT_ID": "my-project-id", - "VERTEX_LOCATION": "europe-west4", - "VERTEX_PIPELINE_ROOT": 
"gs://my-pipeline-root/folder", - "VERTEX_SA_EMAIL": "my-sa@my-project-id.iam.gserviceaccount.com", - "VERTEX_CMEK_IDENTIFIER": "", - "VERTEX_NETWORK": "", - }, - { - "project_id": "my-project-id", - "location": "europe-west4", - "pipeline_root": "gs://my-pipeline-root/folder", - "service_account": "my-sa@my-project-id.iam.gserviceaccount.com", - "encryption_spec_key_name": None, - "network": None, - }, - ), - # encryption_spec_key_name and network absent - ( - { - "VERTEX_PROJECT_ID": "my-project-id", - "VERTEX_LOCATION": "europe-west4", - "VERTEX_PIPELINE_ROOT": "gs://my-pipeline-root/folder", - "VERTEX_SA_EMAIL": "my-sa@my-project-id.iam.gserviceaccount.com", - }, - { - "project_id": "my-project-id", - "location": "europe-west4", - "pipeline_root": "gs://my-pipeline-root/folder", - "service_account": "my-sa@my-project-id.iam.gserviceaccount.com", - "encryption_spec_key_name": None, - "network": None, - }, - ), - ], -) -def test_get_env(env_vars, expected, monkeypatch): - - for k, v in env_vars.items(): - monkeypatch.setenv(k, v) - - env = get_env() - - assert env == expected - - -@pytest.mark.parametrize( - "cmdline_args,expected_payload", - [ - ( - [ - "--template_path=pipeline.json", - "--enable_caching=True", - ], - { - "attributes": { - "template_path": "pipeline.json", - "enable_caching": "True", - }, - }, - ), - # enable_caching omitted on commandline - ( - [ - "--template_path=pipeline.json", - "--enable_caching=", - ], - { - "attributes": { - "template_path": "pipeline.json", - "enable_caching": None, - }, - }, - ), - ], -) -def test_sandbox_run(cmdline_args, expected_payload): - - # mock get_args(), and trigger_pipeline_from_payload() - with mock.patch( - "pipelines.trigger.main.trigger_pipeline_from_payload" - ) as mock_trigger_pipeline_from_payload: - - from pipelines.trigger.main import sandbox_run - - sandbox_run(cmdline_args) - - mock_trigger_pipeline_from_payload.assert_called_once_with(expected_payload) diff --git a/pipelines/tests/utils/test_trigger_pipeline.py b/pipelines/tests/utils/test_trigger_pipeline.py new file mode 100644 index 00000000..7de8731c --- /dev/null +++ b/pipelines/tests/utils/test_trigger_pipeline.py @@ -0,0 +1,114 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from unittest import mock +import pytest +import os + +from pipelines.utils.trigger_pipeline import trigger_pipeline + + +@pytest.mark.parametrize( + "test_enable_caching_input,enable_caching_expected", + # test for different values of "ENABLE_PIPELINE_CACHING" env var (left) + # vs actual value passed to PipelineJob enable_caching (right) + [ + ("True", True), + ("true", True), + ("False", False), + ("false", False), + ("1", True), + ("0", False), + (None, None), # ENABLE_PIPELINE_CACHING env var not set + ], +) +@mock.patch("google.cloud.aiplatform.PipelineJob") +def test_trigger_pipeline( + mock_pipelinejob, test_enable_caching_input, enable_caching_expected +): + + template_path = "path/to/template.yaml" + enable_caching = test_enable_caching_input + pipeline_root = "gs://my-pipeline-root/abc123" + encryption_spec_key_name = "dummy_encryption_spec_key_name" + project_id = "dummy_project_id" + region = "dummy_region" + service_account = "dummy_sa@dummy_project_id.iam.gserviceaccount.com" + network = "dummy_network" + display_name = "my first pipeline" + + env_vars = { + "VERTEX_PROJECT_ID": project_id, + "VERTEX_LOCATION": region, + "VERTEX_SA_EMAIL": service_account, + "VERTEX_NETWORK": network, + "VERTEX_CMEK_IDENTIFIER": encryption_spec_key_name, + "VERTEX_PIPELINE_ROOT": pipeline_root, + } + + if enable_caching is not None: + env_vars["ENABLE_PIPELINE_CACHING"] = enable_caching + + with mock.patch.dict(os.environ, env_vars): + + trigger_pipeline( + template_path=template_path, + display_name=display_name, + ) + + mock_pipelinejob.assert_called_with( + project=project_id, + location=region, + display_name=display_name, + enable_caching=enable_caching_expected, + template_path=template_path, + pipeline_root=pipeline_root, + encryption_spec_key_name=encryption_spec_key_name, + ) + + mock_pipelinejob.return_value.submit.assert_called_with( + service_account=service_account, + network=network, + ) + + +@mock.patch("google.cloud.aiplatform.PipelineJob") +def test_trigger_pipeline_invalid_caching_env_var(mock_pipelinejob): + + template_path = "path/to/template.yaml" + enable_caching = "invalid_value" + pipeline_root = "gs://my-pipeline-root/abc123" + encryption_spec_key_name = "dummy_encryption_spec_key_name" + project_id = "dummy_project_id" + region = "dummy_region" + service_account = "dummy_sa@dummy_project_id.iam.gserviceaccount.com" + network = "dummy_network" + display_name = "my first pipeline" + + env_vars = { + "VERTEX_PROJECT_ID": project_id, + "VERTEX_LOCATION": region, + "VERTEX_SA_EMAIL": service_account, + "VERTEX_NETWORK": network, + "VERTEX_CMEK_IDENTIFIER": encryption_spec_key_name, + "VERTEX_PIPELINE_ROOT": pipeline_root, + "ENABLE_PIPELINE_CACHING": enable_caching, + } + + with mock.patch.dict(os.environ, env_vars), pytest.raises(ValueError): + + trigger_pipeline( + template_path=template_path, + display_name=display_name, + ) diff --git a/pipelines/tests/utils/test_upload_pipeline.py b/pipelines/tests/utils/test_upload_pipeline.py new file mode 100644 index 00000000..00830696 --- /dev/null +++ b/pipelines/tests/utils/test_upload_pipeline.py @@ -0,0 +1,73 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock +import pytest +from pipelines.utils.upload_pipeline import main, upload_pipeline + + +@pytest.mark.parametrize( + "input_tags", + # test for different combinations of AR tags to use + # when uploading the compiled pipeline to Artifact Registry + [ + None, # no tag + ["dummy_tag1"], # single tag + ["dummy_tag1", "dummy_tag2"], # multiple tags + ], +) +@mock.patch("pipelines.utils.upload_pipeline.upload_pipeline") +def test_main(mock_upload_pipeline, input_tags): + yaml = "path/to/template.yaml" + dest = "https://europe-west1-kfp.pkg.dev/dummy-project/dummy-repo" + + args = [ + f"--dest={dest}", + f"--yaml={yaml}", + ] + + if input_tags: + args.extend([f"--tag={tag}" for tag in input_tags]) + + main(args) + + mock_upload_pipeline.assert_called_with( + host=dest, + file_name=yaml, + tags=input_tags, + ) + + +@mock.patch("pipelines.utils.upload_pipeline.RegistryClient") +def test_upload_pipeline(mock_registry_client): + + yaml = "path/to/template.yaml" + dest = "https://europe-west1-kfp.pkg.dev/dummy-project/dummy-repo" + + input_tags = ["dummy_tag1", "dummy_tag2"] + + upload_pipeline( + host=dest, + file_name=yaml, + tags=input_tags, + ) + + mock_registry_client.assert_called_with( + host=dest, + ) + + mock_registry_client.return_value.upload_pipeline.assert_called_with( + file_name=yaml, + tags=input_tags, + ) diff --git a/pipelines/tests/xgboost/prediction/test_e2e.py b/pipelines/tests/xgboost/prediction/test_e2e.py deleted file mode 100644 index 635fdd36..00000000 --- a/pipelines/tests/xgboost/prediction/test_e2e.py +++ /dev/null @@ -1,41 +0,0 @@ -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -from pipelines.xgboost.prediction.pipeline import xgboost_pipeline -from tests.e2e.test_e2e import pipeline_e2e_test - - -def test_pipeline_run(enable_caching) -> None: - """ - Tests if pipeline is run successfully - Triggers pipeline synchronously. 
- Tests will fail if: - - Any errors are thrown during execution - - Any of the expected component outputs are empty (size == 0kb) - - Arguments: - None - - Returns: - None - """ - - pipeline_json = "prediction.json" - - # tasks (components) and outputs for tasks which occur unconditionally - pipeline_e2e_test( - xgboost_pipeline, - enable_caching=enable_caching, - common_tasks={}, - ) diff --git a/terraform/envs/dev/main.tf b/terraform/envs/dev/main.tf index c7c46208..a5b9e07c 100644 --- a/terraform/envs/dev/main.tf +++ b/terraform/envs/dev/main.tf @@ -48,7 +48,7 @@ module "scheduler" { source = "../../modules/scheduled_pipelines" project_id = var.project_id region = var.region - name = each.value.name + name = each.key description = lookup(each.value, "description", null) schedule = each.value.schedule time_zone = lookup(each.value, "time_zone", "UTC") diff --git a/terraform/envs/dev/variables.tf b/terraform/envs/dev/variables.tf index 543dac1d..0bc03ded 100644 --- a/terraform/envs/dev/variables.tf +++ b/terraform/envs/dev/variables.tf @@ -27,7 +27,6 @@ variable "region" { variable "cloud_schedulers_config" { description = "Map of configurations for cloud scheduler jobs (each a different pipeline schedule)." type = map(object({ - name = string description = string schedule = string time_zone = string diff --git a/terraform/envs/prod/main.tf b/terraform/envs/prod/main.tf index c7c46208..a5b9e07c 100644 --- a/terraform/envs/prod/main.tf +++ b/terraform/envs/prod/main.tf @@ -48,7 +48,7 @@ module "scheduler" { source = "../../modules/scheduled_pipelines" project_id = var.project_id region = var.region - name = each.value.name + name = each.key description = lookup(each.value, "description", null) schedule = each.value.schedule time_zone = lookup(each.value, "time_zone", "UTC") diff --git a/terraform/envs/test/main.tf b/terraform/envs/test/main.tf index c7c46208..a5b9e07c 100644 --- a/terraform/envs/test/main.tf +++ b/terraform/envs/test/main.tf @@ -48,7 +48,7 @@ module "scheduler" { source = "../../modules/scheduled_pipelines" project_id = var.project_id region = var.region - name = each.value.name + name = each.key description = lookup(each.value, "description", null) schedule = each.value.schedule time_zone = lookup(each.value, "time_zone", "UTC") diff --git a/terraform/modules/cloudfunction/.python-version b/terraform/modules/cloudfunction/.python-version new file mode 100644 index 00000000..9f3d4c17 --- /dev/null +++ b/terraform/modules/cloudfunction/.python-version @@ -0,0 +1 @@ +3.9.16 diff --git a/terraform/modules/cloudfunction/README.md b/terraform/modules/cloudfunction/README.md new file mode 100644 index 00000000..6d152d84 --- /dev/null +++ b/terraform/modules/cloudfunction/README.md @@ -0,0 +1,28 @@ +# Cloud Function for triggering Vertex Pipelines + +This Terraform module deploys a Google Cloud Function that is used to trigger the Vertex Pipeline run. The Cloud Function is triggered by a Pub/Sub message, which is published by Cloud Scheduler. + +## Pub/Sub message format + +The Pub/Sub message data is a base64-encoded JSON string containing these fields: + +```python +{ + "template_path": , + "enable_caching": , + "pipeline_parameters": { # Pipeline input parameters + "foo": "bar", + } +} +``` + +To use this module, you just need to provide the Terraform variables - see [scheduling pipelines](/README.md#scheduling-pipelines) in the main README file for more details. + +## Development + +1. Change the working directory to this directory e.g. `cd terraform/modules/cloudfunction`. 
+1. Make sure that you are using the correct version of Python that matches `.python-version`. You can use [pyenv](https://github.com/pyenv/pyenv) to manage the different Python versions on your system. +1. Create a new virtual environment `python3 -m venv .venv`. +1. Activate the new virtual environment `source .venv/bin/activate`. +1. Install Python dependencies and dev dependencies `pip install -r src/requirements.txt && pip install -r src/requirements-dev.txt`. +1. To run unit tests, run `PYTHONPATH=. pytest`. diff --git a/terraform/modules/cloudfunction/function.tf b/terraform/modules/cloudfunction/function.tf index 4d8ae574..ab9c7ce0 100644 --- a/terraform/modules/cloudfunction/function.tf +++ b/terraform/modules/cloudfunction/function.tf @@ -16,11 +16,13 @@ locals { + cloudfunction_dir = "${path.module}/src" + # List of all the files in the Cloud Function source dir - cloudfunction_files = fileset(var.source_dir, "*") + cloudfunction_files = fileset(local.cloudfunction_dir, "*") # Get the MD5 of each file in the directory - file_hashes = [for f in local.cloudfunction_files : filemd5("${var.source_dir}/${f}")] + file_hashes = [for f in local.cloudfunction_files : filemd5("${local.cloudfunction_dir}/${f}")] # Concatenate hashes and then hash them for an overall hash # This method ensures that the hash only changes (and therefore the Cloud Function only re-deploys) @@ -31,12 +33,12 @@ locals { data "archive_file" "function_archive" { type = "zip" - source_dir = var.source_dir - output_path = "${var.function_name}.zip" + source_dir = local.cloudfunction_dir + output_path = "${var.function_name}_${local.cloudfunction_hash}.zip" } resource "google_storage_bucket_object" "archive" { - name = "${data.archive_file.function_archive.output_path}_${local.cloudfunction_hash}.zip" + name = data.archive_file.function_archive.output_path bucket = var.source_code_bucket_name source = data.archive_file.function_archive.output_path diff --git a/terraform/modules/cloudfunction/src/main.py b/terraform/modules/cloudfunction/src/main.py new file mode 100644 index 00000000..bec77328 --- /dev/null +++ b/terraform/modules/cloudfunction/src/main.py @@ -0,0 +1,96 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import base64 +import json +import os +import re +from distutils.util import strtobool +from google.cloud import aiplatform +from kfp.registry import RegistryClient + + +def cf_handler(event, context): + """Background Cloud Function to be triggered by Pub/Sub. + Args: + event (dict): The dictionary with data specific to this type of + event. The `@type` field maps to + `type.googleapis.com/google.pubsub.v1.PubsubMessage`. + The `data` field maps to the PubsubMessage data + in a base64-encoded string. The `attributes` field maps + to the PubsubMessage attributes if any is present. 
+ context (google.cloud.functions.Context): Metadata of triggering event + including `event_id` which maps to the PubsubMessage + messageId, `timestamp` which maps to the PubsubMessage + publishTime, `event_type` which maps to + `google.pubsub.topic.publish`, and `resource` which is + a dictionary that describes the service API endpoint + pubsub.googleapis.com, the triggering topic's name, and + the triggering event type + `type.googleapis.com/google.pubsub.v1.PubsubMessage`. + Returns: + None. The output is written to Cloud Logging. + """ + + payload = json.loads(base64.b64decode(event["data"]).decode("utf-8")) + + project_id = os.environ["VERTEX_PROJECT_ID"] + location = os.environ["VERTEX_LOCATION"] + pipeline_root = os.environ["VERTEX_PIPELINE_ROOT"] + service_account = os.environ["VERTEX_SA_EMAIL"] + + template_path = payload["template_path"] + display_name = payload["display_name"] + pipeline_parameters = payload.get("pipeline_parameters") + enable_caching = payload.get("enable_pipeline_caching") + if enable_caching: + enable_caching = bool(strtobool(enable_caching)) + + # For below options, we want an empty string to become None, so we add "or None" + encryption_spec_key_name = os.environ.get("VERTEX_CMEK_IDENTIFIER") or None + network = os.environ.get("VERTEX_NETWORK") or None + + # If template_path is an AR URL and a tag is used, resolve to exact version + # Workaround for known issue + # https://github.com/googleapis/python-aiplatform/issues/2181 + _VALID_AR_URL = re.compile( + r"https://([\w\-]+)-kfp\.pkg\.dev/([\w\-]+)/([\w\-]+)/([\w\-]+)/([\w\-.]+)", + re.IGNORECASE, + ) + match = _VALID_AR_URL.match(template_path) + if match and "sha256:" not in template_path: + region, project, repo, package_name, tag = match.group(1, 2, 3, 4, 5) + host = f"https://{region}-kfp.pkg.dev/{project}/{repo}" + client = RegistryClient(host=host) + metadata = client.get_tag(package_name, tag) + version = metadata["version"][metadata["version"].find("sha256:") :] + template_path = f"{host}/{package_name}/{version}" + + # Instantiate PipelineJob object + pl = aiplatform.pipeline_jobs.PipelineJob( + project=project_id, + location=location, + display_name=display_name, + enable_caching=enable_caching, + template_path=template_path, + parameter_values=pipeline_parameters, + pipeline_root=pipeline_root, + encryption_spec_key_name=encryption_spec_key_name, + ) + + # Execute pipeline in Vertex + pl.submit( + service_account=service_account, + network=network, + ) diff --git a/terraform/modules/cloudfunction/src/requirements-dev.txt b/terraform/modules/cloudfunction/src/requirements-dev.txt new file mode 100644 index 00000000..7a04e1cf --- /dev/null +++ b/terraform/modules/cloudfunction/src/requirements-dev.txt @@ -0,0 +1,15 @@ +# Copyright 2022 Google LLC + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at + +# https://www.apache.org/licenses/LICENSE-2.0 + +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
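To exercise `cf_handler` above outside of Cloud Scheduler, a message can be published to the trigger topic directly. A sketch assuming `google-cloud-pubsub` is installed; the project and topic names are placeholders, and the payload keys mirror the handler's `payload` lookups:

```python
# Sketch: manually publishing a pipeline-trigger message.
# Project/topic names and the template path are placeholders.
import json

from google.cloud import pubsub_v1

publisher = pubsub_v1.PublisherClient()
topic = publisher.topic_path("my-project-id", "pipeline-trigger-topic")

payload = {
    "template_path": "https://europe-west2-kfp.pkg.dev/my-project/vertex-pipelines/xgboost-train-pipeline/latest",  # noqa: E501
    "display_name": "xgboost-train-pipeline",
    "enable_pipeline_caching": "false",
    "pipeline_parameters": {"project": "my-project-id"},
}

# Pub/Sub hands the function these bytes base64-encoded in event["data"].
future = publisher.publish(topic, json.dumps(payload).encode("utf-8"))
print(future.result())  # message ID once the publish succeeds
```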
+ +pytest diff --git a/pipelines/src/pipelines/trigger/requirements.txt b/terraform/modules/cloudfunction/src/requirements.txt similarity index 91% rename from pipelines/src/pipelines/trigger/requirements.txt rename to terraform/modules/cloudfunction/src/requirements.txt index 75056d44..3567f18a 100644 --- a/pipelines/src/pipelines/trigger/requirements.txt +++ b/terraform/modules/cloudfunction/src/requirements.txt @@ -12,4 +12,5 @@ # See the License for the specific language governing permissions and # limitations under the License. -google-cloud-aiplatform==1.24.1 +google-cloud-aiplatform[pipelines]==1.30.1 +kfp==2.1.2 diff --git a/terraform/modules/cloudfunction/test/test_main.py b/terraform/modules/cloudfunction/test/test_main.py new file mode 100644 index 00000000..42b546ff --- /dev/null +++ b/terraform/modules/cloudfunction/test/test_main.py @@ -0,0 +1,154 @@ +# Copyright 2023 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from unittest import mock +import pytest +import os +import base64 +import json +from src.main import cf_handler + + +@pytest.mark.parametrize( + "test_enable_caching_input,enable_caching_expected", + # test for different values of "enable_pipeline_caching" env var (left) + # vs actual value passed to PipelineJob enable_caching (right) + [ + ("True", True), + ("true", True), + ("False", False), + ("false", False), + (None, None), # enable_pipeline_caching env var not set + ], +) +@mock.patch("google.cloud.aiplatform.pipeline_jobs.PipelineJob") +def test_trigger_pipeline_local_yaml( + mock_pipelinejob, test_enable_caching_input, enable_caching_expected +): + + template_path = "path/to/template.yaml" + enable_caching = test_enable_caching_input + pipeline_root = "gs://my-pipeline-root/abc123" + encryption_spec_key_name = "dummy_encryption_spec_key_name" + project_id = "dummy_project_id" + region = "dummy_region" + service_account = "dummy_sa@dummy_project_id.iam.gserviceaccount.com" + network = "dummy_network" + display_name = "my first pipeline" + + payload = { + "template_path": template_path, + "display_name": display_name, + "pipeline_parameters": { + "foo": "bar", + "abc": 123, + }, + } + + if enable_caching is not None: + payload["enable_pipeline_caching"] = enable_caching + + # encode as base64 to feed into Cloud Function + event = {"data": base64.b64encode(json.dumps(payload).encode("utf-8"))} + + env_vars = { + "VERTEX_PROJECT_ID": project_id, + "VERTEX_LOCATION": region, + "VERTEX_SA_EMAIL": service_account, + "VERTEX_NETWORK": network, + "VERTEX_CMEK_IDENTIFIER": encryption_spec_key_name, + "VERTEX_PIPELINE_ROOT": pipeline_root, + } + + with mock.patch.dict(os.environ, env_vars): + + cf_handler(event, None) + + mock_pipelinejob.assert_called_with( + project=project_id, + location=region, + display_name=display_name, + enable_caching=enable_caching_expected, + template_path=template_path, + pipeline_root=pipeline_root, + encryption_spec_key_name=encryption_spec_key_name, + parameter_values=payload["pipeline_parameters"], + ) + + 
mock_pipelinejob.return_value.submit.assert_called_with( + service_account=service_account, + network=network, + ) + + +@mock.patch("src.main.RegistryClient") +@mock.patch("google.cloud.aiplatform.pipeline_jobs.PipelineJob") +def test_trigger_pipeline_ar_yaml_with_tag(mock_pipelinejob, mock_registryclient): + + template_path = "https://europe-west2-kfp.pkg.dev/project/repo/package_name/tag" + enable_caching = None + pipeline_root = "gs://my-pipeline-root/abc123" + encryption_spec_key_name = "dummy_encryption_spec_key_name" + project_id = "dummy_project_id" + region = "dummy_region" + service_account = "dummy_sa@dummy_project_id.iam.gserviceaccount.com" + network = "dummy_network" + display_name = "my first pipeline" + + payload = { + "template_path": template_path, + "display_name": display_name, + "pipeline_parameters": { + "foo": "bar", + "abc": 123, + }, + } + + if enable_caching is not None: + payload["enable_pipeline_caching"] = enable_caching + + # encode as base64 to feed into Cloud Function + event = {"data": base64.b64encode(json.dumps(payload).encode("utf-8"))} + + env_vars = { + "VERTEX_PROJECT_ID": project_id, + "VERTEX_LOCATION": region, + "VERTEX_SA_EMAIL": service_account, + "VERTEX_NETWORK": network, + "VERTEX_CMEK_IDENTIFIER": encryption_spec_key_name, + "VERTEX_PIPELINE_ROOT": pipeline_root, + } + + with mock.patch.dict(os.environ, env_vars): + + cf_handler(event, None) + + mock_pipelinejob.assert_called_with( + project=project_id, + location=region, + display_name=display_name, + enable_caching=None, + template_path=( + f"https://europe-west2-kfp.pkg.dev/project/repo/package_name/" + f"{mock_registryclient.return_value.get_tag.return_value.__getitem__().__getitem__()}" # noqa + ), + pipeline_root=pipeline_root, + encryption_spec_key_name=encryption_spec_key_name, + parameter_values=payload["pipeline_parameters"], + ) + + mock_pipelinejob.return_value.submit.assert_called_with( + service_account=service_account, + network=network, + ) diff --git a/terraform/modules/cloudfunction/variables.tf b/terraform/modules/cloudfunction/variables.tf index baafb9a2..4a8d6667 100644 --- a/terraform/modules/cloudfunction/variables.tf +++ b/terraform/modules/cloudfunction/variables.tf @@ -114,11 +114,6 @@ variable "source_code_bucket_name" { type = string } -variable "source_dir" { - description = "The pathname of the directory which contains the function source code." - type = string -} - variable "max_instances" { description = "The maximum number of parallel executions of the function." 
type = number
diff --git a/terraform/modules/scheduled_pipelines/main.tf b/terraform/modules/scheduled_pipelines/main.tf
index 5163060c..20f2c813 100644
--- a/terraform/modules/scheduled_pipelines/main.tf
+++ b/terraform/modules/scheduled_pipelines/main.tf
@@ -25,10 +25,11 @@ resource "google_cloud_scheduler_job" "scheduled_pipeline" {
   pubsub_target {
     topic_name = var.topic_name
-    attributes = {
-      template_path = var.template_path,
-      enable_caching = var.enable_caching
-    }
-    data = base64encode(jsonencode(var.pipeline_parameters))
+    data = base64encode(jsonencode({
+      template_path = var.template_path,
+      display_name = var.name,
+      enable_caching = var.enable_caching,
+      pipeline_parameters = var.pipeline_parameters,
+    }))
   }
 }
diff --git a/terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example b/terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example
index a92fd2df..77682e81 100644
--- a/terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example
+++ b/terraform/modules/scheduled_pipelines/scheduled_jobs.auto.tfvars.example
@@ -16,101 +16,41 @@
 cloud_schedulers_config = {
-  xgboost_training = {
-    name = "xgboost-training-pipeline-trigger"
-    description = "Trigger my XGBoost training pipeline in Vertex"
+  training = {
+    description = "Trigger training pipeline in Vertex AI"
     schedule = "0 0 * * 0"
     time_zone = "UTC"
-    template_path = "gs://my-assets-bucket//training/training.json"
+    template_path = "https://-kfp.pkg.dev//vertex-pipelines/xgboost-train-pipeline/"
     enable_caching = null
     pipeline_parameters = {
-      project_id = "my-project-id"
-      project_location = "europe-west2"
-      pipeline_files_gcs_path = "gs://my-assets-bucket/"
-      ingestion_project_id = "my-project-id"
-      model_name = "xgboost_with_preprocessing"
-      model_label = "label_name"
-      tfdv_schema_filename = "tfdv_schema_training.pbtxt"
-      tfdv_train_stats_path = "gs://my-pipeline-root-bucket/train_stats/train.stats"
-      dataset_id = "preprocessing"
-      dataset_location = "europe-west2"
-      ingestion_dataset_id = "chicago_taxi_trips"
+      project = "my-project-id"
+      location = "europe-west2"
+      bq_location = "US"
+      bq_source_uri = "bigquery-public-data.chicago_taxi_trips.taxi_trips"
+      model_name = "xgb_regressor"
+      dataset = "turbo_templates"
       timestamp = "2022-12-01 00:00:00"
-    },
+      test_data_gcs_uri = ""
+    }
+  },
-  xgboost_prediction = {
-    name = "xgboost-prediction-pipeline-trigger"
-    description = "Trigger my XGBoost prediction pipeline in Vertex"
+  prediction = {
+    description = "Trigger prediction pipeline in Vertex AI"
     schedule = "0 0 * * 0"
     time_zone = "UTC"
-    template_path = "gs://my-assets-bucket//prediction/prediction.json"
+    template_path = "https://-kfp.pkg.dev//vertex-pipelines/xgboost-prediction-pipeline/"
     enable_caching = null
     pipeline_parameters = {
-      project_id = "my-project-id"
-      project_location = "europe-west4"
-      pipeline_files_gcs_path = "gs://my-assets-bucket/"
-      ingestion_project_id = "my-project-id"
-      model_name = "xgboost_with_preprocessing"
-      model_label = "label_name"
-      tfdv_schema_filename = "tfdv_schema_training.pbtxt"
-      tfdv_train_stats_path = "gs://my-pipeline-root-bucket/train_stats/train.stats"
-      dataset_id = "preprocessing"
-      dataset_location = "europe-west2"
-      ingestion_dataset_id = "chicago_taxi_trips"
+      project = "my-project-id"
+      location = "europe-west2"
+      bq_location = "US"
+      bq_source_uri = "bigquery-public-data.chicago_taxi_trips.taxi_trips"
+      model_name = "xgb_regressor"
+      dataset = "turbo_templates"
       timestamp = "2022-12-01 00:00:00"
-
batch_prediction_machine_type = "n1-standard-4" - batch_prediction_min_replicas = 3 - batch_prediction_max_replicas = 5 - }, - }, - - tensorflow_training = { - name = "tensorflow-training-pipeline-trigger" - description = "Trigger my TensorFlow training pipeline in Vertex" - schedule = "0 0 * * 0" - time_zone = "UTC" - template_path = "gs://my-assets-bucket//training/training.json" - enable_caching = null - pipeline_parameters = { - project_id = "my-project-id" - project_location = "europe-west4" - pipeline_files_gcs_path = "gs://my-assets-bucket/" - ingestion_project_id = "my-project-id" - model_name = "tensorflow_with_preprocessing" - model_label = "label_name" - tfdv_schema_filename = "tfdv_schema_training.pbtxt" - tfdv_train_stats_path = "gs://my-pipeline-root-bucket/train_stats/train.stats" - dataset_id = "preprocessing" - dataset_location = "europe-west2" - ingestion_dataset_id = "chicago_taxi_trips" - timestamp = "2022-12-01 00:00:00" - }, - }, - - tensorflow_prediction = { - name = "tensorflow-prediction-pipeline-trigger" - description = "Trigger my TensorFlow prediction pipeline in Vertex" - schedule = "0 0 * * 0" - time_zone = "UTC" - template_path = "gs://my-assets-bucket//prediction/prediction.json" - enable_caching = null - pipeline_parameters = { - project_id = "my-project-id" - project_location = "europe-west4" - pipeline_files_gcs_path = "gs://my-assets-bucket/" - ingestion_project_id = "my-project-id" - model_name = "tensorflow_with_preprocessing" - model_label = "label_name" - tfdv_schema_filename = "tfdv_schema_training.pbtxt" - tfdv_train_stats_path = "gs://my-pipeline-root-bucket/train_stats/train.stats" - dataset_id = "preprocessing" - dataset_location = "europe-west2" - ingestion_dataset_id = "chicago_taxi_trips" - timestamp = "2022-12-01 00:00:00" - batch_prediction_machine_type = "n1-standard-4" - batch_prediction_min_replicas = 3 - batch_prediction_max_replicas = 5 - }, - }, + machine_type = "n2-standard-4" + min_replicas = 3 + max_replicas = 10 + } + } } diff --git a/terraform/modules/vertex_deployment/iam.tf b/terraform/modules/vertex_deployment/iam.tf index 8cacdcbe..1df87314 100644 --- a/terraform/modules/vertex_deployment/iam.tf +++ b/terraform/modules/vertex_deployment/iam.tf @@ -26,28 +26,6 @@ resource "google_storage_bucket_iam_member" "pipelines_sa_pipeline_root_bucket_i role = each.key } -# Give pipelines SA read-only access to objects in "assets" bucket -resource "google_storage_bucket_iam_member" "pipelines_sa_pipeline_assets_bucket_iam" { - for_each = toset([ - "roles/storage.objectViewer", - "roles/storage.legacyBucketReader", - ]) - bucket = google_storage_bucket.pipeline_assets_bucket.name - member = google_service_account.pipelines_sa.member - role = each.key -} - -# Give cloud functions SA access to read compiled pipelines from bucket -resource "google_storage_bucket_iam_member" "cloudfunction_sa_pipeline_assets_bucket_iam" { - for_each = toset([ - "roles/storage.objectViewer", - "roles/storage.legacyBucketReader", - ]) - bucket = google_storage_bucket.pipeline_assets_bucket.name - member = google_service_account.vertex_cloudfunction_sa.member - role = each.key -} - # Give cloud functions SA access to use the pipelines SA for triggering pipelines resource "google_service_account_iam_member" "cloudfunction_sa_can_use_pipelines_sa" { service_account_id = google_service_account.pipelines_sa.name @@ -55,6 +33,22 @@ resource "google_service_account_iam_member" "cloudfunction_sa_can_use_pipelines member = 
   member = google_service_account.vertex_cloudfunction_sa.member
 }
 
+# Give cloud functions SA access to KFP Artifact Registry to access compiled pipelines
+resource "google_artifact_registry_repository_iam_member" "cloudfunction_sa_can_access_ar" {
+  project    = google_artifact_registry_repository.vertex-pipelines.project
+  location   = google_artifact_registry_repository.vertex-pipelines.location
+  repository = google_artifact_registry_repository.vertex-pipelines.name
+  role       = "roles/artifactregistry.reader"
+  member     = google_service_account.vertex_cloudfunction_sa.member
+}
+
+# Give cloud functions SA access to pipeline root bucket to check it exists
+resource "google_storage_bucket_iam_member" "cloudfunction_sa_can_get_pl_root_bucket" {
+  bucket = google_storage_bucket.pipeline_root_bucket.name
+  role   = "roles/storage.legacyBucketReader"
+  member = google_service_account.vertex_cloudfunction_sa.member
+}
+
 ## Project IAM roles ##
 
 # Vertex Pipelines SA project roles
diff --git a/terraform/modules/vertex_deployment/main.tf b/terraform/modules/vertex_deployment/main.tf
index 35498a20..3b58bae5 100644
--- a/terraform/modules/vertex_deployment/main.tf
+++ b/terraform/modules/vertex_deployment/main.tf
@@ -16,10 +16,11 @@
 ## Google Cloud APIs to enable ##
 resource "google_project_service" "gcp_services" {
-  for_each           = toset(var.gcp_service_list)
-  project            = var.project_id
-  service            = each.key
-  disable_on_destroy = var.disable_services_on_destroy
+  for_each                   = toset(var.gcp_service_list)
+  project                    = var.project_id
+  service                    = each.key
+  disable_on_destroy         = var.disable_services_on_destroy
+  disable_dependent_services = var.disable_dependent_services
 }
 
 ## Service Accounts ##
@@ -50,15 +51,6 @@ resource "google_storage_bucket" "pipeline_root_bucket" {
   depends_on = [google_project_service.gcp_services]
 }
 
-resource "google_storage_bucket" "pipeline_assets_bucket" {
-  name = "${var.project_id}-pl-assets"
-  location = var.region
-  project = var.project_id
-  uniform_bucket_level_access = true
-  public_access_prevention = "enforced"
-  depends_on = [google_project_service.gcp_services]
-}
-
 ## Cloud Function - used to trigger pipelines ##
 
 locals {
@@ -75,9 +67,9 @@ resource "google_pubsub_topic" "pipeline_trigger_topic" {
   depends_on = [google_project_service.gcp_services]
 }
 
-# Cloud Function staging bucket
-resource "google_storage_bucket" "cf_staging_bucket" {
-  name = "${var.project_id}-cf-staging"
+# Cloud Function / Cloud Build staging bucket
+resource "google_storage_bucket" "staging_bucket" {
+  name = "${var.project_id}-staging"
   location = local.cloudfunction_region
   project = var.project_id
   uniform_bucket_level_access = true
@@ -92,8 +84,7 @@ module "cloudfunction" {
   region = local.cloudfunction_region
   function_name = var.cloudfunction_name
   description = var.cloudfunction_description
-  source_dir = "../../../pipelines/src/pipelines/trigger"
-  source_code_bucket_name = google_storage_bucket.cf_staging_bucket.name
+  source_code_bucket_name = google_storage_bucket.staging_bucket.name
   runtime = "python39"
   entry_point = "cf_handler"
   cf_service_account = google_service_account.vertex_cloudfunction_sa.email
@@ -122,3 +113,23 @@ resource "google_vertex_ai_metadata_store" "default_metadata_store" {
   region = var.region
   depends_on = [google_project_service.gcp_services]
 }
+
+## Artifact Registry - container images ##
+resource "google_artifact_registry_repository" "vertex-images" {
+  repository_id = "vertex-images"
+  description = "Container image repository for training container images"
+  project = var.project_id
+  location = var.region
+  format = "DOCKER"
+  depends_on = [google_project_service.gcp_services]
+}
+
+## Artifact Registry - KFP pipelines ##
+resource "google_artifact_registry_repository" "vertex-pipelines" {
+  repository_id = "vertex-pipelines"
+  description = "KFP repository for Vertex Pipelines"
+  project = var.project_id
+  location = var.region
+  format = "KFP"
+  depends_on = [google_project_service.gcp_services]
+}
diff --git a/terraform/modules/vertex_deployment/outputs.tf b/terraform/modules/vertex_deployment/outputs.tf
index 80bd3462..e77014e6 100644
--- a/terraform/modules/vertex_deployment/outputs.tf
+++ b/terraform/modules/vertex_deployment/outputs.tf
@@ -18,18 +18,14 @@ output "pubsub_topic_id" {
   value = google_pubsub_topic.pipeline_trigger_topic.id
 }
 
-output "cf_staging_bucket_name" {
-  value = google_storage_bucket.cf_staging_bucket.name
+output "staging_bucket_name" {
+  value = google_storage_bucket.staging_bucket.name
 }
 
 output "pipeline_root_bucket_name" {
   value = google_storage_bucket.pipeline_root_bucket.name
 }
 
-output "pipeline_assets_bucket_name" {
-  value = google_storage_bucket.pipeline_assets_bucket.name
-}
-
 output "vertex_pipelines_sa_email" {
   value = google_service_account.pipelines_sa.email
 }
diff --git a/terraform/modules/vertex_deployment/variables.tf b/terraform/modules/vertex_deployment/variables.tf
index 93d81205..074d66ec 100644
--- a/terraform/modules/vertex_deployment/variables.tf
+++ b/terraform/modules/vertex_deployment/variables.tf
@@ -51,7 +51,11 @@ variable "disable_services_on_destroy" {
   type = bool
   default = true
 }
-
+variable "disable_dependent_services" {
+  description = "If true, services that are enabled and which depend on this service should also be disabled when this service is destroyed. If false or unset, an error will be generated if any enabled services depend on this service when destroying it."
+  type = bool
+  default = true
+}
 variable "cloudfunction_region" {
   description = "Google Cloud region to use for the Cloud Function (and CF staging bucket). Defaults to the same as var.region"
   type = string