From 9a5b50c65573cba8e1fcc24aee807baa311ee1b7 Mon Sep 17 00:00:00 2001
From: Julian LaNeve
Date: Wed, 31 Jan 2024 18:00:57 -0500
Subject: [PATCH] move inline test scripts to dedicated directory

---
 pyproject.toml                           | 137 ++++++-----------------
 scripts/test/integration-expensive.sh    |   7 ++
 scripts/test/integration-setup.sh        |   6 +
 scripts/test/integration-sqlite-setup.sh |   4 +
 scripts/test/integration-sqlite.sh       |   7 ++
 scripts/test/integration.sh              |   8 ++
 scripts/test/unit-cov.sh                 |   9 ++
 scripts/test/unit.sh                     |   6 +
 8 files changed, 79 insertions(+), 105 deletions(-)
 create mode 100644 scripts/test/integration-expensive.sh
 create mode 100644 scripts/test/integration-setup.sh
 create mode 100644 scripts/test/integration-sqlite-setup.sh
 create mode 100644 scripts/test/integration-sqlite.sh
 create mode 100644 scripts/test/integration.sh
 create mode 100644 scripts/test/unit-cov.sh
 create mode 100644 scripts/test/unit.sh

diff --git a/pyproject.toml b/pyproject.toml
index 864b0fb42..3f76ae4dc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,16 +9,8 @@ description = "Orchestrate your dbt projects in Airflow"
 readme = "README.rst"
 license = "Apache-2.0"
 requires-python = ">=3.8"
-authors = [
-    { name = "Astronomer", email = "humans@astronomer.io" },
-]
-keywords = [
-    "airflow",
-    "apache-airflow",
-    "astronomer",
-    "dags",
-    "dbt",
-]
+authors = [{ name = "Astronomer", email = "humans@astronomer.io" }]
+keywords = ["airflow", "apache-airflow", "astronomer", "dags", "dbt"]
 classifiers = [
     "Development Status :: 3 - Alpha",
     "Environment :: Web Environment",
@@ -55,48 +47,23 @@ dbt-all = [
     "dbt-spark",
     "dbt-vertica",
 ]
-dbt-athena = [
-    "dbt-athena-community",
-    "apache-airflow-providers-amazon>=8.0.0",
-]
-dbt-bigquery = [
-    "dbt-bigquery",
-]
-dbt-databricks = [
-    "dbt-databricks",
-]
-dbt-exasol = [
-    "dbt-exasol",
-]
-dbt-postgres = [
-    "dbt-postgres",
-]
-dbt-redshift = [
-    "dbt-redshift",
-]
-dbt-snowflake = [
-    "dbt-snowflake",
-]
-dbt-spark = [
-    "dbt-spark",
-]
-dbt-vertica = [
-    "dbt-vertica<=1.5.4",
-]
-openlineage = [
-    "openlineage-integration-common",
-    "openlineage-airflow",
-]
-all = [
-    "astronomer-cosmos[dbt-all]",
-    "astronomer-cosmos[openlineage]"
-]
-docs =[
+dbt-athena = ["dbt-athena-community", "apache-airflow-providers-amazon>=8.0.0"]
+dbt-bigquery = ["dbt-bigquery"]
+dbt-databricks = ["dbt-databricks"]
+dbt-exasol = ["dbt-exasol"]
+dbt-postgres = ["dbt-postgres"]
+dbt-redshift = ["dbt-redshift"]
+dbt-snowflake = ["dbt-snowflake"]
+dbt-spark = ["dbt-spark"]
+dbt-vertica = ["dbt-vertica<=1.5.4"]
+openlineage = ["openlineage-integration-common", "openlineage-airflow"]
+all = ["astronomer-cosmos[dbt-all]", "astronomer-cosmos[openlineage]"]
+docs = [
     "sphinx",
     "pydata-sphinx-theme",
     "sphinx-autobuild",
     "sphinx-autoapi",
-    "apache-airflow-providers-cncf-kubernetes>=5.1.1"
+    "apache-airflow-providers-cncf-kubernetes>=5.1.1",
 ]
 tests = [
     "packaging",
@@ -111,15 +78,9 @@ tests = [
     "mypy",
     "sqlalchemy-stubs", # Change when sqlalchemy is upgraded https://docs.sqlalchemy.org/en/14/orm/extensions/mypy.html
 ]
-docker = [
-    "apache-airflow-providers-docker>=3.5.0",
-]
-kubernetes = [
-    "apache-airflow-providers-cncf-kubernetes>=5.1.1",
-]
-pydantic = [
-    "pydantic>=1.10.0",
-]
+docker = ["apache-airflow-providers-docker>=3.5.0"]
+kubernetes = ["apache-airflow-providers-cncf-kubernetes>=5.1.1"]
+pydantic = ["pydantic>=1.10.0"]
 
 [project.entry-points.cosmos]
 provider_info = "cosmos:get_provider_info"
@@ -133,9 +94,7 @@ Documentation = "https://astronomer.github.io/astronomer-cosmos"
 path = "cosmos/__init__.py"
 
 [tool.hatch.build.targets.sdist]
-include = [
-    "/cosmos",
-]
+include = ["/cosmos"]
 
 [tool.hatch.build.targets.wheel]
 packages = ["cosmos"]
@@ -156,13 +115,14 @@ dependencies = [
     "apache-airflow-providers-docker>=3.5.0",
 ]
 # Airflow install with constraint file, Airflow versions < 2.7 require a workaround for PyYAML
-pre-install-commands = ["""
+pre-install-commands = [
+    """
 if [[ "2.3 2.4 2.5 2.6" =~ "{matrix:airflow}" ]]; then
     echo "Cython < 3" >> /tmp/constraint.txt
     pip wheel "PyYAML==6.0.0" -c /tmp/constraint.txt
 fi
 pip install 'apache-airflow=={matrix:airflow}' --constraint 'https://raw.githubusercontent.com/apache/airflow/constraints-{matrix:airflow}.0/constraints-{matrix:python}.txt'
-    """
+    """,
 ]
 [[tool.hatch.envs.tests.matrix]]
 python = ["3.8", "3.9", "3.10"]
@@ -172,51 +132,18 @@ airflow = ["2.3", "2.4", "2.5", "2.6", "2.7", "2.8"]
 [tool.hatch.envs.tests.scripts]
 freeze = "pip freeze"
 type-check = "mypy cosmos"
-test = 'pytest -vv --durations=0 . -m "not integration" --ignore=tests/test_example_dags.py --ignore=tests/test_example_dags_no_connections.py'
-test-cov = """pytest -vv --cov=cosmos --cov-report=term-missing --cov-report=xml --durations=0 -m "not integration" --ignore=tests/test_example_dags.py --ignore=tests/test_example_dags_no_connections.py"""
-# we install using the following workaround to overcome installation conflicts, such as:
-# apache-airflow 2.3.0 and dbt-core [0.13.0 - 1.5.2] and jinja2>=3.0.0 because these package versions have conflicting dependencies
-test-integration-setup = """pip uninstall -y dbt-postgres dbt-databricks dbt-vertica; \
-rm -rf airflow.*; \
-airflow db init; \
-pip install 'dbt-core' 'dbt-databricks' 'dbt-postgres' 'dbt-vertica' 'openlineage-airflow'"""
-test-integration = """rm -rf dbt/jaffle_shop/dbt_packages;
-pytest -vv \
---cov=cosmos \
---cov-report=term-missing \
---cov-report=xml \
---durations=0 \
--m integration \
--k 'not (sqlite or example_cosmos_sources or example_cosmos_python_models or example_virtualenv)'"""
-test-integration-expensive = """pytest -vv \
---cov=cosmos \
---cov-report=term-missing \
---cov-report=xml \
---durations=0 \
--m integration \
--k 'example_cosmos_python_models or example_virtualenv'"""
-test-integration-sqlite-setup = """pip uninstall -y dbt-core dbt-sqlite openlineage-airflow openlineage-integration-common; \
-rm -rf airflow.*; \
-airflow db init; \
-pip install 'dbt-core==1.4' 'dbt-sqlite<=1.4' 'dbt-databricks<=1.4' 'dbt-postgres<=1.4' """
-test-integration-sqlite = """
-pytest -vv \
---cov=cosmos \
---cov-report=term-missing \
---cov-report=xml \
---durations=0 \
--m integration \
--k 'example_cosmos_sources or sqlite'"""
+test-unit = 'sh scripts/test/unit.sh'
+test-unit-cov = 'sh scripts/test/unit-cov.sh'
+test-integration-setup = 'sh scripts/test/integration-setup.sh'
+test-integration = 'sh scripts/test/integration.sh'
+test-integration-expensive = 'sh scripts/test/integration-expensive.sh'
+test-integration-sqlite-setup = 'sh scripts/test/integration-sqlite-setup.sh'
+test-integration-sqlite = 'sh scripts/test/integration-sqlite.sh'
 
 [tool.pytest.ini_options]
-filterwarnings = [
-    "ignore::DeprecationWarning",
-]
+filterwarnings = ["ignore::DeprecationWarning"]
 minversion = "6.0"
-markers = [
-    "integration",
-    "sqlite"
-]
+markers = ["integration", "sqlite"]
 
 ######################################
 # DOCS
@@ -230,7 +157,7 @@ dependencies = [
     "sphinx-autobuild",
     "sphinx-autoapi",
     "openlineage-airflow",
-    "apache-airflow-providers-cncf-kubernetes>=5.1.1"
"apache-airflow-providers-cncf-kubernetes>=5.1.1", ] [tool.hatch.envs.docs.scripts] diff --git a/scripts/test/integration-expensive.sh b/scripts/test/integration-expensive.sh new file mode 100644 index 000000000..6a6249ffb --- /dev/null +++ b/scripts/test/integration-expensive.sh @@ -0,0 +1,7 @@ +pytest -vv \ + --cov=cosmos \ + --cov-report=term-missing \ + --cov-report=xml \ + --durations=0 \ + -m integration \ + -k 'example_cosmos_python_models or example_virtualenv' \ No newline at end of file diff --git a/scripts/test/integration-setup.sh b/scripts/test/integration-setup.sh new file mode 100644 index 000000000..64a3a04a4 --- /dev/null +++ b/scripts/test/integration-setup.sh @@ -0,0 +1,6 @@ +# we install using the following workaround to overcome installation conflicts, such as: +# apache-airflow 2.3.0 and dbt-core [0.13.0 - 1.5.2] and jinja2>=3.0.0 because these package versions have conflicting dependencies +pip uninstall -y dbt-postgres dbt-databricks dbt-vertica; \ +rm -rf airflow.*; \ +airflow db init; \ +pip install 'dbt-core' 'dbt-databricks' 'dbt-postgres' 'dbt-vertica' 'openlineage-airflow' \ No newline at end of file diff --git a/scripts/test/integration-sqlite-setup.sh b/scripts/test/integration-sqlite-setup.sh new file mode 100644 index 000000000..78e1307b7 --- /dev/null +++ b/scripts/test/integration-sqlite-setup.sh @@ -0,0 +1,4 @@ +pip uninstall -y dbt-core dbt-sqlite openlineage-airflow openlineage-integration-common; \ +rm -rf airflow.*; \ +airflow db init; \ +pip install 'dbt-core==1.4' 'dbt-sqlite<=1.4' 'dbt-databricks<=1.4' 'dbt-postgres<=1.4' \ No newline at end of file diff --git a/scripts/test/integration-sqlite.sh b/scripts/test/integration-sqlite.sh new file mode 100644 index 000000000..d309ad088 --- /dev/null +++ b/scripts/test/integration-sqlite.sh @@ -0,0 +1,7 @@ +pytest -vv \ + --cov=cosmos \ + --cov-report=term-missing \ + --cov-report=xml \ + --durations=0 \ + -m integration \ + -k 'example_cosmos_sources or sqlite'""" \ No newline at end of file diff --git a/scripts/test/integration.sh b/scripts/test/integration.sh new file mode 100644 index 000000000..24b768a1b --- /dev/null +++ b/scripts/test/integration.sh @@ -0,0 +1,8 @@ +rm -rf dbt/jaffle_shop/dbt_packages; +pytest -vv \ + --cov=cosmos \ + --cov-report=term-missing \ + --cov-report=xml \ + --durations=0 \ + -m integration \ + -k 'not (sqlite or example_cosmos_sources or example_cosmos_python_models or example_virtualenv)' \ No newline at end of file diff --git a/scripts/test/unit-cov.sh b/scripts/test/unit-cov.sh new file mode 100644 index 000000000..3a75ede2e --- /dev/null +++ b/scripts/test/unit-cov.sh @@ -0,0 +1,9 @@ +pytest \ + -vv \ + --cov=cosmos \ + --cov-report=term-missing \ + --cov-report=xml \ + --durations=0 \ + -m "not integration" \ + --ignore=tests/test_example_dags.py \ + --ignore=tests/test_example_dags_no_connections.p \ No newline at end of file diff --git a/scripts/test/unit.sh b/scripts/test/unit.sh new file mode 100644 index 000000000..8f3cbb18b --- /dev/null +++ b/scripts/test/unit.sh @@ -0,0 +1,6 @@ +pytest \ + -vv \ + --durations=0 \ + -m "not integration" \ + --ignore=tests/test_example_dags.py \ + --ignore=tests/test_example_dags_no_connections.py \ No newline at end of file