Test providers RC releases #168
Workflow file for this run
---
name: Build and test astro Python SDK
on:
  workflow_dispatch:
defaults:
  run:
    working-directory: python-sdk

# This allows a subsequently queued workflow run to interrupt and cancel previous runs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
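# With only a workflow_dispatch trigger, github.event.pull_request.number is
# normally empty, so the group falls back to github.ref: at most one active
# run per branch, with newer dispatches cancelling older ones.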

# This list should only have non-sensitive env vars
# Env vars with secrets should be in the specific jobs
env:
  SETUPTOOLS_USE_DISTUTILS: stdlib
  POSTGRES_HOST: postgres
  POSTGRES_PORT: 5432
  AIRFLOW__ASTRO_SDK__SQL_SCHEMA: astroflow_ci
  REDSHIFT_DATABASE: dev
  REDSHIFT_HOST: utkarsh-cluster.cdru7mxqmtyx.us-east-2.redshift.amazonaws.com
  SNOWFLAKE_SCHEMA: ASTROFLOW_CI
  SNOWFLAKE_DATABASE: SANDBOX
  SNOWFLAKE_WAREHOUSE: DEMO
  SNOWFLAKE_HOST: https://gp21411.us-east-1.snowflakecomputing.com
  SNOWFLAKE_ACCOUNT: gp21411
  SNOWFLAKE_REGION: us-east-1
  SNOWFLAKE_ROLE: AIRFLOW_TEST_USER
  SFTP_HOSTNAME: ${{ secrets.SFTP_HOSTNAME }}
  SFTP_USERNAME: ${{ secrets.SFTP_USERNAME }}
  SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
  AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: True
  AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
  AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
  AIRFLOW_VAR_FOO: templated_file_name
  AWS_BUCKET: tmp9
  GOOGLE_BUCKET: dag-authoring
  FORCE_COLOR: "true"
  MSSQL_DB: ${{ secrets.MSSQL_DB }}
  MSSQL_HOST: ${{ secrets.MSSQL_HOST }}
  MSSQL_LOGIN: ${{ secrets.MSSQL_LOGIN }}
  MSSQL_PASSWORD: ${{ secrets.MSSQL_PASSWORD }}
  MYSQL_DB: ${{ secrets.MYSQL_DB }}
  MYSQL_HOST: ${{ secrets.MYSQL_HOST }}
  MYSQL_LOGIN: ${{ secrets.MYSQL_LOGIN }}
  MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}

jobs:
  Run-Integration-tests-Airflow-2-7:
    needs: Create-Databricks-Cluster
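    # Create-Databricks-Cluster (and the sibling jobs referenced in `needs`
    # further below) must be defined elsewhere in this workflow; GitHub rejects
    # the file as invalid when a job named in `needs` does not exist.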
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
    runs-on: ubuntu-latest
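    # The 11 matrix groups pair with `--splits 11 --group N` in the nox step
    # below (pytest-split), so each runner executes roughly 1/11 of the suite.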
    services:
      postgres:
        # Docker Hub image
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          - 5432:5432
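      # Service `options` are passed through to `docker create`, so the
      # health-check flags above behave like their `docker run` equivalents.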
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - 2222:22
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - 21:21
          - 30000-30009:30000-30009
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
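      # 30000-30009 is presumably the FTP server's passive-mode data port
      # range; publishing it lets passive-mode transfers reach the container.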
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
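      # For pull_request_target events, checkout defaults to the base branch,
      # so the PR's head SHA is checked out explicitly instead.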
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
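      # The cache key invalidates whenever the SDK's dependencies
      # (pyproject.toml) or its version file (src/astro/__init__.py) change.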
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
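      # The two steps above write the GCP service-account JSON to the path in
      # $GOOGLE_APPLICATION_CREDENTIALS and create /tmp/sqlite_default.db
      # (VACUUM on a missing file creates an empty database), presumably for
      # Airflow's default sqlite connection.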
      - run: pip3 install nox
      - run: nox -s "test-3.10(airflow='2.7')" -- tests_integration/ -k "not test_load_file.py and not test_example_dags.py and not redshift" --splits 11 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
      - run: cat /tmp/durations-${{ matrix.group }}
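      # `-k` deselects the load-file, example-DAG, and Redshift tests, which
      # appear to be covered by the dedicated jobs named in the `needs` list of
      # Delete-Databricks-Cluster; `--store-durations` records per-test timings
      # (printed above) that pytest-split uses to rebalance the groups.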
      - name: Upload coverage
        uses: actions/upload-artifact@v2
        with:
          name: coverage-${{ matrix.group }}-integration-tests
          path: ./python-sdk/.coverage
      - name: Collect pytest durations
        uses: actions/upload-artifact@v2
        with:
          name: pytest_durations_integration_tests_${{ matrix.group }}
          path: /tmp/durations-${{ matrix.group }}
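      # Each matrix group uploads its own coverage and durations artifacts;
      # presumably a downstream job combines the per-group coverage files.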
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: "1003-075535-xz329x7r"
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
      AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
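    # The hardcoded cluster id above matches the one terminated in
    # Delete-Databricks-Cluster below, presumably (re)created by the
    # Create-Databricks-Cluster job this job depends on.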

  Delete-Databricks-Cluster:
    if: ${{ always() }}
    needs:
      - Create-Databricks-Cluster
      - Run-Optional-Packages-tests-python-sdk
      - Run-Unit-tests-Airflow-2-7
      - Run-example-dag-tests-Airflow-2-2-5
      - Run-Integration-tests-Airflow-2-7
      - Run-load-file-Integration-Airflow-2-7
      - Run-example-dag-Integration-Airflow-2-7
    runs-on: ubuntu-latest
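    # `always()` keeps this cleanup job running even when jobs in `needs`
    # fail or are cancelled, so the Databricks cluster is not left running.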
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.8"
          architecture: "x64"
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - name: Install dependencies
        working-directory: python-sdk/dev/scripts
        run: pip install -r requirements.txt
      - name: Terminate Databricks cluster
        id: terminate_databricks_cluster_and_wait
        working-directory: python-sdk/dev/scripts
        run: |
          python databricks.py terminate_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN --cluster-id "1003-075535-xz329x7r"
        env:
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
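      # Assuming dev/scripts/databricks.py keeps the CLI shape used above, a
      # manual cleanup with placeholder host/token values would look like:
      #   python databricks.py terminate_cluster https://<workspace>.cloud.databricks.com <token> --cluster-id 1003-075535-xz329x7r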