diff --git a/.github/workflows/ci-rc-test.yaml b/.github/workflows/ci-rc-test.yaml
index 12301ea3a..a6c848218 100644
--- a/.github/workflows/ci-rc-test.yaml
+++ b/.github/workflows/ci-rc-test.yaml
@@ -1,55 +1,132 @@
 ---
-name: Test providers RC releases
-
-on: # yamllint disable-line rule:truthy
-  schedule:
-    - cron: "0 0,12 * * *"
+name: Build and test astro Python SDK
+on:
   workflow_dispatch:
-    inputs:
-      rc_testing_branch:
-        # If a branch is given, the workflow will use it for deployment and testing.
-        # If no branch is provided, the workflow will create a new rc testing branch
-        # for deployment and testing.
-        description: |
-          rc_testing_branch: existing testing branch
-          (Either rc_testing_branch or issue_url is required, and you cannot give both.)
-        required: false
-        default: ""
-      issue_url:
-        description: |
-          issue_url: the GitHub issue URL that tracks the status of Providers release
-          (Either rc_testing_branch or issue_url is required, and you cannot give both.)
-        required: false
-      base_git_rev:
-        description: "The base git revision to test Providers RCs"
-        required: false
-        type: string
-        default: "main"
 
 defaults:
   run:
     working-directory: python-sdk
+# This allows a subsequently queued workflow run to interrupt and cancel previous runs
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+# This list should only have non-sensitive env vars
+# Env vars with secrets should be in the specific jobs
+env:
+  SETUPTOOLS_USE_DISTUTILS: stdlib
+  POSTGRES_HOST: postgres
+  POSTGRES_PORT: 5432
+  AIRFLOW__ASTRO_SDK__SQL_SCHEMA: astroflow_ci
+  REDSHIFT_DATABASE: dev
+  REDSHIFT_HOST: utkarsh-cluster.cdru7mxqmtyx.us-east-2.redshift.amazonaws.com
+  SNOWFLAKE_SCHEMA: ASTROFLOW_CI
+  SNOWFLAKE_DATABASE: SANDBOX
+  SNOWFLAKE_WAREHOUSE: DEMO
+  SNOWFLAKE_HOST: https://gp21411.us-east-1.snowflakecomputing.com
+  SNOWFLAKE_ACCOUNT: gp21411
+  SNOWFLAKE_REGION: us-east-1
+  SNOWFLAKE_ROLE: AIRFLOW_TEST_USER
+  SFTP_HOSTNAME: ${{ secrets.SFTP_HOSTNAME }}
+  SFTP_USERNAME: ${{ secrets.SFTP_USERNAME }}
+  SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
+  AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: True
+  AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
+  AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
+  AIRFLOW_VAR_FOO: templated_file_name
+  AWS_BUCKET: tmp9
+  GOOGLE_BUCKET: dag-authoring
+  FORCE_COLOR: "true"
+  MSSQL_DB: ${{ secrets.MSSQL_DB }}
+  MSSQL_HOST: ${{ secrets.MSSQL_HOST }}
+  MSSQL_LOGIN: ${{ secrets.MSSQL_LOGIN }}
+  MSSQL_PASSWORD: ${{ secrets.MSSQL_PASSWORD }}
+  MYSQL_DB: ${{ secrets.MYSQL_DB }}
+  MYSQL_HOST: ${{ secrets.MYSQL_HOST }}
+  MYSQL_LOGIN: ${{ secrets.MYSQL_LOGIN }}
+  MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}
+
 jobs:
-  check-airflow-provider-rc-release:
-    uses: astronomer/astronomer-providers/.github/workflows/reuse-wf-check-rc-release.yaml@main
-    with:
-      rc_testing_branch: ${{ inputs.rc_testing_branch }}
-      issue_url: ${{ inputs.issue_url }}
-      base_git_rev: ${{ inputs.base_git_rev }}
-      git_email: "airflow-oss-bot@astronomer.io"
-      git_username: "airflow-oss-bot"
-      working_directory: "python-sdk"
-    secrets:
-      BOT_ACCESS_TOKEN: ${{ secrets.BOT_ACCESS_TOKEN }}
+  Run-Integration-tests-Airflow-2-7:
+    strategy:
+      fail-fast: false
+      matrix:
+        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
+    runs-on: ubuntu-latest
+    services:
+      postgres:
+        # Docker Hub image
+        image: dimberman/pagila-test
+        env:
+          POSTGRES_PASSWORD: postgres
+        # Set health checks to wait until postgres has started
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+        ports:
+          - 5432:5432
+      sftp:
+        image: ghcr.io/astronomer/astro-sdk/sftp_docker
+        ports:
+          - 2222:22
+      ftp:
+        image: ghcr.io/astronomer/astro-sdk/ftp_docker
+        ports:
+          - 21:21
+          - 30000-30009:30000-30009
+        env:
+          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
+          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
+          FTP_USER_HOME: /home/foo
+          PUBLICHOST: "localhost"
+    steps:
+      - uses: actions/checkout@v3
+        if: github.event_name != 'pull_request_target'
 
-  deploy-and-trigger-example-dag:
-    needs: check-airflow-provider-rc-release
-    if: |
-      always() &&
-      needs.check-airflow-provider-rc-release.result == 'success'
-    uses: ./.github/workflows/ci-astro-deploy.yml
-    with:
-      environment_to_deploy: "both"
-      dags_to_trigger_after_deployment: "example_master_dag"
-      git_rev: ${{ needs.check-airflow-provider-rc-release.outputs.rc_testing_branch }}
-    secrets: inherit
+      - name: Checkout pull/${{ github.event.number }}
+        uses: actions/checkout@v3
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+        if: github.event_name == 'pull_request_target'
+      - uses: actions/setup-python@v3
+        with:
+          python-version: "3.10"
+          architecture: "x64"
+      - uses: actions/cache@v3
+        with:
+          path: |
+            ~/.cache/pip
+            .nox
+          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
+      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
+      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
+      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
+      - run: pip3 install nox
+      - run: nox -s "test-3.10(airflow='2.7')" -- tests_integration/ -k "not test_load_file.py and not test_example_dags.py and not redshift" --splits 11 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
+      - run: cat /tmp/durations-${{ matrix.group }}
+      - name: Upload coverage
+        uses: actions/upload-artifact@v2
+        with:
+          name: coverage-${{ matrix.group }}-integration-tests
+          path: ./python-sdk/.coverage
+      - name: Collect pytest durations
+        uses: actions/upload-artifact@v2
+        with:
+          name: pytest_durations_integration_tests_${{ matrix.group }}
+          path: /tmp/durations-${{ matrix.group }}
+    env:
+      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
+      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
+      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
+      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
+      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
+      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
+      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
+      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
+      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
+      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
+      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
+      AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
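
Reviewer note on the new `concurrency` block: for pull-request events the group key resolves to the workflow name plus the PR number, and `github.event.pull_request.number` is empty for other events, so the expression falls back to `github.ref`. The effect is that a new push to the same PR or branch cancels the still-running build for the previous head. A minimal standalone sketch of the same pattern (the workflow name `demo` and the echo step are illustrative, not part of this patch):

```yaml
# Minimal sketch of the cancel-superseded-runs pattern used in this patch.
# The workflow name "demo" and the echo step are illustrative only.
name: demo
on: [push, pull_request]

concurrency:
  # PR runs share a group keyed by PR number; pushes fall back to the ref,
  # so only runs for the same PR or branch cancel each other.
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - run: echo "only the newest run in this group keeps going"
```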
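The paired checkout steps exist because `pull_request_target` runs check out the base branch by default (the event grants secret access); the second step explicitly checks out the PR head SHA so it is the contributor's code that actually gets tested. Condensed, the pattern is the following sketch (illustrative restatement, same actions as in the patch):

```yaml
# Sketch of the fork-safe checkout pattern used above (illustrative).
steps:
  # Non-pull_request_target events: default checkout of the triggering ref.
  - uses: actions/checkout@v3
    if: github.event_name != 'pull_request_target'
  # pull_request_target events: explicitly check out the PR head,
  # since the default checkout would be the base branch.
  - uses: actions/checkout@v3
    with:
      ref: ${{ github.event.pull_request.head.sha }}
    if: github.event_name == 'pull_request_target'
```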
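The nox step splits the integration suite with pytest-split flags (`--splits 11 --group N --store-durations`), so each matrix job runs one duration-balanced slice and uploads its timing file and per-group `.coverage` file as artifacts. The patch itself shows no consumer for those artifacts; a hypothetical downstream job that merges the eleven coverage files might look like the sketch below (the job name, download path, and `coverage combine` step are assumptions, not part of this diff):

```yaml
# Hypothetical follow-up job -- NOT part of this patch.
# Gathers the per-group coverage artifacts and merges them into one report.
Combine-Coverage:
  needs: Run-Integration-tests-Airflow-2-7
  runs-on: ubuntu-latest
  steps:
    - uses: actions/checkout@v3
    - uses: actions/setup-python@v3
      with:
        python-version: "3.10"
    # download-artifact@v2 with no "name" fetches every artifact,
    # one sub-directory per artifact name, under the given path.
    - uses: actions/download-artifact@v2
      with:
        path: /tmp/coverage
    - run: pip3 install coverage
    - run: coverage combine /tmp/coverage/coverage-*-integration-tests/.coverage
    - run: coverage report
```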