
Commit

test
Lee-W committed Oct 5, 2023
1 parent ca99146 commit 5ea20ae
91 changes: 45 additions & 46 deletions .github/workflows/ci-rc-test.yaml
@@ -556,49 +556,48 @@ jobs:
          AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
          AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}

#  Delete-Databricks-Cluster:
#    if: ${{ always() }}
#    needs:
#      - Create-Databricks-Cluster
#      - Run-Optional-Packages-tests-python-sdk
#      - Run-Unit-tests-Airflow-2-7
#      - Run-example-dag-tests-Airflow-2-2-5
#      - Run-Integration-tests-Airflow-2-7
#      - Run-load-file-Integration-Airflow-2-7
#      - Run-example-dag-Integration-Airflow-2-7
#    runs-on: ubuntu-latest
#    steps:
#      - uses: actions/checkout@v3
#        if: github.event_name != 'pull_request_target'
#
#      - name: Checkout pull/${{ github.event.number }}
#        uses: actions/checkout@v3
#        with:
#          ref: ${{ github.event.pull_request.head.sha }}
#        if: github.event_name == 'pull_request_target'
#
#      - uses: actions/setup-python@v3
#        with:
#          python-version: "3.8"
#          architecture: "x64"
#
#      - uses: actions/cache@v3
#        with:
#          path: |
#            ~/.cache/pip
#            .nox
#          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
#
#      - name: Install dependencies
#        working-directory: python-sdk/dev/scripts
#        run: pip install -r requirements.txt
#
#      - name: Terminate Databricks cluster
#        id: terminate_databricks_cluster_and_wait
#        working-directory: python-sdk/dev/scripts
#        run: |
#          python databricks.py terminate_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN --cluster-id $AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID
#        env:
#          AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
#          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
#          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
  Delete-Databricks-Cluster:
    if: ${{ always() }}
    needs:
      - Run-Optional-Packages-tests-python-sdk
      - Run-Unit-tests-Airflow-2-7
      - Run-example-dag-tests-Airflow-2-2-5
      - Run-Integration-tests-Airflow-2-7
      - Run-load-file-Integration-Airflow-2-7
      - Run-example-dag-Integration-Airflow-2-7
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'

      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'

      - uses: actions/setup-python@v3
        with:
          python-version: "3.8"
          architecture: "x64"

      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}

      - name: Install dependencies
        working-directory: python-sdk/dev/scripts
        run: pip install -r requirements.txt

      - name: Terminate Databricks cluster
        id: terminate_databricks_cluster_and_wait
        working-directory: python-sdk/dev/scripts
        run: |
          python databricks.py terminate_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN --cluster-id $AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID
        env:
          AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ needs.Create-Databricks-Cluster.outputs.databricks_cluster_id }}
          DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
          DATABRICKS_HOST: ${{ secrets.DATABRICKS_HOST }}
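
The "Terminate Databricks cluster" step shells out to python-sdk/dev/scripts/databricks.py, which is not part of this diff. Purely as an illustrative sketch, a terminate_cluster command with the same CLI shape might wrap the Databricks Clusters API clusters/delete endpoint roughly as follows; the argparse layout and the use of requests here are assumptions, and the real helper script may differ.

# Hypothetical sketch of a terminate_cluster command with the CLI shape used above.
# The actual python-sdk/dev/scripts/databricks.py is not shown in this commit.
import argparse

import requests  # assumed to be provided by requirements.txt


def terminate_cluster(host: str, token: str, cluster_id: str) -> None:
    """Ask the Databricks Clusters API to terminate the given cluster."""
    # POST /api/2.0/clusters/delete terminates (but does not permanently delete)
    # the cluster identified by cluster_id.
    response = requests.post(
        f"{host.rstrip('/')}/api/2.0/clusters/delete",
        headers={"Authorization": f"Bearer {token}"},
        json={"cluster_id": cluster_id},
        timeout=60,
    )
    response.raise_for_status()


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Minimal Databricks cluster helper (sketch)")
    subparsers = parser.add_subparsers(dest="command", required=True)

    terminate = subparsers.add_parser("terminate_cluster")
    terminate.add_argument("host")
    terminate.add_argument("token")
    terminate.add_argument("--cluster-id", required=True)

    args = parser.parse_args()
    if args.command == "terminate_cluster":
        terminate_cluster(args.host, args.token, args.cluster_id)

A helper shaped like this would be invoked exactly as in the run step above: python databricks.py terminate_cluster $DATABRICKS_HOST $DATABRICKS_TOKEN --cluster-id $AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID.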
