-
Notifications
You must be signed in to change notification settings - Fork 45
132 lines (128 loc) · 5.18 KB
/
ci-rc-test.yaml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
---
name: Build and test astro Python SDK
# Manually triggered only; no push/PR triggers are configured here.
on:
  workflow_dispatch:
defaults:
  run:
    working-directory: python-sdk
# This allows a subsequently queued workflow run to interrupt and cancel previous runs
concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true
# This list should only have non-sensitive env vars
# Env vars with secrets should be in the specific jobs
env:
  SETUPTOOLS_USE_DISTUTILS: stdlib
  POSTGRES_HOST: postgres
  POSTGRES_PORT: 5432
  AIRFLOW__ASTRO_SDK__SQL_SCHEMA: astroflow_ci
  REDSHIFT_DATABASE: dev
  REDSHIFT_HOST: utkarsh-cluster.cdru7mxqmtyx.us-east-2.redshift.amazonaws.com
  SNOWFLAKE_SCHEMA: ASTROFLOW_CI
  SNOWFLAKE_DATABASE: SANDBOX
  SNOWFLAKE_WAREHOUSE: DEMO
  SNOWFLAKE_HOST: https://gp21411.us-east-1.snowflakecomputing.com
  SNOWFLAKE_ACCOUNT: gp21411
  SNOWFLAKE_REGION: us-east-1
  SNOWFLAKE_ROLE: AIRFLOW_TEST_USER
  SFTP_HOSTNAME: ${{ secrets.SFTP_HOSTNAME }}
  SFTP_USERNAME: ${{ secrets.SFTP_USERNAME }}
  SFTP_PASSWORD: ${{ secrets.SFTP_PASSWORD }}
  # Quoted: bare `True` is a YAML 1.1 boolean; Airflow reads this env var as a string.
  AIRFLOW__CORE__LOAD_DEFAULT_CONNECTIONS: "True"
  AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}
  AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
  AIRFLOW_VAR_FOO: templated_file_name
  AWS_BUCKET: tmp9
  GOOGLE_BUCKET: dag-authoring
  FORCE_COLOR: "true"
  MSSQL_DB: ${{ secrets.MSSQL_DB }}
  MSSQL_HOST: ${{ secrets.MSSQL_HOST }}
  MSSQL_LOGIN: ${{ secrets.MSSQL_LOGIN }}
  MSSQL_PASSWORD: ${{ secrets.MSSQL_PASSWORD }}
  MYSQL_DB: ${{ secrets.MYSQL_DB }}
  MYSQL_HOST: ${{ secrets.MYSQL_HOST }}
  MYSQL_LOGIN: ${{ secrets.MYSQL_LOGIN }}
  MYSQL_PASSWORD: ${{ secrets.MYSQL_PASSWORD }}
jobs:
  # Integration tests against Airflow 2.7, split into 11 parallel pytest groups.
  Run-Integration-tests-Airflow-2-7:
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
    runs-on: ubuntu-latest
    services:
      postgres:
        # Docker Hub image
        image: dimberman/pagila-test
        env:
          POSTGRES_PASSWORD: postgres
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # Port mappings are quoted: unquoted `HH:MM`-shaped values (e.g. 21:21)
          # parse as sexagesimal integers under YAML 1.1.
          - "5432:5432"
      sftp:
        image: ghcr.io/astronomer/astro-sdk/sftp_docker
        ports:
          - "2222:22"
      ftp:
        image: ghcr.io/astronomer/astro-sdk/ftp_docker
        ports:
          - "21:21"
          - "30000-30009:30000-30009"
        env:
          FTP_USER_NAME: ${{ secrets.SFTP_USERNAME }}
          FTP_USER_PASS: ${{ secrets.SFTP_PASSWORD }}
          FTP_USER_HOME: /home/foo
          PUBLICHOST: "localhost"
    steps:
      # Plain checkout for non-fork events; pull_request_target checks out the PR head below.
      - uses: actions/checkout@v3
        if: github.event_name != 'pull_request_target'
      - name: Checkout pull/${{ github.event.number }}
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
        if: github.event_name == 'pull_request_target'
      - uses: actions/setup-python@v3
        with:
          python-version: "3.10"
          architecture: "x64"
      # Cache pip downloads and the nox virtualenvs, keyed on the SDK's
      # dependency spec and version file.
      - uses: actions/cache@v3
        with:
          path: |
            ~/.cache/pip
            .nox
          key: ${{ runner.os }}-2.7-${{ hashFiles('python-sdk/pyproject.toml') }}-${{ hashFiles('python-sdk/src/astro/__init__.py') }}
      - run: cat ../.github/ci-test-connections.yaml > test-connections.yaml
      # Materialize the GCP service-account JSON from the secret into a file.
      - run: python -c 'import os; print(os.getenv("GOOGLE_APPLICATION_CREDENTIALS_JSON", "").strip())' > ${{ env.GOOGLE_APPLICATION_CREDENTIALS }}
      # Pre-create the sqlite database file used by the default sqlite connection.
      - run: sqlite3 /tmp/sqlite_default.db "VACUUM;"
      - run: pip3 install nox
      - run: nox -s "test-3.10(airflow='2.7')" -- tests_integration/ -k "not test_load_file.py and not test_example_dags.py and not redshift" --splits 11 --group ${{ matrix.group }} --store-durations --durations-path /tmp/durations-${{ matrix.group }} --cov=src --cov-report=xml --cov-branch
      - run: cat /tmp/durations-${{ matrix.group }}
      # upload-artifact v1/v2 are deprecated and disabled by GitHub; v4 requires
      # unique artifact names per run, which the matrix.group suffix provides.
      - name: Upload coverage
        uses: actions/upload-artifact@v4
        with:
          name: coverage-${{ matrix.group }}-integration-tests
          path: ./python-sdk/.coverage
      - name: Collect pytest durations
        uses: actions/upload-artifact@v4
        with:
          name: pytest_durations_integration_tests_${{ matrix.group }}
          path: /tmp/durations-${{ matrix.group }}
    env:
      GOOGLE_APPLICATION_CREDENTIALS_JSON: ${{ secrets.GOOGLE_APPLICATION_CREDENTIALS_JSON }}
      GOOGLE_APPLICATION_CREDENTIALS: /tmp/google_credentials.json
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN: ${{ secrets.REDSHIFT_NATIVE_LOAD_IAM_ROLE_ARN }}
      REDSHIFT_USERNAME: ${{ secrets.REDSHIFT_USERNAME }}
      REDSHIFT_PASSWORD: ${{ secrets.REDSHIFT_PASSWORD }}
      SNOWFLAKE_ACCOUNT_NAME: ${{ secrets.SNOWFLAKE_UNAME }}
      SNOWFLAKE_PASSWORD: ${{ secrets.SNOWFLAKE_PASSWORD }}
      DATABRICKS_TOKEN: ${{ secrets.DATABRICKS_TOKEN }}
      AIRFLOW__ASTRO_SDK__DATABRICKS_CLUSTER_ID: ${{ secrets.DATABRICKS_CLUSTER_ID }}
      # NOTE(review): these two duplicate identical entries in the workflow-level
      # env block — redundant but harmless; consider removing one copy.
      AZURE_WASB_CONN_STRING: ${{ secrets.AZURE_WASB_CONN_STRING }}
      AZURE_WASB_ACCESS_KEY: ${{ secrets.AZURE_WASB_ACCESS_KEY }}