# test-upstream.yml
name: Nightly upstream testing

on:
  schedule:
    - cron: "0 0 * * *" # daily at 00:00 UTC
  workflow_dispatch: # allows you to trigger the workflow run manually
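  # For example, a manual run can be started with the GitHub CLI (assuming you
  # have write access and the workflow file is on the default branch):
  #
  #   gh workflow run test-upstream.yml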

# Required shell entrypoint to have properly activated conda environments
defaults:
  run:
    shell: bash -l {0}

jobs:
  test-dev:
    name: "Test upstream dev (${{ matrix.os }}, python: ${{ matrix.python }}, distributed: ${{ matrix.distributed }}, query-planning: ${{ matrix.query-planning }})"
    runs-on: ${{ matrix.os }}
    env:
      CONDA_FILE: continuous_integration/environment-${{ matrix.python }}.yaml
      DASK_SQL_DISTRIBUTED_TESTS: ${{ matrix.distributed }}
      DASK_DATAFRAME__QUERY_PLANNING: ${{ matrix.query-planning }}
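    # Dask maps environment variables of the form DASK_SECTION__OPTION onto its
    # config system, with the double underscore acting as the nesting separator,
    # so DASK_DATAFRAME__QUERY_PLANNING sets the dataframe.query-planning config
    # option for every step in the job.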
    strategy:
      fail-fast: false
      matrix:
        os: [ubuntu-latest, windows-latest, macos-latest]
        python: ["3.9", "3.10", "3.11", "3.12"]
        distributed: [false]
        query-planning: [true]
        include:
          # run tests on a distributed client
          - os: "ubuntu-latest"
            python: "3.9"
            distributed: true
            query-planning: true
          - os: "ubuntu-latest"
            python: "3.11"
            distributed: true
            query-planning: true
          # run tests with query planning disabled
          - os: "ubuntu-latest"
            python: "3.9"
            distributed: false
            query-planning: false
          - os: "ubuntu-latest"
            python: "3.11"
            distributed: false
            query-planning: false
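    # The base matrix expands to 12 jobs (3 operating systems x 4 Python
    # versions), and the four include entries add distributed and
    # non-query-planning variants on ubuntu-latest, for 16 jobs per night.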
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0 # Fetch all history for all branches and tags.
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: ${{ matrix.python }}
          channel-priority: strict
          activate-environment: dask-sql
          environment-file: ${{ env.CONDA_FILE }}
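      # setup-miniconda creates the dask-sql environment from CONDA_FILE and
      # activates it; combined with the `bash -l {0}` default shell above, every
      # subsequent run step executes inside that environment.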
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.72
          default: true
      - name: Install x86_64-apple-darwin target
        if: matrix.os == 'macos-latest'
        run: rustup target add x86_64-apple-darwin
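      # Adding the x86_64-apple-darwin target is presumably here so the Rust
      # extension can be built for Intel macOS regardless of the architecture of
      # the macos-latest runner.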
      - name: Build the Rust DataFusion bindings
        run: |
          maturin develop
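      # `maturin develop` compiles the Rust crate and installs the resulting
      # extension module directly into the active (dask-sql) environment, much
      # like an editable pip install.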
      - name: Install hive testing dependencies
        if: matrix.os == 'ubuntu-latest'
        run: |
          docker pull bde2020/hive:2.3.2-postgresql-metastore
          docker pull bde2020/hive-metastore-postgresql:2.3.0
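      # These images back the Hive metastore fixtures in the test suite; the
      # ubuntu-latest guard reflects that the Docker-based tests only run on the
      # Linux runners here.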
      - name: Install upstream dev Dask
        run: |
          mamba install --no-channel-priority dask/label/dev::dask
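      # The `channel/label::package` spec pulls the nightly `dask` package from
      # the dev label of the dask conda channel, replacing the released version
      # that the environment file installed.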
      - name: Install pytest-reportlog
        run: |
          # TODO: add pytest-reportlog to testing environments if we move over to JSONL output
          mamba install pytest-reportlog
      - name: Test with pytest
        id: run_tests
        run: |
          pytest --report-log test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl --cov-report=xml -n auto tests --dist loadfile
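      # --report-log (from pytest-reportlog) writes one JSON record per test to
      # the .jsonl file, -n auto (from pytest-xdist) runs one worker per
      # available core, and --dist loadfile keeps all tests from a given file on
      # the same worker.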
      - name: Upload pytest results for failure
        if: |
          always()
          && steps.run_tests.outcome != 'skipped'
        uses: actions/upload-artifact@v3
        with:
          name: test-${{ matrix.os }}-py${{ matrix.python }}-results
          path: test-${{ matrix.os }}-py${{ matrix.python }}-results.jsonl
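      # always() keeps this step running even after a test failure (the usual
      # reason to want the logs), while the outcome check skips the upload when
      # the test step itself never ran.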

  import-dev:
    name: "Test importing with bare requirements and upstream dev (query-planning: ${{ matrix.query-planning }})"
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        query-planning: [true, false]
    steps:
      - uses: actions/checkout@v4
      - name: Set up Python
        uses: conda-incubator/setup-miniconda@v3
        with:
          miniforge-variant: Mambaforge
          use-mamba: true
          python-version: "3.9"
          channel-priority: strict
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.72
          default: true
      - name: Install dependencies and nothing else
        run: |
          pip install -e . -vv
          which python
          pip list
          mamba list
      - name: Install upstream dev Dask
        run: |
          python -m pip install git+https://github.com/dask/dask
          python -m pip install git+https://github.com/dask/dask-expr
          python -m pip install git+https://github.com/dask/distributed
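      # These installs track the main branches of dask, dask-expr, and
      # distributed; this is the pip equivalent of the dask/label/dev conda
      # packages used in the test-dev job.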
      - name: Try to import dask-sql
        env:
          DASK_DATAFRAME__QUERY_PLANNING: ${{ matrix.query-planning }}
        run: |
          python -c "import dask_sql; print('ok')"

  report-failures:
    name: Open issue for upstream dev failures
    needs: [test-dev, import-dev]
    if: |
      always()
      && (
        needs.test-dev.result == 'failure'
        || needs.import-dev.result == 'failure'
      )
      && github.repository == 'dask-contrib/dask-sql'
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/download-artifact@v3
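      # With no `name` input, download-artifact@v3 fetches every artifact from
      # the run, placing each one in a directory named after the artifact; that
      # is why the script below looks for the .jsonl file inside a
      # test-ubuntu-latest-py3.10-results/ directory.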
      - name: Prepare logs & issue label
        run: |
          # TODO: remove this if xarray-contrib/issue-from-pytest-log no longer needs a log-path
          if [ -f test-ubuntu-latest-py3.10-results/test-ubuntu-latest-py3.10-results.jsonl ]; then
            cp test-ubuntu-latest-py3.10-results/test-ubuntu-latest-py3.10-results.jsonl results.jsonl
          else
            touch results.jsonl
          fi
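      # Only one representative matrix entry's log is forwarded, since the
      # action below takes a single log-path; the empty-file fallback keeps the
      # step from failing when that particular job produced no artifact.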
      - name: Open or update issue on failure
        uses: xarray-contrib/issue-from-pytest-log@v1
        with:
          log-path: results.jsonl
          issue-title: ⚠️ Upstream CI failed ⚠️
          issue-label: upstream