# .gitlab-ci.yml

# Use stages to define stages that contain groups of jobs. Use stage in a job
# to configure the job to run in a specific stage.
stages:
  - test
  - docs
  - publish

# Global default environment variables set for all jobs unless overridden by
# job-specific configuration.
variables:
  # Make sure output supports UTF-8
  LC_ALL: "C.UTF-8"
  LANG: "C.UTF-8"

# Global default parameters set for all jobs unless overridden by job-specific
# configuration.
default:
  image: python:3.8
  interruptible: true
  tags:
    - kitware

###############################################################################
# Run Conditions
#
# In the future, this could be broken out into a separate file that we
# `include` here.
#
# REMINDER: The "." prefix causes the "job" to be hidden (does not get run),
# but can still be used for inheritance.

# Run rules to activate at the major junction points: merge requests, tag
# pipelines and branch pipelines for main.
.run_automatically:
  rules:
    # If changes are made to an active merge request.
    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
      when: on_success
    # If changes are pushed for a tag.
    - if: $CI_COMMIT_TAG
      when: on_success
    # If changes are pushed to the default branch.
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
      when: on_success
    - when: never  # explicit fail-exclude terminal condition.
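    # Added note (not from the original file): rules are evaluated top to
    # bottom and the first match wins, so the terminal `when: never` means any
    # other pipeline source (e.g. a push to a non-default branch with no open
    # merge request) does not create jobs that inherit these rules.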

# Run rule to trigger on a tag push/update that matches our expected release
# versioning pattern.
.run_on_upstream_version_tag:
  rules:
    # If changes are pushed for a tag and the tag matches the release version
    # pattern. Also check that the repository is the official upstream
    # source.
    - if: $CI_COMMIT_TAG =~ /^v\d+(\.\d+)*$/ && $CI_SERVER_HOST == "gitlab.jatic.net" && $CI_PROJECT_NAMESPACE == "jatic/kitware"
      when: on_success
    - when: never  # explicit fail-exclude terminal condition.
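    # Added illustration (not from the original file): under the tag pattern
    # above, tags such as "v1.2" or "v0.20.1" would trigger this rule (when
    # pushed on the upstream host/namespace), while "1.2.3" (no leading "v")
    # or "v1.2.3rc1" (extra suffix) would not.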

###############################################################################
# Jobs -- Testing
#
# In the future, `.`-prefixed templates could be broken out into a separate
# file that we `include` here.
#
# REMINDER: The "." prefix causes the "job" to be hidden (does not get run),
# but can still be used for inheritance.

# For internal git dependencies
.setup_ci_git: &setup_ci_git
  - git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.jatic.net".insteadOf "ssh://[email protected]"
  - git config --global url."https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.jatic.net/".insteadOf "[email protected]:"
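# Added illustration (not from the original file): with the rewrites above, a
# hypothetical dependency URL such as
#   ssh://[email protected]/some-group/some-project.git
# would be cloned by git as
#   https://gitlab-ci-token:${CI_JOB_TOKEN}@gitlab.jatic.net/some-group/some-project.git
# letting CI jobs fetch internal packages with the job token instead of SSH keys.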

.setup_poetry:
  variables:
    # Change pip's cache directory to be inside the project directory since we
    # can only cache local items. Same for the poetry cache below.
    PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
  cache:
    # We are only caching the pip cache, NOT THE VENV. Caches should be
    # python version agnostic.
    - key: py-package-cache
      paths:
        - $PIP_CACHE_DIR
  before_script:
    - export PATH=${HOME}/.local/bin:${PATH}
    # Will make use of .cache/pip
    - pip install --user -U poetry
    - command -v python
    - python --version
    - command -v pip
    - pip --version
    - command -v poetry
    - poetry -V
    - poetry config --local virtualenvs.in-project true
    - *setup_ci_git

.poetry_install:
  extends:
    - .setup_poetry
  variables:
    # Change poetry's cache directory to be inside the project directory since
    # we can only cache local items.
    POETRY_CACHE_DIR: "$CI_PROJECT_DIR/.cache/poetry"
  cache:
    # We are only caching the poetry cache, NOT THE VENV. Caches should be
    # python version agnostic.
    - !reference [.setup_poetry, cache]
    - key: py-poetry-cache
      paths:
        - $POETRY_CACHE_DIR
  before_script:
    - !reference [.setup_poetry, before_script]
    # Will make use of .cache/poetry
    - poetry install --sync
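  # Added note (not from the original file): the intent of the `!reference`
  # entries above is that jobs extending `.poetry_install` end up with both the
  # pip and poetry cache entries, and a `before_script` that runs the
  # `.setup_poetry` steps first and then `poetry install --sync`.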

.test_preamble:
  extends:
    - .run_automatically
  stage: test
  # Note: usage of default image/interruptible/tags

.test_defaults:
  extends:
    - .test_preamble
    - .poetry_install

.test_coverage:
  extends:
    - .test_preamble
  before_script:
    - export PATH=${HOME}/.local/bin:${PATH}
    - pip install --user coverage

.test_headless:
  extends:
    - .test_defaults
  before_script:
    - !reference [.test_defaults, before_script]
    - |
      VERSION="$(poetry run pip list --format=json | poetry run ./scripts/pycv2_installed_version.py)"
      if [[ "$?" -eq 0 ]]
      then
        echo "OpenCV-Python installed, replacing with equivalent headless version."
        poetry run pip uninstall -y opencv-python opencv-python-headless
        poetry run pip install --no-deps opencv-python-headless=="$VERSION"
      else
        echo "OpenCV-Python NOT installed, skipping."
      fi
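# Added note (not from the original file): opencv-python-headless exposes the
# same Python API as opencv-python but without the GUI bindings, so the swap
# above presumably lets these jobs run in containers without GUI system
# libraries installed.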

# Job to typecheck python code
test-py-typecheck:
  extends: .test_defaults
  script:
    - poetry run mypy

test-docs-build:
  extends: .test_headless
  script:
    - cd docs
    - poetry run make html
  artifacts:
    paths:
      - docs/_build/html/

test-py-lint:
  extends: .test_defaults
  script:
    - poetry run flake8

# Job to run unit tests via pytest
test-pytest:
  extends: .test_headless
  parallel:
    matrix:
      - PY_VERSION: [ "3.8", "3.9", "3.10", "3.11", "3.12" ]
  variables:
    COVERAGE_TARGET: ".coverage-py${PY_VERSION}"
    JUNIT_TARGET: ".junit-py${PY_VERSION}.xml"
  image: python:${PY_VERSION}
  script:
    - poetry run pytest --junit-xml="${JUNIT_TARGET}"
    # Rename the output coverage database file appropriately
    - mv .coverage "${COVERAGE_TARGET}"
  artifacts:
    paths:
      - ${COVERAGE_TARGET}
    reports:
      junit: "${JUNIT_TARGET}"
    expire_in: 1 day
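# Added note (not from the original file): the `parallel:matrix:` above expands
# this into one job per listed Python version (named e.g. "test-pytest: [3.9]"),
# each uploading its own ".coverage-py<version>" and ".junit-py<version>.xml"
# artifacts; the coverage jobs below combine the per-version databases.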

# Job to combine the test coverage reports into a single one for reporting to
# GitLab
test-coverage-report:
  extends: .test_coverage
  needs:
    - job: test-pytest
      artifacts: true
  script:
    # Combine all the coverage reports from the pytest matrix into a single
    # database file.
    - coverage combine ./.coverage*
    # This converts the now combined `.coverage` database file into a single
    # `coverage.xml` file
    - coverage xml
  artifacts:
    paths:
      - ".coverage"
    reports:
      coverage_report:
        coverage_format: cobertura
        path: coverage.xml
    expire_in: 1 day

test-coverage-percent:
  extends: .test_coverage
  needs:
    - job: test-coverage-report
      artifacts: true
  allow_failure: true
  script:
    # TODO: Read required percentages from config file?
    - coverage report --include 'pybsm/*' --precision=2 --fail-under=99
    - coverage report --include 'tests/*' --precision=2 --fail-under=100

# Job to check the release notes folder
#
# We only want to run this when a "run_automatically" rule is true, except for
# when the latest commit is a merge commit (assuming standard merge commit
# text).
#
test-release-notes-check:
  extends: .test_preamble
  allow_failure: true
  script:
    - git fetch
    - scripts/check_for_release_notes.sh origin/${CI_MERGE_REQUEST_TARGET_BRANCH_NAME}
  rules:
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $CI_COMMIT_TITLE =~ /Merge branch.*/
      when: never
    - if: $CI_COMMIT_BRANCH == "release" && $CI_COMMIT_TITLE =~ /Merge branch.*/
      when: never
    - !reference [.run_automatically, rules]  # Don't overwrite normal rules

# # Job to ensure the provided example script still runs
# NOTE: this version of tests was to confirm the original pybsm_example.py
# functioned properly, just as a dummy placeholder for unit testing. Until
# we've updated this and/or the unit tests, we're using the notebook below.
# test-pytest:
#   extends: .test_defaults
#   parallel:
#     matrix:
#       - PY_VERSION: [ "3.8", "3.9", "3.10", "3.11" ]
#   image: python:${PY_VERSION}
#   script:
#     - poetry run pip uninstall -qy opencv-python opencv-python-headless
#     - poetry run pip install -q opencv-python-headless
#     - poetry run python examples/pybsm_example.py
#     - ls examples/fig_2.png
#     - ls examples/fig_4a.png
#     - ls examples/fig_5a.png
#     - ls examples/fig_showcase.png

# Job to test-run the example jupyter notebooks
#
# This job has a parallel matrix to parameterize different working directories
# and the notebooks within them to run. Each parallel instance of this job
# should only run a single notebook.
#
# See GitLab docs for parallel-matrix functionality:
#   https://docs.gitlab.com/ee/ci/yaml/#parallelmatrix
#
# The parallel-matrix list may have multiple items, and each entry should have
# a pair of keys: "NOTEBOOK_DIR" and "NOTEBOOK_FILE". (Given the documentation
# for the parallel-matrix functionality, combinatorics are only applied within
# an item, not across items.) A sketch of a multi-entry matrix follows this
# list.
# * "NOTEBOOK_DIR" should be a single string that names the directory in which
#   notebook files should be run (basically the working directory, generally
#   the directory the notebook lives in). This path should be relative to the
#   root of the repository.
# * "NOTEBOOK_FILE" should be a list of strings that denote the notebook files
#   to be run. These paths should be relative to the "NOTEBOOK_DIR". Files in
#   this list will be combinatorially combined with the path provided in the
#   associated "NOTEBOOK_DIR" to create a job parameterization instance.
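#
# Added sketch (not part of the original config) of what a multi-entry matrix
# could look like; the directory and notebook names below are hypothetical and
# for illustration only:
#
#   parallel:
#     matrix:
#       - NOTEBOOK_DIR: "examples"
#         NOTEBOOK_FILE: [
#           "some_notebook_a.ipynb",
#           "some_notebook_b.ipynb"
#         ]
#       - NOTEBOOK_DIR: "examples/other_dir"
#         NOTEBOOK_FILE: [ "another_notebook.ipynb" ]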
test-notebooks:
  extends: .test_defaults
  # Note: usage of default image/interruptible/tags
  variables:
    TORCH_HOME: "${CI_PROJECT_DIR}/.cache/torch"
  # Merge inherited caches
  cache:
    - !reference [.test_defaults, cache]
    - key: dummy-cache
      paths:
        - ${TORCH_HOME}
  # Specifying the various notebooks that we want to be tested. Each invocation
  # of this job should try to execute only one notebook via papermill.
  parallel:
    matrix:
      # Sequences combinatorially combine within a list entry
      - NOTEBOOK_DIR: "examples"
        NOTEBOOK_FILE: [
          "pybsm_visualization.ipynb"
        ]
  # Using the default container image defined above
  script:
    - cd "${NOTEBOOK_DIR}"
    - poetry run papermill
        --progress-bar -k python3 --stdout-file - --stderr-file -
        "${NOTEBOOK_FILE}" /dev/null

pages:
  stage: docs
  needs:
    - job: test-docs-build
      artifacts: true
  script:
    - mv docs/_build/html/ public/
  artifacts:
    paths:
      - public
  rules:
    # Only run this job on the main (default) branch
    - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH

###############################################################################
# Jobs -- Publishing
#
# Jobs related to automation around publishing our package.
#
# When a tag is created matching the release pattern, build and publish our
# package to PYPI.
publish-on-tag:
  extends:
    - .run_on_upstream_version_tag
    - .setup_poetry
  # Note: usage of default image/interruptible/tags
  stage: publish
  script:
    # Ensure there is alignment between the tag reference and the reported
    # version of the package
    - |
      PACKAGE_VERSION="v$(poetry version -s)"
      if [[ "${CI_COMMIT_TAG}" != "${PACKAGE_VERSION}" ]]
      then
        echo "ERROR: Git tag reference and package version are NOT synonymous."
        echo "       Package version: ${PACKAGE_VERSION}"
        echo "       Git ref name   : ${CI_COMMIT_TAG}"
        exit 1
      fi
    # Ensure that we have a token
    - |
      if [[ -z "${PYPI_PUBLISH_TOKEN}" ]]
      then
        echo "ERROR: Expected PYPI token variable was blank."
        echo "       Did you forget to set the appropriate PYPI_PUBLISH_TOKEN secret?"
        exit 1
      fi
    # Actually publish
    - |
      export POETRY_PYPI_TOKEN_PYPI="${PYPI_PUBLISH_TOKEN}"
      poetry publish --build