os: linux
dist: bionic
language: python
python:
- '3.8'
cache: pip
env:
global:
- DEBIAN_FRONTEND=noninteractive
- POSTGRES_HOST=localhost
- USASPENDING_DB_HOST=localhost
- USASPENDING_DB_PORT=5432
- USASPENDING_DB_USER=usaspending
- USASPENDING_DB_PASSWORD=usaspender
- USASPENDING_DB_NAME=data_store_api
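# The two connection strings below are assembled from the DB vars above and both point at the same local Postgres service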
- DATABASE_URL=postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/${USASPENDING_DB_NAME}
- DOWNLOAD_DATABASE_URL=postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/${USASPENDING_DB_NAME}
- DJANGO_SETTINGS_MODULE='usaspending_api.settings'
- ES_SCHEME=http
- ES_HOST=localhost
- ES_PORT=9200
- ES_HOSTNAME=${ES_SCHEME}://${ES_HOST}:${ES_PORT}
- BROKER_DB_HOST=localhost
- BROKER_DB_PORT=5432
- BROKER_DB_USER=admin
- BROKER_DB_PASSWORD=root
- BROKER_DB_NAME=data_broker
- DATA_BROKER_DATABASE_URL=postgres://${BROKER_DB_USER}:${BROKER_DB_PASSWORD}@${BROKER_DB_HOST}:${BROKER_DB_PORT}/${BROKER_DB_NAME}
- DATA_BROKER_SRC_PATH="${TRAVIS_BUILD_DIR}/../data-act-broker-backend" # Location on the host machine where the broker src code root can be found
- BROKER_REPO_URL=https://github.com/fedspendingtransparency/data-act-broker-backend.git
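# For pull request builds, use the broker branch matching TRAVIS_BRANCH if it exists upstream; otherwise fall back to the qat branch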
- BROKER_REPO_BRANCH=$(if [ "${TRAVIS_EVENT_TYPE}" = "pull_request" ] && [ ! -z "`git ls-remote --heads ${BROKER_REPO_URL} ${TRAVIS_BRANCH}`" ]; then echo "${TRAVIS_BRANCH}"; else echo "qat"; fi;)
- BROKER_REPO_FOLDER=${DATA_BROKER_SRC_PATH}
- BROKER_DOCKER_IMAGE=dataact-broker-backend
- GRANTS_API_KEY=${GRANTS_API_KEY}
- MINIO_DATA_DIR=${HOME}/Development/data/usaspending/docker/usaspending-s3 # needs to be the same place docker-compose will look for it (based on the .env file)
- MINIO_HOST=localhost
- PYTEST_XDIST_NUMPROCESSES=4
- COLUMNS=240 # for wider terminal output
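# TRAVIS_JOB_NUMBER has the form <build>.<job> (e.g. 1234.5); keep only the job part so each job writes a distinct coverage.<index>.xml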
- TRAVIS_JOB_INDEX="$(echo $TRAVIS_JOB_NUMBER | cut -d'.' -f2)"
jobs:
include:
- stage: Build
name: pip install
workspaces:
create:
name: pip
paths:
- $HOME/virtualenv/
- $TRAVIS_BUILD_DIR/usaspending_api.egg-info/
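# Later stages reuse this cached virtualenv via workspaces (use: [ pip ]) instead of reinstalling dependencies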
before_install: "" # override default to no-op
install:
- travis_retry pip install setuptools==60.8.2
- travis_retry pip install .[dev]
- travis_retry pip install coveralls
before_script: "" # override default to no-op
script: "" # override default to no-op
- name: docker build
workspaces:
create:
name: docker
paths:
- docker_images/
before_install: "" # override default to no-op
install:
# Check out the dependent broker code used to spin up a broker integration test db. Put it in its own folder alongside this repo's code
- echo "Using ${BROKER_REPO_BRANCH} branch from ${BROKER_REPO_URL}"
- travis_retry git clone --branch ${BROKER_REPO_BRANCH} --single-branch --depth 1 ${BROKER_REPO_URL} ${BROKER_REPO_FOLDER}
- docker build -t ${BROKER_DOCKER_IMAGE} ${BROKER_REPO_FOLDER} # build image from which to call Broker scripts
before_script: "" # override default to no-op
script: "" # override default to no-op
before_cache:
# Save off docker images+layers used by this build into the `docker_images` cache dir
- mkdir -p docker_images
- docker save -o docker_images/${BROKER_DOCKER_IMAGE}.tar ${BROKER_DOCKER_IMAGE}
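# Test jobs restore this tarball with docker load in their before_install phase, avoiding a rebuild of the broker image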
- stage: Static Code Analysis
name: flake8
workspaces:
use: [ pip ]
before_install: "" # override default to no-op
install: "" # override default to no-op
before_script: "" # override default to no-op
script:
- flake8
- name: black
workspaces:
use: [ pip ]
before_install: "" # override default to no-op
install: "" # override default to no-op
before_script: "" # override default to no-op
script:
- black --check --diff .
- name: API Docs
workspaces:
use: [ pip ]
before_install: "" # override default to no-op
install: travis_retry npm install --global [email protected] # add dredd for API contract testing
before_script: "" # override default to no-op
script:
- python manage.py check_for_endpoint_documentation
- dredd > dredd-results.txt && echo '! grep -E "^(warn:|error:)" dredd-results.txt' | bash
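# The negated grep exits non-zero (failing this job) if any warn:/error: lines appear in the dredd output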
- stage: Automated Tests
# NOTE: See conftest.py pytest_collection_modifyitems for how Marks are assigned to tests
name: Spark Integration Tests - test_load_transactions_in_delta_fabs_fpds.py
env:
# Concurrent sessions do not seem to be efficient for this test; keeping it at 1 session
- PYTEST_XDIST_NUMPROCESSES=0
- PYTEST_SETUP_TEST_DATABASES=true
- PYTEST_PRELOAD_SPARK_JARS=true
- PYTEST_INCLUDE_GLOB='test_*.py *_test.py'
- PYTEST_EXCLUDE_GLOB=
- PYTEST_MATCH_EXPRESSION=test_load_transactions_in_delta_fabs_fpds.py
- PYTEST_MARK_EXPRESSION=spark
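# PYTEST_MATCH_EXPRESSION is passed to pytest -k to restrict the run to this one module; PYTEST_MARK_EXPRESSION is passed to -m to select only tests marked spark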
workspaces:
use: [ pip,docker ]
create:
name: ws1
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Spark Integration Tests - test_load_transactions_in_delta_lookups.py
env:
# Concurrent sessions do not seem to be efficient for this test; keeping it at 1 session
- PYTEST_XDIST_NUMPROCESSES=0
- PYTEST_SETUP_TEST_DATABASES=true
- PYTEST_PRELOAD_SPARK_JARS=true
- PYTEST_INCLUDE_GLOB='test_*.py *_test.py'
- PYTEST_EXCLUDE_GLOB=
- PYTEST_MATCH_EXPRESSION=test_load_transactions_in_delta_lookups.py
- PYTEST_MARK_EXPRESSION=spark
workspaces:
use: [ pip,docker ]
create:
name: ws2
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Spark Integration Tests - test_load_to_from_delta.py
env:
- PYTEST_XDIST_NUMPROCESSES=2
- PYTEST_SETUP_TEST_DATABASES=true
- PYTEST_PRELOAD_SPARK_JARS=true
- PYTEST_INCLUDE_GLOB='test_*.py *_test.py'
- PYTEST_EXCLUDE_GLOB=
- PYTEST_MATCH_EXPRESSION=test_load_to_from_delta.py
- PYTEST_MARK_EXPRESSION=spark
workspaces:
use: [ pip,docker ]
create:
name: ws3
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Spark Integration Tests - Other
env:
- PYTEST_XDIST_NUMPROCESSES=4
- PYTEST_SETUP_TEST_DATABASES=true
- PYTEST_PRELOAD_SPARK_JARS=true
- PYTEST_INCLUDE_GLOB='test_*.py *_test.py'
- PYTEST_EXCLUDE_GLOB=
- PYTEST_MATCH_EXPRESSION='(not test_load_to_from_delta.py and not test_load_transactions_in_delta_lookups.py and not test_load_transactions_in_delta_fabs_fpds.py)'
- PYTEST_MARK_EXPRESSION=spark
workspaces:
use: [ pip,docker ]
create:
name: ws4
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Non-Spark Integration Tests
env:
- PYTEST_SETUP_TEST_DATABASES=true
- PYTEST_PRELOAD_SPARK_JARS=false
- PYTEST_INCLUDE_GLOB=**/tests/integration/*
- PYTEST_EXCLUDE_GLOB=
- PYTEST_MATCH_EXPRESSION=
- PYTEST_MARK_EXPRESSION='(not spark and not signal_handling)'
workspaces:
use: [ pip,docker ]
create:
name: ws5
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Non-Spark Integration Tests - Using Signal Handling
env:
- PYTEST_XDIST_NUMPROCESSES=0
- PYTEST_SETUP_TEST_DATABASES=true
- PYTEST_PRELOAD_SPARK_JARS=false
- PYTEST_INCLUDE_GLOB=**/tests/integration/*
- PYTEST_EXCLUDE_GLOB=
- PYTEST_MATCH_EXPRESSION=
- PYTEST_MARK_EXPRESSION='(signal_handling and not spark)'
workspaces:
use: [ pip,docker ]
create:
name: ws6
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Unit Tests
env:
- PYTEST_SETUP_TEST_DATABASES=false
- PYTEST_PRELOAD_SPARK_JARS=false
- PYTEST_INCLUDE_GLOB='test_*.py *_test.py'
- PYTEST_EXCLUDE_GLOB=**/tests/integration/*
- PYTEST_MATCH_EXPRESSION=
- PYTEST_MARK_EXPRESSION='(not spark and not database and not elasticsearch and not signal_handling)'
workspaces:
use: [ pip,docker ]
create:
name: ws7
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- name: Unit Tests - Using Signal Handling
env:
- PYTEST_XDIST_NUMPROCESSES=0
- PYTEST_SETUP_TEST_DATABASES=false
- PYTEST_PRELOAD_SPARK_JARS=false
- PYTEST_INCLUDE_GLOB='test_*.py *_test.py'
- PYTEST_EXCLUDE_GLOB=**/tests/integration/*
- PYTEST_MATCH_EXPRESSION=
- PYTEST_MARK_EXPRESSION='(signal_handling)'
workspaces:
use: [ pip,docker ]
create:
name: ws8
paths:
- coverage.*.xml
# Inherits "global" job phases defined below (e.g. before_install, install, before_script, script, etc.)
- stage: Code Coverage
# NOTE: This stage will only run if ALL test stage jobs passed (there's no point in collecting and reporting coverage if they did not)
env:
- IS_CODE_COVERAGE_REPORT=true
workspaces:
use: [ws1,ws2,ws3,ws4,ws5,ws6,ws7,ws8]
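# Pulls in the coverage.*.xml files produced by each of the test jobs above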
before_install: "" # override default to no-op
install: "" # override default to no-op
before_script:
# Get dependencies to report code coverage to Code Climate
- travis_retry curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
- chmod +x ./cc-test-reporter
script:
- ls -lh coverage*
- for cf in coverage.*.xml; do ./cc-test-reporter format-coverage --prefix $TRAVIS_BUILD_DIR --input-type coverage.py --output coverage/codeclimate.$(echo "$cf" | cut -d'.' -f2).xml coverage.$(echo "$cf" | cut -d'.' -f2).xml; done
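# Converts each coverage.<index>.xml into Code Climate's format at coverage/codeclimate.<index>.xml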
- ls coverage/
- ./cc-test-reporter sum-coverage --output - --parts $(find . -maxdepth 1 -name 'coverage.*.xml' | wc -l) ./coverage/codeclimate.*.xml | ./cc-test-reporter upload-coverage --input -
####
#### Below are the default job lifecycle phases if they are not overridden above on a per-job basis
####
before_install:
# Reload any cached images and their layers from previous builds, to shorten docker pulls/builds
- docker load -i docker_images/*.tar || true
install:
# Check out the dependent broker code used to spin up a broker integration test db. Put it in its own folder alongside this repo's code
- echo "Using ${BROKER_REPO_BRANCH} branch from ${BROKER_REPO_URL}"
- travis_retry git clone --branch ${BROKER_REPO_BRANCH} --single-branch --depth 1 ${BROKER_REPO_URL} ${BROKER_REPO_FOLDER}
before_script:
# Get dependencies to report code coverage to Code Climate
- travis_retry curl -L https://codeclimate.com/downloads/test-reporter/test-reporter-latest-linux-amd64 > ./cc-test-reporter
- chmod +x ./cc-test-reporter
- ./cc-test-reporter before-build
- make docker-compose-up-usaspending args="-d usaspending-db usaspending-es"
# Wait for services to be up
- ttl=30; echo "Try DB conn from container for $ttl seconds"; until [ $ttl -le 0 ] || psql $DATABASE_URL -c 'select 1 where 1=1'; do echo $ttl; ((ttl--)); sleep 1; done; [ $ttl -gt 0 ]
- ttl=30; echo "Try ES conn from container for $ttl seconds"; until [ $ttl -le 0 ] || curl --silent -XGET --fail $ES_HOSTNAME; do echo $ttl; ((ttl--)); sleep 1; done; [ $ttl -gt 0 ]
# Our Postgres DB provided by Travis needs the (super)users specified by our env var DB URLs to exist
- psql postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/postgres -c "ALTER USER ${USASPENDING_DB_USER} SET search_path TO public,raw,int,temp,rpt"
- psql postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/postgres -c "CREATE USER ${BROKER_DB_USER} PASSWORD '${BROKER_DB_PASSWORD}' SUPERUSER"
# The Postgres DB also needs a readonly user that is referenced in some code
- psql postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/postgres -c "CREATE ROLE readonly;"
- >
psql postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/postgres -c "\copy (
SELECT
'GRANT USAGE ON SCHEMA ' || nspname || ' TO readonly; '
|| 'GRANT SELECT ON ALL TABLES IN SCHEMA ' || nspname || ' TO readonly; '
|| 'ALTER DEFAULT PRIVILEGES IN SCHEMA ' || nspname || ' GRANT SELECT ON TABLES TO readonly; '
FROM pg_namespace WHERE nspname IN ('raw','int','rpt','temp','public')
) TO grant_to_readonly.sql;"
- psql postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/postgres -c "\i grant_to_readonly.sql"
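# The \copy above wrote one set of GRANT / ALTER DEFAULT PRIVILEGES statements per schema to grant_to_readonly.sql; the \i executes them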
# Trigger the setup of multiple test DBs (one per --numprocesses worker) that will be left for the next pytest run to reuse
# Also, must manually set --numprocesses on Travis CI VMs; can't use auto (see: https://github.com/pytest-dev/pytest-xdist/pull/317)
- if [ "${PYTEST_SETUP_TEST_DATABASES}" = true ]; then pytest --create-db --reuse-db --numprocesses ${PYTEST_XDIST_NUMPROCESSES} --no-cov --disable-warnings -r=fEs --verbosity=3 --capture=no --log-cli-level=WARNING --show-capture=log 2> /dev/null 'usaspending_api/tests/integration/test_setup_of_test_dbs.py::test_trigger_test_db_setup'; fi;
# Trigger preloading of Spark-dependent JARs to avoid an Ivy (Maven) repo download race condition between multiple pytest-xdist worker test sessions
- if [ "${PYTEST_PRELOAD_SPARK_JARS}" = true ]; then pytest --no-cov --disable-warnings -r=fEs --verbosity=3 'usaspending_api/tests/integration/test_setup_of_spark_dependencies.py::test_preload_spark_jars'; fi;
- psql postgres://${USASPENDING_DB_USER}:${USASPENDING_DB_PASSWORD}@${USASPENDING_DB_HOST}:${USASPENDING_DB_PORT}/postgres -c "\l"
# Ensure MinIO can run as our s3 substitute
- mkdir -p "${MINIO_DATA_DIR}"
- mkdir -p "${HOME}/.ivy2"
- make docker-compose-up-s3 args="-d"
script:
- stty cols 240 # for wider terminal output
- cd ${TRAVIS_BUILD_DIR} # run build script out of repo dir
# Check that no integration tests are outside **/tests/integration/ folders
- return $(pytest --collect-only --quiet --ignore-glob='**/tests/integration/*' -m '(spark or database or elasticsearch)' --no-cov --disable-warnings | grep '^usaspending_api.*$' | wc -l)
- test $? -gt 0 && echo 'Failing because integration tests would be improperly captured as unit tests. Run the previous pytest command locally to figure out which to move to a **/tests/integration/ folder'
# Must manually set --numprocesses on Travis CI VMs; can't use auto (see: https://github.com/pytest-dev/pytest-xdist/pull/317)
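# --dist worksteal lets idle xdist workers take over queued tests from busy ones; coverage is written to coverage.<job index>.xml for the Code Coverage stage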
- pytest --override-ini=python_files="${PYTEST_INCLUDE_GLOB}" --ignore-glob="${PYTEST_EXCLUDE_GLOB}" -m "${PYTEST_MARK_EXPRESSION}" -k "${PYTEST_MATCH_EXPRESSION}" --cov=usaspending_api --cov-report term --cov-report xml:coverage.$TRAVIS_JOB_INDEX.xml --reuse-db -r=fEs --numprocesses ${PYTEST_XDIST_NUMPROCESSES} --dist worksteal --verbosity=1 --durations 50