-
Notifications
You must be signed in to change notification settings - Fork 4
/
linux.gitlab-ci.yml
166 lines (158 loc) · 5.01 KB
/
linux.gitlab-ci.yml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
# Shared preparation template: builds the project, installs the Python test
# harness, selects the single JSON test file for this parallel node and
# downloads its test data. Extended (via `extends`) by .test below.
.prepare:
  cache:
    - key: ${CI_COMMIT_BRANCH}
      paths:
        - "${CI_PROJECT_DIR}/.m2/repository"
        # NOTE(review): GitLab normally only caches paths inside the project
        # directory — confirm ${HOME}/.cache/pip is actually restored on these
        # runners.
        - "${HOME}/.cache/pip"
        - "${CI_PROJECT_DIR}/**/target"
  before_script:
    # build project
    # NOTE(review): `install` already runs the package phase, so `package
    # install` does redundant work — `mvn install` alone would suffice.
    - mvn -q $MAVEN_CLI_OPTS package install
    - ls "${CI_PROJECT_DIR}/gpt-tests-executer/target"
    # Clean old report
    - mkdir -p $REPORT_DIR $REPORT_DIR/report $REPORT_DIR/report/output $TEMP_DIR
    - pip3 install -q -r $CI_PROJECT_DIR/requirements.txt
    # Produce a list of tests to run
    - python3 $CI_PROJECT_DIR/pygpt/filter_json.py gpt-tests-resources/tests $SCOPE $REPORT_DIR
    # Get test data for single test file.
    # `sed "<n>q;d"` prints only line n of JSONTestFiles.txt, so each parallel
    # job (CI_NODE_INDEX is 1-based) processes exactly one test file.
    - |
      export test=$(sed "${CI_NODE_INDEX}q;d" $REPORT_DIR/JSONTestFiles.txt)
      python3 $CI_PROJECT_DIR/pygpt/get_test_data_list.py $test $REPORT_DIR $TEST_DATA_DIR
    - cat $REPORT_DIR/$TEST_DATA_LIST
    # Download test data
    - $CI_PROJECT_DIR/download_test_data.sh "${REPORT_DIR}" "${TEST_DATA_DIR}" "${TEST_DATA_LIST}" "${S3_BUCKET}" "${S3_ARGS}"
# Base test job: runs the single test file selected by CI_NODE_INDEX through
# the gpt test executor and always archives the report directory, even on
# failure, so the report stage can aggregate results.
.test:
  stage: test
  extends: .prepare
  script:
    - echo "Running $(sed "${CI_NODE_INDEX}q;d" ${REPORT_DIR}/JSONTestFiles.txt)"
    # Launch the Java test executor on this node's test file; results land in
    # $REPORT_DIR/report/output.
    - |
      export FILE=$(sed "${CI_NODE_INDEX}q;d" $REPORT_DIR/JSONTestFiles.txt)
      python3 pygpt/snap_gpt_test.py java "${JAVA_OPTIONS} -cp ${CI_PROJECT_DIR}/gpt-tests-executer/target/gpt-test-exec.jar" \
      org.esa.snap.test.TestOutput $PROPERTIES_PATH $SCOPE $FILE $REPORT_DIR/report/output true
  after_script:
    # DEBUG
    - ls ~/.snap/auxdata/ && echo "DEBUG END"
    - ls $REPORT_DIR/report
    - ls $REPORT_DIR/report/output
    # TODO remove next line when test data will be cropped (ATM some tests produce gigabytes of tests data)
    - rm -rf $REPORT_DIR/report/output/*.zip $REPORT_DIR/report/output/*.dim $REPORT_DIR/report/output/*.data
    # Move results to the location declared under artifacts:paths below.
    - mv $REPORT_DIR $CI_PROJECT_DIR/result
  retry:
    max: 2
    when:
      - runner_system_failure
      - stuck_or_timeout_failure
      - script_failure
      - unknown_failure
  artifacts:
    # Keep the report even when the test script fails.
    when: always
    expire_in: "1 day"
    paths:
      - $CI_PROJECT_DIR/result
# CItest scope: quick smoke-test run, 2 parallel shards.
test_CItest:
  extends: .test
  tags:
    - kube
  variables:
    TEST_DATA_LIST: singleTestData.txt
  parallel: 2
  rules:
    - if: $SCOPE == 'CItest' && $PLATFORM == 'linux'
      when: always
    - when: never
# s3tbx scope (Sentinel-3 toolbox tests), 22 parallel shards.
test_s3tbx:
  extends: .test
  tags:
    - kube
  variables:
    TEST_DATA_LIST: singleTestData.txt
  parallel: 22
  rules:
    - if: $SCOPE == 's3tbx' && $PLATFORM == 'linux'
      when: always
    - when: never
# s2tbx scope (Sentinel-2 toolbox tests), 100 parallel shards.
test_s2tbx:
  extends: .test
  tags:
    - kube
  parallel: 100
  rules:
    - if: $SCOPE == 's2tbx' && $PLATFORM == 'linux'
      when: always
    - when: never
# s1tbx scope (Sentinel-1 toolbox tests), 34 parallel shards.
test_s1tbx:
  extends: .test
  tags:
    - kube
  parallel: 34
  rules:
    - if: $SCOPE == 's1tbx' && $PLATFORM == 'linux'
      when: always
    - when: never
# snap scope (core SNAP tests), 26 parallel shards.
test_snap:
  extends: .test
  tags:
    - kube
  parallel: 26
  rules:
    - if: $SCOPE == 'snap' && $PLATFORM == 'linux'
      when: always
    - when: never
# weekly scope: extended regression suite, 117 parallel shards.
test_weekly:
  extends: .test
  tags:
    - kube
  parallel: 117
  rules:
    - if: $SCOPE == 'weekly' && $PLATFORM == 'linux'
      when: always
    - when: never
# daily scope: full nightly suite, 181 parallel shards.
test_daily:
  extends: .test
  tags:
    - kube
  parallel: 181
  rules:
    - if: $SCOPE == 'daily' && $PLATFORM == 'linux'
      when: always
    - when: never
# Aggregates the per-shard test results, generates the HTML report, publishes
# it to S3 and (for release-line refs) updates the statistics database.
report:
  stage: report
  rules:
    # GitLab evaluates rules in order and applies only the first match, so the
    # version-specific rules must come BEFORE the generic one — in the original
    # layout they sat after a bare `when: never` catch-all and were unreachable.
    # The job still runs under exactly the same condition as before:
    # PLATFORM == linux && SCOPE != CItest.
    - if: $PLATFORM == 'linux' && $SCOPE != 'CItest' && $CI_COMMIT_BRANCH == 'master'
      when: always
      variables:
        VERSION: snap:master
    - if: $PLATFORM == 'linux' && $SCOPE != 'CItest' && $CI_COMMIT_TAG
      when: always
      variables:
        VERSION: snap:$CI_COMMIT_TAG
    - if: $PLATFORM == 'linux' && $SCOPE != 'CItest'
      when: always
    - when: never
  # The report stage needs no cache from the build/test stages.
  cache: []
  variables:
    # output upload and download progress every 2 seconds
    TRANSFER_METER_FREQUENCY: "2s"
    # Use fast compression for artifacts, resulting in larger archives
    ARTIFACT_COMPRESSION_LEVEL: "fast"
    # Use no compression for caches
    CACHE_COMPRESSION_LEVEL: "fastest"
    # Set maximum duration of cache upload and download (minutes)
    CACHE_REQUEST_TIMEOUT: 5
  before_script:
    - pip3 install -r $CI_PROJECT_DIR/requirements.txt
  script:
    - echo "Copy assets for report generation in result/report"
    - ls result/report/output/
    - cp -rf result/report/output/* result/report
    - cp -rf pygpt/statics/* result/report
    - ls result/report
    - echo "Generate report"
    # TODO(review): "snap:master" is hard-coded although the rules above set
    # $VERSION — confirm whether $VERSION should be passed here instead.
    - python3 pygpt/report_utils.py pygpt/templates result/report $SCOPE snap:master
    # Upload to s3
    - aws s3 rm "s3://${S3_REPORTS}/linux" --recursive --endpoint-url https://s3.sbg.io.cloud.ovh.net --region sbg
    - aws s3 sync result/report "s3://${S3_REPORTS}/linux" --endpoint-url https://s3.sbg.io.cloud.ovh.net --region sbg --acl public-read
  after_script:
    # Update the stats DB only for master, maintenance "<n>.x" branches,
    # release-candidate branches and tags.
    # Fix: quoting the right-hand side of bash `=~` forces a LITERAL string
    # match (and "*.x$" is not a valid ERE — leading `*`), so the original
    # branch checks could never match. Use an unquoted regex for the ".x"
    # suffix and a glob match for "RC"; `-n` replaces the double-negative
    # `! -z`.
    - |
      if [[ "${CI_COMMIT_BRANCH}" == "master" ]] || [[ "${CI_COMMIT_BRANCH}" =~ \.x$ ]] || [[ "${CI_COMMIT_BRANCH}" == *RC* ]] || [[ -n "${CI_COMMIT_TAG}" ]]; then
        ls result/report/json
        echo "Update database"
        python3 pygpt/stats_db.py $DB_PATH "snap:${CI_COMMIT_REF_NAME}" $SCOPE result/report ${CI_JOB_ID:6:10} $CI_COMMIT_REF_NAME
      fi
  artifacts:
    expire_in: 1 day
    paths:
      - result/report