---
name: tests

env:
  GINKGO_VERSION: v1.15.2

on:
  push:
    branches-ignore:
      - development/**
      - q/*/**

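# The build job publishes the CI images used below to ghcr.io; the tests job
# then runs linting, unit tests and the backbeat feature tests against them.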
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
      - name: Login to Registry
        uses: docker/login-action@v3
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ github.token }}
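      # Images are tagged with the commit SHA and cached in the GitHub Actions
      # cache (type=gha), one cache scope per image.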
      - name: Build and push kafka
        uses: docker/build-push-action@v5
        with:
          push: true
          context: .github/dockerfiles/kafka
          tags: ghcr.io/${{ github.repository }}/ci-kafka:${{ github.sha }}
          cache-from: type=gha,scope=ci-kafka
          cache-to: type=gha,mode=max,scope=ci-kafka
      - name: Build and push syntheticbucketd
        uses: docker/build-push-action@v5
        with:
          push: true
          context: .
          file: .github/dockerfiles/syntheticbucketd/Dockerfile
          tags: ghcr.io/${{ github.repository }}/syntheticbucketd:${{ github.sha }}
          cache-from: type=gha,scope=syntheticbucketd
          cache-to: type=gha,mode=max,scope=syntheticbucketd

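  # Lints the code base, runs the unit test suites, then exercises the backbeat
  # feature tests against the service containers defined below.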
  tests:
    needs: build
    runs-on: ubuntu-latest
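    # Service containers shared by every step; the kafka and syntheticbucketd
    # images are the ones pushed by the build job above, tagged with the commit SHA.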
    services:
      redis:
        image: redis:alpine
        ports:
          - 6379:6379
      syntheticbucketd:
        image: ghcr.io/${{ github.repository }}/syntheticbucketd:${{ github.sha }}
        ports:
          - 9001:9001
      kafka:
        image: ghcr.io/${{ github.repository }}/ci-kafka:${{ github.sha }}
        credentials:
          username: ${{ github.repository_owner }}
          password: ${{ github.token }}
        ports:
          - 2181:2181
          - 9092:9092
        env:
          ADVERTISED_HOST: "localhost"
          ADVERTISED_PORT: 9092
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential
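      # The Go toolchain is used for the bucket-scanner suite (its go.sum drives
      # module caching, and ginkgo runs its specs); Node and yarn drive the
      # remaining lint and test steps.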
      - uses: actions/setup-go@v5
        with:
          go-version: '1.16.2'
          cache-dependency-path: ./bucket-scanner/go.sum
      - uses: actions/setup-node@v4
        with:
          node-version: '16'
          cache: yarn
      - name: Install node dependencies
        run: yarn install --frozen-lockfile --network-concurrency 1
      - name: Install ginkgo
        run: go get github.com/onsi/ginkgo/ginkgo@${GINKGO_VERSION}
      - name: Lint markdown
        run: yarn run --silent lint_md
      - name: Lint JavaScript
        run: yarn run --silent lint
      - name: Run unit tests
        run: yarn test
      - name: Run bucket scanner unit tests
        run: ginkgo -r --randomizeAllSpecs --randomizeSuites --failOnPending --cover --trace --race --progress -nodes 1 -stream -timeout 5m -slowSpecThreshold 60
        working-directory: bucket-scanner
      - name: Run backbeat routes test
        run: .github/scripts/run_server_tests.bash ft_test:api:routes
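      # The retry API tests run twice: once with account credentials and once
      # with role authentication, each pointing at its own backbeat config file.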
      - name: Run backbeat retry tests with account authentication
        run: .github/scripts/run_server_tests.bash ft_test:api:retry
        env:
          CI_AUTH_TYPE: "account"
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run backbeat retry tests with role authentication
        run: .github/scripts/run_server_tests.bash ft_test:api:retry
        env:
          CI_AUTH_TYPE: "role"
          BACKBEAT_CONFIG_FILE: "tests/config.roleAuth.json"
      - name: Run backbeat lib feature tests
        run: yarn run ft_test:lib
      - name: Run backbeat replication feature tests
        run: yarn run ft_test:replication
      - name: Run lifecycle feature tests
        run: .github/scripts/run_ft_tests.bash ft_test:lifecycle
        env:
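          # Apply expiration and transition one day earlier than configured,
          # presumably so lifecycle actions fire within the test run instead of
          # waiting a full day.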
          EXPIRE_ONE_DAY_EARLIER: "true"
          TRANSITION_ONE_DAY_EARLIER: "true"
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run backbeat notification feature tests
        run: yarn run ft_test:notification
      - name: Run ballooning tests for lifecycle conductor
        run: yarn mocha tests/performance/lifecycle/conductor-with-bucketd-check-memory-balloon.js
        env:
          # Constrain the long-lived heap to 90MB so that pushing 200K messages
          # crashes the process if they all end up in memory at the same time
          # (i.e. circuit breaking is ineffective) while waiting to be committed
          # to the kafka topic.
          NODE_OPTIONS: '--max-old-space-size=90'