Add rdkafka metrics #3416

Workflow file for this run
---
name: tests
env:
  GINKGO_VERSION: v1.15.2
on:
  push:
    branches-ignore:
      - development/**
      - q/*/**
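# The push trigger above runs on every push except to branches matching
# development/** or q/*/** (GitHub's '**' glob matches any number of path
# segments, so nested branch names are covered too).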
jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      # Need to explicitly add package write permissions for dependabot
      contents: read
      packages: write
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2
        with:
          buildkitd-flags: --debug
      - name: Login to Registry
        uses: docker/login-action@v2
        with:
          registry: ghcr.io
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
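      # The three build-and-push steps below share BuildKit's GitHub Actions
      # cache backend; mode=max also exports intermediate layers, so unchanged
      # stages are reused across workflow runs.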
      - name: Build and push kafka
        uses: docker/build-push-action@v4
        with:
          push: true
          context: .github/dockerfiles/kafka
          tags: "ghcr.io/scality/backbeat/ci-kafka:${{ github.sha }}"
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Build and push syntheticbucketd
        uses: docker/build-push-action@v4
        with:
          push: true
          context: .
          file: .github/dockerfiles/syntheticbucketd/Dockerfile
          tags: "ghcr.io/scality/backbeat/syntheticbucketd:${{ github.sha }}"
          cache-from: type=gha
          cache-to: type=gha,mode=max
      - name: Build and push MongoDB
        uses: docker/build-push-action@v4
        with:
          push: true
          context: .github/dockerfiles/mongodb
          tags: "ghcr.io/scality/backbeat/ci-mongodb:${{ github.sha }}"
          cache-from: type=gha
          cache-to: type=gha,mode=max
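  # The tests job consumes the images built above as service containers; each
  # service is reachable from the runner at localhost through its mapped ports.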
  tests:
    needs: build
    runs-on: ubuntu-latest
    services:
      redis:
        image: redis:alpine
        ports:
          - 6379:6379
      syntheticbucketd:
        image: ghcr.io/scality/backbeat/syntheticbucketd:${{ github.sha }}
        ports:
          - 9001:9001
      kafka:
        image: ghcr.io/scality/backbeat/ci-kafka:${{ github.sha }}
        credentials:
          username: ${{ github.repository_owner }}
          password: ${{ secrets.GITHUB_TOKEN }}
        ports:
          - 2181:2181
          - 9092:9092
        env:
          ADVERTISED_HOST: "localhost"
          ADVERTISED_PORT: 9092
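          # ADVERTISED_HOST/PORT are presumably consumed by the ci-kafka
          # image's startup script (an assumption about this custom image) to
          # set the broker's advertised listener, so test clients on the
          # runner can reach it as localhost:9092 via the mapped port.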
      mongo:
        image: ghcr.io/scality/backbeat/ci-mongodb:${{ github.sha }}
        ports:
          - 27017:27017
          - 27018:27018
          - 27019:27019
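        # The three mapped ports suggest the ci-mongodb image runs a
        # three-node replica set rather than a single mongod (an assumption
        # about this custom image).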
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Install build dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y build-essential libzstd-dev
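      # build-essential and the zstd headers are needed to compile native node
      # modules during `yarn install`; presumably node-rdkafka, which builds
      # librdkafka from source with zstd compression support.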
      - uses: actions/setup-go@v4
        with:
          go-version: '1.16.2'
      - uses: actions/setup-node@v3
        with:
          node-version: '16'
          cache: yarn
      - name: Install node dependencies
        run: yarn install --ignore-engines --frozen-lockfile --network-concurrency 1
      - name: Install ginkgo
        run: go get github.com/onsi/ginkgo/ginkgo@${GINKGO_VERSION}
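      # With Go 1.16 in module mode, `go get pkg@version` still builds the
      # command and installs it into $GOPATH/bin, which setup-go adds to the
      # PATH, making the `ginkgo` binary available to later steps.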
      - name: Lint markdown
        run: yarn run --silent lint_md
      - name: Lint JavaScript
        run: yarn run --silent lint
      - name: Run unit tests
        run: yarn test
        env:
          BACKBEAT_CONFIG_FILE: tests/config.json
      - name: Run bucket scanner unit tests
        run: ginkgo -r --randomizeAllSpecs --randomizeSuites --failOnPending --cover --trace --race --progress -nodes 1 -stream -timeout 5m -slowSpecThreshold 60
        working-directory: bucket-scanner
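      # --randomizeAllSpecs/--randomizeSuites shuffle execution order to flush
      # out order-dependent specs; --race enables Go's data race detector.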
      - name: Run backbeat routes test
        run: .github/scripts/run_server_tests.bash ft_test:api:routes
        env:
          BACKBEAT_CONFIG_FILE: "tests/config.json"
          MANAGEMENT_BACKEND: operator
      - name: Run backbeat retry tests with account authentication
        run: .github/scripts/run_server_tests.bash ft_test:api:retry
        env:
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run feature replication tests
        run: .github/scripts/run_ft_tests.bash ft_test:replication
        env:
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run feature lifecycle tests
        run: .github/scripts/run_ft_tests.bash ft_test:lifecycle
        env:
          EXPIRE_ONE_DAY_EARLIER: "true"
          TRANSITION_ONE_DAY_EARLIER: "true"
          BACKBEAT_CONFIG_FILE: "tests/config.json"
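      # EXPIRE/TRANSITION_ONE_DAY_EARLIER shift lifecycle evaluation by a day,
      # presumably so that one-day expiration and transition rules fire
      # immediately in tests instead of waiting out a real day.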
      - name: Run feature ingestion tests
        run: .github/scripts/run_ft_tests.bash ft_test:ingestion
        env:
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run misc functional tests
        run: .github/scripts/run_ft_tests.bash ft_test:lib
        env:
          BACKBEAT_CONFIG_FILE: "tests/config.json"
      - name: Run backbeat notification feature tests
        run: yarn run ft_test:notification
      - name: Run ballooning tests for lifecycle conductor
        run: yarn mocha tests/performance/lifecycle/conductor-check-memory-balloon.js
        env:
          # Constrain the long-lived heap to 150 MB, so that pushing 200K
          # messages crashes the process if they all end up in memory at the
          # same time (i.e. circuit breaking is ineffective) while waiting to
          # be committed to the kafka topic.
          NODE_OPTIONS: '--max-old-space-size=150'