Workflow file for this run

name: backend

on:
  push:
    branches:
      - master
  pull_request:

# Cancel in progress workflows on pull_requests.
# https://docs.github.com/en/actions/using-jobs/using-concurrency#example-using-a-fallback-value
concurrency:
  group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true
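  # On pull requests `github.head_ref` is set, so pushes to the same branch share a
  # concurrency group and cancel the previous run; on pushes to master `head_ref` is
  # empty and the unique `github.run_id` is used instead, so nothing on master is cancelled.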

# hack for https://github.com/actions/cache/issues/810#issuecomment-1222550359
env:
  SEGMENT_DOWNLOAD_TIMEOUT_MINS: 3
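  # SEGMENT_DOWNLOAD_TIMEOUT_MINS is read by actions/cache and caps how long a single
  # cache segment download may take, so a stalled cache restore aborts after 3 minutes
  # instead of hanging the job.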

jobs:
  files-changed:
    name: detect what files changed
    runs-on: ubuntu-24.04
    timeout-minutes: 3
    # Map a step output to a job output
    outputs:
      api_docs: ${{ steps.changes.outputs.api_docs }}
      backend: ${{ steps.changes.outputs.backend_all }}
      backend_dependencies: ${{ steps.changes.outputs.backend_dependencies }}
      backend_any_type: ${{ steps.changes.outputs.backend_any_type }}
      migration_lockfile: ${{ steps.changes.outputs.migration_lockfile }}
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Check for backend file changes
        uses: dorny/paths-filter@0bc4621a3135347011ad047f9ecf449bf72ce2bd # v3.0.0
        id: changes
        with:
          token: ${{ github.token }}
          filters: .github/file-filters.yml
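      # Each named filter in .github/file-filters.yml maps to a list of path globs; the
      # paths-filter action sets a 'true'/'false' output per filter, which the job
      # outputs above forward to the `if:` conditions of the jobs below. A hypothetical
      # entry (the real filter file is not shown here) looks like:
      #
      #   backend_dependencies:
      #     - 'requirements-*.txt'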

  api-docs:
    if: needs.files-changed.outputs.api_docs == 'true'
    needs: files-changed
    name: api docs test
    runs-on: ubuntu-24.04
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4
        id: setup-node
        with:
          node-version-file: '.volta.json'
      - name: Setup sentry python env
        uses: ./.github/actions/setup-sentry
        id: setup
        with:
          snuba: true
      - name: Run API docs tests
        # install ts-node for ts build scripts to execute properly without potentially installing
        # conflicting deps when running scripts locally
        # see: https://github.com/getsentry/sentry/pull/32328/files
        run: |
          yarn add ts-node && make test-api-docs

  backend-test:
    if: needs.files-changed.outputs.backend == 'true'
    needs: files-changed
    name: backend test
    runs-on: ubuntu-24.04
    timeout-minutes: 60
    permissions:
      contents: read
      id-token: write
    strategy:
      # Don't cancel the other matrix jobs when one fails: only the failed shard needs a
      # re-run, which reduces resource usage and the risk that one of many re-runs turns
      # red again (read: intermittent tests)
      fail-fast: false
      matrix:
        # XXX: When updating this, make sure you also update MATRIX_INSTANCE_TOTAL.
        instance: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
        pg-version: ['14']
    env:
      # XXX: `MATRIX_INSTANCE_TOTAL` must be hardcoded to the length of `strategy.matrix.instance`.
      # If this increases, make sure to also increase `flags.backend.after_n_builds` in `codecov.yml`.
      MATRIX_INSTANCE_TOTAL: 11
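      # Each matrix instance runs one shard of the suite: setup-sentry exposes
      # `matrix-instance-number` / `matrix-instance-total` from the values above, and
      # `make test-python-ci` presumably uses them to pick its subset of tests (the
      # actual splitting lives in the Makefile / setup-sentry action, not here).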
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Setup sentry env
        uses: ./.github/actions/setup-sentry
        id: setup
        with:
          redis_cluster: true
          kafka: true
          snuba: true
          symbolicator: true
          # Right now, we run so few bigtable related tests that the
          # overhead of running bigtable in all backend tests
          # is way smaller than the time it would take to run in its own job.
          bigtable: true
          pg-version: ${{ matrix.pg-version }}
      - name: Run backend test (${{ steps.setup.outputs.matrix-instance-number }} of ${{ steps.setup.outputs.matrix-instance-total }})
        run: |
          make test-python-ci
      - name: Collect test data
        uses: ./.github/actions/collect-test-data
        if: ${{ !cancelled() }}
        with:
          artifact_path: .artifacts/pytest.json
          gcs_bucket: ${{ secrets.COLLECT_TEST_DATA_GCS_BUCKET }}
          gcp_project_id: ${{ secrets.COLLECT_TEST_DATA_GCP_PROJECT_ID }}
          workload_identity_provider: ${{ secrets.SENTRY_GCP_DEV_WORKLOAD_IDENTITY_POOL }}
          service_account_email: ${{ secrets.COLLECT_TEST_DATA_SERVICE_ACCOUNT_EMAIL }}
          matrix_instance_number: ${{ steps.setup.outputs.matrix-instance-number }}
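      # `!cancelled()` runs the collection step above even when the test step failed,
      # so results from red runs are still captured. The GCS upload authenticates via
      # GCP workload identity federation, which is why this job requests the
      # `id-token: write` permission.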
      # Upload coverage data even if the test step fails, since it reduces
      # large coverage fluctuations
      - name: Handle artifacts
        if: ${{ always() }}
        uses: ./.github/actions/artifacts
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          commit_sha: ${{ github.event.pull_request.head.sha }}
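      # `commit_sha` above points the upload at the PR's head commit rather than the
      # synthetic merge commit that actions/checkout checks out for pull_request runs.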

  backend-migration-tests:
    if: needs.files-changed.outputs.backend == 'true'
    needs: files-changed
    name: backend migration tests
    runs-on: ubuntu-24.04
    timeout-minutes: 30
    strategy:
      matrix:
        pg-version: ['14']
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Setup sentry env
        uses: ./.github/actions/setup-sentry
        id: setup
        with:
          snuba: true
          pg-version: ${{ matrix.pg-version }}
      - name: run tests
        run: |
          PYTEST_ADDOPTS="$PYTEST_ADDOPTS -m migrations --migrations --reruns 0" make test-python-ci
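      # Flags above: `-m migrations` selects only tests marked `migrations`,
      # `--migrations` presumably makes pytest run the Django migrations these tests
      # exercise instead of skipping them, and `--reruns 0` (pytest-rerunfailures)
      # disables automatic retries so migration failures are not masked as flakes.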
      # Upload coverage data even if the test step fails, since it reduces
      # large coverage fluctuations
      - name: Handle artifacts
        if: ${{ always() }}
        uses: ./.github/actions/artifacts
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          commit_sha: ${{ github.event.pull_request.head.sha }}

  cli:
    if: needs.files-changed.outputs.backend == 'true'
    needs: files-changed
    name: cli test
    runs-on: ubuntu-24.04
    timeout-minutes: 10
    strategy:
      matrix:
        pg-version: ['14']
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Setup sentry env
        uses: ./.github/actions/setup-sentry
        id: setup
        with:
          pg-version: ${{ matrix.pg-version }}
      - name: Run test
        run: |
          make test-cli
      # Upload coverage data even if the test step fails, since it reduces
      # large coverage fluctuations
      - name: Handle artifacts
        if: ${{ always() }}
        uses: ./.github/actions/artifacts
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          commit_sha: ${{ github.event.pull_request.head.sha }}

  requirements:
    if: needs.files-changed.outputs.backend_dependencies == 'true'
    needs: files-changed
    name: requirements check
    runs-on: ubuntu-24.04
    timeout-minutes: 3
    steps:
      - uses: getsentry/action-github-app-token@d4b5da6c5e37703f8c3b3e43abb5705b46e159cc # v3.0.0
        id: token
        continue-on-error: true
        with:
          app_id: ${{ vars.SENTRY_INTERNAL_APP_ID }}
          private_key: ${{ secrets.SENTRY_INTERNAL_APP_PRIVATE_KEY }}
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - uses: getsentry/action-setup-venv@a133e6fd5fa6abd3f590a1c106abda344f5df69f # v2.1.0
        with:
          python-version: 3.12.6
          cache-dependency-path: requirements-dev-frozen.txt
          install-cmd: pip install -q --constraint requirements-dev-frozen.txt pip-tools
      - name: check requirements
        run: |
          python -S -m tools.freeze_requirements
          if ! git diff --exit-code; then
            echo $'\n\nrun `make freeze-requirements` locally to update requirements'
            exit 1
          fi
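      # The step below pushes the re-frozen requirements straight to the PR branch when
      # the internal app token was obtained above (it is `continue-on-error` so forks
      # without the secret still run the check); `always()` lets the commit happen even
      # though the check step just failed.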
      - name: apply any requirements changes
        if: steps.token.outcome == 'success' && github.ref != 'refs/heads/master' && always()
        uses: getsentry/action-github-commit@31f6706ca1a7b9ad6d22c1b07bf3a92eabb05632 # v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          message: ':snowflake: re-freeze requirements'

  migration:
    if: needs.files-changed.outputs.migration_lockfile == 'true'
    needs: files-changed
    name: check migration
    runs-on: ubuntu-24.04
    strategy:
      matrix:
        pg-version: ['14']
    steps:
      - name: Checkout sentry
        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Setup sentry env
        uses: ./.github/actions/setup-sentry
        id: setup
        with:
          pg-version: ${{ matrix.pg-version }}
      - name: Migration & lockfile checks
        env:
          SENTRY_LOG_LEVEL: ERROR
          PGPASSWORD: postgres
        run: |
          ./.github/workflows/scripts/migration-check.sh
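      # migration-check.sh is not shown here; given that this job is gated on
      # `migration_lockfile` changes, it presumably verifies that the migration
      # lockfile is regenerated and consistent with any newly added migrations.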

  monolith-dbs:
    if: needs.files-changed.outputs.backend == 'true'
    needs: files-changed
    name: monolith-dbs test
    runs-on: ubuntu-24.04
    timeout-minutes: 20
    permissions:
      contents: read
      id-token: write
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - name: Setup sentry env
        uses: ./.github/actions/setup-sentry
        id: setup
      - name: Run test
        run: |
          make test-monolith-dbs
      - name: Collect test data
        uses: ./.github/actions/collect-test-data
        if: ${{ !cancelled() }}
        with:
          artifact_path: .artifacts/pytest.monolith-dbs.json
          gcs_bucket: ${{ secrets.COLLECT_TEST_DATA_GCS_BUCKET }}
          gcp_project_id: ${{ secrets.COLLECT_TEST_DATA_GCP_PROJECT_ID }}
          workload_identity_provider: ${{ secrets.SENTRY_GCP_DEV_WORKLOAD_IDENTITY_POOL }}
          service_account_email: ${{ secrets.COLLECT_TEST_DATA_SERVICE_ACCOUNT_EMAIL }}
      # Upload coverage data even if the test step fails, since it reduces
      # large coverage fluctuations
      - name: Handle artifacts
        if: ${{ always() }}
        uses: ./.github/actions/artifacts
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          commit_sha: ${{ github.event.pull_request.head.sha }}

  typing:
    if: needs.files-changed.outputs.backend == 'true'
    needs: files-changed
    name: backend typing
    runs-on: ubuntu-24.04
    timeout-minutes: 20
    steps:
      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
      - uses: getsentry/action-setup-venv@a133e6fd5fa6abd3f590a1c106abda344f5df69f # v2.1.0
        with:
          python-version: 3.12.6
          cache-dependency-path: requirements-dev-frozen.txt
          install-cmd: pip install -r requirements-dev-frozen.txt
      - name: setup sentry (lite)
        run: |
          python3 -m tools.fast_editable --path .
          sentry init
      - run: PYTHONWARNINGS=error::RuntimeWarning mypy
        id: run
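      # PYTHONWARNINGS=error::RuntimeWarning turns any RuntimeWarning raised in the
      # mypy process into a hard error; `id: run` lets the blocklist auto-commit step
      # below check that the type check itself succeeded.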
      - uses: getsentry/action-github-app-token@d4b5da6c5e37703f8c3b3e43abb5705b46e159cc # v3.0.0
        id: token
        continue-on-error: true
        with:
          app_id: ${{ vars.SENTRY_INTERNAL_APP_ID }}
          private_key: ${{ secrets.SENTRY_INTERNAL_APP_PRIVATE_KEY }}
      # only if `mypy` succeeds should we try to trim the blocklist
      - run: python3 -m tools.mypy_helpers.make_module_ignores
        id: regen-blocklist
      # fail if regenerating the blocklist changed anything that is not checked in
      - run: git diff --exit-code
      - run: |
          # mypy does not have granular error codes, so don't allow specific messages to
          # regress. `.artifacts/mypy-all` holds the captured mypy output; a single grep
          # is used because with `set -e` a failing `! grep ...` line would not stop the
          # script (shellcheck SC2251), so separate checks would only enforce the last one.
          set -euo pipefail
          ! grep \
            -e "'Settings' object has no attribute" \
            -e 'Cannot override class variable' \
            -e 'Exception type must be derived from BaseException' \
            -e 'Incompatible default for argument' \
            -e 'Incompatible return value type (got "HttpResponseBase"' \
            -e 'Incompatible types in "yield"' \
            -e 'Module "sentry.*has no attribute' \
            -e 'No return value expected' \
            -e 'Unpacking a string is disallowed' \
            -e 'base class .* defined the type as.*Permission' \
            -e 'gets multiple values for' \
            .artifacts/mypy-all
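      # If mypy and the blocklist regeneration both succeeded but the `git diff` check
      # above failed, the step below commits the regenerated blocklist back to the PR
      # branch (never to master) using the app token from earlier; `always()` is what
      # lets it run after a failed step.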
      - name: apply blocklist changes
        if: |
          steps.token.outcome == 'success' &&
          steps.run.outcome == 'success' &&
          steps.regen-blocklist.outcome == 'success' &&
          github.ref != 'refs/heads/master' &&
          always()
        uses: getsentry/action-github-commit@31f6706ca1a7b9ad6d22c1b07bf3a92eabb05632 # v2.0.0
        with:
          github-token: ${{ steps.token.outputs.token }}
          message: ':knife: regenerate mypy module blocklist'

  # This check runs once all dependent jobs have passed
  # It signals that all required backend checks have successfully passed (or been skipped)
  # This job is the only required backend check
  backend-required-check:
    needs:
      [
        api-docs,
        backend-test,
        backend-migration-tests,
        cli,
        files-changed,
        requirements,
        migration,
        monolith-dbs,
        typing,
      ]
    name: Backend
    # This is necessary since a failed/skipped dependent job would cause this job to be skipped
    if: always()
    runs-on: ubuntu-24.04
    steps:
      # If any job we depend on fails, this job fails too, since it is a required check
      # NOTE: A timeout is considered a failure
      - name: Check for failures
        if: contains(needs.*.result, 'failure') || contains(needs.*.result, 'cancelled')
        run: |
          echo "One of the dependent jobs has failed. You may need to re-run it." && exit 1