diff --git a/mev-boost-relay/.dockerignore b/mev-boost-relay/.dockerignore deleted file mode 100644 index 191381ee7..000000000 --- a/mev-boost-relay/.dockerignore +++ /dev/null @@ -1 +0,0 @@ -.git \ No newline at end of file diff --git a/mev-boost-relay/.github/ISSUE_TEMPLATE/bug_report.md b/mev-boost-relay/.github/ISSUE_TEMPLATE/bug_report.md deleted file mode 100644 index cc7a1da49..000000000 --- a/mev-boost-relay/.github/ISSUE_TEMPLATE/bug_report.md +++ /dev/null @@ -1,31 +0,0 @@ ---- -name: Bug report -about: Create a report to help us improve -title: '' -labels: 'bug' -assignees: '' - ---- - -**Describe the bug** -A clear and concise description of what the bug is. - -**Steps to reproduce** -Steps to reproduce the behavior: -1. Go to '...' -2. Click on '....' -3. Scroll down to '....' -4. See error - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Which version are you using:** -- OS: [e.g. Ubuntu 22.10] -- Relay: run `git describe --tags --always --dirty=-dev` - -**Additional context** -Add any other context about the problem here. diff --git a/mev-boost-relay/.github/ISSUE_TEMPLATE/feature_request.md b/mev-boost-relay/.github/ISSUE_TEMPLATE/feature_request.md deleted file mode 100644 index fefa40d5f..000000000 --- a/mev-boost-relay/.github/ISSUE_TEMPLATE/feature_request.md +++ /dev/null @@ -1,20 +0,0 @@ ---- -name: Feature request -about: Suggest an idea for this project -title: '' -labels: 'feature request' -assignees: '' - ---- - -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] - -**Describe the solution you'd like** -A clear and concise description of what you want to happen. - -**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. - -**Additional context** -Add any other context or screenshots about the feature request here. 
diff --git a/mev-boost-relay/.github/ISSUE_TEMPLATE/proposal.md b/mev-boost-relay/.github/ISSUE_TEMPLATE/proposal.md deleted file mode 100644 index b16b2be71..000000000 --- a/mev-boost-relay/.github/ISSUE_TEMPLATE/proposal.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -name: Proposal -about: Propose an idea -title: '' -labels: 'proposal' -assignees: '' - ---- - diff --git a/mev-boost-relay/.github/dependabot.yml b/mev-boost-relay/.github/dependabot.yml deleted file mode 100644 index 83f5768c1..000000000 --- a/mev-boost-relay/.github/dependabot.yml +++ /dev/null @@ -1,11 +0,0 @@ -# Please see the documentation for all configuration options: -# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates - -version: 2 -updates: - - package-ecosystem: gomod - directory: / - schedule: - interval: daily - reviewers: - - "metachris" \ No newline at end of file diff --git a/mev-boost-relay/.github/pull_request_template.md b/mev-boost-relay/.github/pull_request_template.md deleted file mode 100644 index 0458eea93..000000000 --- a/mev-boost-relay/.github/pull_request_template.md +++ /dev/null @@ -1,20 +0,0 @@ -## 📝 Summary - - - -## ⛱ Motivation and Context - - - -## 📚 References - - - ---- - -## ✅ I have run these commands - -* [ ] `make lint` -* [ ] `make test-race` -* [ ] `go mod tidy` -* [ ] I have seen and agree to `CONTRIBUTING.md` diff --git a/mev-boost-relay/.github/workflows/checks.yml b/mev-boost-relay/.github/workflows/checks.yml deleted file mode 100644 index 601118028..000000000 --- a/mev-boost-relay/.github/workflows/checks.yml +++ /dev/null @@ -1,65 +0,0 @@ -name: Checks - -on: - push: - branches: - - main - pull_request: - -jobs: - test: - name: Test - runs-on: ubuntu-latest - steps: - - name: Set up Go - uses: actions/setup-go@v3 - with: - go-version: ^1.21 - id: go - - - name: Check out code into the Go module directory - uses: actions/checkout@v2 - - - name: Run Docker compose to spin up services - run: docker compose build && docker compose up -d db - - - name: Run unit tests and generate the coverage report - run: RUN_DB_TESTS=1 make test-coverage - - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v2 - with: - files: ./coverage.out - verbose: false - flags: unittests - - lint: - name: Lint - runs-on: ubuntu-latest - steps: - - name: Set up Go - uses: actions/setup-go@v3 - with: - go-version: ^1.21 - id: go - - - name: Check out code into the Go module directory - uses: actions/checkout@v3 - - - name: Install gofumpt - run: go install mvdan.cc/gofumpt@v0.4.0 - - - name: Install staticcheck - run: go install honnef.co/go/tools/cmd/staticcheck@v0.4.6 - - - name: Install golangci-lint - run: go install github.com/golangci/golangci-lint/cmd/golangci-lint@v1.55.0 - - - name: Lint - run: make lint - - - name: Ensure go mod tidy runs without changes - run: | - go mod tidy - git diff-index HEAD - git diff-index --quiet HEAD diff --git a/mev-boost-relay/.github/workflows/release.yml b/mev-boost-relay/.github/workflows/release.yml deleted file mode 100644 index de3dfda64..000000000 --- a/mev-boost-relay/.github/workflows/release.yml +++ /dev/null @@ -1,88 +0,0 @@ -name: Release - -on: - workflow_dispatch: - push: - tags: - - 'v*' - -jobs: - docker-image: - name: Publish Docker Image - runs-on: ubuntu-latest - - steps: - - name: Checkout sources - uses: actions/checkout@v2 - - - name: Get tag version - run: echo "RELEASE_VERSION=${GITHUB_REF#refs/*/}" >> $GITHUB_ENV - - - name: Print version - run: | - echo $RELEASE_VERSION - echo ${{ 
env.RELEASE_VERSION }} - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Extract metadata (tags, labels) for Docker - id: meta - uses: docker/metadata-action@v5 - with: - images: flashbots/mev-boost-relay - tags: | - type=sha - type=pep440,pattern={{version}} - type=pep440,pattern={{major}}.{{minor}} - type=raw,value=latest,enable=${{ !contains(env.RELEASE_VERSION, '-') }} - - - name: Login to DockerHub - uses: docker/login-action@v3 - with: - username: ${{ secrets.DOCKERHUB_USERNAME }} - password: ${{ secrets.DOCKERHUB_TOKEN }} - - - name: Go Build Cache for Docker - uses: actions/cache@v3 - with: - path: go-build-cache - key: ${{ runner.os }}-go-build-cache-${{ hashFiles('**/go.sum') }} - - - name: inject go-build-cache into docker - uses: reproducible-containers/buildkit-cache-dance@v2.1.2 - with: - cache-source: go-build-cache - - - name: Build and push - uses: docker/build-push-action@v5 - with: - context: . - push: true - build-args: | - VERSION=${{ env.RELEASE_VERSION }} - platforms: linux/amd64,linux/arm64 - tags: ${{ steps.meta.outputs.tags }} - labels: ${{ steps.meta.outputs.labels }} - cache-from: type=gha - cache-to: type=gha,mode=max - - github-release: - runs-on: ubuntu-latest - steps: - - name: Checkout sources - uses: actions/checkout@v2 - - - name: Create release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: ${{ github.ref }} - release_name: ${{ github.ref }} - draft: true - prerelease: false diff --git a/mev-boost-relay/.gitignore b/mev-boost-relay/.gitignore deleted file mode 100644 index 68f41fa9e..000000000 --- a/mev-boost-relay/.gitignore +++ /dev/null @@ -1,27 +0,0 @@ -# Binaries for programs and plugins -*.exe -*.exe~ -*.dll -*.so -*.dylib - -# Test binary, built with `go test -c` -*.test - -# Output of the go coverage tool, specifically when used with LiteIDE -*.out - -# IDE directories -.idea -.vscode - -# Random stuff -/tmp/ -/boost-relay -/mev-boost-relay -/dump.rdb -/deploy.sh -.DS_Store -/website-index.html -/README.internal.md -/internal/_investigations/ \ No newline at end of file diff --git a/mev-boost-relay/.golangci.yaml b/mev-boost-relay/.golangci.yaml deleted file mode 100644 index 3229c00fd..000000000 --- a/mev-boost-relay/.golangci.yaml +++ /dev/null @@ -1,131 +0,0 @@ -# https://golangci-lint.run/usage/linters -linters: - enable-all: true - disable: - - cyclop - - depguard - - forbidigo - - funlen - - gochecknoglobals - - gochecknoinits - - gocritic - - godot - - godox - - gomnd - - lll - - musttag - - nestif - - nilnil - - nlreturn - - noctx - - nonamedreturns - - nosnakecase - - paralleltest - - revive - - testpackage - - unparam - - varnamelen - - wrapcheck - - wsl - - deadcode - - varcheck - - interfacebloat - - exhaustruct - - # - # Disabled because of generics: - # - - contextcheck - - rowserrcheck - - sqlclosecheck - - structcheck - - wastedassign - - # - # Disabled because deprecated: - # - - exhaustivestruct - - golint - - ifshort - - interfacer - - maligned - - scopelint - -linters-settings: - # - # The G108 rule throws a false positive. We're not actually vulnerable. If - # you're not careful the profiling endpoint is automatically exposed on - # /debug/pprof if you import net/http/pprof. 
See this link: - # - # https://mmcloughlin.com/posts/your-pprof-is-showing - # - gosec: - excludes: - - G108 - - gocognit: - min-complexity: 85 # default: 30 - - gocyclo: - min-complexity: 70 # default: 30 - - gomoddirectives: - replace-allow-list: - - github.com/attestantio/go-builder-client - - github.com/attestantio/go-eth2-client - - maintidx: - under: 5 - - tagliatelle: - case: - rules: - json: snake - - gofumpt: - extra-rules: true - - exhaustruct: - exclude: - # - # Because it's easier to read without the other fields. - # - - 'GetPayloadsFilters' - # - # Easier to read with only one of the versioned payloads. - # - - 'VersionedSubmitBlindedBlockResponse' - - 'VersionedExecutionPayload' - - 'VersionedSignedBuilderBid' - - # - # Structures outside our control that have a ton of settings. It doesn't - # make sense to specify all of the fields. - # - - 'cobra.Command' - - 'database.*Entry' - - 'http.Server' - - 'logrus.*Formatter' - - 'Options' # redis - - # - # Excluded because there are private fields (not capitalized) that are - # not initialized. If possible, I think these should be altered. - # - - 'Datastore' - - 'Housekeeper' - - 'MockBeaconClient' - - 'RelayAPI' - - 'Webserver' - -issues: - exclude-rules: - - path: (.+)_test.go - linters: - - exhaustruct - - path: database/.*.go - linters: - - goconst - - path: cmd/tool/.*.go - linters: - - goconst \ No newline at end of file diff --git a/mev-boost-relay/ARCHITECTURE.md b/mev-boost-relay/ARCHITECTURE.md deleted file mode 100644 index 3f4677245..000000000 --- a/mev-boost-relay/ARCHITECTURE.md +++ /dev/null @@ -1,90 +0,0 @@ -Notes about the architecture and usage of the relay. - -See also: - -* https://github.com/flashbots/mev-boost-relay -* https://github.com/flashbots/mev-boost-relay/issues - -This document covers more details about running a relay at scale: https://flashbots.notion.site/Draft-Running-a-relay-4040ccd5186c425d9a860cbb29bbfe09 - -## Overview - -The relay consists of three main components: - -1. [Housekeeper](https://github.com/flashbots/mev-boost-relay/tree/main/services/housekeeper): update known validators and proposer duties, and syncs DB->Redis on startup. Needs to run as single instance, will be replaced by cronjob in the future. -1. [Website](https://github.com/flashbots/mev-boost-relay/tree/main/services/website): handles the root website requests (information is pulled from Redis and database). -1. [API](https://github.com/flashbots/mev-boost-relay/tree/main/services/api): for proposer, block builder, data. - -The API can run as a single instance, but for production can (and should) be deployed and scaled independently! These are the recommended deployments: - -1. Proposer API (registerValidator, getHeader, getPayload) -1. Builder API (getValidatorDuties, submitNewBlock) -1. Data API (read-only access to DB read replica) -1. Internal API (setting builder status) - ---- - -## Logging - -* Logs with level `error` are always system errors and something to investigate (never use the error level for bad request payloads or other user errors). -* Put differently: if you want to make an error show up in the logs and dashboards, then use the `error` level! 
- --- - -## Utilities - -* https://github.com/buger/jsonparser for really fast JSON request body processing - --- - -## System startup sequence - -* First, Redis and Postgres have to be ready, as well as the beacon node(s) -* The housekeeper syncs important data from the beacon node and database to Redis -* The API needs access to the data in Redis to operate (i.e. all bids are going through Redis) - -### Housekeeper - -The housekeeper updates Redis with important information: - -1. Active and pending validators (source: beacon node) -1. Proposer duties (source: beacon node (duties) + database (validator registrations)) -1. Validator registrations (source: database) -1. Builder status (source: database) - -Afterwards, there are important ongoing, regular housekeeper tasks: - -1. Update known validators and proposer duties in Redis -2. Update active validators in database (source: Redis) (TODO) - ---- - -## Tradeoffs - -- Validator registrations are only saved to the database if `feeRecipient` or `gasLimit` changed. If a registration has a newer timestamp but the same `feeRecipient` and `gasLimit`, it is not saved, to avoid filling up the database with unnecessary data. - (some CL clients create a new validator registration every epoch, not just if preferences change, as was the original idea). - ---- - -## Infrastructure - -A full infrastructure might include these components: - -1. Load balancer + Firewall -1. 2x proposer API (4 CPU, 1GB RAM) -1. 2x builder API (2-4 CPU, 1GB RAM) -1. 2x data API (1 CPU, 1GB RAM) -1. 2x website (1 CPU, 2GB RAM) -1. 1x housekeeper (2 CPU, 1GB RAM) -1. Redis (4GB) -1. Postgres DB (100GB+) -1. A bunch of beacon-nodes (3 for redundancy?) -1. Block validation EL nodes - -For more discussion about running a relay see also https://collective.flashbots.net/t/ideas-for-incentivizing-relays/586 - ---- - -## Further notes - -* Use [architecture decision records (ADRs)](https://github.com/joelparkerhenderson/architecture-decision-record) based on [this template](https://github.com/joelparkerhenderson/architecture-decision-record/blob/main/templates/decision-record-template-by-michael-nygard/index.md) \ No newline at end of file diff --git a/mev-boost-relay/CODE_OF_CONDUCT.md b/mev-boost-relay/CODE_OF_CONDUCT.md deleted file mode 100644 index f4f5f9f01..000000000 --- a/mev-boost-relay/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,133 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -We as members, contributors, and leaders pledge to make participation in our -community a harassment-free experience for everyone, regardless of age, body -size, visible or invisible disability, ethnicity, sex characteristics, gender -identity and expression, level of experience, education, socio-economic status, -nationality, personal appearance, race, caste, color, religion, or sexual -identity and orientation. - -We pledge to act and interact in ways that contribute to an open, welcoming, -diverse, inclusive, and healthy community.
- -## Our Standards - -Examples of behavior that contributes to a positive environment for our -community include: - -* Demonstrating empathy and kindness toward other people -* Being respectful of differing opinions, viewpoints, and experiences -* Giving and gracefully accepting constructive feedback -* Accepting responsibility and apologizing to those affected by our mistakes, - and learning from the experience -* Focusing on what is best not just for us as individuals, but for the overall - community - -Examples of unacceptable behavior include: - -* The use of sexualized language or imagery, and sexual attention or advances of - any kind -* Trolling, insulting or derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or email address, - without their explicit permission -* Other conduct which could reasonably be considered inappropriate in a - professional setting - -## Enforcement Responsibilities - -Community leaders are responsible for clarifying and enforcing our standards of -acceptable behavior and will take appropriate and fair corrective action in -response to any behavior that they deem inappropriate, threatening, offensive, -or harmful. - -Community leaders have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions that are -not aligned to this Code of Conduct, and will communicate reasons for moderation -decisions when appropriate. - -## Scope - -This Code of Conduct applies within all community spaces, and also applies when -an individual is officially representing the community in public spaces. -Examples of representing our community include using an official e-mail address, -posting via an official social media account, or acting as an appointed -representative at an online or offline event. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be -reported to the community leaders responsible for enforcement writing an -email to leo@flashbots.net or contacting elopio#8526 in -[Discord](https://discord.com/invite/7hvTycdNcK). -All complaints will be reviewed and investigated promptly and fairly. - -All community leaders are obligated to respect the privacy and security of the -reporter of any incident. - -## Enforcement Guidelines - -Community leaders will follow these Community Impact Guidelines in determining -the consequences for any action they deem in violation of this Code of Conduct: - -### 1. Correction - -**Community Impact**: Use of inappropriate language or other behavior deemed -unprofessional or unwelcome in the community. - -**Consequence**: A private, written warning from community leaders, providing -clarity around the nature of the violation and an explanation of why the -behavior was inappropriate. A public apology may be requested. - -### 2. Warning - -**Community Impact**: A violation through a single incident or series of -actions. - -**Consequence**: A warning with consequences for continued behavior. No -interaction with the people involved, including unsolicited interaction with -those enforcing the Code of Conduct, for a specified period of time. This -includes avoiding interactions in community spaces as well as external channels -like social media. Violating these terms may lead to a temporary or permanent -ban. - -### 3. 
Temporary Ban - -**Community Impact**: A serious violation of community standards, including -sustained inappropriate behavior. - -**Consequence**: A temporary ban from any sort of interaction or public -communication with the community for a specified period of time. No public or -private interaction with the people involved, including unsolicited interaction -with those enforcing the Code of Conduct, is allowed during this period. -Violating these terms may lead to a permanent ban. - -### 4. Permanent Ban - -**Community Impact**: Demonstrating a pattern of violation of community -standards, including sustained inappropriate behavior, harassment of an -individual, or aggression toward or disparagement of classes of individuals. - -**Consequence**: A permanent ban from any sort of public interaction within the -community. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], -version 2.1, available at -[https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1]. - -Community Impact Guidelines were inspired by -[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. - -For answers to common questions about this code of conduct, see the FAQ at -[https://www.contributor-covenant.org/faq][FAQ]. Translations are available at -[https://www.contributor-covenant.org/translations][translations]. - -[homepage]: https://www.contributor-covenant.org -[v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html -[Mozilla CoC]: https://github.com/mozilla/diversity -[FAQ]: https://www.contributor-covenant.org/faq -[translations]: https://www.contributor-covenant.org/translations diff --git a/mev-boost-relay/CONTRIBUTING.md b/mev-boost-relay/CONTRIBUTING.md deleted file mode 100644 index a0b4f2b0b..000000000 --- a/mev-boost-relay/CONTRIBUTING.md +++ /dev/null @@ -1,67 +0,0 @@ -# Contributing guide - -Welcome to the Flashbots collective! - -Thanks for your help improving the project! We are so happy to have you! We just ask you to be nice when you play with us. - -Please start by reading our [license agreement](#individual-contributor-license-agreement) below, and our [code of conduct](CODE_OF_CONDUCT.md). - -## Install dependencies - -```bash -go install mvdan.cc/gofumpt@latest -go install honnef.co/go/tools/cmd/staticcheck@latest -go install github.com/golangci/golangci-lint/cmd/golangci-lint@latest -go install github.com/daixiang0/gci@latest -``` - -## Test - -```bash -make lint -make test -make test-race -make fmt -``` - -## Code style - -Start by making sure that your code is readable, consistent, and pretty. -Follow the [Clean Code](https://flashbots.notion.site/Clean-Code-13016c5c7ca649fba31ae19d797d7304) recommendations. - -## Send a pull request - -- Your proposed changes should be first described and discussed in an issue. -- Open the branch in a personal fork, not in the team repository. -- Every pull request should be small and represent a single change. If the problem is complicated, split it in multiple issues and pull requests. -- Every pull request should be covered by unit tests. - -We appreciate you, friend <3. - ---- - -# Individual Contributor License Agreement - -This text is adapted from Google's contributors license agreement: https://cla.developers.google.com/about/google-individual - -You accept and agree to the following terms and conditions for Your present and future Contributions submitted to Flashbots. 
Except for the license granted herein to Flashbots and recipients of software distributed by Flashbots, You reserve all right, title, and interest in and to Your Contributions. - -1. Definitions. - -"You" (or "Your") shall mean the copyright owner or legal entity authorized by the copyright owner that is making this Agreement with Flashbots. For legal entities, the entity making a Contribution and all other entities that control, are controlled by, or are under common control with that entity are considered to be a single Contributor. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. - -"Contribution" shall mean any original work of authorship, including any modifications or additions to an existing work, that is intentionally submitted by You to Flashbots for inclusion in, or documentation of, any of the products owned or managed by Flashbots (the "Work"). For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to Flashbots or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, Flashbots for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by You as "Not a Contribution." - -2. Grant of Copyright License. Subject to the terms and conditions of this Agreement, You hereby grant to Flashbots and to recipients of software distributed by Flashbots a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare derivative works of, publicly display, publicly perform, sublicense, and distribute Your Contributions and such derivative works, under the terms of the license which the project is using on the Submission Date or any licenses which are approved by the Open Source Initiative. - -3. Grant of Patent License. Subject to the terms and conditions of this Agreement, You hereby grant to Flashbots and to recipients of software distributed by Flashbots a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by You that are necessarily infringed by Your Contribution(s) alone or by combination of Your Contribution(s) with the Work to which such Contribution(s) was submitted. If any entity institutes patent litigation against You or any other entity (including a cross-claim or counterclaim in a lawsuit) alleging that your Contribution, or the Work to which you have contributed, constitutes direct or contributory patent infringement, then any patent licenses granted to that entity under this Agreement for that Contribution or Work shall terminate as of the date such litigation is filed. - -4. You represent that you are legally entitled to grant the above license. 
If your employer(s) has rights to intellectual property that you create that includes your Contributions, you represent that you have received permission to make Contributions on behalf of that employer, that your employer has waived such rights for your Contributions to Flashbots, or that your employer has executed a separate Corporate CLA with Flashbots. - -5. You represent that each of Your Contributions is Your original creation (see section 7 for submissions on behalf of others). You represent that Your Contribution submissions include complete details of any third-party license or other restriction (including, but not limited to, related patents and trademarks) of which you are personally aware and which are associated with any part of Your Contributions. - -6. You are not expected to provide support for Your Contributions, except to the extent You desire to provide support. You may provide support for free, for a fee, or not at all. Unless required by applicable law or agreed to in writing, You provide Your Contributions on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON- INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. - -7. Should You wish to submit work that is not Your original creation, You may submit it to Flashbots separately from any Contribution, identifying the complete details of its source and of any license or other restriction (including, but not limited to, related patents, trademarks, and license agreements) of which you are personally aware, and conspicuously marking the work as "Submitted on behalf of a third-party: [named here]". - -8. You agree to notify Flashbots of any facts or circumstances of which you become aware that would make these representations inaccurate in any respect. \ No newline at end of file diff --git a/mev-boost-relay/Dockerfile b/mev-boost-relay/Dockerfile deleted file mode 100644 index eced4ce05..000000000 --- a/mev-boost-relay/Dockerfile +++ /dev/null @@ -1,19 +0,0 @@ -# syntax=docker/dockerfile:1 -FROM golang:1.22 AS builder -ARG VERSION -WORKDIR /build - -# Cache for the modules -COPY go.mod go.sum ./ -RUN --mount=type=cache,target=/root/.cache/go-build go mod download - -# Now adding all the code and start building -ADD . . -RUN --mount=type=cache,target=/root/.cache/go-build GOOS=linux go build -trimpath -ldflags "-s -X cmd.Version=$VERSION -X main.Version=$VERSION -linkmode external -extldflags '-static'" -v -o mev-boost-relay . - -FROM alpine -RUN apk add --no-cache libstdc++ libc6-compat -WORKDIR /app -COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ -COPY --from=builder /build/mev-boost-relay /app/mev-boost-relay -ENTRYPOINT ["/app/mev-boost-relay"] diff --git a/mev-boost-relay/LICENSE b/mev-boost-relay/LICENSE deleted file mode 100644 index 0ad25db4b..000000000 --- a/mev-boost-relay/LICENSE +++ /dev/null @@ -1,661 +0,0 @@ - GNU AFFERO GENERAL PUBLIC LICENSE - Version 3, 19 November 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU Affero General Public License is a free, copyleft license for -software and other kinds of works, specifically designed to ensure -cooperation with the community in the case of network server software. 
- - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -our General Public Licenses are intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - Developers that use our General Public Licenses protect your rights -with two steps: (1) assert copyright on the software, and (2) offer -you this License which gives you legal permission to copy, distribute -and/or modify the software. - - A secondary benefit of defending all users' freedom is that -improvements made in alternate versions of the program, if they -receive widespread use, become available for other developers to -incorporate. Many developers of free software are heartened and -encouraged by the resulting cooperation. However, in the case of -software used on network servers, this result may fail to come about. -The GNU General Public License permits making a modified version and -letting the public access it on a server without ever releasing its -source code to the public. - - The GNU Affero General Public License is designed specifically to -ensure that, in such cases, the modified source code becomes available -to the community. It requires the operator of a network server to -provide the source code of the modified version running there to the -users of that server. Therefore, public use of a modified version, on -a publicly accessible server, gives the public access to the source -code of the modified version. - - An older license, called the Affero General Public License and -published by Affero, was designed to accomplish similar goals. This is -a different license, not a version of the Affero GPL, but Affero has -released a new version of the Affero GPL which permits relicensing under -this license. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU Affero General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. 
Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. 
This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. - - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. 
- - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. - - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. 
- - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. - - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. 
(Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. - - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. 
- - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. 
- - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. 
If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Remote Network Interaction; Use with the GNU General Public License. - - Notwithstanding any other provision of this License, if you modify the -Program, your modified version must prominently offer all users -interacting with it remotely through a computer network (if your version -supports such interaction) an opportunity to receive the Corresponding -Source of your version by providing access to the Corresponding Source -from a network server at no charge, through some standard or customary -means of facilitating copying of software. This Corresponding Source -shall include the Corresponding Source for any work covered by version 3 -of the GNU General Public License that is incorporated pursuant to the -following paragraph. - - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the work with which it is combined will remain governed by version -3 of the GNU General Public License. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU Affero General Public License from time to time. Such new versions -will be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU Affero General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU Affero General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU Affero General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 
- - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. - - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - - Copyright (C) - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU Affero General Public License as published - by the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU Affero General Public License for more details. - - You should have received a copy of the GNU Affero General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - If your software can interact with users remotely through a computer -network, you should also make sure that it provides a way for users to -get its source. For example, if your program is a web application, its -interface could display a "Source" link that leads users to an archive -of the code. There are many ways you could offer source, and different -solutions will be better for different programs; see section 13 for the -specific requirements. - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU AGPL, see -. diff --git a/mev-boost-relay/Makefile b/mev-boost-relay/Makefile deleted file mode 100644 index a42fa7db0..000000000 --- a/mev-boost-relay/Makefile +++ /dev/null @@ -1,49 +0,0 @@ -VERSION ?= $(shell git describe --tags --always --dirty="-dev") - -all: clean build - -v: - @echo "Version: ${VERSION}" - -clean: - git clean -fdx - -build: - go build -trimpath -ldflags "-s -X cmd.Version=${VERSION} -X main.Version=${VERSION}" -v -o mev-boost-relay . - -test: - go test ./... 
- -test-race: - go test -race ./... - -lint: - gofmt -d -s . - gofumpt -d -extra . - go vet ./... - staticcheck ./... - golangci-lint run - -gofumpt: - gofumpt -l -w -extra . - -fmt: - gofmt -s -w . - gofumpt -extra -w . - gci write . - go mod tidy - -test-coverage: - go test -race -v -covermode=atomic -coverprofile=coverage.out ./... - go tool cover -func coverage.out - -cover-html: - go test -coverprofile=/tmp/boost-relay.cover.tmp ./... - go tool cover -html=/tmp/boost-relay.cover.tmp - unlink /tmp/boost-relay.cover.tmp - -docker-image: - DOCKER_BUILDKIT=1 docker build --platform linux/amd64 --build-arg VERSION=${VERSION} . -t flashbots/mev-boost-relay - -lt: lint test -flt: fmt lint test \ No newline at end of file diff --git a/mev-boost-relay/README.flashbots.md b/mev-boost-relay/README.flashbots.md deleted file mode 100644 index edb49d4c2..000000000 --- a/mev-boost-relay/README.flashbots.md +++ /dev/null @@ -1,401 +0,0 @@ -# MEV-Boost Relay - -[![Goreport status](https://goreportcard.com/badge/github.com/flashbots/mev-boost-relay)](https://goreportcard.com/report/github.com/flashbots/mev-boost-relay) -[![Test status](https://github.com/flashbots/mev-boost-relay/workflows/Checks/badge.svg)](https://github.com/flashbots/mev-boost-relay/actions?query=workflow%3A%22Checks%22) -[![Docker hub](https://badgen.net/docker/size/flashbots/mev-boost-relay?icon=docker&label=image)](https://hub.docker.com/r/flashbots/mev-boost-relay/tags) - -MEV-Boost Relay for Ethereum proposer/builder separation (PBS). - -Currently live at: - -* [boost-relay.flashbots.net](https://boost-relay.flashbots.net) (also on [Goerli](https://boost-relay-goerli.flashbots.net), [Sepolia](https://boost-relay-sepolia.flashbots.net) and [Holesky](https://boost-relay-holesky.flashbots.net)) -* [relay.ultrasound.money](https://relay.ultrasound.money), [agnostic-relay.net](https://agnostic-relay.net), bloXroute relays ([light fork](https://github.com/bloXroute-Labs/mev-relay)) -* [mainnet.aestus.live](https://mainnet.aestus.live), [relay.edennetwork.io/info](https://relay.edennetwork.io/info), [mainnet-relay.securerpc.com](https://mainnet-relay.securerpc.com) - -Alternatives (not audited or endorsed): [blocknative/dreamboat](https://github.com/blocknative/dreamboat), [manifold/mev-freelay](https://github.com/manifoldfinance/mev-freelay) - -### See also - -* [Docker images](https://hub.docker.com/r/flashbots/mev-boost-relay) -* [mev-boost](https://github.com/flashbots/mev-boost) -* [Relay API specs](https://flashbots.github.io/relay-specs) -* [Guide for running mev-boost-relay at scale](https://flashbots.notion.site/Running-mev-boost-relay-at-scale-draft-4040ccd5186c425d9a860cbb29bbfe09) -* [Running relay and builders in custom devnets](https://gist.github.com/metachris/66df812f2920e6b0047afb9fdaf7df91#using-unnamed-devnets) -* [More docs](/docs/docs/) - -### Components - -The relay consists of three main components, which are designed to run and scale independently, and to be as simple as possible: - -1. [API](https://github.com/flashbots/mev-boost-relay/tree/main/services/api): Services that provide APIs for (a) proposers, (b) block builders, (c) data. -1. [Website](https://github.com/flashbots/mev-boost-relay/tree/main/services/website): Serving the [website requests](https://boost-relay.flashbots.net/) (information is pulled from Redis and database). -1. [Housekeeper](https://github.com/flashbots/mev-boost-relay/tree/main/services/housekeeper): Updates known validators, proposer duties, and more in the background. 
Only a single instance of this should run. - -### Dependencies - -1. Redis -1. PostgreSQL -1. one or more beacon nodes -1. block submission validation nodes -1. [optional] Memcached - -### Beacon nodes / CL clients - -- The relay services need access to one or more beacon node for event subscriptions (in particular the `head` and `payload_attributes` topics). -- You can specify multiple beacon nodes by providing a comma separated list of beacon node URIs. -- The beacon nodes need to support the [`payload_attributes` SSE event](https://github.com/ethereum/beacon-APIs/pull/305). -- Support the [v2 CL publish block endpoint](https://github.com/ethereum/beacon-APIs/pull/317) in the current main branch, since August 2. This is still - experimental and may or may not fully work. It requires at least one of these CL clients - - **Lighthouse+** [v4.3.0](https://github.com/sigp/lighthouse/releases) or later. Here's a [quick guide](https://gist.github.com/metachris/bcae9ae42e2fc834804241f991351c4e) for setting up Lighthouse. - - **Prysm** [v4.0.6](https://github.com/prysmaticlabs/prysm/releases) or later. -- The latest release (v0.26) still uses the old V1 broadcast endpoint using CL clients with custom validate-before-broadcast patches (see [README of the release for more details](https://github.com/flashbots/mev-boost-relay/tree/v0.26#beacon-nodes--cl-clients)) - -**Relays are strongly advised to run multiple beacon nodes!** -* The reason is that on getPayload, the block has to be validated and broadcast by a local beacon node before it is returned to the proposer. -* If the local beacon nodes don't accept it (i.e. because it's down), the block won't be returned to the proposer, which leads to the proposer missing the slot. -* The relay makes the validate+broadcast request to all beacon nodes concurrently, and returns as soon as the first request is successful. - -### Security - -A security assessment for the relay was conducted on 2022-08-22 by [lotusbumi](https://github.com/lotusbumi). Additional information can be found in the [Security](#security) section of this repository. - -If you find a security vulnerability on this project or any other initiative related to Flashbots, please let us know sending an email to security@flashbots.net. - ---- - -# Background - -MEV is a centralizing force on Ethereum. Unattended, the competition for MEV opportunities leads to consensus security instability and permissioned communication infrastructure between traders and block producers. This erodes neutrality, transparency, decentralization, and permissionlessness. - -Flashbots is a research and development organization working on mitigating the negative externalities of MEV. Flashbots started as a builder specializing in MEV extraction in proof-of-work Ethereum to democratize access to MEV and make the most profitable blocks available to all miners. >90% of miners are outsourcing some of their block construction to Flashbots today. - -The mev-boost relay is a trusted mediator between block producers and block builders. It enables all Ethereum proof-of-stake validators to offer their blockspace to not just Flashbots but other builders as well. This opens up the market to more builders and creates competition between them, leading to more revenue and choice for validators, and better censorship-resistance for Ethereum. 
- -In the future, [proposer/builder separation](https://ethresear.ch/t/two-slot-proposer-builder-separation/10980) will be enshrined in the Ethereum protocol itself to further harden its trust model. - -Read more in [Why run mev-boost?](https://writings.flashbots.net/writings/why-run-mevboost/) and in the [Frequently Asked Questions](https://github.com/flashbots/mev-boost/wiki/Frequently-Asked-Questions). - ---- - -# Usage - -## Running Postgres, Redis and Memcached -```bash -# Start PostgreSQL & Redis individually: -docker run -d -p 5432:5432 -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=postgres -e POSTGRES_DB=postgres postgres -docker run -d -p 6379:6379 redis - -# [optional] Start Memcached -docker run -d -p 11211:11211 memcached - -# Or with docker-compose: -docker-compose up -``` - -Note: docker-compose also runs an Adminer (a web frontend for Postgres) on http://localhost:8093/?username=postgres (db: `postgres`, username: `postgres`, password: `postgres`) - -Now start the services: - -```bash -# The housekeeper sets up the validators, and does various housekeeping -go run . housekeeper --network sepolia --db postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable - -# Run APIs for sepolia (using a dummy BLS secret key) -go run . api --network sepolia --secret-key 0x607a11b45a7219cc61a3d9c5fd08c7eebd602a6a19a977f8d3771d5711a550f2 --db postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable - -# Run Website for sepolia -go run . website --network sepolia --db postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable - -# Query status -curl localhost:9062/eth/v1/builder/status - -# Send test validator registrations -curl -X POST -H'Content-Encoding: gzip' localhost:9062/eth/v1/builder/validators --data-binary @testdata/valreg2.json.gz - -# Delete previous registrations -redis-cli DEL boost-relay/sepolia:validators-registration boost-relay/sepolia:validators-registration-timestamp -``` - - -## Environment variables - -#### General - -* `ACTIVE_VALIDATOR_HOURS` - number of hours to track active proposers in redis (default: `3`) -* `API_MAX_HEADER_BYTES` - http maximum header bytes (default: `60_000`) -* `API_TIMEOUT_READ_MS` - http read timeout in milliseconds (default: `1_500`) -* `API_TIMEOUT_READHEADER_MS` - http read header timeout in milliseconds (default: `600`) -* `API_TIMEOUT_WRITE_MS` - http write timeout in milliseconds (default: `10_000`) -* `API_TIMEOUT_IDLE_MS` - http idle timeout in milliseconds (default: `3_000`) -* `API_SHUTDOWN_WAIT_SEC` - how long to wait on shutdown before stopping server, to allow draining of requests (default: `30`) -* `API_SHUTDOWN_STOP_SENDING_BIDS` - whether API should stop sending bids during shutdown (nly useful in single-instance/testnet setups, default: `false`) -* `BLOCKSIM_MAX_CONCURRENT` - maximum number of concurrent block-sim requests (0 for no maximum, default: `4`) -* `BLOCKSIM_TIMEOUT_MS` - builder block submission validation request timeout (default: `3000`) -* `BROADCAST_MODE` - which broadcast mode to use for block publishing (default: `consensus_and_equivocation`) -* `DB_DONT_APPLY_SCHEMA` - disable applying DB schema on startup (useful for connecting data API to read-only replica) -* `DB_TABLE_PREFIX` - prefix to use for db tables (default uses `dev`) -* `GETPAYLOAD_RETRY_TIMEOUT_MS` - getPayload retry getting a payload if first try failed (default: `100`) -* `MEMCACHED_URIS` - optional comma separated list of memcached endpoints, typically used as secondary storage alongside Redis -* 
`MEMCACHED_EXPIRY_SECONDS` - item expiry timeout when using memcache (default: `45`) -* `MEMCACHED_CLIENT_TIMEOUT_MS` - client timeout in milliseconds (default: `250`) -* `MEMCACHED_MAX_IDLE_CONNS` - client max idle conns (default: `10`) -* `NUM_ACTIVE_VALIDATOR_PROCESSORS` - proposer API - number of goroutines to listen to the active validators channel -* `NUM_VALIDATOR_REG_PROCESSORS` - proposer API - number of goroutines to listen to the validator registration channel -* `NO_HEADER_USERAGENTS` - proposer API - comma separated list of user agents for which no bids should be returned -* `ENABLE_BUILDER_CANCELLATIONS` - whether to enable block builder cancellations -* `REDIS_URI` - main redis URI (default: `localhost:6379`) -* `REDIS_READONLY_URI` - optional, a secondary redis instance for heavy read operations - -#### Feature Flags - -* `DISABLE_PAYLOAD_DATABASE_STORAGE` - builder API - disable storing execution payloads in the database (i.e. when using memcached as data availability redundancy) -* `DISABLE_LOWPRIO_BUILDERS` - reject block submissions by low-prio builders -* `FORCE_GET_HEADER_204` - force 204 as getHeader response -* `ENABLE_IGNORABLE_VALIDATION_ERRORS` - enable ignorable validation errors -* `USE_V1_PUBLISH_BLOCK_ENDPOINT` - uses the v1 publish block endpoint on the beacon node -* `USE_SSZ_ENCODING_PUBLISH_BLOCK` - uses the SSZ encoding for the publish block endpoint - -#### Development Environment Variables - -* `RUN_DB_TESTS` - when set to "1" enables integration tests with Postgres using endpoint specified by environment variable `TEST_DB_DSN` -* `RUN_INTEGRATION_TESTS` - when set to "1" enables integration tests, currently used for testing Memcached using comma separated list of endpoints specified by `MEMCACHED_URIS` -* `TEST_DB_DSN` - specifies connection string using Data Source Name (DSN) for Postgres (default: postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable) - -#### Redis Tuning - -* `REDIS_CONNECTION_POOL_SIZE`, `REDIS_MIN_IDLE_CONNECTIONS`, `REDIS_READ_TIMEOUT_SEC`, `REDIS_POOL_TIMEOUT_SEC`, `REDIS_WRITE_TIMEOUT_SEC` (see also [the code here](https://github.com/flashbots/mev-boost-relay/blob/e39cd38010de26bf9a51d1a3e77fc235ea87b12f/datastore/redis.go#L35-L41)) - -#### Website - -* `LINK_BEACONCHAIN` - url for beaconcha.in (default: `https://beaconcha.in`) -* `LINK_DATA_API` - origin url for data api (https://domain:port) -* `LINK_ETHERSCAN` - url for etherscan (default: `https://etherscan.io`) -* `LISTEN_ADDR` - listen address for webserver (default: `localhost:9060`) -* `RELAY_URL` - full url for the relay (https://pubkey@host) -* `SHOW_CONFIG_DETAILS` - when set to "1", logs configuration details - -## Updating the website - -* Edit the HTML in `services/website/website.html` -* Edit template values in `testdata/website-htmldata.json` -* Generate a static version of the website with `go run scripts/website-staticgen/main.go` - -This builds a local copy of the template and saves it in `website-index.html` - -The website is using: -* [PureCSS](https://purecss.io/) -* [HeroIcons](https://heroicons.com/) - ---- - -# Technical Notes - -See [ARCHITECTURE.md](ARCHITECTURE.md) and [Running MEV-Boost-Relay at scale](https://flashbots.notion.site/Draft-Running-a-relay-4040ccd5186c425d9a860cbb29bbfe09) for more technical details! 
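All relay services are configured through environment variables like the ones documented above. As a rough illustration of the convention (a hypothetical helper, not the relay's actual implementation), an integer setting with a default can be read like this:

```go
package main

import (
	"fmt"
	"os"
	"strconv"
)

// getEnvInt returns the integer value of an environment variable, or the
// given default if the variable is unset or not a valid number.
func getEnvInt(key string, defaultValue int) int {
	if v, ok := os.LookupEnv(key); ok {
		if n, err := strconv.Atoi(v); err == nil {
			return n
		}
	}
	return defaultValue
}

func main() {
	// Example usage mirroring two of the documented defaults above.
	readTimeoutMs := getEnvInt("API_TIMEOUT_READ_MS", 1500)
	shutdownWaitSec := getEnvInt("API_SHUTDOWN_WAIT_SEC", 30)
	fmt.Println("read timeout (ms):", readTimeoutMs, "shutdown wait (sec):", shutdownWaitSec)
}
```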
- -## Storing execution payloads and redundant data availability - -By default, the execution payloads for all block submissions are stored in Redis and also in the Postgres database, -to provide redundant data availability for getPayload responses. However, the database table is not pruned automatically, -because rebuilding the indexes takes a lot of resources (a better option is using `TRUNCATE`). - -Storing all the payloads in the database can lead to terabytes of data in this particular table. It is also possible -to use memcached as a second data availability layer. Using memcached is optional and disabled by default. - -To enable memcached, supply the memcached URIs either via environment variable (e.g. -`MEMCACHED_URIS=localhost:11211`) or via the command line flag (`--memcached-uris`). - -You can disable storing the execution payloads in the database with this environment variable: -`DISABLE_PAYLOAD_DATABASE_STORAGE=1`. - -## Builder submission validation nodes - -You can use the [builder project](https://github.com/flashbots/builder) to validate block builder submissions. - -Here's an example systemd config: - -
-/etc/systemd/system/geth.service - -```ini -[Unit] -Description=mev-boost -Wants=network-online.target -After=network-online.target - -[Service] -User=ubuntu -Group=ubuntu -Environment=HOME=/home/ubuntu -Type=simple -KillMode=mixed -KillSignal=SIGINT -TimeoutStopSec=90 -Restart=on-failure -RestartSec=10s -ExecStart=/home/ubuntu/builder/build/bin/geth \ - --syncmode=snap \ - --datadir /var/lib/goethereum \ - --metrics \ - --metrics.expensive \ - --http \ - --http.api="engine,eth,web3,net,debug,flashbots" \ - --http.corsdomain "*" \ - --http.addr "0.0.0.0" \ - --http.port 8545 \ - --http.vhosts '*' \ - --ws \ - --ws.api="engine,eth,web3,net,debug" \ - --ws.addr 0.0.0.0 \ - --ws.port 8546 \ - --ws.api engine,eth,net,web3 \ - --ws.origins '*' \ - --graphql \ - --graphql.corsdomain '*' \ - --graphql.vhosts '*' \ - --authrpc.addr="0.0.0.0" \ - --authrpc.jwtsecret=/var/lib/goethereum/jwtsecret \ - --authrpc.vhosts '*' \ - --cache=8192 - -[Install] -WantedBy=multi-user.target -``` -
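Once the validation node is running, you can sanity-check that its JSON-RPC endpoint (port 8545 in the unit file above) is reachable, for example with a standard `eth_syncing` call. This is just a connectivity check, not something the relay does itself:

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Standard JSON-RPC eth_syncing request against the validation node.
	payload := []byte(`{"jsonrpc":"2.0","method":"eth_syncing","params":[],"id":1}`)
	resp, err := http.Post("http://localhost:8545", "application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(string(body)) // e.g. {"jsonrpc":"2.0","id":1,"result":false} once fully synced
}
```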
- -Sending blocks to the validation node: - -- The built-in [blocksim-ratelimiter](services/api/blocksim_ratelimiter.go) is a simple example queue implementation. -- By default, `BLOCKSIM_MAX_CONCURRENT` is set to 4, which allows 4 concurrent block simulations per API node. -- For production use, use the [prio-load-balancer](https://github.com/flashbots/prio-load-balancer) project for a single priority queue, - and disable the internal concurrency limit (set `BLOCKSIM_MAX_CONCURRENT` to `0`). - -## Beacon node setup - -### Lighthouse - -- Lighthouse with validation and equivocation check before broadcast: https://github.com/sigp/lighthouse/pull/4168 -- Run with the `--always-prepare-payload` and `--prepare-payload-lookahead 12000` flags, and a placeholder fee recipient. - -Here's a [quick guide](https://gist.github.com/metachris/bcae9ae42e2fc834804241f991351c4e) for setting up Lighthouse. - -Here's an example Lighthouse systemd config: - -
-/etc/systemd/system/lighthouse.service - -```ini -[Unit] -Description=Lighthouse -After=network.target -Wants=network.target - -[Service] -User=ubuntu -Group=ubuntu -Type=simple -Restart=always -RestartSec=5 -TimeoutStopSec=180 -ExecStart=/home/ubuntu/.cargo/bin/lighthouse bn \ - --network mainnet \ - --checkpoint-sync-url=https://mainnet-checkpoint-sync.attestant.io \ - --eth1 \ - --http \ - --http-address "0.0.0.0" \ - --http-port 3500 \ - --datadir=/mnt/data/lighthouse \ - --http-allow-sync-stalled \ - --execution-endpoints=http://localhost:8551 \ - --jwt-secrets=/var/lib/goethereum/jwtsecret \ - --disable-deposit-contract-sync \ - --always-prepare-payload \ - --prepare-payload-lookahead 12000 - -[Install] -WantedBy=default.target -``` - -
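The `--always-prepare-payload` and `--prepare-payload-lookahead` flags are what make the beacon node emit regular `payload_attributes` events. To confirm events are flowing before pointing the relay at the node, you can subscribe to the SSE topic directly; here is a minimal sketch using the same SSE library the relay uses (`github.com/r3labs/sse/v2`), assuming the beacon HTTP API on port 3500 as configured above:

```go
package main

import (
	"fmt"

	"github.com/r3labs/sse/v2"
)

func main() {
	// Subscribe to the beacon node's payload_attributes SSE topic and print raw events.
	client := sse.NewClient("http://localhost:3500/eth/v1/events?topics=payload_attributes")
	err := client.SubscribeRaw(func(msg *sse.Event) {
		fmt.Printf("payload_attributes event: %s\n", msg.Data)
	})
	if err != nil {
		panic(err)
	}
}
```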
- - -### Prysm - -- Prysm with validation and equivocation check before broadcast: https://github.com/prysmaticlabs/prysm/pull/12335 -- Use `--grpc-max-msg-size 104857600`, because the default getAllValidators response is otherwise too big and the request fails. - -Here's an example Prysm systemd config: - -
-/etc/systemd/system/prysm.service - -```ini -[Unit] -Description=Prysm -After=network.target -Wants=network.target - -[Service] -User=ubuntu -Group=ubuntu -Type=simple -Restart=always -RestartSec=5 -TimeoutStopSec=180 -ExecStart=/home/ubuntu/prysm/bazel-bin/cmd/beacon-chain/beacon-chain_/beacon-chain \ - --accept-terms-of-use \ - --enable-debug-rpc-endpoints \ - --checkpoint-sync-url=https://mainnet-checkpoint-sync.attestant.io \ - --genesis-beacon-api-url=https://mainnet-checkpoint-sync.attestant.io \ - --grpc-gateway-host "0.0.0.0" \ - --datadir=/mnt/data/prysm \ - --p2p-max-peers 100 \ - --execution-endpoint=http://localhost:8551 \ - --jwt-secret=/var/lib/goethereum/jwtsecret \ - --min-sync-peers=1 \ - --grpc-max-msg-size 104857600 \ - --prepare-all-payloads \ - --disable-reorg-late-blocks - -[Install] -WantedBy=default.target -``` - -
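With either CL client running, you can verify that it is reachable and synced via the standard `/eth/v1/node/syncing` endpoint, which is the same endpoint the relay polls internally. A minimal check, assuming the beacon HTTP API on port 3500 (as in the Lighthouse unit above; adjust for your setup):

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// syncingResponse mirrors the relevant fields of the beacon API response,
// e.g. {"data":{"head_slot":"251114","sync_distance":"0","is_syncing":false}}.
type syncingResponse struct {
	Data struct {
		HeadSlot  string `json:"head_slot"`
		IsSyncing bool   `json:"is_syncing"`
	} `json:"data"`
}

func main() {
	resp, err := http.Get("http://localhost:3500/eth/v1/node/syncing")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var status syncingResponse
	if err := json.NewDecoder(resp.Body).Decode(&status); err != nil {
		panic(err)
	}
	fmt.Printf("head_slot=%s is_syncing=%v\n", status.Data.HeadSlot, status.Data.IsSyncing)
}
```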
- -## Bid Cancellations - -Block builders can opt into cancellations by submitting blocks to `/relay/v1/builder/blocks?cancellations=1`. This may incur a performance penalty (i.e. validation of submissions taking significantly longer). See also https://github.com/flashbots/mev-boost-relay/issues/348 - ---- - -# Maintainers - -- [@metachris](https://twitter.com/metachris) -- [@Ruteri](https://twitter.com/mmrosum) -- [@avalonche](https://github.com/avalonche) - -# Contributing - -[Flashbots](https://flashbots.net) is a research and development collective working on mitigating the negative externalities of decentralized economies. We contribute with the larger free software community to illuminate the dark forest. - -You are welcome here <3. - -- If you have a question, feedback or a bug report for this project, please [open a new Issue](https://github.com/flashbots/mev-boost/issues). -- If you would like to contribute with code, check the [CONTRIBUTING file](CONTRIBUTING.md) for further info about the development environment. -- We just ask you to be nice. Read our [code of conduct](CODE_OF_CONDUCT.md). - -# Security - -If you find a security vulnerability on this project or any other initiative related to Flashbots, please let us know sending an email to security@flashbots.net. - -## Audits - -- [20220822](docs/audit-20220822.md), by [lotusbumi](https://github.com/lotusbumi). - -# License - -The code in this project is free software under the [AGPL License version 3 or later](LICENSE). - ---- - -Made with ☀️ by the ⚡🤖 collective. diff --git a/mev-boost-relay/README.md b/mev-boost-relay/README.md deleted file mode 100644 index 0680ab693..000000000 --- a/mev-boost-relay/README.md +++ /dev/null @@ -1,16 +0,0 @@ -[flashbots mev-boost-relay readme](README.flashbots.md) - -# Bolt MEV-Boost Relay - -Bolt MEV-Boost Relay is a fork of the Flashbots MEV-Boost Relay package that -implements the functionality of the Constraints API. - -## How it works - -The MEV-Boost Relay package has the standard functionality of the Flashbots MEV-Boost Relay, -but with the added functionality of the Constraints API which can be summarized as follows: - -1. Listen for incoming constraint messages from proposers -2. Propagate constraints to connected builders -3. Validate incoming signed bids and inclusion proofs from builders -4. Forward the best bid to the proposer's MEV-Boost sidecar diff --git a/mev-boost-relay/SECURITY.md b/mev-boost-relay/SECURITY.md deleted file mode 100644 index 5e3a1d115..000000000 --- a/mev-boost-relay/SECURITY.md +++ /dev/null @@ -1,27 +0,0 @@ -# Security Policy - -The Flashbots team would appreciate any contributions, responsible disclosures and will make every effort to acknowledge your contributions. - -## Scope - -Bugs that affect the security of the Ethereum protocol in the `mev-boost` and `mev-boost-relay` repositories are in scope. Bugs in third-party dependencies are not in scope unless they result in a bug in `mev-boost` with demonstrable security impact. - -## Reporting a Vulnerability - -To report a vulnerability, please email security@flashbots.net and provide all the necessary details to reproduce it, such as: - -- Release version -- Operating System -- Consensus / Execution client combination and version -- Network (Mainnet or other testnet) - -Please include the steps to reproduce it using as much detail as possible with the corresponding logs from `mev-boost` and / or logs from the consensus / execution client. 
- -Once we have received your bug report, we will try to reproduce it and provide a more detailed response. -Once the reported bug has been successfully reproduced, the team will work on a fix. - -## Bounty Program - -The bug bounty program will be a shared bounty pool of up to 50k USD between `mev-boost`, `mev-boost-relay`. - -We would like to welcome node operators, builders, searchers and other participants in the ecosystem to contribute to this bounty pool to help make the ecosystem more secure. \ No newline at end of file diff --git a/mev-boost-relay/beaconclient/beacon_client_test.go b/mev-boost-relay/beaconclient/beacon_client_test.go deleted file mode 100644 index 0f13d922d..000000000 --- a/mev-boost-relay/beaconclient/beacon_client_test.go +++ /dev/null @@ -1,243 +0,0 @@ -package beaconclient - -import ( - "errors" - "net/http" - "net/http/httptest" - "testing" - "time" - - "github.com/flashbots/mev-boost-relay/common" - "github.com/gorilla/mux" - "github.com/stretchr/testify/require" -) - -const testPubKey = "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a" - -var errTest = errors.New("test error") - -func validatorResponseEntryToMap(entries []ValidatorResponseEntry) map[string]ValidatorResponseEntry { - m := make(map[string]ValidatorResponseEntry) - for _, entry := range entries { - m[entry.Validator.Pubkey] = entry - } - return m -} - -type testBackend struct { - t require.TestingT - beaconInstances []*MockBeaconInstance - beaconClient IMultiBeaconClient -} - -func newTestBackend(t require.TestingT, numBeaconNodes int) *testBackend { - mockBeaconInstances := make([]*MockBeaconInstance, numBeaconNodes) - beaconInstancesInterface := make([]IBeaconInstance, numBeaconNodes) - for i := 0; i < numBeaconNodes; i++ { - mockBeaconInstances[i] = NewMockBeaconInstance() - beaconInstancesInterface[i] = mockBeaconInstances[i] - } - - return &testBackend{ - t: t, - beaconInstances: mockBeaconInstances, - beaconClient: NewMultiBeaconClient(common.TestLog, beaconInstancesInterface), - } -} - -func TestBeaconInstance(t *testing.T) { - r := mux.NewRouter() - srv := httptest.NewServer(r) - bc := NewProdBeaconInstance(common.TestLog, srv.URL) - - r.HandleFunc("/eth/v1/beacon/states/1/validators", func(w http.ResponseWriter, _ *http.Request) { - resp := []byte(`{ - "execution_optimistic": false, - "data": [ - { - "index": "1", - "balance": "1", - "status": "active_ongoing", - "validator": { - "pubkey": "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a", - "withdrawal_credentials": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", - "effective_balance": "1", - "slashed": false, - "activation_eligibility_epoch": "1", - "activation_epoch": "1", - "exit_epoch": "1", - "withdrawable_epoch": "1" - } - } - ] -}`) - _, err := w.Write(resp) - require.NoError(t, err) - }) - - vals, err := bc.GetStateValidators("1") - require.NoError(t, err) - require.Len(t, vals.Data, 1) - require.Contains(t, validatorResponseEntryToMap(vals.Data), "0x93247f2209abcacf57b75a51dafae777f9dd38bc7053d1af526f220a7489a6d3a2753e5f3e8b1cfe39b56f43611df74a") -} - -func TestGetSyncStatus(t *testing.T) { - t.Run("returns status of highest head slot", func(t *testing.T) { - syncStatuses := []*SyncStatusPayloadData{ - { - HeadSlot: 3, - IsSyncing: true, - }, - { - HeadSlot: 1, - IsSyncing: false, - }, - { - HeadSlot: 2, - IsSyncing: false, - }, - } - - backend := newTestBackend(t, 3) - for i := 0; i < 
len(backend.beaconInstances); i++ { - backend.beaconInstances[i].MockSyncStatus = syncStatuses[i] - backend.beaconInstances[i].ResponseDelay = 10 * time.Millisecond * time.Duration(i) - } - - status, err := backend.beaconClient.BestSyncStatus() - require.NoError(t, err) - require.Equal(t, syncStatuses[1], status) - }) - - t.Run("returns status if at least one beacon node does not return error and is synced", func(t *testing.T) { - backend := newTestBackend(t, 2) - backend.beaconInstances[0].MockSyncStatusErr = errTest - status, err := backend.beaconClient.BestSyncStatus() - require.NoError(t, err) - require.NotNil(t, status) - }) - - t.Run("returns error if all beacon nodes return error or syncing", func(t *testing.T) { - backend := newTestBackend(t, 2) - backend.beaconInstances[0].MockSyncStatusErr = errTest - backend.beaconInstances[1].MockSyncStatus = &SyncStatusPayloadData{ - HeadSlot: 1, - IsSyncing: true, - } - status, err := backend.beaconClient.BestSyncStatus() - require.Equal(t, ErrBeaconNodeSyncing, err) - require.Nil(t, status) - }) -} - -func TestUpdateProposerDuties(t *testing.T) { - t.Run("returns err if all of the beacon nodes return error", func(t *testing.T) { - backend := newTestBackend(t, 2) - backend.beaconInstances[0].MockProposerDutiesErr = errTest - backend.beaconInstances[1].MockProposerDutiesErr = errTest - status, err := backend.beaconClient.GetProposerDuties(1) - require.Error(t, err) - require.Nil(t, status) - }) - - t.Run("get propose duties from the first beacon node that does not error", func(t *testing.T) { - mockResponse := &ProposerDutiesResponse{ - Data: []ProposerDutiesResponseData{ - { - Pubkey: testPubKey, - Slot: 2, - }, - }, - } - - backend := newTestBackend(t, 3) - backend.beaconInstances[0].MockProposerDutiesErr = errTest - backend.beaconInstances[1].ResponseDelay = 10 * time.Millisecond - backend.beaconInstances[1].MockProposerDuties = mockResponse - - duties, err := backend.beaconClient.GetProposerDuties(2) - require.NoError(t, err) - require.Equal(t, *mockResponse, *duties) - }) -} - -func TestFetchValidators(t *testing.T) { - t.Run("returns err if all of the beacon nodes return error", func(t *testing.T) { - backend := newTestBackend(t, 2) - backend.beaconInstances[0].MockFetchValidatorsErr = errTest - backend.beaconInstances[1].MockFetchValidatorsErr = errTest - status, err := backend.beaconClient.GetStateValidators("1") - require.Error(t, err) - require.Nil(t, status) - }) - - t.Run("get validator set first from beacon node that did not err", func(t *testing.T) { - entry := ValidatorResponseEntry{ - Validator: ValidatorResponseValidatorData{ - Pubkey: testPubKey, - }, - Index: 0, - Balance: "0", - Status: "", - } - - backend := newTestBackend(t, 3) - backend.beaconInstances[0].MockFetchValidatorsErr = errTest - backend.beaconInstances[1].AddValidator(entry) - backend.beaconInstances[2].MockFetchValidatorsErr = errTest - - validators, err := backend.beaconClient.GetStateValidators("1") - require.NoError(t, err) - require.Len(t, validators.Data, 1) - require.Contains(t, validatorResponseEntryToMap(validators.Data), testPubKey) - - // only beacon 2 should have a validator, and should be used by default - backend.beaconInstances[0].MockFetchValidatorsErr = nil - backend.beaconInstances[1].SetValidators(make(map[common.PubkeyHex]ValidatorResponseEntry)) - backend.beaconInstances[2].MockFetchValidatorsErr = nil - backend.beaconInstances[2].AddValidator(entry) - - validators, err = backend.beaconClient.GetStateValidators("1") - require.NoError(t, 
err) - require.Len(t, validators.Data, 1) - }) -} - -func TestGetForkSchedule(t *testing.T) { - r := mux.NewRouter() - srv := httptest.NewServer(r) - bc := NewProdBeaconInstance(common.TestLog, srv.URL) - - r.HandleFunc("/eth/v1/config/fork_schedule", func(w http.ResponseWriter, _ *http.Request) { - resp := []byte(`{ - "data": [ - { - "previous_version": "0x00000010", - "current_version": "0x00000020", - "epoch": "0" - }, - { - "previous_version": "0x00000020", - "current_version": "0x00000030", - "epoch": "10" - }, - { - "previous_version": "0x00000030", - "current_version": "0x00000040", - "epoch": "20" - }, - { - "previous_version": "0x00000040", - "current_version": "0x00000050", - "epoch": "30" - } - ] - }`) - _, err := w.Write(resp) - require.NoError(t, err) - }) - - forkSchedule, err := bc.GetForkSchedule() - require.NoError(t, err) - require.Len(t, forkSchedule.Data, 4) -} diff --git a/mev-boost-relay/beaconclient/mock_beacon_instance.go b/mev-boost-relay/beaconclient/mock_beacon_instance.go deleted file mode 100644 index d2d7df3ce..000000000 --- a/mev-boost-relay/beaconclient/mock_beacon_instance.go +++ /dev/null @@ -1,131 +0,0 @@ -package beaconclient - -import ( - "sync" - "time" - - "github.com/flashbots/mev-boost-relay/common" -) - -type MockBeaconInstance struct { - mu sync.RWMutex - validatorSet map[common.PubkeyHex]ValidatorResponseEntry - - MockSyncStatus *SyncStatusPayloadData - MockSyncStatusErr error - MockProposerDuties *ProposerDutiesResponse - MockProposerDutiesErr error - MockFetchValidatorsErr error - - ResponseDelay time.Duration -} - -func NewMockBeaconInstance() *MockBeaconInstance { - return &MockBeaconInstance{ - validatorSet: make(map[common.PubkeyHex]ValidatorResponseEntry), - - MockSyncStatus: &SyncStatusPayloadData{ - HeadSlot: 1, - IsSyncing: false, - }, - MockProposerDuties: &ProposerDutiesResponse{ - Data: []ProposerDutiesResponseData{}, - }, - MockSyncStatusErr: nil, - MockProposerDutiesErr: nil, - MockFetchValidatorsErr: nil, - - ResponseDelay: 0, - - mu: sync.RWMutex{}, - } -} - -func (c *MockBeaconInstance) AddValidator(entry ValidatorResponseEntry) { - c.mu.Lock() - c.validatorSet[common.NewPubkeyHex(entry.Validator.Pubkey)] = entry - c.mu.Unlock() -} - -func (c *MockBeaconInstance) SetValidators(validatorSet map[common.PubkeyHex]ValidatorResponseEntry) { - c.mu.Lock() - c.validatorSet = validatorSet - c.mu.Unlock() -} - -func (c *MockBeaconInstance) IsValidator(pubkey common.PubkeyHex) bool { - c.mu.RLock() - _, found := c.validatorSet[pubkey] - c.mu.RUnlock() - return found -} - -func (c *MockBeaconInstance) NumValidators() uint64 { - c.mu.RLock() - defer c.mu.RUnlock() - return uint64(len(c.validatorSet)) -} - -func (c *MockBeaconInstance) GetStateValidators(stateID string) (*GetStateValidatorsResponse, error) { - c.addDelay() - validatorResp := &GetStateValidatorsResponse{ //nolint:exhaustruct - Data: make([]ValidatorResponseEntry, 0), - } - for _, entry := range c.validatorSet { - validatorResp.Data = append(validatorResp.Data, entry) - } - return validatorResp, c.MockFetchValidatorsErr -} - -func (c *MockBeaconInstance) SyncStatus() (*SyncStatusPayloadData, error) { - c.addDelay() - return c.MockSyncStatus, c.MockSyncStatusErr -} - -func (c *MockBeaconInstance) CurrentSlot() (uint64, error) { - c.addDelay() - return c.MockSyncStatus.HeadSlot, nil -} - -func (c *MockBeaconInstance) SubscribeToHeadEvents(slotC chan HeadEventData) {} - -func (c *MockBeaconInstance) SubscribeToPayloadAttributesEvents(slotC chan PayloadAttributesEvent) {} - -func 
(c *MockBeaconInstance) GetProposerDuties(epoch uint64) (*ProposerDutiesResponse, error) { - c.addDelay() - return c.MockProposerDuties, c.MockProposerDutiesErr -} - -func (c *MockBeaconInstance) GetURI() string { - return "" -} - -func (c *MockBeaconInstance) addDelay() { - if c.ResponseDelay > 0 { - time.Sleep(c.ResponseDelay) - } -} - -func (c *MockBeaconInstance) PublishBlock(block *common.VersionedSignedProposal, broadcaseMode BroadcastMode) (code int, err error) { - return 0, nil -} - -func (c *MockBeaconInstance) GetGenesis() (*GetGenesisResponse, error) { - return nil, nil -} - -func (c *MockBeaconInstance) GetSpec() (spec *GetSpecResponse, err error) { - return nil, nil -} - -func (c *MockBeaconInstance) GetForkSchedule() (spec *GetForkScheduleResponse, err error) { - return nil, nil -} - -func (c *MockBeaconInstance) GetRandao(slot uint64) (spec *GetRandaoResponse, err error) { - return nil, nil -} - -func (c *MockBeaconInstance) GetWithdrawals(slot uint64) (spec *GetWithdrawalsResponse, err error) { - return nil, nil -} diff --git a/mev-boost-relay/beaconclient/mock_multi_beacon_client.go b/mev-boost-relay/beaconclient/mock_multi_beacon_client.go deleted file mode 100644 index ef34edd94..000000000 --- a/mev-boost-relay/beaconclient/mock_multi_beacon_client.go +++ /dev/null @@ -1,95 +0,0 @@ -package beaconclient - -import ( - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/flashbots/mev-boost-relay/common" - "github.com/sirupsen/logrus" - uberatomic "go.uber.org/atomic" -) - -type MockMultiBeaconClient struct { - log *logrus.Entry - bestBeaconIndex uberatomic.Int64 - beaconInstances []IBeaconInstance -} - -func NewMockMultiBeaconClient(log *logrus.Entry, beaconInstances []IBeaconInstance) *MockMultiBeaconClient { - return &MockMultiBeaconClient{ - log: log.WithField("component", "mockMultiBeaconClient"), - bestBeaconIndex: uberatomic.Int64{}, - beaconInstances: beaconInstances, - } -} - -func (*MockMultiBeaconClient) BestSyncStatus() (*SyncStatusPayloadData, error) { - return &SyncStatusPayloadData{HeadSlot: 1}, nil //nolint:exhaustruct -} - -func (*MockMultiBeaconClient) SubscribeToHeadEvents(slotC chan HeadEventData) {} - -func (*MockMultiBeaconClient) SubscribeToPayloadAttributesEvents(payloadAttrC chan PayloadAttributesEvent) { -} - -func (c *MockMultiBeaconClient) GetStateValidators(stateID string) (*GetStateValidatorsResponse, error) { - for i, client := range c.beaconInstances { - log := c.log.WithField("uri", client.GetURI()) - log.Debug("fetching validators") - - validators, err := client.GetStateValidators(stateID) - if err != nil { - log.WithError(err).Error("failed to fetch validators") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - // Received successful response. 
Set this index as last successful beacon node - return validators, nil - } - - return nil, ErrBeaconNodesUnavailable -} - -func (*MockMultiBeaconClient) GetProposerDuties(epoch uint64) (*ProposerDutiesResponse, error) { - return nil, nil -} - -func (*MockMultiBeaconClient) PublishBlock(block *common.VersionedSignedProposal) (code int, err error) { - return 0, nil -} - -func (*MockMultiBeaconClient) GetGenesis() (*GetGenesisResponse, error) { - resp := &GetGenesisResponse{} //nolint:exhaustruct - resp.Data.GenesisTime = 0 - return resp, nil -} - -func (*MockMultiBeaconClient) GetSpec() (spec *GetSpecResponse, err error) { - return nil, nil -} - -func (*MockMultiBeaconClient) GetForkSchedule() (spec *GetForkScheduleResponse, err error) { - resp := &GetForkScheduleResponse{ - Data: []struct { - PreviousVersion string `json:"previous_version"` - CurrentVersion string `json:"current_version"` - Epoch uint64 `json:"epoch,string"` - }{ - { - CurrentVersion: "", - Epoch: 1, - }, - }, - } - return resp, nil -} - -func (*MockMultiBeaconClient) GetRandao(slot uint64) (spec *GetRandaoResponse, err error) { - return nil, nil -} - -func (*MockMultiBeaconClient) GetWithdrawals(slot uint64) (spec *GetWithdrawalsResponse, err error) { - resp := &GetWithdrawalsResponse{} //nolint:exhaustruct - resp.Data.Withdrawals = append(resp.Data.Withdrawals, &capella.Withdrawal{}) //nolint:exhaustruct - return resp, nil -} diff --git a/mev-boost-relay/beaconclient/multi_beacon_client.go b/mev-boost-relay/beaconclient/multi_beacon_client.go deleted file mode 100644 index 63d4e2717..000000000 --- a/mev-boost-relay/beaconclient/multi_beacon_client.go +++ /dev/null @@ -1,414 +0,0 @@ -// Package beaconclient provides a beacon-node client -package beaconclient - -import ( - "errors" - "fmt" - "os" - "strings" - "sync" - - "github.com/flashbots/mev-boost-relay/common" - "github.com/sirupsen/logrus" - uberatomic "go.uber.org/atomic" -) - -var ( - ErrBeaconNodeSyncing = errors.New("beacon node is syncing or unavailable") - ErrBeaconNodesUnavailable = errors.New("all beacon nodes responded with error") - ErrWithdrawalsBeforeCapella = errors.New("withdrawals are not supported before capella") - ErrBeaconBlock202 = errors.New("beacon block failed validation but was still broadcast (202)") -) - -type BroadcastMode string - -const ( - Gossip BroadcastMode = "gossip" // lightweight gossip checks only - Consensus BroadcastMode = "consensus" // full consensus checks, including validation of all signatures and blocks fields - ConsensusAndEquivocation BroadcastMode = "consensus_and_equivocation" // the same as `consensus`, with an extra equivocation check -) - -// IMultiBeaconClient is the interface for the MultiBeaconClient, which can manage several beacon client instances under the hood -type IMultiBeaconClient interface { - BestSyncStatus() (*SyncStatusPayloadData, error) - SubscribeToHeadEvents(slotC chan HeadEventData) - // SubscribeToPayloadAttributesEvents subscribes to payload attributes events to validate fields such as prevrandao and withdrawals - SubscribeToPayloadAttributesEvents(payloadAttrC chan PayloadAttributesEvent) - - // GetStateValidators returns all active and pending validators from the beacon node - GetStateValidators(stateID string) (*GetStateValidatorsResponse, error) - GetProposerDuties(epoch uint64) (*ProposerDutiesResponse, error) - PublishBlock(block *common.VersionedSignedProposal) (code int, err error) - GetGenesis() (*GetGenesisResponse, error) - GetSpec() (spec *GetSpecResponse, err error) - 
GetForkSchedule() (spec *GetForkScheduleResponse, err error) - GetRandao(slot uint64) (spec *GetRandaoResponse, err error) - GetWithdrawals(slot uint64) (spec *GetWithdrawalsResponse, err error) -} - -// IBeaconInstance is the interface for a single beacon client instance -type IBeaconInstance interface { - SyncStatus() (*SyncStatusPayloadData, error) - CurrentSlot() (uint64, error) - SubscribeToHeadEvents(slotC chan HeadEventData) - SubscribeToPayloadAttributesEvents(slotC chan PayloadAttributesEvent) - GetStateValidators(stateID string) (*GetStateValidatorsResponse, error) - GetProposerDuties(epoch uint64) (*ProposerDutiesResponse, error) - GetURI() string - PublishBlock(block *common.VersionedSignedProposal, broadcastMode BroadcastMode) (code int, err error) - GetGenesis() (*GetGenesisResponse, error) - GetSpec() (spec *GetSpecResponse, err error) - GetForkSchedule() (spec *GetForkScheduleResponse, err error) - GetRandao(slot uint64) (spec *GetRandaoResponse, err error) - GetWithdrawals(slot uint64) (spec *GetWithdrawalsResponse, err error) -} - -type MultiBeaconClient struct { - log *logrus.Entry - bestBeaconIndex uberatomic.Int64 - beaconInstances []IBeaconInstance - - // feature flags - ffAllowSyncingBeaconNode bool - - broadcastMode BroadcastMode -} - -func NewMultiBeaconClient(log *logrus.Entry, beaconInstances []IBeaconInstance) *MultiBeaconClient { - client := &MultiBeaconClient{ - log: log.WithField("component", "beaconClient"), - beaconInstances: beaconInstances, - bestBeaconIndex: *uberatomic.NewInt64(0), - ffAllowSyncingBeaconNode: false, - broadcastMode: ConsensusAndEquivocation, - } - - // feature flags - if os.Getenv("ALLOW_SYNCING_BEACON_NODE") != "" { - client.log.Warn("env: ALLOW_SYNCING_BEACON_NODE: allow syncing beacon node") - client.ffAllowSyncingBeaconNode = true - } - - broadcastModeStr := os.Getenv("BROADCAST_MODE") - if broadcastModeStr != "" { - broadcastMode, ok := parseBroadcastModeString(broadcastModeStr) - if !ok { - msg := fmt.Sprintf("env: BROADCAST_MODE: invalid value %s, leaving to default value %s", broadcastModeStr, client.broadcastMode) - client.log.Warn(msg) - } else { - client.log.Info(fmt.Sprintf("env: BROADCAST_MODE: setting mode to %s", broadcastMode)) - client.broadcastMode = broadcastMode - } - } - - return client -} - -func (c *MultiBeaconClient) BestSyncStatus() (*SyncStatusPayloadData, error) { - var bestSyncStatus *SyncStatusPayloadData - var foundSyncedNode bool - - // Check each beacon-node sync status - var mu sync.Mutex - var wg sync.WaitGroup - for _, instance := range c.beaconInstances { - wg.Add(1) - go func(instance IBeaconInstance) { - defer wg.Done() - log := c.log.WithField("uri", instance.GetURI()) - log.Debug("getting sync status") - - syncStatus, err := instance.SyncStatus() - if err != nil { - log.WithError(err).Error("failed to get sync status") - return - } - - mu.Lock() - defer mu.Unlock() - - if foundSyncedNode { - return - } - - if bestSyncStatus == nil { - bestSyncStatus = syncStatus - } - - if !syncStatus.IsSyncing { - bestSyncStatus = syncStatus - foundSyncedNode = true - } - }(instance) - } - - // Wait for all requests to complete... - wg.Wait() - - if !foundSyncedNode && !c.ffAllowSyncingBeaconNode { - return nil, ErrBeaconNodeSyncing - } - - if bestSyncStatus == nil { - return nil, ErrBeaconNodesUnavailable - } - - return bestSyncStatus, nil -} - -// SubscribeToHeadEvents subscribes to head events from all beacon nodes. A single head event will be received multiple times, -// likely once for every beacon nodes. 
-func (c *MultiBeaconClient) SubscribeToHeadEvents(slotC chan HeadEventData) { - for _, instance := range c.beaconInstances { - go instance.SubscribeToHeadEvents(slotC) - } -} - -func (c *MultiBeaconClient) SubscribeToPayloadAttributesEvents(slotC chan PayloadAttributesEvent) { - for _, instance := range c.beaconInstances { - go instance.SubscribeToPayloadAttributesEvents(slotC) - } -} - -// GetStateValidators returns all known validators, and queries the beacon nodes in reverse order (because it is a heavy request for the CL client) -func (c *MultiBeaconClient) GetStateValidators(stateID string) (*GetStateValidatorsResponse, error) { - for i, client := range c.beaconInstancesByLeastUsed() { - log := c.log.WithField("uri", client.GetURI()) - log.Debug("fetching validators") - - validators, err := client.GetStateValidators(stateID) - if err != nil { - log.WithError(err).Error("failed to fetch validators") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - // Received successful response. Set this index as last successful beacon node - return validators, nil - } - - return nil, ErrBeaconNodesUnavailable -} - -func (c *MultiBeaconClient) GetProposerDuties(epoch uint64) (*ProposerDutiesResponse, error) { - // return the first successful beacon node response - clients := c.beaconInstancesByLastResponse() - log := c.log.WithField("epoch", epoch) - - for i, client := range clients { - log := log.WithField("uri", client.GetURI()) - log.Debug("fetching proposer duties") - - duties, err := client.GetProposerDuties(epoch) - if err != nil { - log.WithError(err).Error("failed to get proposer duties") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - // Received successful response. Set this index as last successful beacon node - return duties, nil - } - - return nil, ErrBeaconNodesUnavailable -} - -// beaconInstancesByLastResponse returns a list of beacon clients that has the client -// with the last successful response as the first element of the slice -func (c *MultiBeaconClient) beaconInstancesByLastResponse() []IBeaconInstance { - index := c.bestBeaconIndex.Load() - if index == 0 { - return c.beaconInstances - } - - instances := make([]IBeaconInstance, len(c.beaconInstances)) - copy(instances, c.beaconInstances) - instances[0], instances[index] = instances[index], instances[0] - - return instances -} - -// beaconInstancesByLeastUsed returns a list of beacon clients that has the client -// with the last successful response as the last element of the slice (used only by -// GetStateValidators, because it's a heavy call on the CL) -func (c *MultiBeaconClient) beaconInstancesByLeastUsed() []IBeaconInstance { - beaconInstances := c.beaconInstancesByLastResponse() - instances := make([]IBeaconInstance, len(c.beaconInstances)) - for i := 0; i < len(beaconInstances); i++ { - instances[i] = beaconInstances[len(beaconInstances)-i-1] - } - return instances -} - -type publishResp struct { - index int - code int - err error -} - -// PublishBlock publishes the signed beacon block via https://ethereum.github.io/beacon-APIs/#/ValidatorRequiredApi/publishBlock -func (c *MultiBeaconClient) PublishBlock(block *common.VersionedSignedProposal) (code int, err error) { - slot, err := block.Slot() - if err != nil { - c.log.WithError(err).Warn("failed to publish block as block slot is missing") - return 0, err - } - blockHash, err := block.ExecutionBlockHash() - if err != nil { - c.log.WithError(err).Warn("failed to publish block as block hash is missing") - return 0, err - } - log := 
c.log.WithFields(logrus.Fields{ - "slot": slot, - "blockHash": blockHash.String(), - }) - - clients := c.beaconInstancesByLastResponse() - - // The chan will be cleaner up automatically once the function exists even if it was still being written to - resChans := make(chan publishResp, len(clients)) - - for i, client := range clients { - log := log.WithField("uri", client.GetURI()) - log.Debug("publishing block") - go func(index int, client IBeaconInstance) { - code, err := client.PublishBlock(block, c.broadcastMode) - resChans <- publishResp{ - index: index, - code: code, - err: err, - } - }(i, client) - } - - var lastErrPublishResp publishResp - for i := 0; i < len(clients); i++ { - res := <-resChans - log = log.WithField("beacon", clients[res.index].GetURI()) - if res.err != nil { - log.WithField("statusCode", res.code).WithError(res.err).Warn("failed to publish block") - lastErrPublishResp = res - continue - } else if res.code == 202 { - // Should the block fail full validation, a separate success response code (202) is used to indicate that the block was successfully broadcast but failed integration. - // https://ethereum.github.io/beacon-APIs/?urls.primaryName=dev#/Beacon/publishBlock - log.WithField("statusCode", res.code).WithError(res.err).Warn("CL client failed block integration, but block was successfully broadcast") - lastErrPublishResp = res - continue - } - - c.bestBeaconIndex.Store(int64(res.index)) - - log.WithField("statusCode", res.code).Info("published block") - return res.code, nil - } - - if lastErrPublishResp.err == nil { - return lastErrPublishResp.code, nil - } - log.Error("failed to publish block on any CL node") - return lastErrPublishResp.code, fmt.Errorf("last error: %w", lastErrPublishResp.err) -} - -// GetGenesis returns the genesis info - https://ethereum.github.io/beacon-APIs/#/Beacon/getGenesis -func (c *MultiBeaconClient) GetGenesis() (genesisInfo *GetGenesisResponse, err error) { - clients := c.beaconInstancesByLastResponse() - for i, client := range clients { - log := c.log.WithField("uri", client.GetURI()) - if genesisInfo, err = client.GetGenesis(); err != nil { - log.WithError(err).Warn("failed to get genesis info") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - return genesisInfo, nil - } - - c.log.WithError(err).Error("failed to get genesis info on any CL node") - return nil, err -} - -// GetSpec - https://ethereum.github.io/beacon-APIs/#/Config/getSpec -func (c *MultiBeaconClient) GetSpec() (spec *GetSpecResponse, err error) { - clients := c.beaconInstancesByLastResponse() - for _, client := range clients { - log := c.log.WithField("uri", client.GetURI()) - if spec, err = client.GetSpec(); err != nil { - log.WithError(err).Warn("failed to get spec") - continue - } - - return spec, nil - } - - c.log.WithError(err).Error("failed to get spec on any CL node") - return nil, err -} - -// GetForkSchedule - https://ethereum.github.io/beacon-APIs/#/Config/getForkSchedule -func (c *MultiBeaconClient) GetForkSchedule() (spec *GetForkScheduleResponse, err error) { - clients := c.beaconInstancesByLastResponse() - for i, client := range clients { - log := c.log.WithField("uri", client.GetURI()) - if spec, err = client.GetForkSchedule(); err != nil { - log.WithError(err).Warn("failed to get fork schedule") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - return spec, nil - } - - c.log.WithError(err).Error("failed to get fork schedule on any CL node") - return nil, err -} - -// GetRandao - 3500/eth/v1/beacon/states//randao -func (c 
*MultiBeaconClient) GetRandao(slot uint64) (randaoResp *GetRandaoResponse, err error) { - clients := c.beaconInstancesByLastResponse() - for i, client := range clients { - log := c.log.WithField("uri", client.GetURI()) - if randaoResp, err = client.GetRandao(slot); err != nil { - log.WithField("slot", slot).WithError(err).Warn("failed to get randao") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - return randaoResp, nil - } - - c.log.WithField("slot", slot).WithError(err).Warn("failed to get randao from any CL node") - return nil, err -} - -// GetWithdrawals - 3500/eth/v1/beacon/states//withdrawals -func (c *MultiBeaconClient) GetWithdrawals(slot uint64) (withdrawalsResp *GetWithdrawalsResponse, err error) { - clients := c.beaconInstancesByLastResponse() - for i, client := range clients { - log := c.log.WithField("uri", client.GetURI()) - if withdrawalsResp, err = client.GetWithdrawals(slot); err != nil { - if strings.Contains(err.Error(), "Withdrawals not enabled before capella") { - break - } - log.WithField("slot", slot).WithError(err).Warn("failed to get withdrawals") - continue - } - - c.bestBeaconIndex.Store(int64(i)) - - return withdrawalsResp, nil - } - - if strings.Contains(err.Error(), "Withdrawals not enabled before capella") { - c.log.WithField("slot", slot).WithError(err).Debug("failed to get withdrawals as capella has not been reached") - return nil, ErrWithdrawalsBeforeCapella - } - - c.log.WithField("slot", slot).WithError(err).Warn("failed to get withdrawals from any CL node") - return nil, err -} diff --git a/mev-boost-relay/beaconclient/prod_beacon_instance.go b/mev-boost-relay/beaconclient/prod_beacon_instance.go deleted file mode 100644 index a77156d8d..000000000 --- a/mev-boost-relay/beaconclient/prod_beacon_instance.go +++ /dev/null @@ -1,373 +0,0 @@ -package beaconclient - -import ( - "encoding/json" - "fmt" - "net/http" - "os" - "strings" - "time" - - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/flashbots/mev-boost-relay/common" - "github.com/r3labs/sse/v2" - "github.com/sirupsen/logrus" -) - -type ProdBeaconInstance struct { - log *logrus.Entry - beaconURI string - - // feature flags - ffUseV1PublishBlockEndpoint bool - ffUseSSZEncodingPublishBlock bool -} - -func NewProdBeaconInstance(log *logrus.Entry, beaconURI string) *ProdBeaconInstance { - _log := log.WithFields(logrus.Fields{ - "component": "beaconInstance", - "beaconURI": beaconURI, - }) - - client := &ProdBeaconInstance{_log, beaconURI, false, false} - - // feature flags - if os.Getenv("USE_V1_PUBLISH_BLOCK_ENDPOINT") != "" { - _log.Warn("env: USE_V1_PUBLISH_BLOCK_ENDPOINT: use the v1 publish block endpoint") - client.ffUseV1PublishBlockEndpoint = true - } - - if os.Getenv("USE_SSZ_ENCODING_PUBLISH_BLOCK") != "" { - _log.Warn("env: USE_SSZ_ENCODING_PUBLISH_BLOCK: using SSZ encoding to publish blocks") - client.ffUseSSZEncodingPublishBlock = true - } - - return client -} - -// HeadEventData represents the data of a head event -// {"slot":"827256","block":"0x56b683afa68170c775f3c9debc18a6a72caea9055584d037333a6fe43c8ceb83","state":"0x419e2965320d69c4213782dae73941de802a4f436408fddd6f68b671b3ff4e55","epoch_transition":false,"execution_optimistic":false,"previous_duty_dependent_root":"0x5b81a526839b7fb67c3896f1125451755088fb578ad27c2690b3209f3d7c6b54","current_duty_dependent_root":"0x5f3232c0d5741e27e13754e1d88285c603b07dd6164b35ca57e94344a9e42942"} -type HeadEventData struct { - Slot uint64 `json:"slot,string"` - Block string `json:"block"` - State string `json:"state"` -} - 
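// Editor's note: the following is an illustrative sketch, not part of the
// original file. It shows how a caller might consume head events using the
// HeadEventData type above and the SubscribeToHeadEvents method defined below.
// The processHeadEvents helper is hypothetical.
func processHeadEvents(c *ProdBeaconInstance, log *logrus.Entry) {
	// Buffered so a briefly slow consumer does not block the SSE callback.
	headC := make(chan HeadEventData, 100)
	go c.SubscribeToHeadEvents(headC)
	for ev := range headC {
		log.WithFields(logrus.Fields{"slot": ev.Slot, "block": ev.Block}).Info("new head event")
	}
}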
-// PayloadAttributesEvent represents the data of a payload_attributes event -// {"version": "capella", "data": {"proposer_index": "123", "proposal_slot": "10", "parent_block_number": "9", "parent_block_root": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", "parent_block_hash": "0x9a2fefd2fdb57f74993c7780ea5b9030d2897b615b89f808011ca5aebed54eaf", "payload_attributes": {"timestamp": "123456", "prev_randao": "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2", "suggested_fee_recipient": "0x0000000000000000000000000000000000000000", "withdrawals": [{"index": "5", "validator_index": "10", "address": "0x0000000000000000000000000000000000000000", "amount": "15640"}]}}} -type PayloadAttributesEvent struct { - Version string `json:"version"` - Data PayloadAttributesEventData `json:"data"` -} - -type PayloadAttributesEventData struct { - ProposerIndex uint64 `json:"proposer_index,string"` - ProposalSlot uint64 `json:"proposal_slot,string"` - ParentBlockNumber uint64 `json:"parent_block_number,string"` - ParentBlockRoot string `json:"parent_block_root"` - ParentBlockHash string `json:"parent_block_hash"` - PayloadAttributes PayloadAttributes `json:"payload_attributes"` -} - -type PayloadAttributes struct { - Timestamp uint64 `json:"timestamp,string"` - PrevRandao string `json:"prev_randao"` - SuggestedFeeRecipient string `json:"suggested_fee_recipient"` - Withdrawals []*capella.Withdrawal `json:"withdrawals"` - ParentBeaconBlockRoot string `json:"parent_beacon_block_root"` -} - -func (c *ProdBeaconInstance) SubscribeToHeadEvents(slotC chan HeadEventData) { - eventsURL := fmt.Sprintf("%s/eth/v1/events?topics=head", c.beaconURI) - log := c.log.WithField("url", eventsURL) - log.Info("subscribing to head events") - - client := sse.NewClient(eventsURL) - - for { - err := client.SubscribeRaw(func(msg *sse.Event) { - var data HeadEventData - err := json.Unmarshal(msg.Data, &data) - if err != nil { - log.WithError(err).Error("could not unmarshal head event") - } else { - slotC <- data - } - }) - if err != nil { - log.WithError(err).Error("failed to subscribe to head events") - time.Sleep(1 * time.Second) - } - c.log.Warn("beaconclient SubscribeRaw/SubscribeToHeadEvents ended, reconnecting") - time.Sleep(500 * time.Millisecond) - } -} - -func (c *ProdBeaconInstance) SubscribeToPayloadAttributesEvents(payloadAttributesC chan PayloadAttributesEvent) { - eventsURL := fmt.Sprintf("%s/eth/v1/events?topics=payload_attributes", c.beaconURI) - log := c.log.WithField("url", eventsURL) - log.Info("subscribing to payload_attributes events") - - client := sse.NewClient(eventsURL) - - for { - err := client.SubscribeRaw(func(msg *sse.Event) { - var data PayloadAttributesEvent - err := json.Unmarshal(msg.Data, &data) - if err != nil { - log.WithError(err).Error("could not unmarshal payload_attributes event") - } else { - payloadAttributesC <- data - } - }) - if err != nil { - log.WithError(err).Error("failed to subscribe to payload_attributes events") - time.Sleep(1 * time.Second) - } - c.log.Warn("beaconclient SubscribeRaw/SubscribeToPayloadAttributesEvents ended, reconnecting") - time.Sleep(500 * time.Millisecond) - } -} - -type GetStateValidatorsResponse struct { - ExecutionOptimistic bool `json:"execution_optimistic"` - Finalized bool `json:"finalized"` - Data []ValidatorResponseEntry -} - -type ValidatorResponseEntry struct { - Index uint64 `json:"index,string"` // Index of validator in validator registry. - Balance string `json:"balance"` // Current validator balance in gwei. 
- Status string `json:"status"` - Validator ValidatorResponseValidatorData `json:"validator"` -} - -type ValidatorResponseValidatorData struct { - Pubkey string `json:"pubkey"` - WithdrawalCredentials string `json:"withdrawal_credentials"` - EffectiveBalance string `json:"effective_balance"` - Slashed bool `json:"slashed"` - ActivationEligibility uint64 `json:"activation_eligibility_epoch,string"` - ActivationEpoch uint64 `json:"activation_epoch,string"` - ExitEpoch uint64 `json:"exit_epoch,string"` - WithdrawableEpoch uint64 `json:"withdrawable_epoch,string"` -} - -// GetStateValidators loads all active and pending validators -// https://ethereum.github.io/beacon-APIs/#/Beacon/getStateValidators -func (c *ProdBeaconInstance) GetStateValidators(stateID string) (*GetStateValidatorsResponse, error) { - uri := fmt.Sprintf("%s/eth/v1/beacon/states/%s/validators?status=active,pending", c.beaconURI, stateID) - vd := new(GetStateValidatorsResponse) - _, err := fetchBeacon(http.MethodGet, uri, nil, vd, nil, http.Header{}, false) - return vd, err -} - -// SyncStatusPayload is the response payload for /eth/v1/node/syncing -// {"data":{"head_slot":"251114","sync_distance":"0","is_syncing":false,"is_optimistic":false}} -type SyncStatusPayload struct { - Data SyncStatusPayloadData -} - -type SyncStatusPayloadData struct { - HeadSlot uint64 `json:"head_slot,string"` - IsSyncing bool `json:"is_syncing"` -} - -// SyncStatus returns the current node sync-status -// https://ethereum.github.io/beacon-APIs/#/ValidatorRequiredApi/getSyncingStatus -func (c *ProdBeaconInstance) SyncStatus() (*SyncStatusPayloadData, error) { - uri := c.beaconURI + "/eth/v1/node/syncing" - timeout := 5 * time.Second - resp := new(SyncStatusPayload) - _, err := fetchBeacon(http.MethodGet, uri, nil, resp, &timeout, http.Header{}, false) - if err != nil { - return nil, err - } - return &resp.Data, nil -} - -func (c *ProdBeaconInstance) CurrentSlot() (uint64, error) { - syncStatus, err := c.SyncStatus() - if err != nil { - return 0, err - } - return syncStatus.HeadSlot, nil -} - -type ProposerDutiesResponse struct { - Data []ProposerDutiesResponseData -} - -type ProposerDutiesResponseData struct { - Slot uint64 `json:"slot,string"` - Pubkey string `json:"pubkey"` - ValidatorIndex uint64 `json:"validator_index,string"` -} - -// GetProposerDuties returns proposer duties for every slot in this epoch -// https://ethereum.github.io/beacon-APIs/#/Validator/getProposerDuties -func (c *ProdBeaconInstance) GetProposerDuties(epoch uint64) (*ProposerDutiesResponse, error) { - uri := fmt.Sprintf("%s/eth/v1/validator/duties/proposer/%d", c.beaconURI, epoch) - resp := new(ProposerDutiesResponse) - _, err := fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -type GetHeaderResponse struct { - Data struct { - Root string `json:"root"` - Header struct { - Message *GetHeaderResponseMessage - } - } -} - -type GetHeaderResponseMessage struct { - Slot uint64 `json:"slot,string"` - ProposerIndex uint64 `json:"proposer_index,string"` - ParentRoot string `json:"parent_root"` -} - -// GetHeader returns the latest header - https://ethereum.github.io/beacon-APIs/#/Beacon/getBlockHeader -func (c *ProdBeaconInstance) GetHeader() (*GetHeaderResponse, error) { - uri := fmt.Sprintf("%s/eth/v1/beacon/headers/head", c.beaconURI) - resp := new(GetHeaderResponse) - _, err := fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -// GetHeaderForSlot returns the header for a given slot - 
https://ethereum.github.io/beacon-APIs/#/Beacon/getBlockHeader -func (c *ProdBeaconInstance) GetHeaderForSlot(slot uint64) (*GetHeaderResponse, error) { - uri := fmt.Sprintf("%s/eth/v1/beacon/headers/%d", c.beaconURI, slot) - resp := new(GetHeaderResponse) - _, err := fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -func (c *ProdBeaconInstance) GetURI() string { - return c.beaconURI -} - -func (c *ProdBeaconInstance) PublishBlock(block *common.VersionedSignedProposal, broadcastMode BroadcastMode) (code int, err error) { - var uri string - if c.ffUseV1PublishBlockEndpoint { - uri = fmt.Sprintf("%s/eth/v1/beacon/blocks", c.beaconURI) - } else { - uri = fmt.Sprintf("%s/eth/v2/beacon/blocks?broadcast_validation=%s", c.beaconURI, broadcastMode) - } - headers := http.Header{} - headers.Add("Eth-Consensus-Version", strings.ToLower(block.Version.String())) // optional in v1, required in v2 - - // FIXME: using SSZ fails for now, let's skip this and just use fetchBeacon directly - // slot, err := block.Slot() - // if err != nil { - // slot = 0 - // } - // - // var payloadBytes []byte - // useSSZ := c.ffUseSSZEncodingPublishBlock - // log := c.log - // encodeStartTime := time.Now().UTC() - // if useSSZ { - // log = log.WithField("publishContentType", "ssz") - // payloadBytes, err = block.MarshalSSZ() - // } else { - // log = log.WithField("publishContentType", "json") - // payloadBytes, err = json.Marshal(block) - // } - // if err != nil { - // return 0, fmt.Errorf("could not marshal request: %w", err) - // } - // publishingStartTime := time.Now().UTC() - // encodeDurationMs := publishingStartTime.Sub(encodeStartTime).Milliseconds() - // code, err = fetchBeacon(http.MethodPost, uri, payloadBytes, nil, nil, headers, useSSZ) - // publishDurationMs := time.Now().UTC().Sub(publishingStartTime).Milliseconds() - // log.WithFields(logrus.Fields{ - // "slot": slot, - // "encodeDurationMs": encodeDurationMs, - // "publishDurationMs": publishDurationMs, - // "payloadBytes": len(payloadBytes), - // }).Info("finished publish block request") - // return code, err - - return fetchBeacon(http.MethodPost, uri, block, nil, nil, headers, false) -} - -type GetGenesisResponse struct { - Data GetGenesisResponseData `json:"data"` -} - -type GetGenesisResponseData struct { - GenesisTime uint64 `json:"genesis_time,string"` - GenesisValidatorsRoot string `json:"genesis_validators_root"` - GenesisForkVersion string `json:"genesis_fork_version"` -} - -// GetGenesis returns the genesis info - https://ethereum.github.io/beacon-APIs/#/Beacon/getGenesis -func (c *ProdBeaconInstance) GetGenesis() (*GetGenesisResponse, error) { - uri := fmt.Sprintf("%s/eth/v1/beacon/genesis", c.beaconURI) - resp := new(GetGenesisResponse) - _, err := fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -type GetSpecResponse struct { - SecondsPerSlot uint64 `json:"SECONDS_PER_SLOT,string"` //nolint:tagliatelle - DepositContractAddress string `json:"DEPOSIT_CONTRACT_ADDRESS"` //nolint:tagliatelle - DepositNetworkID string `json:"DEPOSIT_NETWORK_ID"` //nolint:tagliatelle - DomainAggregateAndProof string `json:"DOMAIN_AGGREGATE_AND_PROOF"` //nolint:tagliatelle - InactivityPenaltyQuotient string `json:"INACTIVITY_PENALTY_QUOTIENT"` //nolint:tagliatelle - InactivityPenaltyQuotientAltair string `json:"INACTIVITY_PENALTY_QUOTIENT_ALTAIR"` //nolint:tagliatelle -} - -// GetSpec - https://ethereum.github.io/beacon-APIs/#/Config/getSpec -func (c *ProdBeaconInstance) 
GetSpec() (spec *GetSpecResponse, err error) { - uri := fmt.Sprintf("%s/eth/v1/config/spec", c.beaconURI) - resp := new(GetSpecResponse) - _, err = fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -type GetForkScheduleResponse struct { - Data []struct { - PreviousVersion string `json:"previous_version"` - CurrentVersion string `json:"current_version"` - Epoch uint64 `json:"epoch,string"` - } -} - -// GetForkSchedule - https://ethereum.github.io/beacon-APIs/#/Config/getForkSchedule -func (c *ProdBeaconInstance) GetForkSchedule() (spec *GetForkScheduleResponse, err error) { - uri := fmt.Sprintf("%s/eth/v1/config/fork_schedule", c.beaconURI) - resp := new(GetForkScheduleResponse) - _, err = fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -type GetRandaoResponse struct { - Data struct { - Randao string `json:"randao"` - } -} - -// GetRandao - /eth/v1/beacon/states//randao -func (c *ProdBeaconInstance) GetRandao(slot uint64) (randaoResp *GetRandaoResponse, err error) { - uri := fmt.Sprintf("%s/eth/v1/beacon/states/%d/randao", c.beaconURI, slot) - resp := new(GetRandaoResponse) - _, err = fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} - -type GetWithdrawalsResponse struct { - Data struct { - Withdrawals []*capella.Withdrawal `json:"withdrawals"` - } -} - -// GetWithdrawals - /eth/v1/beacon/states//withdrawals -func (c *ProdBeaconInstance) GetWithdrawals(slot uint64) (withdrawalsResp *GetWithdrawalsResponse, err error) { - uri := fmt.Sprintf("%s/eth/v1/beacon/states/%d/withdrawals", c.beaconURI, slot) - resp := new(GetWithdrawalsResponse) - _, err = fetchBeacon(http.MethodGet, uri, nil, resp, nil, http.Header{}, false) - return resp, err -} diff --git a/mev-boost-relay/beaconclient/util.go b/mev-boost-relay/beaconclient/util.go deleted file mode 100644 index 17e6f7e46..000000000 --- a/mev-boost-relay/beaconclient/util.go +++ /dev/null @@ -1,120 +0,0 @@ -package beaconclient - -import ( - "bytes" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "strings" - "time" -) - -var ( - ErrHTTPErrorResponse = errors.New("got an HTTP error response") - ErrInvalidRequestPayload = errors.New("invalid request payload") - - StateIDHead = "head" - StateIDGenesis = "genesis" - StateIDFinalized = "finalized" - StateIDJustified = "justified" -) - -func parseBroadcastModeString(s string) (BroadcastMode, bool) { - broadcastModeMap := map[string]BroadcastMode{ - "gossip": Gossip, - "consensus": Consensus, - "consensus_and_equivocation": ConsensusAndEquivocation, - } - b, ok := broadcastModeMap[strings.ToLower(s)] - return b, ok -} - -func makeJSONRequest(method, url string, payload any) (*http.Request, error) { - payloadBytes, err := json.Marshal(payload) - if err != nil { - return nil, fmt.Errorf("could not marshal request: %w", err) - } - req, err := http.NewRequest(method, url, bytes.NewReader(payloadBytes)) - if err != nil { - return nil, fmt.Errorf("invalid request for %s: %w", url, err) - } - // Set content-type - req.Header.Add("Content-Type", "application/json") - return req, nil -} - -func makeSSZRequest(method, url string, payload any) (*http.Request, error) { - payloadBytes, ok := payload.([]byte) - if !ok { - return nil, fmt.Errorf("invalid payload type for SSZ request: %w", ErrInvalidRequestPayload) - } - req, err := http.NewRequest(method, url, bytes.NewReader(payloadBytes)) - if err != nil { - return nil, fmt.Errorf("invalid request for %s: %w", url, err) - 
} - // Set content-type - req.Header.Add("Content-Type", "application/octet-stream") - return req, nil -} - -func fetchBeacon(method, url string, payload, dst any, timeout *time.Duration, headers http.Header, ssz bool) (code int, err error) { - var req *http.Request - - if payload == nil { - req, err = http.NewRequest(method, url, nil) - } else { - if ssz { - req, err = makeSSZRequest(method, url, payload) - } else { - req, err = makeJSONRequest(method, url, payload) - } - } - - if err != nil { - return 0, fmt.Errorf("invalid request for %s: %w", url, err) - } - - for k, v := range headers { - req.Header.Add(k, v[0]) - } - req.Header.Set("accept", "application/json") - - client := http.DefaultClient - if timeout != nil && timeout.Seconds() > 0 { - client = &http.Client{ //nolint:exhaustruct - Timeout: *timeout, - } - } - resp, err := client.Do(req) - if err != nil { - return 0, fmt.Errorf("client refused for %s: %w", url, err) - } - defer resp.Body.Close() - - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return resp.StatusCode, fmt.Errorf("could not read response body for %s: %w", url, err) - } - - if resp.StatusCode >= http.StatusMultipleChoices { - ec := &struct { - Code int `json:"code"` - Message string `json:"message"` - }{} - if err = json.Unmarshal(bodyBytes, ec); err != nil { - return resp.StatusCode, fmt.Errorf("could not unmarshal error response from beacon node for %s from %s: %w", url, string(bodyBytes), err) - } - return resp.StatusCode, fmt.Errorf("%w: %s", ErrHTTPErrorResponse, ec.Message) - } - - if dst != nil { - err = json.Unmarshal(bodyBytes, dst) - if err != nil { - return resp.StatusCode, fmt.Errorf("could not unmarshal response for %s from %s: %w", url, string(bodyBytes), err) - } - } - - return resp.StatusCode, nil -} diff --git a/mev-boost-relay/cmd/api.go b/mev-boost-relay/cmd/api.go deleted file mode 100644 index 89abbf858..000000000 --- a/mev-boost-relay/cmd/api.go +++ /dev/null @@ -1,207 +0,0 @@ -package cmd - -import ( - "net/url" - "os" - "os/signal" - "strings" - "syscall" - - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/flashbots/mev-boost-relay/services/api" - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" -) - -var ( - apiDefaultListenAddr = common.GetEnv("LISTEN_ADDR", "localhost:9062") - apiDefaultBlockSim = common.GetEnv("BLOCKSIM_URI", "http://localhost:8545") - apiDefaultSecretKey = common.GetEnv("SECRET_KEY", "") - apiDefaultLogTag = os.Getenv("LOG_TAG") - - apiDefaultPprofEnabled = os.Getenv("PPROF") == "1" - apiDefaultInternalAPIEnabled = os.Getenv("ENABLE_INTERNAL_API") == "1" - - // Default Builder, Data, and Proposer API as true. 
- apiDefaultBuilderAPIEnabled = os.Getenv("DISABLE_BUILDER_API") != "1" - apiDefaultDataAPIEnabled = os.Getenv("DISABLE_DATA_API") != "1" - apiDefaultProposerAPIEnabled = os.Getenv("DISABLE_PROPOSER_API") != "1" - - apiListenAddr string - apiPprofEnabled bool - apiSecretKey string - apiBlockSimURL string - apiDebug bool - apiBuilderAPI bool - apiDataAPI bool - apiInternalAPI bool - apiProposerAPI bool - apiLogTag string -) - -func init() { - rootCmd.AddCommand(apiCmd) - apiCmd.Flags().BoolVar(&logJSON, "json", defaultLogJSON, "log in JSON format instead of text") - apiCmd.Flags().StringVar(&logLevel, "loglevel", defaultLogLevel, "log-level: trace, debug, info, warn/warning, error, fatal, panic") - apiCmd.Flags().StringVar(&apiLogTag, "log-tag", apiDefaultLogTag, "if set, a 'tag' field will be added to all log entries") - apiCmd.Flags().BoolVar(&apiDebug, "debug", false, "debug logging") - - apiCmd.Flags().StringVar(&apiListenAddr, "listen-addr", apiDefaultListenAddr, "listen address for webserver") - apiCmd.Flags().StringSliceVar(&beaconNodeURIs, "beacon-uris", defaultBeaconURIs, "beacon endpoints") - apiCmd.Flags().StringVar(&redisURI, "redis-uri", defaultRedisURI, "redis uri") - apiCmd.Flags().StringVar(&redisReadonlyURI, "redis-readonly-uri", defaultRedisReadonlyURI, "redis readonly uri") - apiCmd.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") - apiCmd.Flags().StringSliceVar(&memcachedURIs, "memcached-uris", defaultMemcachedURIs, - "Enable memcached, typically used as secondary backup to Redis for redundancy") - apiCmd.Flags().StringVar(&apiSecretKey, "secret-key", apiDefaultSecretKey, "secret key for signing bids") - apiCmd.Flags().StringVar(&apiBlockSimURL, "blocksim", apiDefaultBlockSim, "URL for block simulator") - apiCmd.Flags().StringVar(&network, "network", defaultNetwork, "Which network to use") - - apiCmd.Flags().BoolVar(&apiPprofEnabled, "pprof", apiDefaultPprofEnabled, "enable pprof API") - apiCmd.Flags().BoolVar(&apiBuilderAPI, "builder-api", apiDefaultBuilderAPIEnabled, "enable builder API (/builder/...)") - apiCmd.Flags().BoolVar(&apiDataAPI, "data-api", apiDefaultDataAPIEnabled, "enable data API (/data/...)") - apiCmd.Flags().BoolVar(&apiInternalAPI, "internal-api", apiDefaultInternalAPIEnabled, "enable internal API (/internal/...)") - apiCmd.Flags().BoolVar(&apiProposerAPI, "proposer-api", apiDefaultProposerAPIEnabled, "enable proposer API (/proposer/...)") -} - -var apiCmd = &cobra.Command{ - Use: "api", - Short: "Start the API server", - Run: func(cmd *cobra.Command, args []string) { - var err error - - if apiDebug { - logLevel = "debug" - } - - log := common.LogSetup(logJSON, logLevel).WithFields(logrus.Fields{ - "service": "relay/api", - "version": Version, - }) - if apiLogTag != "" { - log = log.WithField("tag", apiLogTag) - } - log.Infof("boost-relay %s", Version) - - networkInfo, err := common.NewEthNetworkDetails(network) - if err != nil { - log.WithError(err).Fatalf("error getting network details") - } - log.Infof("Using network: %s", networkInfo.Name) - log.Debug(networkInfo.String()) - - // Connect to beacon clients and ensure it's synced - if len(beaconNodeURIs) == 0 { - log.Fatalf("no beacon endpoints specified") - } - log.Infof("Using beacon endpoints: %s", strings.Join(beaconNodeURIs, ", ")) - var beaconInstances []beaconclient.IBeaconInstance - for _, uri := range beaconNodeURIs { - beaconInstances = append(beaconInstances, beaconclient.NewProdBeaconInstance(log, uri)) - } - beaconClient := 
beaconclient.NewMultiBeaconClient(log, beaconInstances) - - // Connect to Redis - if redisReadonlyURI == "" { - log.Infof("Connecting to Redis at %s ...", redisURI) - } else { - log.Infof("Connecting to Redis at %s / readonly: %s ...", redisURI, redisReadonlyURI) - } - redis, err := datastore.NewRedisCache(networkInfo.Name, redisURI, redisReadonlyURI) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Redis at %s", redisURI) - } - - // Connect to Memcached if it exists - var mem *datastore.Memcached - if len(memcachedURIs) > 0 { - log.Infof("Connecting to Memcached at %s ...", strings.Join(memcachedURIs, ", ")) - mem, err = datastore.NewMemcached(networkInfo.Name, memcachedURIs...) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Memcached") - } - } - - // Connect to Postgres - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := database.NewDatabaseService(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - log.Info("Setting up datastore...") - ds, err := datastore.NewDatastore(redis, mem, db) - if err != nil { - log.WithError(err).Fatalf("Failed setting up prod datastore") - } - - opts := api.RelayAPIOpts{ - Log: log, - ListenAddr: apiListenAddr, - BeaconClient: beaconClient, - Datastore: ds, - Redis: redis, - Memcached: mem, - DB: db, - EthNetDetails: *networkInfo, - BlockSimURL: apiBlockSimURL, - - BlockBuilderAPI: apiBuilderAPI, - DataAPI: apiDataAPI, - InternalAPI: apiInternalAPI, - ProposerAPI: apiProposerAPI, - PprofAPI: apiPprofEnabled, - } - - // Decode the private key - if apiSecretKey == "" { - log.Warn("No secret key specified, block builder API is disabled") - opts.BlockBuilderAPI = false - } else { - envSkBytes, err := hexutil.Decode(apiSecretKey) - if err != nil { - log.WithError(err).Fatal("incorrect secret key provided") - } - opts.SecretKey, err = bls.SecretKeyFromBytes(envSkBytes[:]) - if err != nil { - log.WithError(err).Fatal("incorrect builder API secret key provided") - } - } - - // Create the relay service - log.Info("Setting up relay service...") - srv, err := api.NewRelayAPI(opts) - if err != nil { - log.WithError(err).Fatal("failed to create service") - } - - // Create a signal handler - sigs := make(chan os.Signal, 1) - signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) - go func() { - sig := <-sigs - log.Infof("signal received: %s", sig) - err := srv.StopServer() - if err != nil { - log.WithError(err).Fatal("error stopping server") - } - }() - - // Start the server - log.Infof("Webserver starting on %s ...", apiListenAddr) - err = srv.StartServer() - if err != nil { - log.WithError(err).Fatal("server error") - } - log.Info("bye") - }, -} diff --git a/mev-boost-relay/cmd/housekeeper.go b/mev-boost-relay/cmd/housekeeper.go deleted file mode 100644 index 9b03d8be9..000000000 --- a/mev-boost-relay/cmd/housekeeper.go +++ /dev/null @@ -1,101 +0,0 @@ -package cmd - -import ( - "net/url" - "os" - "strings" - - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/flashbots/mev-boost-relay/services/housekeeper" - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" -) - -var ( - hkDefaultPprofEnabled = os.Getenv("PPROF") == "1" - 
hkDefaultPprofListenAddr = common.GetEnv("PPROF_LISTEN_ADDR", "localhost:9064") - - hkPprofEnabled bool - hkPprofListenAddr string -) - -func init() { - rootCmd.AddCommand(housekeeperCmd) - housekeeperCmd.Flags().BoolVar(&logJSON, "json", defaultLogJSON, "log in JSON format instead of text") - housekeeperCmd.Flags().StringVar(&logLevel, "loglevel", defaultLogLevel, "log-level: trace, debug, info, warn/warning, error, fatal, panic") - - housekeeperCmd.Flags().StringSliceVar(&beaconNodeURIs, "beacon-uris", defaultBeaconURIs, "beacon endpoints") - housekeeperCmd.Flags().StringVar(&redisURI, "redis-uri", defaultRedisURI, "redis uri") - housekeeperCmd.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") - - housekeeperCmd.Flags().StringVar(&network, "network", defaultNetwork, "Which network to use") - - housekeeperCmd.Flags().BoolVar(&hkPprofEnabled, "pprof", hkDefaultPprofEnabled, "enable pprof API") - housekeeperCmd.Flags().StringVar(&hkPprofListenAddr, "pprof-listen-addr", hkDefaultPprofListenAddr, "listen address for pprof server") -} - -var housekeeperCmd = &cobra.Command{ - Use: "housekeeper", - Short: "Service that runs in the background and does various housekeeping (removing old bids, updating proposer duties, saving metrics, etc.)", - Run: func(cmd *cobra.Command, args []string) { - var err error - - log := common.LogSetup(logJSON, logLevel).WithFields(logrus.Fields{ - "service": "relay/housekeeper", - "version": Version, - }) - log.Infof("boost-relay %s", Version) - - networkInfo, err := common.NewEthNetworkDetails(network) - if err != nil { - log.WithError(err).Fatalf("error getting network details") - } - log.Infof("Using network: %s", networkInfo.Name) - log.Debug(networkInfo.String()) - - // Connect to beacon clients and ensure it's synced - if len(beaconNodeURIs) == 0 { - log.Fatalf("no beacon endpoints specified") - } - log.Infof("Using beacon endpoints: %s", strings.Join(beaconNodeURIs, ", ")) - var beaconInstances []beaconclient.IBeaconInstance - for _, uri := range beaconNodeURIs { - beaconInstances = append(beaconInstances, beaconclient.NewProdBeaconInstance(log, uri)) - } - beaconClient := beaconclient.NewMultiBeaconClient(log, beaconInstances) - - // Connect to Redis and setup the datastore - redis, err := datastore.NewRedisCache(networkInfo.Name, redisURI, "") - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Redis at %s", redisURI) - } - - // Connect to Postgres - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := database.NewDatabaseService(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - opts := &housekeeper.HousekeeperOpts{ - Log: log, - Redis: redis, - DB: db, - BeaconClient: beaconClient, - - PprofAPI: hkPprofEnabled, - PprofListenAddress: hkPprofListenAddr, - } - service := housekeeper.NewHousekeeper(opts) - log.Info("Starting housekeeper service...") - err = service.Start() - log.WithError(err).Fatalf("Failed to start housekeeper") - }, -} diff --git a/mev-boost-relay/cmd/root.go b/mev-boost-relay/cmd/root.go deleted file mode 100644 index 0ed25cb53..000000000 --- a/mev-boost-relay/cmd/root.go +++ /dev/null @@ -1,26 +0,0 @@ -// Package cmd contains the cobra command line setup -package cmd - -import ( - "fmt" - "os" - - "github.com/spf13/cobra" -) - -var rootCmd = &cobra.Command{ 
- Use: "mev-boost-relay", - Short: "mev-boost-relay " + Version, - Long: `https://github.com/flashbots/mev-boost-relay`, - Run: func(cmd *cobra.Command, args []string) { - fmt.Printf("mev-boost-relay %s\n", Version) - _ = cmd.Help() - }, -} - -func Execute() { - if err := rootCmd.Execute(); err != nil { - fmt.Println(err) - os.Exit(1) - } -} diff --git a/mev-boost-relay/cmd/tool.go b/mev-boost-relay/cmd/tool.go deleted file mode 100644 index bfac13090..000000000 --- a/mev-boost-relay/cmd/tool.go +++ /dev/null @@ -1,25 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/flashbots/mev-boost-relay/cmd/tool" - "github.com/spf13/cobra" -) - -func init() { - toolCmd.AddCommand(tool.DataAPIExportPayloads) - toolCmd.AddCommand(tool.DataAPIExportBids) - toolCmd.AddCommand(tool.ArchiveExecutionPayloads) - toolCmd.AddCommand(tool.Migrate) - rootCmd.AddCommand(toolCmd) -} - -var toolCmd = &cobra.Command{ - Use: "tool", - Short: "tools for managing the database", - Run: func(cmd *cobra.Command, args []string) { - fmt.Println("Error: please use a valid subcommand") - _ = cmd.Help() - }, -} diff --git a/mev-boost-relay/cmd/tool/archive-execution-payloads.go b/mev-boost-relay/cmd/tool/archive-execution-payloads.go deleted file mode 100644 index 884cd42dd..000000000 --- a/mev-boost-relay/cmd/tool/archive-execution-payloads.go +++ /dev/null @@ -1,125 +0,0 @@ -package tool - -import ( - "encoding/csv" - "encoding/json" - "net/url" - "os" - "strings" - - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/database/vars" - "github.com/spf13/cobra" -) - -var doDelete bool - -func init() { - ArchiveExecutionPayloads.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") - ArchiveExecutionPayloads.Flags().Uint64Var(&idFirst, "id-from", 0, "start id (inclusive") - ArchiveExecutionPayloads.Flags().Uint64Var(&idLast, "id-to", 0, "end id (inclusive)") - ArchiveExecutionPayloads.Flags().StringVar(&dateStart, "date-start", "", "start date (inclusive)") - ArchiveExecutionPayloads.Flags().StringVar(&dateEnd, "date-end", "", "end date (exclusive)") - ArchiveExecutionPayloads.Flags().BoolVar(&doDelete, "delete", false, "whether to also delete the archived payloads in the DB") - ArchiveExecutionPayloads.Flags().StringSliceVar(&outFiles, "out", []string{}, "output filename") - _ = ArchiveExecutionPayloads.MarkFlagRequired("out") -} - -var ArchiveExecutionPayloads = &cobra.Command{ - Use: "archive-execution-payloads", - Short: "export execution payloads from the DB to a CSV or JSON file and archive by deleting the payloads", - Run: func(cmd *cobra.Command, args []string) { - if len(outFiles) == 0 { - log.Fatal("no output files specified") - } - log.Infof("exporting execution payloads to %s", strings.Join(outFiles, ", ")) - - if idLast == 0 && dateEnd == "" { - log.Fatal("must specify --id-to or --date-end") - } - - // Connect to Postgres - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := database.NewDatabaseService(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - // if date, then find corresponding id - if dateStart != "" { - // find first enrty at or after dateStart - query := `SELECT id FROM ` + vars.TableExecutionPayload + ` WHERE inserted_at::date >= date '` + dateStart + `' ORDER BY id ASC LIMIT 1;` - err = 
db.DB.QueryRow(query).Scan(&idFirst) - if err != nil { - log.WithError(err).Fatalf("failed to find start id for date %s", dateStart) - } - } - if dateEnd != "" { - // find last enry before dateEnd - query := `SELECT id FROM ` + vars.TableExecutionPayload + ` WHERE inserted_at::date < date '` + dateEnd + `' ORDER BY id DESC LIMIT 1;` - err = db.DB.QueryRow(query).Scan(&idLast) - if err != nil { - log.WithError(err).Fatalf("failed to find end id for date %s", dateEnd) - } - } - log.Infof("exporting ids %d to %d", idFirst, idLast) - - deliveredPayloads, err := db.GetExecutionPayloads(idFirst, idLast) - if err != nil { - log.WithError(err).Fatal("error getting execution payloads") - } - - log.Infof("got %d payloads", len(deliveredPayloads)) - if len(deliveredPayloads) == 0 { - return - } - - writeToFile := func(outFile string) { - f, err := os.Create(outFile) - if err != nil { - log.WithError(err).Fatal("failed to open file") - } - defer f.Close() - - if strings.HasSuffix(outFile, ".csv") { - // write CSV - w := csv.NewWriter(f) - defer w.Flush() - if err := w.Write(database.ExecutionPayloadEntryCSVHeader); err != nil { - log.WithError(err).Fatal("error writing record to file") - } - for _, record := range deliveredPayloads { - if err := w.Write(record.ToCSVRecord()); err != nil { - log.WithError(err).Fatal("error writing record to file") - } - } - } else { - // write JSON - encoder := json.NewEncoder(f) - err = encoder.Encode(deliveredPayloads) - if err != nil { - log.WithError(err).Fatal("failed to write json to file") - } - } - log.Infof("Wrote %d entries to %s", len(deliveredPayloads), outFile) - } - - for _, outFile := range outFiles { - writeToFile(outFile) - } - - if doDelete { - log.Infof("deleting archived payloads from DB") - err = db.DeleteExecutionPayloads(idFirst, idLast) - if err != nil { - log.WithError(err).Fatal("error deleting execution payloads") - } - } - - log.Infof("all done") - }, -} diff --git a/mev-boost-relay/cmd/tool/common.go b/mev-boost-relay/cmd/tool/common.go deleted file mode 100644 index c2d15951e..000000000 --- a/mev-boost-relay/cmd/tool/common.go +++ /dev/null @@ -1,17 +0,0 @@ -// Package tool exports tool subcommands -package tool - -import "github.com/flashbots/mev-boost-relay/common" - -var ( - log = common.LogSetup(false, "info") - defaultPostgresDSN = common.GetEnv("POSTGRES_DSN", "") - - postgresDSN string - outFiles []string - - idFirst uint64 - idLast uint64 - dateStart string - dateEnd string -) diff --git a/mev-boost-relay/cmd/tool/export-data-api-payloads-bids.go b/mev-boost-relay/cmd/tool/export-data-api-payloads-bids.go deleted file mode 100644 index 3a8de66c1..000000000 --- a/mev-boost-relay/cmd/tool/export-data-api-payloads-bids.go +++ /dev/null @@ -1,113 +0,0 @@ -package tool - -import ( - "encoding/csv" - "encoding/json" - "fmt" - "net/url" - "os" - "runtime" - "strings" - - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/spf13/cobra" -) - -var ( - slotFrom uint64 - slotTo uint64 -) - -func init() { - DataAPIExportBids.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") - DataAPIExportBids.Flags().Uint64Var(&slotFrom, "slot-from", 0, "start slot (inclusive") - DataAPIExportBids.Flags().Uint64Var(&slotTo, "slot-to", 0, "end slot (inclusive)") - DataAPIExportBids.Flags().StringSliceVar(&outFiles, "out", []string{}, "output filename") -} - -var DataAPIExportBids = &cobra.Command{ - Use: "data-api-export-bids", - Run: func(cmd *cobra.Command, args []string) { - 
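// Illustrative invocation, assuming the binary is built as `mev-boost-relay` (this
// subcommand is registered under the `tool` command in cmd/tool.go); the slot range
// and output path are example values only:
//
//	mev-boost-relay tool data-api-export-bids --db $POSTGRES_DSN --slot-from 7000000 --slot-to 7000100 --out bids.json
//
// When --out is omitted, CSV and JSON filenames are derived from the slot range below.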
if len(outFiles) == 0 { - outFnBase := fmt.Sprintf("builder-submissions_slot-%d-to-%d", slotFrom, slotTo) - outFiles = append(outFiles, outFnBase+".csv") - outFiles = append(outFiles, outFnBase+".json") - } - log.Infof("exporting data-api bids to %s", strings.Join(outFiles, ", ")) - - if slotFrom == 0 || slotTo == 0 { - log.Fatal("must specify --slot-from and --slot-to") - } - - // Connect to Postgres - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := database.NewDatabaseService(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - log.Info("Connected to Postgres database, starting queries") - log.Infof("exporting slots %d to %d (%d slots in total)...", slotFrom, slotTo, slotTo-slotFrom+1) - - bids, err := db.GetBuilderSubmissionsBySlots(slotFrom, slotTo) - if err != nil { - log.WithError(err).Fatal("failed getting bids") - } - - log.Infof("got %d bids", len(bids)) - entries := make([]common.BidTraceV2WithTimestampJSON, len(bids)) - for i, bid := range bids { - entries[i] = database.BuilderSubmissionEntryToBidTraceV2WithTimestampJSON(bid) - } - - if len(entries) == 0 { - return - } - - // Free up some memory - bids = nil //nolint:ineffassign - runtime.GC() - - writeToFile := func(outFile string) { - f, err := os.Create(outFile) - if err != nil { - log.WithError(err).Fatal("failed to open file") - } - defer f.Close() - - if strings.HasSuffix(outFile, ".csv") { - // write CSV - w := csv.NewWriter(f) - defer w.Flush() - if err := w.Write(entries[0].CSVHeader()); err != nil { - log.WithError(err).Fatal("error writing record to file") - } - for _, record := range entries { - if err := w.Write(record.ToCSVRecord()); err != nil { - log.WithError(err).Fatal("error writing record to file") - } - } - - } else { - // write JSON - encoder := json.NewEncoder(f) - err = encoder.Encode(entries) - if err != nil { - log.WithError(err).Fatal("failed to write json to file") - } - } - - log.Infof("Wrote %d entries to %s", len(entries), outFile) - runtime.GC() - } - - for _, outFile := range outFiles { - writeToFile(outFile) - } - }, -} diff --git a/mev-boost-relay/cmd/tool/export-data-api-payloads-delivered.go b/mev-boost-relay/cmd/tool/export-data-api-payloads-delivered.go deleted file mode 100644 index 3060d0b89..000000000 --- a/mev-boost-relay/cmd/tool/export-data-api-payloads-delivered.go +++ /dev/null @@ -1,118 +0,0 @@ -package tool - -import ( - "encoding/csv" - "encoding/json" - "net/url" - "os" - "strings" - - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/database/vars" - "github.com/spf13/cobra" -) - -func init() { - DataAPIExportPayloads.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") - DataAPIExportPayloads.Flags().Uint64Var(&idFirst, "id-from", 0, "start id (inclusive") - DataAPIExportPayloads.Flags().Uint64Var(&idLast, "id-to", 0, "end id (inclusive)") - DataAPIExportPayloads.Flags().StringVar(&dateStart, "date-start", "", "start date (inclusive)") - DataAPIExportPayloads.Flags().StringVar(&dateEnd, "date-end", "", "end date (exclusive)") - DataAPIExportPayloads.Flags().StringSliceVar(&outFiles, "out", []string{}, "output filename") - _ = DataAPIExportPayloads.MarkFlagRequired("out") -} - -var DataAPIExportPayloads = &cobra.Command{ - Use: 
"data-api-export-payloads", - Short: "export delivered payloads to the proposer from the DB to a CSV or JSON file", - Run: func(cmd *cobra.Command, args []string) { - if len(outFiles) == 0 { - log.Fatal("no output files specified") - } - log.Infof("exporting data-api payloads to %s", strings.Join(outFiles, ", ")) - - if idLast == 0 && dateEnd == "" { - log.Fatal("must specify --id-to or --date-end") - } - - // Connect to Postgres - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := database.NewDatabaseService(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - // if date, then find corresponding id - if dateStart != "" { - // find first enrty at or after dateStart - query := `SELECT id FROM ` + vars.TableDeliveredPayload + ` WHERE inserted_at::date >= date '` + dateStart + `' ORDER BY id ASC LIMIT 1;` - err = db.DB.QueryRow(query).Scan(&idFirst) - if err != nil { - log.WithError(err).Fatalf("failed to find start id for date %s", dateStart) - } - } - if dateEnd != "" { - // find last entry before dateEnd - query := `SELECT id FROM ` + vars.TableDeliveredPayload + ` WHERE inserted_at::date < date '` + dateEnd + `' ORDER BY id DESC LIMIT 1;` - err = db.DB.QueryRow(query).Scan(&idLast) - if err != nil { - log.WithError(err).Fatalf("failed to find end id for date %s", dateEnd) - } - } - log.Infof("exporting ids %d to %d", idFirst, idLast) - - deliveredPayloads, err := db.GetDeliveredPayloads(idFirst, idLast) - if err != nil { - log.WithError(err).Fatal("error getting recent payloads") - } - - log.Infof("got %d payloads", len(deliveredPayloads)) - entries := make([]common.BidTraceV2JSON, len(deliveredPayloads)) - for i, payload := range deliveredPayloads { - entries[i] = database.DeliveredPayloadEntryToBidTraceV2JSON(payload) - } - - if len(entries) == 0 { - return - } - - writeToFile := func(outFile string) { - f, err := os.Create(outFile) - if err != nil { - log.WithError(err).Fatal("failed to open file") - } - defer f.Close() - - if strings.HasSuffix(outFile, ".csv") { - // write CSV - w := csv.NewWriter(f) - defer w.Flush() - if err := w.Write(entries[0].CSVHeader()); err != nil { - log.WithError(err).Fatal("error writing record to file") - } - for _, record := range entries { - if err := w.Write(record.ToCSVRecord()); err != nil { - log.WithError(err).Fatal("error writing record to file") - } - } - } else { - // write JSON - encoder := json.NewEncoder(f) - err = encoder.Encode(entries) - if err != nil { - log.WithError(err).Fatal("failed to write json to file") - } - } - log.Infof("Wrote %d entries to %s", len(entries), outFile) - } - - for _, outFile := range outFiles { - writeToFile(outFile) - } - }, -} diff --git a/mev-boost-relay/cmd/tool/migrate.go b/mev-boost-relay/cmd/tool/migrate.go deleted file mode 100644 index 5bb616976..000000000 --- a/mev-boost-relay/cmd/tool/migrate.go +++ /dev/null @@ -1,40 +0,0 @@ -package tool - -import ( - "net/url" - - "github.com/flashbots/mev-boost-relay/database/migrations" - "github.com/flashbots/mev-boost-relay/database/vars" - "github.com/jmoiron/sqlx" - migrate "github.com/rubenv/sql-migrate" - "github.com/spf13/cobra" -) - -func init() { - Migrate.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") -} - -var Migrate = &cobra.Command{ - Use: "migrate", - Short: "migrate the database 
to the latest schema", - Run: func(cmd *cobra.Command, args []string) { - // Connect to Postgres - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := sqlx.Connect("postgres", postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - log.Infof("Migrating database ...") - migrate.SetTable(vars.TableMigrations) - numAppliedMigrations, err := migrate.Exec(db.DB, "postgres", migrations.Migrations, migrate.Up) - if err != nil { - log.WithError(err).Fatalf("Failed to migrate database") - } - log.WithField("num_applied_migrations", numAppliedMigrations).Info("Migrations applied successfully") - }, -} diff --git a/mev-boost-relay/cmd/variables.go b/mev-boost-relay/cmd/variables.go deleted file mode 100644 index 5f49e7e38..000000000 --- a/mev-boost-relay/cmd/variables.go +++ /dev/null @@ -1,29 +0,0 @@ -package cmd - -import ( - "os" - - "github.com/flashbots/mev-boost-relay/common" -) - -var ( - defaultNetwork = common.GetEnv("NETWORK", "") - defaultBeaconURIs = common.GetSliceEnv("BEACON_URIS", []string{"http://localhost:3500"}) - defaultRedisURI = common.GetEnv("REDIS_URI", "localhost:6379") - defaultRedisReadonlyURI = common.GetEnv("REDIS_READONLY_URI", "") - defaultPostgresDSN = common.GetEnv("POSTGRES_DSN", "") - defaultMemcachedURIs = common.GetSliceEnv("MEMCACHED_URIS", nil) - defaultLogJSON = os.Getenv("LOG_JSON") != "" - defaultLogLevel = common.GetEnv("LOG_LEVEL", "info") - - beaconNodeURIs []string - redisURI string - redisReadonlyURI string - postgresDSN string - memcachedURIs []string - - logJSON bool - logLevel string - - network string -) diff --git a/mev-boost-relay/cmd/version.go b/mev-boost-relay/cmd/version.go deleted file mode 100644 index 92e06fc7b..000000000 --- a/mev-boost-relay/cmd/version.go +++ /dev/null @@ -1,22 +0,0 @@ -package cmd - -import ( - "fmt" - - "github.com/spf13/cobra" -) - -var Version = "dev" // is set during build process - -func init() { - rootCmd.AddCommand(versionCmd) -} - -var versionCmd = &cobra.Command{ - Use: "version", - Short: "Print the version number of the relay application", - Long: `All software has versions. 
This is the boost relay's`, - Run: func(cmd *cobra.Command, args []string) { - fmt.Printf("boost-relay %s\n", Version) - }, -} diff --git a/mev-boost-relay/cmd/website.go b/mev-boost-relay/cmd/website.go deleted file mode 100644 index e77814168..000000000 --- a/mev-boost-relay/cmd/website.go +++ /dev/null @@ -1,129 +0,0 @@ -package cmd - -import ( - "net/url" - "os" - - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/flashbots/mev-boost-relay/services/website" - "github.com/sirupsen/logrus" - "github.com/spf13/cobra" -) - -var ( - websiteDefaultListenAddr = common.GetEnv("LISTEN_ADDR", "localhost:9060") - websiteDefaultShowConfigDetails = os.Getenv("SHOW_CONFIG_DETAILS") == "1" - websiteDefaultLinkBeaconchain = common.GetEnv("LINK_BEACONCHAIN", "https://beaconcha.in") - websiteDefaultLinkEtherscan = common.GetEnv("LINK_ETHERSCAN", "https://etherscan.io") - websiteDefaultLinkDataAPI = common.GetEnv("LINK_DATA_API", "") - websiteDefaultRelayURL = common.GetEnv("RELAY_URL", "") - - websiteListenAddr string - websitePubkeyOverride string - websiteShowConfigDetails bool - - websiteLinkBeaconchain string - websiteLinkEtherscan string - websiteLinkDataAPI string - websiteRelayURL string -) - -func init() { - rootCmd.AddCommand(websiteCmd) - websiteCmd.Flags().BoolVar(&logJSON, "json", defaultLogJSON, "log in JSON format instead of text") - websiteCmd.Flags().StringVar(&logLevel, "loglevel", defaultLogLevel, "log-level: trace, debug, info, warn/warning, error, fatal, panic") - - websiteCmd.Flags().StringVar(&websiteListenAddr, "listen-addr", websiteDefaultListenAddr, "listen address for webserver") - websiteCmd.Flags().StringVar(&redisURI, "redis-uri", defaultRedisURI, "redis uri") - websiteCmd.Flags().StringVar(&redisReadonlyURI, "redis-readonly-uri", defaultRedisReadonlyURI, "redis readonly uri") - websiteCmd.Flags().StringVar(&postgresDSN, "db", defaultPostgresDSN, "PostgreSQL DSN") - websiteCmd.Flags().StringVar(&websitePubkeyOverride, "pubkey-override", os.Getenv("PUBKEY_OVERRIDE"), "override for public key") - - websiteCmd.Flags().StringVar(&network, "network", defaultNetwork, "Which network to use") - websiteCmd.Flags().BoolVar(&websiteShowConfigDetails, "show-config-details", websiteDefaultShowConfigDetails, "show config details") - websiteCmd.Flags().StringVar(&websiteLinkBeaconchain, "link-beaconchain", websiteDefaultLinkBeaconchain, "url for beaconcha.in") - websiteCmd.Flags().StringVar(&websiteLinkEtherscan, "link-etherscan", websiteDefaultLinkEtherscan, "url for etherscan") - websiteCmd.Flags().StringVar(&websiteLinkDataAPI, "link-data-api", websiteDefaultLinkDataAPI, "origin url for data api (https://domain:port)") - websiteCmd.Flags().StringVar(&websiteRelayURL, "relay-url", websiteDefaultRelayURL, "full url for the relay (https://pubkey@host)") -} - -var websiteCmd = &cobra.Command{ - Use: "website", - Short: "Start the website server", - Run: func(cmd *cobra.Command, args []string) { - var err error - - log := common.LogSetup(logJSON, logLevel).WithFields(logrus.Fields{ - "service": "relay/website", - "version": Version, - }) - log.Infof("boost-relay %s", Version) - - networkInfo, err := common.NewEthNetworkDetails(network) - if err != nil { - log.WithError(err).Fatalf("error getting network details") - } - - log.Infof("Using network: %s", networkInfo.Name) - log.Debug(networkInfo.String()) - - // Connect to Redis - if redisReadonlyURI == "" { - log.Infof("Connecting to 
Redis at %s ...", redisURI) - } else { - log.Infof("Connecting to Redis at %s / readonly: %s ...", redisURI, redisReadonlyURI) - } - redis, err := datastore.NewRedisCache(networkInfo.Name, redisURI, redisReadonlyURI) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Redis at %s", redisURI) - } - - relayPubkey := "" - if websitePubkeyOverride != "" { - relayPubkey = websitePubkeyOverride - } else { - relayPubkey, err = redis.GetRelayConfig(datastore.RedisConfigFieldPubkey) - if err != nil { - log.WithError(err).Fatal("failed getting pubkey from Redis") - } - } - - // Connect to Postgres - log.Infof("Connecting to Postgres database...") - dbURL, err := url.Parse(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("couldn't read db URL") - } - log.Infof("Connecting to Postgres database at %s%s ...", dbURL.Host, dbURL.Path) - db, err := database.NewDatabaseService(postgresDSN) - if err != nil { - log.WithError(err).Fatalf("Failed to connect to Postgres database at %s%s", dbURL.Host, dbURL.Path) - } - - // Create the website service - opts := &website.WebserverOpts{ - ListenAddress: websiteListenAddr, - RelayPubkeyHex: relayPubkey, - NetworkDetails: networkInfo, - Redis: redis, - DB: db, - Log: log, - ShowConfigDetails: websiteShowConfigDetails, - LinkBeaconchain: websiteLinkBeaconchain, - LinkEtherscan: websiteLinkEtherscan, - LinkDataAPI: websiteLinkDataAPI, - RelayURL: websiteRelayURL, - } - - srv, err := website.NewWebserver(opts) - if err != nil { - log.WithError(err).Fatal("failed to create service") - } - - // Start the server - log.Infof("Webserver starting on %s ...", websiteListenAddr) - log.Fatal(srv.StartServer()) - }, -} diff --git a/mev-boost-relay/common/common.go b/mev-boost-relay/common/common.go deleted file mode 100644 index bf27b6177..000000000 --- a/mev-boost-relay/common/common.go +++ /dev/null @@ -1,55 +0,0 @@ -// Package common provides things used by various other components -package common - -import ( - "errors" - "fmt" - "time" - - "github.com/thedevbirb/flashbots-go-utils/cli" -) - -var ( - ErrServerAlreadyRunning = errors.New("server already running") - - SecondsPerSlot = uint64(cli.GetEnvInt("SEC_PER_SLOT", 12)) - DurationPerSlot = time.Duration(SecondsPerSlot) * time.Second - - SlotsPerEpoch = uint64(cli.GetEnvInt("SLOTS_PER_EPOCH", 32)) - DurationPerEpoch = DurationPerSlot * time.Duration(SlotsPerEpoch) -) - -func SlotToEpoch(slot uint64) uint64 { - return slot / SlotsPerEpoch -} - -// HTTPServerTimeouts are various timeouts for requests to the mev-boost HTTP server -type HTTPServerTimeouts struct { - Read time.Duration // Timeout for body reads. None if 0. - ReadHeader time.Duration // Timeout for header reads. None if 0. - Write time.Duration // Timeout for writes. None if 0. - Idle time.Duration // Timeout to disconnect idle client connections. None if 0. -} - -// BuilderStatus configures how builder blocks are processed. -type BuilderStatus struct { - IsHighPrio bool - IsBlacklisted bool - IsOptimistic bool -} - -// Profile captures performance metrics for the block submission handler. Each -// field corresponds to the number of microseconds in each stage. The `Total` -// field is the number of microseconds taken for entire flow. 
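// As a hypothetical example, Profile{PayloadLoad: 150, Decode: 120, Prechecks: 85,
// Simulation: 4300, RedisUpdate: 900, Total: 5555} is rendered by String() as
// "120,85,4300,900,5555"; note that PayloadLoad is not part of the String() output.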
-type Profile struct { - PayloadLoad uint64 - Decode uint64 - Prechecks uint64 - Simulation uint64 - RedisUpdate uint64 - Total uint64 -} - -func (p *Profile) String() string { - return fmt.Sprintf("%v,%v,%v,%v,%v", p.Decode, p.Prechecks, p.Simulation, p.RedisUpdate, p.Total) -} diff --git a/mev-boost-relay/common/errors.go b/mev-boost-relay/common/errors.go deleted file mode 100644 index c2bd2ecea..000000000 --- a/mev-boost-relay/common/errors.go +++ /dev/null @@ -1,10 +0,0 @@ -package common - -import "errors" - -var ( - ErrInvalidSlot = errors.New("invalid slot") - ErrInvalidHash = errors.New("invalid hash") - ErrInvalidPubkey = errors.New("invalid pubkey") - ErrInvalidSignature = errors.New("invalid signature") -) diff --git a/mev-boost-relay/common/logging.go b/mev-boost-relay/common/logging.go deleted file mode 100644 index 117f0a89f..000000000 --- a/mev-boost-relay/common/logging.go +++ /dev/null @@ -1,29 +0,0 @@ -package common - -import ( - "os" - - "github.com/sirupsen/logrus" -) - -func LogSetup(json bool, logLevel string) *logrus.Entry { - log := logrus.NewEntry(logrus.New()) - log.Logger.SetOutput(os.Stdout) - - if json { - log.Logger.SetFormatter(&logrus.JSONFormatter{}) - } else { - log.Logger.SetFormatter(&logrus.TextFormatter{ - FullTimestamp: true, - }) - } - - if logLevel != "" { - lvl, err := logrus.ParseLevel(logLevel) - if err != nil { - log.Fatalf("Invalid loglevel: %s", logLevel) - } - log.Logger.SetLevel(lvl) - } - return log -} diff --git a/mev-boost-relay/common/preconf.go b/mev-boost-relay/common/preconf.go deleted file mode 100644 index 1502768e2..000000000 --- a/mev-boost-relay/common/preconf.go +++ /dev/null @@ -1,94 +0,0 @@ -package common - -import ( - "bytes" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "strings" - - "github.com/sirupsen/logrus" - - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec/phase0" -) - -// VersionedSubmitBlockRequestWithProofs is a wrapper struct -// over `builderSpec.VersionedSubmitBlockRequest` -// to include preconfirmation proofs -type VersionedSubmitBlockRequestWithProofs struct { - Inner *VersionedSubmitBlockRequest `json:"inner"` - Proofs *InclusionProof `json:"proofs"` -} - -func (v *VersionedSubmitBlockRequestWithProofs) String() string { - out, err := json.Marshal(v) - if err != nil { - return err.Error() - } - return string(out) -} - -type BidWithPreconfirmationsProofs struct { - // The block bid - Bid *builderSpec.VersionedSignedBuilderBid `json:"bid"` - // The preconfirmations with proofs - Proofs *InclusionProof `json:"proofs"` -} - -func (b *BidWithPreconfirmationsProofs) String() string { - out, err := json.Marshal(b) - if err != nil { - return err.Error() - } - return string(out) -} - -type HexBytes []byte - -// MarshalJSON implements json.Marshaler. -func (h HexBytes) MarshalJSON() ([]byte, error) { - return []byte(fmt.Sprintf(`"%#x"`, []byte(h))), nil -} - -// UnmarshalJSON implements json.Unmarshaler. 
-func (h *HexBytes) UnmarshalJSON(input []byte) error { - if len(input) == 0 { - return errors.New("input missing") - } - - if !bytes.HasPrefix(input, []byte{'"', '0', 'x'}) { - return errors.New("invalid prefix") - } - - if !bytes.HasSuffix(input, []byte{'"'}) { - return errors.New("invalid suffix") - } - - var data string - json.Unmarshal(input, &data) - - res, _ := hex.DecodeString(strings.TrimPrefix(data, "0x")) - - *h = res - - return nil -} - -func (h HexBytes) String() string { - return JSONStringify(h) -} - -// InclusionProof is a Merkle Multiproof of inclusion of a set of TransactionHashes -type InclusionProof struct { - TransactionHashes []phase0.Hash32 `json:"transaction_hashes"` - GeneralizedIndexes []uint64 `json:"generalized_indexes"` - MerkleHashes []*HexBytes `json:"merkle_hashes"` -} - -func NewBoltLogger(service string) *logrus.Entry { - return LogSetup(false, "info").WithFields(logrus.Fields{ - "service": fmt.Sprintf("BOLT-%s", service), - }) -} diff --git a/mev-boost-relay/common/ssz_test.go b/mev-boost-relay/common/ssz_test.go deleted file mode 100644 index 7876017a9..000000000 --- a/mev-boost-relay/common/ssz_test.go +++ /dev/null @@ -1,196 +0,0 @@ -package common - -import ( - "bytes" - "encoding/json" - "fmt" - "os" - "testing" - - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/stretchr/testify/require" -) - -func TestSSZBuilderSubmission(t *testing.T) { - testCases := []struct { - name string - filepath string - hashTreeRoot string - }{ - { - name: "Capella", - filepath: "../testdata/submitBlockPayloadCapella_Goerli", - hashTreeRoot: "0x014c218ba41c2ed5388e7f0ed055e109b83692c772de5c2800140a95a4b66d13", - }, - { - name: "Deneb", - filepath: "../testdata/submitBlockPayloadDeneb_Goerli", - hashTreeRoot: "0x258007ab62465df2b5d798571d3ba0554302b7569eb1ca99405485d32723d63f", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - // json matches marshalled SSZ - jsonBytes := LoadGzippedBytes(t, fmt.Sprintf("%s.json.gz", testCase.filepath)) - - submitBlockData := new(VersionedSubmitBlockRequest) - err := json.Unmarshal(jsonBytes, &submitBlockData) - require.NoError(t, err) - - require.False(t, submitBlockData.IsEmpty()) - marshalledSszBytes, err := submitBlockData.MarshalSSZ() - require.NoError(t, err) - - sszBytes := LoadGzippedBytes(t, fmt.Sprintf("%s.ssz.gz", testCase.filepath)) - require.Equal(t, sszBytes, marshalledSszBytes) - - htr, err := submitBlockData.HashTreeRoot() - require.NoError(t, err) - require.Equal(t, testCase.hashTreeRoot, hexutil.Encode(htr[:])) - - // marshalled json matches ssz - submitBlockSSZ := new(VersionedSubmitBlockRequest) - err = submitBlockSSZ.UnmarshalSSZ(sszBytes) - require.NoError(t, err) - marshalledJSONBytes, err := json.Marshal(submitBlockSSZ) - require.NoError(t, err) - // trim white space from expected json - buffer := new(bytes.Buffer) - err = json.Compact(buffer, jsonBytes) - require.NoError(t, err) - require.Equal(t, buffer.Bytes(), marshalledJSONBytes) - }) - } -} - -func TestSSZGetHeaderResponse(t *testing.T) { - testCases := []struct { - name string - filepath string - hashTreeRoot string - }{ - { - name: "Capella", - filepath: "../testdata/getHeaderResponseCapella_Mainnet", - hashTreeRoot: 
"0x74bfedcdd2da65b4fb14800340ce1abbb202a0dee73aed80b1cf18fb5bc88190", - }, - { - name: "Deneb", - filepath: "../testdata/getHeaderResponseDeneb_Goerli", - hashTreeRoot: "0xc55312d9740709036d0f95168d53576a8c578fbab9cf66f147f8aaf1d2ea74da", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - // json -> marshalled ssz -> matches expected ssz - payload := new(builderSpec.VersionedSignedBuilderBid) - - jsonBytes, err := os.ReadFile(fmt.Sprintf("%s.json", testCase.filepath)) - require.NoError(t, err) - - err = json.Unmarshal(jsonBytes, &payload) - require.NoError(t, err) - - var ssz []byte - switch payload.Version { //nolint:exhaustive - case spec.DataVersionCapella: - ssz, err = payload.Capella.MarshalSSZ() - require.NoError(t, err) - case spec.DataVersionDeneb: - ssz, err = payload.Deneb.MarshalSSZ() - require.NoError(t, err) - default: - require.Fail(t, "unknown version") - } - - sszExpectedBytes, err := os.ReadFile(fmt.Sprintf("%s.ssz", testCase.filepath)) - require.NoError(t, err) - require.Equal(t, sszExpectedBytes, ssz) - - // check hash tree root - var htr [32]byte - switch payload.Version { //nolint:exhaustive - case spec.DataVersionCapella: - htr, err = payload.Capella.HashTreeRoot() - require.NoError(t, err) - case spec.DataVersionDeneb: - htr, err = payload.Deneb.HashTreeRoot() - require.NoError(t, err) - default: - require.Fail(t, "unknown version") - } - require.NoError(t, err) - require.Equal(t, testCase.hashTreeRoot, hexutil.Encode(htr[:])) - - // ssz -> marshalled json -> matches expected json - switch payload.Version { //nolint:exhaustive - case spec.DataVersionCapella: - payload.Capella = new(builderApiCapella.SignedBuilderBid) - err = payload.Capella.UnmarshalSSZ(sszExpectedBytes) - require.NoError(t, err) - case spec.DataVersionDeneb: - payload.Deneb = new(builderApiDeneb.SignedBuilderBid) - err = payload.Deneb.UnmarshalSSZ(sszExpectedBytes) - require.NoError(t, err) - default: - require.Fail(t, "unknown version") - } - marshalledJSONBytes, err := json.Marshal(payload) - require.NoError(t, err) - // trim white space from expected json - buffer := new(bytes.Buffer) - err = json.Compact(buffer, jsonBytes) - require.NoError(t, err) - require.Equal(t, buffer.Bytes(), marshalledJSONBytes) - }) - } -} - -func BenchmarkDecoding(b *testing.B) { - jsonBytes, err := os.ReadFile("../testdata/getHeaderResponseCapella_Mainnet.json") - require.NoError(b, err) - - sszBytes, err := os.ReadFile("../testdata/getHeaderResponseCapella_Mainnet.ssz") - require.NoError(b, err) - - payload := new(builderSpec.VersionedSignedBuilderBid) - b.Run("capella json", func(b *testing.B) { - for i := 0; i < b.N; i++ { - err = json.Unmarshal(jsonBytes, &payload) - require.NoError(b, err) - } - }) - payload.Capella = new(builderApiCapella.SignedBuilderBid) - b.Run("capella ssz", func(b *testing.B) { - for i := 0; i < b.N; i++ { - err = payload.Capella.UnmarshalSSZ(sszBytes) - require.NoError(b, err) - } - }) - - jsonBytes, err = os.ReadFile("../testdata/getHeaderResponseDeneb_Goerli.json") - require.NoError(b, err) - - sszBytes, err = os.ReadFile("../testdata/getHeaderResponseDeneb_Goerli.ssz") - require.NoError(b, err) - payload = new(builderSpec.VersionedSignedBuilderBid) - b.Run("deneb json", func(b *testing.B) { - for i := 0; i < b.N; i++ { - err = json.Unmarshal(jsonBytes, &payload) - require.NoError(b, err) - } - }) - payload.Deneb = new(builderApiDeneb.SignedBuilderBid) - b.Run("deneb ssz", func(b *testing.B) { - for i := 0; i < b.N; i++ { - err = 
payload.Deneb.UnmarshalSSZ(sszBytes) - require.NoError(b, err) - } - }) -} diff --git a/mev-boost-relay/common/test_utils.go b/mev-boost-relay/common/test_utils.go deleted file mode 100644 index 410f00007..000000000 --- a/mev-boost-relay/common/test_utils.go +++ /dev/null @@ -1,250 +0,0 @@ -package common - -import ( - "bytes" - "compress/gzip" - "encoding/base64" - "encoding/json" - "io" - "os" - "testing" - "time" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/utils" - "github.com/holiman/uint256" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/require" -) - -// TestLog is used to log information in the test methods -var TestLog = logrus.WithField("testing", true) - -func check(err error, args ...interface{}) { - if err != nil { - TestLog.Error(err, args) - panic(err) - } -} - -// _HexToAddress converts a hexadecimal string to an Ethereum address -func _HexToAddress(s string) (ret bellatrix.ExecutionAddress) { - ret, err := utils.HexToAddress(s) - check(err, " _HexToAddress: ", s) - return ret -} - -// _HexToPubkey converts a hexadecimal string to a BLS Public Key -func _HexToPubkey(s string) (ret phase0.BLSPubKey) { - ret, err := utils.HexToPubkey(s) - check(err, " _HexToPubkey: ", s) - return ret -} - -// _HexToSignature converts a hexadecimal string to a BLS Signature -func _HexToSignature(s string) (ret phase0.BLSSignature) { - ret, err := utils.HexToSignature(s) - check(err, " _HexToSignature: ", s) - return ret -} - -// _HexToHash converts a hexadecimal string to a Hash -func _HexToHash(s string) (ret phase0.Hash32) { - ret, err := utils.HexToHash(s) - check(err, " _HexToHash: ", s) - return ret -} - -var ValidPayloadRegisterValidator = builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: _HexToAddress("0xdb65fEd33dc262Fe09D9a2Ba8F80b329BA25f941"), - Timestamp: time.Unix(1606824043, 0), - GasLimit: 30000000, - Pubkey: _HexToPubkey( - "0x84e975405f8691ad7118527ee9ee4ed2e4e8bae973f6e29aa9ca9ee4aea83605ae3536d22acc9aa1af0545064eacf82e"), - }, - Signature: _HexToSignature( - "0xaf12df007a0c78abb5575067e5f8b089cfcc6227e4a91db7dd8cf517fe86fb944ead859f0781277d9b78c672e4a18c5d06368b603374673cf2007966cece9540f3a1b3f6f9e1bf421d779c4e8010368e6aac134649c7a009210780d401a778a5"), -} - -func TestBuilderSubmitBlockRequest(sk *bls.SecretKey, bid *BidTraceV2WithBlobFields, version spec.DataVersion) *VersionedSubmitBlockRequest { - signature, err := ssz.SignMessage(bid, ssz.DomainBuilder, sk) - check(err, " SignMessage: ", bid, sk) - if version == spec.DataVersionDeneb { - return &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ //nolint:exhaustruct - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.SubmitBlockRequest{ - Message: &bid.BidTrace, - Signature: signature, - ExecutionPayload: 
&deneb.ExecutionPayload{ //nolint:exhaustruct - Transactions: []bellatrix.Transaction{[]byte{0x03}}, - Timestamp: bid.Slot * 12, // 12 seconds per slot. - PrevRandao: _HexToHash("0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"), - Withdrawals: []*capella.Withdrawal{}, - BaseFeePerGas: uint256.NewInt(0), - BlobGasUsed: 321, - ExcessBlobGas: 123, - }, - BlobsBundle: &builderApiDeneb.BlobsBundle{ - Commitments: []deneb.KZGCommitment{}, - Proofs: []deneb.KZGProof{}, - Blobs: []deneb.Blob{}, - }, - }, - }, - } - } - return &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ //nolint:exhaustruct - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &bid.BidTrace, - Signature: signature, - ExecutionPayload: &capella.ExecutionPayload{ //nolint:exhaustruct - Transactions: []bellatrix.Transaction{[]byte{0x03}}, - Timestamp: bid.Slot * 12, // 12 seconds per slot. - PrevRandao: _HexToHash("0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2"), - Withdrawals: []*capella.Withdrawal{}, - }, - }, - }, - } -} - -type CreateTestBlockSubmissionOpts struct { - relaySk bls.SecretKey - relayPk phase0.BLSPubKey - domain phase0.Domain - - Version spec.DataVersion - Slot uint64 - ParentHash string - ProposerPubkey string -} - -func CreateTestBlockSubmission(t *testing.T, builderPubkey string, value *uint256.Int, opts *CreateTestBlockSubmissionOpts) (payload *VersionedSubmitBlockRequest, getPayloadResponse *builderApi.VersionedSubmitBlindedBlockResponse, getHeaderResponse *builderSpec.VersionedSignedBuilderBid) { - t.Helper() - var err error - - slot := uint64(0) - relaySk := bls.SecretKey{} - relayPk := phase0.BLSPubKey{} - domain := phase0.Domain{} - proposerPk := phase0.BLSPubKey{} - parentHash := phase0.Hash32{} - version := spec.DataVersionCapella - - if opts != nil { - relaySk = opts.relaySk - relayPk = opts.relayPk - domain = opts.domain - slot = opts.Slot - - if opts.ProposerPubkey != "" { - proposerPk, err = StrToPhase0Pubkey(opts.ProposerPubkey) - require.NoError(t, err) - } - - if opts.ParentHash != "" { - parentHash, err = StrToPhase0Hash(opts.ParentHash) - require.NoError(t, err) - } - - if opts.Version != spec.DataVersionUnknown { - version = opts.Version - } - } - - builderPk, err := StrToPhase0Pubkey(builderPubkey) - require.NoError(t, err) - - bidTrace := &builderApiV1.BidTrace{ //nolint:exhaustruct - BuilderPubkey: builderPk, - Value: value, - Slot: slot, - ParentHash: parentHash, - ProposerPubkey: proposerPk, - } - - if version == spec.DataVersionDeneb { - payload = &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ //nolint:exhaustruct - Version: version, - Deneb: &builderApiDeneb.SubmitBlockRequest{ - Message: bidTrace, - ExecutionPayload: &deneb.ExecutionPayload{ //nolint:exhaustruct - BaseFeePerGas: uint256.NewInt(0), - }, - BlobsBundle: &builderApiDeneb.BlobsBundle{ //nolint:exhaustruct - Commitments: make([]deneb.KZGCommitment, 0), - }, - Signature: phase0.BLSSignature{}, - }, - }, - } - } else { - payload = &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ //nolint:exhaustruct - Version: version, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: bidTrace, - ExecutionPayload: &capella.ExecutionPayload{}, //nolint:exhaustruct - Signature: phase0.BLSSignature{}, - }, - }, - } - } - - getHeaderResponse, err = BuildGetHeaderResponse(payload, &relaySk, 
&relayPk, domain) - require.NoError(t, err) - - getPayloadResponse, err = BuildGetPayloadResponse(payload) - require.NoError(t, err) - - return payload, getPayloadResponse, getHeaderResponse -} - -func LoadGzippedBytes(t *testing.T, filename string) []byte { - t.Helper() - fi, err := os.Open(filename) - require.NoError(t, err) - defer fi.Close() - fz, err := gzip.NewReader(fi) - require.NoError(t, err) - defer fz.Close() - val, err := io.ReadAll(fz) - require.NoError(t, err) - return val -} - -func LoadGzippedJSON(t *testing.T, filename string, dst any) { - t.Helper() - b := LoadGzippedBytes(t, filename) - err := json.Unmarshal(b, dst) - require.NoError(t, err) -} - -func MustB64Gunzip(s string) []byte { - b, _ := base64.StdEncoding.DecodeString(s) - gzreader, err := gzip.NewReader(bytes.NewReader(b)) - if err != nil { - panic(err) - } - output, err := io.ReadAll(gzreader) - if err != nil { - panic(err) - } - return output -} diff --git a/mev-boost-relay/common/types.go b/mev-boost-relay/common/types.go deleted file mode 100644 index 0cb139be1..000000000 --- a/mev-boost-relay/common/types.go +++ /dev/null @@ -1,684 +0,0 @@ -package common - -import ( - "encoding/json" - "errors" - "fmt" - "os" - "strconv" - "strings" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - ssz "github.com/ferranbt/fastssz" - boostSsz "github.com/flashbots/go-boost-utils/ssz" -) - -var ( - ErrUnknownNetwork = errors.New("unknown network") - ErrEmptyPayload = errors.New("empty payload") - - EthNetworkHolesky = "holesky" - EthNetworkSepolia = "sepolia" - EthNetworkGoerli = "goerli" - EthNetworkMainnet = "mainnet" - EthNetworkCustom = "custom" - - GenesisForkVersionHolesky = "0x01017000" - GenesisForkVersionSepolia = "0x90000069" - GenesisForkVersionGoerli = "0x00001020" - GenesisForkVersionMainnet = "0x00000000" - - GenesisValidatorsRootHolesky = "0x9143aa7c615a7f7115e2b6aac319c03529df8242ae705fba9df39b79c59fa8b1" - GenesisValidatorsRootSepolia = "0xd8ea171f3c94aea21ebc42a1ed61052acf3f9209c00e4efbaaddac09ed9b8078" - GenesisValidatorsRootGoerli = "0x043db0d9a83813551ee2f33450d23797757d430911a9320530ad8a0eabc43efb" - GenesisValidatorsRootMainnet = "0x4b363db94e286120d76eb905340fdd4e54bfe9f06bf33ff6cf5ad27f511bfe95" - - BellatrixForkVersionHolesky = "0x03017000" - BellatrixForkVersionSepolia = "0x90000071" - BellatrixForkVersionGoerli = "0x02001020" - BellatrixForkVersionMainnet = "0x02000000" - - CapellaForkVersionHolesky = "0x04017000" - CapellaForkVersionSepolia = "0x90000072" - CapellaForkVersionGoerli = "0x03001020" - CapellaForkVersionMainnet = "0x03000000" - - DenebForkVersionHolesky = "0x05017000" - DenebForkVersionSepolia = "0x90000073" - DenebForkVersionGoerli = "0x04001020" - DenebForkVersionMainnet = "0x04000000" - - ForkVersionStringBellatrix = "bellatrix" - ForkVersionStringCapella = "capella" - ForkVersionStringDeneb = "deneb" -) - -type EthNetworkDetails struct { - Name string - GenesisForkVersionHex string - GenesisValidatorsRootHex string - BellatrixForkVersionHex string - CapellaForkVersionHex string - DenebForkVersionHex string - - DomainBuilder phase0.Domain - DomainBeaconProposerBellatrix phase0.Domain - DomainBeaconProposerCapella phase0.Domain - DomainBeaconProposerDeneb phase0.Domain -} - -func NewEthNetworkDetails(networkName string) (ret *EthNetworkDetails, 
err error) { - var genesisForkVersion string - var genesisValidatorsRoot string - var bellatrixForkVersion string - var capellaForkVersion string - var denebForkVersion string - var domainBuilder phase0.Domain - var domainBeaconProposerBellatrix phase0.Domain - var domainBeaconProposerCapella phase0.Domain - var domainBeaconProposerDeneb phase0.Domain - - switch networkName { - case EthNetworkHolesky: - genesisForkVersion = GenesisForkVersionHolesky - genesisValidatorsRoot = GenesisValidatorsRootHolesky - bellatrixForkVersion = BellatrixForkVersionHolesky - capellaForkVersion = CapellaForkVersionHolesky - denebForkVersion = DenebForkVersionHolesky - case EthNetworkSepolia: - genesisForkVersion = GenesisForkVersionSepolia - genesisValidatorsRoot = GenesisValidatorsRootSepolia - bellatrixForkVersion = BellatrixForkVersionSepolia - capellaForkVersion = CapellaForkVersionSepolia - denebForkVersion = DenebForkVersionSepolia - case EthNetworkGoerli: - genesisForkVersion = GenesisForkVersionGoerli - genesisValidatorsRoot = GenesisValidatorsRootGoerli - bellatrixForkVersion = BellatrixForkVersionGoerli - capellaForkVersion = CapellaForkVersionGoerli - denebForkVersion = DenebForkVersionGoerli - case EthNetworkMainnet: - genesisForkVersion = GenesisForkVersionMainnet - genesisValidatorsRoot = GenesisValidatorsRootMainnet - bellatrixForkVersion = BellatrixForkVersionMainnet - capellaForkVersion = CapellaForkVersionMainnet - denebForkVersion = DenebForkVersionMainnet - case EthNetworkCustom: - genesisForkVersion = os.Getenv("GENESIS_FORK_VERSION") - genesisValidatorsRoot = os.Getenv("GENESIS_VALIDATORS_ROOT") - bellatrixForkVersion = os.Getenv("BELLATRIX_FORK_VERSION") - capellaForkVersion = os.Getenv("CAPELLA_FORK_VERSION") - denebForkVersion = os.Getenv("DENEB_FORK_VERSION") - default: - return nil, fmt.Errorf("%w: %s", ErrUnknownNetwork, networkName) - } - - domainBuilder, err = ComputeDomain(boostSsz.DomainTypeAppBuilder, genesisForkVersion, phase0.Root{}.String()) - if err != nil { - return nil, err - } - - domainBeaconProposerBellatrix, err = ComputeDomain(boostSsz.DomainTypeBeaconProposer, bellatrixForkVersion, genesisValidatorsRoot) - if err != nil { - return nil, err - } - - domainBeaconProposerCapella, err = ComputeDomain(boostSsz.DomainTypeBeaconProposer, capellaForkVersion, genesisValidatorsRoot) - if err != nil { - return nil, err - } - - domainBeaconProposerDeneb, err = ComputeDomain(boostSsz.DomainTypeBeaconProposer, denebForkVersion, genesisValidatorsRoot) - if err != nil { - return nil, err - } - - return &EthNetworkDetails{ - Name: networkName, - GenesisForkVersionHex: genesisForkVersion, - GenesisValidatorsRootHex: genesisValidatorsRoot, - BellatrixForkVersionHex: bellatrixForkVersion, - CapellaForkVersionHex: capellaForkVersion, - DenebForkVersionHex: denebForkVersion, - DomainBuilder: domainBuilder, - DomainBeaconProposerBellatrix: domainBeaconProposerBellatrix, - DomainBeaconProposerCapella: domainBeaconProposerCapella, - DomainBeaconProposerDeneb: domainBeaconProposerDeneb, - }, nil -} - -func (e *EthNetworkDetails) String() string { - return fmt.Sprintf( - `EthNetworkDetails{ - Name: %s, - GenesisForkVersionHex: %s, - GenesisValidatorsRootHex: %s, - BellatrixForkVersionHex: %s, - CapellaForkVersionHex: %s, - DenebForkVersionHex: %s, - DomainBuilder: %x, - DomainBeaconProposerBellatrix: %x, - DomainBeaconProposerCapella: %x, - DomainBeaconProposerDeneb: %x -}`, - e.Name, - e.GenesisForkVersionHex, - e.GenesisValidatorsRootHex, - e.BellatrixForkVersionHex, - 
e.CapellaForkVersionHex, - e.DenebForkVersionHex, - e.DomainBuilder, - e.DomainBeaconProposerBellatrix, - e.DomainBeaconProposerCapella, - e.DomainBeaconProposerDeneb) -} - -type PubkeyHex string - -func NewPubkeyHex(pk string) PubkeyHex { - return PubkeyHex(strings.ToLower(pk)) -} - -func (p PubkeyHex) String() string { - return string(p) -} - -type BuilderGetValidatorsResponseEntry struct { - Slot uint64 `json:"slot,string"` - ValidatorIndex uint64 `json:"validator_index,string"` - Entry *builderApiV1.SignedValidatorRegistration `json:"entry"` -} - -type BidTraceV2 struct { - builderApiV1.BidTrace - BlockNumber uint64 `db:"block_number" json:"block_number,string"` - NumTx uint64 `db:"num_tx" json:"num_tx,string"` -} - -type BidTraceV2JSON struct { - Slot uint64 `json:"slot,string"` - ParentHash string `json:"parent_hash"` - BlockHash string `json:"block_hash"` - BuilderPubkey string `json:"builder_pubkey"` - ProposerPubkey string `json:"proposer_pubkey"` - ProposerFeeRecipient string `json:"proposer_fee_recipient"` - GasLimit uint64 `json:"gas_limit,string"` - GasUsed uint64 `json:"gas_used,string"` - Value string `json:"value"` - NumTx uint64 `json:"num_tx,string"` - BlockNumber uint64 `json:"block_number,string"` -} - -func (b BidTraceV2) MarshalJSON() ([]byte, error) { - return json.Marshal(&BidTraceV2JSON{ - Slot: b.Slot, - ParentHash: b.ParentHash.String(), - BlockHash: b.BlockHash.String(), - BuilderPubkey: b.BuilderPubkey.String(), - ProposerPubkey: b.ProposerPubkey.String(), - ProposerFeeRecipient: b.ProposerFeeRecipient.String(), - GasLimit: b.GasLimit, - GasUsed: b.GasUsed, - Value: b.Value.ToBig().String(), - NumTx: b.NumTx, - BlockNumber: b.BlockNumber, - }) -} - -func (b *BidTraceV2) UnmarshalJSON(data []byte) error { - params := &struct { - NumTx uint64 `json:"num_tx,string"` - BlockNumber uint64 `json:"block_number,string"` - }{} - err := json.Unmarshal(data, params) - if err != nil { - return err - } - b.NumTx = params.NumTx - b.BlockNumber = params.BlockNumber - - bidTrace := new(builderApiV1.BidTrace) - err = json.Unmarshal(data, bidTrace) - if err != nil { - return err - } - b.BidTrace = *bidTrace - return nil -} - -func (b *BidTraceV2JSON) CSVHeader() []string { - return []string{ - "slot", - "parent_hash", - "block_hash", - "builder_pubkey", - "proposer_pubkey", - "proposer_fee_recipient", - "gas_limit", - "gas_used", - "value", - "num_tx", - "block_number", - } -} - -func (b *BidTraceV2JSON) ToCSVRecord() []string { - return []string{ - strconv.FormatUint(b.Slot, 10), - b.ParentHash, - b.BlockHash, - b.BuilderPubkey, - b.ProposerPubkey, - b.ProposerFeeRecipient, - strconv.FormatUint(b.GasLimit, 10), - strconv.FormatUint(b.GasUsed, 10), - b.Value, - strconv.FormatUint(b.NumTx, 10), - strconv.FormatUint(b.BlockNumber, 10), - } -} - -type BidTraceV2WithTimestampJSON struct { - BidTraceV2JSON - Timestamp int64 `json:"timestamp,string,omitempty"` - TimestampMs int64 `json:"timestamp_ms,string,omitempty"` - OptimisticSubmission bool `json:"optimistic_submission"` -} - -func (b *BidTraceV2WithTimestampJSON) CSVHeader() []string { - return []string{ - "slot", - "parent_hash", - "block_hash", - "builder_pubkey", - "proposer_pubkey", - "proposer_fee_recipient", - "gas_limit", - "gas_used", - "value", - "num_tx", - "block_number", - "timestamp", - "timestamp_ms", - "optimistic_submission", - } -} - -func (b *BidTraceV2WithTimestampJSON) ToCSVRecord() []string { - return []string{ - strconv.FormatUint(b.Slot, 10), - b.ParentHash, - b.BlockHash, - b.BuilderPubkey, - 
b.ProposerPubkey, - b.ProposerFeeRecipient, - strconv.FormatUint(b.GasLimit, 10), - strconv.FormatUint(b.GasUsed, 10), - b.Value, - strconv.FormatUint(b.NumTx, 10), - strconv.FormatUint(b.BlockNumber, 10), - strconv.FormatInt(b.Timestamp, 10), - strconv.FormatInt(b.TimestampMs, 10), - strconv.FormatBool(b.OptimisticSubmission), - } -} - -type BidTraceV2WithBlobFields struct { - builderApiV1.BidTrace - BlockNumber uint64 `db:"block_number" json:"block_number,string"` - NumTx uint64 `db:"num_tx" json:"num_tx,string"` - NumBlobs uint64 `db:"num_blobs" json:"num_blobs,string"` - BlobGasUsed uint64 `db:"blob_gas_used" json:"blob_gas_used,string"` - ExcessBlobGas uint64 `db:"excess_blob_gas" json:"excess_blob_gas,string"` -} - -type BidTraceV2WithBlobFieldsJSON struct { - Slot uint64 `json:"slot,string"` - ParentHash string `json:"parent_hash"` - BlockHash string `json:"block_hash"` - BuilderPubkey string `json:"builder_pubkey"` - ProposerPubkey string `json:"proposer_pubkey"` - ProposerFeeRecipient string `json:"proposer_fee_recipient"` - GasLimit uint64 `json:"gas_limit,string"` - GasUsed uint64 `json:"gas_used,string"` - Value string `json:"value"` - NumTx uint64 `json:"num_tx,string"` - BlockNumber uint64 `json:"block_number,string"` - NumBlobs uint64 `json:"num_blobs,string"` - BlobGasUsed uint64 `json:"blob_gas_used,string"` - ExcessBlobGas uint64 `json:"excess_blob_gas,string"` -} - -func (b BidTraceV2WithBlobFields) MarshalJSON() ([]byte, error) { - return json.Marshal(&BidTraceV2WithBlobFieldsJSON{ - Slot: b.Slot, - ParentHash: b.ParentHash.String(), - BlockHash: b.BlockHash.String(), - BuilderPubkey: b.BuilderPubkey.String(), - ProposerPubkey: b.ProposerPubkey.String(), - ProposerFeeRecipient: b.ProposerFeeRecipient.String(), - GasLimit: b.GasLimit, - GasUsed: b.GasUsed, - Value: b.Value.ToBig().String(), - NumTx: b.NumTx, - BlockNumber: b.BlockNumber, - NumBlobs: b.NumBlobs, - BlobGasUsed: b.BlobGasUsed, - ExcessBlobGas: b.ExcessBlobGas, - }) -} - -func (b *BidTraceV2WithBlobFields) UnmarshalJSON(data []byte) error { - params := &struct { - NumTx uint64 `json:"num_tx,string"` - BlockNumber uint64 `json:"block_number,string"` - NumBlobs uint64 `json:"num_blobs,string"` - BlobGasUsed uint64 `json:"blob_gas_used,string"` - ExcessBlobGas uint64 `json:"excess_blob_gas,string"` - }{} - err := json.Unmarshal(data, params) - if err != nil { - return err - } - b.NumTx = params.NumTx - b.BlockNumber = params.BlockNumber - b.NumBlobs = params.NumBlobs - b.BlobGasUsed = params.BlobGasUsed - b.ExcessBlobGas = params.ExcessBlobGas - - bidTrace := new(builderApiV1.BidTrace) - err = json.Unmarshal(data, bidTrace) - if err != nil { - return err - } - b.BidTrace = *bidTrace - return nil -} - -type BlockSubmissionInfo struct { - BidTrace *builderApiV1.BidTrace - ExecutionPayloadBlockHash phase0.Hash32 - ExecutionPayloadParentHash phase0.Hash32 - GasUsed uint64 - GasLimit uint64 - Timestamp uint64 - BlockNumber uint64 - PrevRandao phase0.Hash32 - Signature phase0.BLSSignature - Transactions []bellatrix.Transaction - Withdrawals []*capella.Withdrawal - Blobs []deneb.Blob - BlobGasUsed uint64 - ExcessBlobGas uint64 -} - -/* -SubmitBlockRequestV2Optimistic is the v2 request from the builder to submit -a block. The message must be SSZ encoded. The first three fields are at most -944 bytes, which fit into a single 1500 MTU ethernet packet. The -`UnmarshalSSZHeaderOnly` function just parses the first three fields, -which is sufficient data to set the bid of the builder. 
The `Transactions` -and `Withdrawals` fields are required to construct the full SignedBeaconBlock -and are parsed asynchronously. - -Header only layout: -[000-236) = Message (236 bytes) -[236-240) = offset1 ( 4 bytes) -[240-336) = Signature ( 96 bytes) -[336-340) = offset2 ( 4 bytes) -[340-344) = offset3 ( 4 bytes) -[344-944) = EPH (600 bytes) -*/ -type SubmitBlockRequestV2Optimistic struct { - Message *builderApiV1.BidTrace - ExecutionPayloadHeader *capella.ExecutionPayloadHeader - Signature phase0.BLSSignature `ssz-size:"96"` - Transactions []bellatrix.Transaction `ssz-max:"1048576,1073741824" ssz-size:"?,?"` - Withdrawals []*capella.Withdrawal `ssz-max:"16"` -} - -// MarshalSSZ ssz marshals the SubmitBlockRequestV2Optimistic object -func (s *SubmitBlockRequestV2Optimistic) MarshalSSZ() ([]byte, error) { - return ssz.MarshalSSZ(s) -} - -// UnmarshalSSZ ssz unmarshals the SubmitBlockRequestV2Optimistic object -func (s *SubmitBlockRequestV2Optimistic) UnmarshalSSZ(buf []byte) error { - var err error - size := uint64(len(buf)) - if size < 344 { - return ssz.ErrSize - } - - tail := buf - var o1, o3, o4 uint64 - - // Field (0) 'Message' - if s.Message == nil { - s.Message = new(builderApiV1.BidTrace) - } - if err = s.Message.UnmarshalSSZ(buf[0:236]); err != nil { - return err - } - - // Offset (1) 'ExecutionPayloadHeader' - if o1 = ssz.ReadOffset(buf[236:240]); o1 > size { - return ssz.ErrOffset - } - - if o1 < 344 { - return ssz.ErrInvalidVariableOffset - } - - // Field (2) 'Signature' - copy(s.Signature[:], buf[240:336]) - - // Offset (3) 'Transactions' - if o3 = ssz.ReadOffset(buf[336:340]); o3 > size || o1 > o3 { - return ssz.ErrOffset - } - - // Offset (4) 'Withdrawals' - if o4 = ssz.ReadOffset(buf[340:344]); o4 > size || o3 > o4 { - return ssz.ErrOffset - } - - // Field (1) 'ExecutionPayloadHeader' - { - buf = tail[o1:o3] - if s.ExecutionPayloadHeader == nil { - s.ExecutionPayloadHeader = new(capella.ExecutionPayloadHeader) - } - if err = s.ExecutionPayloadHeader.UnmarshalSSZ(buf); err != nil { - return err - } - } - - // Field (3) 'Transactions' - { - buf = tail[o3:o4] - num, err := ssz.DecodeDynamicLength(buf, 1073741824) - if err != nil { - return err - } - s.Transactions = make([]bellatrix.Transaction, num) - err = ssz.UnmarshalDynamic(buf, num, func(indx int, buf []byte) (err error) { - if len(buf) > 1073741824 { - return ssz.ErrBytesLength - } - if cap(s.Transactions[indx]) == 0 { - s.Transactions[indx] = bellatrix.Transaction(make([]byte, 0, len(buf))) - } - s.Transactions[indx] = append(s.Transactions[indx], buf...) 
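	// A worked size check, consistent with the layout comment above (illustrative,
	// not code from the deleted file): Message (236) + offset1 (4) + Signature (96)
	// + offset2 (4) + offset3 (4) = 344 bytes, the minimum length enforced here and
	// in UnmarshalSSZHeaderOnly below; adding a Capella ExecutionPayloadHeader with
	// 32 bytes of ExtraData (568 + 32 = 600 bytes) gives the 944-byte header-only
	// encoding asserted in the tests further down.
	const v2OptimisticFixedPart = 236 + 4 + 96 + 4 + 4 // == 344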
- return nil - }) - if err != nil { - return err - } - } - - // Field (4) 'Withdrawals' - { - buf = tail[o4:] - num, err := ssz.DivideInt2(len(buf), 44, 16) - if err != nil { - return err - } - s.Withdrawals = make([]*capella.Withdrawal, num) - for ii := 0; ii < num; ii++ { - if s.Withdrawals[ii] == nil { - s.Withdrawals[ii] = new(capella.Withdrawal) - } - if err = s.Withdrawals[ii].UnmarshalSSZ(buf[ii*44 : (ii+1)*44]); err != nil { - return err - } - } - } - return err -} - -// UnmarshalSSZHeaderOnly ssz unmarshals the first 3 fields of the SubmitBlockRequestV2Optimistic object -func (s *SubmitBlockRequestV2Optimistic) UnmarshalSSZHeaderOnly(buf []byte) error { - var err error - size := uint64(len(buf)) - if size < 344 { - return ssz.ErrSize - } - - tail := buf - var o1, o3 uint64 - - // Field (0) 'Message' - if s.Message == nil { - s.Message = new(builderApiV1.BidTrace) - } - if err = s.Message.UnmarshalSSZ(buf[0:236]); err != nil { - return err - } - - // Offset (1) 'ExecutionPayloadHeader' - if o1 = ssz.ReadOffset(buf[236:240]); o1 > size { - return ssz.ErrOffset - } - - if o1 < 344 { - return ssz.ErrInvalidVariableOffset - } - - // Field (2) 'Signature' - copy(s.Signature[:], buf[240:336]) - - // Offset (3) 'Transactions' - if o3 = ssz.ReadOffset(buf[336:340]); o3 > size || o1 > o3 { - return ssz.ErrOffset - } - - // Field (1) 'ExecutionPayloadHeader' - { - buf = tail[o1:o3] - if s.ExecutionPayloadHeader == nil { - s.ExecutionPayloadHeader = new(capella.ExecutionPayloadHeader) - } - if err = s.ExecutionPayloadHeader.UnmarshalSSZ(buf); err != nil { - return err - } - } - return err -} - -// MarshalSSZTo ssz marshals the SubmitBlockRequestV2Optimistic object to a target array -func (s *SubmitBlockRequestV2Optimistic) MarshalSSZTo(buf []byte) (dst []byte, err error) { - dst = buf - offset := int(344) - - // Field (0) 'Message' - if s.Message == nil { - s.Message = new(builderApiV1.BidTrace) - } - if dst, err = s.Message.MarshalSSZTo(dst); err != nil { - return nil, err - } - - // Offset (1) 'ExecutionPayloadHeader' - dst = ssz.WriteOffset(dst, offset) - if s.ExecutionPayloadHeader == nil { - s.ExecutionPayloadHeader = new(capella.ExecutionPayloadHeader) - } - offset += s.ExecutionPayloadHeader.SizeSSZ() - - // Field (2) 'Signature' - dst = append(dst, s.Signature[:]...) - - // Offset (3) 'Transactions' - dst = ssz.WriteOffset(dst, offset) - for ii := 0; ii < len(s.Transactions); ii++ { - offset += 4 - offset += len(s.Transactions[ii]) - } - - // Offset (4) 'Withdrawals' - dst = ssz.WriteOffset(dst, offset) - - // Field (1) 'ExecutionPayloadHeader' - if dst, err = s.ExecutionPayloadHeader.MarshalSSZTo(dst); err != nil { - return nil, err - } - - // Field (3) 'Transactions' - if size := len(s.Transactions); size > 1073741824 { - err = ssz.ErrListTooBigFn("SubmitBlockRequestV2Optimistic.Transactions", size, 1073741824) - return nil, err - } - { - offset = 4 * len(s.Transactions) - for ii := 0; ii < len(s.Transactions); ii++ { - dst = ssz.WriteOffset(dst, offset) - offset += len(s.Transactions[ii]) - } - } - for ii := 0; ii < len(s.Transactions); ii++ { - if size := len(s.Transactions[ii]); size > 1073741824 { - err = ssz.ErrBytesLengthFn("SubmitBlockRequestV2Optimistic.Transactions[ii]", size, 1073741824) - return nil, err - } - dst = append(dst, s.Transactions[ii]...) 
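For context on how the two decoders above are meant to be combined, here is a usage sketch (a hypothetical helper inside this package, not part of the deleted file): the header-only parse is enough to evaluate and store the builder's bid quickly, while the full parse recovers Transactions and Withdrawals for constructing the SignedBeaconBlock later.

// decodeOptimisticBid illustrates the intended two-phase flow; error handling is minimal.
func decodeOptimisticBid(body []byte) (bid, full *SubmitBlockRequestV2Optimistic, err error) {
	// Fast path: parse only Message, ExecutionPayloadHeader and Signature,
	// which is sufficient to evaluate and register the bid.
	bid = new(SubmitBlockRequestV2Optimistic)
	if err = bid.UnmarshalSSZHeaderOnly(body); err != nil {
		return nil, nil, err
	}

	// Slow path, typically run asynchronously: decode everything, including
	// Transactions and Withdrawals, so the full payload is available if this bid wins.
	full = new(SubmitBlockRequestV2Optimistic)
	if err = full.UnmarshalSSZ(body); err != nil {
		return nil, nil, err
	}
	return bid, full, nil
}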
- } - - // Field (4) 'Withdrawals' - if size := len(s.Withdrawals); size > 16 { - err = ssz.ErrListTooBigFn("SubmitBlockRequestV2Optimistic.Withdrawals", size, 16) - return nil, err - } - for ii := 0; ii < len(s.Withdrawals); ii++ { - if dst, err = s.Withdrawals[ii].MarshalSSZTo(dst); err != nil { - return nil, err - } - } - return dst, nil -} - -// SizeSSZ returns the ssz encoded size in bytes for the SubmitBlockRequestV2Optimistic object -func (s *SubmitBlockRequestV2Optimistic) SizeSSZ() (size int) { - size = 344 - - // Field (1) 'ExecutionPayloadHeader' - if s.ExecutionPayloadHeader == nil { - s.ExecutionPayloadHeader = new(capella.ExecutionPayloadHeader) - } - size += s.ExecutionPayloadHeader.SizeSSZ() - - // Field (3) 'Transactions' - for ii := 0; ii < len(s.Transactions); ii++ { - size += 4 - size += len(s.Transactions[ii]) - } - - // Field (4) 'Withdrawals' - size += len(s.Withdrawals) * 44 - - return -} diff --git a/mev-boost-relay/common/types_spec.go b/mev-boost-relay/common/types_spec.go deleted file mode 100644 index 1e3ae4915..000000000 --- a/mev-boost-relay/common/types_spec.go +++ /dev/null @@ -1,450 +0,0 @@ -package common - -import ( - "encoding/json" - "fmt" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - eth2Api "github.com/attestantio/go-eth2-client/api" - eth2ApiV1Capella "github.com/attestantio/go-eth2-client/api/v1/capella" - eth2ApiV1Deneb "github.com/attestantio/go-eth2-client/api/v1/deneb" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/utils" - "github.com/pkg/errors" -) - -var ( - ErrMissingRequest = errors.New("req is nil") - ErrMissingSecretKey = errors.New("secret key is nil") - ErrInvalidVersion = errors.New("invalid version") -) - -type HTTPErrorResp struct { - Code int `json:"code"` - Message string `json:"message"` -} - -var NilResponse = struct{}{} - -func BuildGetHeaderResponse(payload *VersionedSubmitBlockRequest, sk *bls.SecretKey, pubkey *phase0.BLSPubKey, domain phase0.Domain) (*builderSpec.VersionedSignedBuilderBid, error) { - if payload == nil { - return nil, ErrMissingRequest - } - - if sk == nil { - return nil, ErrMissingSecretKey - } - - versionedPayload := &builderApi.VersionedExecutionPayload{Version: payload.Version} - switch payload.Version { - case spec.DataVersionCapella: - versionedPayload.Capella = payload.Capella.ExecutionPayload - header, err := utils.PayloadToPayloadHeader(versionedPayload) - if err != nil { - return nil, err - } - signedBuilderBid, err := BuilderBlockRequestToSignedBuilderBid(payload, header, sk, pubkey, domain) - if err != nil { - return nil, err - } - return &builderSpec.VersionedSignedBuilderBid{ - Version: spec.DataVersionCapella, - Capella: signedBuilderBid.Capella, - }, nil - case spec.DataVersionDeneb: - versionedPayload.Deneb = payload.Deneb.ExecutionPayload - header, err := utils.PayloadToPayloadHeader(versionedPayload) - if err != nil { - return nil, err - } - signedBuilderBid, err := BuilderBlockRequestToSignedBuilderBid(payload, header, 
sk, pubkey, domain) - if err != nil { - return nil, err - } - return &builderSpec.VersionedSignedBuilderBid{ - Version: spec.DataVersionDeneb, - Deneb: signedBuilderBid.Deneb, - }, nil - case spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - return nil, ErrInvalidVersion - default: - return nil, ErrEmptyPayload - } -} - -func BuildGetPayloadResponse(payload *VersionedSubmitBlockRequest) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - switch payload.Version { - case spec.DataVersionCapella: - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionCapella, - Capella: payload.Capella.ExecutionPayload, - }, nil - case spec.DataVersionDeneb: - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.ExecutionPayloadAndBlobsBundle{ - ExecutionPayload: payload.Deneb.ExecutionPayload, - BlobsBundle: payload.Deneb.BlobsBundle, - }, - }, nil - case spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - return nil, ErrInvalidVersion - } - return nil, ErrEmptyPayload -} - -func BuilderBlockRequestToSignedBuilderBid(payload *VersionedSubmitBlockRequest, header *builderApi.VersionedExecutionPayloadHeader, sk *bls.SecretKey, pubkey *phase0.BLSPubKey, domain phase0.Domain) (*builderSpec.VersionedSignedBuilderBid, error) { - value, err := payload.Value() - if err != nil { - return nil, err - } - - switch payload.Version { //nolint:exhaustive - case spec.DataVersionCapella: - builderBid := builderApiCapella.BuilderBid{ - Value: value, - Header: header.Capella, - Pubkey: *pubkey, - } - - sig, err := ssz.SignMessage(&builderBid, domain, sk) - if err != nil { - return nil, err - } - - return &builderSpec.VersionedSignedBuilderBid{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SignedBuilderBid{ - Message: &builderBid, - Signature: sig, - }, - }, nil - case spec.DataVersionDeneb: - builderBid := builderApiDeneb.BuilderBid{ - Header: header.Deneb, - BlobKZGCommitments: payload.Deneb.BlobsBundle.Commitments, - Value: value, - Pubkey: *pubkey, - } - - sig, err := ssz.SignMessage(&builderBid, domain, sk) - if err != nil { - return nil, err - } - - return &builderSpec.VersionedSignedBuilderBid{ - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.SignedBuilderBid{ - Message: &builderBid, - Signature: sig, - }, - }, nil - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", payload.Version)) - } -} - -func SignedBlindedBeaconBlockToBeaconBlock(signedBlindedBeaconBlock *VersionedSignedBlindedBeaconBlock, blockPayload *builderApi.VersionedSubmitBlindedBlockResponse) (*VersionedSignedProposal, error) { - signedBeaconBlock := VersionedSignedProposal{ - eth2Api.VersionedSignedProposal{ //nolint:exhaustruct - Version: signedBlindedBeaconBlock.Version, - }, - } - switch signedBlindedBeaconBlock.Version { - case spec.DataVersionCapella: - capellaBlindedBlock := signedBlindedBeaconBlock.Capella - signedBeaconBlock.Capella = CapellaUnblindSignedBlock(capellaBlindedBlock, blockPayload.Capella) - case spec.DataVersionDeneb: - denebBlindedBlock := signedBlindedBeaconBlock.Deneb - if len(denebBlindedBlock.Message.Body.BlobKZGCommitments) != len(blockPayload.Deneb.BlobsBundle.Blobs) { - return nil, errors.New("number of blinded blobs does not match blobs bundle length") - } - - signedBeaconBlock.Deneb = DenebUnblindSignedBlock(denebBlindedBlock, blockPayload.Deneb) - case 
spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", signedBlindedBeaconBlock.Version)) - } - return &signedBeaconBlock, nil -} - -func CapellaUnblindSignedBlock(blindedBlock *eth2ApiV1Capella.SignedBlindedBeaconBlock, executionPayload *capella.ExecutionPayload) *capella.SignedBeaconBlock { - return &capella.SignedBeaconBlock{ - Signature: blindedBlock.Signature, - Message: &capella.BeaconBlock{ - Slot: blindedBlock.Message.Slot, - ProposerIndex: blindedBlock.Message.ProposerIndex, - ParentRoot: blindedBlock.Message.ParentRoot, - StateRoot: blindedBlock.Message.StateRoot, - Body: &capella.BeaconBlockBody{ - RANDAOReveal: blindedBlock.Message.Body.RANDAOReveal, - ETH1Data: blindedBlock.Message.Body.ETH1Data, - Graffiti: blindedBlock.Message.Body.Graffiti, - ProposerSlashings: blindedBlock.Message.Body.ProposerSlashings, - AttesterSlashings: blindedBlock.Message.Body.AttesterSlashings, - Attestations: blindedBlock.Message.Body.Attestations, - Deposits: blindedBlock.Message.Body.Deposits, - VoluntaryExits: blindedBlock.Message.Body.VoluntaryExits, - SyncAggregate: blindedBlock.Message.Body.SyncAggregate, - ExecutionPayload: executionPayload, - BLSToExecutionChanges: blindedBlock.Message.Body.BLSToExecutionChanges, - }, - }, - } -} - -func DenebUnblindSignedBlock(blindedBlock *eth2ApiV1Deneb.SignedBlindedBeaconBlock, blockPayload *builderApiDeneb.ExecutionPayloadAndBlobsBundle) *eth2ApiV1Deneb.SignedBlockContents { - return ð2ApiV1Deneb.SignedBlockContents{ - SignedBlock: &deneb.SignedBeaconBlock{ - Message: &deneb.BeaconBlock{ - Slot: blindedBlock.Message.Slot, - ProposerIndex: blindedBlock.Message.ProposerIndex, - ParentRoot: blindedBlock.Message.ParentRoot, - StateRoot: blindedBlock.Message.StateRoot, - Body: &deneb.BeaconBlockBody{ - RANDAOReveal: blindedBlock.Message.Body.RANDAOReveal, - ETH1Data: blindedBlock.Message.Body.ETH1Data, - Graffiti: blindedBlock.Message.Body.Graffiti, - ProposerSlashings: blindedBlock.Message.Body.ProposerSlashings, - AttesterSlashings: blindedBlock.Message.Body.AttesterSlashings, - Attestations: blindedBlock.Message.Body.Attestations, - Deposits: blindedBlock.Message.Body.Deposits, - VoluntaryExits: blindedBlock.Message.Body.VoluntaryExits, - SyncAggregate: blindedBlock.Message.Body.SyncAggregate, - ExecutionPayload: blockPayload.ExecutionPayload, - BLSToExecutionChanges: blindedBlock.Message.Body.BLSToExecutionChanges, - BlobKZGCommitments: blindedBlock.Message.Body.BlobKZGCommitments, - }, - }, - Signature: blindedBlock.Signature, - }, - KZGProofs: blockPayload.BlobsBundle.Proofs, - Blobs: blockPayload.BlobsBundle.Blobs, - } -} - -type BuilderBlockValidationRequest struct { - *VersionedSubmitBlockRequest - RegisteredGasLimit uint64 - ParentBeaconBlockRoot *phase0.Root -} - -type capellaBuilderBlockValidationRequestJSON struct { - Message *builderApiV1.BidTrace `json:"message"` - ExecutionPayload *capella.ExecutionPayload `json:"execution_payload"` - Signature string `json:"signature"` - RegisteredGasLimit uint64 `json:"registered_gas_limit,string"` -} - -type denebBuilderBlockValidationRequestJSON struct { - Message *builderApiV1.BidTrace `json:"message"` - ExecutionPayload *deneb.ExecutionPayload `json:"execution_payload"` - BlobsBundle *builderApiDeneb.BlobsBundle `json:"blobs_bundle"` - Signature string `json:"signature"` - RegisteredGasLimit uint64 `json:"registered_gas_limit,string"` - ParentBeaconBlockRoot string 
`json:"parent_beacon_block_root"` -} - -func (r *BuilderBlockValidationRequest) MarshalJSON() ([]byte, error) { - switch r.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return json.Marshal(&capellaBuilderBlockValidationRequestJSON{ - Message: r.Capella.Message, - ExecutionPayload: r.Capella.ExecutionPayload, - Signature: r.Capella.Signature.String(), - RegisteredGasLimit: r.RegisteredGasLimit, - }) - case spec.DataVersionDeneb: - return json.Marshal(&denebBuilderBlockValidationRequestJSON{ - Message: r.Deneb.Message, - ExecutionPayload: r.Deneb.ExecutionPayload, - BlobsBundle: r.Deneb.BlobsBundle, - Signature: r.Deneb.Signature.String(), - RegisteredGasLimit: r.RegisteredGasLimit, - ParentBeaconBlockRoot: r.ParentBeaconBlockRoot.String(), - }) - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", r.Version)) - } -} - -type VersionedSubmitBlockRequest struct { - builderSpec.VersionedSubmitBlockRequest -} - -func (r *VersionedSubmitBlockRequest) MarshalSSZ() ([]byte, error) { - switch r.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return r.Capella.MarshalSSZ() - case spec.DataVersionDeneb: - return r.Deneb.MarshalSSZ() - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", r.Version)) - } -} - -func (r *VersionedSubmitBlockRequest) UnmarshalSSZ(input []byte) error { - var err error - - denebRequest := new(builderApiDeneb.SubmitBlockRequest) - if err = denebRequest.UnmarshalSSZ(input); err == nil { - r.Version = spec.DataVersionDeneb - r.Deneb = denebRequest - return nil - } - - capellaRequest := new(builderApiCapella.SubmitBlockRequest) - if err = capellaRequest.UnmarshalSSZ(input); err == nil { - r.Version = spec.DataVersionCapella - r.Capella = capellaRequest - return nil - } - return errors.Wrap(err, "failed to unmarshal SubmitBlockRequest SSZ") -} - -func (r *VersionedSubmitBlockRequest) HashTreeRoot() (phase0.Root, error) { - switch r.Version { - case spec.DataVersionCapella: - return r.Capella.HashTreeRoot() - case spec.DataVersionDeneb: - return r.Deneb.HashTreeRoot() - case spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - fallthrough - default: - return phase0.Root{}, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%d is not supported", r.Version)) - } -} - -func (r *VersionedSubmitBlockRequest) MarshalJSON() ([]byte, error) { - switch r.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return json.Marshal(r.Capella) - case spec.DataVersionDeneb: - return json.Marshal(r.Deneb) - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", r.Version)) - } -} - -func (r *VersionedSubmitBlockRequest) UnmarshalJSON(input []byte) error { - var err error - denebRequest := new(builderApiDeneb.SubmitBlockRequest) - if err = json.Unmarshal(input, denebRequest); err == nil { - r.Version = spec.DataVersionDeneb - r.Deneb = denebRequest - return nil - } - - capellaRequest := new(builderApiCapella.SubmitBlockRequest) - if err = json.Unmarshal(input, capellaRequest); err == nil { - r.Version = spec.DataVersionCapella - r.Capella = capellaRequest - return nil - } - return errors.Wrap(err, "failed to unmarshal SubmitBlockRequest") -} - -type VersionedSignedProposal struct { - eth2Api.VersionedSignedProposal -} - -func (r *VersionedSignedProposal) MarshalSSZ() ([]byte, error) { - switch r.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return r.Capella.MarshalSSZ() - case 
spec.DataVersionDeneb: - return r.Deneb.MarshalSSZ() - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", r.Version)) - } -} - -func (r *VersionedSignedProposal) UnmarshalSSZ(input []byte) error { - var err error - denebRequest := new(eth2ApiV1Deneb.SignedBlockContents) - if err = denebRequest.UnmarshalSSZ(input); err == nil { - r.Version = spec.DataVersionDeneb - r.Deneb = denebRequest - return nil - } - - capellaRequest := new(capella.SignedBeaconBlock) - if err = capellaRequest.UnmarshalSSZ(input); err == nil { - r.Version = spec.DataVersionCapella - r.Capella = capellaRequest - return nil - } - return errors.Wrap(err, "failed to unmarshal SubmitBlockRequest SSZ") -} - -func (r *VersionedSignedProposal) MarshalJSON() ([]byte, error) { - switch r.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return json.Marshal(r.Capella) - case spec.DataVersionDeneb: - return json.Marshal(r.Deneb) - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", r.Version)) - } -} - -func (r *VersionedSignedProposal) UnmarshalJSON(input []byte) error { - var err error - - denebContents := new(eth2ApiV1Deneb.SignedBlockContents) - if err = json.Unmarshal(input, denebContents); err == nil { - r.Version = spec.DataVersionDeneb - r.Deneb = denebContents - return nil - } - - capellaBlock := new(capella.SignedBeaconBlock) - if err = json.Unmarshal(input, capellaBlock); err == nil { - r.Version = spec.DataVersionCapella - r.Capella = capellaBlock - return nil - } - return errors.Wrap(err, "failed to unmarshal SignedProposal") -} - -type VersionedSignedBlindedBeaconBlock struct { - eth2Api.VersionedSignedBlindedBeaconBlock -} - -func (r *VersionedSignedBlindedBeaconBlock) MarshalJSON() ([]byte, error) { - switch r.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return json.Marshal(r.Capella) - case spec.DataVersionDeneb: - return json.Marshal(r.Deneb) - default: - return nil, errors.Wrap(ErrInvalidVersion, fmt.Sprintf("%s is not supported", r.Version)) - } -} - -func (r *VersionedSignedBlindedBeaconBlock) UnmarshalJSON(input []byte) error { - var err error - - denebBlock := new(eth2ApiV1Deneb.SignedBlindedBeaconBlock) - if err = json.Unmarshal(input, denebBlock); err == nil { - r.Version = spec.DataVersionDeneb - r.Deneb = denebBlock - return nil - } - - capellaBlock := new(eth2ApiV1Capella.SignedBlindedBeaconBlock) - if err = json.Unmarshal(input, capellaBlock); err == nil { - r.Version = spec.DataVersionCapella - r.Capella = capellaBlock - return nil - } - return errors.Wrap(err, "failed to unmarshal SignedBlindedBeaconBlock") -} diff --git a/mev-boost-relay/common/types_spec_test.go b/mev-boost-relay/common/types_spec_test.go deleted file mode 100644 index 411253ae4..000000000 --- a/mev-boost-relay/common/types_spec_test.go +++ /dev/null @@ -1,137 +0,0 @@ -package common - -import ( - "bytes" - "encoding/json" - "testing" - - "github.com/attestantio/go-eth2-client/spec" - "github.com/stretchr/testify/require" -) - -func TestSubmitBuilderBlockJSON(t *testing.T) { - jsonBytes := LoadGzippedBytes(t, "../testdata/submitBlockPayloadCapella_Goerli.json.gz") - - submitBlockData := new(VersionedSubmitBlockRequest) - err := json.Unmarshal(jsonBytes, &submitBlockData) - require.NoError(t, err) - - marshalledJSONBytes, err := json.Marshal(submitBlockData) - require.NoError(t, err) - buffer := new(bytes.Buffer) - err = json.Compact(buffer, jsonBytes) - require.NoError(t, err) - expectedJSONBytes := buffer.Bytes() - - 
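For readers of the versioned wrappers defined above, a minimal caller-side sketch (a hypothetical helper assumed to sit in this common package; not part of the deleted files): UnmarshalJSON tries the newest supported fork first and falls back, so callers only need to branch on the resulting Version, as the tests in this file do implicitly.

// describeSubmission is illustrative only.
func describeSubmission(raw []byte) (string, error) {
	req := new(VersionedSubmitBlockRequest)
	if err := json.Unmarshal(raw, req); err != nil {
		return "", err
	}
	switch req.Version { //nolint:exhaustive
	case spec.DataVersionDeneb:
		return "deneb block submission", nil
	case spec.DataVersionCapella:
		return "capella block submission", nil
	default:
		return "", fmt.Errorf("unsupported version %s", req.Version)
	}
}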
require.Equal(t, expectedJSONBytes, marshalledJSONBytes) -} - -func TestSignedBeaconBlockJSON(t *testing.T) { - testCases := []struct { - name string - filepath string - }{ - { - name: "Capella", - filepath: "../testdata/signedBeaconBlockCapella_Goerli.json.gz", - }, - { - name: "Deneb", - filepath: "../testdata/signedBeaconBlockContentsDeneb_Goerli.json.gz", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - jsonBytes := LoadGzippedBytes(t, testCase.filepath) - buffer := new(bytes.Buffer) - err := json.Compact(buffer, jsonBytes) - require.NoError(t, err) - expectedJSONBytes := buffer.Bytes() - - blockRequest := new(VersionedSignedProposal) - err = json.Unmarshal(jsonBytes, blockRequest) - require.NoError(t, err) - - marshalledJSONBytes, err := json.Marshal(blockRequest) - require.NoError(t, err) - - require.Equal(t, expectedJSONBytes, marshalledJSONBytes) - }) - } -} - -func TestSignedBlindedBlockJSON(t *testing.T) { - testCases := []struct { - name string - filepath string - }{ - { - name: "Capella", - filepath: "../testdata/signedBlindedBeaconBlockCapella_Goerli.json.gz", - }, - { - name: "Deneb", - filepath: "../testdata/signedBlindedBeaconBlockDeneb_Goerli.json.gz", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - jsonBytes := LoadGzippedBytes(t, testCase.filepath) - buffer := new(bytes.Buffer) - err := json.Compact(buffer, jsonBytes) - require.NoError(t, err) - expectedJSONBytes := buffer.Bytes() - - blockRequest := new(VersionedSignedBlindedBeaconBlock) - err = json.Unmarshal(jsonBytes, blockRequest) - require.NoError(t, err) - - marshalledJSONBytes, err := json.Marshal(blockRequest) - require.NoError(t, err) - - require.Equal(t, expectedJSONBytes, marshalledJSONBytes) - }) - } -} - -func TestBuildGetPayloadResponse(t *testing.T) { - testCases := []struct { - name string - filepath string - version spec.DataVersion - blockHash string - }{ - { - name: "Capella", - filepath: "../testdata/submitBlockPayloadCapella_Goerli.json.gz", - version: spec.DataVersionCapella, - blockHash: "0x1bafdc454116b605005364976b134d761dd736cb4788d25c835783b46daeb121", - }, - { - name: "Deneb", - filepath: "../testdata/submitBlockPayloadDeneb_Goerli.json.gz", - version: spec.DataVersionDeneb, - blockHash: "0x195e2aac0a52cf26428336142e74eafd55d9228f315c2f2fe9253406ef9ef544", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - jsonBytes := LoadGzippedBytes(t, testCase.filepath) - - submitBlockData := new(VersionedSubmitBlockRequest) - err := json.Unmarshal(jsonBytes, &submitBlockData) - require.NoError(t, err) - - resp, err := BuildGetPayloadResponse(submitBlockData) - require.NoError(t, err) - - require.Equal(t, testCase.version, resp.Version) - blockHash, err := resp.BlockHash() - require.NoError(t, err) - require.Equal(t, testCase.blockHash, blockHash.String()) - }) - } -} diff --git a/mev-boost-relay/common/types_test.go b/mev-boost-relay/common/types_test.go deleted file mode 100644 index a0def3ea6..000000000 --- a/mev-boost-relay/common/types_test.go +++ /dev/null @@ -1,134 +0,0 @@ -package common - -import ( - "testing" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/utils" - 
"github.com/holiman/uint256" - "github.com/stretchr/testify/require" -) - -func makeTestSubmitBlockRequestV2Optimistic(t *testing.T) *SubmitBlockRequestV2Optimistic { - t.Helper() - testParentHash, err := utils.HexToHash("0xec51bd499a3fa0270f1446fbf05ff0b61157cfe4ec719bb4c3e834e339ee9c5c") - require.NoError(t, err) - testBlockHash, err := utils.HexToHash("0x3f5b5aaa800a3d25c3f75e72dc45da89fdd58168f1358a9f94aac8b029361a0a") - require.NoError(t, err) - testRandao, err := utils.HexToHash("0x8cf6b7fbfbaf80da001fe769fd02e9b8dbfa0a646d9cf51b5d7137dd4f8103cc") - require.NoError(t, err) - testRoot, err := utils.HexToHash("0x7554727cee6d976a1dfdad80b392b37c87f0651ff5b01f6a0b3402bcfce92077") - require.NoError(t, err) - testBuilderPubkey, err := utils.HexToPubkey("0xae7bde4839fa905b7d8125fd84cfdcd0c32cd74e1be3fa24263d71b520fc78113326ce0a90b95d73f19e6d8150a2f73b") - require.NoError(t, err) - testProposerPubkey, err := utils.HexToPubkey("0xbb8e223239fa905b7d8125fd84cfdcd0c32cd74e1be3fa24263d71b520fc78113326ce0a90b95d73f19e6d8150a2f73b") - require.NoError(t, err) - testAddress, err := utils.HexToAddress("0x95222290DD7278Aa3Ddd389Cc1E1d165CC4BAfe5") - require.NoError(t, err) - testSignature, err := utils.HexToSignature("0xb06311be19c92307c06070578af9ad147c9c6ea902439eac19f785f3dca478c354b79a0af9b09839c3d06c1ccf2185b0162f4d4fbf981220f77586b52ed9ae8a8acfc953baaa30dee15e1b112913c6cbe02c780d7b5363a4af16563fe55c2e88") - require.NoError(t, err) - testValue := new(uint256.Int) - err = testValue.SetFromDecimal("100") - require.NoError(t, err) - - return &SubmitBlockRequestV2Optimistic{ - Message: &builderApiV1.BidTrace{ - Slot: 31, - ParentHash: testParentHash, - BlockHash: testBlockHash, - BuilderPubkey: testBuilderPubkey, - ProposerPubkey: testProposerPubkey, - ProposerFeeRecipient: testAddress, - GasLimit: 30_000_000, - GasUsed: 15_000_000, - Value: testValue, - }, - ExecutionPayloadHeader: &capella.ExecutionPayloadHeader{ - ParentHash: testParentHash, - FeeRecipient: testAddress, - StateRoot: [32]byte(testBlockHash), - ReceiptsRoot: [32]byte(testBlockHash), - LogsBloom: [256]byte{0xaa, 0xbb, 0xcc}, - PrevRandao: [32]byte(testRandao), - BlockNumber: 30, - GasLimit: 30_000_000, - GasUsed: 15_000_000, - Timestamp: 168318215, - ExtraData: make([]byte, 32), - BaseFeePerGas: [32]byte{0xaa, 0xbb}, - BlockHash: testBlockHash, - TransactionsRoot: phase0.Root(testRoot), - WithdrawalsRoot: phase0.Root(testRoot), - }, - Signature: testSignature, - Transactions: []bellatrix.Transaction{ - []byte{0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09}, - []byte{0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19}, - []byte{0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29}, - []byte{0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39}, - []byte{0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49}, - []byte{0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59}, - }, - Withdrawals: []*capella.Withdrawal{ - { - Index: capella.WithdrawalIndex(120), - ValidatorIndex: phase0.ValidatorIndex(121), - Address: testAddress, - Amount: phase0.Gwei(102412521125125), - }, - }, - } -} - -func TestDataVersion(t *testing.T) { - require.Equal(t, ForkVersionStringBellatrix, spec.DataVersionBellatrix.String()) - require.Equal(t, ForkVersionStringCapella, spec.DataVersionCapella.String()) - require.Equal(t, ForkVersionStringDeneb, spec.DataVersionDeneb.String()) -} - -func compareV2RequestEquality(t *testing.T, src, targ *SubmitBlockRequestV2Optimistic) { - t.Helper() - require.Equal(t, src.Message.String(), targ.Message.String()) - 
require.Equal(t, src.ExecutionPayloadHeader.String(), targ.ExecutionPayloadHeader.String()) - require.Equal(t, src.Signature, targ.Signature) - for i := 0; i < len(src.Transactions); i++ { - require.Equal(t, src.Transactions[i], targ.Transactions[i]) - } - for i := 0; i < len(src.Withdrawals); i++ { - require.Equal(t, src.Withdrawals[i].String(), targ.Withdrawals[i].String()) - } -} - -func TestSubmitBlockRequestV2Optimistic(t *testing.T) { - obj := makeTestSubmitBlockRequestV2Optimistic(t) - - // Encode the object. - sszObj, err := obj.MarshalSSZ() - require.NoError(t, err) - require.Len(t, sszObj, obj.SizeSSZ()) - - // Unmarshal the full object. - unmarshal := new(SubmitBlockRequestV2Optimistic) - err = unmarshal.UnmarshalSSZ(sszObj) - require.NoError(t, err) - - compareV2RequestEquality(t, obj, unmarshal) - - // Clear out non-header data. - obj.Transactions = []bellatrix.Transaction{} - obj.Withdrawals = []*capella.Withdrawal{} - - // Unmarshal just the header. - unmarshalHeader := new(SubmitBlockRequestV2Optimistic) - err = unmarshalHeader.UnmarshalSSZHeaderOnly(sszObj) - require.NoError(t, err) - - compareV2RequestEquality(t, obj, unmarshalHeader) - - // Make sure size is correct (must have 32 bytes of ExtraData). - require.Equal(t, 944, unmarshalHeader.SizeSSZ()) -} diff --git a/mev-boost-relay/common/utils.go b/mev-boost-relay/common/utils.go deleted file mode 100644 index 3ce580173..000000000 --- a/mev-boost-relay/common/utils.go +++ /dev/null @@ -1,286 +0,0 @@ -package common - -import ( - "bytes" - "context" - "encoding/hex" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "os" - "strconv" - "strings" - "time" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - ethcommon "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/types" - "github.com/holiman/uint256" -) - -var ( - ErrInvalidForkVersion = errors.New("invalid fork version") - ErrHTTPErrorResponse = errors.New("got an HTTP error response") - ErrIncorrectLength = errors.New("incorrect length") -) - -// SlotPos returns the slot's position in the epoch (1-based, i.e. 
1..32) -func SlotPos(slot uint64) uint64 { - return (slot % SlotsPerEpoch) + 1 -} - -func makeRequest(ctx context.Context, client http.Client, method, url string, payload any) (*http.Response, error) { - var req *http.Request - var err error - - if payload == nil { - req, err = http.NewRequestWithContext(ctx, method, url, nil) - } else { - payloadBytes, err2 := json.Marshal(payload) - if err2 != nil { - return nil, err2 - } - req, err = http.NewRequestWithContext(ctx, method, url, bytes.NewReader(payloadBytes)) - } - if err != nil { - return nil, err - } - - req.Header.Add("Content-Type", "application/json") - resp, err := client.Do(req) - if err != nil { - return nil, err - } - - if resp.StatusCode > 299 { - defer resp.Body.Close() - bodyBytes, err := io.ReadAll(resp.Body) - if err != nil { - return nil, err - } - return resp, fmt.Errorf("%w: %d / %s", ErrHTTPErrorResponse, resp.StatusCode, string(bodyBytes)) - } - - return resp, nil -} - -// ComputeDomain computes the signing domain -func ComputeDomain(domainType phase0.DomainType, forkVersionHex, genesisValidatorsRootHex string) (domain phase0.Domain, err error) { - genesisValidatorsRoot := phase0.Root(ethcommon.HexToHash(genesisValidatorsRootHex)) - forkVersionBytes, err := hexutil.Decode(forkVersionHex) - if err != nil || len(forkVersionBytes) != 4 { - return domain, ErrInvalidForkVersion - } - var forkVersion [4]byte - copy(forkVersion[:], forkVersionBytes[:4]) - return ssz.ComputeDomain(domainType, forkVersion, genesisValidatorsRoot), nil -} - -func GetEnv(key, defaultValue string) string { - if value, ok := os.LookupEnv(key); ok { - return value - } - return defaultValue -} - -func GetSliceEnv(key string, defaultValue []string) []string { - if value, ok := os.LookupEnv(key); ok { - return strings.Split(value, ",") - } - return defaultValue -} - -func GetIPXForwardedFor(r *http.Request) string { - forwarded := r.Header.Get("X-Forwarded-For") - if forwarded != "" { - if strings.Contains(forwarded, ",") { // return first entry of list of IPs - return strings.Split(forwarded, ",")[0] - } - return forwarded - } - return r.RemoteAddr -} - -// GetMevBoostVersionFromUserAgent returns the mev-boost version from an user agent string -// Example ua: "mev-boost/1.0.1 go-http-client" -> returns "1.0.1". 
If no version is found, returns "-" -func GetMevBoostVersionFromUserAgent(ua string) string { - parts := strings.Split(ua, " ") - if strings.HasPrefix(parts[0], "mev-boost") { - parts2 := strings.Split(parts[0], "/") - if len(parts2) == 2 { - return parts2[1] - } - } - return "-" -} - -func U256StrToUint256(s types.U256Str) *uint256.Int { - i := new(uint256.Int) - i.SetBytes(reverse(s[:])) - return i -} - -func reverse(src []byte) []byte { - dst := make([]byte, len(src)) - copy(dst, src) - for i := len(dst)/2 - 1; i >= 0; i-- { - opp := len(dst) - 1 - i - dst[i], dst[opp] = dst[opp], dst[i] - } - return dst -} - -// GetEnvStrSlice returns a slice of strings from a comma-separated env var -func GetEnvStrSlice(key string, defaultValue []string) []string { - if value, ok := os.LookupEnv(key); ok { - return strings.Split(value, ",") - } - return defaultValue -} - -func StrToPhase0Pubkey(s string) (ret phase0.BLSPubKey, err error) { - pubkeyBytes, err := hex.DecodeString(strings.TrimPrefix(s, "0x")) - if err != nil { - return ret, err - } - if len(pubkeyBytes) != phase0.PublicKeyLength { - return ret, ErrIncorrectLength - } - copy(ret[:], pubkeyBytes) - return ret, nil -} - -func StrToPhase0Hash(s string) (ret phase0.Hash32, err error) { - hashBytes, err := hex.DecodeString(strings.TrimPrefix(s, "0x")) - if err != nil { - return ret, err - } - if len(hashBytes) != phase0.Hash32Length { - return ret, ErrIncorrectLength - } - copy(ret[:], hashBytes) - return ret, nil -} - -// GetEnvDurationSec returns the value of the environment variable as duration in seconds, -// or defaultValue if the environment variable doesn't exist or is not a valid integer -func GetEnvDurationSec(key string, defaultValueSec int) time.Duration { - if value, ok := os.LookupEnv(key); ok { - val, err := strconv.Atoi(value) - if err != nil { - return time.Duration(val) * time.Second - } - } - return time.Duration(defaultValueSec) * time.Second -} - -func GetBlockSubmissionInfo(submission *VersionedSubmitBlockRequest) (*BlockSubmissionInfo, error) { - bidTrace, err := submission.BidTrace() - if err != nil { - return nil, err - } - signature, err := submission.Signature() - if err != nil { - return nil, err - } - executionPayloadBlockHash, err := submission.ExecutionPayloadBlockHash() - if err != nil { - return nil, err - } - executionPayloadParentHash, err := submission.ExecutionPayloadParentHash() - if err != nil { - return nil, err - } - gasUsed, err := submission.GasUsed() - if err != nil { - return nil, err - } - gasLimit, err := submission.GasLimit() - if err != nil { - return nil, err - } - timestamp, err := submission.Timestamp() - if err != nil { - return nil, err - } - txs, err := submission.Transactions() - if err != nil { - return nil, err - } - blockNumber, err := submission.BlockNumber() - if err != nil { - return nil, err - } - prevRandao, err := submission.PrevRandao() - if err != nil { - return nil, err - } - withdrawals, err := submission.Withdrawals() - if err != nil { - return nil, err - } - // TODO (deneb): after deneb fork error if no blob fields - var ( - blobs []deneb.Blob - blobGasUsed uint64 - excessBlobGas uint64 - ) - if submission.Version == spec.DataVersionDeneb { - blobs = submission.Deneb.BlobsBundle.Blobs - blobGasUsed = submission.Deneb.ExecutionPayload.BlobGasUsed - excessBlobGas = submission.Deneb.ExecutionPayload.ExcessBlobGas - } - return &BlockSubmissionInfo{ - BidTrace: bidTrace, - Signature: signature, - ExecutionPayloadBlockHash: executionPayloadBlockHash, - ExecutionPayloadParentHash: 
executionPayloadParentHash, - GasUsed: gasUsed, - GasLimit: gasLimit, - Timestamp: timestamp, - Transactions: txs, - PrevRandao: prevRandao, - BlockNumber: blockNumber, - Withdrawals: withdrawals, - Blobs: blobs, - BlobGasUsed: blobGasUsed, - ExcessBlobGas: excessBlobGas, - }, nil -} - -func GetBlockSubmissionExecutionPayload(submission *VersionedSubmitBlockRequest) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - switch submission.Version { - case spec.DataVersionCapella: - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionCapella, - Capella: submission.Capella.ExecutionPayload, - }, nil - case spec.DataVersionDeneb: - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.ExecutionPayloadAndBlobsBundle{ - ExecutionPayload: submission.Deneb.ExecutionPayload, - BlobsBundle: submission.Deneb.BlobsBundle, - }, - }, nil - case spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - return nil, ErrInvalidForkVersion - } - return nil, ErrEmptyPayload -} - -func JSONStringify(v interface{}) string { - out, err := json.Marshal(v) - if err != nil { - return err.Error() - } - return string(out) -} diff --git a/mev-boost-relay/common/utils_test.go b/mev-boost-relay/common/utils_test.go deleted file mode 100644 index 9565a1da3..000000000 --- a/mev-boost-relay/common/utils_test.go +++ /dev/null @@ -1,194 +0,0 @@ -package common - -import ( - "context" - "fmt" - "net/http" - "os" - "testing" - - builderApiBellatrix "github.com/attestantio/go-builder-client/api/bellatrix" - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/ethereum/go-ethereum/common" - boostTypes "github.com/flashbots/go-boost-utils/types" - "github.com/stretchr/testify/require" -) - -func TestMakePostRequest(t *testing.T) { - // Test errors - var x chan bool - resp, err := makeRequest(context.Background(), *http.DefaultClient, http.MethodGet, "", x) - require.Error(t, err) - require.Nil(t, resp) - - // To satisfy the bodyclose linter. 
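
As a usage sketch of GetBlockSubmissionExecutionPayload above: the request below is a minimal, mostly zeroed Capella submission built only for illustration (real submissions carry a fully populated bid trace, payload, and signature).

package main

import (
	"fmt"
	"log"

	builderApiCapella "github.com/attestantio/go-builder-client/api/capella"
	builderApiV1 "github.com/attestantio/go-builder-client/api/v1"
	builderSpec "github.com/attestantio/go-builder-client/spec"
	"github.com/attestantio/go-eth2-client/spec"
	"github.com/attestantio/go-eth2-client/spec/capella"
	"github.com/flashbots/mev-boost-relay/common"
)

func main() {
	// Minimal Capella submission: bid trace, execution payload and signature
	// are left zeroed here purely for illustration.
	req := &common.VersionedSubmitBlockRequest{
		VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{
			Version: spec.DataVersionCapella,
			Capella: &builderApiCapella.SubmitBlockRequest{
				Message:          &builderApiV1.BidTrace{},
				ExecutionPayload: &capella.ExecutionPayload{},
			},
		},
	}

	// Wraps the submission's execution payload into the blinded-block response shape.
	resp, err := common.GetBlockSubmissionExecutionPayload(req)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("payload response version:", resp.Version)
}
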
- if resp != nil { - resp.Body.Close() - } -} - -func TestGetMevBoostVersionFromUserAgent(t *testing.T) { - tests := []struct { - ua string - version string - }{ - {ua: "", version: "-"}, - {ua: "mev-boost", version: "-"}, - {ua: "mev-boost/v1.0.0", version: "v1.0.0"}, - {ua: "mev-boost/v1.0.0 ", version: "v1.0.0"}, - {ua: "mev-boost/v1.0.0 test", version: "v1.0.0"}, - } - - for _, test := range tests { - t.Run(test.ua, func(t *testing.T) { - require.Equal(t, test.version, GetMevBoostVersionFromUserAgent(test.ua)) - }) - } -} - -func TestU256StrToUint256(t *testing.T) { - tests := []struct { - name string - u256Str boostTypes.U256Str - want string - }{ - { - name: "zero", - u256Str: boostTypes.U256Str(common.HexToHash("0000000000000000000000000000000000000000000000000000000000000000")), - want: "0", - }, - { - name: "one", - u256Str: boostTypes.U256Str(common.HexToHash("0100000000000000000000000000000000000000000000000000000000000000")), - want: "1", - }, - { - name: "bigger value", - u256Str: boostTypes.U256Str(common.HexToHash("69D8340F00000000000000000000000000000000000000000000000000000000")), - want: "255121513", - }, - { - name: "max value", - u256Str: boostTypes.U256Str(common.HexToHash("ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff")), - want: "115792089237316195423570985008687907853269984665640564039457584007913129639935", - }, - } - - for _, test := range tests { - t.Run(test.name, func(t *testing.T) { - got := U256StrToUint256(test.u256Str) - require.Equal(t, test.want, fmt.Sprintf("%d", got)) - }) - } -} - -func TestGetEnvStrSlice(t *testing.T) { - testEnvVar := "TESTENV_TestGetEnvStrSlice" - os.Unsetenv(testEnvVar) - r := GetEnvStrSlice(testEnvVar, nil) - require.Empty(t, r) - - t.Setenv(testEnvVar, "") - r = GetEnvStrSlice(testEnvVar, nil) - require.Len(t, r, 1) - require.Equal(t, "", r[0]) - - t.Setenv(testEnvVar, "str1,str2") - r = GetEnvStrSlice(testEnvVar, nil) - require.Len(t, r, 2) - require.Equal(t, "str1", r[0]) - require.Equal(t, "str2", r[1]) - os.Unsetenv(testEnvVar) -} - -func TestGetBlockSubmissionInfo(t *testing.T) { - cases := []struct { - name string - payload *VersionedSubmitBlockRequest - expected *BlockSubmissionInfo - err string - }{ - { - name: "valid builderApiCapella", - payload: &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{}, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expected: &BlockSubmissionInfo{ - BidTrace: &builderApiV1.BidTrace{}, - }, - }, - { - name: "unsupported version", - payload: &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionBellatrix, - Bellatrix: &builderApiBellatrix.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{}, - ExecutionPayload: &bellatrix.ExecutionPayload{}, - }, - }, - }, - expected: nil, - err: "unsupported version", - }, - { - name: "missing data", - payload: &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - }, - }, - expected: nil, - err: "no data", - }, - { - name: "missing message", - payload: &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: &capella.ExecutionPayload{}, - }, - 
}, - }, - expected: nil, - err: "no data message", - }, - { - name: "missing execution payload", - payload: &VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{}, - }, - }, - }, - expected: nil, - err: "no data execution payload", - }, - } - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - submission, err := GetBlockSubmissionInfo(tc.payload) - require.Equal(t, tc.expected, submission) - if tc.err == "" { - require.NoError(t, err) - } else { - require.Equal(t, tc.err, err.Error()) - } - }) - } -} diff --git a/mev-boost-relay/database/database.go b/mev-boost-relay/database/database.go deleted file mode 100644 index d1568103c..000000000 --- a/mev-boost-relay/database/database.go +++ /dev/null @@ -1,661 +0,0 @@ -// Package database exposes the postgres database -package database - -import ( - "context" - "encoding/json" - "fmt" - "os" - "strings" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database/migrations" - "github.com/flashbots/mev-boost-relay/database/vars" - "github.com/jmoiron/sqlx" - _ "github.com/lib/pq" - migrate "github.com/rubenv/sql-migrate" -) - -type IDatabaseService interface { - NumRegisteredValidators() (count uint64, err error) - SaveValidatorRegistration(entry ValidatorRegistrationEntry) error - GetLatestValidatorRegistrations(timestampOnly bool) ([]*ValidatorRegistrationEntry, error) - GetValidatorRegistration(pubkey string) (*ValidatorRegistrationEntry, error) - GetValidatorRegistrationsForPubkeys(pubkeys []string) ([]*ValidatorRegistrationEntry, error) - - SaveBuilderBlockSubmission(payload *common.VersionedSubmitBlockRequest, - requestError, - validationError error, - receivedAt, - eligibleAt time.Time, - wasSimulated, - saveExecPayload bool, - profile common.Profile, - optimisticSubmission bool, - inclusionProof *common.InclusionProof) (entry *BuilderBlockSubmissionEntry, err error) - GetBlockSubmissionEntry(slot uint64, proposerPubkey, blockHash string) (entry *BuilderBlockSubmissionEntry, err error) - GetBuilderSubmissions(filters GetBuilderSubmissionsFilters) ([]*BuilderBlockSubmissionEntry, error) - GetBuilderSubmissionsBySlots(slotFrom, slotTo uint64) (entries []*BuilderBlockSubmissionEntry, err error) - GetExecutionPayloadEntryByID(executionPayloadID int64) (entry *ExecutionPayloadEntry, err error) - GetExecutionPayloadEntryBySlotPkHash(slot uint64, proposerPubkey, blockHash string) (entry *ExecutionPayloadEntry, err error) - GetExecutionPayloads(idFirst, idLast uint64) (entries []*ExecutionPayloadEntry, err error) - DeleteExecutionPayloads(idFirst, idLast uint64) error - - SaveDeliveredPayload(bidTrace *common.BidTraceV2WithBlobFields, signedBlindedBeaconBlock *common.VersionedSignedBlindedBeaconBlock, signedAt time.Time, publishMs uint64) error - GetNumDeliveredPayloads() (uint64, error) - GetRecentDeliveredPayloads(filters GetPayloadsFilters) ([]*DeliveredPayloadEntry, error) - GetDeliveredPayloads(idFirst, idLast uint64) (entries []*DeliveredPayloadEntry, err error) - - GetBlockBuilders() ([]*BlockBuilderEntry, error) - GetBlockBuilderByPubkey(pubkey string) (*BlockBuilderEntry, error) - SetBlockBuilderStatus(pubkey string, status common.BuilderStatus) error - SetBlockBuilderIDStatusIsOptimistic(pubkey string, isOptimistic bool) error - 
SetBlockBuilderCollateral(pubkey, builderID, collateral string) error - UpsertBlockBuilderEntryAfterSubmission(lastSubmission *BuilderBlockSubmissionEntry, isError bool) error - IncBlockBuilderStatsAfterGetPayload(builderPubkey string) error - - InsertBuilderDemotion(submitBlockRequest *common.VersionedSubmitBlockRequest, simError error) error - UpdateBuilderDemotion(trace *common.BidTraceV2WithBlobFields, signedBlock *common.VersionedSignedProposal, signedRegistration *builderApiV1.SignedValidatorRegistration) error - GetBuilderDemotion(trace *common.BidTraceV2WithBlobFields) (*BuilderDemotionEntry, error) - - GetTooLateGetPayload(slot uint64) (entries []*TooLateGetPayloadEntry, err error) - InsertTooLateGetPayload(slot uint64, proposerPubkey, blockHash string, slotStart, requestTime, decodeTime, msIntoSlot uint64) error -} - -type DatabaseService struct { - DB *sqlx.DB - - nstmtInsertExecutionPayload *sqlx.NamedStmt - nstmtInsertBlockBuilderSubmission *sqlx.NamedStmt -} - -func NewDatabaseService(dsn string) (*DatabaseService, error) { - db, err := sqlx.Connect("postgres", dsn) - if err != nil { - return nil, err - } - - db.DB.SetMaxOpenConns(50) - db.DB.SetMaxIdleConns(10) - db.DB.SetConnMaxIdleTime(0) - - if os.Getenv("DB_DONT_APPLY_SCHEMA") == "" { - migrate.SetTable(vars.TableMigrations) - _, err := migrate.Exec(db.DB, "postgres", migrations.Migrations, migrate.Up) - if err != nil { - return nil, err - } - } - - dbService := &DatabaseService{DB: db} //nolint:exhaustruct - err = dbService.prepareNamedQueries() - return dbService, err -} - -func (s *DatabaseService) prepareNamedQueries() (err error) { - // Insert execution payload - query := `INSERT INTO ` + vars.TableExecutionPayload + ` - (slot, proposer_pubkey, block_hash, version, payload) VALUES - (:slot, :proposer_pubkey, :block_hash, :version, :payload) - ON CONFLICT (slot, proposer_pubkey, block_hash) DO UPDATE SET slot=:slot - RETURNING id` - s.nstmtInsertExecutionPayload, err = s.DB.PrepareNamed(query) - if err != nil { - return err - } - - // Insert block builder submission - query = `INSERT INTO ` + vars.TableBuilderBlockSubmission + ` - (received_at, eligible_at, execution_payload_id, was_simulated, sim_success, sim_error, sim_req_error, signature, slot, parent_hash, block_hash, builder_pubkey, proposer_pubkey, proposer_fee_recipient, gas_used, gas_limit, num_tx, value, epoch, block_number, decode_duration, prechecks_duration, simulation_duration, redis_update_duration, total_duration, optimistic_submission) VALUES - (:received_at, :eligible_at, :execution_payload_id, :was_simulated, :sim_success, :sim_error, :sim_req_error, :signature, :slot, :parent_hash, :block_hash, :builder_pubkey, :proposer_pubkey, :proposer_fee_recipient, :gas_used, :gas_limit, :num_tx, :value, :epoch, :block_number, :decode_duration, :prechecks_duration, :simulation_duration, :redis_update_duration, :total_duration, :optimistic_submission) - RETURNING id` - s.nstmtInsertBlockBuilderSubmission, err = s.DB.PrepareNamed(query) - return err -} - -func (s *DatabaseService) Close() error { - return s.DB.Close() -} - -// NumRegisteredValidators returns the number of unique pubkeys that have registered -func (s *DatabaseService) NumRegisteredValidators() (count uint64, err error) { - query := `SELECT COUNT(*) FROM (SELECT DISTINCT pubkey FROM ` + vars.TableValidatorRegistration + `) AS temp;` - row := s.DB.QueryRow(query) - err = row.Scan(&count) - return count, err -} - -func (s *DatabaseService) NumValidatorRegistrationRows() (count uint64, err error) { 
- query := `SELECT COUNT(*) FROM ` + vars.TableValidatorRegistration + `;` - row := s.DB.QueryRow(query) - err = row.Scan(&count) - return count, err -} - -func (s *DatabaseService) SaveValidatorRegistration(entry ValidatorRegistrationEntry) error { - query := `WITH latest_registration AS ( - SELECT DISTINCT ON (pubkey) pubkey, fee_recipient, timestamp, gas_limit, signature FROM ` + vars.TableValidatorRegistration + ` WHERE pubkey=:pubkey ORDER BY pubkey, timestamp DESC limit 1 - ) - INSERT INTO ` + vars.TableValidatorRegistration + ` (pubkey, fee_recipient, timestamp, gas_limit, signature) - SELECT :pubkey, :fee_recipient, :timestamp, :gas_limit, :signature - WHERE NOT EXISTS ( - SELECT 1 from latest_registration WHERE pubkey=:pubkey AND :timestamp <= latest_registration.timestamp OR (:fee_recipient = latest_registration.fee_recipient AND :gas_limit = latest_registration.gas_limit) - );` - _, err := s.DB.NamedExec(query, entry) - return err -} - -func (s *DatabaseService) GetValidatorRegistration(pubkey string) (*ValidatorRegistrationEntry, error) { - query := `SELECT DISTINCT ON (pubkey) pubkey, fee_recipient, timestamp, gas_limit, signature - FROM ` + vars.TableValidatorRegistration + ` - WHERE pubkey=$1 - ORDER BY pubkey, timestamp DESC;` - entry := &ValidatorRegistrationEntry{} - err := s.DB.Get(entry, query, pubkey) - return entry, err -} - -func (s *DatabaseService) GetValidatorRegistrationsForPubkeys(pubkeys []string) (entries []*ValidatorRegistrationEntry, err error) { - query := `SELECT DISTINCT ON (pubkey) pubkey, fee_recipient, timestamp, gas_limit, signature - FROM ` + vars.TableValidatorRegistration + ` - WHERE pubkey IN (?) - ORDER BY pubkey, timestamp DESC;` - - q, args, err := sqlx.In(query, pubkeys) - if err != nil { - return nil, err - } - err = s.DB.Select(&entries, s.DB.Rebind(q), args...) 
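
A minimal usage sketch of the validator-registration methods above; the DSN is a placeholder (the tests read theirs from TEST_DB_DSN) and the registration values mirror the fixture used in the tests further down.

package main

import (
	"fmt"
	"log"

	"github.com/flashbots/mev-boost-relay/database"
)

func main() {
	// Placeholder DSN for a local Postgres instance.
	db, err := database.NewDatabaseService("postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// Per the insert query above, a registration is only stored if it is newer
	// than the latest one for this pubkey AND changes fee_recipient or gas_limit.
	reg := database.ValidatorRegistrationEntry{
		Pubkey:       "0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908",
		FeeRecipient: "0xffbb8996515293fcd87ca09b5c6ffe5c17f043c6",
		Timestamp:    1663311456,
		GasLimit:     30000000,
		Signature:    "0xab6fa6462f658708f1a9030faeac588d55b1e28cc1f506b3ef938eeeec0171d4209865fb66bbb94e52c0c160a63975e51795ee8d1da38219b3f80d7d14f003421a255d99b744bd71f45f0cb2cd17948afff67ad6c9163fcd20b48f6315dac7cc",
	}
	if err := db.SaveValidatorRegistration(reg); err != nil {
		log.Fatal(err)
	}

	// Reads back the latest registration for that pubkey.
	entry, err := db.GetValidatorRegistration(reg.Pubkey)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("latest fee recipient:", entry.FeeRecipient)
}
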
- return entries, err -} - -func (s *DatabaseService) GetLatestValidatorRegistrations(timestampOnly bool) ([]*ValidatorRegistrationEntry, error) { - // query details: https://stackoverflow.com/questions/3800551/select-first-row-in-each-group-by-group/7630564#7630564 - query := `SELECT DISTINCT ON (pubkey) pubkey, fee_recipient, timestamp, gas_limit, signature` - if timestampOnly { - query = `SELECT DISTINCT ON (pubkey) pubkey, timestamp` - } - query += ` FROM ` + vars.TableValidatorRegistration + ` ORDER BY pubkey, timestamp DESC;` - - var registrations []*ValidatorRegistrationEntry - err := s.DB.Select(®istrations, query) - return registrations, err -} - -func (s *DatabaseService) SaveBuilderBlockSubmission( - payload *common.VersionedSubmitBlockRequest, - requestError, - validationError error, - receivedAt, - eligibleAt time.Time, - wasSimulated, - saveExecPayload bool, - profile common.Profile, - optimisticSubmission bool, - inclusionProof *common.InclusionProof, -) (entry *BuilderBlockSubmissionEntry, err error) { - // Save execution_payload: insert, or if already exists update to be able to return the id ('on conflict do nothing' doesn't return an id) - execPayloadEntry, err := PayloadToExecPayloadEntry(payload) - if err != nil { - return nil, err - } - - if saveExecPayload { - err = s.nstmtInsertExecutionPayload.QueryRow(execPayloadEntry).Scan(&execPayloadEntry.ID) - if err != nil { - return nil, err - } - } - - // Save block_submission - simErrStr := "" - if validationError != nil { - simErrStr = validationError.Error() - } - - requestErrStr := "" - if requestError != nil { - requestErrStr = requestError.Error() - } - - submission, err := common.GetBlockSubmissionInfo(payload) - if err != nil { - return nil, err - } - - jsonPreconfirmations, err := json.Marshal(inclusionProof) - if err != nil { - return nil, err - } - - blockSubmissionEntry := &BuilderBlockSubmissionEntry{ - ReceivedAt: NewNullTime(receivedAt), - EligibleAt: NewNullTime(eligibleAt), - ExecutionPayloadID: NewNullInt64(execPayloadEntry.ID), - - WasSimulated: wasSimulated, - SimSuccess: wasSimulated && validationError == nil, - SimError: simErrStr, - SimReqError: requestErrStr, - - Signature: submission.Signature.String(), - - Slot: submission.BidTrace.Slot, - BlockHash: submission.BidTrace.BlockHash.String(), - ParentHash: submission.BidTrace.ParentHash.String(), - - BuilderPubkey: submission.BidTrace.BuilderPubkey.String(), - ProposerPubkey: submission.BidTrace.ProposerPubkey.String(), - ProposerFeeRecipient: submission.BidTrace.ProposerFeeRecipient.String(), - - GasUsed: submission.GasUsed, - GasLimit: submission.GasLimit, - - NumTx: uint64(len(submission.Transactions)), - Value: submission.BidTrace.Value.Dec(), - - Epoch: submission.BidTrace.Slot / common.SlotsPerEpoch, - BlockNumber: submission.BlockNumber, - - DecodeDuration: profile.Decode, - PrechecksDuration: profile.Prechecks, - SimulationDuration: profile.Simulation, - RedisUpdateDuration: profile.RedisUpdate, - TotalDuration: profile.Total, - OptimisticSubmission: optimisticSubmission, - - // BOLT: add preconfirmations - Preconfirmations: string(jsonPreconfirmations), - } - err = s.nstmtInsertBlockBuilderSubmission.QueryRow(blockSubmissionEntry).Scan(&blockSubmissionEntry.ID) - return blockSubmissionEntry, err -} - -func (s *DatabaseService) GetBlockSubmissionEntry(slot uint64, proposerPubkey, blockHash string) (entry *BuilderBlockSubmissionEntry, err error) { - query := `SELECT id, inserted_at, received_at, eligible_at, execution_payload_id, sim_success, 
sim_error, signature, slot, parent_hash, block_hash, builder_pubkey, proposer_pubkey, proposer_fee_recipient, gas_used, gas_limit, num_tx, value, epoch, block_number, decode_duration, prechecks_duration, simulation_duration, redis_update_duration, total_duration, optimistic_submission - FROM ` + vars.TableBuilderBlockSubmission + ` - WHERE slot=$1 AND proposer_pubkey=$2 AND block_hash=$3 - ORDER BY builder_pubkey ASC - LIMIT 1` - entry = &BuilderBlockSubmissionEntry{} - err = s.DB.Get(entry, query, slot, proposerPubkey, blockHash) - return entry, err -} - -func (s *DatabaseService) GetExecutionPayloadEntryByID(executionPayloadID int64) (entry *ExecutionPayloadEntry, err error) { - query := `SELECT id, inserted_at, slot, proposer_pubkey, block_hash, version, payload FROM ` + vars.TableExecutionPayload + ` WHERE id=$1` - entry = &ExecutionPayloadEntry{} - err = s.DB.Get(entry, query, executionPayloadID) - return entry, err -} - -func (s *DatabaseService) GetExecutionPayloadEntryBySlotPkHash(slot uint64, proposerPubkey, blockHash string) (entry *ExecutionPayloadEntry, err error) { - query := `SELECT id, inserted_at, slot, proposer_pubkey, block_hash, version, payload - FROM ` + vars.TableExecutionPayload + ` - WHERE slot=$1 AND proposer_pubkey=$2 AND block_hash=$3` - entry = &ExecutionPayloadEntry{} - err = s.DB.Get(entry, query, slot, proposerPubkey, blockHash) - return entry, err -} - -func (s *DatabaseService) SaveDeliveredPayload(bidTrace *common.BidTraceV2WithBlobFields, signedBlindedBeaconBlock *common.VersionedSignedBlindedBeaconBlock, signedAt time.Time, publishMs uint64) error { - _signedBlindedBeaconBlock, err := json.Marshal(signedBlindedBeaconBlock) - if err != nil { - return err - } - - deliveredPayloadEntry := DeliveredPayloadEntry{ - SignedAt: NewNullTime(signedAt), - SignedBlindedBeaconBlock: NewNullString(string(_signedBlindedBeaconBlock)), - - Slot: bidTrace.Slot, - Epoch: bidTrace.Slot / common.SlotsPerEpoch, - - BuilderPubkey: bidTrace.BuilderPubkey.String(), - ProposerPubkey: bidTrace.ProposerPubkey.String(), - ProposerFeeRecipient: bidTrace.ProposerFeeRecipient.String(), - - ParentHash: bidTrace.ParentHash.String(), - BlockHash: bidTrace.BlockHash.String(), - BlockNumber: bidTrace.BlockNumber, - - GasUsed: bidTrace.GasUsed, - GasLimit: bidTrace.GasLimit, - - NumTx: bidTrace.NumTx, - Value: bidTrace.Value.ToBig().String(), - - NumBlobs: bidTrace.NumBlobs, - BlobGasUsed: bidTrace.BlobGasUsed, - ExcessBlobGas: bidTrace.ExcessBlobGas, - - PublishMs: publishMs, - } - - query := `INSERT INTO ` + vars.TableDeliveredPayload + ` - (signed_at, signed_blinded_beacon_block, slot, epoch, builder_pubkey, proposer_pubkey, proposer_fee_recipient, parent_hash, block_hash, block_number, gas_used, gas_limit, num_tx, value, num_blobs, blob_gas_used, excess_blob_gas, publish_ms) VALUES - (:signed_at, :signed_blinded_beacon_block, :slot, :epoch, :builder_pubkey, :proposer_pubkey, :proposer_fee_recipient, :parent_hash, :block_hash, :block_number, :gas_used, :gas_limit, :num_tx, :value, :num_blobs, :blob_gas_used, :excess_blob_gas, :publish_ms) - ON CONFLICT DO NOTHING` - _, err = s.DB.NamedExec(query, deliveredPayloadEntry) - return err -} - -func (s *DatabaseService) GetRecentDeliveredPayloads(queryArgs GetPayloadsFilters) ([]*DeliveredPayloadEntry, error) { - arg := map[string]interface{}{ - "limit": queryArgs.Limit, - "slot": queryArgs.Slot, - "cursor": queryArgs.Cursor, - "block_hash": queryArgs.BlockHash, - "block_number": queryArgs.BlockNumber, - "proposer_pubkey": 
queryArgs.ProposerPubkey, - "builder_pubkey": queryArgs.BuilderPubkey, - } - - fields := "id, inserted_at, signed_at, slot, epoch, builder_pubkey, proposer_pubkey, proposer_fee_recipient, parent_hash, block_hash, block_number, num_tx, value, num_blobs, blob_gas_used, excess_blob_gas, gas_used, gas_limit, publish_ms" - - whereConds := []string{} - if queryArgs.Slot > 0 { - whereConds = append(whereConds, "slot = :slot") - } else if queryArgs.Cursor > 0 { - whereConds = append(whereConds, "slot <= :cursor") - } - if queryArgs.BlockHash != "" { - whereConds = append(whereConds, "block_hash = :block_hash") - } - if queryArgs.BlockNumber > 0 { - whereConds = append(whereConds, "block_number = :block_number") - } - if queryArgs.ProposerPubkey != "" { - whereConds = append(whereConds, "proposer_pubkey = :proposer_pubkey") - } - if queryArgs.BuilderPubkey != "" { - whereConds = append(whereConds, "builder_pubkey = :builder_pubkey") - } - - where := "" - if len(whereConds) > 0 { - where = "WHERE " + strings.Join(whereConds, " AND ") - } - - orderBy := "slot DESC" - if queryArgs.OrderByValue == 1 { - orderBy = "value ASC" - } else if queryArgs.OrderByValue == -1 { - orderBy = "value DESC" - } - - query := fmt.Sprintf("SELECT %s FROM %s %s ORDER BY %s LIMIT :limit", fields, vars.TableDeliveredPayload, where, orderBy) - ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) - defer cancel() - - entries := []*DeliveredPayloadEntry{} - rows, err := s.DB.NamedQueryContext(ctx, query, arg) - if err != nil { - return nil, err - } - for rows.Next() { - entry := new(DeliveredPayloadEntry) - err = rows.StructScan(entry) - if err != nil { - return nil, err - } - entries = append(entries, entry) - } - return entries, nil -} - -func (s *DatabaseService) GetDeliveredPayloads(idFirst, idLast uint64) (entries []*DeliveredPayloadEntry, err error) { - query := `SELECT id, inserted_at, signed_at, slot, epoch, builder_pubkey, proposer_pubkey, proposer_fee_recipient, parent_hash, block_hash, block_number, num_tx, value, num_blobs, blob_gas_used, excess_blob_gas, gas_used, gas_limit, publish_ms - FROM ` + vars.TableDeliveredPayload + ` - WHERE id >= $1 AND id <= $2 - ORDER BY slot ASC` - - err = s.DB.Select(&entries, query, idFirst, idLast) - return entries, err -} - -func (s *DatabaseService) GetNumDeliveredPayloads() (uint64, error) { - var count uint64 - err := s.DB.QueryRow("SELECT COUNT(*) FROM " + vars.TableDeliveredPayload).Scan(&count) - return count, err -} - -func (s *DatabaseService) GetBuilderSubmissions(filters GetBuilderSubmissionsFilters) ([]*BuilderBlockSubmissionEntry, error) { - arg := map[string]interface{}{ - "limit": filters.Limit, - "slot": filters.Slot, - "block_hash": filters.BlockHash, - "block_number": filters.BlockNumber, - "builder_pubkey": filters.BuilderPubkey, - } - - fields := "id, inserted_at, received_at, eligible_at, slot, epoch, builder_pubkey, proposer_pubkey, proposer_fee_recipient, parent_hash, block_hash, block_number, num_tx, value, gas_used, gas_limit, optimistic_submission" - limit := "LIMIT :limit" - - whereConds := []string{ - "(sim_success = true OR optimistic_submission = true)", - } - if filters.Slot > 0 { - whereConds = append(whereConds, "slot = :slot") - limit = "" // remove the limit when filtering by slot - } - if filters.BlockNumber > 0 { - whereConds = append(whereConds, "block_number = :block_number") - limit = "" // remove the limit when filtering by block_number - } - if filters.BlockHash != "" { - whereConds = append(whereConds, "block_hash = 
:block_hash") - limit = "" // remove the limit when filtering by block_hash - } - if filters.BuilderPubkey != "" { - whereConds = append(whereConds, "builder_pubkey = :builder_pubkey") - } - - where := "" - if len(whereConds) > 0 { - where = "WHERE " + strings.Join(whereConds, " AND ") - } - - query := fmt.Sprintf("SELECT %s FROM %s %s ORDER BY slot DESC, inserted_at DESC %s", fields, vars.TableBuilderBlockSubmission, where, limit) - ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second) - defer cancel() - - entries := []*BuilderBlockSubmissionEntry{} - rows, err := s.DB.NamedQueryContext(ctx, query, arg) - if err != nil { - return nil, err - } - for rows.Next() { - entry := new(BuilderBlockSubmissionEntry) - err = rows.StructScan(entry) - if err != nil { - return nil, err - } - entries = append(entries, entry) - } - return entries, nil -} - -func (s *DatabaseService) GetBuilderSubmissionsBySlots(slotFrom, slotTo uint64) (entries []*BuilderBlockSubmissionEntry, err error) { - query := `SELECT id, inserted_at, received_at, eligible_at, slot, epoch, builder_pubkey, proposer_pubkey, proposer_fee_recipient, parent_hash, block_hash, block_number, num_tx, value, gas_used, gas_limit - FROM ` + vars.TableBuilderBlockSubmission + ` - WHERE sim_success = true AND slot >= $1 AND slot <= $2 - ORDER BY slot ASC, inserted_at ASC` - - err = s.DB.Select(&entries, query, slotFrom, slotTo) - return entries, err -} - -func (s *DatabaseService) UpsertBlockBuilderEntryAfterSubmission(lastSubmission *BuilderBlockSubmissionEntry, isError bool) error { - entry := BlockBuilderEntry{ - BuilderPubkey: lastSubmission.BuilderPubkey, - LastSubmissionID: NewNullInt64(lastSubmission.ID), - LastSubmissionSlot: lastSubmission.Slot, - NumSubmissionsTotal: 1, - NumSubmissionsSimError: 0, - Collateral: "0", // required to satisfy numeric type, will not override collateral - } - if isError { - entry.NumSubmissionsSimError = 1 - } - - // Upsert - query := `INSERT INTO ` + vars.TableBlockBuilder + ` - (builder_pubkey, description, is_high_prio, is_blacklisted, is_optimistic, collateral, builder_id, last_submission_id, last_submission_slot, num_submissions_total, num_submissions_simerror) VALUES - (:builder_pubkey, :description, :is_high_prio, :is_blacklisted, :is_optimistic, :collateral, :builder_id, :last_submission_id, :last_submission_slot, :num_submissions_total, :num_submissions_simerror) - ON CONFLICT (builder_pubkey) DO UPDATE SET - last_submission_id = :last_submission_id, - last_submission_slot = :last_submission_slot, - num_submissions_total = ` + vars.TableBlockBuilder + `.num_submissions_total + 1, - num_submissions_simerror = ` + vars.TableBlockBuilder + `.num_submissions_simerror + :num_submissions_simerror;` - _, err := s.DB.NamedExec(query, entry) - return err -} - -func (s *DatabaseService) GetBlockBuilders() ([]*BlockBuilderEntry, error) { - query := `SELECT id, inserted_at, builder_pubkey, description, is_high_prio, is_blacklisted, is_optimistic, collateral, builder_id, last_submission_id, last_submission_slot, num_submissions_total, num_submissions_simerror, num_sent_getpayload FROM ` + vars.TableBlockBuilder + ` ORDER BY id ASC;` - entries := []*BlockBuilderEntry{} - err := s.DB.Select(&entries, query) - return entries, err -} - -func (s *DatabaseService) GetBlockBuilderByPubkey(pubkey string) (*BlockBuilderEntry, error) { - query := `SELECT id, inserted_at, builder_pubkey, description, is_high_prio, is_blacklisted, is_optimistic, collateral, builder_id, last_submission_id, 
last_submission_slot, num_submissions_total, num_submissions_simerror, num_sent_getpayload FROM ` + vars.TableBlockBuilder + ` WHERE builder_pubkey=$1;` - entry := &BlockBuilderEntry{} - err := s.DB.Get(entry, query, pubkey) - return entry, err -} - -func (s *DatabaseService) SetBlockBuilderStatus(pubkey string, status common.BuilderStatus) error { - query := `UPDATE ` + vars.TableBlockBuilder + ` SET is_high_prio=$1, is_blacklisted=$2, is_optimistic=$3 WHERE builder_pubkey=$4;` - _, err := s.DB.Exec(query, status.IsHighPrio, status.IsBlacklisted, status.IsOptimistic, pubkey) - return err -} - -func (s *DatabaseService) SetBlockBuilderIDStatusIsOptimistic(pubkey string, isOptimistic bool) error { - builder, err := s.GetBlockBuilderByPubkey(pubkey) - if err != nil { - return fmt.Errorf("unable to read block builder: %v, %w", pubkey, err) - } - if builder.BuilderID == "" { - return fmt.Errorf("unable update optimistic status of a builder with no builder id: %v", pubkey) //nolint:goerr113 - } - query := `UPDATE ` + vars.TableBlockBuilder + ` SET is_optimistic=$1 WHERE builder_id=$2;` - _, err = s.DB.Exec(query, isOptimistic, builder.BuilderID) - return err -} - -func (s *DatabaseService) SetBlockBuilderCollateral(pubkey, builderID, collateral string) error { - query := `UPDATE ` + vars.TableBlockBuilder + ` SET builder_id=$1, collateral=$2 WHERE builder_pubkey=$3;` - _, err := s.DB.Exec(query, builderID, collateral, pubkey) - return err -} - -func (s *DatabaseService) IncBlockBuilderStatsAfterGetPayload(builderPubkey string) error { - query := `UPDATE ` + vars.TableBlockBuilder + ` - SET num_sent_getpayload=num_sent_getpayload+1 - WHERE builder_pubkey=$1;` - _, err := s.DB.Exec(query, builderPubkey) - return err -} - -func (s *DatabaseService) GetExecutionPayloads(idFirst, idLast uint64) (entries []*ExecutionPayloadEntry, err error) { - query := `SELECT id, inserted_at, slot, proposer_pubkey, block_hash, version, payload FROM ` + vars.TableExecutionPayload + ` WHERE id >= $1 AND id <= $2 ORDER BY id ASC` - err = s.DB.Select(&entries, query, idFirst, idLast) - return entries, err -} - -func (s *DatabaseService) DeleteExecutionPayloads(idFirst, idLast uint64) error { - query := `DELETE FROM ` + vars.TableExecutionPayload + ` WHERE id >= $1 AND id <= $2` - _, err := s.DB.Exec(query, idFirst, idLast) - return err -} - -func (s *DatabaseService) InsertBuilderDemotion(submitBlockRequest *common.VersionedSubmitBlockRequest, simError error) error { - _submitBlockRequest, err := json.Marshal(submitBlockRequest.Capella) - if err != nil { - return err - } - submission, err := common.GetBlockSubmissionInfo(submitBlockRequest) - if err != nil { - return err - } - builderDemotionEntry := BuilderDemotionEntry{ - SubmitBlockRequest: NewNullString(string(_submitBlockRequest)), - - Epoch: submission.BidTrace.Slot / common.SlotsPerEpoch, - Slot: submission.BidTrace.Slot, - - BuilderPubkey: submission.BidTrace.BuilderPubkey.String(), - ProposerPubkey: submission.BidTrace.ProposerPubkey.String(), - - Value: submission.BidTrace.Value.Dec(), - FeeRecipient: submission.BidTrace.ProposerFeeRecipient.String(), - - BlockHash: submission.BidTrace.BlockHash.String(), - SimError: simError.Error(), - } - - query := `INSERT INTO ` + vars.TableBuilderDemotions + ` - (submit_block_request, epoch, slot, builder_pubkey, proposer_pubkey, value, fee_recipient, block_hash, sim_error) VALUES - (:submit_block_request, :epoch, :slot, :builder_pubkey, :proposer_pubkey, :value, :fee_recipient, :block_hash, :sim_error); - ` - _, err = 
s.DB.NamedExec(query, builderDemotionEntry) - return err -} - -func (s *DatabaseService) UpdateBuilderDemotion(trace *common.BidTraceV2WithBlobFields, signedBlock *common.VersionedSignedProposal, signedRegistration *builderApiV1.SignedValidatorRegistration) error { - _signedBeaconBlock, err := json.Marshal(signedBlock) - if err != nil { - return err - } - _signedValidatorRegistration, err := json.Marshal(signedRegistration) - if err != nil { - return err - } - sbb := NewNullString(string(_signedBeaconBlock)) - svr := NewNullString(string(_signedValidatorRegistration)) - query := `UPDATE ` + vars.TableBuilderDemotions + ` SET - signed_beacon_block=$1, signed_validator_registration=$2 - WHERE slot=$3 AND builder_pubkey=$4 AND block_hash=$5;` - _, err = s.DB.Exec(query, sbb, svr, trace.Slot, trace.BuilderPubkey.String(), trace.BlockHash.String()) - return err -} - -func (s *DatabaseService) GetBuilderDemotion(trace *common.BidTraceV2WithBlobFields) (*BuilderDemotionEntry, error) { - query := `SELECT submit_block_request, signed_beacon_block, signed_validator_registration, epoch, slot, builder_pubkey, proposer_pubkey, value, fee_recipient, block_hash, sim_error FROM ` + vars.TableBuilderDemotions + ` - WHERE slot=$1 AND builder_pubkey=$2 AND block_hash=$3` - entry := &BuilderDemotionEntry{} - err := s.DB.Get(entry, query, trace.Slot, trace.BuilderPubkey.String(), trace.BlockHash.String()) - if err != nil { - return nil, err - } - return entry, nil -} - -func (s *DatabaseService) GetTooLateGetPayload(slot uint64) (entries []*TooLateGetPayloadEntry, err error) { - query := `SELECT id, inserted_at, slot, slot_start_timestamp, request_timestamp, decode_timestamp, proposer_pubkey, block_hash, ms_into_slot FROM ` + vars.TableTooLateGetPayload + ` WHERE slot = $1` - err = s.DB.Select(&entries, query, slot) - return entries, err -} - -func (s *DatabaseService) InsertTooLateGetPayload(slot uint64, proposerPubkey, blockHash string, slotStart, requestTime, decodeTime, msIntoSlot uint64) error { - entry := TooLateGetPayloadEntry{ - Slot: slot, - SlotStartTimestamp: slotStart, - RequestTimestamp: requestTime, - DecodeTimestamp: decodeTime, - ProposerPubkey: proposerPubkey, - BlockHash: blockHash, - MsIntoSlot: msIntoSlot, - } - - query := `INSERT INTO ` + vars.TableTooLateGetPayload + ` - (slot, slot_start_timestamp, request_timestamp, decode_timestamp, proposer_pubkey, block_hash, ms_into_slot) VALUES - (:slot, :slot_start_timestamp, :request_timestamp, :decode_timestamp, :proposer_pubkey, :block_hash, :ms_into_slot) - ON CONFLICT (slot, proposer_pubkey, block_hash) DO NOTHING;` - _, err := s.DB.NamedExec(query, entry) - return err -} diff --git a/mev-boost-relay/database/database_test.go b/mev-boost-relay/database/database_test.go deleted file mode 100644 index b285c6da9..000000000 --- a/mev-boost-relay/database/database_test.go +++ /dev/null @@ -1,491 +0,0 @@ -package database - -import ( - "database/sql" - "fmt" - "os" - "strconv" - "testing" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - eth2Api "github.com/attestantio/go-eth2-client/api" - eth2ApiV1Deneb "github.com/attestantio/go-eth2-client/api/v1/deneb" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/mev-boost-relay/common" - 
"github.com/flashbots/mev-boost-relay/database/migrations" - "github.com/flashbots/mev-boost-relay/database/vars" - "github.com/holiman/uint256" - "github.com/jmoiron/sqlx" - "github.com/stretchr/testify/require" -) - -const ( - slot = uint64(42) - collateral = 1000 - collateralStr = "1000" - builderID = "builder0x69" - randao = "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2" - optimisticSubmission = true -) - -var ( - runDBTests = os.Getenv("RUN_DB_TESTS") == "1" //|| true - feeRecipient = bellatrix.ExecutionAddress{0x02} - blockHashStr = "0xa645370cc112c2e8e3cce121416c7dc849e773506d4b6fb9b752ada711355369" - testDBDSN = common.GetEnv("TEST_DB_DSN", "postgres://postgres:postgres@localhost:5432/postgres?sslmode=disable") - profile = common.Profile{ - Decode: 42, - Prechecks: 43, - Simulation: 44, - RedisUpdate: 45, - Total: 46, - } - errFoo = fmt.Errorf("fake simulation error") -) - -func createValidatorRegistration(pubKey string) ValidatorRegistrationEntry { - return ValidatorRegistrationEntry{ - Pubkey: pubKey, - FeeRecipient: "0xffbb8996515293fcd87ca09b5c6ffe5c17f043c6", - Timestamp: 1663311456, - GasLimit: 30000000, - Signature: "0xab6fa6462f658708f1a9030faeac588d55b1e28cc1f506b3ef938eeeec0171d4209865fb66bbb94e52c0c160a63975e51795ee8d1da38219b3f80d7d14f003421a255d99b744bd71f45f0cb2cd17948afff67ad6c9163fcd20b48f6315dac7cc", - } -} - -func getTestKeyPair(t *testing.T) (*phase0.BLSPubKey, *bls.SecretKey) { - t.Helper() - sk, _, err := bls.GenerateNewKeypair() - require.NoError(t, err) - blsPubkey, err := bls.PublicKeyFromSecretKey(sk) - require.NoError(t, err) - var pubkey phase0.BLSPubKey - bytes := blsPubkey.Bytes() - copy(pubkey[:], bytes[:]) - return &pubkey, sk -} - -func insertTestBuilder(t *testing.T, db IDatabaseService) string { - t.Helper() - pk, sk := getTestKeyPair(t) - var testBlockHash phase0.Hash32 - hashSlice, err := hexutil.Decode(blockHashStr) - require.NoError(t, err) - copy(testBlockHash[:], hashSlice) - req := common.TestBuilderSubmitBlockRequest(sk, &common.BidTraceV2WithBlobFields{ - BidTrace: builderApiV1.BidTrace{ - BlockHash: testBlockHash, - Slot: slot, - BuilderPubkey: *pk, - ProposerPubkey: *pk, - ProposerFeeRecipient: feeRecipient, - Value: uint256.NewInt(collateral), - }, - }, spec.DataVersionDeneb) - entry, err := db.SaveBuilderBlockSubmission(req, nil, nil, time.Now(), time.Now().Add(time.Second), true, true, profile, optimisticSubmission, nil) - require.NoError(t, err) - err = db.UpsertBlockBuilderEntryAfterSubmission(entry, false) - require.NoError(t, err) - builderPubkey, err := req.Builder() - require.NoError(t, err) - return builderPubkey.String() -} - -func resetDatabase(t *testing.T) *DatabaseService { - t.Helper() - if !runDBTests { - t.Skip("Skipping database tests") - } - - // Wipe test database - _db, err := sqlx.Connect("postgres", testDBDSN) - require.NoError(t, err) - _, err = _db.Exec(`DROP SCHEMA public CASCADE; CREATE SCHEMA public;`) - require.NoError(t, err) - - db, err := NewDatabaseService(testDBDSN) - require.NoError(t, err) - return db -} - -func TestSaveValidatorRegistration(t *testing.T) { - db := resetDatabase(t) - - // reg1 is the initial registration - reg1 := createValidatorRegistration("0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908") - - // reg2 is reg1 with newer timestamp, same fields - should not insert - reg2 := createValidatorRegistration("0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908") - 
reg2.Timestamp = reg1.Timestamp + 1 - - // reg3 is reg1 with newer timestamp and new gaslimit - insert - reg3 := createValidatorRegistration("0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908") - reg3.Timestamp = reg1.Timestamp + 1 - reg3.GasLimit = reg1.GasLimit + 1 - - // reg4 is reg1 with newer timestamp and new fee_recipient - insert - reg4 := createValidatorRegistration("0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908") - reg4.Timestamp = reg1.Timestamp + 2 - reg4.FeeRecipient = "0xafbb8996515293fcd87ca09b5c6ffe5c17f043c6" - - // reg5 is reg1 with older timestamp and new fee_recipient - should not insert - reg5 := createValidatorRegistration("0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908") - reg5.Timestamp = reg1.Timestamp - 1 - reg5.FeeRecipient = "0x00bb8996515293fcd87ca09b5c6ffe5c17f043c6" - - // Require empty DB - cnt, err := db.NumValidatorRegistrationRows() - require.NoError(t, err) - require.Equal(t, uint64(0), cnt, "DB not empty to start") - - // Save reg1 - err = db.SaveValidatorRegistration(reg1) - require.NoError(t, err) - regX1, err := db.GetValidatorRegistration(reg1.Pubkey) - require.NoError(t, err) - require.Equal(t, reg1.FeeRecipient, regX1.FeeRecipient) - cnt, err = db.NumValidatorRegistrationRows() - require.NoError(t, err) - require.Equal(t, uint64(1), cnt) - - // Save reg2, should not insert - err = db.SaveValidatorRegistration(reg2) - require.NoError(t, err) - regX1, err = db.GetValidatorRegistration(reg1.Pubkey) - require.NoError(t, err) - require.Equal(t, reg1.Timestamp, regX1.Timestamp) - cnt, err = db.NumValidatorRegistrationRows() - require.NoError(t, err) - require.Equal(t, uint64(1), cnt) - - // Save reg3, should insert - err = db.SaveValidatorRegistration(reg3) - require.NoError(t, err) - regX1, err = db.GetValidatorRegistration(reg1.Pubkey) - require.NoError(t, err) - require.Equal(t, reg3.Timestamp, regX1.Timestamp) - require.Equal(t, reg3.GasLimit, regX1.GasLimit) - cnt, err = db.NumValidatorRegistrationRows() - require.NoError(t, err) - require.Equal(t, uint64(2), cnt) - - // Save reg4, should insert - err = db.SaveValidatorRegistration(reg4) - require.NoError(t, err) - regX1, err = db.GetValidatorRegistration(reg1.Pubkey) - require.NoError(t, err) - require.Equal(t, reg4.Timestamp, regX1.Timestamp) - require.Equal(t, reg4.GasLimit, regX1.GasLimit) - require.Equal(t, reg4.FeeRecipient, regX1.FeeRecipient) - cnt, err = db.NumValidatorRegistrationRows() - require.NoError(t, err) - require.Equal(t, uint64(3), cnt) - - // Save reg5, should not insert - err = db.SaveValidatorRegistration(reg5) - require.NoError(t, err) - regX1, err = db.GetValidatorRegistration(reg1.Pubkey) - require.NoError(t, err) - require.Equal(t, reg4.Timestamp, regX1.Timestamp) - require.Equal(t, reg4.GasLimit, regX1.GasLimit) - require.Equal(t, reg4.FeeRecipient, regX1.FeeRecipient) - cnt, err = db.NumValidatorRegistrationRows() - require.NoError(t, err) - require.Equal(t, uint64(3), cnt) -} - -func TestMigrations(t *testing.T) { - db := resetDatabase(t) - query := `SELECT COUNT(*) FROM ` + vars.TableMigrations + `;` - rowCount := 0 - err := db.DB.QueryRow(query).Scan(&rowCount) - require.NoError(t, err) - require.Len(t, migrations.Migrations.Migrations, rowCount) -} - -func TestSetBlockBuilderStatus(t *testing.T) { - db := resetDatabase(t) - // Four test builders, 2 with matching builder id, 2 with no builder id. 
- pubkey1 := insertTestBuilder(t, db) - pubkey2 := insertTestBuilder(t, db) - pubkey3 := insertTestBuilder(t, db) - pubkey4 := insertTestBuilder(t, db) - - // Builder 1 & 2 share a builder id. - err := db.SetBlockBuilderCollateral(pubkey1, builderID, collateralStr) - require.NoError(t, err) - err = db.SetBlockBuilderCollateral(pubkey2, builderID, collateralStr) - require.NoError(t, err) - - // Builder 3 has a different builder id. - err = db.SetBlockBuilderCollateral(pubkey3, "builder0x3", collateralStr) - require.NoError(t, err) - - // Before status change. - for _, v := range []string{pubkey1, pubkey2, pubkey3, pubkey4} { - builder, err := db.GetBlockBuilderByPubkey(v) - require.NoError(t, err) - require.False(t, builder.IsHighPrio) - require.False(t, builder.IsOptimistic) - require.False(t, builder.IsBlacklisted) - } - - // Update isOptimistic of builder 1 and 3. - err = db.SetBlockBuilderIDStatusIsOptimistic(pubkey1, true) - require.NoError(t, err) - err = db.SetBlockBuilderIDStatusIsOptimistic(pubkey3, true) - require.NoError(t, err) - - // After status change, builders 1, 2, 3 should be modified. - for _, v := range []string{pubkey1, pubkey2, pubkey3} { - builder, err := db.GetBlockBuilderByPubkey(v) - require.NoError(t, err) - // Just is optimistic should change. - require.True(t, builder.IsOptimistic) - } - // Builder 4 should be unchanged. - builder, err := db.GetBlockBuilderByPubkey(pubkey4) - require.NoError(t, err) - require.False(t, builder.IsHighPrio) - require.False(t, builder.IsOptimistic) - require.False(t, builder.IsBlacklisted) - - // Update status of just builder 1. - err = db.SetBlockBuilderStatus(pubkey1, common.BuilderStatus{ - IsHighPrio: true, - IsOptimistic: false, - }) - require.NoError(t, err) - // Builder 1 should be non-optimistic. - builder, err = db.GetBlockBuilderByPubkey(pubkey1) - require.NoError(t, err) - require.False(t, builder.IsOptimistic) - - // Builder 2 should be optimistic. - builder, err = db.GetBlockBuilderByPubkey(pubkey2) - require.NoError(t, err) - require.True(t, builder.IsOptimistic) -} - -func TestSetBlockBuilderCollateral(t *testing.T) { - db := resetDatabase(t) - pubkey := insertTestBuilder(t, db) - - // Before collateral change. - builder, err := db.GetBlockBuilderByPubkey(pubkey) - require.NoError(t, err) - require.Equal(t, "", builder.BuilderID) - require.Equal(t, "0", builder.Collateral) - - err = db.SetBlockBuilderCollateral(pubkey, builderID, collateralStr) - require.NoError(t, err) - - // After collateral change. 
- builder, err = db.GetBlockBuilderByPubkey(pubkey) - require.NoError(t, err) - require.Equal(t, builderID, builder.BuilderID) - require.Equal(t, collateralStr, builder.Collateral) -} - -func TestInsertBuilderDemotion(t *testing.T) { - pk, sk := getTestKeyPair(t) - var testBlockHash phase0.Hash32 - hashSlice, err := hexutil.Decode(blockHashStr) - require.NoError(t, err) - copy(testBlockHash[:], hashSlice) - trace := &common.BidTraceV2WithBlobFields{ - BidTrace: builderApiV1.BidTrace{ - BlockHash: testBlockHash, - Slot: slot, - BuilderPubkey: *pk, - ProposerPubkey: *pk, - ProposerFeeRecipient: feeRecipient, - Value: uint256.NewInt(collateral), - }, - } - - cases := []struct { - name string - req *common.VersionedSubmitBlockRequest - }{ - { - name: "Capella", - req: common.TestBuilderSubmitBlockRequest(sk, trace, spec.DataVersionCapella), - }, { - name: "Deneb", - req: common.TestBuilderSubmitBlockRequest(sk, trace, spec.DataVersionDeneb), - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - db := resetDatabase(t) - - err = db.InsertBuilderDemotion(c.req, errFoo) - require.NoError(t, err) - - entry, err := db.GetBuilderDemotion(trace) - require.NoError(t, err) - require.Equal(t, slot, entry.Slot) - require.Equal(t, pk.String(), entry.BuilderPubkey) - require.Equal(t, blockHashStr, entry.BlockHash) - }) - } -} - -func TestUpdateBuilderDemotion(t *testing.T) { - pk, sk := getTestKeyPair(t) - var testBlockHash phase0.Hash32 - hashSlice, err := hexutil.Decode(blockHashStr) - require.NoError(t, err) - copy(testBlockHash[:], hashSlice) - bt := &common.BidTraceV2WithBlobFields{ - BidTrace: builderApiV1.BidTrace{ - BlockHash: testBlockHash, - Slot: slot, - BuilderPubkey: *pk, - ProposerFeeRecipient: feeRecipient, - Value: uint256.NewInt(collateral), - }, - } - - cases := []struct { - name string - req *common.VersionedSubmitBlockRequest - beaconBlock *common.VersionedSignedProposal - }{ - { - name: "Capella", - req: common.TestBuilderSubmitBlockRequest(sk, bt, spec.DataVersionCapella), - beaconBlock: &common.VersionedSignedProposal{ - VersionedSignedProposal: eth2Api.VersionedSignedProposal{ - Version: spec.DataVersionCapella, - Capella: &capella.SignedBeaconBlock{}, - }, - }, - }, { - name: "Deneb", - req: common.TestBuilderSubmitBlockRequest(sk, bt, spec.DataVersionDeneb), - beaconBlock: &common.VersionedSignedProposal{ - VersionedSignedProposal: eth2Api.VersionedSignedProposal{ - Version: spec.DataVersionDeneb, - Deneb: ð2ApiV1Deneb.SignedBlockContents{}, - }, - }, - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - db := resetDatabase(t) - // Should return ErrNoRows because there is no demotion yet. - demotion, err := db.GetBuilderDemotion(bt) - require.Equal(t, sql.ErrNoRows, err) - require.Nil(t, demotion) - - // Insert demotion - err = db.InsertBuilderDemotion(c.req, errFoo) - require.NoError(t, err) - - // Now demotion should show up. - demotion, err = db.GetBuilderDemotion(bt) - require.NoError(t, err) - - // Signed block and validation should be invalid and empty. - require.False(t, demotion.SignedBeaconBlock.Valid) - require.Empty(t, demotion.SignedBeaconBlock.String) - require.False(t, demotion.SignedValidatorRegistration.Valid) - require.Empty(t, demotion.SignedValidatorRegistration.String) - - // Update demotion with the signedBlock and signedRegistration. 
- err = db.UpdateBuilderDemotion(bt, c.beaconBlock, &builderApiV1.SignedValidatorRegistration{}) - require.NoError(t, err) - - // Signed block and validation should now be valid and non-empty. - demotion, err = db.GetBuilderDemotion(bt) - require.NoError(t, err) - require.True(t, demotion.SignedBeaconBlock.Valid) - require.NotEmpty(t, demotion.SignedBeaconBlock.String) - require.True(t, demotion.SignedValidatorRegistration.Valid) - require.NotEmpty(t, demotion.SignedValidatorRegistration.String) - }) - } -} - -func TestGetBlockSubmissionEntry(t *testing.T) { - db := resetDatabase(t) - pubkey := insertTestBuilder(t, db) - - entry, err := db.GetBlockSubmissionEntry(slot, pubkey, blockHashStr) - require.NoError(t, err) - - require.Equal(t, profile.Decode, entry.DecodeDuration) - require.Equal(t, profile.Prechecks, entry.PrechecksDuration) - require.Equal(t, profile.Simulation, entry.SimulationDuration) - require.Equal(t, profile.RedisUpdate, entry.RedisUpdateDuration) - require.Equal(t, profile.Total, entry.TotalDuration) - - require.True(t, entry.OptimisticSubmission) - require.True(t, entry.EligibleAt.Valid) -} - -func TestGetBuilderSubmissions(t *testing.T) { - db := resetDatabase(t) - pubkey := insertTestBuilder(t, db) - - entries, err := db.GetBuilderSubmissions(GetBuilderSubmissionsFilters{ - BuilderPubkey: pubkey, - Limit: 1, - }) - require.NoError(t, err) - require.Len(t, entries, 1) - e := entries[0] - require.Equal(t, optimisticSubmission, e.OptimisticSubmission) - require.Equal(t, pubkey, e.BuilderPubkey) - require.Equal(t, feeRecipient.String(), e.ProposerFeeRecipient) - require.Equal(t, strconv.Itoa(collateral), e.Value) -} - -func TestUpsertTooLateGetPayload(t *testing.T) { - db := resetDatabase(t) - slot := uint64(12345) - pk := "0x8996515293fcd87ca09b5c6ffe5c17f043c6a1a3639cc9494a82ec8eb50a9b55c34b47675e573be40d9be308b1ca2908" - hash := "0x00bb8996515293fcd87ca09b5c6ffe5c17f043c600bb8996515293fcd8012343" - ms := uint64(4001) - err := db.InsertTooLateGetPayload(slot, pk, hash, 1, 2, 3, ms) - require.NoError(t, err) - - entries, err := db.GetTooLateGetPayload(slot) - require.NoError(t, err) - require.Len(t, entries, 1) - entry := entries[0] - require.Equal(t, pk, entry.ProposerPubkey) - require.Equal(t, hash, entry.BlockHash) - require.Equal(t, ms, entry.MsIntoSlot) - - // Duplicate. - err = db.InsertTooLateGetPayload(slot, pk, hash, 1, 2, 3, ms+1) - require.NoError(t, err) - entries, err = db.GetTooLateGetPayload(slot) - require.NoError(t, err) - // Check ms was not updated (we only want to save the first). - require.Equal(t, ms, entries[0].MsIntoSlot) - - // New block hash (to save equivocations). 
- hash2 := "0xFFbb8996515293fcd87ca09b5c6ffe5c17f043c600bb8996515293fcd8012343" - err = db.InsertTooLateGetPayload(slot, pk, hash2, 1, 2, 3, ms) - - require.NoError(t, err) - - entries, err = db.GetTooLateGetPayload(slot) - require.NoError(t, err) - require.Len(t, entries, 2) - entry = entries[1] - require.Equal(t, hash2, entry.BlockHash) -} diff --git a/mev-boost-relay/database/migrations/001_init_database.go b/mev-boost-relay/database/migrations/001_init_database.go deleted file mode 100644 index 6ea6b5295..000000000 --- a/mev-boost-relay/database/migrations/001_init_database.go +++ /dev/null @@ -1,147 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration001InitDatabase = &migrate.Migration{ - Id: "001-init-database", - Up: []string{` - CREATE TABLE IF NOT EXISTS ` + vars.TableValidatorRegistration + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - pubkey varchar(98) NOT NULL, - fee_recipient varchar(42) NOT NULL, - timestamp bigint NOT NULL, - gas_limit bigint NOT NULL, - signature text NOT NULL - ); - - CREATE UNIQUE INDEX IF NOT EXISTS ` + vars.TableValidatorRegistration + `_pubkey_timestamp_uidx ON ` + vars.TableValidatorRegistration + `(pubkey, timestamp DESC); - - - CREATE TABLE IF NOT EXISTS ` + vars.TableExecutionPayload + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - slot bigint NOT NULL, - proposer_pubkey varchar(98) NOT NULL, - block_hash varchar(66) NOT NULL, - - version text NOT NULL, - payload json NOT NULL - ); - - CREATE UNIQUE INDEX IF NOT EXISTS ` + vars.TableExecutionPayload + `_slot_pk_hash_idx ON ` + vars.TableExecutionPayload + `(slot, proposer_pubkey, block_hash); - - - CREATE TABLE IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - execution_payload_id bigint, - - -- simulation & verification results - sim_success boolean NOT NULL, - sim_error text NOT NULL, - - -- bidtrace data - signature text NOT NULL, - - slot bigint NOT NULL, - parent_hash varchar(66) NOT NULL, - block_hash varchar(66) NOT NULL, - - builder_pubkey varchar(98) NOT NULL, - proposer_pubkey varchar(98) NOT NULL, - proposer_fee_recipient varchar(42) NOT NULL, - - gas_used bigint NOT NULL, - gas_limit bigint NOT NULL, - - num_tx int NOT NULL, - value NUMERIC(48, 0), - - -- helpers - epoch bigint NOT NULL, - block_number bigint NOT NULL, - was_most_profitable boolean NOT NULL - ); - - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_slot_idx ON ` + vars.TableBuilderBlockSubmission + `("slot"); - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_blockhash_idx ON ` + vars.TableBuilderBlockSubmission + `("block_hash"); - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_blocknumber_idx ON ` + vars.TableBuilderBlockSubmission + `("block_number"); - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_builderpubkey_idx ON ` + vars.TableBuilderBlockSubmission + `("builder_pubkey"); - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_simsuccess_idx ON ` + vars.TableBuilderBlockSubmission + `("sim_success"); - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_mostprofit_idx ON ` + vars.TableBuilderBlockSubmission + 
`("was_most_profitable"); - CREATE INDEX IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_executionpayloadid_idx ON ` + vars.TableBuilderBlockSubmission + `("execution_payload_id"); - - - CREATE TABLE IF NOT EXISTS ` + vars.TableDeliveredPayload + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - signed_blinded_beacon_block json, - - epoch bigint NOT NULL, - slot bigint NOT NULL, - - builder_pubkey varchar(98) NOT NULL, - proposer_pubkey varchar(98) NOT NULL, - proposer_fee_recipient varchar(42) NOT NULL, - - parent_hash varchar(66) NOT NULL, - block_hash varchar(66) NOT NULL, - block_number bigint NOT NULL, - - gas_used bigint NOT NULL, - gas_limit bigint NOT NULL, - - num_tx int NOT NULL, - value NUMERIC(48, 0), - - UNIQUE (slot, proposer_pubkey, block_hash) - ); - - CREATE INDEX IF NOT EXISTS ` + vars.TableDeliveredPayload + `_slot_idx ON ` + vars.TableDeliveredPayload + `("slot"); - CREATE INDEX IF NOT EXISTS ` + vars.TableDeliveredPayload + `_blockhash_idx ON ` + vars.TableDeliveredPayload + `("block_hash"); - CREATE INDEX IF NOT EXISTS ` + vars.TableDeliveredPayload + `_blocknumber_idx ON ` + vars.TableDeliveredPayload + `("block_number"); - CREATE INDEX IF NOT EXISTS ` + vars.TableDeliveredPayload + `_proposerpubkey_idx ON ` + vars.TableDeliveredPayload + `("proposer_pubkey"); - CREATE INDEX IF NOT EXISTS ` + vars.TableDeliveredPayload + `_builderpubkey_idx ON ` + vars.TableDeliveredPayload + `("builder_pubkey"); - CREATE INDEX IF NOT EXISTS ` + vars.TableDeliveredPayload + `_value_idx ON ` + vars.TableDeliveredPayload + `("value"); - - - CREATE TABLE IF NOT EXISTS ` + vars.TableBlockBuilder + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - builder_pubkey varchar(98) NOT NULL, - description text NOT NULL, - - is_high_prio boolean NOT NULL, - is_blacklisted boolean NOT NULL, - - last_submission_id bigint references ` + vars.TableBuilderBlockSubmission + `(id) on delete set null, - last_submission_slot bigint NOT NULL, - - num_submissions_total bigint NOT NULL, - num_submissions_simerror bigint NOT NULL, - num_submissions_topbid bigint NOT NULL, - - num_sent_getpayload bigint NOT NULL DEFAULT 0, - - UNIQUE (builder_pubkey) - ); - `}, - Down: []string{` - DROP TABLE IF EXISTS ` + vars.TableBuilderBlockSubmission + `; - DROP TABLE IF EXISTS ` + vars.TableDeliveredPayload + `; - DROP TABLE IF EXISTS ` + vars.TableBlockBuilder + `; - DROP TABLE IF EXISTS ` + vars.TableExecutionPayload + `; - DROP TABLE IF EXISTS ` + vars.TableValidatorRegistration + `; - `}, - DisableTransactionUp: false, - DisableTransactionDown: false, -} diff --git a/mev-boost-relay/database/migrations/002_bid_remove_isbest_add_receivedat.go b/mev-boost-relay/database/migrations/002_bid_remove_isbest_add_receivedat.go deleted file mode 100644 index b7b3bb229..000000000 --- a/mev-boost-relay/database/migrations/002_bid_remove_isbest_add_receivedat.go +++ /dev/null @@ -1,24 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration002RemoveIsBestAddReceivedAt = &migrate.Migration{ - Id: "002-remove-isbest-add-receivedat", - Up: []string{` - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD received_at timestamp; - - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` DROP COLUMN was_most_profitable; - DROP INDEX IF EXISTS ` + vars.TableBuilderBlockSubmission + 
`_mostprofit_idx; - - ALTER TABLE ` + vars.TableBlockBuilder + ` DROP COLUMN num_submissions_topbid; - `, ` - CREATE INDEX CONCURRENTLY IF NOT EXISTS ` + vars.TableBuilderBlockSubmission + `_received_idx ON ` + vars.TableBuilderBlockSubmission + `(received_at DESC); - `}, - Down: []string{}, - - DisableTransactionUp: true, // cannot create index concurrently inside a transaction - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/003_bid_add_eligibleat_payload_add_signedat.go b/mev-boost-relay/database/migrations/003_bid_add_eligibleat_payload_add_signedat.go deleted file mode 100644 index d09e5cff6..000000000 --- a/mev-boost-relay/database/migrations/003_bid_add_eligibleat_payload_add_signedat.go +++ /dev/null @@ -1,18 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration003AddEligibleAtSignedAt = &migrate.Migration{ - Id: "003-add-eligibleat-add-signedat", - Up: []string{` - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD eligible_at timestamp; - ALTER TABLE ` + vars.TableDeliveredPayload + ` ADD signed_at timestamp; - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/004_blocked_validator.go b/mev-boost-relay/database/migrations/004_blocked_validator.go deleted file mode 100644 index 1d534ff3a..000000000 --- a/mev-boost-relay/database/migrations/004_blocked_validator.go +++ /dev/null @@ -1,26 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration004BlockedValidator = &migrate.Migration{ - Id: "004-blocked-validator", - Up: []string{` - CREATE TABLE IF NOT EXISTS ` + vars.TableBlockedValidator + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - pubkey varchar(98) NOT NULL, - is_blocked boolean NOT NULL default false, - notes text NOT NULL default '', - - UNIQUE (pubkey) - ); - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/005_remove_blocked_validator.go b/mev-boost-relay/database/migrations/005_remove_blocked_validator.go deleted file mode 100644 index b33767614..000000000 --- a/mev-boost-relay/database/migrations/005_remove_blocked_validator.go +++ /dev/null @@ -1,17 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration005RemoveBlockedValidator = &migrate.Migration{ - Id: "005-remove-blocked-validator", - Up: []string{` - DROP TABLE IF EXISTS ` + vars.TableBlockedValidator + `; - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/006_create_too_late_get_payloads.go b/mev-boost-relay/database/migrations/006_create_too_late_get_payloads.go deleted file mode 100644 index 600f3b283..000000000 --- a/mev-boost-relay/database/migrations/006_create_too_late_get_payloads.go +++ /dev/null @@ -1,34 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration006CreateTooLateGetPayload = &migrate.Migration{ - Id: "006-create-too-late-get-payload", - Up: []string{` - CREATE TABLE IF NOT EXISTS ` + 
vars.TableTooLateGetPayload + ` ( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - slot bigint NOT NULL, - - slot_start_timestamp bigint NOT NULL, - request_timestamp bigint NOT NULL, - decode_timestamp bigint NOT NULL, - - proposer_pubkey varchar(98) NOT NULL, - block_hash varchar(66) NOT NULL, - ms_into_slot bigint NOT NULL - ); - - CREATE UNIQUE INDEX IF NOT EXISTS ` + vars.TableTooLateGetPayload + `_slot_pk_hash_idx ON ` + vars.TableTooLateGetPayload + `(slot, proposer_pubkey, block_hash); - `, ` - ALTER TABLE ` + vars.TableDeliveredPayload + ` ADD publish_ms bigint NOT NULL DEFAULT 0; - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/007_builder_submission_was_simulated.go b/mev-boost-relay/database/migrations/007_builder_submission_was_simulated.go deleted file mode 100644 index b9617a224..000000000 --- a/mev-boost-relay/database/migrations/007_builder_submission_was_simulated.go +++ /dev/null @@ -1,18 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration007BuilderSubmissionWasSimulated = &migrate.Migration{ - Id: "007-builder-submission-was-simulated", - Up: []string{` - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD was_simulated boolean NOT NULL DEFAULT true; - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD sim_req_error text NOT NULL DEFAULT ''; - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/008_optimistic.go b/mev-boost-relay/database/migrations/008_optimistic.go deleted file mode 100644 index c3d599df0..000000000 --- a/mev-boost-relay/database/migrations/008_optimistic.go +++ /dev/null @@ -1,53 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -var Migration008Optimistic = &migrate.Migration{ - Id: "008-optimistic", - Up: []string{ - ` - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD optimistic_submission bool NOT NULL default false; - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD decode_duration bigint NOT NULL default 0; - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD prechecks_duration bigint NOT NULL default 0; - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD simulation_duration bigint NOT NULL default 0; - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD redis_update_duration bigint NOT NULL default 0; - ALTER TABLE ` + vars.TableBuilderBlockSubmission + ` ADD total_duration bigint NOT NULL default 0; - `, ` - ALTER TABLE ` + vars.TableBlockBuilder + ` ADD is_optimistic bool NOT NULL default false; - ALTER TABLE ` + vars.TableBlockBuilder + ` ADD collateral NUMERIC(48, 0) NOT NULL default 0; - ALTER TABLE ` + vars.TableBlockBuilder + ` ADD builder_id varchar(98) NOT NULL default ''; - `, ` - CREATE TABLE IF NOT EXISTS ` + vars.TableBuilderDemotions + `( - id bigint GENERATED BY DEFAULT AS IDENTITY PRIMARY KEY, - inserted_at timestamp NOT NULL default current_timestamp, - - submit_block_request json, - signed_beacon_block json, - signed_validator_registration json, - - epoch bigint NOT NULL, - slot bigint NOT NULL, - - builder_pubkey varchar(98) NOT NULL, - proposer_pubkey varchar(98) NOT NULL, - - value NUMERIC(48, 0), - - fee_recipient varchar(42) NOT NULL, 
- - block_hash varchar(66) NOT NULL, - - sim_error text NOT NULL, - - UNIQUE (builder_pubkey, block_hash) - ); - `, - }, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/009_remove_blockbuilder_reference.go b/mev-boost-relay/database/migrations/009_remove_blockbuilder_reference.go deleted file mode 100644 index 6aede71e0..000000000 --- a/mev-boost-relay/database/migrations/009_remove_blockbuilder_reference.go +++ /dev/null @@ -1,23 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -// Migration009BlockBuilderRemoveReference removes the foreign key constraint from -// the blockbuilders table to the latest submissions by a builder. -// -// This reference makes it impossible to migrate to a new database without having -// all bids there first (which is the bulk of the data). Just removing the foreign key -// constraint is the easiest way to solve this constraint, without downsides. -var Migration009BlockBuilderRemoveReference = &migrate.Migration{ - Id: "009-block-builder-remove-reference", - Up: []string{` - ALTER TABLE ` + vars.TableBlockBuilder + ` DROP CONSTRAINT "` + vars.TableBlockBuilder + `_last_submission_id_fkey"; - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/010_payload_add_blob_fields.go b/mev-boost-relay/database/migrations/010_payload_add_blob_fields.go deleted file mode 100644 index 5d76d36e3..000000000 --- a/mev-boost-relay/database/migrations/010_payload_add_blob_fields.go +++ /dev/null @@ -1,21 +0,0 @@ -package migrations - -import ( - "github.com/flashbots/mev-boost-relay/database/vars" - migrate "github.com/rubenv/sql-migrate" -) - -// Migration010PayloadAddBlobFields adds blob related fields for the Dencun fork -// such as the number of blobs, blob gas used and excess blob gas -var Migration010PayloadAddBlobFields = &migrate.Migration{ - Id: "010-payload-add-blob-fields", - Up: []string{` - ALTER TABLE ` + vars.TableDeliveredPayload + ` ADD blob_gas_used bigint NOT NULL DEFAULT 0; - ALTER TABLE ` + vars.TableDeliveredPayload + ` ADD excess_blob_gas bigint NOT NULL DEFAULT 0; - ALTER TABLE ` + vars.TableDeliveredPayload + ` ADD num_blobs int NOT NULL DEFAULT 0; - `}, - Down: []string{}, - - DisableTransactionUp: true, - DisableTransactionDown: true, -} diff --git a/mev-boost-relay/database/migrations/migration.go b/mev-boost-relay/database/migrations/migration.go deleted file mode 100644 index 82b4e1ffa..000000000 --- a/mev-boost-relay/database/migrations/migration.go +++ /dev/null @@ -1,21 +0,0 @@ -// Package migrations contains all the migration files -package migrations - -import ( - migrate "github.com/rubenv/sql-migrate" -) - -var Migrations = migrate.MemoryMigrationSource{ - Migrations: []*migrate.Migration{ - Migration001InitDatabase, - Migration002RemoveIsBestAddReceivedAt, - Migration003AddEligibleAtSignedAt, - Migration004BlockedValidator, - Migration005RemoveBlockedValidator, - Migration006CreateTooLateGetPayload, - Migration007BuilderSubmissionWasSimulated, - Migration008Optimistic, - Migration009BlockBuilderRemoveReference, - Migration010PayloadAddBlobFields, - }, -} diff --git a/mev-boost-relay/database/mockdb.go b/mev-boost-relay/database/mockdb.go deleted file mode 100644 index ad87df1a7..000000000 --- a/mev-boost-relay/database/mockdb.go +++ /dev/null @@ -1,196 +0,0 @@ -package database 
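// MockDB (defined below) is, as far as the code in this file shows, an
// in-memory stand-in for the relay database used by tests such as
// datastore_test.go: its ExecPayloads, Builders, Demotions and Refunds maps
// take the place of the corresponding Postgres tables, while most of the
// remaining methods are no-ops that return zero values.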
- -import ( - "database/sql" - "fmt" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/flashbots/mev-boost-relay/common" -) - -type MockDB struct { - ExecPayloads map[string]*ExecutionPayloadEntry - Builders map[string]*BlockBuilderEntry - Demotions map[string]bool - Refunds map[string]bool -} - -func (db MockDB) NumRegisteredValidators() (count uint64, err error) { - return 0, nil -} - -func (db MockDB) SaveValidatorRegistration(entry ValidatorRegistrationEntry) error { - return nil -} - -func (db MockDB) GetValidatorRegistration(pubkey string) (*ValidatorRegistrationEntry, error) { - return nil, nil -} - -func (db MockDB) GetValidatorRegistrationsForPubkeys(pubkeys []string) (entries []*ValidatorRegistrationEntry, err error) { - return nil, nil -} - -func (db MockDB) GetLatestValidatorRegistrations(timestampOnly bool) ([]*ValidatorRegistrationEntry, error) { - return nil, nil -} - -func (db MockDB) SaveBuilderBlockSubmission(payload *common.VersionedSubmitBlockRequest, requestError, validationError error, receivedAt, eligibleAt time.Time, wasSimulated, saveExecPayload bool, profile common.Profile, optimisticSubmission bool, proof *common.InclusionProof) (entry *BuilderBlockSubmissionEntry, err error) { - return nil, nil -} - -func (db MockDB) GetExecutionPayloadEntryByID(executionPayloadID int64) (entry *ExecutionPayloadEntry, err error) { - return nil, nil -} - -func (db MockDB) GetExecutionPayloadEntryBySlotPkHash(slot uint64, proposerPubkey, blockHash string) (entry *ExecutionPayloadEntry, err error) { - key := fmt.Sprintf("%d-%s-%s", slot, proposerPubkey, blockHash) - entry, ok := db.ExecPayloads[key] - if !ok { - return nil, sql.ErrNoRows - } - return entry, nil -} - -func (db MockDB) GetExecutionPayloads(idFirst, idLast uint64) (entries []*ExecutionPayloadEntry, err error) { - return nil, nil -} - -func (db MockDB) DeleteExecutionPayloads(idFirst, idLast uint64) error { - return nil -} - -func (db MockDB) GetBlockSubmissionEntry(slot uint64, proposerPubkey, blockHash string) (entry *BuilderBlockSubmissionEntry, err error) { - return nil, nil -} - -func (db MockDB) GetRecentDeliveredPayloads(filters GetPayloadsFilters) ([]*DeliveredPayloadEntry, error) { - return nil, nil -} - -func (db MockDB) GetDeliveredPayloads(idFirst, idLast uint64) (entries []*DeliveredPayloadEntry, err error) { - return nil, nil -} - -func (db MockDB) GetNumDeliveredPayloads() (uint64, error) { - return 0, nil -} - -func (db MockDB) GetBuilderSubmissions(filters GetBuilderSubmissionsFilters) ([]*BuilderBlockSubmissionEntry, error) { - return nil, nil -} - -func (db MockDB) GetBuilderSubmissionsBySlots(slotFrom, slotTo uint64) (entries []*BuilderBlockSubmissionEntry, err error) { - return nil, nil -} - -func (db MockDB) SaveDeliveredPayload(bidTrace *common.BidTraceV2WithBlobFields, signedBlindedBeaconBlock *common.VersionedSignedBlindedBeaconBlock, signedAt time.Time, publishMs uint64) error { - return nil -} - -func (db MockDB) UpsertBlockBuilderEntryAfterSubmission(lastSubmission *BuilderBlockSubmissionEntry, isError bool) error { - return nil -} - -func (db MockDB) GetBlockBuilders() ([]*BlockBuilderEntry, error) { - res := []*BlockBuilderEntry{} - for _, v := range db.Builders { - res = append(res, v) - } - return res, nil -} - -func (db MockDB) GetBlockBuilderByPubkey(pubkey string) (*BlockBuilderEntry, error) { - builder, ok := db.Builders[pubkey] - if !ok { - return nil, fmt.Errorf("builder with pubkey %v not in Builders map", pubkey) //nolint:goerr113 - } - 
return builder, nil -} - -func (db MockDB) SetBlockBuilderStatus(pubkey string, status common.BuilderStatus) error { - builder, ok := db.Builders[pubkey] - if !ok { - return fmt.Errorf("builder with pubkey %v not in Builders map", pubkey) //nolint:goerr113 - } - - // Single key. - builder.IsHighPrio = status.IsHighPrio - builder.IsBlacklisted = status.IsBlacklisted - builder.IsOptimistic = status.IsOptimistic - return nil -} - -func (db MockDB) SetBlockBuilderIDStatusIsOptimistic(pubkey string, isOptimistic bool) error { - builder, ok := db.Builders[pubkey] - if !ok { - return fmt.Errorf("builder with pubkey %v not in Builders map", pubkey) //nolint:goerr113 - } - for _, v := range db.Builders { - if v.BuilderID == builder.BuilderID { - v.IsOptimistic = isOptimistic - } - } - return nil -} - -func (db MockDB) SetBlockBuilderCollateral(pubkey, builderID, collateral string) error { - builder, ok := db.Builders[pubkey] - if !ok { - return fmt.Errorf("builder with pubkey %v not in Builders map", pubkey) //nolint:goerr113 - } - builder.BuilderID = builderID - builder.Collateral = collateral - return nil -} - -func (db MockDB) IncBlockBuilderStatsAfterGetHeader(slot uint64, blockhash string) error { - return nil -} - -func (db MockDB) IncBlockBuilderStatsAfterGetPayload(builderPubkey string) error { - return nil -} - -func (db MockDB) InsertBuilderDemotion(submitBlockRequest *common.VersionedSubmitBlockRequest, simError error) error { - pubkey, err := submitBlockRequest.Builder() - if err != nil { - return err - } - db.Demotions[pubkey.String()] = true - return nil -} - -func (db MockDB) UpdateBuilderDemotion(trace *common.BidTraceV2WithBlobFields, signedBlock *common.VersionedSignedProposal, signedRegistration *builderApiV1.SignedValidatorRegistration) error { - pubkey := trace.BuilderPubkey.String() - _, ok := db.Builders[pubkey] - if !ok { - return fmt.Errorf("builder with pubkey %v not in Builders map", pubkey) //nolint:goerr113 - } - if !db.Demotions[pubkey] { - return fmt.Errorf("builder with pubkey %v is not demoted", pubkey) //nolint:goerr113 - } - db.Refunds[pubkey] = true - return nil -} - -func (db MockDB) GetBuilderDemotion(trace *common.BidTraceV2WithBlobFields) (*BuilderDemotionEntry, error) { - pubkey := trace.BuilderPubkey.String() - _, ok := db.Builders[pubkey] - if !ok { - return nil, fmt.Errorf("builder with pubkey %v not in Builders map", pubkey) //nolint:goerr113 - } - if db.Demotions[pubkey] { - return &BuilderDemotionEntry{}, nil - } - return nil, nil -} - -func (db MockDB) GetTooLateGetPayload(slot uint64) (entries []*TooLateGetPayloadEntry, err error) { - return nil, nil -} - -func (db MockDB) InsertTooLateGetPayload(slot uint64, proposerPubkey, blockHash string, slotStart, requestTime, decodeTime, msIntoSlot uint64) error { - return nil -} diff --git a/mev-boost-relay/database/types.go b/mev-boost-relay/database/types.go deleted file mode 100644 index 21d4a74d3..000000000 --- a/mev-boost-relay/database/types.go +++ /dev/null @@ -1,268 +0,0 @@ -package database - -import ( - "database/sql" - "strconv" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/flashbots/go-boost-utils/utils" -) - -func NewNullInt64(i int64) sql.NullInt64 { - return sql.NullInt64{ - Int64: i, - Valid: true, - } -} - -func NewNullString(s string) sql.NullString { - return sql.NullString{ - String: s, - Valid: true, - } -} - -// NewNullTime returns a sql.NullTime with the given time.Time. If the time is -// the zero value, the NullTime is invalid. 
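// An illustrative usage sketch, assuming only the helper defined below:
//
//	NewNullTime(time.Time{}).Valid // false: the zero time is stored as NULL
//	NewNullTime(time.Now()).Valid  // true:  a real timestamp is stored as-is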
-func NewNullTime(t time.Time) sql.NullTime { - return sql.NullTime{ - Time: t, - Valid: t != time.Time{}, - } -} - -type GetPayloadsFilters struct { - Slot int64 - Cursor int64 - Limit uint64 - BlockHash string - BlockNumber int64 - ProposerPubkey string - BuilderPubkey string - OrderByValue int8 -} - -type GetBuilderSubmissionsFilters struct { - Slot int64 - Limit int64 - BlockHash string - BlockNumber int64 - BuilderPubkey string -} - -type ValidatorRegistrationEntry struct { - ID int64 `db:"id"` - InsertedAt time.Time `db:"inserted_at"` - - Pubkey string `db:"pubkey"` - FeeRecipient string `db:"fee_recipient"` - Timestamp uint64 `db:"timestamp"` - GasLimit uint64 `db:"gas_limit"` - Signature string `db:"signature"` -} - -func (reg ValidatorRegistrationEntry) ToSignedValidatorRegistration() (*builderApiV1.SignedValidatorRegistration, error) { - pubkey, err := utils.HexToPubkey(reg.Pubkey) - if err != nil { - return nil, err - } - - feeRec, err := utils.HexToAddress(reg.FeeRecipient) - if err != nil { - return nil, err - } - - sig, err := utils.HexToSignature(reg.Signature) - if err != nil { - return nil, err - } - - return &builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - Pubkey: pubkey, - FeeRecipient: feeRec, - Timestamp: time.Unix(int64(reg.Timestamp), 0), - GasLimit: reg.GasLimit, - }, - Signature: sig, - }, nil -} - -func SignedValidatorRegistrationToEntry(valReg builderApiV1.SignedValidatorRegistration) ValidatorRegistrationEntry { - return ValidatorRegistrationEntry{ - Pubkey: valReg.Message.Pubkey.String(), - FeeRecipient: valReg.Message.FeeRecipient.String(), - Timestamp: uint64(valReg.Message.Timestamp.Unix()), - GasLimit: valReg.Message.GasLimit, - Signature: valReg.Signature.String(), - } -} - -type ExecutionPayloadEntry struct { - ID int64 `db:"id"` - InsertedAt time.Time `db:"inserted_at"` - - Slot uint64 `db:"slot"` - ProposerPubkey string `db:"proposer_pubkey"` - BlockHash string `db:"block_hash"` - - Version string `db:"version"` - Payload string `db:"payload"` - - // BOLT: merkle proofs of inclusion - Proofs string `db:"proofs"` -} - -var ExecutionPayloadEntryCSVHeader = []string{"id", "inserted_at", "slot", "proposer_pubkey", "block_hash", "version", "payload"} - -func (e *ExecutionPayloadEntry) ToCSVRecord() []string { - return []string{ - strconv.FormatInt(e.ID, 10), - e.InsertedAt.UTC().String(), - strconv.FormatUint(e.Slot, 10), - e.ProposerPubkey, - e.BlockHash, - e.Version, - e.Payload, - } -} - -type BuilderBlockSubmissionEntry struct { - ID int64 `db:"id"` - InsertedAt time.Time `db:"inserted_at"` - ReceivedAt sql.NullTime `db:"received_at"` - EligibleAt sql.NullTime `db:"eligible_at"` - - // Delivered ExecutionPayload - ExecutionPayloadID sql.NullInt64 `db:"execution_payload_id"` - - // Sim Result - WasSimulated bool `db:"was_simulated"` - SimSuccess bool `db:"sim_success"` - SimError string `db:"sim_error"` - SimReqError string `db:"sim_req_error"` - - // BidTrace data - Signature string `db:"signature"` - - Slot uint64 `db:"slot"` - ParentHash string `db:"parent_hash"` - BlockHash string `db:"block_hash"` - - BuilderPubkey string `db:"builder_pubkey"` - ProposerPubkey string `db:"proposer_pubkey"` - ProposerFeeRecipient string `db:"proposer_fee_recipient"` - - GasUsed uint64 `db:"gas_used"` - GasLimit uint64 `db:"gas_limit"` - - NumTx uint64 `db:"num_tx"` - Value string `db:"value"` - - // Helpers - Epoch uint64 `db:"epoch"` - BlockNumber uint64 `db:"block_number"` - - // Profile data. 
- DecodeDuration uint64 `db:"decode_duration"` - PrechecksDuration uint64 `db:"prechecks_duration"` - SimulationDuration uint64 `db:"simulation_duration"` - RedisUpdateDuration uint64 `db:"redis_update_duration"` - TotalDuration uint64 `db:"total_duration"` - OptimisticSubmission bool `db:"optimistic_submission"` - // BOLT: merkle inclusion proofs - Preconfirmations string `db:"preconfirmations"` -} - -type DeliveredPayloadEntry struct { - ID int64 `db:"id"` - InsertedAt time.Time `db:"inserted_at"` - SignedAt sql.NullTime `db:"signed_at"` - - SignedBlindedBeaconBlock sql.NullString `db:"signed_blinded_beacon_block"` - - Slot uint64 `db:"slot"` - Epoch uint64 `db:"epoch"` - - BuilderPubkey string `db:"builder_pubkey"` - ProposerPubkey string `db:"proposer_pubkey"` - ProposerFeeRecipient string `db:"proposer_fee_recipient"` - - ParentHash string `db:"parent_hash"` - BlockHash string `db:"block_hash"` - BlockNumber uint64 `db:"block_number"` - - GasUsed uint64 `db:"gas_used"` - GasLimit uint64 `db:"gas_limit"` - - NumTx uint64 `db:"num_tx"` - Value string `db:"value"` - - NumBlobs uint64 `db:"num_blobs"` - BlobGasUsed uint64 `db:"blob_gas_used"` - ExcessBlobGas uint64 `db:"excess_blob_gas"` - - PublishMs uint64 `db:"publish_ms"` -} - -type BlockBuilderEntry struct { - ID int64 `db:"id" json:"id"` - InsertedAt time.Time `db:"inserted_at" json:"inserted_at"` - - BuilderPubkey string `db:"builder_pubkey" json:"builder_pubkey"` - Description string `db:"description" json:"description"` - - IsHighPrio bool `db:"is_high_prio" json:"is_high_prio"` - IsBlacklisted bool `db:"is_blacklisted" json:"is_blacklisted"` - IsOptimistic bool `db:"is_optimistic" json:"is_optimistic"` - - Collateral string `db:"collateral" json:"collateral"` - BuilderID string `db:"builder_id" json:"builder_id"` - - LastSubmissionID sql.NullInt64 `db:"last_submission_id" json:"last_submission_id"` - LastSubmissionSlot uint64 `db:"last_submission_slot" json:"last_submission_slot"` - - NumSubmissionsTotal uint64 `db:"num_submissions_total" json:"num_submissions_total"` - NumSubmissionsSimError uint64 `db:"num_submissions_simerror" json:"num_submissions_simerror"` - - NumSentGetPayload uint64 `db:"num_sent_getpayload" json:"num_sent_getpayload"` -} - -type BuilderDemotionEntry struct { - ID int64 `db:"id"` - InsertedAt time.Time `db:"inserted_at"` - - SubmitBlockRequest sql.NullString `db:"submit_block_request"` - SignedBeaconBlock sql.NullString `db:"signed_beacon_block"` - SignedValidatorRegistration sql.NullString `db:"signed_validator_registration"` - - Slot uint64 `db:"slot"` - Epoch uint64 `db:"epoch"` - - BuilderPubkey string `db:"builder_pubkey"` - ProposerPubkey string `db:"proposer_pubkey"` - - Value string `db:"value"` - - FeeRecipient string `db:"fee_recipient"` - - BlockHash string `db:"block_hash"` - - SimError string `db:"sim_error"` -} - -type TooLateGetPayloadEntry struct { - ID int64 `db:"id"` - InsertedAt time.Time `db:"inserted_at"` - - Slot uint64 `db:"slot"` - - SlotStartTimestamp uint64 `db:"slot_start_timestamp"` - RequestTimestamp uint64 `db:"request_timestamp"` - DecodeTimestamp uint64 `db:"decode_timestamp"` - - ProposerPubkey string `db:"proposer_pubkey"` - BlockHash string `db:"block_hash"` - MsIntoSlot uint64 `db:"ms_into_slot"` -} diff --git a/mev-boost-relay/database/typesconv.go b/mev-boost-relay/database/typesconv.go deleted file mode 100644 index 3725c472d..000000000 --- a/mev-boost-relay/database/typesconv.go +++ /dev/null @@ -1,123 +0,0 @@ -package database - -import ( - "encoding/json" - 
"errors" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/flashbots/mev-boost-relay/common" -) - -var ErrUnsupportedExecutionPayload = errors.New("unsupported execution payload version") - -func PayloadToExecPayloadEntry(payload *common.VersionedSubmitBlockRequest) (*ExecutionPayloadEntry, error) { - var _payload []byte - var version string - var err error - - switch payload.Version { - case spec.DataVersionCapella: - _payload, err = json.Marshal(payload.Capella.ExecutionPayload) - if err != nil { - return nil, err - } - version = common.ForkVersionStringCapella - case spec.DataVersionDeneb: - _payload, err = json.Marshal(builderApiDeneb.ExecutionPayloadAndBlobsBundle{ - ExecutionPayload: payload.Deneb.ExecutionPayload, - BlobsBundle: payload.Deneb.BlobsBundle, - }) - if err != nil { - return nil, err - } - version = common.ForkVersionStringDeneb - case spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - return nil, ErrUnsupportedExecutionPayload - } - - submission, err := common.GetBlockSubmissionInfo(payload) - if err != nil { - return nil, err - } - - return &ExecutionPayloadEntry{ - Slot: submission.BidTrace.Slot, - ProposerPubkey: submission.BidTrace.ProposerPubkey.String(), - BlockHash: submission.BidTrace.BlockHash.String(), - - Version: version, - Payload: string(_payload), - }, nil -} - -func DeliveredPayloadEntryToBidTraceV2JSON(payload *DeliveredPayloadEntry) common.BidTraceV2JSON { - return common.BidTraceV2JSON{ - Slot: payload.Slot, - ParentHash: payload.ParentHash, - BlockHash: payload.BlockHash, - BuilderPubkey: payload.BuilderPubkey, - ProposerPubkey: payload.ProposerPubkey, - ProposerFeeRecipient: payload.ProposerFeeRecipient, - GasLimit: payload.GasLimit, - GasUsed: payload.GasUsed, - Value: payload.Value, - NumTx: payload.NumTx, - BlockNumber: payload.BlockNumber, - } -} - -func BuilderSubmissionEntryToBidTraceV2WithTimestampJSON(payload *BuilderBlockSubmissionEntry) common.BidTraceV2WithTimestampJSON { - timestamp := payload.InsertedAt - if payload.ReceivedAt.Valid { - timestamp = payload.ReceivedAt.Time - } - - return common.BidTraceV2WithTimestampJSON{ - Timestamp: timestamp.Unix(), - TimestampMs: timestamp.UnixMilli(), - OptimisticSubmission: payload.OptimisticSubmission, - BidTraceV2JSON: common.BidTraceV2JSON{ - Slot: payload.Slot, - ParentHash: payload.ParentHash, - BlockHash: payload.BlockHash, - BuilderPubkey: payload.BuilderPubkey, - ProposerPubkey: payload.ProposerPubkey, - ProposerFeeRecipient: payload.ProposerFeeRecipient, - GasLimit: payload.GasLimit, - GasUsed: payload.GasUsed, - Value: payload.Value, - NumTx: payload.NumTx, - BlockNumber: payload.BlockNumber, - }, - } -} - -func ExecutionPayloadEntryToExecutionPayload(executionPayloadEntry *ExecutionPayloadEntry) (payload *builderApi.VersionedSubmitBlindedBlockResponse, err error) { - payloadVersion := executionPayloadEntry.Version - if payloadVersion == common.ForkVersionStringDeneb { - executionPayload := new(builderApiDeneb.ExecutionPayloadAndBlobsBundle) - err = json.Unmarshal([]byte(executionPayloadEntry.Payload), executionPayload) - if err != nil { - return nil, err - } - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionDeneb, - Deneb: executionPayload, - }, nil - } else if payloadVersion == 
common.ForkVersionStringCapella { - executionPayload := new(capella.ExecutionPayload) - err = json.Unmarshal([]byte(executionPayloadEntry.Payload), executionPayload) - if err != nil { - return nil, err - } - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionCapella, - Capella: executionPayload, - }, nil - } else { - return nil, ErrUnsupportedExecutionPayload - } -} diff --git a/mev-boost-relay/database/typesconv_test.go b/mev-boost-relay/database/typesconv_test.go deleted file mode 100644 index 8155c08e3..000000000 --- a/mev-boost-relay/database/typesconv_test.go +++ /dev/null @@ -1,48 +0,0 @@ -package database - -import ( - "testing" - "time" - - "github.com/flashbots/mev-boost-relay/common" - "github.com/stretchr/testify/require" -) - -func TestExecutionPayloadEntryToExecutionPayload(t *testing.T) { - filename := "../testdata/executionPayloadCapella_Goerli.json.gz" - payloadBytes := common.LoadGzippedBytes(t, filename) - entry := &ExecutionPayloadEntry{ - ID: 123, - Slot: 5552306, - InsertedAt: time.Unix(1685616301, 0), - - ProposerPubkey: "0x8559727ee65c295279332198029c939557f4d2aba0751fc55f71d0733b8aa17cd0301232a7f21a895f81eacf55c97ec4", - BlockHash: "0x1bafdc454116b605005364976b134d761dd736cb4788d25c835783b46daeb121", - Version: common.ForkVersionStringCapella, - Payload: string(payloadBytes), - } - - payload, err := ExecutionPayloadEntryToExecutionPayload(entry) - require.NoError(t, err) - require.Equal(t, "0x1bafdc454116b605005364976b134d761dd736cb4788d25c835783b46daeb121", payload.Capella.BlockHash.String()) -} - -func TestExecutionPayloadEntryToExecutionPayloadDeneb(t *testing.T) { - filename := "../testdata/executionPayloadAndBlobsBundleDeneb_Goerli.json.gz" - payloadBytes := common.LoadGzippedBytes(t, filename) - entry := &ExecutionPayloadEntry{ - ID: 123, - Slot: 7432891, - InsertedAt: time.Unix(1685616301, 0), - - ProposerPubkey: "0x8559727ee65c295279332198029c939557f4d2aba0751fc55f71d0733b8aa17cd0301232a7f21a895f81eacf55c97ec4", - BlockHash: "0xbd1ae4f7edb2315d2df70a8d9881fab8d6763fb1c00533ae729050928c38d05a", - Version: common.ForkVersionStringDeneb, - Payload: string(payloadBytes), - } - - payload, err := ExecutionPayloadEntryToExecutionPayload(entry) - require.NoError(t, err) - require.Equal(t, "0xbd1ae4f7edb2315d2df70a8d9881fab8d6763fb1c00533ae729050928c38d05a", payload.Deneb.ExecutionPayload.BlockHash.String()) - require.Len(t, payload.Deneb.BlobsBundle.Blobs, 1) -} diff --git a/mev-boost-relay/database/vars/tables.go b/mev-boost-relay/database/vars/tables.go deleted file mode 100644 index 68e496cfd..000000000 --- a/mev-boost-relay/database/vars/tables.go +++ /dev/null @@ -1,18 +0,0 @@ -// Package vars contains the database variables such as dynamic table names -package vars - -import "github.com/flashbots/mev-boost-relay/common" - -var ( - tableBase = common.GetEnv("DB_TABLE_PREFIX", "dev") - - TableMigrations = tableBase + "_migrations" - TableValidatorRegistration = tableBase + "_validator_registration" - TableExecutionPayload = tableBase + "_execution_payload" - TableBuilderBlockSubmission = tableBase + "_builder_block_submission" - TableDeliveredPayload = tableBase + "_payload_delivered" - TableBlockBuilder = tableBase + "_blockbuilder" - TableBuilderDemotions = tableBase + "_builder_demotions" - TableBlockedValidator = tableBase + "_blocked_validator" - TableTooLateGetPayload = tableBase + "_too_late_get_payload" -) diff --git a/mev-boost-relay/datastore/datastore.go b/mev-boost-relay/datastore/datastore.go deleted file mode 100644 
index 76e0c390c..000000000 --- a/mev-boost-relay/datastore/datastore.go +++ /dev/null @@ -1,236 +0,0 @@ -// Package datastore helps storing data, utilizing Redis and Postgres as backends -package datastore - -import ( - "database/sql" - "strconv" - "strings" - "sync" - "time" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/bradfitz/gomemcache/memcache" - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/go-redis/redis/v9" - "github.com/pkg/errors" - "github.com/sirupsen/logrus" - uberatomic "go.uber.org/atomic" -) - -var ErrExecutionPayloadNotFound = errors.New("execution payload not found") - -type GetHeaderResponseKey struct { - Slot uint64 - ParentHash string - ProposerPubkey string -} - -type GetPayloadResponseKey struct { - Slot uint64 - ProposerPubkey string - BlockHash string -} - -// Datastore provides a local memory cache with a Redis and DB backend -type Datastore struct { - redis *RedisCache - memcached *Memcached - db database.IDatabaseService - - knownValidatorsByPubkey map[common.PubkeyHex]uint64 - knownValidatorsByIndex map[uint64]common.PubkeyHex - knownValidatorsLock sync.RWMutex - knownValidatorsIsUpdating uberatomic.Bool - knownValidatorsLastSlot uberatomic.Uint64 - - // Used for proposer-API readiness check - KnownValidatorsWasUpdated uberatomic.Bool -} - -func NewDatastore(redisCache *RedisCache, memcached *Memcached, db database.IDatabaseService) (ds *Datastore, err error) { - ds = &Datastore{ - db: db, - memcached: memcached, - redis: redisCache, - knownValidatorsByPubkey: make(map[common.PubkeyHex]uint64), - knownValidatorsByIndex: make(map[uint64]common.PubkeyHex), - } - - return ds, err -} - -// RefreshKnownValidators loads known validators from CL client into memory -// -// For the CL client this is an expensive operation and takes a bunch of resources. -// This is why we schedule the requests for slot 4 and 20 of every epoch, 6 seconds -// into the slot (on suggestion of @potuz). It's also run once at startup. -func (ds *Datastore) RefreshKnownValidators(log *logrus.Entry, beaconClient beaconclient.IMultiBeaconClient, slot uint64) { - // Ensure there's only one at a time - if isAlreadyUpdating := ds.knownValidatorsIsUpdating.Swap(true); isAlreadyUpdating { - return - } - defer ds.knownValidatorsIsUpdating.Store(false) - - headSlotPos := common.SlotPos(slot) // 1-based position in epoch (32 slots, 1..32) - lastUpdateSlot := ds.knownValidatorsLastSlot.Load() - log = log.WithFields(logrus.Fields{ - "datastoreMethod": "RefreshKnownValidators", - "headSlot": slot, - "headSlotPos": headSlotPos, - "lastUpdateSlot": lastUpdateSlot, - }) - - // Only proceed if slot newer than last updated - if slot <= lastUpdateSlot { - return - } - - // Minimum amount of slots between updates - slotsSinceLastUpdate := slot - lastUpdateSlot - if slotsSinceLastUpdate < 6 { - return - } - - log.Debug("RefreshKnownValidators init") - - // Proceed only if forced, or on slot-position 4 or 20 - forceUpdate := slotsSinceLastUpdate > 32 - if !forceUpdate && headSlotPos != 4 && headSlotPos != 20 { - return - } - - // Wait for 6s into the slot - if lastUpdateSlot > 0 { - time.Sleep(6 * time.Second) - } - - log.Info("Querying validators from beacon node... 
(this may take a while)") - timeStartFetching := time.Now() - validators, err := beaconClient.GetStateValidators(beaconclient.StateIDHead) // head is fastest - if err != nil { - log.WithError(err).Error("failed to fetch validators from all beacon nodes") - return - } - - numValidators := len(validators.Data) - log = log.WithFields(logrus.Fields{ - "numKnownValidators": numValidators, - "durationFetchValidatorsMs": time.Since(timeStartFetching).Milliseconds(), - }) - log.Infof("received known validators from beacon-node") - - err = ds.redis.SetStats(RedisStatsFieldValidatorsTotal, strconv.Itoa(numValidators)) - if err != nil { - log.WithError(err).Error("failed to set stats for RedisStatsFieldValidatorsTotal") - } - - // At this point, consider the update successful - ds.knownValidatorsLastSlot.Store(slot) - - knownValidatorsByPubkey := make(map[common.PubkeyHex]uint64) - knownValidatorsByIndex := make(map[uint64]common.PubkeyHex) - - for _, valEntry := range validators.Data { - pk := common.NewPubkeyHex(valEntry.Validator.Pubkey) - knownValidatorsByPubkey[pk] = valEntry.Index - knownValidatorsByIndex[valEntry.Index] = pk - } - - ds.knownValidatorsLock.Lock() - ds.knownValidatorsByPubkey = knownValidatorsByPubkey - ds.knownValidatorsByIndex = knownValidatorsByIndex - ds.knownValidatorsLock.Unlock() - - ds.KnownValidatorsWasUpdated.Store(true) - log.Infof("known validators updated") -} - -func (ds *Datastore) IsKnownValidator(pubkeyHex common.PubkeyHex) bool { - ds.knownValidatorsLock.RLock() - defer ds.knownValidatorsLock.RUnlock() - _, found := ds.knownValidatorsByPubkey[pubkeyHex] - return found -} - -func (ds *Datastore) GetKnownValidatorPubkeyByIndex(index uint64) (common.PubkeyHex, bool) { - ds.knownValidatorsLock.RLock() - defer ds.knownValidatorsLock.RUnlock() - pk, found := ds.knownValidatorsByIndex[index] - return pk, found -} - -func (ds *Datastore) NumKnownValidators() int { - ds.knownValidatorsLock.RLock() - defer ds.knownValidatorsLock.RUnlock() - return len(ds.knownValidatorsByIndex) -} - -func (ds *Datastore) NumRegisteredValidators() (uint64, error) { - return ds.db.NumRegisteredValidators() -} - -// SaveValidatorRegistration saves a validator registration into both Redis and the database -func (ds *Datastore) SaveValidatorRegistration(entry builderApiV1.SignedValidatorRegistration) error { - // First save in the database - err := ds.db.SaveValidatorRegistration(database.SignedValidatorRegistrationToEntry(entry)) - if err != nil { - return errors.Wrap(err, "failed saving validator registration to database") - } - - // then save in redis - pk := common.NewPubkeyHex(entry.Message.Pubkey.String()) - err = ds.redis.SetValidatorRegistrationTimestampIfNewer(pk, uint64(entry.Message.Timestamp.Unix())) - if err != nil { - return errors.Wrap(err, "failed saving validator registration to redis") - } - - return nil -} - -// GetGetPayloadResponse returns the getPayload response from memory or Redis or Database -func (ds *Datastore) GetGetPayloadResponse(log *logrus.Entry, slot uint64, proposerPubkey, blockHash string) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - log = log.WithField("datastoreMethod", "GetGetPayloadResponse") - _proposerPubkey := strings.ToLower(proposerPubkey) - _blockHash := strings.ToLower(blockHash) - - // 1. 
try to get from Redis - resp, err := ds.redis.GetPayloadContents(slot, _proposerPubkey, _blockHash) - if errors.Is(err, redis.Nil) { - log.WithError(err).Warn("execution payload not found in redis") - } else if err != nil { - log.WithError(err).Error("error getting execution payload from redis") - } else { - log.Debug("getPayload response from redis") - return resp, nil - } - - // 2. try to get from Memcached - if ds.memcached != nil { - resp, err = ds.memcached.GetExecutionPayload(slot, _proposerPubkey, _blockHash) - if errors.Is(err, memcache.ErrCacheMiss) { - log.WithError(err).Warn("execution payload not found in memcached") - } else if err != nil { - log.WithError(err).Error("error getting execution payload from memcached") - } else if resp != nil { - log.Debug("getPayload response from memcached") - return resp, nil - } - } - - // 3. try to get from database (should not happen, it's just a backup) - executionPayloadEntry, err := ds.db.GetExecutionPayloadEntryBySlotPkHash(slot, proposerPubkey, blockHash) - if errors.Is(err, sql.ErrNoRows) { - log.WithError(err).Warn("execution payload not found in database") - return nil, ErrExecutionPayloadNotFound - } else if err != nil { - log.WithError(err).Error("error getting execution payload from database") - return nil, err - } - - // Got it from database, now deserialize execution payload and compile full response - log.Warn("getPayload response from database, primary storage failed") - return database.ExecutionPayloadEntryToExecutionPayload(executionPayloadEntry) -} diff --git a/mev-boost-relay/datastore/datastore_test.go b/mev-boost-relay/datastore/datastore_test.go deleted file mode 100644 index 973615109..000000000 --- a/mev-boost-relay/datastore/datastore_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package datastore - -import ( - "testing" - - "github.com/alicebob/miniredis/v2" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/stretchr/testify/require" -) - -func setupTestDatastore(t *testing.T, mockDB *database.MockDB) *Datastore { - t.Helper() - - redisTestServer, err := miniredis.Run() - require.NoError(t, err) - - redisDs, err := NewRedisCache("", redisTestServer.Addr(), "") - require.NoError(t, err) - - ds, err := NewDatastore(redisDs, nil, mockDB) - require.NoError(t, err) - - return ds -} - -func TestGetPayloadFailure(t *testing.T) { - ds := setupTestDatastore(t, &database.MockDB{}) - _, err := ds.GetGetPayloadResponse(common.TestLog, 1, "a", "b") - require.ErrorIs(t, ErrExecutionPayloadNotFound, err) -} - -func TestGetPayloadDatabaseFallback(t *testing.T) { - testCases := []struct { - description string - filename string - version string - blockHash string - }{ - { - description: "Good Capella Payload", - filename: "../testdata/executionPayloadCapella_Goerli.json.gz", - version: common.ForkVersionStringCapella, - blockHash: "0x1bafdc454116b605005364976b134d761dd736cb4788d25c835783b46daeb121", - }, - { - description: "Good Deneb Payload", - filename: "../testdata/executionPayloadAndBlobsBundleDeneb_Goerli.json.gz", - version: common.ForkVersionStringDeneb, - blockHash: "0xbd1ae4f7edb2315d2df70a8d9881fab8d6763fb1c00533ae729050928c38d05a", - }, - } - - for _, testCase := range testCases { - t.Run(testCase.description, func(t *testing.T) { - payloadBytes := common.LoadGzippedBytes(t, testCase.filename) - - // prepare mock database with execution payload entry - mockDB := &database.MockDB{ - ExecPayloads: map[string]*database.ExecutionPayloadEntry{ - "1-a-b": { - Version: 
testCase.version, - Payload: string(payloadBytes), - }, - }, - } - ds := setupTestDatastore(t, mockDB) - payload, err := ds.GetGetPayloadResponse(common.TestLog, 1, "a", "b") - require.NoError(t, err) - blockHash, err := payload.BlockHash() - require.NoError(t, err) - require.Equal(t, testCase.blockHash, blockHash.String()) - }) - } -} diff --git a/mev-boost-relay/datastore/execution_payload.go b/mev-boost-relay/datastore/execution_payload.go deleted file mode 100644 index 756069d49..000000000 --- a/mev-boost-relay/datastore/execution_payload.go +++ /dev/null @@ -1,11 +0,0 @@ -package datastore - -import ( - builderApi "github.com/attestantio/go-builder-client/api" -) - -// ExecutionPayloadRepository defines methods to fetch and store execution engine payloads -type ExecutionPayloadRepository interface { - GetExecutionPayload(slot uint64, proposerPubKey, blockHash string) (*builderApi.VersionedSubmitBlindedBlockResponse, error) - SaveExecutionPayload(slot uint64, proposerPubKey, blockHash string, payload *builderApi.VersionedSubmitBlindedBlockResponse) error -} diff --git a/mev-boost-relay/datastore/memcached.go b/mev-boost-relay/datastore/memcached.go deleted file mode 100644 index b15ec2d87..000000000 --- a/mev-boost-relay/datastore/memcached.go +++ /dev/null @@ -1,77 +0,0 @@ -package datastore - -import ( - "encoding/json" - "fmt" - "time" - - builderApi "github.com/attestantio/go-builder-client/api" - "github.com/bradfitz/gomemcache/memcache" - "github.com/thedevbirb/flashbots-go-utils/cli" -) - -var ( - defaultMemcachedExpirySeconds = int32(cli.GetEnvInt("MEMCACHED_EXPIRY_SECONDS", 45)) - defaultMemcachedTimeoutMs = cli.GetEnvInt("MEMCACHED_CLIENT_TIMEOUT_MS", 250) - defaultMemcachedMaxIdleConns = cli.GetEnvInt("MEMCACHED_MAX_IDLE_CONNS", 10) -) - -type Memcached struct { - client *memcache.Client - keyPrefix string -} - -// SaveExecutionPayload attempts to insert execution engine payload to memcached using composite key of slot, -// proposer public key, block hash, and cache prefix if specified. Note that writes to the same key value -// (i.e. same slot, proposer public key, and block hash) will overwrite the existing entry. -func (m *Memcached) SaveExecutionPayload(slot uint64, proposerPubKey, blockHash string, payload *builderApi.VersionedSubmitBlindedBlockResponse) error { - // TODO: standardize key format with redis cache and re-use the same function(s) - key := fmt.Sprintf("boost-relay/%s:cache-getpayload-response:%d_%s_%s", m.keyPrefix, slot, proposerPubKey, blockHash) - - bytes, err := json.Marshal(payload) - if err != nil { - return err - } - - //nolint:exhaustruct // "Flags" variable unused and opaque server-side - return m.client.Set(&memcache.Item{Key: key, Value: bytes, Expiration: defaultMemcachedExpirySeconds}) -} - -// GetExecutionPayload attempts to fetch execution engine payload from memcached using composite key of slot, -// proposer public key, block hash, and cache prefix if specified. 
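// As an illustrative sketch of the scheme assumed here (it mirrors the
// fmt.Sprintf used by SaveExecutionPayload above), the composite key looks like
//
//	boost-relay/<prefix>:cache-getpayload-response:<slot>_<proposerPubKey>_<blockHash>
//
// so a later write for the same slot/proposer/hash overwrites the entry, and a
// read after defaultMemcachedExpirySeconds (45s unless MEMCACHED_EXPIRY_SECONDS
// overrides it) is expected to miss.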
-func (m *Memcached) GetExecutionPayload(slot uint64, proposerPubKey, blockHash string) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - // TODO: standardize key format with redis cache and re-use the same function(s) - key := fmt.Sprintf("boost-relay/%s:cache-getpayload-response:%d_%s_%s", m.keyPrefix, slot, proposerPubKey, blockHash) - item, err := m.client.Get(key) - if err != nil { - return nil, err - } - - result := new(builderApi.VersionedSubmitBlindedBlockResponse) - if err = result.UnmarshalJSON(item.Value); err != nil { - return nil, err - } - - return result, nil -} - -func NewMemcached(prefix string, servers ...string) (*Memcached, error) { - if len(servers) == 0 { - return nil, nil - } - - sl := new(memcache.ServerList) - if err := sl.SetServers(servers...); err != nil { - return nil, err - } - - client := memcache.NewFromSelector(sl) - if err := client.Ping(); err != nil { - return nil, err - } - - client.MaxIdleConns = defaultMemcachedMaxIdleConns - client.Timeout = time.Duration(defaultMemcachedTimeoutMs) * time.Millisecond - - return &Memcached{client: client, keyPrefix: prefix}, nil -} diff --git a/mev-boost-relay/datastore/memcached_test.go b/mev-boost-relay/datastore/memcached_test.go deleted file mode 100644 index 5a42fcf70..000000000 --- a/mev-boost-relay/datastore/memcached_test.go +++ /dev/null @@ -1,330 +0,0 @@ -package datastore - -import ( - "bytes" - "errors" - "fmt" - "os" - "testing" - "time" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/ethereum/go-ethereum/common/math" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/types" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/common" - "github.com/holiman/uint256" - "github.com/stretchr/testify/require" -) - -// TODO: standardize integration tests to run with single flag/env var - consolidate with RUN_DB_TESTS -var ( - runIntegrationTests = os.Getenv("RUN_INTEGRATION_TESTS") == "1" - memcachedEndpoints = common.GetSliceEnv("MEMCACHED_URIS", nil) - - ErrNoMemcachedServers = errors.New("no memcached servers specified") -) - -func testBuilderSubmitBlockRequest(pubkey phase0.BLSPubKey, signature phase0.BLSSignature, version spec.DataVersion) common.VersionedSubmitBlockRequest { - switch version { //nolint:exhaustive - case spec.DataVersionDeneb: - return common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.SubmitBlockRequest{ - Signature: signature, - Message: &builderApiV1.BidTrace{ - Slot: 1, - ParentHash: phase0.Hash32{0x01}, - BlockHash: phase0.Hash32{0x09}, - BuilderPubkey: pubkey, - ProposerPubkey: phase0.BLSPubKey{0x03}, - ProposerFeeRecipient: bellatrix.ExecutionAddress{0x04}, - Value: uint256.NewInt(123), - GasLimit: 5002, - GasUsed: 5003, - }, - ExecutionPayload: &deneb.ExecutionPayload{ - ParentHash: phase0.Hash32{0x01}, - FeeRecipient: 
bellatrix.ExecutionAddress{0x02}, - StateRoot: phase0.Root{0x03}, - ReceiptsRoot: phase0.Root{0x04}, - LogsBloom: [256]byte{0x05}, - PrevRandao: phase0.Hash32{0x06}, - BlockNumber: 5001, - GasLimit: 5002, - GasUsed: 5003, - Timestamp: 5004, - ExtraData: []byte{0x07}, - BaseFeePerGas: uint256.NewInt(123), - BlockHash: phase0.Hash32{0x09}, - Transactions: []bellatrix.Transaction{}, - BlobGasUsed: 5005, - ExcessBlobGas: 5006, - }, - BlobsBundle: &builderApiDeneb.BlobsBundle{ - Commitments: []deneb.KZGCommitment{}, - Proofs: []deneb.KZGProof{}, - Blobs: []deneb.Blob{}, - }, - }, - }, - } - case spec.DataVersionCapella: - return common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Signature: signature, - Message: &builderApiV1.BidTrace{ - Slot: 1, - ParentHash: phase0.Hash32{0x01}, - BlockHash: phase0.Hash32{0x09}, - BuilderPubkey: pubkey, - ProposerPubkey: phase0.BLSPubKey{0x03}, - ProposerFeeRecipient: bellatrix.ExecutionAddress{0x04}, - Value: uint256.NewInt(123), - GasLimit: 5002, - GasUsed: 5003, - }, - ExecutionPayload: &capella.ExecutionPayload{ - ParentHash: phase0.Hash32{0x01}, - FeeRecipient: bellatrix.ExecutionAddress{0x02}, - StateRoot: phase0.Root{0x03}, - ReceiptsRoot: phase0.Root{0x04}, - LogsBloom: [256]byte{0x05}, - PrevRandao: phase0.Hash32{0x06}, - BlockNumber: 5001, - GasLimit: 5002, - GasUsed: 5003, - Timestamp: 5004, - ExtraData: []byte{0x07}, - BaseFeePerGas: types.IntToU256(123), - BlockHash: phase0.Hash32{0x09}, - Transactions: []bellatrix.Transaction{}, - }, - }, - }, - } - default: - return common.VersionedSubmitBlockRequest{} - } -} - -func initMemcached(t *testing.T) (mem *Memcached, err error) { - t.Helper() - if !runIntegrationTests { - t.Skip("Skipping integration tests for memcached") - } - - if len(memcachedEndpoints) == 0 { - err = ErrNoMemcachedServers - return - } - - mem, err = NewMemcached("test", memcachedEndpoints...) - if err != nil { - return - } - - // reset cache to avoid conflicts between tests - err = mem.client.DeleteAll() - return -} - -// TestMemcached performs integration tests when RUN_INTEGRATION_TESTS is true, using -// a comma separated list of endpoints specified by the environment variable MEMCACHED_URIS. -// Example: -// -// # start memcached docker container locally -// docker run -d -p 11211:11211 memcached -// # navigate to mev-boost-relay working directory and run memcached tests -// RUN_INTEGRATION_TESTS=1 MEMCACHED_URIS="localhost:11211" go test -v -run ".*Memcached.*" ./... 
-func TestMemcached(t *testing.T) { - type test struct { - Input common.VersionedSubmitBlockRequest - Description string - TestSuite func(tc *test) func(*testing.T) - } - - var ( - mem *Memcached - err error - ) - - mem, err = initMemcached(t) - require.NoError(t, err) - require.NotNil(t, mem) - - builderPk, err := utils.HexToPubkey("0xf9716c94aab536227804e859d15207aa7eaaacd839f39dcbdb5adc942842a8d2fb730f9f49fc719fdb86f1873e0ed1c2") - require.NoError(t, err) - - builderSk, err := utils.HexToSignature("0x8209b5391cd69f392b1f02dbc03bab61f574bb6bb54bf87b59e2a85bdc0756f7db6a71ce1b41b727a1f46ccc77b213bf0df1426177b5b29926b39956114421eaa36ec4602969f6f6370a44de44a6bce6dae2136e5fb594cce2a476354264d1ea") - require.NoError(t, err) - - testCases := []test{ - { - Description: "Given an invalid execution payload, we expect an invalid payload error when attempting to create a payload response", - Input: testBuilderSubmitBlockRequest(builderPk, builderSk, math.MaxUint64), - TestSuite: func(tc *test) func(*testing.T) { - return func(t *testing.T) { - t.Helper() - payload, err := common.GetBlockSubmissionExecutionPayload(&tc.Input) - require.Error(t, err) - require.Equal(t, err, common.ErrEmptyPayload) - require.Nil(t, payload) - } - }, - }, - { - Description: "Given a valid builder submit block request, we expect to successfully store and retrieve the value from memcached", - Input: testBuilderSubmitBlockRequest(builderPk, builderSk, spec.DataVersionCapella), - TestSuite: func(tc *test) func(*testing.T) { - return func(t *testing.T) { - t.Helper() - - payload, err := common.GetBlockSubmissionExecutionPayload(&tc.Input) - require.NoError( - t, - err, - "expected valid execution payload response for builder's submit block request but found [%v]", err, - ) - - inputBytes, err := payload.MarshalJSON() - require.NoError( - t, - err, - "expected no error when marshalling execution payload response but found [%v]", err, - ) - - out := new(builderApi.VersionedSubmitBlindedBlockResponse) - err = out.UnmarshalJSON(inputBytes) - require.NoError( - t, - err, - "expected no error when unmarshalling execution payload response to versioned execution payload but found [%v]", err, - ) - - outputBytes, err := out.MarshalJSON() - require.NoError(t, err) - require.True(t, bytes.Equal(inputBytes, outputBytes)) - - submission, err := common.GetBlockSubmissionInfo(&tc.Input) - require.NoError(t, err) - - // key should not exist in cache yet - empty, err := mem.GetExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String()) - require.NoError(t, err) - require.Nil(t, empty) - - submission, err = common.GetBlockSubmissionInfo(&tc.Input) - require.NoError(t, err) - err = mem.SaveExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), payload) - require.NoError(t, err) - - get, err := mem.GetExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String()) - require.NoError(t, err, "expected no error when fetching execution payload from memcached but found [%v]", err) - - getBytes, err := get.MarshalJSON() - require.NoError(t, err) - require.True(t, bytes.Equal(outputBytes, getBytes)) - require.True(t, bytes.Equal(getBytes, inputBytes)) - } - }, - }, - { - Description: "Given a valid builder submit block request, updates to the same key should overwrite existing entry and return the last written value", - Input: 
testBuilderSubmitBlockRequest(builderPk, builderSk, spec.DataVersionDeneb), - TestSuite: func(tc *test) func(*testing.T) { - return func(t *testing.T) { - t.Helper() - - payload, err := common.GetBlockSubmissionExecutionPayload(&tc.Input) - require.NoError( - t, - err, - "expected valid execution payload response for builder's submit block request but found [%v]", err, - ) - - submission, err := common.GetBlockSubmissionInfo(&tc.Input) - require.NoError(t, err) - - err = mem.SaveExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), payload) - require.NoError(t, err) - - prev, err := mem.GetExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String()) - require.NoError(t, err) - require.Equal(t, len(prev.Capella.Transactions), len(submission.Transactions)) - - payload.Capella.GasLimit++ - require.NotEqual(t, prev.Capella.GasLimit, payload.Capella.GasLimit) - - err = mem.SaveExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), payload) - require.NoError(t, err) - - current, err := mem.GetExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String()) - require.NoError(t, err) - require.Equal(t, current.Capella.GasLimit, payload.Capella.GasLimit) - require.NotEqual(t, current.Capella.GasLimit, prev.Capella.GasLimit) - } - }, - }, - { - Description: fmt.Sprintf("Given a valid builder submit block request, memcached entry should expire after %d seconds", defaultMemcachedExpirySeconds), - Input: testBuilderSubmitBlockRequest(builderPk, builderSk, spec.DataVersionCapella), - TestSuite: func(tc *test) func(*testing.T) { - return func(t *testing.T) { - t.Helper() - t.Parallel() - - _, pubkey, err := bls.GenerateNewKeypair() - require.NoError(t, err) - - pk, err := utils.BlsPublicKeyToPublicKey(pubkey) - require.NoError(t, err) - - tc.Input.Capella.Message.ProposerPubkey = pk - payload, err := common.GetBlockSubmissionExecutionPayload(&tc.Input) - require.NoError( - t, - err, - "expected valid execution payload response for builder's submit block request but found [%v]", err, - ) - - submission, err := common.GetBlockSubmissionInfo(&tc.Input) - require.NoError(t, err) - - require.Equal(t, submission.BidTrace.ProposerPubkey.String(), pk.String()) - - err = mem.SaveExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), payload) - require.NoError(t, err) - - ret, err := mem.GetExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String()) - require.NoError(t, err) - require.Equal(t, len(ret.Capella.Transactions), len(submission.Transactions)) - - time.Sleep((time.Duration(defaultMemcachedExpirySeconds) + 2) * time.Second) - expired, err := mem.GetExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String()) - require.NoError(t, err) - require.NotEqual(t, ret, expired) - require.Nil(t, expired) - } - }, - }, - } - - for _, tc := range testCases { - testcase := tc - t.Run(testcase.Description, testcase.TestSuite(&testcase)) - } -} diff --git a/mev-boost-relay/datastore/redis.go b/mev-boost-relay/datastore/redis.go deleted file mode 100644 index 45d70f4e5..000000000 --- a/mev-boost-relay/datastore/redis.go +++ /dev/null @@ -1,888 
+0,0 @@ -package datastore - -import ( - "context" - "encoding/json" - "errors" - "fmt" - "math/big" - "strconv" - "strings" - "time" - - builderApi "github.com/attestantio/go-builder-client/api" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/flashbots/mev-boost-relay/common" - "github.com/go-redis/redis/v9" - "github.com/sirupsen/logrus" - "github.com/thedevbirb/flashbots-go-utils/cli" -) - -var ( - redisScheme = "redis://" - redisPrefix = "boost-relay" - - expiryBidCache = 45 * time.Second - - RedisConfigFieldPubkey = "pubkey" - RedisStatsFieldLatestSlot = "latest-slot" - RedisStatsFieldValidatorsTotal = "validators-total" - - ErrFailedUpdatingTopBidNoBids = errors.New("failed to update top bid because no bids were found") - ErrAnotherPayloadAlreadyDeliveredForSlot = errors.New("another payload block hash for slot was already delivered") - ErrPastSlotAlreadyDelivered = errors.New("payload for past slot was already delivered") - - // Docs about redis settings: https://redis.io/docs/reference/clients/ - redisConnectionPoolSize = cli.GetEnvInt("REDIS_CONNECTION_POOL_SIZE", 0) // 0 means use default (10 per CPU) - redisMinIdleConnections = cli.GetEnvInt("REDIS_MIN_IDLE_CONNECTIONS", 0) // 0 means use default - redisReadTimeoutSec = cli.GetEnvInt("REDIS_READ_TIMEOUT_SEC", 0) // 0 means use default (3 sec) - redisPoolTimeoutSec = cli.GetEnvInt("REDIS_POOL_TIMEOUT_SEC", 0) // 0 means use default (ReadTimeout + 1 sec) - redisWriteTimeoutSec = cli.GetEnvInt("REDIS_WRITE_TIMEOUT_SEC", 0) // 0 means use default (3 seconds) -) - -func connectRedis(redisURI string) (*redis.Client, error) { - // Handle both URIs and full URLs, assume unencrypted connections - if !strings.HasPrefix(redisURI, redisScheme) && !strings.HasPrefix(redisURI, "rediss://") { - redisURI = redisScheme + redisURI - } - - redisOpts, err := redis.ParseURL(redisURI) - if err != nil { - return nil, err - } - - if redisConnectionPoolSize > 0 { - redisOpts.PoolSize = redisConnectionPoolSize - } - if redisMinIdleConnections > 0 { - redisOpts.MinIdleConns = redisMinIdleConnections - } - if redisReadTimeoutSec > 0 { - redisOpts.ReadTimeout = time.Duration(redisReadTimeoutSec) * time.Second - } - if redisPoolTimeoutSec > 0 { - redisOpts.PoolTimeout = time.Duration(redisPoolTimeoutSec) * time.Second - } - if redisWriteTimeoutSec > 0 { - redisOpts.WriteTimeout = time.Duration(redisWriteTimeoutSec) * time.Second - } - - redisClient := redis.NewClient(redisOpts) - if _, err := redisClient.Ping(context.Background()).Result(); err != nil { - // unable to connect to redis - return nil, err - } - return redisClient, nil -} - -type RedisCache struct { - client *redis.Client - readonlyClient *redis.Client - boltLog *logrus.Entry - - // prefixes (keys generated with a function) - prefixGetHeaderResponse string - prefixExecPayloadCapella string - prefixPayloadContentsDeneb string - prefixInclusionProof string - prefixBidTrace string - prefixBlockBuilderLatestBids string // latest bid for a given slot - prefixBlockBuilderLatestBidsValue string // value of latest bid for a given slot - prefixBlockBuilderLatestBidsTime string // when the request was received, to avoid older requests overwriting newer ones after a slot validation - prefixTopBidValue string - prefixFloorBid string - prefixFloorBidValue string - - // keys - keyValidatorRegistrationTimestamp 
string - - keyRelayConfig string - keyStats string - keyProposerDuties string - keyBlockBuilderStatus string - keyLastSlotDelivered string - keyLastHashDelivered string -} - -func NewRedisCache(prefix, redisURI, readonlyURI string) (*RedisCache, error) { - client, err := connectRedis(redisURI) - if err != nil { - return nil, err - } - - roClient := client - if readonlyURI != "" { - roClient, err = connectRedis(readonlyURI) - if err != nil { - return nil, err - } - } - - return &RedisCache{ - client: client, - readonlyClient: roClient, - boltLog: common.NewBoltLogger("REDIS"), - - prefixGetHeaderResponse: fmt.Sprintf("%s/%s:cache-gethead-response", redisPrefix, prefix), - prefixExecPayloadCapella: fmt.Sprintf("%s/%s:cache-execpayload-capella", redisPrefix, prefix), - prefixPayloadContentsDeneb: fmt.Sprintf("%s/%s:cache-payloadcontents-deneb", redisPrefix, prefix), - prefixInclusionProof: fmt.Sprintf("%s/%s:cache-preconfirmations-proofs", redisPrefix, prefix), - prefixBidTrace: fmt.Sprintf("%s/%s:cache-bid-trace", redisPrefix, prefix), - - prefixBlockBuilderLatestBids: fmt.Sprintf("%s/%s:block-builder-latest-bid", redisPrefix, prefix), // hashmap for slot+parentHash+proposerPubkey with builderPubkey as field - prefixBlockBuilderLatestBidsValue: fmt.Sprintf("%s/%s:block-builder-latest-bid-value", redisPrefix, prefix), // hashmap for slot+parentHash+proposerPubkey with builderPubkey as field - prefixBlockBuilderLatestBidsTime: fmt.Sprintf("%s/%s:block-builder-latest-bid-time", redisPrefix, prefix), // hashmap for slot+parentHash+proposerPubkey with builderPubkey as field - prefixTopBidValue: fmt.Sprintf("%s/%s:top-bid-value", redisPrefix, prefix), // prefix:slot_parentHash_proposerPubkey - prefixFloorBid: fmt.Sprintf("%s/%s:bid-floor", redisPrefix, prefix), // prefix:slot_parentHash_proposerPubkey - prefixFloorBidValue: fmt.Sprintf("%s/%s:bid-floor-value", redisPrefix, prefix), // prefix:slot_parentHash_proposerPubkey - - keyValidatorRegistrationTimestamp: fmt.Sprintf("%s/%s:validator-registration-timestamp", redisPrefix, prefix), - keyRelayConfig: fmt.Sprintf("%s/%s:relay-config", redisPrefix, prefix), - - keyStats: fmt.Sprintf("%s/%s:stats", redisPrefix, prefix), - keyProposerDuties: fmt.Sprintf("%s/%s:proposer-duties", redisPrefix, prefix), - keyBlockBuilderStatus: fmt.Sprintf("%s/%s:block-builder-status", redisPrefix, prefix), - keyLastSlotDelivered: fmt.Sprintf("%s/%s:last-slot-delivered", redisPrefix, prefix), - keyLastHashDelivered: fmt.Sprintf("%s/%s:last-hash-delivered", redisPrefix, prefix), - }, nil -} - -func (r *RedisCache) keyCacheGetHeaderResponse(slot uint64, parentHash, proposerPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixGetHeaderResponse, slot, parentHash, proposerPubkey) -} - -func (r *RedisCache) keyExecPayloadCapella(slot uint64, proposerPubkey, blockHash string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixExecPayloadCapella, slot, proposerPubkey, blockHash) -} - -func (r *RedisCache) keyPayloadContentsDeneb(slot uint64, proposerPubkey, blockHash string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixPayloadContentsDeneb, slot, proposerPubkey, blockHash) -} - -func (r *RedisCache) keyInclusionProof(slot uint64, proposerPubkey string, blockHash string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixInclusionProof, slot, proposerPubkey, blockHash) -} - -func (r *RedisCache) keyCacheBidTrace(slot uint64, proposerPubkey, blockHash string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixBidTrace, slot, proposerPubkey, blockHash) 
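Editor's note on the key layout used by these helpers: every Redis key combines the fixed "boost-relay" namespace, a per-deployment prefix, a logical name, and then the slot/parent-hash/proposer-pubkey tuple. The stand-alone sketch below is an illustration only (the helper name and example values are invented); it mirrors the composition done in NewRedisCache plus keyCacheGetHeaderResponse.

    package main

    import "fmt"

    // exampleGetHeaderKey mirrors the key composition in NewRedisCache and
    // keyCacheGetHeaderResponse: "boost-relay" + deployment prefix + logical name,
    // followed by slot_parentHash_proposerPubkey.
    func exampleGetHeaderKey(deployPrefix string, slot uint64, parentHash, proposerPubkey string) string {
    	base := fmt.Sprintf("%s/%s:%s", "boost-relay", deployPrefix, "cache-gethead-response")
    	return fmt.Sprintf("%s:%d_%s_%s", base, slot, parentHash, proposerPubkey)
    }

    func main() {
    	// Placeholder, truncated values for readability.
    	fmt.Println(exampleGetHeaderKey("mainnet", 123, "0xparent...", "0xproposer..."))
    	// -> boost-relay/mainnet:cache-gethead-response:123_0xparent..._0xproposer...
    }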
-} - -// keyLatestBidByBuilder returns the key for the getHeader response of the latest bid by a specific builder -func (r *RedisCache) keyLatestBidByBuilder(slot uint64, parentHash, proposerPubkey, builderPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s/%s", r.prefixBlockBuilderLatestBids, slot, parentHash, proposerPubkey, builderPubkey) -} - -// keyBlockBuilderLatestBidsValue returns the hashmap key for the value of the latest bid by a specific builder -func (r *RedisCache) keyBlockBuilderLatestBidsValue(slot uint64, parentHash, proposerPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixBlockBuilderLatestBidsValue, slot, parentHash, proposerPubkey) -} - -// keyBlockBuilderLatestBidsTime returns the hashmap key for the time of the latest bid by a specific builder -func (r *RedisCache) keyBlockBuilderLatestBidsTime(slot uint64, parentHash, proposerPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixBlockBuilderLatestBidsTime, slot, parentHash, proposerPubkey) -} - -// keyTopBidValue returns the key for the current top bid value of a given slot+parentHash+proposerPubkey -func (r *RedisCache) keyTopBidValue(slot uint64, parentHash, proposerPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixTopBidValue, slot, parentHash, proposerPubkey) -} - -// keyFloorBid returns the key for the highest non-cancellable bid of a given slot+parentHash+proposerPubkey -func (r *RedisCache) keyFloorBid(slot uint64, parentHash, proposerPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixFloorBid, slot, parentHash, proposerPubkey) -} - -// keyFloorBidValue returns the key for the highest non-cancellable value of a given slot+parentHash+proposerPubkey -func (r *RedisCache) keyFloorBidValue(slot uint64, parentHash, proposerPubkey string) string { - return fmt.Sprintf("%s:%d_%s_%s", r.prefixFloorBidValue, slot, parentHash, proposerPubkey) -} - -func (r *RedisCache) GetObj(key string, obj any) (err error) { - value, err := r.client.Get(context.Background(), key).Result() - if err != nil { - return err - } - - return json.Unmarshal([]byte(value), &obj) -} - -func (r *RedisCache) SetObj(key string, value any, expiration time.Duration) (err error) { - marshalledValue, err := json.Marshal(value) - if err != nil { - return err - } - - return r.client.Set(context.Background(), key, marshalledValue, expiration).Err() -} - -// SetObjPipelined saves an object in the given Redis key on a Redis pipeline (JSON encoded) -func (r *RedisCache) SetObjPipelined(ctx context.Context, pipeliner redis.Pipeliner, key string, value any, expiration time.Duration) (err error) { - marshalledValue, err := json.Marshal(value) - if err != nil { - return err - } - - return pipeliner.Set(ctx, key, marshalledValue, expiration).Err() -} - -func (r *RedisCache) HSetObj(key, field string, value any, expiration time.Duration) (err error) { - marshalledValue, err := json.Marshal(value) - if err != nil { - return err - } - - err = r.client.HSet(context.Background(), key, field, marshalledValue).Err() - if err != nil { - return err - } - - return r.client.Expire(context.Background(), key, expiration).Err() -} - -func (r *RedisCache) GetValidatorRegistrationTimestamp(proposerPubkey common.PubkeyHex) (uint64, error) { - timestamp, err := r.client.HGet(context.Background(), r.keyValidatorRegistrationTimestamp, strings.ToLower(proposerPubkey.String())).Uint64() - if errors.Is(err, redis.Nil) { - return 0, nil - } - return timestamp, err -} - -func (r *RedisCache)
SetValidatorRegistrationTimestampIfNewer(proposerPubkey common.PubkeyHex, timestamp uint64) error { - knownTimestamp, err := r.GetValidatorRegistrationTimestamp(proposerPubkey) - if err != nil { - return err - } - if knownTimestamp >= timestamp { - return nil - } - return r.SetValidatorRegistrationTimestamp(proposerPubkey, timestamp) -} - -func (r *RedisCache) SetValidatorRegistrationTimestamp(proposerPubkey common.PubkeyHex, timestamp uint64) error { - return r.client.HSet(context.Background(), r.keyValidatorRegistrationTimestamp, proposerPubkey.String(), timestamp).Err() -} - -func (r *RedisCache) CheckAndSetLastSlotAndHashDelivered(slot uint64, hash string) (err error) { - // More details about Redis optimistic locking: - // - https://redis.uptrace.dev/guide/go-redis-pipelines.html#transactions - // - https://github.com/redis/go-redis/blob/6ecbcf6c90919350c42181ce34c1cbdfbd5d1463/race_test.go#L183 - txf := func(tx *redis.Tx) error { - lastSlotDelivered, err := tx.Get(context.Background(), r.keyLastSlotDelivered).Uint64() - if err != nil && !errors.Is(err, redis.Nil) { - return err - } - - // slot in the past, reject request - if slot < lastSlotDelivered { - return ErrPastSlotAlreadyDelivered - } - - // current slot, reject request if hash is different - if slot == lastSlotDelivered { - lastHashDelivered, err := tx.Get(context.Background(), r.keyLastHashDelivered).Result() - if err != nil && !errors.Is(err, redis.Nil) { - return err - } - if hash != lastHashDelivered { - return ErrAnotherPayloadAlreadyDeliveredForSlot - } - return nil - } - - _, err = tx.TxPipelined(context.Background(), func(pipe redis.Pipeliner) error { - pipe.Set(context.Background(), r.keyLastSlotDelivered, slot, 0) - pipe.Set(context.Background(), r.keyLastHashDelivered, hash, 0) - return nil - }) - - return err - } - - return r.client.Watch(context.Background(), txf, r.keyLastSlotDelivered, r.keyLastHashDelivered) -} - -func (r *RedisCache) GetLastSlotDelivered(ctx context.Context, pipeliner redis.Pipeliner) (slot uint64, err error) { - c := pipeliner.Get(ctx, r.keyLastSlotDelivered) - _, err = pipeliner.Exec(ctx) - if err != nil { - return 0, err - } - return c.Uint64() -} - -func (r *RedisCache) GetLastHashDelivered() (hash string, err error) { - return r.client.Get(context.Background(), r.keyLastHashDelivered).Result() -} - -func (r *RedisCache) SetStats(field string, value any) (err error) { - return r.client.HSet(context.Background(), r.keyStats, field, value).Err() -} - -func (r *RedisCache) GetStats(field string) (value string, err error) { - return r.client.HGet(context.Background(), r.keyStats, field).Result() -} - -// GetStatsUint64 returns (valueUint64, nil), or (0, redis.Nil) if the field does not exist -func (r *RedisCache) GetStatsUint64(field string) (value uint64, err error) { - valStr, err := r.client.HGet(context.Background(), r.keyStats, field).Result() - if err != nil { - return 0, err - } - - value, err = strconv.ParseUint(valStr, 10, 64) - return value, err -} - -func (r *RedisCache) SetProposerDuties(proposerDuties []common.BuilderGetValidatorsResponseEntry) (err error) { - return r.SetObj(r.keyProposerDuties, proposerDuties, 0) -} - -func (r *RedisCache) GetProposerDuties() (proposerDuties []common.BuilderGetValidatorsResponseEntry, err error) { - proposerDuties = make([]common.BuilderGetValidatorsResponseEntry, 0) - err = r.GetObj(r.keyProposerDuties, &proposerDuties) - if errors.Is(err, redis.Nil) { - return proposerDuties, nil - } - return proposerDuties, err -} - -func (r *RedisCache) 
SetRelayConfig(field, value string) (err error) { - return r.client.HSet(context.Background(), r.keyRelayConfig, field, value).Err() -} - -func (r *RedisCache) GetRelayConfig(field string) (string, error) { - res, err := r.client.HGet(context.Background(), r.keyRelayConfig, field).Result() - if errors.Is(err, redis.Nil) { - return res, nil - } - return res, err -} - -func (r *RedisCache) GetBestBid(slot uint64, parentHash, proposerPubkey string) (*builderSpec.VersionedSignedBuilderBid, error) { - key := r.keyCacheGetHeaderResponse(slot, parentHash, proposerPubkey) - r.boltLog.Info("Getting best bid from Redis with key ", key) - resp := new(builderSpec.VersionedSignedBuilderBid) - err := r.GetObj(key, resp) - if errors.Is(err, redis.Nil) { - r.boltLog.WithError(err).Errorf("Failed to find bid with key %s in Redis", key) - return nil, nil - } - return resp, err -} - -func (r *RedisCache) GetInclusionProof(slot uint64, proposerPubkey string, bidBlockHash string) (*common.InclusionProof, error) { - key := r.keyInclusionProof(slot, proposerPubkey, bidBlockHash) - r.boltLog.Infof("Getting preconfirmations proofs from Redis with key %s", key) - resp := new(common.InclusionProof) - err := r.GetObj(key, &resp) - if errors.Is(err, redis.Nil) { - return nil, nil - } - return resp, err -} - -func (r *RedisCache) GetPayloadContents(slot uint64, proposerPubkey, blockHash string) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - resp, err := r.GetPayloadContentsDeneb(slot, proposerPubkey, blockHash) - if errors.Is(err, redis.Nil) { - // can't find deneb payload, try find capella payload - return r.GetExecutionPayloadCapella(slot, proposerPubkey, blockHash) - } - return resp, err -} - -func (r *RedisCache) SavePayloadContentsDeneb(ctx context.Context, tx redis.Pipeliner, slot uint64, proposerPubkey, blockHash string, execPayload *builderApiDeneb.ExecutionPayloadAndBlobsBundle) (err error) { - key := r.keyPayloadContentsDeneb(slot, proposerPubkey, blockHash) - r.boltLog.Infof("Saving execution payload deneb with key %s", key) - b, err := execPayload.MarshalSSZ() - if err != nil { - r.boltLog.WithError(err).Errorf("Error while saving executing payload deneb with key %s", key) - return err - } - return tx.Set(ctx, key, b, expiryBidCache).Err() -} - -func (r *RedisCache) GetPayloadContentsDeneb(slot uint64, proposerPubkey, blockHash string) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - denebPayloadContents := new(builderApiDeneb.ExecutionPayloadAndBlobsBundle) - - key := r.keyPayloadContentsDeneb(slot, proposerPubkey, blockHash) - val, err := r.client.Get(context.Background(), key).Result() - if err != nil { - return nil, err - } - - err = denebPayloadContents.UnmarshalSSZ([]byte(val)) - if err != nil { - return nil, err - } - - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionDeneb, - Deneb: denebPayloadContents, - }, nil -} - -func (r *RedisCache) SaveExecutionPayloadCapella(ctx context.Context, pipeliner redis.Pipeliner, slot uint64, proposerPubkey, blockHash string, execPayload *capella.ExecutionPayload) (err error) { - key := r.keyExecPayloadCapella(slot, proposerPubkey, blockHash) - r.boltLog.Infof("Saving execution payload capella with key %s", key) - b, err := execPayload.MarshalSSZ() - if err != nil { - r.boltLog.WithError(err).Errorf("Error while saving executing payload with key %s", key) - return err - } - return pipeliner.Set(ctx, key, b, expiryBidCache).Err() -} - -// SavePreconfirmationsProofs saves the preconfirmation proofs in the 
Redis cache with JSON encoding -// TODO: maybe ssz encoding? -func (r *RedisCache) SaveInclusionProof(ctx context.Context, pipeliner redis.Pipeliner, - slot uint64, proposerPubkey string, bidBlockHash string, proof *common.InclusionProof, -) (err error) { - key := r.keyInclusionProof(slot, proposerPubkey, bidBlockHash) - r.boltLog.Infof("Saving %d inclusion proofs with key %s", len(proof.TransactionHashes), key) - b, err := json.Marshal(proof) - if err != nil { - r.boltLog.WithError(err).Errorf("Failed to marshal preconfirmations proofs for slot %d", slot) - return err - } - return pipeliner.Set(ctx, key, b, expiryBidCache).Err() -} - -func (r *RedisCache) GetExecutionPayloadCapella(slot uint64, proposerPubkey, blockHash string) (*builderApi.VersionedSubmitBlindedBlockResponse, error) { - capellaPayload := new(capella.ExecutionPayload) - - key := r.keyExecPayloadCapella(slot, proposerPubkey, blockHash) - val, err := r.client.Get(context.Background(), key).Result() - if err != nil { - return nil, err - } - - err = capellaPayload.UnmarshalSSZ([]byte(val)) - if err != nil { - return nil, err - } - - return &builderApi.VersionedSubmitBlindedBlockResponse{ - Version: spec.DataVersionCapella, - Capella: capellaPayload, - }, nil -} - -func (r *RedisCache) SaveBidTrace(ctx context.Context, pipeliner redis.Pipeliner, trace *common.BidTraceV2WithBlobFields) (err error) { - key := r.keyCacheBidTrace(trace.Slot, trace.ProposerPubkey.String(), trace.BlockHash.String()) - return r.SetObjPipelined(ctx, pipeliner, key, trace, expiryBidCache) -} - -// GetBidTrace returns (trace, nil), or (nil, redis.Nil) if the trace does not exist -func (r *RedisCache) GetBidTrace(slot uint64, proposerPubkey, blockHash string) (*common.BidTraceV2WithBlobFields, error) { - key := r.keyCacheBidTrace(slot, proposerPubkey, blockHash) - resp := new(common.BidTraceV2WithBlobFields) - err := r.GetObj(key, resp) - return resp, err -} - -func (r *RedisCache) GetBuilderLatestPayloadReceivedAt(ctx context.Context, pipeliner redis.Pipeliner, slot uint64, builderPubkey, parentHash, proposerPubkey string) (int64, error) { - keyLatestBidsTime := r.keyBlockBuilderLatestBidsTime(slot, parentHash, proposerPubkey) - c := pipeliner.HGet(context.Background(), keyLatestBidsTime, builderPubkey) - _, err := pipeliner.Exec(ctx) - if errors.Is(err, redis.Nil) { - return 0, nil - } else if err != nil { - return 0, err - } - return c.Int64() -} - -// SaveBuilderBid saves the latest bid by a specific builder. 
TODO: use transaction to make these writes atomic -func (r *RedisCache) SaveBuilderBid(ctx context.Context, pipeliner redis.Pipeliner, slot uint64, parentHash, proposerPubkey, builderPubkey string, receivedAt time.Time, headerResp *builderSpec.VersionedSignedBuilderBid) (err error) { - // save the actual bid - keyLatestBid := r.keyLatestBidByBuilder(slot, parentHash, proposerPubkey, builderPubkey) - r.boltLog.Infof("Saving latest builder bid with key %s", keyLatestBid) - err = r.SetObjPipelined(ctx, pipeliner, keyLatestBid, headerResp, expiryBidCache) - if err != nil { - return err - } - - // set the time of the request - keyLatestBidsTime := r.keyBlockBuilderLatestBidsTime(slot, parentHash, proposerPubkey) - err = pipeliner.HSet(ctx, keyLatestBidsTime, builderPubkey, receivedAt.UnixMilli()).Err() - if err != nil { - return err - } - err = pipeliner.Expire(ctx, keyLatestBidsTime, expiryBidCache).Err() - if err != nil { - return err - } - - // set the value last, because that's iterated over when updating the best bid, and the payload has to be available - keyLatestBidsValue := r.keyBlockBuilderLatestBidsValue(slot, parentHash, proposerPubkey) - r.boltLog.Infof("Saving latest builder bid with value and with key %s", keyLatestBidsValue) - value, err := headerResp.Value() - if err != nil { - return err - } - err = pipeliner.HSet(ctx, keyLatestBidsValue, builderPubkey, value.ToBig().String()).Err() - if err != nil { - return err - } - return pipeliner.Expire(ctx, keyLatestBidsValue, expiryBidCache).Err() -} - -type SaveBidAndUpdateTopBidResponse struct { - WasBidSaved bool // Whether this bid was saved - WasTopBidUpdated bool // Whether the top bid was updated - IsNewTopBid bool // Whether the submitted bid became the new top bid - - TopBidValue *big.Int - PrevTopBidValue *big.Int - - TimePrep time.Duration - TimeSavePayload time.Duration - TimeSaveBid time.Duration - TimeSaveTrace time.Duration - TimeUpdateTopBid time.Duration - TimeUpdateFloor time.Duration -} - -func (r *RedisCache) SaveBidAndUpdateTopBid( - ctx context.Context, - pipeliner redis.Pipeliner, - trace *common.BidTraceV2WithBlobFields, - payload *common.VersionedSubmitBlockRequest, - getPayloadResponse *builderApi.VersionedSubmitBlindedBlockResponse, - getHeaderResponse *builderSpec.VersionedSignedBuilderBid, - reqReceivedAt time.Time, - isCancellationEnabled bool, - floorValue *big.Int, - proof *common.InclusionProof, -) (state SaveBidAndUpdateTopBidResponse, err error) { - var prevTime, nextTime time.Time - prevTime = time.Now() - - submission, err := common.GetBlockSubmissionInfo(payload) - if err != nil { - return state, err - } - - // Load latest bids for a given slot+parent+proposer - builderBids, err := NewBuilderBidsFromRedis(ctx, r, pipeliner, submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - if err != nil { - return state, err - } - - // Load floor value (if not passed in already) - if floorValue == nil { - floorValue, err = r.GetFloorBidValue(ctx, pipeliner, submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - if err != nil { - return state, err - } - } - - // Get the reference top bid value - _, state.TopBidValue = builderBids.getTopBid() - if floorValue.Cmp(state.TopBidValue) == 1 { - state.TopBidValue = floorValue - } - state.PrevTopBidValue = state.TopBidValue - - // Abort now if non-cancellation bid is lower than floor value - isBidAboveFloor := 
submission.BidTrace.Value.ToBig().Cmp(floorValue) == 1 - if !isCancellationEnabled && !isBidAboveFloor { - return state, nil - } - - // Record time needed - nextTime = time.Now().UTC() - state.TimePrep = nextTime.Sub(prevTime) - prevTime = nextTime - - // - // Time to save things in Redis - // - // 1. Save the execution payload - switch payload.Version { - case spec.DataVersionCapella: - err = r.SaveExecutionPayloadCapella(ctx, pipeliner, submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), getPayloadResponse.Capella) - if err != nil { - return state, err - } - case spec.DataVersionDeneb: - err = r.SavePayloadContentsDeneb(ctx, pipeliner, submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), getPayloadResponse.Deneb) - if err != nil { - return state, err - } - case spec.DataVersionUnknown, spec.DataVersionPhase0, spec.DataVersionAltair, spec.DataVersionBellatrix: - return state, fmt.Errorf("unsupported payload version: %s", payload.Version) //nolint:goerr113 - } - - // BOLT: If preconfirmations proofs are available, save them - if proof != nil { - err = r.SaveInclusionProof(ctx, pipeliner, submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), proof) - if err != nil { - r.boltLog.WithError(err).Errorf("Failed to save preconfirmations proofs to redis for slot %d", submission.BidTrace.Slot) - return state, err - } - } - - // Record time needed to save payload - nextTime = time.Now().UTC() - state.TimeSavePayload = nextTime.Sub(prevTime) - prevTime = nextTime - - // 2. Save latest bid for this builder - err = r.SaveBuilderBid(ctx, pipeliner, submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BuilderPubkey.String(), reqReceivedAt, getHeaderResponse) - if err != nil { - r.boltLog.WithError(err).Errorf("Failed to save latest bid for builder %s to redis for slot %d", submission.BidTrace.BuilderPubkey.String(), submission.BidTrace.Slot) - return state, err - } - builderBids.bidValues[submission.BidTrace.BuilderPubkey.String()] = submission.BidTrace.Value.ToBig() - - // Record time needed to save bid - nextTime = time.Now().UTC() - state.TimeSaveBid = nextTime.Sub(prevTime) - prevTime = nextTime - - // 3. Save the bid trace - err = r.SaveBidTrace(ctx, pipeliner, trace) - if err != nil { - return state, err - } - - // Record time needed to save trace - nextTime = time.Now().UTC() - state.TimeSaveTrace = nextTime.Sub(prevTime) - prevTime = nextTime - - // If top bid value hasn't change, abort now - _, state.TopBidValue = builderBids.getTopBid() - if state.TopBidValue.Cmp(state.PrevTopBidValue) == 0 { - return state, nil - } - - state, err = r._updateTopBid(ctx, pipeliner, state, builderBids, submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String(), floorValue) - if err != nil { - return state, err - } - state.IsNewTopBid = submission.BidTrace.Value.ToBig().Cmp(state.TopBidValue) == 0 - // An Exec happens in _updateTopBid. 
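Editor's note: before the floor-bid handling that follows, it may help to see the accept/abort rules of SaveBidAndUpdateTopBid in isolation. The sketch below is a simplified, Redis-free illustration with invented names; note that the real method recomputes the top bid from every builder's latest bid plus the floor, so with cancellations enabled the top can also move down, which this sketch does not model.

    package main

    import (
    	"fmt"
    	"math/big"
    )

    // bidDecision summarises what SaveBidAndUpdateTopBid does with one incoming bid.
    type bidDecision struct {
    	Accept       bool // store the bid at all
    	UpdatesTop   bool // becomes the new top bid
    	UpdatesFloor bool // raises the non-cancellable floor
    }

    func decide(bid, topBid, floor *big.Int, cancellationsEnabled bool) bidDecision {
    	aboveFloor := bid.Cmp(floor) > 0

    	// Non-cancellable bids at or below the floor are dropped early; with
    	// cancellations enabled every bid is stored, since a lower bid may
    	// legitimately replace a builder's earlier, higher one.
    	if !cancellationsEnabled && !aboveFloor {
    		return bidDecision{}
    	}

    	return bidDecision{
    		Accept:       true,
    		UpdatesTop:   bid.Cmp(topBid) > 0,
    		UpdatesFloor: !cancellationsEnabled && aboveFloor,
    	}
    }

    func main() {
    	floor, top := big.NewInt(10), big.NewInt(10)
    	fmt.Printf("%+v\n", decide(big.NewInt(5), top, floor, false))  // dropped: at or below floor
    	fmt.Printf("%+v\n", decide(big.NewInt(5), top, floor, true))   // stored, not top
    	fmt.Printf("%+v\n", decide(big.NewInt(20), top, floor, false)) // stored, new top, new floor
    }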
- state.WasBidSaved = true - - // Record time needed to update top bid - nextTime = time.Now().UTC() - state.TimeUpdateTopBid = nextTime.Sub(prevTime) - prevTime = nextTime - - if isCancellationEnabled || !isBidAboveFloor { - return state, nil - } - - // Non-cancellable bid above floor should set new floor - keyBidSource := r.keyLatestBidByBuilder(submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BuilderPubkey.String()) - keyFloorBid := r.keyFloorBid(submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - c := pipeliner.Copy(ctx, keyBidSource, keyFloorBid, 0, true) - _, err = pipeliner.Exec(ctx) - if err != nil { - return state, err - } - - wasCopied, copyErr := c.Result() - if copyErr != nil { - return state, copyErr - } else if wasCopied == 0 { - return state, fmt.Errorf("could not copy floor bid from %s to %s", keyBidSource, keyFloorBid) //nolint:goerr113 - } - err = pipeliner.Expire(ctx, keyFloorBid, expiryBidCache).Err() - if err != nil { - return state, err - } - - keyFloorBidValue := r.keyFloorBidValue(submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - err = pipeliner.Set(ctx, keyFloorBidValue, submission.BidTrace.Value.Dec(), expiryBidCache).Err() - if err != nil { - return state, err - } - - // Execute setting the floor bid - _, err = pipeliner.Exec(ctx) - - // Record time needed to update floor - nextTime = time.Now().UTC() - state.TimeUpdateFloor = nextTime.Sub(prevTime) - - return state, err -} - -func (r *RedisCache) _updateTopBid( - ctx context.Context, - pipeliner redis.Pipeliner, - state SaveBidAndUpdateTopBidResponse, - builderBids *BuilderBids, - slot uint64, - parentHash, - proposerPubkey string, - floorValue *big.Int) ( - resp SaveBidAndUpdateTopBidResponse, err error, -) { - r.boltLog.Info("Updating top bid") - - if builderBids == nil { - builderBids, err = NewBuilderBidsFromRedis(ctx, r, pipeliner, slot, parentHash, proposerPubkey) - if err != nil { - return state, err - } - } - - if len(builderBids.bidValues) == 0 { - return state, nil - } - - // Load floor value (if not passed in already) - if floorValue == nil { - floorValue, err = r.GetFloorBidValue(ctx, pipeliner, slot, parentHash, proposerPubkey) - if err != nil { - return state, err - } - } - - topBidBuilder := "" - topBidBuilder, state.TopBidValue = builderBids.getTopBid() - keyBidSource := r.keyLatestBidByBuilder(slot, parentHash, proposerPubkey, topBidBuilder) - - // If floor value is higher than this bid, use floor bid instead - if floorValue.Cmp(state.TopBidValue) == 1 { - state.TopBidValue = floorValue - keyBidSource = r.keyFloorBid(slot, parentHash, proposerPubkey) - } - - // Copy winning bid to top bid cache - keyTopBid := r.keyCacheGetHeaderResponse(slot, parentHash, proposerPubkey) - r.boltLog.Infof("Copying winning bid from %s to %s", keyBidSource, keyTopBid) - c := pipeliner.Copy(context.Background(), keyBidSource, keyTopBid, 0, true) - _, err = pipeliner.Exec(ctx) - if err != nil { - return state, err - } - wasCopied, err := c.Result() - if err != nil { - return state, err - } else if wasCopied == 0 { - return state, fmt.Errorf("could not copy top bid from %s to %s", keyBidSource, keyTopBid) //nolint:goerr113 - } - err = pipeliner.Expire(context.Background(), keyTopBid, expiryBidCache).Err() - if err != nil { - return state, err - } - - state.WasTopBidUpdated = state.PrevTopBidValue == nil || 
state.PrevTopBidValue.Cmp(state.TopBidValue) != 0 - - // 6. Finally, update the global top bid value - keyTopBidValue := r.keyTopBidValue(slot, parentHash, proposerPubkey) - r.boltLog.Info("Updating global top bid value with key", keyTopBid) - err = pipeliner.Set(context.Background(), keyTopBidValue, state.TopBidValue.String(), expiryBidCache).Err() - if err != nil { - return state, err - } - - _, err = pipeliner.Exec(ctx) - return state, err -} - -// GetTopBidValue gets the top bid value for a given slot+parent+proposer combination -func (r *RedisCache) GetTopBidValue(ctx context.Context, pipeliner redis.Pipeliner, slot uint64, parentHash, proposerPubkey string) (topBidValue *big.Int, err error) { - keyTopBidValue := r.keyTopBidValue(slot, parentHash, proposerPubkey) - c := pipeliner.Get(ctx, keyTopBidValue) - _, err = pipeliner.Exec(ctx) - if errors.Is(err, redis.Nil) { - return big.NewInt(0), nil - } else if err != nil { - return nil, err - } - - topBidValueStr, err := c.Result() - if err != nil { - return nil, err - } - topBidValue = new(big.Int) - topBidValue, ok := topBidValue.SetString(topBidValueStr, 10) - if !ok { - return nil, fmt.Errorf("could not set top bid value from %s", topBidValueStr) //nolint:goerr113 - } - return topBidValue, nil -} - -// GetBuilderLatestValue gets the latest bid value for a given slot+parent+proposer combination for a specific builder pubkey. -func (r *RedisCache) GetBuilderLatestValue(slot uint64, parentHash, proposerPubkey, builderPubkey string) (topBidValue *big.Int, err error) { - keyLatestValue := r.keyBlockBuilderLatestBidsValue(slot, parentHash, proposerPubkey) - topBidValueStr, err := r.client.HGet(context.Background(), keyLatestValue, builderPubkey).Result() - if errors.Is(err, redis.Nil) { - return big.NewInt(0), nil - } else if err != nil { - return nil, err - } - topBidValue = new(big.Int) - topBidValue, ok := topBidValue.SetString(topBidValueStr, 10) - if !ok { - return nil, fmt.Errorf("could not set top bid value from %s", topBidValueStr) //nolint:goerr113 - } - return topBidValue, nil -} - -// DelBuilderBid removes a builders most recent bid -func (r *RedisCache) DelBuilderBid(ctx context.Context, pipeliner redis.Pipeliner, slot uint64, parentHash, proposerPubkey, builderPubkey string) (err error) { - // delete the value - keyLatestValue := r.keyBlockBuilderLatestBidsValue(slot, parentHash, proposerPubkey) - err = r.client.HDel(ctx, keyLatestValue, builderPubkey).Err() - if err != nil && !errors.Is(err, redis.Nil) { - return err - } - - // delete the time - keyLatestBidsTime := r.keyBlockBuilderLatestBidsTime(slot, parentHash, proposerPubkey) - err = r.client.HDel(ctx, keyLatestBidsTime, builderPubkey).Err() - if err != nil { - return err - } - - // update bids now to compute current top bid - state := SaveBidAndUpdateTopBidResponse{} //nolint:exhaustruct - _, err = r._updateTopBid(ctx, pipeliner, state, nil, slot, parentHash, proposerPubkey, nil) - return err -} - -// GetFloorBidValue returns the value of the highest non-cancellable bid -func (r *RedisCache) GetFloorBidValue(ctx context.Context, pipeliner redis.Pipeliner, slot uint64, parentHash, proposerPubkey string) (floorValue *big.Int, err error) { - keyFloorBidValue := r.keyFloorBidValue(slot, parentHash, proposerPubkey) - c := pipeliner.Get(ctx, keyFloorBidValue) - - _, err = pipeliner.Exec(ctx) - if errors.Is(err, redis.Nil) { - return big.NewInt(0), nil - } else if err != nil { - return nil, err - } - - topBidValueStr, err := c.Result() - if err != nil { - return nil, err - } - 
floorValue = new(big.Int) - floorValue.SetString(topBidValueStr, 10) - return floorValue, nil -} - -// SetFloorBidValue is used only for testing. -func (r *RedisCache) SetFloorBidValue(slot uint64, parentHash, proposerPubkey, value string) error { - keyFloorBidValue := r.keyFloorBidValue(slot, parentHash, proposerPubkey) - err := r.client.Set(context.Background(), keyFloorBidValue, value, 0).Err() - return err -} - -func (r *RedisCache) NewPipeline() redis.Pipeliner { //nolint:ireturn,nolintlint - return r.client.Pipeline() -} - -func (r *RedisCache) NewTxPipeline() redis.Pipeliner { //nolint:ireturn - return r.client.TxPipeline() -} diff --git a/mev-boost-relay/datastore/redis_test.go b/mev-boost-relay/datastore/redis_test.go deleted file mode 100644 index b16bc5458..000000000 --- a/mev-boost-relay/datastore/redis_test.go +++ /dev/null @@ -1,473 +0,0 @@ -package datastore - -import ( - "context" - "errors" - "math/big" - "sync" - "testing" - "time" - - "github.com/alicebob/miniredis/v2" - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/mev-boost-relay/common" - "github.com/go-redis/redis/v9" - "github.com/holiman/uint256" - "github.com/stretchr/testify/require" -) - -func setupTestRedis(t *testing.T) *RedisCache { - t.Helper() - var err error - - redisTestServer, err := miniredis.Run() - require.NoError(t, err) - redisService, err := NewRedisCache("", redisTestServer.Addr(), "") - // redisService, err := NewRedisCache("", "localhost:6379", "") - require.NoError(t, err) - - return redisService -} - -func TestRedisValidatorRegistration(t *testing.T) { - cache := setupTestRedis(t) - - t.Run("Can save and get validator registration from cache", func(t *testing.T) { - key := common.ValidPayloadRegisterValidator.Message.Pubkey - value := common.ValidPayloadRegisterValidator - pkHex := common.NewPubkeyHex(key.String()) - err := cache.SetValidatorRegistrationTimestamp(pkHex, uint64(value.Message.Timestamp.Unix())) - require.NoError(t, err) - result, err := cache.GetValidatorRegistrationTimestamp(common.NewPubkeyHex(key.String())) - require.NoError(t, err) - require.Equal(t, result, uint64(value.Message.Timestamp.Unix())) - }) - - t.Run("Returns nil if validator registration is not in cache", func(t *testing.T) { - key := phase0.BLSPubKey{} - result, err := cache.GetValidatorRegistrationTimestamp(common.NewPubkeyHex(key.String())) - require.NoError(t, err) - require.Equal(t, uint64(0), result) - }) - - t.Run("test SetValidatorRegistrationTimestampIfNewer", func(t *testing.T) { - key := common.ValidPayloadRegisterValidator.Message.Pubkey - value := common.ValidPayloadRegisterValidator - - pkHex := common.NewPubkeyHex(key.String()) - timestamp := uint64(value.Message.Timestamp.Unix()) - - err := cache.SetValidatorRegistrationTimestampIfNewer(pkHex, timestamp) - require.NoError(t, err) - - result, err := cache.GetValidatorRegistrationTimestamp(common.NewPubkeyHex(key.String())) - require.NoError(t, err) - require.Equal(t, result, timestamp) - - // Try to set an older timestamp (should not work) - timestamp2 := timestamp - 10 - err = cache.SetValidatorRegistrationTimestampIfNewer(pkHex, timestamp2) - require.NoError(t, err) - result, err = 
cache.GetValidatorRegistrationTimestamp(common.NewPubkeyHex(key.String())) - require.NoError(t, err) - require.Equal(t, result, timestamp) - - // Try to set a newer timestamp (should work) - timestamp3 := timestamp + 10 - err = cache.SetValidatorRegistrationTimestampIfNewer(pkHex, timestamp3) - require.NoError(t, err) - result, err = cache.GetValidatorRegistrationTimestamp(common.NewPubkeyHex(key.String())) - require.NoError(t, err) - require.Equal(t, result, timestamp3) - }) -} - -func TestRedisProposerDuties(t *testing.T) { - cache := setupTestRedis(t) - duties := []common.BuilderGetValidatorsResponseEntry{ - { - Slot: 1, - Entry: &builderApiV1.SignedValidatorRegistration{ - Signature: phase0.BLSSignature{}, - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: bellatrix.ExecutionAddress{0x02}, - GasLimit: 5000, - Timestamp: time.Unix(0xffffffff, 0), - Pubkey: phase0.BLSPubKey{}, - }, - }, - }, - } - err := cache.SetProposerDuties(duties) - require.NoError(t, err) - - duties2, err := cache.GetProposerDuties() - require.NoError(t, err) - - require.Len(t, duties2, 1) - require.Equal(t, duties[0].Entry.Message.FeeRecipient, duties2[0].Entry.Message.FeeRecipient) -} - -func TestBuilderBids(t *testing.T) { - versions := []spec.DataVersion{ - spec.DataVersionCapella, - spec.DataVersionDeneb, - } - - for _, version := range versions { - slot := uint64(2) - parentHash := "0x13e606c7b3d1faad7e83503ce3dedce4c6bb89b0c28ffb240d713c7b110b9747" - proposerPubkey := "0x6ae5932d1e248d987d51b58665b81848814202d7b23b343d20f2a167d12f07dcb01ca41c42fdd60b7fca9c4b90890792" - opts := common.CreateTestBlockSubmissionOpts{ - Slot: 2, - ParentHash: parentHash, - ProposerPubkey: proposerPubkey, - Version: version, - } - - trace := &common.BidTraceV2WithBlobFields{ - BidTrace: builderApiV1.BidTrace{ - Value: uint256.NewInt(123), - }, - } - - // Notation: - // - ba1: builder A, bid 1 - // - ba1c: builder A, bid 1, cancellation enabled - // - // test 1: ba1=10 -> ba2=5 -> ba3c=5 -> bb1=20 -> bb2c=22 -> bb3c=12 - // - bApubkey := "0xfa1ed37c3553d0ce1e9349b2c5063cf6e394d231c8d3e0df75e9462257c081543086109ffddaacc0aa76f33dc9661c83" - bBpubkey := "0x2e02be2c9f9eccf9856478fdb7876598fed2da09f45c233969ba647a250231150ecf38bce5771adb6171c86b79a92f16" - - // Setup redis instance - cache := setupTestRedis(t) - - // Helper to ensure writing to redis worked as expected - ensureBestBidValueEquals := func(expectedValue int64, builderPubkey string) { - bestBid, err := cache.GetBestBid(slot, parentHash, proposerPubkey) - require.NoError(t, err) - value, err := bestBid.Value() - require.NoError(t, err) - require.Equal(t, big.NewInt(expectedValue), value.ToBig()) - - topBidValue, err := cache.GetTopBidValue(context.Background(), cache.client.Pipeline(), slot, parentHash, proposerPubkey) - require.NoError(t, err) - require.Equal(t, big.NewInt(expectedValue), topBidValue) - - if builderPubkey != "" { - latestBidValue, err := cache.GetBuilderLatestValue(slot, parentHash, proposerPubkey, builderPubkey) - require.NoError(t, err) - require.Equal(t, big.NewInt(expectedValue), latestBidValue) - } - } - - ensureBidFloor := func(expectedValue int64) { - floorValue, err := cache.GetFloorBidValue(context.Background(), cache.client.Pipeline(), slot, parentHash, proposerPubkey) - require.NoError(t, err) - require.Equal(t, big.NewInt(expectedValue), floorValue) - } - - // deleting a bid that doesn't exist should not error - err := cache.DelBuilderBid(context.Background(), cache.client.Pipeline(), slot, parentHash, proposerPubkey, bApubkey)
- require.NoError(t, err) - - // submit ba1=10 - payload, getPayloadResp, getHeaderResp := common.CreateTestBlockSubmission(t, bApubkey, uint256.NewInt(10), &opts) - resp, err := cache.SaveBidAndUpdateTopBid(context.Background(), cache.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), false, nil, nil) - require.NoError(t, err) - require.True(t, resp.WasBidSaved, resp) - require.True(t, resp.WasTopBidUpdated) - require.True(t, resp.IsNewTopBid) - require.Equal(t, big.NewInt(10), resp.TopBidValue) - ensureBestBidValueEquals(10, bApubkey) - ensureBidFloor(10) - - // deleting ba1 - err = cache.DelBuilderBid(context.Background(), cache.client.Pipeline(), slot, parentHash, proposerPubkey, bApubkey) - require.NoError(t, err) - - // best bid and floor should still exist, because it was the floor bid - ensureBestBidValueEquals(10, "") - ensureBidFloor(10) - - // submit ba2=5 (should not update, because floor is 10) - payload, getPayloadResp, getHeaderResp = common.CreateTestBlockSubmission(t, bApubkey, uint256.NewInt(5), &opts) - resp, err = cache.SaveBidAndUpdateTopBid(context.Background(), cache.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), false, nil, nil) - require.NoError(t, err) - require.False(t, resp.WasBidSaved, resp) - require.False(t, resp.WasTopBidUpdated) - require.False(t, resp.IsNewTopBid) - require.Equal(t, big.NewInt(10), resp.TopBidValue) - ensureBestBidValueEquals(10, "") - ensureBidFloor(10) - - // submit ba3c=5 (should not update, because floor is 10) - payload, getPayloadResp, getHeaderResp = common.CreateTestBlockSubmission(t, bApubkey, uint256.NewInt(5), &opts) - resp, err = cache.SaveBidAndUpdateTopBid(context.Background(), cache.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), true, nil, nil) - require.NoError(t, err) - require.True(t, resp.WasBidSaved) - require.False(t, resp.WasTopBidUpdated) - require.False(t, resp.IsNewTopBid) - require.Equal(t, big.NewInt(10), resp.TopBidValue) - require.Equal(t, big.NewInt(10), resp.PrevTopBidValue) - ensureBestBidValueEquals(10, "") - ensureBidFloor(10) - - // submit bb1=20 - payload, getPayloadResp, getHeaderResp = common.CreateTestBlockSubmission(t, bBpubkey, uint256.NewInt(20), &opts) - resp, err = cache.SaveBidAndUpdateTopBid(context.Background(), cache.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), false, nil, nil) - require.NoError(t, err) - require.True(t, resp.WasBidSaved) - require.True(t, resp.WasTopBidUpdated) - require.True(t, resp.IsNewTopBid) - require.Equal(t, big.NewInt(20), resp.TopBidValue) - ensureBestBidValueEquals(20, bBpubkey) - ensureBidFloor(20) - - // submit bb2c=22 - payload, getPayloadResp, getHeaderResp = common.CreateTestBlockSubmission(t, bBpubkey, uint256.NewInt(22), &opts) - resp, err = cache.SaveBidAndUpdateTopBid(context.Background(), cache.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), true, nil, nil) - require.NoError(t, err) - require.True(t, resp.WasBidSaved) - require.True(t, resp.WasTopBidUpdated) - require.True(t, resp.IsNewTopBid) - require.Equal(t, big.NewInt(22), resp.TopBidValue) - ensureBestBidValueEquals(22, bBpubkey) - ensureBidFloor(20) - - // submit bb3c=12 (should update top bid, using floor at 20) - payload, getPayloadResp, getHeaderResp = common.CreateTestBlockSubmission(t, bBpubkey, uint256.NewInt(12), &opts) - resp, err = cache.SaveBidAndUpdateTopBid(context.Background(), cache.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), 
true, nil, nil) - require.NoError(t, err) - require.True(t, resp.WasBidSaved) - require.True(t, resp.WasTopBidUpdated) - require.False(t, resp.IsNewTopBid) - require.Equal(t, big.NewInt(20), resp.TopBidValue) - ensureBestBidValueEquals(20, "") - ensureBidFloor(20) - } -} - -func TestRedisURIs(t *testing.T) { - t.Helper() - var err error - - redisTestServer, err := miniredis.Run() - require.NoError(t, err) - - // test connection with and without protocol - _, err = NewRedisCache("", redisTestServer.Addr(), "") - require.NoError(t, err) - _, err = NewRedisCache("", redisScheme+redisTestServer.Addr(), "") - require.NoError(t, err) - - // test connection w/ credentials - username := "user" - password := "pass" - redisTestServer.RequireUserAuth(username, password) - fullURL := redisScheme + username + ":" + password + "@" + redisTestServer.Addr() - _, err = NewRedisCache("", fullURL, "") - require.NoError(t, err) - - // ensure malformed URL throws error - malformURL := "http://" + username + ":" + password + "@" + redisTestServer.Addr() - _, err = NewRedisCache("", malformURL, "") - require.Error(t, err) - malformURL = "redis://" + username + ":" + "wrongpass" + "@" + redisTestServer.Addr() - _, err = NewRedisCache("", malformURL, "") - require.Error(t, err) -} - -func TestCheckAndSetLastSlotAndHashDelivered(t *testing.T) { - cache := setupTestRedis(t) - newSlot := uint64(123) - newHash := "0x0000000000000000000000000000000000000000000000000000000000000000" - - // should return redis.Nil if wasn't set - slot, err := cache.GetLastSlotDelivered(context.Background(), cache.NewPipeline()) - require.ErrorIs(t, err, redis.Nil) - require.Equal(t, uint64(0), slot) - - // should be able to set once - err = cache.CheckAndSetLastSlotAndHashDelivered(newSlot, newHash) - require.NoError(t, err) - - // should get slot - slot, err = cache.GetLastSlotDelivered(context.Background(), cache.NewPipeline()) - require.NoError(t, err) - require.Equal(t, newSlot, slot) - - // should get hash - hash, err := cache.GetLastHashDelivered() - require.NoError(t, err) - require.Equal(t, newHash, hash) - - // should fail on a different payload (mismatch block hash) - differentHash := "0x0000000000000000000000000000000000000000000000000000000000000001" - err = cache.CheckAndSetLastSlotAndHashDelivered(newSlot, differentHash) - require.ErrorIs(t, err, ErrAnotherPayloadAlreadyDeliveredForSlot) - - // should not return error for same hash - err = cache.CheckAndSetLastSlotAndHashDelivered(newSlot, newHash) - require.NoError(t, err) - - // should also fail on earlier slots - err = cache.CheckAndSetLastSlotAndHashDelivered(newSlot-1, newHash) - require.ErrorIs(t, err, ErrPastSlotAlreadyDelivered) -} - -// Test_CheckAndSetLastSlotAndHashDeliveredForTesting ensures the optimistic locking works -// i.e. 
running CheckAndSetLastSlotAndHashDelivered leading to err == redis.TxFailedErr -func Test_CheckAndSetLastSlotAndHashDeliveredForTesting(t *testing.T) { - cache := setupTestRedis(t) - newSlot := uint64(123) - hash := "0x0000000000000000000000000000000000000000000000000000000000000000" - n := 3 - - errC := make(chan error, n) - waitC := make(chan bool, n) - syncWG := sync.WaitGroup{} - - // Kick off goroutines, that will all try to set the same slot - for i := 0; i < n; i++ { - syncWG.Add(1) - go func() { - errC <- _CheckAndSetLastSlotAndHashDeliveredForTesting(cache, waitC, &syncWG, newSlot, hash) - }() - } - - syncWG.Wait() - - // Continue first goroutine (should succeed) - waitC <- true - err := <-errC - require.NoError(t, err) - - // Continue all other goroutines (all should return the race error redis.TxFailedErr) - for i := 1; i < n; i++ { - waitC <- true - err := <-errC - require.ErrorIs(t, err, redis.TxFailedErr) - } - - // Any later call with a different hash should return ErrPayloadAlreadyDeliveredForSlot - differentHash := "0x0000000000000000000000000000000000000000000000000000000000000001" - err = _CheckAndSetLastSlotAndHashDeliveredForTesting(cache, waitC, &syncWG, newSlot, differentHash) - waitC <- true - require.ErrorIs(t, err, ErrAnotherPayloadAlreadyDeliveredForSlot) -} - -func _CheckAndSetLastSlotAndHashDeliveredForTesting(r *RedisCache, waitC chan bool, wg *sync.WaitGroup, slot uint64, hash string) (err error) { - // copied from redis.go, with added channel and waitgroup to test the race condition in a controlled way - txf := func(tx *redis.Tx) error { - lastSlotDelivered, err := tx.Get(context.Background(), r.keyLastSlotDelivered).Uint64() - if err != nil && !errors.Is(err, redis.Nil) { - return err - } - - if slot < lastSlotDelivered { - return ErrPastSlotAlreadyDelivered - } - - if slot == lastSlotDelivered { - lastHashDelivered, err := tx.Get(context.Background(), r.keyLastHashDelivered).Result() - if err != nil && !errors.Is(err, redis.Nil) { - return err - } - if hash != lastHashDelivered { - return ErrAnotherPayloadAlreadyDeliveredForSlot - } - return nil - } - - wg.Done() - <-waitC - - _, err = tx.TxPipelined(context.Background(), func(pipe redis.Pipeliner) error { - pipe.Set(context.Background(), r.keyLastSlotDelivered, slot, 0) - pipe.Set(context.Background(), r.keyLastHashDelivered, hash, 0) - return nil - }) - - return err - } - - return r.client.Watch(context.Background(), txf, r.keyLastSlotDelivered) -} - -func TestGetBuilderLatestValue(t *testing.T) { - cache := setupTestRedis(t) - - slot := uint64(123) - parentHash := "0x13e606c7b3d1faad7e83503ce3dedce4c6bb89b0c28ffb240d713c7b110b9747" - proposerPubkey := "0x6ae5932d1e248d987d51b58665b81848814202d7b23b343d20f2a167d12f07dcb01ca41c42fdd60b7fca9c4b90890792" - builderPubkey := "0xfa1ed37c3553d0ce1e9349b2c5063cf6e394d231c8d3e0df75e9462257c081543086109ffddaacc0aa76f33dc9661c83" - - // With no bids, should return "0". - v, err := cache.GetBuilderLatestValue(slot, parentHash, proposerPubkey, builderPubkey) - require.NoError(t, err) - require.Equal(t, "0", v.String()) - - // Set a bid of 1 ETH. 
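Editor's note: the race exercised above comes from the WATCH-based optimistic locking in CheckAndSetLastSlotAndHashDelivered. The sketch below is a minimal, generic version of that pattern with go-redis v9 (the key name, the address, and the set-if-higher semantics are illustrative, not the relay's actual keys): if another client writes the watched key between the read and the transactional write, Watch returns redis.TxFailedErr and the operation can be retried.

    package main

    import (
    	"context"
    	"errors"
    	"fmt"

    	"github.com/go-redis/redis/v9"
    )

    // setIfHigher stores slot under key only if it is higher than the current
    // value, using the read-inside-Watch / write-inside-TxPipelined pattern.
    func setIfHigher(ctx context.Context, rdb *redis.Client, key string, slot uint64) error {
    	txf := func(tx *redis.Tx) error {
    		current, err := tx.Get(ctx, key).Uint64()
    		if err != nil && !errors.Is(err, redis.Nil) {
    			return err
    		}
    		if slot <= current {
    			return nil // nothing to do
    		}
    		_, err = tx.TxPipelined(ctx, func(pipe redis.Pipeliner) error {
    			pipe.Set(ctx, key, slot, 0)
    			return nil
    		})
    		return err
    	}
    	return rdb.Watch(ctx, txf, key)
    }

    func main() {
    	rdb := redis.NewClient(&redis.Options{Addr: "localhost:6379"}) // placeholder address
    	err := setIfHigher(context.Background(), rdb, "example:last-slot", 123)
    	if errors.Is(err, redis.TxFailedErr) {
    		fmt.Println("lost the race, retry if needed")
    	} else if err != nil {
    		fmt.Println("error:", err)
    	}
    }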
- newVal, err := uint256.FromDecimal("1000000000000000000") - require.NoError(t, err) - getHeaderResp := &builderSpec.VersionedSignedBuilderBid{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SignedBuilderBid{ - Message: &builderApiCapella.BuilderBid{ - Value: newVal, - }, - }, - } - - _, err = cache.client.TxPipelined(context.Background(), func(pipeliner redis.Pipeliner) error { - return cache.SaveBuilderBid(context.Background(), pipeliner, slot, parentHash, proposerPubkey, builderPubkey, time.Now().UTC(), getHeaderResp) - }) - require.NoError(t, err) - - // Check new string. - v, err = cache.GetBuilderLatestValue(slot, parentHash, proposerPubkey, builderPubkey) - require.NoError(t, err) - require.Zero(t, v.Cmp(newVal.ToBig())) -} - -func TestPipelineNilCheck(t *testing.T) { - cache := setupTestRedis(t) - f, err := cache.GetFloorBidValue(context.Background(), cache.NewPipeline(), 0, "1", "2") - require.NoError(t, err) - require.Equal(t, big.NewInt(0), f) -} - -// func TestPipeline(t *testing.T) { -// cache := setupTestRedis(t) - -// key1 := "test1" -// key2 := "test123" -// val := "foo" -// err := cache.client.Set(context.Background(), key1, val, 0).Err() -// require.NoError(t, err) - -// _, err = cache.client.TxPipelined(context.Background(), func(pipeliner redis.Pipeliner) error { -// c := tx.Get(context.Background(), key1) -// _, err := tx.Exec(context.Background()) -// require.NoError(t, err) -// str, err := c.Result() -// require.NoError(t, err) -// require.Equal(t, val, str) - -// err = tx.Set(context.Background(), key2, val, 0).Err() -// require.NoError(t, err) -// return nil -// }) -// require.NoError(t, err) - -// str, err := cache.client.Get(context.Background(), key2).Result() -// require.NoError(t, err) -// require.Equal(t, val, str) -// } diff --git a/mev-boost-relay/datastore/utils.go b/mev-boost-relay/datastore/utils.go deleted file mode 100644 index d95072086..000000000 --- a/mev-boost-relay/datastore/utils.go +++ /dev/null @@ -1,52 +0,0 @@ -package datastore - -import ( - "context" - "errors" - "math/big" - - "github.com/go-redis/redis/v9" -) - -// BuilderBids supports redis.SaveBidAndUpdateTopBid -type BuilderBids struct { - bidValues map[string]*big.Int -} - -func NewBuilderBidsFromRedis(ctx context.Context, r *RedisCache, pipeliner redis.Pipeliner, slot uint64, parentHash, proposerPubkey string) (*BuilderBids, error) { - keyBidValues := r.keyBlockBuilderLatestBidsValue(slot, parentHash, proposerPubkey) - c := pipeliner.HGetAll(ctx, keyBidValues) - _, err := pipeliner.Exec(ctx) - if err != nil && !errors.Is(err, redis.Nil) { - return nil, err - } - - bidValueMap, err := c.Result() - if err != nil { - return nil, err - } - return NewBuilderBids(bidValueMap), nil -} - -func NewBuilderBids(bidValueMap map[string]string) *BuilderBids { - b := BuilderBids{ - bidValues: make(map[string]*big.Int), - } - for builderPubkey, bidValue := range bidValueMap { - b.bidValues[builderPubkey] = new(big.Int) - b.bidValues[builderPubkey].SetString(bidValue, 10) - } - return &b -} - -func (b *BuilderBids) getTopBid() (string, *big.Int) { - topBidBuilderPubkey := "" - topBidValue := big.NewInt(0) - for builderPubkey, bidValue := range b.bidValues { - if bidValue.Cmp(topBidValue) > 0 { - topBidValue = bidValue - topBidBuilderPubkey = builderPubkey - } - } - return topBidBuilderPubkey, topBidValue -} diff --git a/mev-boost-relay/docker-compose.yml b/mev-boost-relay/docker-compose.yml deleted file mode 100644 index a8071dd4a..000000000 --- a/mev-boost-relay/docker-compose.yml 
+++ /dev/null @@ -1,40 +0,0 @@ -version: '3.1' - -volumes: - psql_data: - driver: local - -services: - redis: - image: redis - restart: always - ports: - - '6379:6379' - - memcached: - image: memcached - restart: always - ports: - - '11211:11211' - - db: - image: postgres - restart: always - volumes: - - 'psql_data:/var/lib/postgresql/data' - ports: - - '5432:5432' - environment: - POSTGRES_USER: postgres - POSTGRES_PASSWORD: postgres - POSTGRES_DB: postgres - - adminer: - image: adminer - restart: always - depends_on: - - db - ports: - - '8093:8080' - environment: - ADMINER_PLUGINS: tables-filter tinymce diff --git a/mev-boost-relay/docs/docs/20220822-audit.md b/mev-boost-relay/docs/docs/20220822-audit.md deleted file mode 100644 index e3ac14dcc..000000000 --- a/mev-boost-relay/docs/docs/20220822-audit.md +++ /dev/null @@ -1,153 +0,0 @@ -# MEV-Boost-Relay Security Assessment - -Auditors: [lotusbumi](https://github.com/lotusbumi) & [sanatorxd](https://github.com/sanatorxd) - -Start date: 2022-08-22 - -MEV-Boost-Relay Security assessment for the Flashbots Collective --- - -## System overview - - -[The mev-boost relay software](https://github.com/flashbots/mev-boost-relay) is one of the first implementations of the new proposer/builder block building separation in Ethereum. It presents API endpoints for: - -* Block building. -* Validator registration and retrieval of headers and payloads. -* Historical data of block building and registrations. - - -The relay repository consists of several components that are designed to run and scale independently: - -1. [Housekeeper](https://github.com/flashbots/mev-boost-relay/tree/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/housekeeper): update known validators, proposer duties. Soon: save metrics, etc. -2. [API](https://github.com/flashbots/mev-boost-relay/tree/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api): REST API with Redis, Postgres and memory datastore. -3. [Website](https://github.com/flashbots/mev-boost-relay/tree/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/website): handles the root website requests (information is pulled from the API). - ---- - -## Findings - -### Critical - -None. - -### High - -None. - -### Medium - -None. - -### Low - -#### Data race - -A data race occurs when one thread accesses a mutable object while another thread is writing to it. - -When the `api.headSlot` value is modified in the [`service.go` file](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api/service.go#L382), it can also be read by [another goroutine](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api/service.go#L294). - -Data races lead to unexpected behavior and potential crashes. - -Consider making use of a synchronization library to allow atomic modifications.
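For illustration, a minimal sketch of the kind of change this finding points at, using the standard library's `sync/atomic`; the `headSlot` field is taken from the finding, while the surrounding struct and method names are simplified stand-ins, not the relay's actual code:

```go
package api

import "sync/atomic"

// RelayAPI is a simplified stand-in for the real service struct.
type RelayAPI struct {
	// headSlot is written by the head-event goroutine and read by request
	// handlers, so it is wrapped in an atomic instead of a plain uint64.
	headSlot atomic.Uint64
}

// updateHeadSlot is called when a new head event arrives.
func (api *RelayAPI) updateHeadSlot(slot uint64) {
	api.headSlot.Store(slot)
}

// currentHeadSlot can safely be called concurrently from request handlers.
func (api *RelayAPI) currentHeadSlot() uint64 {
	return api.headSlot.Load()
}
```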
- -#### Incorrect Redis keys - -When a new Redis cache [is created](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/datastore/redis.go#L55-L75), the `keyKnownValidators` and `keyValidatorRegistrationTimestamp` keys are interchanged so that each one points to the incorrect key when saving data to the Redis instance. - -As the keys in which we later [get](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/datastore/redis.go#L136) and [set](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/datastore/redis.go#L154) are wrong, most of the functionality is interchanged between the getters and setters of each value. - -Consider fixing the error so that getters and setters work as expected. - -#### Nil dereference panics - -During the execution of the [`handleSubmitNewBlock`](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api/service.go#L702) and the [`handleGetPayload`](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api/service.go#L618) functions of the `service.go` file, the contents of the request JSON body will be decoded with [`json.NewDecoder`](https://pkg.go.dev/encoding/json#NewDecoder). - -However, `{}` can be sent as valid input; it will be parsed, and execution continues until an element of the payload (whose value is `nil`) [is accessed](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api/service.go#L627). - -This issue can be replicated with the following commands: - -* `curl -i -s -k -X 'POST' -H 'Host: localhost:9062' -H 'Content-Length: 6' --data-binary $'{}' http://localhost:9062/relay/v1/builder/blocks` -* `curl -i -s -k -X 'POST' -H 'Host: localhost:9062' -H 'Content-Length: 6' --data-binary $'{}' http://localhost:9062/eth/v1/builder/blinded_blocks` - -Consider validating the user input to fix this behavior. - -#### Use of Redis non-performant command - -The Redis command `HGetAll` is being used in the [`GetKnownValidators` function](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/datastore/redis.go) of the `redis.go` file. This command returns all keys and values for a given hash. - -As explained in [this blog entry of the official Redis blog](https://redis.com/blog/7-redis-worst-practices/), unbounded returns can be a problem if the amount of information saved is increased in the future. - -As a data point, the retrieval of Goerli validators data with only one key is already taking approximately 1.65 seconds to load. - -Consider analyzing a long-term strategy on how to paginate or cache this call. - - -#### Missing use of transactions in postgresql - -During the `SaveBuilderBlockSubmission` function in the [`database.go` file](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/database/database.go), two inserts are made into different tables of the database. - -These operations are performed in two steps and are not atomic. If one of them fails, the other will be executed anyway, leaving the database in an inconsistent state. - -In the world of databases, a transaction is a single unit of logic or work, made up of multiple operations. - -Consider making use of the `sqlx` transaction system: `sdb.Begin()` followed by `tx.Exec` and finally `tx.Commit()`.
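As an illustration of that suggestion, a minimal sketch of wrapping both inserts in one transaction with `sqlx`; the function, table and column names below are placeholders, not the relay's actual schema:

```go
package database

import "github.com/jmoiron/sqlx"

// saveSubmissionAtomically sketches the suggested fix: both inserts either
// commit together or roll back together.
func saveSubmissionAtomically(db *sqlx.DB, payloadJSON, submissionJSON []byte) error {
	tx, err := db.Begin()
	if err != nil {
		return err
	}
	defer tx.Rollback() // no-op once Commit has succeeded

	if _, err := tx.Exec(`INSERT INTO execution_payload (payload) VALUES ($1)`, payloadJSON); err != nil {
		return err
	}
	if _, err := tx.Exec(`INSERT INTO builder_block_submission (submission) VALUES ($1)`, submissionJSON); err != nil {
		return err
	}
	return tx.Commit()
}
```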
- -### Notes - - - -#### Library text/template vulnerable to XSS in use - -The application makes use of the `text/template` package in the [website.go file](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/website/website.go), which is vulnerable to XSS. - -The [`html/template`](https://pkg.go.dev/html/template) package sanitizes external content before it is reflected in the templates, ensuring the correct encoding of untrusted inputs. - -This is not exploitable in the current system, but could become exploitable if the information is saved to the database through another service that allows unsanitized input. - -#### HTTP Client improvements - -Some opportunities for improvement were found in the use of `http.Client`, even though this part of the code is limited to testing. - -In particular, the affected files are: - -* [util.go](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/beaconclient/util.go) from the `beaconclient` package. -* [util.go](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/common/utils.go) from `common`. - -It is encouraged to ensure that the client: - - * Does not follow redirects. Use `CheckRedirect` to [handle the redirects or prevent following them](https://blog.logrocket.com/configuring-the-go-http-client/). - * Uses the timeout parameter to ensure that the client doesn't hang waiting for a slow server. - -#### JSON Decoder allows extra information to be loaded in memory - -In the `API` package, in the file [service.go](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/services/api/service.go), the functions `handleRegisterValidator`, `handleGetHeader` and `handleBuilderGetValidators` process request payloads with a `Decoder` without making use of the [`DisallowUnknownFields`](https://pkg.go.dev/encoding/json#Decoder.DisallowUnknownFields) function, which would allow the `Decoder` to return an error when the destination is a struct and the input contains object keys which do not match any non-ignored, exported fields in the destination. - -The usage of `DisallowUnknownFields` is recommended to avoid loading invalid input into memory and consuming resources decoding it.
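As a sketch of that recommendation (the helper name is illustrative, not part of the codebase):

```go
package api

import (
	"encoding/json"
	"net/http"
)

// decodeJSONStrict decodes a request body and returns an error for payloads
// that contain unknown fields, instead of silently dropping them.
func decodeJSONStrict(r *http.Request, dst any) error {
	dec := json.NewDecoder(r.Body)
	dec.DisallowUnknownFields()
	return dec.Decode(dst)
}
```

Handlers can then call `decodeJSONStrict(r, &payload)` and reject the request with a 400 status if it returns an error.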
- -#### Docker compose file with hardcoded trivial password - -The PostgreSQL service is being set up through a [docker-compose.yml](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/docker-compose.yml) file, which stores the credentials in plain text. Any person with access to the public GitHub repository will know the default credentials used to set up other users' environments, which may end up exposed to the internet, providing access to the relayer's database. - -In order to avoid the use of default credentials in plain text, use `.env` files to set the credentials for the database service. Consider adding the `.env` file to the `.gitignore` list. - -#### Docker compose file uses Redis without authentication - -The Redis service is being set up through a [docker-compose.yml](https://github.com/flashbots/mev-boost-relay/blob/fdb359fa6b6a7f96d37fb1f8cabb02c3868f965f/docker-compose.yml) file, which does not set the `--requirepass` parameter to require authentication. Without authentication, any user with access to the server can access all data stored in the Redis service. - -The Docker compose file should be configured to use the `--requirepass` option in order to set a password, and to read it from an `.env` file. - -#### Insecure postgresql connection string passed as a console parameter - -The connection string for PostgreSQL is set through the terminal using the [`db` switch](https://github.com/flashbots/mev-boost-relay/blob/9d6b43c5a57fafe723959ffbe76b9745946f9b3a/cmd/api.go#L43). Setting the value through the console will save this information in `.bash_history` or other system files in plain text format, allowing an individual to get access to the database information provided they have access to the server where the relayer is running. - -Consider providing this value through an environment variable, a key vault or a protected config file. - -#### Unsound implementation of `GetIPXForwardedFor` - -The function `GetIPXForwardedFor` in the [utils.go file](https://github.com/flashbots/mev-boost-relay/blob/main/common/utils.go) is used to log a validator's IP. - -As privacy concerns around the validator's identity and location are a problem for the Ethereum network, given that a malicious validator could use this information to attack other validators' infrastructure, it is recommended not to log or save this information. - -Furthermore, even if the intention is to log these IPs, `r.RemoteAddr` should be used instead of unfiltered client-supplied data. diff --git a/mev-boost-relay/docs/docs/20230602-recent-performance-improvements.md b/mev-boost-relay/docs/docs/20230602-recent-performance-improvements.md deleted file mode 100644 index 806548dc5..000000000 --- a/mev-boost-relay/docs/docs/20230602-recent-performance-improvements.md +++ /dev/null @@ -1,48 +0,0 @@ -# Recent performance improvements in the mev-boost-relay codebase - -2023-06-02, by [@metachris](https://twitter.com/metachris) - -See also [part 2 of the performance improvements documentation](20230605-more-redis-performance-improvements.md). - ---- - -There have been a number of significant performance improvements in the mev-boost-relay codebase recently, and I wanted to shine some light on them and add more details and context about the nature of the changes as well as about the impact. - -### Fast-track simulation queue - -Bids with the highest value so far can get fast-tracked: https://github.com/flashbots/mev-boost-relay/pull/361 - -### Skipping block submissions which are below the bid floor - -The bid floor is the highest non-cancellable bid. The top bid value cannot be lower than that going forward. - -- Skipping non-cancellable bids: https://github.com/flashbots/mev-boost-relay/pull/399 -- Skipping cancellable bids: https://github.com/flashbots/mev-boost-relay/pull/401 - -### Redis improvements - -- More Redis options (`connPoolSize`, `minIdleConns`, `readTimeout`, `poolTimeout`, `writeTimeout`): https://github.com/flashbots/mev-boost-relay/pull/419 -- Using a Redis pipeline for most requests during block submission: https://github.com/flashbots/mev-boost-relay/pull/435 -- After investigating Redis `SLOWLOG`, there were two groups of Redis interactions causing the most latency: active validators and known validators (see [`slowlog` output here](https://gist.github.com/metachris/d44fe901cc28f3997d0f2fc234db2b9c)).
Improvements with significant impact: - - Disable 'active validators': https://github.com/flashbots/mev-boost-relay/pull/439 - - Getting known validators directly from the CL client instead of through Redis: https://github.com/flashbots/mev-boost-relay/pull/440 -- For reference, Flashbots runs Redis in Elasticache, with 2 nodes of type `cache.m6g.4xlarge` (we'll look into downscaling to a smaller instance type after these improvements) - --- - -**These changes resulted in both a significant drop in simulations and reduced Redis load.** - -Drop in Redis load: - -![Redis stats 1](../images/redis-stats-1.png) -![Redis stats 2](../images/redis-stats-2.png) - -This table shows the number of skipped block submissions versus those that were simulated. On average we can skip around 400-700 submissions per slot, while simulating 100-200: - -![Skipped simulations](../images/table-skipped-simulations.png) - -As an additional relevant data point, these are the average durations of successful block submissions at the Flashbots relay (full request from start of reading until writing the response): - -![Submission duration](../images/request-duration-successful-block-submission.png) - -The big spikes correlate with blocks containing rollup transactions, which are much larger and increase the simulation delay. diff --git a/mev-boost-relay/docs/docs/20230605-more-redis-performance-improvements.md b/mev-boost-relay/docs/docs/20230605-more-redis-performance-improvements.md deleted file mode 100644 index 58b1a3d75..000000000 --- a/mev-boost-relay/docs/docs/20230605-more-redis-performance-improvements.md +++ /dev/null @@ -1,31 +0,0 @@ -# More Redis performance improvements in the mev-boost-relay codebase - -2023-06-05, by [@metachris](https://twitter.com/metachris) - -See also [part 1 of the performance improvements documentation](20230602-recent-performance-improvements.md).
- ---- - -There have been a few additional significant Redis performance improvements that are worth highlighting: - -- [More Redis pipelining for block submissions - PR #447](https://github.com/flashbots/mev-boost-relay/pull/447) -- [Redis: save ExecPayload in SSZ format - PR #448](https://github.com/flashbots/mev-boost-relay/pull/448) - -_Shoutout to Benjamin Hunter from Bloxroute who first shared the Redis improvements in [mev-relay#3](https://github.com/bloXroute-Labs/mev-relay/pull/3)._ - -These changes significantly reduce Redis load, which the following screenshots show in detail: - -![Redis stats 3](../images/redis-stats-3.png) -![Redis stats 4](../images/redis-stats-4.png) -![Redis stats 5](../images/redis-stats-5.png) -![Redis stats 7](../images/redis-stats-7.png) -![Redis stats 8](../images/redis-stats-8.png) - -Additionally, there's a big impact to the performance of fast-tracked submissions in this PR: - -- [Don't fast-track large block submissions - PR #451](https://github.com/flashbots/mev-boost-relay/pull/451) - -Impact on overall fast-track request duration: - -![Request duration 2](../images/request-duration-2.png) -![Request duration 3](../images/request-duration-3.png) diff --git a/mev-boost-relay/docs/docs/20230614-postgres-database-migration-guide.md b/mev-boost-relay/docs/docs/20230614-postgres-database-migration-guide.md deleted file mode 100644 index d9b149d87..000000000 --- a/mev-boost-relay/docs/docs/20230614-postgres-database-migration-guide.md +++ /dev/null @@ -1,465 +0,0 @@ -# MEV-Boost-Relay Database Migration Guide - -2023-06-14, by [@0x416e746f6e](https://github.com/0x416e746f6e), [@metachris](https://twitter.com/metachris) - ---- - -`mev-boost-relay` stores the payloads for all builder submissions in the -Postgres database, in addition to Redis, and the database storage is also used -as data availability fallback in case Redis cannot retrieve the payload. - -Payloads are quite big, typically a few hundred kilobytes, with a few hundred -submissions per slot. This can make the database grow rapidly to many terabytes -of storage, which in turn adds significant operating costs for the Postgres -database service. - -There are several approaches to deal with the Postgres payload storage, and to -avoid storage growth: - -1. Truncating the table `mainnet_execution_payload` regularly (possibly - archiving the payloads to a secondary, cheaper long-term storage). - -2. Not storing the payloads in the database at all, which can be configured - through the `DISABLE_PAYLOAD_DATABASE_STORAGE` environment variable. In this - case, it’s strongly advised to enable Memcached as secondary payload storage. - -Cloud providers like AWS and Google Cloud don’t allow downscaling database -storage sizes of their managed Postgres services. Therefore, if you want to -reduce the costs by downscaling storage, you’ll need to migrate the data to a -new database. - -This guide will help you with that. 
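As an illustration of option 1 above (regularly truncating `mainnet_execution_payload`), a minimal SQL sketch; copying into a side table and the `_archive` table name are assumptions for illustration, not part of the relay's tooling:

```sql
-- Keep a copy of the current payload rows (this could also be a dump to
-- cheaper long-term storage instead), then reclaim the space.
CREATE TABLE mainnet_execution_payload_archive AS
    SELECT * FROM mainnet_execution_payload;

TRUNCATE TABLE mainnet_execution_payload;
```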
- --- - -Approaches we tried: - -- [AWS DMS](https://aws.amazon.com/dms/) (which is [Qlik Replicate](https://www.qlik.com/us/products/qlik-replicate) under the hood, if we are any good at searching the internet for error messages) - -- [pgsync](https://github.com/ankane/pgsync) - -- [pgcopydb](https://github.com/dimitri/pgcopydb) - -None of the above (and other less noteworthy) options worked as expected: - -- DMS would seem to work in the beginning, but after a few hours of running it would start to yield some very cryptic error messages, the solutions to which would recommend tweaking Qlik's configuration parameters (to which we obviously did not have access, as they are hidden behind the AWS console). - -- `pgsync`/`pgcopydb` were found to be not mature enough (not yet at least) to deal with the amount of data updates that their respective [CDC](https://www.qlik.com/us/change-data-capture/cdc-change-data-capture) solutions would have to cope with while migrating our instance. - -That is why we opted to come up with our own (a bit creative, but workable) solution. - -## TL;DR - -Therefore, the idea of the migration was as follows: - -1. Spin up a new PSQL instance (with the desired initial storage size). - -2. Copy the schema from the old to the new instance. - -3. Transfer the big tables (there were 3 of them) using batch-by-batch `COPY TO`/`COPY FROM` statements. (In order from bigger to smaller tables, as the largest table took several days to transfer.) - -4. Transfer the rest of the data using just `pg_dump`. - -5. Transfer what had accumulated in the 3 largest tables between steps 3 and 4. - -6. Update sequences on the new DB so that they would begin not from `1` but from a deliberately big number (the next power of 10 above the current latest `id`). - -7. Stop the services that write into the old DB → switch them to the new instance → start those services again. - -8. Switch the read-only services to the new DB (and restart them too). - -9. Back-fill whatever new inserts accumulated in the old DB between the moment data was last pumped and the moment of the switch-over. - -The approach we used worked as expected because: - -- All of `mev-boost-relay`'s significant tables use PSQL's auto-incrementing primary keys (which are in fact [PSQL's sequences](https://www.postgresql.org/docs/current/sql-createsequence.html) under the hood). - -- All updates to the existing records (e.g. `xxx_blockbuilder.last_submission_slot`) are of a temporal nature. This means that if we miss migrating an update made on the old instance, it's not a big deal, as *eventually* there will be a next update to the same record in the *new* instance that will "make things all-right". - -Below we provide a bunch of scripts and queries that our fellow peers might find handy should they want to migrate away from a huge PSQL instance and save some costs. - -> **Disclaimer:** -> -> Do not trust this blindly! An experimental migration on some non-critical -> instance is highly advised (e.g. if you run something in goerli, that would be -> a good candidate). - -## License - -All scripts/queries in this article come with the MIT license.
- -```text -Copyright (c) 2023 Flashbots - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -``` - -## Queries - -`active-sessions.sql` - -Helps to track current sessions to old/new instance (to make sure that all -writing services are indeed migrated). - -```sql -select - client_addr, - string_agg(datname || '(' || pids || ')', ', ') as dbs -from ( - select - client_addr, - datname, - string_agg(pid::varchar, ',') as pids - from pg_stat_activity - where - datname is not null and datname != 'rdsadmin' - and client_addr != 'xxx.yyy.zzz.nnn' -- Put your jumphost IP here - group by client_addr, datname - order by client_addr -) as t -group by client_addr -order by client_addr; -``` - -`alter-sequence.sql` - -Query that generates a few other queries that should be ran on the *target* -instance to update the sequences’ current values. - -Note the `power(10, ceil(log(last_value)))` bit. If you find it a bit wasteful, -you can change the logic to something else (e.g. do simple `+ 10000`). Just make -sure to avoid collisions when backfilling the records from old instance. 
- -```sql -select - 'alter sequence ' || sequence_name || ' start ' || new_start || '; select setval(''' || sequence_name || ''', ' || new_start || ', false);' as alter_statement -from ( - select *, - power(10, ceil(log(last_value))) as new_start - from ( - select *, - pg_sequence_last_value(sequence_name) as last_value - from ( - select table_name, column_name, - pg_get_serial_sequence(table_name, column_name) as sequence_name - from information_schema.columns - where table_schema = 'public' - and pg_get_serial_sequence(table_name, column_name) is not null - ) as t - ) as t - where last_value is not null -) as t -order by 1; -``` - -## Scripts - -With the three scripts below the whole migration can be expressed like: - -```bash -# Migrate the schema - -mkdir -p ./schema - -PGSRCDB=[SOURCE_DB_ID] ./get-schema.sh > ./schema/boostrelay.sql -PGDSTDB=[TARGET_DB_ID] ./put.sh < ./schema/boostrelay.sql - -# Migrate the large tables - -mkdir -p ./log - -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_builder_block_submission 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_builder_block_submission.log -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_payload_delivered 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_payload_delivered.log -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_validator_registration 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_validator_registration.log - -# Backfill what was added to large tables in the mean time - -PGDSTDB=[TARGET_DB_ID] ./get-start-id.sh mainnet_validator_registration > ./.temp/mainnet_validator_registration.cur -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_validator_registration 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_validator_registration.log - -PGDSTDB=[TARGET_DB_ID] ./get-start-id.sh mainnet_payload_delivered > ./.temp/mainnet_payload_delivered.cur -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_payload_delivered 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_payload_delivered.log - -PGDSTDB=[TARGET_DB_ID] ./get-start-id.sh mainnet_builder_block_submission > ./.temp/mainnet_builder_block_submission.cur -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_builder_block_submission 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_builder_block_submission.log - -# Migrate the rest of the tables - -time ( PGSRCDB=[SOURCE_DB_ID] ./get-data-with-copy.sh mainnet_builder_demotions | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_builder_demotions.log -time ( PGSRCDB=[SOURCE_DB_ID] ./get-data-with-copy.sh mainnet_execution_payload | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_execution_payload.log -time ( PGSRCDB=[SOURCE_DB_ID] ./get-data-with-copy.sh mainnet_migrations | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_migrations.log -time ( PGSRCDB=[SOURCE_DB_ID] ./get-data-with-copy.sh mainnet_blockbuilder | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_blockbuilder.log -time ( PGSRCDB=[SOURCE_DB_ID] ./get-data-with-copy.sh mainnet_too_late_get_payload | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_too_late_get_payload.log - -### Do the switch here ### - -# Backfill what was added to large tables in between the switch - -PGDSTDB=[TARGET_DB_ID] ./get-start-id.sh mainnet_validator_registration > ./.temp/mainnet_validator_registration.cur -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_validator_registration 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_validator_registration.log - -PGDSTDB=[TARGET_DB_ID] 
./get-start-id.sh mainnet_payload_delivered > ./.temp/mainnet_payload_delivered.cur -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_payload_delivered 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_payload_delivered.log - -PGDSTDB=[TARGET_DB_ID] ./get-start-id.sh mainnet_builder_block_submission > ./.temp/mainnet_builder_block_submission.cur -time ( PGSRCDB=[SOURCE_DB_ID] ./batch-load.sh mainnet_builder_block_submission 10000 | PGDSTDB=[TARGET_DB_ID] ./put.sh ) 2> ./log/mainnet_builder_block_submission.log - -``` - --- - -`put.sh` - -The workhorse that feeds the data into the target DB. The `PGPASSWORD` env var (not shown here) should be set to the password used by both the old and the new instance. - -```bash -#!/bin/bash - -if [[ -z "${PGDSTHOST}" ]]; then echo "Missing PGDSTHOST"; exit 1; fi -if [[ -z "${PGDSTDB}" ]]; then echo "Missing PGDSTDB"; exit 1; fi - -# shellcheck disable=SC2312 -cat - | >&2 psql \ - --host "${PGDSTHOST}" \ - --dbname "${PGDSTDB}" \ - --username postgres -``` - --- - -`get-schema.sh` - -Retrieves just the schema from the source DB. (Note: you might need to transfer all the users/roles by hand first.) - -```bash -#!/bin/bash - -if [[ -z "${PGSRCHOST}" ]]; then echo "Missing PGSRCHOST"; exit 1; fi -if [[ -z "${PGSRCDB}" ]]; then echo "Missing PGSRCDB"; exit 1; fi - -if [[ -z "$1" ]]; then - pg_dump \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --clean --if-exists \ - --no-owner \ - --schema-only \ - --verbose -else - pg_dump \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --clean --if-exists \ - --no-owner \ - --schema-only \ - --table "$1" \ - --verbose -fi -``` - --- - -`get-data-with-copy.sh` - -Convenience wrapper around `pg_dump` to generate bulk-load `COPY` statement(s). - -```bash -#!/bin/bash - -if [[ -z "${PGSRCHOST}" ]]; then echo "Missing PGSRCHOST"; exit 1; fi -if [[ -z "${PGSRCDB}" ]]; then echo "Missing PGSRCDB"; exit 1; fi - -if [[ -z "$1" ]]; then - pg_dump \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --blobs \ - --data-only \ - --verbose -else - pg_dump \ - --compress 0 \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --blobs \ - --data-only \ - --table "$1" \ - --verbose -fi -``` - --- - -`batch-load.sh` - -The most complicated script. Takes two parameters: the table to transfer, and the batch size (how many records per go). There will be a file at `./.temp/<table>.cur` that you can use to track progress, or edit to restart the transfer from particular records (e.g. when back-filling).
- -```bash -#!/bin/bash - -set -e -o pipefail - -if [[ -z "${PGSRCHOST}" ]]; then echo "Missing PGSRCHOST"; exit 1; fi -if [[ -z "${PGSRCDB}" ]]; then echo "Missing PGSRCDB"; exit 1; fi -if [[ -z "$1" ]]; then echo "Missing table name and step size"; exit 1; fi -if [[ -z "$2" ]]; then echo "Missing step size"; exit 1; fi - -table="$1" -step="$2" - -mkdir -p ./.temp - -max_id=$( - psql \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --tuples-only \ - --command "select max(id) from ${table};" -) - -if [[ -f "./.temp/${table}.cur" ]]; then - cur_id=$( cat "./.temp/${table}.cur" ) -else - cur_id=$( - psql \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --tuples-only \ - --command "select min(id) from ${table};" - ) -fi - -while [[ ${cur_id} -le ${max_id} ]]; do - # Get the batch of data - query="select * from ${table} where id between ${cur_id} and $(( cur_id + step - 1 ))" - >&2 printf "\n%s;" "${query}" - if ! time psql \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --command "copy ( ${query} ) to stdout;" \ - > "./.temp/${table}.dat" - then - remaining=5 - while [[ ${remaining} -gt 0 ]]; do - sleep 5 - remaining=$(( remaining - 1 )) - rm "./.temp/${table}.dat" || true - >&2 echo "Retrying (${remaining} attempts remaining)..." - if ! time psql \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --command "copy ( ${query} ) to stdout;" \ - > "./.temp/${table}.dat" - then - if [[ ${remaining} -eq 0 ]]; then - >&2 echo "Failure (no more retries are remaining)" - exit 1 - fi - else - >&2 echo "Success" - remaining=0 - fi - done - fi - - # Push the batch of data - if [[ -s "./.temp/${table}.dat" ]]; then - echo "copy $1 from stdin;" - cat "./.temp/${table}.dat" - echo "\." - fi - - # Increment and remember cursor position - cur_id=$(( cur_id + step )) - echo "${cur_id}" > "./.temp/${table}.cur" - - # Refresh max ID (in case it increased meanwhile) - max_id=$( - psql \ - --host "${PGSRCHOST}" \ - --dbname "${PGSRCDB}" \ - --username postgres \ - --tuples-only \ - --command "select max(id) from ${table};" - ) -done - -rm "./.temp/${table}.dat" || true - ->&2 echo "Done" -``` - ---- - -`get-start-id.sh` - -A script to query `max(id) + 1` in some of the tables on the target instance. -Helpful for backfilling. 
- -```bash -#!/bin/bash - -if [[ -z "${PGDSTHOST}" ]]; then echo "Missing PGDSTHOST"; exit 1; fi -if [[ -z "${PGDSTDB}" ]]; then echo "Missing PGDSTDB"; exit 1; fi -if [[ -z "$1" ]]; then echo "Missing table name"; exit 1; fi - -table="$1" - -psql \ - --host "${PGDSTHOST}" \ - --dbname "${PGDSTDB}" \ - --username postgres \ - --tuples-only \ - --command "select (max(id) + 1) from ${table};" -``` diff --git a/mev-boost-relay/docs/docs/20230619-startup-shutdown-zero-downtime-deployments.md b/mev-boost-relay/docs/docs/20230619-startup-shutdown-zero-downtime-deployments.md deleted file mode 100644 index 0ca36f164..000000000 --- a/mev-boost-relay/docs/docs/20230619-startup-shutdown-zero-downtime-deployments.md +++ /dev/null @@ -1,118 +0,0 @@ -# On graceful service startup and shutdown, and zero-downtime deployments - -2023-06-19, by [@metachris](https://twitter.com/metachris), [@0x416e746f6e](https://github.com/0x416e746f6e) - ---- - -This document explains the details of API service startup and shutdown behavior, in particular related to: -- Zero-downtime deployments -- Proposer API - - Needing data before being able to handle `getPayload` requests (known validators) - - Draining requests before shutting down - ---- - -## TL;DR - -- We've added two endpoints: `/livez` and `/readyz` (per [k8s docs](https://kubernetes.io/docs/reference/using-api/health-checks/)) in [#469](https://github.com/flashbots/mev-boost-relay/pull/469): -- On startup: - - `/livez` is immediately available and positive, and will stay so until the service is shut down - - `/readyz` starts negative, until all information is loaded to safely process requests (known validators for the proposer API) - - Configure your orchestration tooling to route traffic to the service only if and when `/readyz` is positive! -- On shutdown: - - `/readyz` returns a negative result - - Wait a little and drain all requests (by default, 30 sec -- make sure your orchestration graceful shutdown period is greater than that (i.e. set to 60 sec)) - - Stop the webserver, and stop the program -- See also: https://kubernetes.io/docs/reference/using-api/health-checks/ - ---- - -## Kubernetes background about health-checks - -There are three types of health-checks (probes): [k8s docs](https://kubernetes.io/docs/reference/using-api/health-checks/) - -1. Startup probe -2. Liveness probe (`/livez`) -3. Readiness probe (`/readyz`) - -(All of these can be HTTP requests or commands) - -1. startup check: - - only for the startup phase - - confirm that pod has started - - if it fails, k8s will destroy and recreate -2. liveness check: - - indicated whether the service is alive. if `false`, then k8s should destroy & recreate the pods - - based on rules, timeouts, etc - - status exposed via `/livez` -3. readiness check: - - Applications may be temporarily unable to serve traffic. - - An application might need to load large data or configuration files during startup or depend on external services after startup. - - In such cases, you don't want to kill the application, but you don't want to send it requests either. 
- - https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/#define-readiness-probes - - status exposed via `/readyz` - - if that is `false`, then k8s will stop sending traffic to that pod but doesn't touch it otherwise - ---- - -## API Startup + Shutdown Sequence - -The proposer API needs to load all known validators before serving traffic, otherwise, there's a risk of missed slots due to `getPayload` not having all the information it needs to succeed. - -**Correct startup sequence:** -1. Service starts -2. Does minimal initial checks -3. Starts HTTP server (`live=true`, `ready=false`) -4. Updates known validators from CL client (can take 10-30 sec) -5. Sets `ready=true`, and starts receiving traffic - -At this point, the pod is operational and can service traffic. - -**Correct shutdown sequence:** - -1. Shutdown initiated (through signals `syscall.SIGINT` or `syscall.SIGTERM`) -2. Set `ready=false` to stop receiving new traffic -3. Wait some time -4. Drain pending requests -5. Shut down (setting `live=false` is not necessary anymore) - - ---- - -## Example k8s + AWS configuration - -```yaml - metadata: - name: boost-relay-api-proposer - annotations: - alb.ingress.kubernetes.io/healthcheck-interval-seconds: "10" - alb.ingress.kubernetes.io/healthcheck-path: /readyz - alb.ingress.kubernetes.io/healthcheck-port: "8080" - spec: - template: - spec: - terminationGracePeriodSeconds: 60 - containers: - - name: boost-relay-api-proposer - livenessProbe: - initialDelaySeconds: 5 - failureThreshold: 2 - httpGet: - path: /livez - port: 8080 - readinessProbe: - initialDelaySeconds: 5 - failureThreshold: 2 - httpGet: - path: /readyz - port: 8080 -``` - ---- - -## See also - -- https://kubernetes.io/docs/reference/using-api/health-checks/ -- https://kubernetes.io/docs/tasks/configure-pod-container/configure-liveness-readiness-startup-probes/ -- https://komodor.com/blog/kubernetes-health-checks-everything-you-need-to-know/ -- https://kubernetes-sigs.github.io/aws-load-balancer-controller/v2.2/guide/ingress/annotations/ diff --git a/mev-boost-relay/docs/images/redis-stats-1.png b/mev-boost-relay/docs/images/redis-stats-1.png deleted file mode 100644 index b809e9fa8..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-1.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/redis-stats-2.png b/mev-boost-relay/docs/images/redis-stats-2.png deleted file mode 100644 index 2d397c1de..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-2.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/redis-stats-3.png b/mev-boost-relay/docs/images/redis-stats-3.png deleted file mode 100644 index 76f50060a..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-3.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/redis-stats-4.png b/mev-boost-relay/docs/images/redis-stats-4.png deleted file mode 100644 index 43982e623..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-4.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/redis-stats-5.png b/mev-boost-relay/docs/images/redis-stats-5.png deleted file mode 100644 index 2b168e83c..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-5.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/redis-stats-7.png b/mev-boost-relay/docs/images/redis-stats-7.png deleted file mode 100644 index 8c833bb63..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-7.png and /dev/null differ diff --git 
a/mev-boost-relay/docs/images/redis-stats-8.png b/mev-boost-relay/docs/images/redis-stats-8.png deleted file mode 100644 index f784119e1..000000000 Binary files a/mev-boost-relay/docs/images/redis-stats-8.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/request-duration-2.png b/mev-boost-relay/docs/images/request-duration-2.png deleted file mode 100644 index 6e565e3a1..000000000 Binary files a/mev-boost-relay/docs/images/request-duration-2.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/request-duration-3.png b/mev-boost-relay/docs/images/request-duration-3.png deleted file mode 100644 index feafc7851..000000000 Binary files a/mev-boost-relay/docs/images/request-duration-3.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/request-duration-successful-block-submission.png b/mev-boost-relay/docs/images/request-duration-successful-block-submission.png deleted file mode 100644 index c266a10a7..000000000 Binary files a/mev-boost-relay/docs/images/request-duration-successful-block-submission.png and /dev/null differ diff --git a/mev-boost-relay/docs/images/table-skipped-simulations.png b/mev-boost-relay/docs/images/table-skipped-simulations.png deleted file mode 100644 index f0080bd81..000000000 Binary files a/mev-boost-relay/docs/images/table-skipped-simulations.png and /dev/null differ diff --git a/mev-boost-relay/go.mod b/mev-boost-relay/go.mod deleted file mode 100644 index 37f3e4882..000000000 --- a/mev-boost-relay/go.mod +++ /dev/null @@ -1,126 +0,0 @@ -module github.com/flashbots/mev-boost-relay - -go 1.22 - -require ( - github.com/NYTimes/gziphandler v1.1.1 - github.com/alicebob/miniredis/v2 v2.31.0 - github.com/attestantio/go-builder-client v0.4.3-0.20240124194555-d44db06f45fa - github.com/attestantio/go-eth2-client v0.19.9 - github.com/bradfitz/gomemcache v0.0.0-20230124162541-5f7a7d875746 - github.com/btcsuite/btcd/btcutil v1.1.2 - github.com/buger/jsonparser v1.1.1 - github.com/chainbound/shardmap v0.0.2 - github.com/ethereum/go-ethereum v1.13.15 - github.com/flashbots/go-boost-utils v1.8.0 - github.com/go-redis/redis/v9 v9.0.0-rc.1 - github.com/gorilla/mux v1.8.1 - github.com/holiman/uint256 v1.2.4 - github.com/jmoiron/sqlx v1.3.5 - github.com/lib/pq v1.10.8 - github.com/pkg/errors v0.9.1 - github.com/r3labs/sse/v2 v2.10.0 - github.com/sirupsen/logrus v1.9.3 - github.com/spf13/cobra v1.8.0 - github.com/stretchr/testify v1.8.4 - github.com/tdewolff/minify v2.3.6+incompatible - github.com/thedevbirb/flashbots-go-utils v0.0.0-20240522160613-f4f36d8f39af - go.uber.org/atomic v1.11.0 - golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa - golang.org/x/text v0.14.0 - gotest.tools v2.2.0+incompatible -) - -require ( - github.com/DataDog/zstd v1.5.2 // indirect - github.com/StackExchange/wmi v1.2.1 // indirect - github.com/beorn7/perks v1.0.1 // indirect - github.com/bits-and-blooms/bitset v1.10.0 // indirect - github.com/cockroachdb/errors v1.9.1 // indirect - github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect - github.com/cockroachdb/pebble v0.0.0-20230928194634-aa077af62593 // indirect - github.com/cockroachdb/redact v1.1.3 // indirect - github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect - github.com/consensys/bavard v0.1.13 // indirect - github.com/consensys/gnark-crypto v0.12.1 // indirect - github.com/crate-crypto/go-ipa v0.0.0-20231025140028-3c0104f4b233 // indirect - github.com/crate-crypto/go-kzg-4844 v0.7.0 // indirect - github.com/emicklei/dot v1.6.2 // indirect - 
github.com/ethereum/c-kzg-4844 v0.4.0 // indirect - github.com/fatih/color v1.16.0 // indirect - github.com/gballet/go-verkle v0.1.1-0.20231031103413-a67434b50f46 // indirect - github.com/getsentry/sentry-go v0.18.0 // indirect - github.com/go-gorp/gorp/v3 v3.1.0 // indirect - github.com/go-ole/go-ole v1.3.0 // indirect - github.com/goccy/go-yaml v1.11.2 // indirect - github.com/gofrs/flock v0.8.1 // indirect - github.com/gogo/protobuf v1.3.2 // indirect - github.com/golang/protobuf v1.5.3 // indirect - github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect - github.com/google/go-cmp v0.5.9 // indirect - github.com/google/uuid v1.3.1 // indirect - github.com/klauspost/compress v1.15.15 // indirect - github.com/kr/pretty v0.3.1 // indirect - github.com/kr/text v0.2.0 // indirect - github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.20 // indirect - github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect - github.com/mmcloughlin/addchain v0.4.0 // indirect - github.com/olekukonko/tablewriter v0.0.5 // indirect - github.com/prometheus/client_golang v1.16.0 // indirect - github.com/prometheus/client_model v0.3.0 // indirect - github.com/prometheus/common v0.42.0 // indirect - github.com/prometheus/procfs v0.10.1 // indirect - github.com/prysmaticlabs/go-bitfield v0.0.0-20210809151128-385d8c5e3fb7 // indirect - github.com/rivo/uniseg v0.2.0 // indirect - github.com/rogpeppe/go-internal v1.11.0 // indirect - github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible // indirect - github.com/supranational/blst v0.3.11 // indirect - github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect - golang.org/x/sync v0.5.0 // indirect - golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 // indirect - google.golang.org/protobuf v1.30.0 // indirect - rsc.io/tmplfunc v0.0.3 // indirect -) - -require ( - github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect - github.com/btcsuite/btcd v0.23.0 // indirect - github.com/btcsuite/btcd/btcec/v2 v2.3.2 // indirect - github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 // indirect - github.com/cespare/xxhash/v2 v2.2.0 // indirect - github.com/davecgh/go-spew v1.1.1 // indirect - github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 // indirect - github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688 - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/klauspost/cpuid/v2 v2.2.6 // indirect - github.com/mattn/go-runewidth v0.0.13 // indirect - github.com/minio/sha256-simd v1.0.1 // indirect - github.com/mitchellh/mapstructure v1.5.0 // indirect - github.com/pmezard/go-difflib v1.0.0 // indirect - github.com/rubenv/sql-migrate v1.5.2 - github.com/spf13/pflag v1.0.5 // indirect - github.com/tdewolff/parse v2.3.4+incompatible // indirect - github.com/tdewolff/test v1.0.7 // indirect - github.com/tklauser/go-sysconf v0.3.12 // indirect - github.com/tklauser/numcpus v0.6.1 // indirect - github.com/yuin/gopher-lua v1.1.0 // indirect - go.uber.org/multierr v1.11.0 // indirect - go.uber.org/zap v1.25.0 // indirect - golang.org/x/crypto v0.18.0 - golang.org/x/net v0.18.0 // indirect - golang.org/x/sys v0.16.0 // indirect - gopkg.in/cenkalti/backoff.v1 v1.1.0 // indirect - gopkg.in/yaml.v2 v2.4.0 // indirect - gopkg.in/yaml.v3 v3.0.1 // indirect -) - -// https://go.dev/ref/mod#go-mod-file-retract -retract ( - v1.15.3 - v1.15.2 - v1.0.0-alpha3 - 
v1.0.0-alpha2 - v1.0.0-alpha1 -) diff --git a/mev-boost-relay/go.sum b/mev-boost-relay/go.sum deleted file mode 100644 index 3d75db6ae..000000000 --- a/mev-boost-relay/go.sum +++ /dev/null @@ -1,659 +0,0 @@ -cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= -github.com/AndreasBriese/bbloom v0.0.0-20190306092124-e2d15f34fcf9/go.mod h1:bOvUY6CB00SOBii9/FifXqc0awNKxLFCL/+pkDPuyl8= -github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= -github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3IMCy2vIlbG1XG/0ggNQv0SvxCAIpPM5b1nCz56Xno= -github.com/CloudyKit/jet/v3 v3.0.0/go.mod h1:HKQPgSJmdK8hdoAbKUUWajkHyHo4RaU5rMdUywE7VMo= -github.com/DataDog/zstd v1.5.2 h1:vUG4lAyuPCXO0TLbXvPv7EB7cNK1QV/luu55UHLrrn8= -github.com/DataDog/zstd v1.5.2/go.mod h1:g4AWEaM3yOg3HYfnJ3YIawPnVdXJh9QME85blwSAmyw= -github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2QcJ+aYbNgiU0= -github.com/Joker/hpp v1.0.0/go.mod h1:8x5n+M1Hp5hC0g8okX3sR3vFQwynaX/UgSOM9MeBKzY= -github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I= -github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= -github.com/Shopify/goreferrer v0.0.0-20181106222321-ec9c9a553398/go.mod h1:a1uqRtAwp2Xwc6WNPJEufxJ7fx3npB4UV/JOLmbu5I0= -github.com/StackExchange/wmi v1.2.1 h1:VIkavFPXSjcnS+O8yTq7NI32k0R5Aj+v39y29VYDOSA= -github.com/StackExchange/wmi v1.2.1/go.mod h1:rcmrprowKIVzvc+NUiLncP2uuArMWLCbu9SBzvHz7e8= -github.com/VictoriaMetrics/fastcache v1.12.1 h1:i0mICQuojGDL3KblA7wUNlY5lOK6a4bwt3uRKnkZU40= -github.com/VictoriaMetrics/fastcache v1.12.1/go.mod h1:tX04vaqcNoQeGLD+ra5pU5sWkuxnzWhEzLwhP9w653o= -github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII= -github.com/ajg/form v1.5.1/go.mod h1:uL1WgH+h2mgNtvBq0339dVnzXdBETtL2LeUXaIv25UY= -github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= -github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= -github.com/alicebob/miniredis/v2 v2.31.0 h1:ObEFUNlJwoIiyjxdrYF0QIDE7qXcLc7D3WpSH4c22PU= -github.com/alicebob/miniredis/v2 v2.31.0/go.mod h1:UB/T2Uztp7MlFSDakaX1sTXUv5CASoprx0wulRT6HBg= -github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8= -github.com/attestantio/go-builder-client v0.4.3-0.20240124194555-d44db06f45fa h1:Kj6d1tXAA+EAi7fK8z8NakBEpY4WYzZMuCmLZjwBpTM= -github.com/attestantio/go-builder-client v0.4.3-0.20240124194555-d44db06f45fa/go.mod h1:e02i/WO4fjs3/u9oIZEjiC8CK1Qyxy4cpiMMGKx4VqQ= -github.com/attestantio/go-eth2-client v0.19.9 h1:g5LLX3X7cLC0KS0oai/MtxBOZz3U3QPIX5qryYMxgVE= -github.com/attestantio/go-eth2-client v0.19.9/go.mod h1:TTz7YF6w4z6ahvxKiHuGPn6DbQn7gH6HPuWm/DEQeGE= -github.com/aymerick/raymond v2.0.3-0.20180322193309-b565731e1464+incompatible/go.mod h1:osfaiScAUVup+UC9Nfq76eWqDhXlp+4UYaA8uhTBO6g= -github.com/benbjohnson/clock v1.3.0 h1:ip6w0uFQkncKQ979AypyG0ER7mqUSBdKLOgAle/AT8A= -github.com/benbjohnson/clock v1.3.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA= -github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= -github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= -github.com/bits-and-blooms/bitset v1.10.0 h1:ePXTeiPEazB5+opbv5fr8umg2R/1NlzgDsyepwsSr88= -github.com/bits-and-blooms/bitset v1.10.0/go.mod 
h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8= -github.com/bradfitz/gomemcache v0.0.0-20230124162541-5f7a7d875746 h1:wAIE/kN63Oig1DdOzN7O+k4AbFh2cCJoKMFXrwRJtzk= -github.com/bradfitz/gomemcache v0.0.0-20230124162541-5f7a7d875746/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA= -github.com/btcsuite/btcd v0.20.1-beta/go.mod h1:wVuoA8VJLEcwgqHBwHmzLRazpKxTv13Px/pDuV7OomQ= -github.com/btcsuite/btcd v0.22.0-beta.0.20220111032746-97732e52810c/go.mod h1:tjmYdS6MLJ5/s0Fj4DbLgSbDHbEqLJrtnHecBFkdz5M= -github.com/btcsuite/btcd v0.23.0 h1:V2/ZgjfDFIygAX3ZapeigkVBoVUtOJKSwrhZdlpSvaA= -github.com/btcsuite/btcd v0.23.0/go.mod h1:0QJIIN1wwIXF/3G/m87gIwGniDMDQqjVn4SZgnFpsYY= -github.com/btcsuite/btcd/btcec/v2 v2.1.0/go.mod h1:2VzYrv4Gm4apmbVVsSq5bqf1Ec8v56E48Vt0Y/umPgA= -github.com/btcsuite/btcd/btcec/v2 v2.1.3/go.mod h1:ctjw4H1kknNJmRN4iP1R7bTQ+v3GJkZBd6mui8ZsAZE= -github.com/btcsuite/btcd/btcec/v2 v2.3.2 h1:5n0X6hX0Zk+6omWcihdYvdAlGf2DfasC0GMf7DClJ3U= -github.com/btcsuite/btcd/btcec/v2 v2.3.2/go.mod h1:zYzJ8etWJQIv1Ogk7OzpWjowwOdXY1W/17j2MW85J04= -github.com/btcsuite/btcd/btcutil v1.0.0/go.mod h1:Uoxwv0pqYWhD//tfTiipkxNfdhG9UrLwaeswfjfdF0A= -github.com/btcsuite/btcd/btcutil v1.1.0/go.mod h1:5OapHB7A2hBBWLm48mmw4MOHNJCcUBTwmWH/0Jn8VHE= -github.com/btcsuite/btcd/btcutil v1.1.2 h1:XLMbX8JQEiwMcYft2EGi8zPUkoa0abKIU6/BJSRsjzQ= -github.com/btcsuite/btcd/btcutil v1.1.2/go.mod h1:UR7dsSJzJUfMmFiiLlIrMq1lS9jh9EdCV7FStZSnpi0= -github.com/btcsuite/btcd/chaincfg/chainhash v1.0.0/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= -github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U= -github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc= -github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f/go.mod h1:TdznJufoqS23FtqVCzL0ZqgP5MqXbb4fg/WgDys70nA= -github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d/go.mod h1:+5NJ2+qvTyV9exUAL/rxXi3DcLg2Ts+ymUAY5y4NvMg= -github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd/go.mod h1:HHNXQzUsZCxOoE+CPiyCTO6x34Zs86zZUiwtpXoGdtg= -github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd/go.mod h1:F+uVaaLLH7j4eDXPRvw78tMflu7Ie2bzYOH4Y8rRKBY= -github.com/btcsuite/goleveldb v1.0.0/go.mod h1:QiK9vBlgftBg6rWQIj6wFzbPfRjiykIEhBH4obrXJ/I= -github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= -github.com/btcsuite/snappy-go v1.0.0/go.mod h1:8woku9dyThutzjeg+3xrA5iCpBRH8XEEg3lh6TiUghc= -github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792/go.mod h1:ghJtEyQwv5/p4Mg4C0fgbePVuGr935/5ddU9Z3TmDRY= -github.com/btcsuite/winsvc v1.0.0/go.mod h1:jsenWakMcC0zFBFurPLEAyrnc/teJEM1O46fmI40EZs= -github.com/buger/jsonparser v1.1.1 h1:2PnMjfWD7wBILjqQbt530v576A/cAbQvEW9gGIpYMUs= -github.com/buger/jsonparser v1.1.1/go.mod h1:6RYKKt7H4d4+iWqouImQ9R2FZql3VbhNgx27UK13J/0= -github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= -github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44= -github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= -github.com/chainbound/shardmap v0.0.2 h1:yB1weccdm2vC6dnqzzLwPIvyAnRj7815mJWbkPybiYw= -github.com/chainbound/shardmap v0.0.2/go.mod h1:TBvIzhHyFUbt+oa3UzbijobTUh221st6xIbuki7WzPc= -github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= -github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod 
h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= -github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= -github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= -github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk= -github.com/cockroachdb/datadriven v1.0.2/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= -github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f h1:otljaYPt5hWxV3MUfO5dFPFiOXg9CyG5/kCfayTqsJ4= -github.com/cockroachdb/datadriven v1.0.3-0.20230413201302-be42291fc80f/go.mod h1:a9RdTaap04u637JoCzcUoIcDmvwSUtcUFtT/C3kJlTU= -github.com/cockroachdb/errors v1.9.1 h1:yFVvsI0VxmRShfawbt/laCIDy/mtTqqnvoNgiy5bEV8= -github.com/cockroachdb/errors v1.9.1/go.mod h1:2sxOtL2WIc096WSZqZ5h8fa17rdDq9HZOZLBCor4mBk= -github.com/cockroachdb/logtags v0.0.0-20211118104740-dabe8e521a4f/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= -github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b h1:r6VH0faHjZeQy818SGhaone5OnYfxFR/+AzdY3sf5aE= -github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b/go.mod h1:Vz9DsVWQQhf3vs21MhPMZpMGSht7O/2vFW2xusFUVOs= -github.com/cockroachdb/pebble v0.0.0-20230928194634-aa077af62593 h1:aPEJyR4rPBvDmeyi+l/FS/VtA00IWvjeFvjen1m1l1A= -github.com/cockroachdb/pebble v0.0.0-20230928194634-aa077af62593/go.mod h1:6hk1eMY/u5t+Cf18q5lFMUA1Rc+Sm5I6Ra1QuPyxXCo= -github.com/cockroachdb/redact v1.1.3 h1:AKZds10rFSIj7qADf0g46UixK8NNLwWTNdCIGS5wfSQ= -github.com/cockroachdb/redact v1.1.3/go.mod h1:BVNblN9mBWFyMyqK1k3AAiSxhvhfK2oOZZ2lK+dpvRg= -github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 h1:zuQyyAKVxetITBuuhv3BI9cMrmStnpT18zmgmTxunpo= -github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06/go.mod h1:7nc4anLGjupUW/PeY5qiNYsdNXj7zopG+eqsS7To5IQ= -github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM= -github.com/consensys/bavard v0.1.13 h1:oLhMLOFGTLdlda/kma4VOJazblc7IM5y5QPd2A/YjhQ= -github.com/consensys/bavard v0.1.13/go.mod h1:9ItSMtA/dXMAiL7BG6bqW2m3NdSEObYWoH223nGHukI= -github.com/consensys/gnark-crypto v0.12.1 h1:lHH39WuuFgVHONRl3J0LRBtuYdQTumFSDtJF7HpyG8M= -github.com/consensys/gnark-crypto v0.12.1/go.mod h1:v2Gy7L/4ZRosZ7Ivs+9SfUDr0f5UlG+EM5t7MPHiLuY= -github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= -github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk= -github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= -github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= -github.com/crate-crypto/go-ipa v0.0.0-20231025140028-3c0104f4b233 h1:d28BXYi+wUpz1KBmiF9bWrjEMacUEREV6MBi2ODnrfQ= -github.com/crate-crypto/go-ipa v0.0.0-20231025140028-3c0104f4b233/go.mod h1:geZJZH3SzKCqnz5VT0q/DyIG/tvu/dZk+VIfXicupJs= -github.com/crate-crypto/go-kzg-4844 v0.7.0 h1:C0vgZRk4q4EZ/JgPfzuSoxdCq3C3mOZMBShovmncxvA= -github.com/crate-crypto/go-kzg-4844 v0.7.0/go.mod h1:1kMhvPgI0Ky3yIa+9lFySEBUBXkYxeOi8ZF1sYioxhc= -github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.0/go.mod 
h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc= -github.com/decred/dcrd/crypto/blake256 v1.0.1 h1:7PltbUIQB7u/FfZ39+DGa/ShuMyJ5ilcvdfma9wOH6Y= -github.com/decred/dcrd/crypto/blake256 v1.0.1/go.mod h1:2OfgNZ5wDpcsFmHmCK5gZTPcCXqlm2ArzUIkw9czNJo= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0 h1:8UrgZ3GkP4i/CLijOJx79Yu+etlyjdBU4sfcs2WYQMs= -github.com/decred/dcrd/dcrec/secp256k1/v4 v4.2.0/go.mod h1:v57UDF4pDQJcEfFUCRop3lJL149eHGSe9Jvczhzjo/0= -github.com/decred/dcrd/lru v1.0.0/go.mod h1:mxKOwFd7lFjN2GZYsiz/ecgqR6kkYAl+0pz0tEMk218= -github.com/dgraph-io/badger v1.6.0/go.mod h1:zwt7syl517jmP8s94KqSxTlM6IMsdhYy6psNgSztDR4= -github.com/dgryski/go-farm v0.0.0-20190423205320-6a90982ecee2/go.mod h1:SqUrOPUnsFjfmXRMNPybcSiG0BgUW2AuFH8PAnS2iTw= -github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= -github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= -github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= -github.com/eknkc/amber v0.0.0-20171010120322-cdade1c07385/go.mod h1:0vRUJqYpeSZifjYj7uP3BG/gKcuzL9xWVV/Y+cK33KM= -github.com/emicklei/dot v1.6.2 h1:08GN+DD79cy/tzN6uLCT84+2Wk9u+wvqP+Hkx/dIR8A= -github.com/emicklei/dot v1.6.2/go.mod h1:DeV7GvQtIw4h2u73RKBkkFdvVAz0D9fzeJrgPW6gy/s= -github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= -github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= -github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= -github.com/ethereum/c-kzg-4844 v0.4.0 h1:3MS1s4JtA868KpJxroZoepdV0ZKBp3u/O5HcZ7R3nlY= -github.com/ethereum/c-kzg-4844 v0.4.0/go.mod h1:VewdlzQmpT5QSrVhbBuGoCdFJkpaJlO1aQputP83wc0= -github.com/ethereum/go-ethereum v1.13.15 h1:U7sSGYGo4SPjP6iNIifNoyIAiNjrmQkz6EwQG+/EZWo= -github.com/ethereum/go-ethereum v1.13.15/go.mod h1:TN8ZiHrdJwSe8Cb6x+p0hs5CxhJZPbqB7hHkaUXcmIU= -github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= -github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= -github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= -github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M= -github.com/ferranbt/fastssz v0.1.3 h1:ZI+z3JH05h4kgmFXdHuR1aWYsgrg7o+Fw7/NCzM16Mo= -github.com/ferranbt/fastssz v0.1.3/go.mod h1:0Y9TEd/9XuFlh7mskMPfXiI2Dkw4Ddg9EyXt1W7MRvE= -github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688 h1:k70X5h1haHaSbpD/9fcjtvAUEVlRlOKtdpvN7Mzhcv4= -github.com/ferranbt/fastssz v0.1.4-0.20240724090034-31cd371f8688/go.mod h1:Ea3+oeoRGGLGm5shYAeDgu6PGUlcvQhE2fILyD9+tGg= -github.com/flashbots/go-boost-utils v1.8.0 h1:z3K1hw+Fbl9AGMNQKnK7Bvf0M/rKgjfruAEvra+Z8Mg= 
-github.com/flashbots/go-boost-utils v1.8.0/go.mod h1:Ry1Rw8Lx5v1rpAR0+IvR4sV10jYAeQaGVM3vRD8mYdM= -github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= -github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= -github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY= -github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw= -github.com/gavv/httpexpect v2.0.0+incompatible/go.mod h1:x+9tiU1YnrOvnB725RkpoLv1M62hOWzwo5OXotisrKc= -github.com/gballet/go-verkle v0.1.1-0.20231031103413-a67434b50f46 h1:BAIP2GihuqhwdILrV+7GJel5lyPV3u1+PgzrWLc0TkE= -github.com/gballet/go-verkle v0.1.1-0.20231031103413-a67434b50f46/go.mod h1:QNpY22eby74jVhqH4WhDLDwxc/vqsern6pW+u2kbkpc= -github.com/getsentry/sentry-go v0.12.0/go.mod h1:NSap0JBYWzHND8oMbyi0+XZhUalc1TBdRL1M71JZW2c= -github.com/getsentry/sentry-go v0.18.0 h1:MtBW5H9QgdcJabtZcuJG80BMOwaBpkRDZkxRkNC1sN0= -github.com/getsentry/sentry-go v0.18.0/go.mod h1:Kgon4Mby+FJ7ZWHFUAZgVaIa8sxHtnRJRLTXZr51aKQ= -github.com/gin-contrib/sse v0.0.0-20190301062529-5545eab6dad3/go.mod h1:VJ0WA2NBN22VlZ2dKZQPAPnyWw5XTlK1KymzLKsr59s= -github.com/gin-gonic/gin v1.4.0/go.mod h1:OW2EZn3DO8Ln9oIKOvM++LBO+5UPHJJDH72/q/3rZdM= -github.com/go-check/check v0.0.0-20180628173108-788fd7840127/go.mod h1:9ES+weclKsC9YodN5RgxqK/VD9HM9JsCSh7rNhMZE98= -github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q= -github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA= -github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og= -github.com/go-gorp/gorp/v3 v3.1.0 h1:ItKF/Vbuj31dmV4jxA1qblpSwkl9g1typ24xoe70IGs= -github.com/go-gorp/gorp/v3 v3.1.0/go.mod h1:dLEjIyyRNiXvNZ8PSmzpt1GsWAUK8kjVhEpjH8TixEw= -github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= -github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= -github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= -github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= -github.com/go-playground/locales v0.14.0 h1:u50s323jtVGugKlcYeyzC0etD1HifMjqmJqb8WugfUU= -github.com/go-playground/locales v0.14.0/go.mod h1:sawfccIbzZTqEDETgFXqTho0QybSa7l++s0DH+LDiLs= -github.com/go-playground/universal-translator v0.18.0 h1:82dyy6p4OuJq4/CByFNOn/jYrnRPArHwAcmLoJZxyho= -github.com/go-playground/universal-translator v0.18.0/go.mod h1:UvRDBj+xPUEGrFYl+lu/H90nyDXpg0fqeB/AQUGNTVA= -github.com/go-playground/validator/v10 v10.11.1 h1:prmOlTVv+YjZjmRmNSF3VmspqJIxJWXmqUsHwfTRRkQ= -github.com/go-playground/validator/v10 v10.11.1/go.mod h1:i+3WkQ1FvaUjjxh1kSvIA4dMGDBiPU55YFDl0WbKdWU= -github.com/go-redis/redis/v9 v9.0.0-rc.1 h1:/+bS+yeUnanqAbuD3QwlejzQZ+4eqgfUtFTG4b+QnXs= -github.com/go-redis/redis/v9 v9.0.0-rc.1/go.mod h1:8et+z03j0l8N+DvsVnclzjf3Dl/pFHgRk+2Ct1qw66A= -github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE= -github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= -github.com/gobuffalo/logger v1.0.6 h1:nnZNpxYo0zx+Aj9RfMPBm+x9zAU2OayFh/xrAWi34HU= -github.com/gobuffalo/logger v1.0.6/go.mod h1:J31TBEHR1QLV2683OXTAItYIg8pv2JMHnF/quuAbMjs= -github.com/gobuffalo/packd v1.0.1 h1:U2wXfRr4E9DH8IdsDLlRFwTZTK7hLfq9qT/QHXGVe/0= -github.com/gobuffalo/packd v1.0.1/go.mod h1:PP2POP3p3RXGz7Jh6eYEf93S7vA2za6xM7QT85L4+VY= 
-github.com/gobuffalo/packr/v2 v2.8.3 h1:xE1yzvnO56cUC0sTpKR3DIbxZgB54AftTFMhB2XEWlY= -github.com/gobuffalo/packr/v2 v2.8.3/go.mod h1:0SahksCVcx4IMnigTjiFuyldmTrdTctXsOdiU5KwbKc= -github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo= -github.com/gobwas/pool v0.2.0/go.mod h1:q8bcK0KcYlCgd9e7WYLm9LpyS+YeLd8JVDW6WezmKEw= -github.com/gobwas/ws v1.0.2/go.mod h1:szmBTxLgaFppYjEmNtny/v3w89xOydFnnZMcgRRu/EM= -github.com/goccy/go-yaml v1.11.2 h1:joq77SxuyIs9zzxEjgyLBugMQ9NEgTWxXfz2wVqwAaQ= -github.com/goccy/go-yaml v1.11.2/go.mod h1:wKnAMd44+9JAAnGQpWVEgBzGt3YuTaQ4uXoHvE4m7WU= -github.com/gofrs/flock v0.8.1 h1:+gYjHKf32LDeiEEFhQaotPbLuUXjY5ZqxKgXy7n59aw= -github.com/gofrs/flock v0.8.1/go.mod h1:F1TvTiK9OcQqauNUHlbJvyl9Qa1QvF/gOUDKA14jxHU= -github.com/gogo/googleapis v0.0.0-20180223154316-0cd9801be74a/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFGgqEef3s= -github.com/gogo/googleapis v1.4.1/go.mod h1:2lpHqI5OcWCtVElxXnPt+s8oJvMpySlOyM6xDCrzib4= -github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= -github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= -github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= -github.com/gogo/status v1.1.0/go.mod h1:BFv9nrluPLmrS0EmGVvLaPNmRosr9KapBYd5/hpY1WM= -github.com/golang-jwt/jwt v3.2.2+incompatible/go.mod h1:8pz2t5EyA70fFQQSrl6XZXzqecmYZeUEB8OUGHkxJ+I= -github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= -github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= -github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= -github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= -github.com/golang/protobuf v1.3.5/go.mod h1:6O5/vntMXwX2lRkT1hjjk0nAC1IDOTvTlVgjlRvqsdk= -github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= -github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= -github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= -github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= -github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= -github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= -github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= -github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= -github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/protobuf v1.5.3 h1:KhyjKVUg7Usr/dYsdSqoFveMYd5ko72D+zANwlG1mmg= -github.com/golang/protobuf v1.5.3/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= -github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk= -github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/gomodule/redigo 
v1.7.1-0.20190724094224-574c33c3df38/go.mod h1:B4C85qUVwatsJoIUNIfCRsp7qO0iAmpGFZ4EELWSbC4= -github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= -github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= -github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= -github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38= -github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= -github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= -github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk= -github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.3.1 h1:KjJaJ9iWZ3jOFZIf1Lqf4laDRCasjl0BCmnEGxkdLb4= -github.com/google/uuid v1.3.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= -github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= -github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= -github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= -github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/holiman/uint256 v1.2.4 h1:jUc4Nk8fm9jZabQuqr2JzednajVmBpC+oiTiXZJEApU= -github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E= -github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= -github.com/huandu/go-clone v1.6.0 h1:HMo5uvg4wgfiy5FoGOqlFLQED/VGRm2D9Pi8g1FXPGc= -github.com/huandu/go-clone v1.6.0/go.mod h1:ReGivhG6op3GYr+UY3lS6mxjKp7MIGTknuU5TbTVaXE= -github.com/huandu/go-clone/generic v1.6.0 h1:Wgmt/fUZ28r16F2Y3APotFD59sHk1p78K0XLdbUYN5U= -github.com/huandu/go-clone/generic v1.6.0/go.mod h1:xgd9ZebcMsBWWcBx5mVMCoqMX24gLWr5lQicr+nVXNs= -github.com/hydrogen18/memlistener v0.0.0-20200120041712-dcc25e7acd91/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE= -github.com/imkira/go-interpol v1.1.0/go.mod h1:z0h2/2T3XF8kyEPpRgJ3kmNv+C43p+I/CoI+jC3w2iA= -github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= -github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= -github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= -github.com/iris-contrib/blackfriday v2.0.0+incompatible/go.mod h1:UzZ2bDEoaSGPbkg6SAB4att1aAwTmVIx/5gCVqeyUdI= -github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/+fafWORmlnuysV2EMP8MW+qe0= -github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk= -github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g= -github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= -github.com/jessevdk/go-flags 
v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= -github.com/jmoiron/sqlx v1.3.5 h1:vFFPA71p1o5gAeqtEAwLU4dnX2napprKtHr7PYIcN3g= -github.com/jmoiron/sqlx v1.3.5/go.mod h1:nRVWtLre0KfCLJvgxzCsLVMogSvQ1zNJtpYr2Ccp0mQ= -github.com/jrick/logrotate v1.0.0/go.mod h1:LNinyqDIJnpAur+b8yyulnQw/wDuN1+BYKlTRt3OuAQ= -github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= -github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= -github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= -github.com/k0kubun/colorstring v0.0.0-20150214042306-9440f1994b88/go.mod h1:3w7q1U84EfirKl04SVQ/s7nPm1ZPhiXd34z40TNz36k= -github.com/karrick/godirwalk v1.16.1 h1:DynhcF+bztK8gooS0+NDJFrdNZjJ3gzVzC545UNA9iw= -github.com/karrick/godirwalk v1.16.1/go.mod h1:j4mkqPuvaLI8mp1DroR3P6ad7cyYd4c1qeJ3RV7ULlk= -github.com/kataras/golog v0.0.10/go.mod h1:yJ8YKCmyL+nWjERB90Qwn+bdyBZsaQwU3bTVFgkFIp8= -github.com/kataras/iris/v12 v12.1.8/go.mod h1:LMYy4VlP67TQ3Zgriz8RE2h2kMZV2SgMYbq3UhfoFmE= -github.com/kataras/neffos v0.0.14/go.mod h1:8lqADm8PnbeFfL7CLXh1WHw53dG27MC3pgi2R1rmoTE= -github.com/kataras/pio v0.0.2/go.mod h1:hAoW0t9UmXi4R5Oyq5Z4irTbaTsOemSrDGUtaTl7Dro= -github.com/kataras/sitemap v0.0.5/go.mod h1:KY2eugMKiPwsJgx7+U103YZehfvNGOXURubcGyk0Bz8= -github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= -github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= -github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6CZQHDETBtE9HaSEkGmuNXF86RwHhHUvq4= -github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= -github.com/klauspost/compress v1.15.15 h1:EF27CXIuDsYJ6mmvtBRlEuB2UVOqHG1tAXgZ7yIO+lw= -github.com/klauspost/compress v1.15.15/go.mod h1:ZcK2JAFqKOpnBlxcLsJzYfrS9X1akm9fHZNnD9+Vo/4= -github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= -github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= -github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= -github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= -github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk= -github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= -github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= -github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= -github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= -github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= -github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= -github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= -github.com/labstack/echo/v4 v4.5.0/go.mod h1:czIriw4a0C1dFun+ObrXp7ok03xON0N1awStJ6ArI7Y= -github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k= -github.com/leanovate/gopter v0.2.9 h1:fQjYxZaynp97ozCzfOyOuAGOU4aU/z37zf/tOujFk7c= -github.com/leanovate/gopter v0.2.9/go.mod 
h1:U2L/78B+KVFIx2VmW6onHJQzXtFb+p5y3y2Sh+Jxxv8= -github.com/leodido/go-urn v1.2.1 h1:BqpAaACuzVSgi/VLzGZIobT2z4v53pjosyNd9Yv6n/w= -github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY= -github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= -github.com/lib/pq v1.10.8 h1:3fdt97i/cwSU83+E0hZTC/Xpc9mTZxc6UWSCRcSbxiE= -github.com/lib/pq v1.10.8/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= -github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ= -github.com/markbates/errx v1.1.0 h1:QDFeR+UP95dO12JgW+tgi2UVfo0V8YBHiUIOaeBPiEI= -github.com/markbates/errx v1.1.0/go.mod h1:PLa46Oex9KNbVDZhKel8v1OT7hD5JZ2eI7AHhA0wswc= -github.com/markbates/oncer v1.0.0 h1:E83IaVAHygyndzPimgUYJjbshhDTALZyXxvk9FOlQRY= -github.com/markbates/oncer v1.0.0/go.mod h1:Z59JA581E9GP6w96jai+TGqafHPW+cPfRxz2aSZ0mcI= -github.com/markbates/safe v1.0.1 h1:yjZkbvRM6IzKj9tlu/zMJLS0n/V351OZWRnF3QfaUxI= -github.com/markbates/safe v1.0.1/go.mod h1:nAqgmRi7cY2nqMc92/bSEeQA+R4OheNU2T1kNSCBdG0= -github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= -github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc= -github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4= -github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= -github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= -github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= -github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ= -github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= -github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94= -github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= -github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= -github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI= -github.com/mattn/go-runewidth v0.0.13 h1:lTGmDsbAYt5DmK6OnoV7EuIF1wEIFAcxld6ypU4OSgU= -github.com/mattn/go-runewidth v0.0.13/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= -github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU= -github.com/mattn/go-sqlite3 v1.14.15 h1:vfoHhTN1af61xCRSWzFIWzx2YskyMTwHLrExkBOjvxI= -github.com/mattn/go-sqlite3 v1.14.15/go.mod h1:2eHXhiwb8IkHr+BDWZGa96P6+rkvnG63S2DGjv9HUNg= -github.com/mattn/goveralls v0.0.2/go.mod h1:8d1ZMHsd7fW6IRPKQh46F2WRpyib5/X4FOpevwGNQEw= -github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= -github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= -github.com/mediocregopher/radix/v3 v3.4.2/go.mod h1:8FL3F6UQRXHXIBSPUs5h0RybMF8i4n7wVopoX3x7Bv8= -github.com/microcosm-cc/bluemonday v1.0.2/go.mod h1:iVP4YcDBq+n/5fb23BhYFvIMq/leAFZyRl6bYmGDlGc= -github.com/minio/sha256-simd v1.0.1 h1:6kaan5IFmwTNynnKKpDHe6FWHohJOHhCPchzK49dzMM= -github.com/minio/sha256-simd v1.0.1/go.mod h1:Pz6AKMiUdngCLpeTL/RJY1M9rUuPMYujV5xJjtbRSN8= -github.com/mitchellh/go-homedir v1.1.0/go.mod 
h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= -github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= -github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= -github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= -github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY= -github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU= -github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU= -github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= -github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= -github.com/moul/http2curl v1.0.0/go.mod h1:8UbvGypXm98wA/IqH45anm5Y2Z6ep6O31QGOAZ3H0fQ= -github.com/nats-io/jwt v0.3.0/go.mod h1:fRYCDE99xlTsqUzISS1Bi75UBJ6ljOJQOAAu5VglpSg= -github.com/nats-io/nats.go v1.9.1/go.mod h1:ZjDU1L/7fJ09jvUSRVBR2e7+RnLiiIQyqyzEE/Zbp4w= -github.com/nats-io/nkeys v0.1.0/go.mod h1:xpnFELMwJABBLVhffcfd1MZx6VsNRFpEugbxziKVo7w= -github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c= -github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= -github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE= -github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= -github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec= -github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY= -github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.10.3/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= -github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= -github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY= -github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= -github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= -github.com/onsi/gomega v1.4.1/go.mod h1:C1qb7wdrVGGVU+Z6iS04AVkA3Q65CEZX59MT0QO5uiA= -github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY= -github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= -github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= -github.com/onsi/gomega v1.21.1 h1:OB/euWYIExnPBohllTicTHmGTrMaqJ67nIu80j0/uEM= -github.com/onsi/gomega v1.21.1/go.mod h1:iYAIXgPSaDHak0LCMA+AWBpIKBr8WZicMxnE8luStNc= -github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic= -github.com/pingcap/errors v0.11.4 h1:lFuQV/oaUMGcD2tqt+01ROSmJs75VG1ToEOkZIZ4nE4= -github.com/pingcap/errors v0.11.4/go.mod h1:Oi8TUi2kEtXXLMJk9l1cGmz20kV3TaQ0usTwv5KuLY8= -github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= -github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pkg/errors v0.9.1 
h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= -github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/poy/onpar v1.1.2 h1:QaNrNiZx0+Nar5dLgTVp5mXkyoVFIbepjyEoGSnhbAY= -github.com/poy/onpar v1.1.2/go.mod h1:6X8FLNoxyr9kkmnlqpK6LSoiOtrO6MICtWwEuWkLjzg= -github.com/prometheus/client_golang v1.16.0 h1:yk/hx9hDbrGHovbci4BY+pRMfSuuat626eFsHb7tmT8= -github.com/prometheus/client_golang v1.16.0/go.mod h1:Zsulrv/L9oM40tJ7T815tM89lFEugiJ9HzIqaAx4LKc= -github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= -github.com/prometheus/client_model v0.3.0 h1:UBgGFHqYdG/TPFD1B1ogZywDqEkwp3fBMvqdiQ7Xew4= -github.com/prometheus/client_model v0.3.0/go.mod h1:LDGWKZIo7rky3hgvBe+caln+Dr3dPggB5dvjtD7w9+w= -github.com/prometheus/common v0.42.0 h1:EKsfXEYo4JpWMHH5cg+KOUWeuJSov1Id8zGR8eeI1YM= -github.com/prometheus/common v0.42.0/go.mod h1:xBwqVerjNdUDjgODMpudtOMwlOwf2SaTr1yjz4b7Zbc= -github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= -github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= -github.com/prysmaticlabs/go-bitfield v0.0.0-20210809151128-385d8c5e3fb7 h1:0tVE4tdWQK9ZpYygoV7+vS6QkDvQVySboMVEIxBJmXw= -github.com/prysmaticlabs/go-bitfield v0.0.0-20210809151128-385d8c5e3fb7/go.mod h1:wmuf/mdK4VMD+jA9ThwcUKjg3a2XWM9cVfFYjDyY4j4= -github.com/r3labs/sse/v2 v2.10.0 h1:hFEkLLFY4LDifoHdiCN/LlGBAdVJYsANaLqNYa1l/v0= -github.com/r3labs/sse/v2 v2.10.0/go.mod h1:Igau6Whc+F17QUgML1fYe1VPZzTV6EMCnYktEmkNJ7I= -github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= -github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= -github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc= -github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o= -github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= -github.com/rogpeppe/go-internal v1.11.0 h1:cWPaGQEPrBb5/AsnsZesgZZ9yb1OQ+GOISoDNXVBh4M= -github.com/rogpeppe/go-internal v1.11.0/go.mod h1:ddIwULY96R17DhadqLgMfk9H9tvdUzkipdSkR5nkCZA= -github.com/rubenv/sql-migrate v1.5.2 h1:bMDqOnrJVV/6JQgQ/MxOpU+AdO8uzYYA/TxFUBzFtS0= -github.com/rubenv/sql-migrate v1.5.2/go.mod h1:H38GW8Vqf8F0Su5XignRyaRcbXbJunSWxs+kmzlg0Is= -github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= -github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= -github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= -github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= -github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible h1:Bn1aCHHRnjv4Bl16T8rcaFjYSrGrIZvpiGO6P3Q4GpU= -github.com/shirou/gopsutil v3.21.4-0.20210419000835-c7a38de76ee5+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA= -github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= -github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= -github.com/sirupsen/logrus 
v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= -github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= -github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA= -github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ= -github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE= -github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= -github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= -github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo= -github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= -github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= -github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= -github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s= -github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= -github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= -github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= -github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= -github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= -github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= -github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk= -github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo= -github.com/supranational/blst v0.3.11 h1:LyU6FolezeWAhvQk0k6O/d49jqgO52MSDDfYgbeoEm4= -github.com/supranational/blst v0.3.11/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw= -github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 h1:epCh84lMvA70Z7CTTCmYQn2CKbY8j86K7/FAIr141uY= -github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7/go.mod h1:q4W45IWZaF22tdD+VEXcAWRA037jwmWEB5VWYORlTpc= -github.com/tdewolff/minify v2.3.6+incompatible h1:2hw5/9ZvxhWLvBUnHE06gElGYz+Jv9R4Eys0XUzItYo= -github.com/tdewolff/minify v2.3.6+incompatible/go.mod h1:9Ov578KJUmAWpS6NeZwRZyT56Uf6o3Mcz9CEsg8USYs= -github.com/tdewolff/parse v2.3.4+incompatible h1:x05/cnGwIMf4ceLuDMBOdQ1qGniMoxpP46ghf0Qzh38= -github.com/tdewolff/parse v2.3.4+incompatible/go.mod h1:8oBwCsVmUkgHO8M5iCzSIDtpzXOT0WXX9cWhz+bIzJQ= -github.com/tdewolff/test v1.0.7 h1:8Vs0142DmPFW/bQeHRP3MV19m1gvndjUb1sn8yy74LM= -github.com/tdewolff/test v1.0.7/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= -github.com/thedevbirb/flashbots-go-utils v0.0.0-20240522160613-f4f36d8f39af h1:VwsEpQJqCFCT+oZXfJj8alj3FkIpKLvn0wV7GO/cEQY= -github.com/thedevbirb/flashbots-go-utils v0.0.0-20240522160613-f4f36d8f39af/go.mod h1:Xcc4QMCD1U+HqHayd9nqVMcJVV4eAzNyAQVWY1AqhEE= -github.com/tklauser/go-sysconf v0.3.12 h1:0QaGUFOdQaIVdPgfITYzaTegZvdCjmYO52cSFAEVmqU= -github.com/tklauser/go-sysconf v0.3.12/go.mod h1:Ho14jnntGE1fpdOqQEEaiKRpvIavV0hSfmBq8nJbHYI= -github.com/tklauser/numcpus v0.6.1 h1:ng9scYS7az0Bk4OZLvrNXNSAO2Pxr1XXRAPyjhIx+Fk= -github.com/tklauser/numcpus v0.6.1/go.mod h1:1XfjsgE2zo8GVw7POkMbHENHzVg3GzmoZ9fESEdAacY= -github.com/trailofbits/go-fuzz-utils v0.0.0-20210901195358-9657fcfd256c 
h1:4WU+p200eLYtBsx3M5CKXvkjVdf5SC3W9nMg37y0TFI= -github.com/trailofbits/go-fuzz-utils v0.0.0-20210901195358-9657fcfd256c/go.mod h1:f3jBhpWvuZmue0HZK52GzRHJOYHYSILs/c8+K2S/J+o= -github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc= -github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw= -github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0= -github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY= -github.com/umbracle/gohashtree v0.0.2-alpha.0.20230207094856-5b775a815c10 h1:CQh33pStIp/E30b7TxDlXfM0145bn2e8boI30IxAhTg= -github.com/umbracle/gohashtree v0.0.2-alpha.0.20230207094856-5b775a815c10/go.mod h1:x/Pa0FF5Te9kdrlZKJK82YmAkvL8+f989USgz6Jiw7M= -github.com/urfave/negroni v1.0.0/go.mod h1:Meg73S6kFm/4PpbYdq35yYWoCZ9mS/YSx+lKnmiohz4= -github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc= -github.com/valyala/fasthttp v1.6.0/go.mod h1:FstJa9V+Pj9vQ7OJie2qMHdwemEDaDiSdBnvPM1Su9w= -github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8= -github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ= -github.com/valyala/tcplisten v0.0.0-20161114210144-ceec8f93295a/go.mod h1:v3UYOV9WzVtRmSR+PDvWpU/qWl4Wa5LApYYX4ZtKbio= -github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= -github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= -github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= -github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q= -github.com/yalp/jsonpath v0.0.0-20180802001716-5cc68e5049a0/go.mod h1:/LWChgwKmvncFJFHJ7Gvn9wZArjbV5/FppcK2fKk/tI= -github.com/yudai/gojsondiff v1.0.0/go.mod h1:AY32+k2cwILAkW1fbgxQ5mUmMiZFgLIV+FBNExI05xg= -github.com/yudai/golcs v0.0.0-20170316035057-ecda9a501e82/go.mod h1:lgjkn3NuSvDfVJdfcVVdX+jpBxNmX4rDAzaS45IcYoM= -github.com/yudai/pp v2.0.1+incompatible/go.mod h1:PuxR/8QJ7cyCkFp/aUDS+JY727OFEZkTdatxwunjIkc= -github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= -github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= -github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE= -github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= -go.uber.org/atomic v1.11.0 h1:ZvwS0R+56ePWxUNi+Atn9dWONBPp/AUETXlHW0DxSjE= -go.uber.org/atomic v1.11.0/go.mod h1:LUxbIzbOniOlMKjJjyPfpl4v+PKK2cNJn91OQbhoJI0= -go.uber.org/goleak v1.2.0 h1:xqgm/S+aQvhWFTtR0XK3Jvg7z8kGV8P4X14IzwN3Eqk= -go.uber.org/goleak v1.2.0/go.mod h1:XJYK+MuIchqpmGmUSAzotztawfKvYLUIgg7guXrwVUo= -go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= -go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= -go.uber.org/zap v1.25.0 h1:4Hvk6GtkucQ790dqmj7l1eEnRdKm3k3ZUrUMS2d5+5c= -go.uber.org/zap v1.25.0/go.mod h1:JIAUzQIH94IC4fOJQm7gMmBJP5k7wQfdcnYdPoEXJYk= -golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod 
h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= -golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= -golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= -golang.org/x/crypto v0.0.0-20191227163750-53104e6ec876/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= -golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= -golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= -golang.org/x/crypto v0.18.0 h1:PGVlW0xEltQnzFZ55hkuX5+KLyrMYhHld1YHO4AKcdc= -golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg= -golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= -golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa h1:FRnLl4eNAQl8hwxVVC17teOw8kdjVDVAiFMtgUdTSRQ= -golang.org/x/exp v0.0.0-20231110203233-9a3e6036ecaa/go.mod h1:zk2irFbV9DP96SEBUUAy67IdHUaZuSnrz1n472HUCLE= -golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= -golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= -golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= -golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= -golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= -golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= -golang.org/x/net v0.0.0-20180719180050-a680a1efc54d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= -golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190327091125-710a502c58a2/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= -golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191116160921-f9c825593386/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod 
h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= -golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= -golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= -golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= -golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= -golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= -golang.org/x/net v0.0.0-20211008194852-3b03d305991f/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= -golang.org/x/net v0.18.0 h1:mIYleuAkSbHh0tCv7RvjL3F6ZVbLjq4+R7zbOn3Kokg= -golang.org/x/net v0.18.0/go.mod h1:/czyP5RqHAH4odGYxBJ1qz0+CE5WZ+2j1YgoEo8F2jQ= -golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= -golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190204203706-41f3e6584952/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= -golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190626221950-04f50cda93cb/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys 
v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210403161142-5e06dd20ab57/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= -golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.11.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.16.0 h1:xWw16ngr6ZMtmxDyKyIgsE93KNKz5HKmMa3b8ALHidU= -golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= -golang.org/x/term v0.16.0 h1:m+B6fahuftsE9qjo0VWp2FW0mB3MTJvR0BaMQrq0pmE= -golang.org/x/term v0.16.0/go.mod h1:yn7UURbUtPyrVJPGPq404EukNFxcm/foM+bV/bfcDsY= -golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= -golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= -golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= -golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= -golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= -golang.org/x/tools 
v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20181221001348-537d06c36207/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= -golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= -golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190327201419-c70d86f8b7cf/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= -golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= -golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= -golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= -golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= -golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= -golang.org/x/tools v0.1.3/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= -golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= -golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU= -golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= -google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= -google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= -google.golang.org/genproto v0.0.0-20180518175338-11a468237815/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= -google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= -google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= -google.golang.org/genproto v0.0.0-20210624195500-8bfb893ecb84/go.mod h1:SzzZ/N+nwJDaO1kznhnlzqS8ocJICar6hYhVyhi++24= -google.golang.org/grpc v1.12.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= -google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= -google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= -google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY= -google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= -google.golang.org/grpc v1.38.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM= -google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= -google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod 
h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= -google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= -google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= -google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= -google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= -google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= -google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= -google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= -google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= -gopkg.in/cenkalti/backoff.v1 v1.1.0 h1:Arh75ttbsvlpVA7WtVpH4u9h6Zl46xuptxqLxPiSo4Y= -gopkg.in/cenkalti/backoff.v1 v1.1.0/go.mod h1:J6Vskwqd+OMVJl8C33mmtxTBs2gyzfv7UDAkHu8BrjI= -gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= -gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= -gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= -gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= -gopkg.in/go-playground/assert.v1 v1.2.1/go.mod h1:9RXL0bg/zibRAgZUYszZSwO/z8Y/a8bDuhia5mkpMnE= -gopkg.in/go-playground/validator.v8 v8.18.2/go.mod h1:RX2a/7Ha8BgOhfk7j780h4/u/RRjR0eouCJSH80/M2Y= -gopkg.in/ini.v1 v1.51.1/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= -gopkg.in/mgo.v2 v2.0.0-20180705113604-9856a29383ce/go.mod h1:yeKp02qBN3iKW1OzL3MGk2IdtZzaj7SFntXj72NppTA= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= -gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= -gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= -gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= -gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= -gopkg.in/yaml.v3 v3.0.0-20191120175047-4206685974f2/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod 
h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= -gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= -gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= -honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= -rsc.io/tmplfunc v0.0.3 h1:53XFQh69AfOa8Tw0Jm7t+GV7KZhOi6jzsCzTtKbMvzU= -rsc.io/tmplfunc v0.0.3/go.mod h1:AG3sTPzElb1Io3Yg4voV9AGZJuleGAwaVRxL9M49PhA= diff --git a/mev-boost-relay/internal/internal.go b/mev-boost-relay/internal/internal.go deleted file mode 100644 index ce1887064..000000000 --- a/mev-boost-relay/internal/internal.go +++ /dev/null @@ -1,4 +0,0 @@ -// Package internal contains internal tooling dependencies. -package internal - -import _ "github.com/btcsuite/btcd/btcutil" // see also https://github.com/flashbots/mev-boost-relay/issues/226 diff --git a/mev-boost-relay/internal/investigations/validator-registration-signature-check/main.go b/mev-boost-relay/internal/investigations/validator-registration-signature-check/main.go deleted file mode 100644 index 4e22ec5e5..000000000 --- a/mev-boost-relay/internal/investigations/validator-registration-signature-check/main.go +++ /dev/null @@ -1,56 +0,0 @@ -package main - -// -// Script to create a signed validator registration -// - -import ( - "fmt" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/common" -) - -var ( - gasLimit = 30000000 - feeRecipient = "0xdb65fEd33dc262Fe09D9a2Ba8F80b329BA25f941" - timestamp = int64(1606824043) -) - -func Perr(err error) { - if err != nil { - panic(err) - } -} - -func main() { - mainnetDetails, err := common.NewEthNetworkDetails(common.EthNetworkMainnet) - Perr(err) - - sk, pubkey, err := bls.GenerateNewKeypair() - Perr(err) - - pk, err := utils.BlsPublicKeyToPublicKey(pubkey) - Perr(err) - - // Fill in validator registration details - validatorRegistration := builderApiV1.ValidatorRegistration{ //nolint:exhaustruct - GasLimit: uint64(gasLimit), - Timestamp: time.Unix(timestamp, 0), - } - - validatorRegistration.Pubkey, err = utils.HexToPubkey(pk.String()) - Perr(err) - validatorRegistration.FeeRecipient, err = utils.HexToAddress(feeRecipient) - Perr(err) - - sig, err := ssz.SignMessage(&validatorRegistration, mainnetDetails.DomainBuilder, sk) - Perr(err) - fmt.Println("privkey:", sk.String()) - fmt.Println("pubkey: ", pk.String()) - fmt.Println("sig: ", sig.String()) -} diff --git a/mev-boost-relay/internal/investigations/validator-registration-signature-check/main_test.go b/mev-boost-relay/internal/investigations/validator-registration-signature-check/main_test.go deleted file mode 100644 index 2b31a2ccf..000000000 --- a/mev-boost-relay/internal/investigations/validator-registration-signature-check/main_test.go +++ /dev/null @@ -1,45 +0,0 @@ -package main - -import ( - "testing" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/common" - "github.com/stretchr/testify/require" -) - -// TestValidatorRegistrationSignature can be used to validate the signature of an arbitrary validator registration -func 
TestValidatorRegistrationSignature(t *testing.T) { - // Fill in validator registration details - pubkey := "0x84e975405f8691ad7118527ee9ee4ed2e4e8bae973f6e29aa9ca9ee4aea83605ae3536d22acc9aa1af0545064eacf82e" - gasLimit := 30000000 - feeRecipient := "0xdb65fed33dc262fe09d9a2ba8f80b329ba25f941" - timestamp := int64(1606824043) - signature := "0xaf12df007a0c78abb5575067e5f8b089cfcc6227e4a91db7dd8cf517fe86fb944ead859f0781277d9b78c672e4a18c5d06368b603374673cf2007966cece9540f3a1b3f6f9e1bf421d779c4e8010368e6aac134649c7a009210780d401a778a5" - - // Constructing the object - payload := builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - GasLimit: uint64(gasLimit), - Timestamp: time.Unix(timestamp, 0), - }, - } - - var err error - payload.Message.Pubkey, err = utils.HexToPubkey(pubkey) - require.NoError(t, err) - payload.Signature, err = utils.HexToSignature(signature) - require.NoError(t, err) - payload.Message.FeeRecipient, err = utils.HexToAddress(feeRecipient) - require.NoError(t, err) - - mainnetDetails, err := common.NewEthNetworkDetails(common.EthNetworkMainnet) - require.NoError(t, err) - - ok, err := ssz.VerifySignature(payload.Message, mainnetDetails.DomainBuilder, payload.Message.Pubkey[:], payload.Signature[:]) - require.NoError(t, err) - require.True(t, ok) -} diff --git a/mev-boost-relay/main.go b/mev-boost-relay/main.go deleted file mode 100644 index e79fda6a2..000000000 --- a/mev-boost-relay/main.go +++ /dev/null @@ -1,12 +0,0 @@ -package main - -import ( - "github.com/flashbots/mev-boost-relay/cmd" -) - -var Version = "dev" // is set during build process - -func main() { - cmd.Version = Version - cmd.Execute() -} diff --git a/mev-boost-relay/scripts/create-bls-keypair/main.go b/mev-boost-relay/scripts/create-bls-keypair/main.go deleted file mode 100644 index 118e26456..000000000 --- a/mev-boost-relay/scripts/create-bls-keypair/main.go +++ /dev/null @@ -1,23 +0,0 @@ -package main - -import ( - "fmt" - "log" - - "github.com/flashbots/go-boost-utils/bls" -) - -func main() { - sk, _, err := bls.GenerateNewKeypair() - if err != nil { - log.Fatal(err.Error()) - } - - blsPubkey, err := bls.PublicKeyFromSecretKey(sk) - if err != nil { - log.Fatal(err.Error()) - } - - fmt.Printf("secret key: 0x%x\n", bls.SecretKeyToBytes(sk)) - fmt.Printf("public key: 0x%x\n", bls.PublicKeyToBytes(blsPubkey)) -} diff --git a/mev-boost-relay/scripts/export-bids-auto.sh b/mev-boost-relay/scripts/export-bids-auto.sh deleted file mode 100755 index 338f8dfba..000000000 --- a/mev-boost-relay/scripts/export-bids-auto.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/bin/bash -# -# This script automatically determines the latest exported slot and the latest slot on chain, and -# exports all available buckets in between. 
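#!/bin/bash
# Editor's note: illustrative sketch only — not part of the original export-bids-auto.sh.
# It demonstrates the bucket arithmetic the header comment above describes, using
# hypothetical hard-coded slot numbers in place of the S3 listing and beaconcha.in lookups.
bucket_size=4000
latest_slot_exported=8000000   # assumption: value normally parsed from the S3 bucket listing
latest_slot_on_chain=8010000   # assumption: value normally read from beaconcha.in latestState
slot_start=$((latest_slot_exported + 1))   # resume one slot past the last exported slot
while true; do
  slot_end=$((slot_start + bucket_size - 1))
  if (( slot_end > latest_slot_on_chain )); then
    # the next full bucket would reach past the chain head, so stop here
    echo "next bucket ends in the future; stopping"
    break
  fi
  echo "would export slots $slot_start - $slot_end"
  slot_start=$((slot_start + bucket_size))
done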
-# -set -o errexit -set -o nounset -set -o pipefail -if [[ "${TRACE-0}" == "1" ]]; then - set -o xtrace -fi - -# number of bids to export per bucket -BUCKET_SIZE="${BUCKET_SIZE:-4000}" -echo "bucket_size: $BUCKET_SIZE" - -SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd ) -# echo "SCRIPT_DIR: $SCRIPT_DIR" - -# Get the latest previously exported slot from S3 -latestslot_exported=$( curl -s https://flashbots-boost-relay-public.s3.us-east-2.amazonaws.com/ | tr '\<' '\n' | sed -n -e 's/.*-to-//p' | sort | tail -n 1 | sed 's/[.].*//' ) -echo "latest_slot_exported: $latestslot_exported" - -# Get the latest slot on chain -latestslot=$( curl -s https://beaconcha.in/latestState | jq '.lastProposedSlot' ) -echo "latest slot: $latestslot" - -# Start at last exported slot +1 -slot_start=$((latestslot_exported + 1)) - -# Now loop over buckets until all slots are exported -while true; do - slot_end=$((slot_start + BUCKET_SIZE - 1)) - echo "slots to export: $slot_start - $slot_end" - - # End now if latest slot to export is in the future - if (( slot_end > latestslot )); then - echo "latest slot to export is in the future. exiting now" - exit 0 - fi - - # Export now - cmd="$SCRIPT_DIR/export-bids.sh $slot_start $slot_end" - echo $cmd - $cmd - slot_start=$((slot_start + BUCKET_SIZE)) -done \ No newline at end of file diff --git a/mev-boost-relay/scripts/export-bids.sh b/mev-boost-relay/scripts/export-bids.sh deleted file mode 100755 index 912c37d83..000000000 --- a/mev-boost-relay/scripts/export-bids.sh +++ /dev/null @@ -1,60 +0,0 @@ -#!/bin/bash -set -o errexit -# set -o nounset -set -o pipefail -if [[ "${TRACE-0}" == "1" ]]; then - set -o xtrace -fi - -# number of bids to export per bucket -BUCKET_SIZE="${BUCKET_SIZE:-4000}" - -if [ -z $DB ]; then - echo "missing postgres dns in DB env var" - exit 1 -fi - -if [ -z $1 ]; then - echo "missing slot-from arg1" - exit 1 -fi - -if [ -z $2 ]; then - echo "missing slot-to arg2" - exit 1 -fi - -function export() { - start=$1 - end=$2 - echo "exporting bids from slots $start -> $end" - fn1="builder-submissions_slot-${start}-to-${end}.csv" - fn2="builder-submissions_slot-${start}-to-${end}.json" - DB_DONT_APPLY_SCHEMA=1 DB_TABLE_PREFIX=mainnet go run . tool data-api-export-bids --db $DB --slot-from $start --slot-to $end --out $fn1 --out $fn2 - - echo "compressing $fn1 ..." - gzip $fn1 - echo "compressing $fn2 ..." - gzip $fn2 - - echo "uploading to s3..." 
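-    # Assumes an AWS CLI profile named "l1" with write access to the flashbots-boost-relay-public bucket.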
- aws --profile l1 s3 cp ./$fn1.gz s3://flashbots-boost-relay-public/data/2_builder-submissions/ - aws --profile l1 s3 cp ./$fn2.gz s3://flashbots-boost-relay-public/data/2_builder-submissions/ - - if [ "$DELETE" == "1" ]; then - rm -f $fn1* $fn2* - fi -} - -start=$1 -slot_end=$2 - -while [[ $start -le $slot_end ]]; do - end=$((start+BUCKET_SIZE-1)) - if [[ $end -gt $slot_end ]]; then - end=$slot_end - fi - # echo "exporting bids from slots $start -> $end" - export $start $end - start=$((end+1)) -done diff --git a/mev-boost-relay/scripts/export-payloads-month.sh b/mev-boost-relay/scripts/export-payloads-month.sh deleted file mode 100755 index 4425e2791..000000000 --- a/mev-boost-relay/scripts/export-payloads-month.sh +++ /dev/null @@ -1,32 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail -if [[ "${TRACE-0}" == "1" ]]; then - set -o xtrace -fi - -if [ -z $DB ]; then - echo "missing postgres dns in DB env var" - exit 1 -fi - -date_start_default=$(date -d"last month" +%Y-%m-01) -date_start=${DATE:-$date_start_default} -date_end=$(date -d "$date_start+1 month" +%Y-%m-%d) -echo "$date_start -> $date_end" - -fn_base=$(date -d "$date_start" +%Y-%m) -fn1="${fn_base}.csv" -fn2="${fn_base}.json" -echo $fn1 -echo $fn2 -DB_DONT_APPLY_SCHEMA=1 DB_TABLE_PREFIX=mainnet go run . tool data-api-export-payloads --db $DB --date-start $date_start --date-end $date_end --out $fn1 --out $fn2 - -if [[ -z "$DONTASK" ]]; then - echo "press enter to upload to S3..." - read -r -fi - -aws --profile l1 s3 cp $fn1 s3://flashbots-boost-relay-public/data/1_payloads-delivered/monthly/ -aws --profile l1 s3 cp $fn2 s3://flashbots-boost-relay-public/data/1_payloads-delivered/monthly/ diff --git a/mev-boost-relay/scripts/export-payloads-week.sh b/mev-boost-relay/scripts/export-payloads-week.sh deleted file mode 100755 index dc26b2449..000000000 --- a/mev-boost-relay/scripts/export-payloads-week.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -set -o errexit -set -o nounset -set -o pipefail -if [[ "${TRACE-0}" == "1" ]]; then - set -o xtrace -fi - -if [ -z $DB ]; then - echo "missing postgres dns in DB env var" - exit 1 -fi - -year_last=$(date -d"last week" +%Y) -year_last=${YEAR:-$year_last} -week_last=$(date -d"last week" +%U) -week_last=${WEEK:-$week_last} - -cmd="from datetime import date; d=date.fromisocalendar($year_last, int('$week_last'), 1); print('%s-%s-%02d' % (d.year, d.month, d.day));" -monday_last_week=$(python3 -c "$cmd") -cmd="from datetime import date, timedelta; d=date.fromisocalendar($year_last, int('$week_last'), 1); d=d+timedelta(weeks=1); print('%s-%s-%02d' % (d.year, d.month, d.day));" -monday_this_week=$(python3 -c "$cmd") -echo "$year_last $week_last = $monday_last_week -> $monday_this_week" -# exit 0 - -fn1="${year_last}_w${week_last}.csv" -fn2="${year_last}_w${week_last}.json" -echo $fn1 -echo $fn2 -DB_DONT_APPLY_SCHEMA=1 DB_TABLE_PREFIX=mainnet go run . tool data-api-export-payloads --db $DB --date-start $monday_last_week --date-end $monday_this_week --out $fn1 --out $fn2 - -if [[ -z "$DONTASK" ]]; then - echo "press enter to upload to S3..." 
- read -r -fi - -aws --profile l1 s3 cp $fn1 s3://flashbots-boost-relay-public/data/1_payloads-delivered/weekly/ -aws --profile l1 s3 cp $fn2 s3://flashbots-boost-relay-public/data/1_payloads-delivered/weekly/ diff --git a/mev-boost-relay/scripts/sse-event-logger/main.go b/mev-boost-relay/scripts/sse-event-logger/main.go deleted file mode 100644 index ed009fb35..000000000 --- a/mev-boost-relay/scripts/sse-event-logger/main.go +++ /dev/null @@ -1,55 +0,0 @@ -package main - -import ( - "os" - "os/signal" - "strings" - "syscall" - "time" - - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/sirupsen/logrus" -) - -var ( - beaconURIs = common.GetSliceEnv("BEACON_URIS", []string{"http://localhost:3500"}) - log *logrus.Entry -) - -func main() { - log = common.LogSetup(false, "info") - - log.Infof("Using beacon endpoints: %s", strings.Join(beaconURIs, ", ")) - for _, uri := range beaconURIs { - beaconInstance := beaconclient.NewProdBeaconInstance(log, uri) - go subscribeHead(beaconInstance) - go subscribePayloadAttr(beaconInstance) - } - - sigs := make(chan os.Signal, 1) - signal.Notify(sigs, syscall.SIGINT, syscall.SIGTERM) - <-sigs -} - -func subscribeHead(instance *beaconclient.ProdBeaconInstance) { - _log := log.WithField("beacon", instance.GetURI()) - _log.Info("subscribeHead") - c := make(chan beaconclient.HeadEventData) - go instance.SubscribeToHeadEvents(c) - for { - headEvent := <-c - _log.WithField("timestamp", time.Now().UTC().UnixMilli()).Infof("headEvent: slot=%d", headEvent.Slot) - } -} - -func subscribePayloadAttr(instance *beaconclient.ProdBeaconInstance) { - _log := log.WithField("beacon", instance.GetURI()) - _log.Info("subscribePayloadAttr") - c := make(chan beaconclient.PayloadAttributesEvent) - go instance.SubscribeToPayloadAttributesEvents(c) - for { - event := <-c - _log.WithField("timestamp", time.Now().UTC().UnixMilli()).Infof("payloadAttrEvent: slot=%d / parent=%s / randao=%s", event.Data.ProposalSlot, event.Data.ParentBlockHash, event.Data.PayloadAttributes.PrevRandao) - } -} diff --git a/mev-boost-relay/scripts/website-staticgen/main.go b/mev-boost-relay/scripts/website-staticgen/main.go deleted file mode 100644 index 07d13c632..000000000 --- a/mev-boost-relay/scripts/website-staticgen/main.go +++ /dev/null @@ -1,43 +0,0 @@ -package main - -import ( - "bytes" - "encoding/json" - "fmt" - "io" - "os" - - "github.com/flashbots/mev-boost-relay/services/website" -) - -func main() { - var data website.StatusHTMLData - - jsonFile, err := os.Open("testdata/website-htmldata.json") - if err != nil { - panic(err) - } - defer jsonFile.Close() - - byteValue, _ := io.ReadAll(jsonFile) - err = json.Unmarshal(byteValue, &data) - if err != nil { - panic(err) - } - - indexTemplate, err := website.ParseIndexTemplate() - if err != nil { - panic(err) - } - - html := bytes.Buffer{} - if err := indexTemplate.Execute(&html, data); err != nil { - panic(err) - } - - if err := os.WriteFile("website-index.html", html.Bytes(), 0o600); err != nil { - panic(err) - } - - fmt.Println("Wrote website-index.html") -} diff --git a/mev-boost-relay/services/api/blocksim_ratelimiter.go b/mev-boost-relay/services/api/blocksim_ratelimiter.go deleted file mode 100644 index 715e5916f..000000000 --- a/mev-boost-relay/services/api/blocksim_ratelimiter.go +++ /dev/null @@ -1,160 +0,0 @@ -package api - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "net/http" - "sync" - "sync/atomic" - "time" - - 
"github.com/attestantio/go-eth2-client/spec" - "github.com/thedevbirb/flashbots-go-utils/cli" - "github.com/thedevbirb/flashbots-go-utils/jsonrpc" - "github.com/flashbots/mev-boost-relay/common" -) - -var ( - ErrRequestClosed = errors.New("request context closed") - ErrSimulationFailed = errors.New("simulation failed") - ErrJSONDecodeFailed = errors.New("json error") - ErrNoCapellaPayload = errors.New("capella payload is nil") - ErrNoDenebPayload = errors.New("deneb payload is nil") - - maxConcurrentBlocks = int64(cli.GetEnvInt("BLOCKSIM_MAX_CONCURRENT", 4)) // 0 for no maximum - simRequestTimeout = time.Duration(cli.GetEnvInt("BLOCKSIM_TIMEOUT_MS", 10000)) * time.Millisecond -) - -type IBlockSimRateLimiter interface { - Send(context context.Context, payload *common.BuilderBlockValidationRequest, isHighPrio, fastTrack bool) (error, error) - CurrentCounter() int64 -} - -type BlockSimulationRateLimiter struct { - cv *sync.Cond - counter int64 - blockSimURL string - client http.Client -} - -func NewBlockSimulationRateLimiter(blockSimURL string) *BlockSimulationRateLimiter { - return &BlockSimulationRateLimiter{ - cv: sync.NewCond(&sync.Mutex{}), - counter: 0, - blockSimURL: blockSimURL, - client: http.Client{ //nolint:exhaustruct - Timeout: simRequestTimeout, - Transport: &http.Transport{ - MaxIdleConns: 100, - MaxIdleConnsPerHost: 100, - MaxConnsPerHost: 100, - IdleConnTimeout: 90 * time.Second, - }, - }, - } -} - -func (b *BlockSimulationRateLimiter) Send(context context.Context, payload *common.BuilderBlockValidationRequest, isHighPrio, fastTrack bool) (requestErr, validationErr error) { - b.cv.L.Lock() - cnt := atomic.AddInt64(&b.counter, 1) - if maxConcurrentBlocks > 0 && cnt > maxConcurrentBlocks { - b.cv.Wait() - } - b.cv.L.Unlock() - - defer func() { - b.cv.L.Lock() - atomic.AddInt64(&b.counter, -1) - b.cv.Signal() - b.cv.L.Unlock() - }() - - if err := context.Err(); err != nil { - return fmt.Errorf("%w, %w", ErrRequestClosed, err), nil - } - - var simReq *jsonrpc.JSONRPCRequest - if payload.Version == spec.DataVersionCapella && payload.Capella == nil { - return ErrNoCapellaPayload, nil - } - - if payload.Version == spec.DataVersionDeneb && payload.Deneb == nil { - return ErrNoDenebPayload, nil - } - - submission, err := common.GetBlockSubmissionInfo(payload.VersionedSubmitBlockRequest) - if err != nil { - return err, nil - } - - // Prepare headers - headers := http.Header{} - headers.Add("X-Request-ID", fmt.Sprintf("%d/%s", submission.BidTrace.Slot, submission.BidTrace.BlockHash.String())) - if isHighPrio { - headers.Add("X-High-Priority", "true") - } - if fastTrack { - headers.Add("X-Fast-Track", "true") - } - - // Create and fire off JSON-RPC request - if payload.Version == spec.DataVersionDeneb { - simReq = jsonrpc.NewJSONRPCRequest("1", "flashbots_validateBuilderSubmissionV3", payload) - } else { - simReq = jsonrpc.NewJSONRPCRequest("1", "flashbots_validateBuilderSubmissionV2", payload) - } - _, requestErr, validationErr = SendJSONRPCRequest(&b.client, *simReq, b.blockSimURL, headers) - return requestErr, validationErr -} - -// CurrentCounter returns the number of waiting and active requests -func (b *BlockSimulationRateLimiter) CurrentCounter() int64 { - return atomic.LoadInt64(&b.counter) -} - -// SendJSONRPCRequest sends the request to URL and returns the general JsonRpcResponse, or an error (note: not the JSONRPCError) -func SendJSONRPCRequest(client *http.Client, req jsonrpc.JSONRPCRequest, url string, headers http.Header) (res *jsonrpc.JSONRPCResponse, requestErr, 
validationErr error) { - buf, err := json.Marshal(req) - if err != nil { - return nil, err, nil - } - - httpReq, err := http.NewRequest(http.MethodPost, url, bytes.NewReader(buf)) - if err != nil { - return nil, err, nil - } - - // set request headers - httpReq.Header.Add("Content-Type", "application/json") - for k, v := range headers { - httpReq.Header.Add(k, v[0]) - } - - // execute request - resp, err := client.Do(httpReq) - if err != nil { - return nil, err, nil - } - defer resp.Body.Close() - - // read all resp bytes - rawResp, err := io.ReadAll(resp.Body) - if err != nil { - return nil, fmt.Errorf("unable to read response bytes: %w", err), nil - } - - // try json parsing - res = new(jsonrpc.JSONRPCResponse) - if err := json.NewDecoder(bytes.NewReader(rawResp)).Decode(res); err != nil { - return nil, fmt.Errorf("%w: %v", ErrJSONDecodeFailed, string(rawResp[:])), nil - } - - if res.Error != nil { - return res, nil, fmt.Errorf("%w: %s", ErrSimulationFailed, res.Error.Message) - } - return res, nil, nil -} diff --git a/mev-boost-relay/services/api/constraints.go b/mev-boost-relay/services/api/constraints.go deleted file mode 100644 index a58a49e99..000000000 --- a/mev-boost-relay/services/api/constraints.go +++ /dev/null @@ -1,338 +0,0 @@ -package api - -import ( - "encoding/binary" - - "github.com/attestantio/go-eth2-client/spec/phase0" - ssz "github.com/ferranbt/fastssz" -) - -// These types are taken from https://chainbound.github.io/bolt-docs/ - -const ( - // Note: we decided to set max constraints per slot to the same value - // as the max transactions per block in Ethereum. This allows bolt operators - // to decide how many commitments to include in a slot without the protocol - // imposing hard limits that would be really hard to change in the future. - // - // Specs: https://github.com/ethereum/consensus-specs/blob/9515f3e7e1ce893f97ac638d0280ea9026518bad/specs/bellatrix/beacon-chain.md#execution - MAX_CONSTRAINTS_PER_SLOT = 1048576 // 2**20 - MAX_BYTES_PER_TRANSACTION = 1073741824 // 2**30 -) - -type SignedConstraints struct { - Message *ConstraintsMessage `json:"message"` - // NOTE: This might change to an ECDSA signature in the future. In such case, - // when encoding/decoding SSZ we should take into account that it is 64 bytes long instead of 96 - Signature phase0.BLSSignature `ssz-size:"96" json:"signature"` -} - -type ConstraintsMessage struct { - ValidatorIndex uint64 `json:"validator_index"` - Slot uint64 `json:"slot"` - Constraints []*Constraint `ssz-max:"1048576" json:"constraints"` -} - -type Constraint struct { - Tx Transaction `ssz-max:"1073741824" json:"tx"` - Index *Index `json:"index"` -} - -// Index is the Union[uint64, None] (For SSZ purposes) -type Index uint64 - -func NewIndex(i uint64) *Index { - idx := Index(i) - return &idx -} - -func (c SignedConstraints) String() string { - return JSONStringify(c) -} - -func (c ConstraintsMessage) String() string { - return JSONStringify(c) -} - -func (c Constraint) String() string { - return JSONStringify(c) -} - -// ConstraintsMap is a map of constraints for a block. -type ConstraintsMap = map[phase0.Hash32]*Constraint - -// ConstraintCache is a cache for constraints. 
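-// It maps a slot number to a ConstraintsMap, i.e. the constraints for that slot keyed by hash.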
-type ConstraintCache struct { - // map of slots to constraints - constraints map[uint64]ConstraintsMap -} - -func (c *SignedConstraints) MarshalSSZ() ([]byte, error) { - return ssz.MarshalSSZ(c) -} - -func (c *SignedConstraints) MarshalSSZTo(dst []byte) ([]byte, error) { - // We have 4 bytes of an offset to a dinamically sized object - // plus 96 bytes of the BLS signature. This indicates - // where the dynamic data begins - offset := 100 - - // Field (0) `Message` - dst = ssz.WriteOffset(dst, offset) - - // Field (1) `Signature` - dst = append(dst, c.Signature[:]...) - - // Field (0) `Message` - dst, err := c.Message.MarshalSSZTo(dst) - - return dst, err -} - -func (c *SignedConstraints) SizeSSZ() int { - // At minimum, the size is 4 bytes of an offset to a dinamically sized object - // plus 96 bytes of the BLS signature - size := 100 - - // Field (0) 'Message'. We need to add the size of the message with its default values - if c.Message == nil { - c.Message = new(ConstraintsMessage) - } - size += c.Message.SizeSSZ() - - return 0 -} - -func (c *SignedConstraints) UnmarshalSSZ(buf []byte) (err error) { - size := uint64(len(buf)) - if size < 100 { - // The buf must be at least 100 bytes long according to offset + signature - return ssz.ErrSize - } - - tail := buf - var o0 uint64 // Offset (0) 'Message' - - // Offset (0) 'Message'. Handle offset too big and too small respectively - if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { - return ssz.ErrOffset - } - if o0 < 100 { - return ssz.ErrInvalidVariableOffset - } - - // Field (0) 'Message' - buf = tail[o0:] - if c.Message == nil { - c.Message = new(ConstraintsMessage) - } - if err = c.Message.UnmarshalSSZ(buf); err != nil { - return - } - - // Field (1) `Signature` - copy(c.Signature[:], tail[4:100]) - - return -} - -func (m *ConstraintsMessage) MarshalSSZ() ([]byte, error) { - return ssz.MarshalSSZ(m) -} - -func (m *ConstraintsMessage) MarshalSSZTo(buf []byte) (dst []byte, err error) { - // We have 4 bytes of an offset to a dinamically sized object - // plus 16 bytes of the two uint64 fields - offset := 20 - dst = buf - - // Field (0) `ValidatorIndex` - dst = ssz.MarshalUint64(dst, m.ValidatorIndex) - - // Field (1) `Slot` - dst = ssz.MarshalUint64(dst, m.Slot) - - // Field (2) `Constraints` - dst = ssz.WriteOffset(dst, offset) - - // ------- Dynamic fields ------- - - // Field (2) `Constraints` - if size := len(m.Constraints); size > MAX_CONSTRAINTS_PER_SLOT { - err = ssz.ErrListTooBigFn("ConstraintsMessage.Constraints", size, MAX_CONSTRAINTS_PER_SLOT) - return - } - // Each constraint is a dynamically sized object so we first add the offsets - offset = 4 * len(m.Constraints) - for i := 0; i < len(m.Constraints); i++ { - dst = ssz.WriteOffset(dst, offset) - offset += m.Constraints[i].SizeSSZ() - } - // Now we add the actual data - for i := 0; i < len(m.Constraints); i++ { - if dst, err = m.Constraints[i].MarshalSSZTo(dst); err != nil { - return - } - if size := len(m.Constraints[i].Tx); size > MAX_BYTES_PER_TRANSACTION { - err = ssz.ErrBytesLengthFn("Constraints[i].Tx", size, MAX_BYTES_PER_TRANSACTION) - return - } - } - - return -} - -func (m *ConstraintsMessage) SizeSSZ() int { - // At minimum, the size is 4 bytes of an offset to a dinamically sized object - // plus 16 bytes of the two uint64 fields - size := 20 - - // Field (2) 'Constraints'. 
We need to add the size of the constraints with their default values - for i := 0; i < len(m.Constraints); i++ { - // The offset to the transaction list - size += 4 - - size += len(m.Constraints[i].Tx) - size += m.Constraints[i].Index.SizeSSZ() - } - return size -} - -func (m *ConstraintsMessage) UnmarshalSSZ(buf []byte) (err error) { - size := uint64(len(buf)) - if size < 20 { - // 8 + 8 + 4 bytes for the offset - return ssz.ErrSize - } - - tail := buf - var o2 uint64 - - // Field (0) `ValidatorIndex` - m.ValidatorIndex = binary.LittleEndian.Uint64(buf[0:8]) - - // Field (1) `Slot` - m.Slot = binary.LittleEndian.Uint64(buf[8:16]) - - // Offset (2) 'Constraints' - if o2 = ssz.ReadOffset(buf[16:20]); o2 > size { - return ssz.ErrOffset - } - if o2 < 20 { - return ssz.ErrInvalidVariableOffset - } - - // Field (2) `Constraints` - buf = tail[o2:] - // We first read the amount of offset values we have, by looking - // at how big is the first offset - var length int - if length, err = ssz.DecodeDynamicLength(buf, MAX_CONSTRAINTS_PER_SLOT); err != nil { - return - } - m.Constraints = make([]*Constraint, length) - err = ssz.UnmarshalDynamic(buf, length, func(indx int, buf []byte) (err error) { - if m.Constraints[indx] == nil { - m.Constraints[indx] = new(Constraint) - } - return m.Constraints[indx].UnmarshalSSZ(buf) - }) - - return -} - -func (c *Constraint) MarshalSSZ() ([]byte, error) { - return ssz.MarshalSSZ(c) -} - -func (c *Constraint) MarshalSSZTo(buf []byte) (dst []byte, err error) { - // Both fields are dynamically sized, so we start with two offsets of 4 bytes each - offset := 8 - dst = buf - - // Field (0) `Tx` - dst = ssz.WriteOffset(dst, offset) - offset += len(c.Tx) - - // Field (1) `Index` - dst = ssz.WriteOffset(dst, offset) - - // Field (0) `Tx` - dst = append(dst, c.Tx...) - - // Field (1) `Index` - if c.Index == nil { - dst = append(dst, 0) - } else { - // Index is `Union[None, uint64] - dst = append(dst, 1) - dst = ssz.MarshalUint64(dst, uint64(*c.Index)) - } - - return -} - -func (c *Constraint) SizeSSZ() int { - // Both fields are dynamically sized, so we start with two offsets of 4 bytes each - size := 8 - - // Field (0) 'Tx'. - size += len(c.Tx) - - // Field (1) 'Index'. - size += c.Index.SizeSSZ() - - return size -} - -func (c *Constraint) UnmarshalSSZ(buf []byte) (err error) { - size := uint64(len(buf)) - if size < 8 { - // It needs to contain at least 8 bytes for the two offsets - return ssz.ErrSize - } - - tail := buf - var o0, o1 uint64 - - // Offset (0) 'Tx' - if o0 = ssz.ReadOffset(buf[0:4]); o0 > size { - return ssz.ErrOffset - } - if o0 < 8 { - return ssz.ErrInvalidVariableOffset - } - - // Offset (1) 'Index' - if o1 = ssz.ReadOffset(buf[4:8]); o1 > size || o0 > o1 { - return ssz.ErrOffset - } - - // Field (0) `Tx` - buf = tail[o0:o1] - if len(buf) > MAX_BYTES_PER_TRANSACTION { - return ssz.ErrBytesLengthFn("Constraint.Tx", len(buf), MAX_BYTES_PER_TRANSACTION) - } - c.Tx = make([]byte, 0, len(buf)) - c.Tx = append(c.Tx, buf...) 
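-
-	// The optional Index that follows is an SSZ union: a 1-byte selector (0 = None, 1 = uint64),
-	// followed, when the selector is 1, by the index as an 8-byte little-endian value.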
- - // Field (1) `Index` - buf = tail[o1:] - if buf[0] == 0 { - // Means it's a None value - c.Index = nil - } else { - c.Index = new(Index) - *(c.Index) = Index(binary.LittleEndian.Uint64(buf[1:])) - } - - return -} - -func (i *Index) SizeSSZ() int { - if i == nil { - return 1 - } - // selector + uint64 - return 9 -} diff --git a/mev-boost-relay/services/api/constraints_test.go b/mev-boost-relay/services/api/constraints_test.go deleted file mode 100644 index 920ac4e7a..000000000 --- a/mev-boost-relay/services/api/constraints_test.go +++ /dev/null @@ -1,465 +0,0 @@ -package api - -import ( - "encoding/hex" - "reflect" - "testing" - - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/bls" - "github.com/stretchr/testify/require" -) - -func TestSignedConstraints_MarshalSSZTo(t *testing.T) { - type fields struct { - Message *ConstraintsMessage - Signature phase0.BLSSignature - } - type args struct { - dst []byte - } - - tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") - require.NoError(t, err) - // remember that uints are in little endian! - // offset offset(8+16-1=23) tx none - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" - // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00) - require.NoError(t, err) - // offset offset(8+16-1=23) tx selector and index - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" - // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - - // -------------------------------- SignedConstraints --------------------------------------------------------------------------------------------------------------------------------------------------- |-------- ConstraintsMessage ---------------- | -- offsets -- | --- raw constraint data - // | | | - // offset 96 bytes of signature | validatorIndex slot offset(20) | off off | - // 64000000_8b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df2_0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 - // - - wantDst, err := hex.DecodeString("640000008b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df20200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - require.NoError(t, err) - - skBytes, err := hex.DecodeString("51815cb2c5489f8d7dc4f9889b9771334a80ccc6a82ce9c2a1ef66dc270c9708") - require.NoError(t, err) - sk, _ := bls.SecretKeyFromBytes(skBytes) - require.NoError(t, err) - - message := &ConstraintsMessage{ - ValidatorIndex: 2, - Slot: 3, - Constraints: []*Constraint{ - {Tx: Transaction(tx1), Index: nil}, - {Tx: Transaction(tx1), Index: NewIndex(1)}, - }, - } - - // We tested this works gud below - messsageSSZ, err := message.MarshalSSZ() - require.NoError(t, err) - - sig := bls.Sign(sk, messsageSSZ) - sigBytes := bls.SignatureToBytes(sig) - - type test struct { - name string - fields fields - args args - wantDst []byte - wantErr bool - } - - tests := []test{ - { - name: "nil and non-nil index", - fields: fields{ - Message: message, - Signature: 
phase0.BLSSignature(sigBytes[:]), - }, - args: args{dst: make([]byte, 0)}, - wantDst: wantDst, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := &SignedConstraints{ - Message: tt.fields.Message, - Signature: tt.fields.Signature, - } - got, err := c.MarshalSSZTo(tt.args.dst) - if (err != nil) != tt.wantErr { - t.Errorf("SignedConstraints.MarshalSSZTo() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(got, tt.wantDst) { - t.Errorf("SignedConstraints.MarshalSSZTo() = %v, want %v", got, tt.wantDst) - } - }) - } -} - -func TestSignedConstraints_UnmarshalSSZ(t *testing.T) { - type fields struct { - Message *ConstraintsMessage - Signature phase0.BLSSignature - } - - type args struct { - buf []byte - } - - tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") - require.NoError(t, err) - // remember that uints are in little endian! - // offset offset(8+16-1=23) tx none - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" - // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00) - require.NoError(t, err) - // offset offset(8+16-1=23) tx selector and index - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" - // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - - // -------------------------------- SignedConstraints --------------------------------------------------------------------------------------------------------------------------------------------------- |-------- ConstraintsMessage ---------------- | -- offsets -- | --- raw constraint data - // | | | - // offset 96 bytes of signature | validatorIndex slot offset(20) | off off | - // 64000000_8b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df2_0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 - // - - buf, err := hex.DecodeString("640000008b136ad4a3ce9443c1f42b29eeb79bf33c90f966671c2381ac25014d8b1dd4cc4b76731c4cd61dbd3978a9240b9a91ea0f9685c03f18372137a2b49eb0afeadd474476af3a7b84ccf76e7ed6a2973ea2b8eb972a455752f37578e365bf877df20200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - require.NoError(t, err) - - skBytes, err := hex.DecodeString("51815cb2c5489f8d7dc4f9889b9771334a80ccc6a82ce9c2a1ef66dc270c9708") - require.NoError(t, err) - sk, _ := bls.SecretKeyFromBytes(skBytes) - require.NoError(t, err) - - message := &ConstraintsMessage{ - ValidatorIndex: 2, - Slot: 3, - Constraints: []*Constraint{ - {Tx: Transaction(tx1), Index: nil}, - {Tx: Transaction(tx1), Index: NewIndex(1)}, - }, - } - - // We tested this works gud below - messsageSSZ, err := message.MarshalSSZ() - require.NoError(t, err) - - sig := bls.Sign(sk, messsageSSZ) - sigBytes := bls.SignatureToBytes(sig) - - type test struct { - name string - fields fields - args args - wantDst []byte - wantErr bool - } - - tests := []test{ - { - name: "nil and non-nil index", - fields: fields{ - Message: message, - Signature: phase0.BLSSignature(sigBytes[:]), - }, - args: args{buf: buf}, - wantDst: make([]byte, 0), - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - 
expected := &SignedConstraints{ - Message: tt.fields.Message, - Signature: tt.fields.Signature, - } - actual := &SignedConstraints{} - if err := actual.UnmarshalSSZ(tt.args.buf); (err != nil) != tt.wantErr { - t.Errorf("SignedConstraints.UnmarshalSSZ() error = %v, wantErr %v", err, tt.wantErr) - } - if !reflect.DeepEqual(expected, actual) { - t.Errorf("SignedConstraints.UnmarshalSSZ() = %v, want %v", actual, expected) - } - }) - } -} - -func TestConstraintsMessage_MarshalSSZTo(t *testing.T) { - type fields struct { - ValidatorIndex uint64 - Slot uint64 - Constraints []*Constraint - } - type args struct { - buf []byte - } - - tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") - require.NoError(t, err) - // remember that uints are in little endian! - // offset offset(8+16-1=23) tx none - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" - // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") - require.NoError(t, err) - // offset offset(8+16-1=23) tx selector and index - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" - // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - - // ----------- ConstraintMessage ---------------| -- offsets -- | --- raw constraint data - // | | - // validatorIndex slot offset | off off | - // 0x0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 - // - - wantDst, err := hex.DecodeString("0200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - require.NoError(t, err) - - type test struct { - name string - fields fields - args args - wantDst []byte - wantErr bool - } - - tests := []test{ - { - name: "nil and non-nil index", - fields: fields{ - ValidatorIndex: 2, - Slot: 3, - Constraints: []*Constraint{ - {Tx: Transaction(tx1), Index: nil}, - {Tx: Transaction(tx1), Index: NewIndex(1)}, - }, - }, - wantDst: wantDst, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - m := &ConstraintsMessage{ - ValidatorIndex: tt.fields.ValidatorIndex, - Slot: tt.fields.Slot, - Constraints: tt.fields.Constraints, - } - gotDst, err := m.MarshalSSZTo(tt.args.buf) - if (err != nil) != tt.wantErr { - t.Errorf("ConstraintsMessage.MarshalSSZTo() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(gotDst, tt.wantDst) { - t.Errorf("ConstraintsMessage.MarshalSSZTo() = %v, want %v", gotDst, tt.wantDst) - } - }) - } -} - -func TestConstraintsMessage_UnmarshalSSZ(t *testing.T) { - type fields struct { - ValidatorIndex uint64 - Slot uint64 - Constraints []*Constraint - } - type args struct { - buf []byte - } - - tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") - require.NoError(t, err) - // remember that uints are in little endian! 
- // offset offset(8+16-1=23) tx none - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" - // wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") - require.NoError(t, err) - // offset offset(8+16-1=23) tx selector and index - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" - // wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - - // ----------- ConstraintMessage ---------------| -- offsets -- | --- raw constraint data - // | | - // validatorIndex slot offset | off off | - // 0x0200000000000000_0300000000000000_14000000_08000000_20000000_08000000170000000102030405060708090a0b0c0d0e0f00_08000000170000000102030405060708090a0b0c0d0e0f010100000000000000 - // - - buf, err := hex.DecodeString("0200000000000000030000000000000014000000080000002000000008000000170000000102030405060708090a0b0c0d0e0f0008000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - require.NoError(t, err) - - type test struct { - name string - fields fields - args args - wantDst []byte - wantErr bool - } - - tests := []test{ - { - name: "nil and non-nil index", - fields: fields{ - ValidatorIndex: 2, - Slot: 3, - Constraints: []*Constraint{ - {Tx: Transaction(tx1), Index: nil}, - {Tx: Transaction(tx1), Index: NewIndex(1)}, - }, - }, - args: args{buf: buf}, - wantDst: []byte{}, - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - expected := &ConstraintsMessage{ - ValidatorIndex: tt.fields.ValidatorIndex, - Slot: tt.fields.Slot, - Constraints: tt.fields.Constraints, - } - actual := &ConstraintsMessage{} - if err := actual.UnmarshalSSZ(tt.args.buf); (err != nil) != tt.wantErr { - t.Errorf("ConstraintsMessage.UnmarshalSSZ() error = %v, wantErr %v", err, tt.wantErr) - } - if !reflect.DeepEqual(expected, actual) { - t.Errorf("ConstraintMessage.UnmarshalSSZ() = %v, want %v", actual, expected) - } - }) - } -} - -func TestConstraint_MarshalSSZTo(t *testing.T) { - type fields struct { - Tx Transaction - Index *Index - } - type args struct { - buf []byte - } - type test struct { - name string - fields fields - args args - wantDst []byte - wantErr bool - } - - tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") - require.NoError(t, err) - // remember that uints are in little endian! 
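-	// Worked layout: tx1 is 15 bytes and a Constraint starts with two 4-byte offsets, so the
-	// tx data begins at byte 8 and the optional index field at byte 8+15 = 23 = 0x17 (hence
-	// the "08000000" and "17000000" little-endian prefixes in the expected encodings below).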
- // offset offset(8+16-1=23) tx none - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" - wantDst1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") - require.NoError(t, err) - // offset offset(8+16-1=23) tx selector and index - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" - wantDst2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - require.NoError(t, err) - - tests := []test{ - { - name: "nil index", - fields: fields{ - Tx: Transaction(tx1), - Index: nil, - }, - args: args{ - buf: make([]byte, 0), - }, - wantDst: wantDst1, - wantErr: false, - }, - { - name: "not-nil index", - fields: fields{ - Tx: Transaction(tx1), - Index: NewIndex(1), - }, - args: args{ - buf: make([]byte, 0), - }, - wantDst: wantDst2, - wantErr: false, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - c := &Constraint{ - Tx: tt.fields.Tx, - Index: tt.fields.Index, - } - gotDst, err := c.MarshalSSZTo(tt.args.buf) - if (err != nil) != tt.wantErr { - t.Errorf("Constraint.MarshalSSZTo() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(gotDst, tt.wantDst) { - t.Errorf("Constraint.MarshalSSZTo() = %v, want %v", gotDst, tt.wantDst) - } - }) - } -} - -func TestConstraint_UnmarshalSSZ(t *testing.T) { - type fields struct { - Tx Transaction - Index *Index - } - type args struct { - buf []byte - } - type test struct { - name string - fields fields - args args - wantErr bool - } - - tx1, err := hex.DecodeString("0102030405060708090a0b0c0d0e0f") - require.NoError(t, err) - // remember that uints are in little endian! - // offset offset(8+16-1=23) tx none - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_00" - buf1, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f00") - require.NoError(t, err) - // offset offset(8+16-1=23) tx selector and index - // "0x08000000_17000000_000102030405060708090a0b0c0d0e0f_010100000000000000" - buf2, err := hex.DecodeString("08000000170000000102030405060708090a0b0c0d0e0f010100000000000000") - require.NoError(t, err) - - tests := []test{ - { - name: "nil index", - fields: fields{ - Tx: Transaction(tx1), - Index: nil, - }, - args: args{buf: buf1}, - }, - { - name: "non-nil index", - fields: fields{ - Tx: Transaction(tx1), - Index: NewIndex(1), - }, - args: args{buf: buf2}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - want := &Constraint{ - Tx: tt.fields.Tx, - Index: tt.fields.Index, - } - c := &Constraint{} - if err := c.UnmarshalSSZ(tt.args.buf); (err != nil) != tt.wantErr { - t.Errorf("Constraint.UnmarshalSSZ() error = %v, wantErr %v", err, tt.wantErr) - } - require.Equal(t, want.Tx, c.Tx) - require.Equal(t, want.Index, c.Index) - }) - } -} diff --git a/mev-boost-relay/services/api/mock_blocksim_ratelimiter.go b/mev-boost-relay/services/api/mock_blocksim_ratelimiter.go deleted file mode 100644 index 1d180e560..000000000 --- a/mev-boost-relay/services/api/mock_blocksim_ratelimiter.go +++ /dev/null @@ -1,19 +0,0 @@ -package api - -import ( - "context" - - "github.com/flashbots/mev-boost-relay/common" -) - -type MockBlockSimulationRateLimiter struct { - simulationError error -} - -func (m *MockBlockSimulationRateLimiter) Send(context context.Context, payload *common.BuilderBlockValidationRequest, isHighPrio, fastTrack bool) (error, error) { - return nil, m.simulationError -} - -func (m *MockBlockSimulationRateLimiter) CurrentCounter() int64 { - return 0 -} diff --git 
a/mev-boost-relay/services/api/optimistic_test.go b/mev-boost-relay/services/api/optimistic_test.go deleted file mode 100644 index ed766b4ec..000000000 --- a/mev-boost-relay/services/api/optimistic_test.go +++ /dev/null @@ -1,550 +0,0 @@ -package api - -import ( - "context" - "encoding/json" - "fmt" - "math/big" - "net/http" - "net/http/httptest" - "strconv" - "testing" - "time" - - "github.com/alicebob/miniredis/v2" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/holiman/uint256" - "github.com/stretchr/testify/require" -) - -const ( - slot = uint64(41) - collateral = 1000 - builderID = "builder0x69" - randao = "0xcf8e0d4e9587369b2301d0790347320302cc0943d5a1884560367e8208d920f2" - emptyHash = "0x0000000000000000000000000000000000000000000000000000000000000000" - proposerInd = uint64(987) - genesis = 1606824023 -) - -var ( - feeRecipient = bellatrix.ExecutionAddress{0x02} - errFake = fmt.Errorf("foo error") -) - -func getTestBidTrace(pubkey phase0.BLSPubKey, value, slot uint64) *common.BidTraceV2WithBlobFields { - return &common.BidTraceV2WithBlobFields{ - BidTrace: builderApiV1.BidTrace{ - Slot: slot, - BuilderPubkey: pubkey, - ProposerFeeRecipient: feeRecipient, - Value: uint256.NewInt(value), - }, - } -} - -type blockRequestOpts struct { - pubkey phase0.BLSPubKey - secretkey *bls.SecretKey - blockValue uint64 - slot uint64 - domain phase0.Domain - version spec.DataVersion -} - -func startTestBackend(t *testing.T) (*phase0.BLSPubKey, *bls.SecretKey, *testBackend) { - t.Helper() - // Setup test key pair. - sk, _, err := bls.GenerateNewKeypair() - require.NoError(t, err) - blsPubkey, err := bls.PublicKeyFromSecretKey(sk) - require.NoError(t, err) - pubkey, err := utils.BlsPublicKeyToPublicKey(blsPubkey) - require.NoError(t, err) - pkStr := pubkey.String() - - // Setup test backend. - backend := newTestBackend(t, 1) - backend.relay.genesisInfo = &beaconclient.GetGenesisResponse{} - backend.relay.genesisInfo.Data.GenesisTime = 0 - backend.relay.proposerDutiesMap = map[uint64]*common.BuilderGetValidatorsResponseEntry{ - slot: { - Entry: &builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: [20]byte(feeRecipient), - GasLimit: 5000, - Timestamp: time.Unix(0xffffffff, 0), - Pubkey: [48]byte(phase0.BLSPubKey{}), - }, - }, - }, - } - backend.relay.opts.BlockBuilderAPI = true - backend.relay.beaconClient = &beaconclient.MockMultiBeaconClient{} - backend.relay.blockSimRateLimiter = &MockBlockSimulationRateLimiter{} - backend.relay.blockBuildersCache = map[string]*blockBuilderCacheEntry{ - pkStr: { - status: common.BuilderStatus{ - IsHighPrio: true, - IsOptimistic: true, - }, - collateral: big.NewInt(int64(collateral)), - }, - } - - // Setup test db, redis, and datastore. 
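-	// The mocks below register the test builder as high-prio and optimistic with the test
-	// collateral; Redis is an in-memory miniredis instance, so no external services are needed.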
- mockDB := &database.MockDB{ - Builders: map[string]*database.BlockBuilderEntry{ - pkStr: { - BuilderPubkey: pkStr, - IsHighPrio: true, - IsOptimistic: true, - BuilderID: builderID, - Collateral: strconv.Itoa(collateral), - }, - }, - Demotions: map[string]bool{}, - Refunds: map[string]bool{}, - } - redisTestServer, err := miniredis.Run() - require.NoError(t, err) - mockRedis, err := datastore.NewRedisCache("", redisTestServer.Addr(), "") - require.NoError(t, err) - mockDS, err := datastore.NewDatastore(mockRedis, nil, mockDB) - require.NoError(t, err) - - backend.relay.datastore = mockDS - backend.relay.redis = mockRedis - backend.relay.db = mockDB - - backend.relay.headSlot.Store(40) - return &pubkey, sk, backend -} - -func runOptimisticBlockSubmission(t *testing.T, opts blockRequestOpts, simErr error, backend *testBackend) *httptest.ResponseRecorder { - t.Helper() - backend.relay.blockSimRateLimiter = &MockBlockSimulationRateLimiter{ - simulationError: simErr, - } - - req := common.TestBuilderSubmitBlockRequest(opts.secretkey, getTestBidTrace(opts.pubkey, opts.blockValue, opts.slot), opts.version) - rr := backend.request(http.MethodPost, pathSubmitNewBlock, &req) - - // Let updates happen async. - time.Sleep(100 * time.Millisecond) - return rr -} - -func TestSimulateBlock(t *testing.T) { - cases := []struct { - description string - version spec.DataVersion - slot uint64 - simulationError error - expectError bool - }{ - { - description: "success_capella", - version: spec.DataVersionCapella, - }, - { - description: "simulation_error_capella", - version: spec.DataVersionCapella, - simulationError: errFake, - expectError: true, - }, - { - description: "block_already_known_capella", - version: spec.DataVersionCapella, - simulationError: fmt.Errorf(ErrBlockAlreadyKnown), //nolint:goerr113 - }, - { - description: "missing_trie_node_capella", - version: spec.DataVersionCapella, - simulationError: fmt.Errorf(ErrMissingTrieNode + "23e21f94cd97b3b27ae5c758277639dd387a6e3da5923c5485f24ec6c71e16b8 (path ) "), //nolint:goerr113 - }, - { - description: "success_deneb", - version: spec.DataVersionDeneb, - }, - { - description: "simulation_error_deneb", - version: spec.DataVersionDeneb, - simulationError: errFake, - expectError: true, - }, - { - description: "block_already_known_deneb", - version: spec.DataVersionDeneb, - simulationError: fmt.Errorf(ErrBlockAlreadyKnown), //nolint:goerr113 - }, - { - description: "missing_trie_node_deneb", - version: spec.DataVersionDeneb, - simulationError: fmt.Errorf(ErrMissingTrieNode + "23e21f94cd97b3b27ae5c758277639dd387a6e3da5923c5485f24ec6c71e16b8 (path ) "), //nolint:goerr113 - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - pubkey, secretkey, backend := startTestBackend(t) - backend.relay.blockSimRateLimiter = &MockBlockSimulationRateLimiter{ - simulationError: tc.simulationError, - } - _, simErr := backend.relay.simulateBlock(context.Background(), blockSimOptions{ - isHighPrio: true, - log: backend.relay.log, - builder: &blockBuilderCacheEntry{ - status: common.BuilderStatus{ - IsOptimistic: true, - }, - }, - req: &common.BuilderBlockValidationRequest{ - VersionedSubmitBlockRequest: common.TestBuilderSubmitBlockRequest( - secretkey, getTestBidTrace(*pubkey, collateral, slot), tc.version), - }, - }) - if tc.expectError { - require.Equal(t, tc.simulationError, simErr) - } - }) - } -} - -func TestProcessOptimisticBlock(t *testing.T) { - cases := []struct { - description string - wantStatus common.BuilderStatus - version 
spec.DataVersion - simulationError error - }{ - { - description: "success_capella", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - version: spec.DataVersionCapella, - }, - { - description: "simulation_error_capella", - wantStatus: common.BuilderStatus{ - IsOptimistic: false, - IsHighPrio: true, - }, - version: spec.DataVersionCapella, - simulationError: errFake, - }, - { - description: "success_deneb", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - version: spec.DataVersionDeneb, - }, - { - description: "simulation_error_deneb", - wantStatus: common.BuilderStatus{ - IsOptimistic: false, - IsHighPrio: true, - }, - version: spec.DataVersionDeneb, - simulationError: errFake, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - pubkey, secretkey, backend := startTestBackend(t) - pkStr := pubkey.String() - backend.relay.blockSimRateLimiter = &MockBlockSimulationRateLimiter{ - simulationError: tc.simulationError, - } - simResultC := make(chan *blockSimResult, 1) - backend.relay.processOptimisticBlock(blockSimOptions{ - isHighPrio: true, - log: backend.relay.log, - builder: &blockBuilderCacheEntry{ - status: common.BuilderStatus{ - IsOptimistic: true, - }, - }, - req: &common.BuilderBlockValidationRequest{ - VersionedSubmitBlockRequest: common.TestBuilderSubmitBlockRequest( - secretkey, getTestBidTrace(*pubkey, collateral, slot), tc.version), - }, - }, simResultC) - - // Check status in db. - builder, err := backend.relay.db.GetBlockBuilderByPubkey(pkStr) - require.NoError(t, err) - require.Equal(t, tc.wantStatus.IsOptimistic, builder.IsOptimistic) - require.Equal(t, tc.wantStatus.IsHighPrio, builder.IsHighPrio) - - // Make sure channel receives correct result - simResult := <-simResultC - require.True(t, simResult.optimisticSubmission) - require.Equal(t, tc.simulationError, simResult.validationErr) - require.NoError(t, simResult.requestErr) - require.True(t, simResult.wasSimulated) - - // Check demotion but no refund. - if tc.simulationError != nil { - mockDB, ok := backend.relay.db.(*database.MockDB) - require.True(t, ok) - require.True(t, mockDB.Demotions[pkStr]) - require.False(t, mockDB.Refunds[pkStr]) - } - }) - } -} - -func TestDemoteBuilder(t *testing.T) { - cases := []struct { - description string - wantStatus common.BuilderStatus - version spec.DataVersion - }{ - { - description: "capella", - wantStatus: common.BuilderStatus{ - IsOptimistic: false, - IsHighPrio: true, - }, - version: spec.DataVersionCapella, - }, - { - description: "deneb", - wantStatus: common.BuilderStatus{ - IsOptimistic: false, - IsHighPrio: true, - }, - version: spec.DataVersionDeneb, - }, - } - - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - pubkey, secretkey, backend := startTestBackend(t) - pkStr := pubkey.String() - req := common.TestBuilderSubmitBlockRequest(secretkey, getTestBidTrace(*pubkey, collateral, slot), tc.version) - backend.relay.demoteBuilder(pkStr, req, errFake) - - // Check status in db. - builder, err := backend.relay.db.GetBlockBuilderByPubkey(pkStr) - require.NoError(t, err) - require.Equal(t, tc.wantStatus.IsOptimistic, builder.IsOptimistic) - require.Equal(t, tc.wantStatus.IsHighPrio, builder.IsHighPrio) - - // Check demotion and refund statuses. 
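-			// Only the demotion flag is asserted here; the refund flag is exercised by the
-			// optimistic submission tests in this file.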
- mockDB, ok := backend.relay.db.(*database.MockDB) - require.True(t, ok) - require.True(t, mockDB.Demotions[pkStr]) - }) - } -} - -func TestPrepareBuildersForSlot(t *testing.T) { - pubkey, _, backend := startTestBackend(t) - pkStr := pubkey.String() - // Clear cache. - backend.relay.blockBuildersCache = map[string]*blockBuilderCacheEntry{} - backend.relay.prepareBuildersForSlot(slot + 1) - entry, ok := backend.relay.blockBuildersCache[pkStr] - require.True(t, ok) - require.True(t, entry.status.IsHighPrio) - require.True(t, entry.status.IsOptimistic) - require.False(t, entry.status.IsBlacklisted) - require.Zero(t, entry.collateral.Cmp(big.NewInt(int64(collateral)))) -} - -func TestBuilderApiSubmitNewBlockOptimistic(t *testing.T) { - testCases := []struct { - description string - wantStatus common.BuilderStatus - simulationError error - expectDemotion bool - httpCode uint64 - blockValue uint64 - slot uint64 - version spec.DataVersion - }{ - { - description: "success_value_less_than_collateral_capella", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - simulationError: nil, - expectDemotion: false, - httpCode: 200, // success - blockValue: collateral - 1, - slot: slot, - version: spec.DataVersionCapella, - }, - { - description: "success_value_greater_than_collateral_capella", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - simulationError: nil, - expectDemotion: false, - httpCode: 200, // success - blockValue: collateral + 1, - slot: slot, - version: spec.DataVersionCapella, - }, - { - description: "failure_value_more_than_collateral_capella", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - simulationError: errFake, - expectDemotion: false, - httpCode: 400, // failure (in pessimistic mode, block sim failure happens in response path) - blockValue: collateral + 1, - slot: slot, - version: spec.DataVersionCapella, - }, - { - description: "success_value_less_than_collateral_deneb", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - simulationError: nil, - expectDemotion: false, - httpCode: 200, // success - blockValue: collateral - 1, - slot: slot + 32, - version: spec.DataVersionDeneb, - }, - { - description: "success_value_greater_than_collateral_deneb", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - simulationError: nil, - expectDemotion: false, - httpCode: 200, // success - blockValue: collateral + 1, - slot: slot + 32, - version: spec.DataVersionDeneb, - }, - { - description: "failure_value_more_than_collateral_deneb", - wantStatus: common.BuilderStatus{ - IsOptimistic: true, - IsHighPrio: true, - }, - simulationError: errFake, - expectDemotion: false, - httpCode: 400, // failure (in pessimistic mode, block sim failure happens in response path) - blockValue: collateral + 1, - slot: slot + 32, - version: spec.DataVersionDeneb, - }, - } - - for _, tc := range testCases { - t.Run(tc.description, func(t *testing.T) { - pubkey, secretkey, backend := startTestBackend(t) - backend.relay.optimisticSlot.Store(tc.slot) - backend.relay.capellaEpoch = 1 - backend.relay.denebEpoch = 2 - backend.relay.proposerDutiesMap[tc.slot] = backend.relay.proposerDutiesMap[slot] - - randaoHash, err := utils.HexToHash(randao) - require.NoError(t, err) - withRoot, err := ComputeWithdrawalsRoot([]*capella.Withdrawal{}) - require.NoError(t, err) - backend.relay.payloadAttributes[emptyHash] = payloadAttributesHelper{ - slot: tc.slot, - 
withdrawalsRoot: withRoot, - payloadAttributes: beaconclient.PayloadAttributes{ - PrevRandao: randaoHash.String(), - }, - } - pkStr := pubkey.String() - rr := runOptimisticBlockSubmission(t, blockRequestOpts{ - secretkey: secretkey, - pubkey: *pubkey, - blockValue: tc.blockValue, - slot: tc.slot, - domain: backend.relay.opts.EthNetDetails.DomainBuilder, - version: tc.version, - }, tc.simulationError, backend) - - // Check http code. - require.Equal(t, uint64(rr.Code), tc.httpCode) - - // Check status in db. - builder, err := backend.relay.db.GetBlockBuilderByPubkey(pkStr) - require.NoError(t, err) - require.Equal(t, tc.wantStatus.IsOptimistic, builder.IsOptimistic) - require.Equal(t, tc.wantStatus.IsHighPrio, builder.IsHighPrio) - - // Check demotion status is set to expected and refund is false. - mockDB, ok := backend.relay.db.(*database.MockDB) - require.True(t, ok) - require.Equal(t, mockDB.Demotions[pkStr], tc.expectDemotion) - require.False(t, mockDB.Refunds[pkStr]) - }) - } -} - -func TestInternalBuilderStatus(t *testing.T) { - pubkey, _, backend := startTestBackend(t) - // Set all to false initially. - err := backend.relay.db.SetBlockBuilderStatus(pubkey.String(), common.BuilderStatus{}) - require.NoError(t, err) - path := "/internal/v1/builder/" + pubkey.String() - - setAndGetStatus := func(arg string, expected common.BuilderStatus) { - // Set & Get. - rr := backend.request(http.MethodPost, path+arg, nil) - require.Equal(t, http.StatusOK, rr.Code) - - rr = backend.request(http.MethodGet, path, nil) - require.Equal(t, http.StatusOK, rr.Code) - resp := &database.BlockBuilderEntry{} - err := json.Unmarshal(rr.Body.Bytes(), &resp) - require.NoError(t, err) - require.Equal(t, expected.IsHighPrio, resp.IsHighPrio) - require.Equal(t, expected.IsBlacklisted, resp.IsBlacklisted) - require.Equal(t, expected.IsOptimistic, resp.IsOptimistic) - } - // Add each on. - setAndGetStatus("?high_prio=true", common.BuilderStatus{IsHighPrio: true}) - setAndGetStatus("?blacklisted=true", common.BuilderStatus{IsHighPrio: true, IsBlacklisted: true}) - setAndGetStatus("?optimistic=true", common.BuilderStatus{IsHighPrio: true, IsBlacklisted: true, IsOptimistic: true}) -} - -func TestInternalBuilderCollateral(t *testing.T) { - pubkey, _, backend := startTestBackend(t) - path := "/internal/v1/builder/collateral/" + pubkey.String() - - // Set & Get. 
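-	// Note: the "collateral" query parameter carries the builder ID and "value" the collateral
-	// amount, as asserted against BuilderID and Collateral below.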
- rr := backend.request(http.MethodPost, path+"?collateral=builder0x69&value=10000", nil) - require.Equal(t, http.StatusOK, rr.Code) - - rr = backend.request(http.MethodGet, "/internal/v1/builder/"+pubkey.String(), nil) - require.Equal(t, http.StatusOK, rr.Code) - resp := &database.BlockBuilderEntry{} - err := json.Unmarshal(rr.Body.Bytes(), &resp) - require.NoError(t, err) - require.Equal(t, "builder0x69", resp.BuilderID) - require.Equal(t, "10000", resp.Collateral) -} diff --git a/mev-boost-relay/services/api/proofs.go b/mev-boost-relay/services/api/proofs.go deleted file mode 100644 index fce7d86eb..000000000 --- a/mev-boost-relay/services/api/proofs.go +++ /dev/null @@ -1,88 +0,0 @@ -package api - -import ( - "errors" - "fmt" - "time" - - "github.com/attestantio/go-eth2-client/spec/phase0" - gethCommon "github.com/ethereum/go-ethereum/common" - fastSsz "github.com/ferranbt/fastssz" - "github.com/flashbots/mev-boost-relay/common" - "github.com/sirupsen/logrus" -) - -var ( - ErrNilConstraint = errors.New("nil constraint") - ErrNilProof = errors.New("nil proof") - ErrInvalidProofs = errors.New("proof verification failed") - ErrInvalidRoot = errors.New("failed getting tx root from bid") - ErrHashesIndexesMismatch = errors.New("proof transaction hashes and indexes length mismatch") - ErrHashesConstraintsMismatch = errors.New("proof transaction hashes and constraints length mismatch") -) - -// verifyInclusionProof verifies the proofs against the constraints, and returns an error if the proofs are invalid. -// -// NOTE: assumes constraints transactions are already without blobs -func verifyInclusionProof(log *logrus.Entry, transactionsRoot phase0.Root, proof *common.InclusionProof, hashToConstraints HashToConstraintDecoded) error { - if proof == nil { - return ErrNilProof - } - - if len(proof.TransactionHashes) != len(proof.GeneralizedIndexes) { - return ErrHashesIndexesMismatch - } - - if len(proof.TransactionHashes) != len(hashToConstraints) { - return ErrHashesIndexesMismatch - } - - leaves := make([][]byte, len(hashToConstraints)) - indexes := make([]int, len(proof.GeneralizedIndexes)) - - for i, hash := range proof.TransactionHashes { - constraint, ok := hashToConstraints[gethCommon.Hash(hash)] - if constraint == nil || !ok { - return ErrNilConstraint - } - - // Compute the hash tree root for the raw preconfirmed transaction - // and use it as "Leaf" in the proof to be verified against - encoded, err := constraint.Tx.MarshalBinary() - if err != nil { - log.WithError(err).Error("error marshalling transaction without blob tx sidecar") - return err - } - - tx := Transaction(encoded) - txHashTreeRoot, err := tx.HashTreeRoot() - if err != nil { - return ErrInvalidRoot - } - - leaves[i] = txHashTreeRoot[:] - indexes[i] = int(proof.GeneralizedIndexes[i]) - i++ - } - - hashes := make([][]byte, len(proof.MerkleHashes)) - for i, hash := range proof.MerkleHashes { - hashes[i] = []byte(*hash) - } - - currentTime := time.Now() - ok, err := fastSsz.VerifyMultiproof(transactionsRoot[:], hashes, leaves, indexes) - elapsed := time.Since(currentTime) - if err != nil { - log.WithError(err).Error("error verifying merkle proof") - return err - } - - if !ok { - return ErrInvalidProofs - } else { - log.Info(fmt.Sprintf("[BOLT]: inclusion proof verified in %s", elapsed)) - } - - return nil -} diff --git a/mev-boost-relay/services/api/service.go b/mev-boost-relay/services/api/service.go deleted file mode 100644 index 81c405cd1..000000000 --- a/mev-boost-relay/services/api/service.go +++ /dev/null @@ -1,3372 +0,0 
@@ -// Package api contains the API webserver for the proposer and block-builder APIs -package api - -import ( - "bytes" - "compress/gzip" - "context" - "database/sql" - "encoding/json" - "fmt" - "io" - "math/big" - "net/http" - _ "net/http/pprof" - "os" - "sort" - "strconv" - "strings" - "sync" - "time" - - "github.com/NYTimes/gziphandler" - builderApi "github.com/attestantio/go-builder-client/api" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/buger/jsonparser" - "github.com/chainbound/shardmap" - "github.com/ethereum/go-ethereum/core/types" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/go-redis/redis/v9" - "github.com/gorilla/mux" - "github.com/holiman/uint256" - "github.com/pkg/errors" - "github.com/sirupsen/logrus" - "github.com/thedevbirb/flashbots-go-utils/cli" - "github.com/thedevbirb/flashbots-go-utils/httplogger" - uberatomic "go.uber.org/atomic" - "golang.org/x/exp/slices" -) - -const ( - ErrBlockAlreadyKnown = "simulation failed: block already known" - ErrBlockRequiresReorg = "simulation failed: block requires a reorg" - ErrMissingTrieNode = "missing trie node" -) - -var ( - ErrMissingLogOpt = errors.New("log parameter is nil") - ErrMissingBeaconClientOpt = errors.New("beacon-client is nil") - ErrMissingDatastoreOpt = errors.New("proposer datastore is nil") - ErrRelayPubkeyMismatch = errors.New("relay pubkey does not match existing one") - ErrServerAlreadyStarted = errors.New("server was already started") - ErrBuilderAPIWithoutSecretKey = errors.New("cannot start builder API without secret key") - ErrNegativeTimestamp = errors.New("timestamp cannot be negative") -) - -var ( - // Proposer API (builder-specs) - pathStatus = "/eth/v1/builder/status" - pathRegisterValidator = "/eth/v1/builder/validators" - pathGetHeader = "/eth/v1/builder/header/{slot:[0-9]+}/{parent_hash:0x[a-fA-F0-9]+}/{pubkey:0x[a-fA-F0-9]+}" - // BOLT: this endpoint will also return constraint proofs if there are any - pathGetHeaderWithProofs = "/eth/v1/builder/header_with_proofs/{slot:[0-9]+}/{parent_hash:0x[a-fA-F0-9]+}/{pubkey:0x[a-fA-F0-9]+}" - pathGetPayload = "/eth/v1/builder/blinded_blocks" - // BOLT: allow relay to receive constraints from the proposer - pathSubmitConstraints = "/eth/v1/builder/constraints" - - // Block builder API - pathBuilderGetValidators = "/relay/v1/builder/validators" - pathSubmitNewBlock = "/relay/v1/builder/blocks" - // BOLT: allow builders to ship merkle proofs with their blocks - pathSubmitNewBlockWithProofs = "/relay/v1/builder/blocks_with_proofs" - // BOLT: allow builders to subscribe to constraints - pathSubscribeConstraints = "/relay/v1/builder/constraints" - - // Data API - pathDataProposerPayloadDelivered = "/relay/v1/data/bidtraces/proposer_payload_delivered" - pathDataBuilderBidsReceived = "/relay/v1/data/bidtraces/builder_blocks_received" - pathDataValidatorRegistration = "/relay/v1/data/validator_registration" - - // Internal API - pathInternalBuilderStatus = "/internal/v1/builder/{pubkey:0x[a-fA-F0-9]+}" - pathInternalBuilderCollateral = "/internal/v1/builder/collateral/{pubkey:0x[a-fA-F0-9]+}" - - // number of goroutines to save active 
validator - numValidatorRegProcessors = cli.GetEnvInt("NUM_VALIDATOR_REG_PROCESSORS", 10) - - // various timings - timeoutGetPayloadRetryMs = cli.GetEnvInt("GETPAYLOAD_RETRY_TIMEOUT_MS", 100) - getHeaderRequestCutoffMs = cli.GetEnvInt("GETHEADER_REQUEST_CUTOFF_MS", 3000) - getPayloadRequestCutoffMs = cli.GetEnvInt("GETPAYLOAD_REQUEST_CUTOFF_MS", 4000) - getPayloadResponseDelayMs = cli.GetEnvInt("GETPAYLOAD_RESPONSE_DELAY_MS", 1000) - - // api settings - apiReadTimeoutMs = cli.GetEnvInt("API_TIMEOUT_READ_MS", 1500) - apiReadHeaderTimeoutMs = cli.GetEnvInt("API_TIMEOUT_READHEADER_MS", 600) - apiIdleTimeoutMs = cli.GetEnvInt("API_TIMEOUT_IDLE_MS", 3_000) - apiWriteTimeoutMs = cli.GetEnvInt("API_TIMEOUT_WRITE_MS", 0) - apiMaxHeaderBytes = cli.GetEnvInt("API_MAX_HEADER_BYTES", 60_000) - - // api shutdown: wait time (to allow removal from load balancer before stopping http server) - apiShutdownWaitDuration = common.GetEnvDurationSec("API_SHUTDOWN_WAIT_SEC", 30) - - // api shutdown: whether to stop sending bids during shutdown phase (only useful if running a single-instance testnet setup) - apiShutdownStopSendingBids = os.Getenv("API_SHUTDOWN_STOP_SENDING_BIDS") == "1" - - // maximum payload bytes for a block submission to be fast-tracked (large payloads slow down other fast-tracked requests!) - fastTrackPayloadSizeLimit = cli.GetEnvInt("FAST_TRACK_PAYLOAD_SIZE_LIMIT", 230_000) - - // user-agents which shouldn't receive bids - apiNoHeaderUserAgents = common.GetEnvStrSlice("NO_HEADER_USERAGENTS", []string{ - "mev-boost/v1.5.0 Go-http-client/1.1", // Prysm v4.0.1 (Shapella signing issue) - }) -) - -// RelayAPIOpts contains the options for a relay -type RelayAPIOpts struct { - Log *logrus.Entry - - ListenAddr string - BlockSimURL string - - BeaconClient beaconclient.IMultiBeaconClient - Datastore *datastore.Datastore - Redis *datastore.RedisCache - Memcached *datastore.Memcached - DB database.IDatabaseService - - SecretKey *bls.SecretKey // used to sign bids (getHeader responses) - - // Network specific variables - EthNetDetails common.EthNetworkDetails - - // APIs to enable - ProposerAPI bool - BlockBuilderAPI bool - DataAPI bool - PprofAPI bool - InternalAPI bool -} - -type payloadAttributesHelper struct { - slot uint64 - parentHash string - withdrawalsRoot phase0.Root - parentBeaconRoot *phase0.Root - payloadAttributes beaconclient.PayloadAttributes -} - -// Data needed to issue a block validation request. 
-type blockSimOptions struct { - isHighPrio bool - fastTrack bool - log *logrus.Entry - builder *blockBuilderCacheEntry - req *common.BuilderBlockValidationRequest -} - -type blockBuilderCacheEntry struct { - status common.BuilderStatus - collateral *big.Int -} - -type blockSimResult struct { - wasSimulated bool - optimisticSubmission bool - requestErr error - validationErr error -} - -// RelayAPI represents a single Relay instance -type RelayAPI struct { - opts RelayAPIOpts - log *logrus.Entry - boltLog *logrus.Entry - - blsSk *bls.SecretKey - publicKey *phase0.BLSPubKey - - srv *http.Server - srvStarted uberatomic.Bool - srvShutdown uberatomic.Bool - - beaconClient beaconclient.IMultiBeaconClient - datastore *datastore.Datastore - redis *datastore.RedisCache - memcached *datastore.Memcached - db database.IDatabaseService - constraints *shardmap.FIFOMap[uint64, *[]*SignedConstraints] - constraintsConsumers []chan *SignedConstraints - - headSlot uberatomic.Uint64 - genesisInfo *beaconclient.GetGenesisResponse - capellaEpoch int64 - denebEpoch int64 - - proposerDutiesLock sync.RWMutex - proposerDutiesResponse *[]byte // raw http response - proposerDutiesMap map[uint64]*common.BuilderGetValidatorsResponseEntry - proposerDutiesSlot uint64 - isUpdatingProposerDuties uberatomic.Bool - - blockSimRateLimiter IBlockSimRateLimiter - - validatorRegC chan builderApiV1.SignedValidatorRegistration - - // used to wait on any active getPayload calls on shutdown - getPayloadCallsInFlight sync.WaitGroup - - // Feature flags - ffForceGetHeader204 bool - ffDisableLowPrioBuilders bool - ffDisablePayloadDBStorage bool // disable storing the execution payloads in the database - ffLogInvalidSignaturePayload bool // log payload if getPayload signature validation fails - ffEnableCancellations bool // whether to enable block builder cancellations - ffRegValContinueOnInvalidSig bool // whether to continue processing further validators if one fails - ffIgnorableValidationErrors bool // whether to enable ignorable validation errors - - payloadAttributes map[string]payloadAttributesHelper // key:parentBlockHash - payloadAttributesLock sync.RWMutex - - // The slot we are currently optimistically simulating. - optimisticSlot uberatomic.Uint64 - // The number of optimistic blocks being processed (only used for logging). - optimisticBlocksInFlight uberatomic.Uint64 - // Wait group used to monitor status of per-slot optimistic processing. - optimisticBlocksWG sync.WaitGroup - // Cache for builder statuses and collaterals. - blockBuildersCache map[string]*blockBuilderCacheEntry -} - -// NewRelayAPI creates a new service. 
if builders is nil, allow any builder -func NewRelayAPI(opts RelayAPIOpts) (api *RelayAPI, err error) { - if opts.Log == nil { - return nil, ErrMissingLogOpt - } - - if opts.BeaconClient == nil { - return nil, ErrMissingBeaconClientOpt - } - - if opts.Datastore == nil { - return nil, ErrMissingDatastoreOpt - } - - // If block-builder API is enabled, then ensure secret key is all set - var publicKey phase0.BLSPubKey - if opts.BlockBuilderAPI { - if opts.SecretKey == nil { - return nil, ErrBuilderAPIWithoutSecretKey - } - - // If using a secret key, ensure it's the correct one - blsPubkey, err := bls.PublicKeyFromSecretKey(opts.SecretKey) - if err != nil { - return nil, err - } - publicKey, err = utils.BlsPublicKeyToPublicKey(blsPubkey) - if err != nil { - return nil, err - } - opts.Log.Infof("Using BLS key: %s", publicKey.String()) - - // ensure pubkey is same across all relay instances - _pubkey, err := opts.Redis.GetRelayConfig(datastore.RedisConfigFieldPubkey) - if err != nil { - return nil, err - } else if _pubkey == "" { - err := opts.Redis.SetRelayConfig(datastore.RedisConfigFieldPubkey, publicKey.String()) - if err != nil { - return nil, err - } - } else if _pubkey != publicKey.String() { - return nil, fmt.Errorf("%w: new=%s old=%s", ErrRelayPubkeyMismatch, publicKey.String(), _pubkey) - } - } - - api = &RelayAPI{ - opts: opts, - log: opts.Log, - boltLog: common.NewBoltLogger("RELAY"), - blsSk: opts.SecretKey, - publicKey: &publicKey, - - datastore: opts.Datastore, - beaconClient: opts.BeaconClient, - redis: opts.Redis, - memcached: opts.Memcached, - db: opts.DB, - constraints: shardmap.NewFIFOMap[uint64, *[]*SignedConstraints](64, 8, shardmap.HashUint64), // 2 epochs cache - constraintsConsumers: make([]chan *SignedConstraints, 0, 10), - - payloadAttributes: make(map[string]payloadAttributesHelper), - - proposerDutiesResponse: &[]byte{}, - blockSimRateLimiter: NewBlockSimulationRateLimiter(opts.BlockSimURL), - - validatorRegC: make(chan builderApiV1.SignedValidatorRegistration, 450_000), - } - - if os.Getenv("FORCE_GET_HEADER_204") == "1" { - api.log.Warn("env: FORCE_GET_HEADER_204 - forcing getHeader to always return 204") - api.ffForceGetHeader204 = true - } - - if os.Getenv("DISABLE_LOWPRIO_BUILDERS") == "1" { - api.log.Warn("env: DISABLE_LOWPRIO_BUILDERS - allowing only high-level builders") - api.ffDisableLowPrioBuilders = true - } - - if os.Getenv("DISABLE_PAYLOAD_DATABASE_STORAGE") == "1" { - api.log.Warn("env: DISABLE_PAYLOAD_DATABASE_STORAGE - disabling storing payloads in the database") - api.ffDisablePayloadDBStorage = true - } - - if os.Getenv("LOG_INVALID_GETPAYLOAD_SIGNATURE") == "1" { - api.log.Warn("env: LOG_INVALID_GETPAYLOAD_SIGNATURE - getPayload payloads with invalid proposer signature will be logged") - api.ffLogInvalidSignaturePayload = true - } - - if os.Getenv("ENABLE_BUILDER_CANCELLATIONS") == "1" { - api.log.Warn("env: ENABLE_BUILDER_CANCELLATIONS - builders are allowed to cancel submissions when using ?cancellation=1") - api.ffEnableCancellations = true - } - - if os.Getenv("REGISTER_VALIDATOR_CONTINUE_ON_INVALID_SIG") == "1" { - api.log.Warn("env: REGISTER_VALIDATOR_CONTINUE_ON_INVALID_SIG - validator registration will continue processing even if one validator has an invalid signature") - api.ffRegValContinueOnInvalidSig = true - } - - if os.Getenv("ENABLE_IGNORABLE_VALIDATION_ERRORS") == "1" { - api.log.Warn("env: ENABLE_IGNORABLE_VALIDATION_ERRORS - some validation errors will be ignored") - api.ffIgnorableValidationErrors = true - } - - return api, nil 
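// A minimal sketch of how a caller might fill in RelayAPIOpts and start the
// relay constructed here. Only fields and functions defined in this file are
// referenced; the variable names, listen address, and the way the beacon
// client, datastore, redis cache and database handles are built are
// assumptions, not part of this file:
//
//	opts := RelayAPIOpts{
//		Log:             logrus.NewEntry(logrus.New()),
//		ListenAddr:      "localhost:9062",            // assumed address
//		BlockSimURL:     "http://localhost:8545",     // assumed block-sim endpoint
//		BeaconClient:    beaconClient,                // beaconclient.IMultiBeaconClient
//		Datastore:       ds,                          // *datastore.Datastore
//		Redis:           redisCache,                  // *datastore.RedisCache
//		DB:              db,                          // database.IDatabaseService
//		SecretKey:       secretKey,                   // required when BlockBuilderAPI is true
//		EthNetDetails:   netDetails,                  // common.EthNetworkDetails
//		ProposerAPI:     true,
//		BlockBuilderAPI: true,
//	}
//	relay, err := NewRelayAPI(opts)
//	if err != nil {
//		log.Fatal(err)
//	}
//	log.Fatal(relay.StartServer())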
-} - -func (api *RelayAPI) getRouter() http.Handler { - r := mux.NewRouter() - - r.HandleFunc("/", api.handleRoot).Methods(http.MethodGet) - r.HandleFunc("/livez", api.handleLivez).Methods(http.MethodGet) - r.HandleFunc("/readyz", api.handleReadyz).Methods(http.MethodGet) - - // Proposer API - if api.opts.ProposerAPI { - api.log.Info("proposer API enabled") - r.HandleFunc(pathStatus, api.handleStatus).Methods(http.MethodGet) - r.HandleFunc(pathRegisterValidator, api.handleRegisterValidator).Methods(http.MethodPost) - r.HandleFunc(pathGetHeader, api.handleGetHeader).Methods(http.MethodGet) - r.HandleFunc(pathGetHeaderWithProofs, api.handleGetHeaderWithProofs).Methods(http.MethodGet) - r.HandleFunc(pathGetPayload, api.handleGetPayload).Methods(http.MethodPost) - r.HandleFunc(pathSubmitConstraints, api.handleSubmitConstraints).Methods(http.MethodPost) - } - - // Builder API - if api.opts.BlockBuilderAPI { - api.log.Info("block builder API enabled") - r.HandleFunc(pathBuilderGetValidators, api.handleBuilderGetValidators).Methods(http.MethodGet) - r.HandleFunc(pathSubmitNewBlock, api.handleSubmitNewBlock).Methods(http.MethodPost) - // BOLT - r.HandleFunc(pathSubmitNewBlockWithProofs, api.handleSubmitNewBlockWithProofs).Methods(http.MethodPost) - r.HandleFunc(pathSubscribeConstraints, api.handleSubscribeConstraints).Methods(http.MethodGet) - } - - // Data API - if api.opts.DataAPI { - api.log.Info("data API enabled") - r.HandleFunc(pathDataProposerPayloadDelivered, api.handleDataProposerPayloadDelivered).Methods(http.MethodGet) - r.HandleFunc(pathDataBuilderBidsReceived, api.handleDataBuilderBidsReceived).Methods(http.MethodGet) - r.HandleFunc(pathDataValidatorRegistration, api.handleDataValidatorRegistration).Methods(http.MethodGet) - } - - // Pprof - if api.opts.PprofAPI { - api.log.Info("pprof API enabled") - r.PathPrefix("/debug/pprof/").Handler(http.DefaultServeMux) - } - - // /internal/... 
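// A hedged sketch of how an operator could drive the internal endpoints
// registered below; the host and port are assumptions, and the query
// parameters simply mirror the ones used by the handler test earlier in this
// diff:
//
//	# read status and collateral for a builder
//	curl http://localhost:9062/internal/v1/builder/0x<builder-pubkey>
//
//	# set a builder ID and collateral value
//	curl -X POST "http://localhost:9062/internal/v1/builder/collateral/0x<builder-pubkey>?collateral=builder0x69&value=10000"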
- if api.opts.InternalAPI { - api.log.Info("internal API enabled") - r.HandleFunc(pathInternalBuilderStatus, api.handleInternalBuilderStatus).Methods(http.MethodGet, http.MethodPost, http.MethodPut) - r.HandleFunc(pathInternalBuilderCollateral, api.handleInternalBuilderCollateral).Methods(http.MethodPost, http.MethodPut) - } - - mresp := common.MustB64Gunzip("H4sICAtOkWQAA2EudHh0AKWVPW+DMBCGd36Fe9fIi5Mt8uqqs4dIlZiCEqosKKhVO2Txj699GBtDcEl4JwTnh/t4dS7YWom2FcVaiETSDEmIC+pWLGRVgKrD3UY0iwnSj6THofQJDomiR13BnPgjvJDqNWX+OtzH7inWEGvr76GOCGtg3Kp7Ak+lus3zxLNtmXaMUncjcj1cwbOH3xBZtJCYG6/w+hdpB6ErpnqzFPZxO4FdXB3SAEgpscoDqWeULKmJA4qyfYFg0QV+p7hD8GGDd6C8+mElGDKab1CWeUQMVVvVDTJVj6nngHmNOmSoe6yH1BM3KZIKpuRaHKrOFd/3ksQwzdK+ejdM4VTzSDfjJsY1STeVTWb0T9JWZbJs8DvsNvwaddKdUy4gzVIzWWaWk3IF8D35kyUDf3FfKipwk/DYUee2nYyWQD0xEKDHeprzeXYwVmZD/lXt1OOg8EYhFfitsmQVcwmbUutpdt3PoqWdMyd2DYHKbgcmPlEYMxPjR6HhxOfuNG52xZr7TtzpygJJKNtWS14Uf0T6XSmzBwAA") - r.HandleFunc("/miladyz", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK); w.Write(mresp) }).Methods(http.MethodGet) //nolint:errcheck - - // r.Use(mux.CORSMethodMiddleware(r)) - loggedRouter := httplogger.LoggingMiddlewareLogrus(api.log, r) - withGz := gziphandler.GzipHandler(loggedRouter) - return withGz -} - -// StartServer starts up this API instance and HTTP server -// - First it initializes the cache and updates local information -// - Once that is done, the HTTP server is started -func (api *RelayAPI) StartServer() (err error) { - if api.srvStarted.Swap(true) { - return ErrServerAlreadyStarted - } - - log := api.log.WithField("method", "StartServer") - - // Get best beacon-node status by head slot, process current slot and start slot updates - syncStatus, err := api.beaconClient.BestSyncStatus() - if err != nil { - return err - } - currentSlot := syncStatus.HeadSlot - - // Initialize block builder cache. - api.blockBuildersCache = make(map[string]*blockBuilderCacheEntry) - - // Get genesis info - api.genesisInfo, err = api.beaconClient.GetGenesis() - if err != nil { - return err - } - log.Infof("genesis info: %d", api.genesisInfo.Data.GenesisTime) - - // Get and prepare fork schedule - forkSchedule, err := api.beaconClient.GetForkSchedule() - if err != nil { - return err - } - - api.denebEpoch = -1 - api.capellaEpoch = -1 - for _, fork := range forkSchedule.Data { - log.Infof("forkSchedule: version=%s / epoch=%d", fork.CurrentVersion, fork.Epoch) - switch fork.CurrentVersion { - case api.opts.EthNetDetails.CapellaForkVersionHex: - api.capellaEpoch = int64(fork.Epoch) - case api.opts.EthNetDetails.DenebForkVersionHex: - api.denebEpoch = int64(fork.Epoch) - } - } - - if api.denebEpoch == -1 { - // log warning that deneb epoch was not found in CL fork schedule, suggest CL upgrade - log.Info("Deneb epoch not found in fork schedule") - } - - // Print fork version information - if hasReachedFork(currentSlot, api.denebEpoch) { - log.Infof("deneb fork detected (currentEpoch: %d / denebEpoch: %d)", common.SlotToEpoch(currentSlot), api.denebEpoch) - } else if hasReachedFork(currentSlot, api.capellaEpoch) { - log.Infof("capella fork detected (currentEpoch: %d / capellaEpoch: %d)", common.SlotToEpoch(currentSlot), api.capellaEpoch) - } - - // start proposer API specific things - if api.opts.ProposerAPI { - // Update known validators (which can take 10-30 sec). This is a requirement for service readiness, because without them, - // getPayload() doesn't have the information it needs (known validators), which could lead to missed slots. 
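// Readiness is tied to this refresh: for proposer-API instances, IsReady()
// further down returns api.datastore.KnownValidatorsWasUpdated.Load(), so the
// instance only reports ready after the first refresh of known validators has
// completed. Assuming the /readyz route registered in getRouter consults
// IsReady() (its handler body is not part of this hunk), a load-balancer
// health probe could simply be:
//
//	curl -i http://localhost:9062/readyz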
- go api.datastore.RefreshKnownValidators(api.log, api.beaconClient, currentSlot) - - // Start the validator registration db-save processor - api.log.Infof("starting %d validator registration processors", numValidatorRegProcessors) - for i := 0; i < numValidatorRegProcessors; i++ { - go api.startValidatorRegistrationDBProcessor() - } - } - - // start block-builder API specific things - if api.opts.BlockBuilderAPI { - // Get current proposer duties blocking before starting, to have them ready - api.updateProposerDuties(syncStatus.HeadSlot) - - // Subscribe to payload attributes events (only for builder-api) - go func() { - c := make(chan beaconclient.PayloadAttributesEvent) - api.beaconClient.SubscribeToPayloadAttributesEvents(c) - for { - payloadAttributes := <-c - api.processPayloadAttributes(payloadAttributes) - } - }() - } - - // Process current slot - api.processNewSlot(currentSlot) - - // Start regular slot updates - go func() { - c := make(chan beaconclient.HeadEventData) - api.beaconClient.SubscribeToHeadEvents(c) - for { - headEvent := <-c - api.processNewSlot(headEvent.Slot) - } - }() - - // create and start HTTP server - api.srv = &http.Server{ - Addr: api.opts.ListenAddr, - Handler: api.getRouter(), - - ReadTimeout: time.Duration(apiReadTimeoutMs) * time.Millisecond, - ReadHeaderTimeout: time.Duration(apiReadHeaderTimeoutMs) * time.Millisecond, - WriteTimeout: time.Duration(apiWriteTimeoutMs) * time.Millisecond, - IdleTimeout: time.Duration(apiIdleTimeoutMs) * time.Millisecond, - MaxHeaderBytes: apiMaxHeaderBytes, - } - err = api.srv.ListenAndServe() - if errors.Is(err, http.ErrServerClosed) { - return nil - } - - return err -} - -func (api *RelayAPI) IsReady() bool { - // If server is shutting down, return false - if api.srvShutdown.Load() { - return false - } - - // Proposer API readiness checks - if api.opts.ProposerAPI { - knownValidatorsUpdated := api.datastore.KnownValidatorsWasUpdated.Load() - return knownValidatorsUpdated - } - - // Block-builder API readiness checks - return true -} - -// StopServer gracefully shuts down the HTTP server: -// - Stop returning bids -// - Set ready /readyz to negative status -// - Wait a bit to allow removal of service from load balancer and draining of requests -func (api *RelayAPI) StopServer() (err error) { - // avoid running this twice. 
setting srvShutdown to true makes /readyz switch to negative status - if wasStopping := api.srvShutdown.Swap(true); wasStopping { - return nil - } - - // start server shutdown - api.log.Info("Stopping server...") - - // stop returning bids on getHeader calls (should only be used when running a single instance) - if api.opts.ProposerAPI && apiShutdownStopSendingBids { - api.ffForceGetHeader204 = true - api.log.Info("Disabled returning bids on getHeader") - } - - // wait some time to get service removed from load balancer - api.log.Infof("Waiting %.2f seconds before shutdown...", apiShutdownWaitDuration.Seconds()) - time.Sleep(apiShutdownWaitDuration) - - // wait for any active getPayload call to finish - api.getPayloadCallsInFlight.Wait() - - // shutdown - return api.srv.Shutdown(context.Background()) -} - -func (api *RelayAPI) isCapella(slot uint64) bool { - return hasReachedFork(slot, api.capellaEpoch) && !hasReachedFork(slot, api.denebEpoch) -} - -func (api *RelayAPI) isDeneb(slot uint64) bool { - return hasReachedFork(slot, api.denebEpoch) -} - -func (api *RelayAPI) startValidatorRegistrationDBProcessor() { - for valReg := range api.validatorRegC { - err := api.datastore.SaveValidatorRegistration(valReg) - if err != nil { - api.log.WithError(err).WithFields(logrus.Fields{ - "reg_pubkey": valReg.Message.Pubkey, - "reg_feeRecipient": valReg.Message.FeeRecipient, - "reg_gasLimit": valReg.Message.GasLimit, - "reg_timestamp": valReg.Message.Timestamp, - }).Error("error saving validator registration") - } - } -} - -// removeConstraintsConsumer is a helper function to remove the consumer from the list -func (api *RelayAPI) removeConstraintsConsumer(ch chan *SignedConstraints) { - for i, c := range api.constraintsConsumers { - if c == ch { - api.constraintsConsumers = append(api.constraintsConsumers[:i], api.constraintsConsumers[i+1:]...) - break - } - } -} - -// simulateBlock sends a request for a block simulation to blockSimRateLimiter.
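// It returns two independent errors: requestErr indicates the simulation
// request itself failed (for example, the block-sim endpoint was unreachable),
// while validationErr indicates the node simulated the block and rejected it.
// A small caller-side sketch of how the pair is consumed, mirroring
// processOptimisticBlock further down in this file:
//
//	reqErr, simErr := api.simulateBlock(ctx, opts)
//	if reqErr != nil || simErr != nil {
//		// either failure demotes a builder that was trusted optimistically
//	}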
-func (api *RelayAPI) simulateBlock(ctx context.Context, opts blockSimOptions) (requestErr, validationErr error) { - t := time.Now() - requestErr, validationErr = api.blockSimRateLimiter.Send(ctx, opts.req, opts.isHighPrio, opts.fastTrack) - log := opts.log.WithFields(logrus.Fields{ - "durationMs": time.Since(t).Milliseconds(), - "numWaiting": api.blockSimRateLimiter.CurrentCounter(), - }) - if validationErr != nil { - if api.ffIgnorableValidationErrors { - // The operator has chosen to ignore certain validation errors - ignoreError := validationErr.Error() == ErrBlockAlreadyKnown || validationErr.Error() == ErrBlockRequiresReorg || strings.Contains(validationErr.Error(), ErrMissingTrieNode) - if ignoreError { - log.WithError(validationErr).Warn("block validation failed with ignorable error") - return nil, nil - } - } - log.WithError(validationErr).Warn("block validation failed") - return nil, validationErr - } - if requestErr != nil { - log.WithError(requestErr).Warn("block validation failed: request error") - return requestErr, nil - } - log.Info("block validation successful") - return nil, nil -} - -func (api *RelayAPI) demoteBuilder(pubkey string, req *common.VersionedSubmitBlockRequest, simError error) { - builderEntry, ok := api.blockBuildersCache[pubkey] - if !ok { - api.log.Warnf("builder %v not in the builder cache", pubkey) - builderEntry = &blockBuilderCacheEntry{} //nolint:exhaustruct - } - newStatus := common.BuilderStatus{ - IsHighPrio: builderEntry.status.IsHighPrio, - IsBlacklisted: builderEntry.status.IsBlacklisted, - IsOptimistic: false, - } - api.log.Infof("demoted builder, new status: %v", newStatus) - if err := api.db.SetBlockBuilderIDStatusIsOptimistic(pubkey, false); err != nil { - api.log.Error(fmt.Errorf("error setting builder: %v status: %w", pubkey, err)) - } - // Write to demotions table. - api.log.WithFields(logrus.Fields{"builder_pubkey": pubkey}).Info("demoting builder") - bidTrace, err := req.BidTrace() - if err != nil { - api.log.WithError(err).Warn("failed to get bid trace from submit block request") - } - if err := api.db.InsertBuilderDemotion(req, simError); err != nil { - api.log.WithError(err).WithFields(logrus.Fields{ - "errorWritingDemotionToDB": true, - "bidTrace": bidTrace, - "simError": simError, - }).Error("failed to save demotion to database") - } -} - -// processOptimisticBlock is called on a new goroutine when an optimistic block -// needs to be simulated. -func (api *RelayAPI) processOptimisticBlock(opts blockSimOptions, simResultC chan *blockSimResult) { - api.optimisticBlocksInFlight.Add(1) - defer func() { api.optimisticBlocksInFlight.Sub(1) }() - api.optimisticBlocksWG.Add(1) - defer api.optimisticBlocksWG.Done() - - ctx := context.Background() - submission, err := common.GetBlockSubmissionInfo(opts.req.VersionedSubmitBlockRequest) - if err != nil { - opts.log.WithError(err).Error("error getting block submission info") - return - } - builderPubkey := submission.BidTrace.BuilderPubkey.String() - opts.log.WithFields(logrus.Fields{ - "builderPubkey": builderPubkey, - // NOTE: this value is just an estimate because many goroutines could be - // updating api.optimisticBlocksInFlight concurrently. Since we just use - // it for logging, it is not atomic to avoid the performance impact.
- "optBlocksInFlight": api.optimisticBlocksInFlight, - }).Infof("simulating optimistic block with hash: %v", submission.BidTrace.BlockHash.String()) - reqErr, simErr := api.simulateBlock(ctx, opts) - simResultC <- &blockSimResult{reqErr == nil, true, reqErr, simErr} - if reqErr != nil || simErr != nil { - // Mark builder as non-optimistic. - opts.builder.status.IsOptimistic = false - api.log.WithError(simErr).Warn("block simulation failed in processOptimisticBlock, demoting builder") - - var demotionErr error - if reqErr != nil { - demotionErr = reqErr - } else { - demotionErr = simErr - } - - // Demote the builder. - api.demoteBuilder(builderPubkey, opts.req.VersionedSubmitBlockRequest, demotionErr) - } -} - -func (api *RelayAPI) processPayloadAttributes(payloadAttributes beaconclient.PayloadAttributesEvent) { - apiHeadSlot := api.headSlot.Load() - payloadAttrSlot := payloadAttributes.Data.ProposalSlot - - // require proposal slot in the future - if payloadAttrSlot <= apiHeadSlot { - return - } - log := api.log.WithFields(logrus.Fields{ - "headSlot": apiHeadSlot, - "payloadAttrSlot": payloadAttrSlot, - "payloadAttrParent": payloadAttributes.Data.ParentBlockHash, - }) - - // discard payload attributes if already known - api.payloadAttributesLock.RLock() - _, ok := api.payloadAttributes[payloadAttributes.Data.ParentBlockHash] - api.payloadAttributesLock.RUnlock() - - if ok { - return - } - - var withdrawalsRoot phase0.Root - var err error - if hasReachedFork(payloadAttrSlot, api.capellaEpoch) { - withdrawalsRoot, err = ComputeWithdrawalsRoot(payloadAttributes.Data.PayloadAttributes.Withdrawals) - log = log.WithField("withdrawalsRoot", withdrawalsRoot.String()) - if err != nil { - log.WithError(err).Error("error computing withdrawals root") - return - } - } - - var parentBeaconRoot *phase0.Root - if hasReachedFork(payloadAttrSlot, api.denebEpoch) { - if payloadAttributes.Data.PayloadAttributes.ParentBeaconBlockRoot == "" { - log.Error("parent beacon block root in payload attributes is empty") - return - } - // TODO: (deneb) HexToRoot util function - hash, err := utils.HexToHash(payloadAttributes.Data.PayloadAttributes.ParentBeaconBlockRoot) - if err != nil { - log.WithError(err).Error("error parsing parent beacon block root from payload attributes") - return - } - root := phase0.Root(hash) - parentBeaconRoot = &root - } - - api.payloadAttributesLock.Lock() - defer api.payloadAttributesLock.Unlock() - - // Step 1: clean up old ones - for parentBlockHash, attr := range api.payloadAttributes { - if attr.slot < apiHeadSlot { - delete(api.payloadAttributes, parentBlockHash) - } - } - - // Step 2: save new one - api.payloadAttributes[payloadAttributes.Data.ParentBlockHash] = payloadAttributesHelper{ - slot: payloadAttrSlot, - parentHash: payloadAttributes.Data.ParentBlockHash, - withdrawalsRoot: withdrawalsRoot, - parentBeaconRoot: parentBeaconRoot, - payloadAttributes: payloadAttributes.Data.PayloadAttributes, - } - - log.WithFields(logrus.Fields{ - "randao": payloadAttributes.Data.PayloadAttributes.PrevRandao, - "timestamp": payloadAttributes.Data.PayloadAttributes.Timestamp, - }).Info("updated payload attributes") -} - -func (api *RelayAPI) processNewSlot(headSlot uint64) { - prevHeadSlot := api.headSlot.Load() - if headSlot <= prevHeadSlot { - return - } - - // If there's gaps between previous and new headslot, print the missed slots - if prevHeadSlot > 0 { - for s := prevHeadSlot + 1; s < headSlot; s++ { - api.log.WithField("missedSlot", s).Warnf("missed slot: %d", s) - } - } - - // store the 
head slot - api.headSlot.Store(headSlot) - - // only for builder-api - if api.opts.BlockBuilderAPI || api.opts.ProposerAPI { - // update proposer duties in the background - go api.updateProposerDuties(headSlot) - - // update the optimistic slot - go api.prepareBuildersForSlot(headSlot) - } - - if api.opts.ProposerAPI { - go api.datastore.RefreshKnownValidators(api.log, api.beaconClient, headSlot) - } - - // log - epoch := headSlot / common.SlotsPerEpoch - api.log.WithFields(logrus.Fields{ - "epoch": epoch, - "slotHead": headSlot, - "slotStartNextEpoch": (epoch + 1) * common.SlotsPerEpoch, - }).Infof("updated headSlot to %d", headSlot) -} - -func (api *RelayAPI) updateProposerDuties(headSlot uint64) { - // Ensure only one updating is running at a time - if api.isUpdatingProposerDuties.Swap(true) { - return - } - defer api.isUpdatingProposerDuties.Store(false) - - // Update once every 8 slots (or more, if a slot was missed) - if headSlot%8 != 0 && headSlot-api.proposerDutiesSlot < 8 { - return - } - - // Load upcoming proposer duties from Redis - duties, err := api.redis.GetProposerDuties() - if err != nil { - api.log.WithError(err).Error("failed getting proposer duties from redis") - return - } - - // Prepare raw bytes for HTTP response - respBytes, err := json.Marshal(duties) - if err != nil { - api.log.WithError(err).Error("error marshalling duties") - } - - // Prepare the map for lookup by slot - dutiesMap := make(map[uint64]*common.BuilderGetValidatorsResponseEntry) - for index, duty := range duties { - dutiesMap[duty.Slot] = &duties[index] - } - - // Update - api.proposerDutiesLock.Lock() - if len(respBytes) > 0 { - api.proposerDutiesResponse = &respBytes - } - api.proposerDutiesMap = dutiesMap - api.proposerDutiesSlot = headSlot - api.proposerDutiesLock.Unlock() - - // pretty-print - _duties := make([]string, len(duties)) - for i, duty := range duties { - _duties[i] = strconv.FormatUint(duty.Slot, 10) - } - sort.Strings(_duties) - api.log.Infof("proposer duties updated: %s", strings.Join(_duties, ", ")) -} - -func (api *RelayAPI) prepareBuildersForSlot(headSlot uint64) { - // Wait until there are no optimistic blocks being processed. Then we can - // safely update the slot. - api.optimisticBlocksWG.Wait() - api.optimisticSlot.Store(headSlot + 1) - - builders, err := api.db.GetBlockBuilders() - if err != nil { - api.log.WithError(err).Error("unable to read block builders from db, not updating builder cache") - return - } - api.log.Debugf("Updating builder cache with %d builders from database", len(builders)) - - newCache := make(map[string]*blockBuilderCacheEntry) - for _, v := range builders { - entry := &blockBuilderCacheEntry{ //nolint:exhaustruct - status: common.BuilderStatus{ - IsHighPrio: v.IsHighPrio, - IsBlacklisted: v.IsBlacklisted, - IsOptimistic: v.IsOptimistic, - }, - } - // Try to parse builder collateral string to big int. 
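// big.Int.SetString reports success via its second return value; it is false
// for an empty or non-numeric string, in which case the code below falls back
// to a zero collateral. For illustration:
//
//	v, ok := new(big.Int).SetString("10000", 10) // v == 10000, ok == true
//	_, ok = new(big.Int).SetString("", 10)       // ok == false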
- builderCollateral, ok := big.NewInt(0).SetString(v.Collateral, 10) - if !ok { - api.log.WithError(err).Errorf("could not parse builder collateral string %s", v.Collateral) - entry.collateral = big.NewInt(0) - } else { - entry.collateral = builderCollateral - } - newCache[v.BuilderPubkey] = entry - } - api.blockBuildersCache = newCache -} - -func (api *RelayAPI) RespondError(w http.ResponseWriter, code int, message string) { - api.Respond(w, code, HTTPErrorResp{code, message}) -} - -func (api *RelayAPI) RespondOK(w http.ResponseWriter, response any) { - api.Respond(w, http.StatusOK, response) -} - -func (api *RelayAPI) RespondMsg(w http.ResponseWriter, code int, msg string) { - api.Respond(w, code, HTTPMessageResp{msg}) -} - -func (api *RelayAPI) Respond(w http.ResponseWriter, code int, response any) { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(code) - if response == nil { - return - } - - // write the json response - if err := json.NewEncoder(w).Encode(response); err != nil { - api.log.WithField("response", response).WithError(err).Error("Couldn't write response") - http.Error(w, "", http.StatusInternalServerError) - } -} - -func (api *RelayAPI) handleStatus(w http.ResponseWriter, req *http.Request) { - w.WriteHeader(http.StatusOK) -} - -// --------------- -// PROPOSER APIS -// --------------- - -func (api *RelayAPI) handleRoot(w http.ResponseWriter, req *http.Request) { - w.WriteHeader(http.StatusOK) - fmt.Fprintf(w, "MEV-Boost Relay API") -} - -func (api *RelayAPI) handleRegisterValidator(w http.ResponseWriter, req *http.Request) { - ua := req.UserAgent() - log := api.log.WithFields(logrus.Fields{ - "method": "registerValidator", - "ua": ua, - "mevBoostV": common.GetMevBoostVersionFromUserAgent(ua), - "headSlot": api.headSlot.Load(), - "contentLength": req.ContentLength, - }) - - start := time.Now().UTC() - registrationTimestampUpperBound := start.Unix() + 10 // 10 seconds from now - - numRegTotal := 0 - numRegProcessed := 0 - numRegActive := 0 - numRegNew := 0 - processingStoppedByError := false - - // Setup error handling - handleError := func(_log *logrus.Entry, code int, msg string) { - processingStoppedByError = true - _log.Warnf("error: %s", msg) - api.RespondError(w, code, msg) - } - - // Start processing - if req.ContentLength == 0 { - log.Info("empty request") - api.RespondError(w, http.StatusBadRequest, "empty request") - return - } - - body, err := io.ReadAll(req.Body) - if err != nil { - log.WithError(err).WithField("contentLength", req.ContentLength).Warn("failed to read request body") - api.RespondError(w, http.StatusBadRequest, "failed to read request body") - return - } - req.Body.Close() - - parseRegistration := func(value []byte) (reg *builderApiV1.SignedValidatorRegistration, err error) { - // Pubkey - _pubkey, err := jsonparser.GetUnsafeString(value, "message", "pubkey") - if err != nil { - return nil, fmt.Errorf("registration message error (pubkey): %w", err) - } - - pubkey, err := utils.HexToPubkey(_pubkey) - if err != nil { - return nil, fmt.Errorf("registration message error (pubkey): %w", err) - } - - // Timestamp - _timestamp, err := jsonparser.GetUnsafeString(value, "message", "timestamp") - if err != nil { - return nil, fmt.Errorf("registration message error (timestamp): %w", err) - } - - timestamp, err := strconv.ParseInt(_timestamp, 10, 64) - if err != nil { - return nil, fmt.Errorf("invalid timestamp: %w", err) - } - if timestamp < 0 { - return nil, ErrNegativeTimestamp - } - - // GasLimit - _gasLimit, err := 
jsonparser.GetUnsafeString(value, "message", "gas_limit") - if err != nil { - return nil, fmt.Errorf("registration message error (gasLimit): %w", err) - } - - gasLimit, err := strconv.ParseUint(_gasLimit, 10, 64) - if err != nil { - return nil, fmt.Errorf("invalid gasLimit: %w", err) - } - - // FeeRecipient - _feeRecipient, err := jsonparser.GetUnsafeString(value, "message", "fee_recipient") - if err != nil { - return nil, fmt.Errorf("registration message error (fee_recipient): %w", err) - } - - feeRecipient, err := utils.HexToAddress(_feeRecipient) - if err != nil { - return nil, fmt.Errorf("registration message error (fee_recipient): %w", err) - } - - // Signature - _signature, err := jsonparser.GetUnsafeString(value, "signature") - if err != nil { - return nil, fmt.Errorf("registration message error (signature): %w", err) - } - - signature, err := utils.HexToSignature(_signature) - if err != nil { - return nil, fmt.Errorf("registration message error (signature): %w", err) - } - - // Construct and return full registration object - reg = &builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: feeRecipient, - GasLimit: gasLimit, - Timestamp: time.Unix(timestamp, 0), - Pubkey: pubkey, - }, - Signature: signature, - } - - return reg, nil - } - - // Iterate over the registrations - _, err = jsonparser.ArrayEach(body, func(value []byte, dataType jsonparser.ValueType, offset int, _err error) { - numRegTotal += 1 - if processingStoppedByError { - return - } - numRegProcessed += 1 - regLog := log.WithFields(logrus.Fields{ - "numRegistrationsSoFar": numRegTotal, - "numRegistrationsProcessed": numRegProcessed, - }) - - // Extract immediately necessary registration fields - signedValidatorRegistration, err := parseRegistration(value) - if err != nil { - handleError(regLog, http.StatusBadRequest, err.Error()) - return - } - - // Add validator pubkey to logs - pkHex := common.PubkeyHex(signedValidatorRegistration.Message.Pubkey.String()) - regLog = regLog.WithFields(logrus.Fields{ - "pubkey": pkHex, - "signature": signedValidatorRegistration.Signature.String(), - "feeRecipient": signedValidatorRegistration.Message.FeeRecipient.String(), - "gasLimit": signedValidatorRegistration.Message.GasLimit, - "timestamp": signedValidatorRegistration.Message.Timestamp, - }) - - // Ensure a valid timestamp (not too early, and not too far in the future) - registrationTimestamp := signedValidatorRegistration.Message.Timestamp.Unix() - if registrationTimestamp < int64(api.genesisInfo.Data.GenesisTime) { - handleError(regLog, http.StatusBadRequest, "timestamp too early") - return - } else if registrationTimestamp > registrationTimestampUpperBound { - handleError(regLog, http.StatusBadRequest, "timestamp too far in the future") - return - } - - // Check if a real validator - isKnownValidator := api.datastore.IsKnownValidator(pkHex) - if !isKnownValidator { - handleError(regLog, http.StatusBadRequest, fmt.Sprintf("not a known validator: %s", pkHex)) - return - } - - // Check for a previous registration timestamp - prevTimestamp, err := api.redis.GetValidatorRegistrationTimestamp(pkHex) - if err != nil { - regLog.WithError(err).Error("error getting last registration timestamp") - } else if prevTimestamp >= uint64(signedValidatorRegistration.Message.Timestamp.Unix()) { - // abort if the current registration timestamp is older or equal to the last known one - return - } - - // Verify the signature - ok, err := ssz.VerifySignature(signedValidatorRegistration.Message, 
api.opts.EthNetDetails.DomainBuilder, signedValidatorRegistration.Message.Pubkey[:], signedValidatorRegistration.Signature[:]) - if err != nil { - regLog.WithError(err).Error("error verifying registerValidator signature") - return - } else if !ok { - regLog.Info("invalid validator signature") - if api.ffRegValContinueOnInvalidSig { - return - } else { - handleError(regLog, http.StatusBadRequest, fmt.Sprintf("failed to verify validator signature for %s", signedValidatorRegistration.Message.Pubkey.String())) - return - } - } - - // Now we have a new registration to process - numRegNew += 1 - - // Save to database - select { - case api.validatorRegC <- *signedValidatorRegistration: - default: - regLog.Error("validator registration channel full") - } - }) - - log = log.WithFields(logrus.Fields{ - "timeNeededSec": time.Since(start).Seconds(), - "timeNeededMs": time.Since(start).Milliseconds(), - "numRegistrations": numRegTotal, - "numRegistrationsActive": numRegActive, - "numRegistrationsProcessed": numRegProcessed, - "numRegistrationsNew": numRegNew, - "processingStoppedByError": processingStoppedByError, - }) - - if err != nil { - handleError(log, http.StatusBadRequest, "error in traversing json") - return - } - - log.Info("validator registrations call processed") - w.WriteHeader(http.StatusOK) -} - -func (api *RelayAPI) handleGetHeader(w http.ResponseWriter, req *http.Request) { - vars := mux.Vars(req) - slotStr := vars["slot"] - parentHashHex := vars["parent_hash"] - proposerPubkeyHex := vars["pubkey"] - ua := req.UserAgent() - headSlot := api.headSlot.Load() - - slot, err := strconv.ParseUint(slotStr, 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, common.ErrInvalidSlot.Error()) - return - } - - requestTime := time.Now().UTC() - slotStartTimestamp := api.genesisInfo.Data.GenesisTime + (slot * common.SecondsPerSlot) - msIntoSlot := requestTime.UnixMilli() - int64((slotStartTimestamp * 1000)) - - log := api.log.WithFields(logrus.Fields{ - "method": "getHeader", - "headSlot": headSlot, - "slot": slotStr, - "parentHash": parentHashHex, - "pubkey": proposerPubkeyHex, - "ua": ua, - "mevBoostV": common.GetMevBoostVersionFromUserAgent(ua), - "requestTimestamp": requestTime.Unix(), - "slotStartSec": slotStartTimestamp, - "msIntoSlot": msIntoSlot, - }) - - if len(proposerPubkeyHex) != 98 { - api.RespondError(w, http.StatusBadRequest, common.ErrInvalidPubkey.Error()) - return - } - - if len(parentHashHex) != 66 { - api.RespondError(w, http.StatusBadRequest, common.ErrInvalidHash.Error()) - return - } - - if slot < headSlot { - api.RespondError(w, http.StatusBadRequest, "slot is too old") - return - } - - api.boltLog.Info("getHeader request received") - - if slices.Contains(apiNoHeaderUserAgents, ua) { - log.Info("rejecting getHeader by user agent") - w.WriteHeader(http.StatusNoContent) - return - } - - if api.ffForceGetHeader204 { - log.Info("forced getHeader 204 response") - w.WriteHeader(http.StatusNoContent) - return - } - - // Only allow requests for the current slot until a certain cutoff time - if getHeaderRequestCutoffMs > 0 && msIntoSlot > 0 && msIntoSlot > int64(getHeaderRequestCutoffMs) { - log.Info("getHeader sent too late") - w.WriteHeader(http.StatusNoContent) - return - } - - bid, err := api.redis.GetBestBid(slot, parentHashHex, proposerPubkeyHex) - if err != nil { - log.WithError(err).Error("could not get bid") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - if bid == nil || bid.IsEmpty() { - api.boltLog.Info("Bid is nil or empty")
- w.WriteHeader(http.StatusNoContent) - return - } - - value, err := bid.Value() - if err != nil { - log.WithError(err).Info("could not get bid value") - api.RespondError(w, http.StatusBadRequest, err.Error()) - } - blockHash, err := bid.BlockHash() - if err != nil { - log.WithError(err).Info("could not get bid block hash") - api.RespondError(w, http.StatusBadRequest, err.Error()) - } - - // Error on bid without value - if value.Cmp(uint256.NewInt(0)) == 0 { - api.boltLog.Info("Bid has 0 value") - w.WriteHeader(http.StatusNoContent) - return - } - - log.WithFields(logrus.Fields{ - "value": value.String(), - "blockHash": blockHash.String(), - }).Info("bid delivered") - - api.RespondOK(w, bid) -} - -func (api *RelayAPI) handleGetHeaderWithProofs(w http.ResponseWriter, req *http.Request) { - vars := mux.Vars(req) - slotStr := vars["slot"] - parentHashHex := vars["parent_hash"] - proposerPubkeyHex := vars["pubkey"] - ua := req.UserAgent() - headSlot := api.headSlot.Load() - - slot, err := strconv.ParseUint(slotStr, 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, common.ErrInvalidSlot.Error()) - return - } - - requestTime := time.Now().UTC() - slotStartTimestamp := api.genesisInfo.Data.GenesisTime + (slot * common.SecondsPerSlot) - msIntoSlot := requestTime.UnixMilli() - int64((slotStartTimestamp * 1000)) - - log := api.log.WithFields(logrus.Fields{ - "method": "getHeaderWithProofs", - "headSlot": headSlot, - "slot": slotStr, - "parentHash": parentHashHex, - "pubkey": proposerPubkeyHex, - "ua": ua, - "mevBoostV": common.GetMevBoostVersionFromUserAgent(ua), - "requestTimestamp": requestTime.Unix(), - "slotStartSec": slotStartTimestamp, - "msIntoSlot": msIntoSlot, - }) - - if len(proposerPubkeyHex) != 98 { - api.RespondError(w, http.StatusBadRequest, common.ErrInvalidPubkey.Error()) - return - } - - if len(parentHashHex) != 66 { - api.RespondError(w, http.StatusBadRequest, common.ErrInvalidHash.Error()) - return - } - - if slot < headSlot { - api.RespondError(w, http.StatusBadRequest, "slot is too old") - return - } - - api.boltLog.Info("getHeaderWithProofs request received") - - if slices.Contains(apiNoHeaderUserAgents, ua) { - log.Info("rejecting getHeaderWithProofs by user agent") - w.WriteHeader(http.StatusNoContent) - return - } - - if api.ffForceGetHeader204 { - log.Info("forced getHeaderWithProofs 204 response") - w.WriteHeader(http.StatusNoContent) - return - } - - // Only allow requests for the current slot until a certain cutoff time - if getHeaderRequestCutoffMs > 0 && msIntoSlot > 0 && msIntoSlot > int64(getHeaderRequestCutoffMs) { - log.Info("getHeaderWithProofs sent too late") - w.WriteHeader(http.StatusNoContent) - return - } - - bid, err := api.redis.GetBestBid(slot, parentHashHex, proposerPubkeyHex) - if err != nil { - log.WithError(err).Error("could not get bid") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - bidBlockHash, err := bid.BlockHash() - if err != nil { - api.boltLog.WithError(err).Error("could not get bid block hash") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - // BOLT: get preconfirmations proof of the best bid if available - proof, err := api.redis.GetInclusionProof(slot, proposerPubkeyHex, bidBlockHash.String()) - if err != nil { - api.boltLog.WithError(err).Error("failed getting preconfirmation proofs", proof) - // We don't respond with an error and early return since proofs might be missing - } - - if proof != nil { - api.boltLog.Infof("Got inclusion proof from cache") - } - - if 
bid == nil || bid.IsEmpty() { - api.boltLog.Info("Bid is nil or empty") - w.WriteHeader(http.StatusNoContent) - return - } - - value, err := bid.Value() - if err != nil { - log.WithError(err).Info("could not get bid value") - api.RespondError(w, http.StatusBadRequest, err.Error()) - } - blockHash, err := bid.BlockHash() - if err != nil { - log.WithError(err).Info("could not get bid block hash") - api.RespondError(w, http.StatusBadRequest, err.Error()) - } - - // Error on bid without value - if value.Cmp(uint256.NewInt(0)) == 0 { - api.boltLog.Info("Bid has 0 value") - w.WriteHeader(http.StatusNoContent) - return - } - - // BOLT: Include the proofs in the final bid - bidWithProofs := &common.BidWithPreconfirmationsProofs{ - Bid: bid, - Proofs: proof, - } - - log.WithFields(logrus.Fields{ - "value": value.String(), - "blockHash": blockHash.String(), - }).Info("bid delivered with proof") - - api.RespondOK(w, bidWithProofs) -} - -func (api *RelayAPI) checkProposerSignature(block *common.VersionedSignedBlindedBeaconBlock, pubKey []byte) (bool, error) { - switch block.Version { //nolint:exhaustive - case spec.DataVersionCapella: - return verifyBlockSignature(block, api.opts.EthNetDetails.DomainBeaconProposerCapella, pubKey) - case spec.DataVersionDeneb: - return verifyBlockSignature(block, api.opts.EthNetDetails.DomainBeaconProposerDeneb, pubKey) - default: - return false, errors.New("unsupported consensus data version") - } -} - -func (api *RelayAPI) handleGetPayload(w http.ResponseWriter, req *http.Request) { - api.getPayloadCallsInFlight.Add(1) - defer api.getPayloadCallsInFlight.Done() - - ua := req.UserAgent() - headSlot := api.headSlot.Load() - receivedAt := time.Now().UTC() - log := api.log.WithFields(logrus.Fields{ - "method": "getPayload", - "ua": ua, - "mevBoostV": common.GetMevBoostVersionFromUserAgent(ua), - "contentLength": req.ContentLength, - "headSlot": headSlot, - "headSlotEpochPos": (headSlot % common.SlotsPerEpoch) + 1, - "idArg": req.URL.Query().Get("id"), - "timestampRequestStart": receivedAt.UnixMilli(), - }) - - // Log at start and end of request - log.Info("request initiated") - defer func() { - log.WithFields(logrus.Fields{ - "timestampRequestFin": time.Now().UTC().UnixMilli(), - "requestDurationMs": time.Since(receivedAt).Milliseconds(), - }).Info("request finished") - }() - - // Read the body first, so we can decode it later - body, err := io.ReadAll(req.Body) - if err != nil { - if strings.Contains(err.Error(), "i/o timeout") { - log.WithError(err).Error("getPayload request failed to decode (i/o timeout)") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - - log.WithError(err).Error("could not read body of request from the beacon node") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - // Decode payload - payload := new(common.VersionedSignedBlindedBeaconBlock) - if err := json.NewDecoder(bytes.NewReader(body)).Decode(payload); err != nil { - log.WithError(err).Warn("failed to decode getPayload request") - api.RespondError(w, http.StatusBadRequest, "failed to decode payload") - return - } - - // Take time after the decoding, and add to logging - decodeTime := time.Now().UTC() - slot, err := payload.Slot() - if err != nil { - log.WithError(err).Warn("failed to get payload slot") - api.RespondError(w, http.StatusBadRequest, "failed to get payload slot") - return - } - blockHash, err := payload.ExecutionBlockHash() - if err != nil { - log.WithError(err).Warn("failed to get payload block hash") -
api.RespondError(w, http.StatusBadRequest, "failed to get payload block hash") - return - } - proposerIndex, err := payload.ProposerIndex() - if err != nil { - log.WithError(err).Warn("failed to get payload proposer index") - api.RespondError(w, http.StatusBadRequest, "failed to get payload proposer index") - return - } - slotStartTimestamp := api.genesisInfo.Data.GenesisTime + (uint64(slot) * common.SecondsPerSlot) - msIntoSlot := decodeTime.UnixMilli() - int64((slotStartTimestamp * 1000)) - log = log.WithFields(logrus.Fields{ - "slot": slot, - "slotEpochPos": (uint64(slot) % common.SlotsPerEpoch) + 1, - "blockHash": blockHash.String(), - "slotStartSec": slotStartTimestamp, - "msIntoSlot": msIntoSlot, - "timestampAfterDecode": decodeTime.UnixMilli(), - "proposerIndex": proposerIndex, - }) - - // Ensure the proposer index is expected - api.proposerDutiesLock.RLock() - slotDuty := api.proposerDutiesMap[uint64(slot)] - api.proposerDutiesLock.RUnlock() - if slotDuty == nil { - log.Warn("could not find slot duty") - } else { - log = log.WithField("feeRecipient", slotDuty.Entry.Message.FeeRecipient.String()) - if slotDuty.ValidatorIndex != uint64(proposerIndex) { - log.WithField("expectedProposerIndex", slotDuty.ValidatorIndex).Warn("not the expected proposer index") - api.RespondError(w, http.StatusBadRequest, "not the expected proposer index") - return - } - } - - // Get the proposer pubkey based on the validator index from the payload - proposerPubkey, found := api.datastore.GetKnownValidatorPubkeyByIndex(uint64(proposerIndex)) - if !found { - log.Errorf("could not find proposer pubkey for index %d", proposerIndex) - api.RespondError(w, http.StatusBadRequest, "could not match proposer index to pubkey") - return - } - - // Add proposer pubkey to logs - log = log.WithField("proposerPubkey", proposerPubkey.String()) - - // Create a BLS pubkey from the hex pubkey - pk, err := utils.HexToPubkey(proposerPubkey.String()) - if err != nil { - log.WithError(err).Warn("could not convert pubkey to phase0.BLSPubKey") - api.RespondError(w, http.StatusBadRequest, "could not convert pubkey to phase0.BLSPubKey") - return - } - - // Validate proposer signature - ok, err := api.checkProposerSignature(payload, pk[:]) - if !ok || err != nil { - if api.ffLogInvalidSignaturePayload { - txt, _ := json.Marshal(payload) //nolint:errchkjson - log.Info("payload_invalid_sig: ", string(txt), "pubkey:", proposerPubkey.String()) - } - log.WithError(err).Warn("could not verify payload signature") - api.RespondError(w, http.StatusBadRequest, "could not verify payload signature") - return - } - - // Log about received payload (with a valid proposer signature) - log = log.WithField("timestampAfterSignatureVerify", time.Now().UTC().UnixMilli()) - log.Info("getPayload request received") - - var getPayloadResp *builderApi.VersionedSubmitBlindedBlockResponse - var msNeededForPublishing uint64 - - // Save information about delivered payload - defer func() { - bidTrace, err := api.redis.GetBidTrace(uint64(slot), proposerPubkey.String(), blockHash.String()) - if err != nil { - log.WithError(err).Info("failed to get bidTrace for delivered payload from redis") - return - } - - err = api.db.SaveDeliveredPayload(bidTrace, payload, decodeTime, msNeededForPublishing) - if err != nil { - log.WithError(err).WithFields(logrus.Fields{ - "bidTrace": bidTrace, - "payload": payload, - }).Error("failed to save delivered payload") - } - - // Increment builder stats - err = api.db.IncBlockBuilderStatsAfterGetPayload(bidTrace.BuilderPubkey.String()) 
- if err != nil { - log.WithError(err).Error("failed to increment builder-stats after getPayload") - } - - // Wait until optimistic blocks are complete. - api.optimisticBlocksWG.Wait() - - // Check if there is a demotion for the winning block. - _, err = api.db.GetBuilderDemotion(bidTrace) - // If demotion not found, we are done! - if errors.Is(err, sql.ErrNoRows) { - log.Info("no demotion in getPayload, successful block proposal") - return - } - if err != nil { - log.WithError(err).Error("failed to read demotion table in getPayload") - return - } - // Demotion found, update the demotion table with refund data. - builderPubkey := bidTrace.BuilderPubkey.String() - log = log.WithFields(logrus.Fields{ - "builderPubkey": builderPubkey, - "slot": bidTrace.Slot, - "blockHash": bidTrace.BlockHash, - }) - log.Warn("demotion found in getPayload, inserting refund justification") - - // Prepare refund data. - signedBeaconBlock, err := common.SignedBlindedBeaconBlockToBeaconBlock(payload, getPayloadResp) - if err != nil { - log.WithError(err).Error("failed to convert signed blinded beacon block to beacon block") - api.RespondError(w, http.StatusInternalServerError, "failed to convert signed blinded beacon block to beacon block") - return - } - - // Get registration entry from the DB. - registrationEntry, err := api.db.GetValidatorRegistration(proposerPubkey.String()) - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - log.WithError(err).Error("no registration found for validator " + proposerPubkey.String()) - } else { - log.WithError(err).Error("error reading validator registration") - } - } - var signedRegistration *builderApiV1.SignedValidatorRegistration - if registrationEntry != nil { - signedRegistration, err = registrationEntry.ToSignedValidatorRegistration() - if err != nil { - log.WithError(err).Error("error converting registration to signed registration") - } - } - - err = api.db.UpdateBuilderDemotion(bidTrace, signedBeaconBlock, signedRegistration) - if err != nil { - log.WithFields(logrus.Fields{ - "errorWritingRefundToDB": true, - "bidTrace": bidTrace, - "signedBeaconBlock": signedBeaconBlock, - "signedRegistration": signedRegistration, - }).WithError(err).Error("unable to update builder demotion with refund justification") - } - }() - - // Get the response - from Redis, Memcache or DB - // note that recent mev-boost versions only send getPayload to relays that provided the bid - getPayloadResp, err = api.datastore.GetGetPayloadResponse(log, uint64(slot), proposerPubkey.String(), blockHash.String()) - if err != nil || getPayloadResp == nil { - log.WithError(err).Warn("failed getting execution payload (1/2)") - time.Sleep(time.Duration(timeoutGetPayloadRetryMs) * time.Millisecond) - - // Try again - getPayloadResp, err = api.datastore.GetGetPayloadResponse(log, uint64(slot), proposerPubkey.String(), blockHash.String()) - if err != nil || getPayloadResp == nil { - // Still not found! Error out now. - if errors.Is(err, datastore.ErrExecutionPayloadNotFound) { - // Couldn't find the execution payload, maybe it never was submitted to our relay! 
Check that now - bid, err := api.db.GetBlockSubmissionEntry(uint64(slot), proposerPubkey.String(), blockHash.String()) - if errors.Is(err, sql.ErrNoRows) { - log.Warn("failed getting execution payload (2/2) - payload not found, block was never submitted to this relay") - api.RespondError(w, http.StatusBadRequest, "no execution payload for this request - block was never seen by this relay") - } else if err != nil { - log.WithError(err).Error("failed getting execution payload (2/2) - payload not found, and error on checking bids") - } else if bid.EligibleAt.Valid { - log.Error("failed getting execution payload (2/2) - payload not found, but found bid in database") - } else { - log.Info("found bid but payload was never saved as bid was ineligible being below floor value") - } - } else { // some other error - log.WithError(err).Error("failed getting execution payload (2/2) - error") - } - api.RespondError(w, http.StatusBadRequest, "no execution payload for this request") - return - } - } - - // Now we know this relay also has the payload - log = log.WithField("timestampAfterLoadResponse", time.Now().UTC().UnixMilli()) - - // Check whether getPayload has already been called -- TODO: do we need to allow multiple submissions of one blinded block? - err = api.redis.CheckAndSetLastSlotAndHashDelivered(uint64(slot), blockHash.String()) - log = log.WithField("timestampAfterAlreadyDeliveredCheck", time.Now().UTC().UnixMilli()) - if err != nil { - if errors.Is(err, datastore.ErrAnotherPayloadAlreadyDeliveredForSlot) { - // BAD VALIDATOR, 2x GETPAYLOAD FOR DIFFERENT PAYLOADS - log.Warn("validator called getPayload twice for different payload hashes") - api.RespondError(w, http.StatusBadRequest, "another payload for this slot was already delivered") - return - } else if errors.Is(err, datastore.ErrPastSlotAlreadyDelivered) { - // BAD VALIDATOR, 2x GETPAYLOAD FOR PAST SLOT - log.Warn("validator called getPayload for past slot") - api.RespondError(w, http.StatusBadRequest, "payload for this slot was already delivered") - return - } else if errors.Is(err, redis.TxFailedErr) { - // BAD VALIDATOR, 2x GETPAYLOAD + RACE - log.Warn("validator called getPayload twice (race)") - api.RespondError(w, http.StatusBadRequest, "payload for this slot was already delivered (race)") - return - } - log.WithError(err).Error("redis.CheckAndSetLastSlotAndHashDelivered failed") - } - - // Handle early/late requests - if msIntoSlot < 0 { - // Wait until slot start (t=0) if still in the future - _msSinceSlotStart := time.Now().UTC().UnixMilli() - int64((slotStartTimestamp * 1000)) - if _msSinceSlotStart < 0 { - delayMillis := _msSinceSlotStart * -1 - log = log.WithField("delayMillis", delayMillis) - log.Info("waiting until slot start t=0") - time.Sleep(time.Duration(delayMillis) * time.Millisecond) - } - } else if getPayloadRequestCutoffMs > 0 && msIntoSlot > int64(getPayloadRequestCutoffMs) { - // Reject requests after cutoff time - log.Warn("getPayload sent too late") - api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("sent too late - %d ms into slot", msIntoSlot)) - - go func() { - err := api.db.InsertTooLateGetPayload(uint64(slot), proposerPubkey.String(), blockHash.String(), slotStartTimestamp, uint64(receivedAt.UnixMilli()), uint64(decodeTime.UnixMilli()), uint64(msIntoSlot)) - if err != nil { - log.WithError(err).Error("failed to insert payload too late into db") - } - }() - return - } - - // Check that BlindedBlockContent fields (sent by the proposer) match our known BlockContents - err = 
EqBlindedBlockContentsToBlockContents(payload, getPayloadResp) - if err != nil { - log.WithError(err).Warn("ExecutionPayloadHeader not matching known ExecutionPayload") - api.RespondError(w, http.StatusBadRequest, "invalid execution payload header") - return - } - - // Publish the signed beacon block via beacon-node - timeBeforePublish := time.Now().UTC().UnixMilli() - log = log.WithField("timestampBeforePublishing", timeBeforePublish) - signedBeaconBlock, err := common.SignedBlindedBeaconBlockToBeaconBlock(payload, getPayloadResp) - if err != nil { - log.WithError(err).Error("failed to convert signed blinded beacon block to beacon block") - api.RespondError(w, http.StatusInternalServerError, "failed to convert signed blinded beacon block to beacon block") - return - } - code, err := api.beaconClient.PublishBlock(signedBeaconBlock) // errors are logged inside - if err != nil || (code != http.StatusOK && code != http.StatusAccepted) { - log.Infof("failed to publish block: %s", signedBeaconBlock) - log.WithError(err).WithField("code", code).Error("failed to publish block") - api.RespondError(w, http.StatusBadRequest, "failed to publish block") - return - } - - timeAfterPublish := time.Now().UTC().UnixMilli() - msNeededForPublishing = uint64(timeAfterPublish - timeBeforePublish) - log = log.WithField("timestampAfterPublishing", timeAfterPublish) - log.WithField("msNeededForPublishing", msNeededForPublishing).Info("block published through beacon node") - - // give the beacon network some time to propagate the block - time.Sleep(time.Duration(getPayloadResponseDelayMs) * time.Millisecond) - - // respond to the HTTP request - api.RespondOK(w, getPayloadResp) - blockNumber, err := payload.ExecutionBlockNumber() - if err != nil { - log.WithError(err).Info("failed to get block number") - } - txs, err := getPayloadResp.Transactions() - if err != nil { - log.WithError(err).Info("failed to get transactions") - } - log = log.WithFields(logrus.Fields{ - "numTx": len(txs), - "blockNumber": blockNumber, - }) - // deneb specific logging - if getPayloadResp.Deneb != nil { - log = log.WithFields(logrus.Fields{ - "numBlobs": len(getPayloadResp.Deneb.BlobsBundle.Blobs), - "blobGasUsed": getPayloadResp.Deneb.ExecutionPayload.BlobGasUsed, - "excessBlobGas": getPayloadResp.Deneb.ExecutionPayload.ExcessBlobGas, - }) - } - log.Info("execution payload delivered") -} - -func (api *RelayAPI) handleSubmitConstraints(w http.ResponseWriter, req *http.Request) { - ua := req.UserAgent() - headSlot := api.headSlot.Load() - receivedAt := time.Now().UTC() - - log := api.log.WithFields(logrus.Fields{ - "method": "handleSubmitConstraints", - "ua": ua, - "mevBoostV": common.GetMevBoostVersionFromUserAgent(ua), - "contentLength": req.ContentLength, - "headSlot": headSlot, - "headSlotEpochPos": (headSlot % common.SlotsPerEpoch) + 1, - "idArg": req.URL.Query().Get("id"), - "timestampRequestStart": receivedAt.UnixMilli(), - }) - defer func() { - log.WithFields(logrus.Fields{ - "timestampRequestFin": time.Now().UTC().UnixMilli(), - "requestDurationMs": time.Since(receivedAt).Milliseconds(), - }).Info("request finished") - }() - - // Log at start and end of request - log.Info("request initiated") - - // Read the body first, so we can decode it later - body, err := io.ReadAll(req.Body) - if err != nil { - if strings.Contains(err.Error(), "i/o timeout") { - log.WithError(err).Error("handleSubmitConstraints request failed to decode (i/o timeout)") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - - 
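The constraint-submission loop below verifies each constraint by checking its BLS signature over the SSZ encoding of the message, not over the JSON request body. A minimal sketch of that check, using the same go-boost-utils bls helpers and the SignedConstraints type from this file (inputs are hypothetical; this is an illustrative sketch, not part of the original handler):

// Assumed imports: "github.com/flashbots/go-boost-utils/bls"
// verifyConstraintSignature checks a constraint signature against the
// proposer's 48-byte BLS public key, over the SSZ encoding of the message.
func verifyConstraintSignature(sc *SignedConstraints, proposerPubkeyBytes []byte) (bool, error) {
	pubkey, err := bls.PublicKeyFromBytes(proposerPubkeyBytes)
	if err != nil {
		return false, err
	}
	sig, err := bls.SignatureFromBytes(sc.Signature[:])
	if err != nil {
		return false, err
	}
	// The signing digest is the SSZ encoding of the message, even when the
	// request body itself is JSON.
	messageSSZ, err := sc.Message.MarshalSSZ()
	if err != nil {
		return false, err
	}
	return bls.VerifySignature(sig, pubkey, messageSSZ)
}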
log.WithError(err).Error("could not read body of request from the beacon node") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - // Decode payload - payload := new([]*SignedConstraints) - if err := json.NewDecoder(bytes.NewReader(body)).Decode(payload); err != nil { - log.WithError(err).Warn("failed to decode submit contraints body") - api.RespondError(w, http.StatusBadRequest, "failed to decode payload") - return - } - - if len(*payload) == 0 { - api.RespondError(w, http.StatusBadRequest, "No constraints submitted") - return - } else { - log.Infof("Received %d constraints", len(*payload)) - } - - // Add all constraints to the cache - for _, signedConstraints := range *payload { - // Retrieve proposer information - validatorIndex := signedConstraints.Message.ValidatorIndex - proposerPubKeyStr, found := api.datastore.GetKnownValidatorPubkeyByIndex(validatorIndex) - if !found { - log.Errorf("could not find proposer pubkey for index %d", validatorIndex) - api.RespondError(w, http.StatusBadRequest, "could not match proposer index to pubkey") - return - } - proposerPubKey, err := utils.HexToPubkey(proposerPubKeyStr.String()) - if err != nil { - log.WithError(err).Warn("could not convert pubkey to phase0.BLSPubKey") - api.RespondError(w, http.StatusBadRequest, "could not convert pubkey to phase0.BLSPubKey") - return - } - blsPublicKey, err := bls.PublicKeyFromBytes(proposerPubKey[:]) - if err != nil { - log.Errorf("could not convert proposer pubkey to bls.PublicKey: %v", err) - api.RespondError(w, http.StatusInternalServerError, "could not convert proposer pubkey to bls.PublicKey") - return - } - - // Verify signature - signature, err := bls.SignatureFromBytes(signedConstraints.Signature[:]) - if err != nil { - log.Errorf("could not convert signature to bls.Signature: %v", err) - api.RespondError(w, http.StatusBadRequest, "Invalid raw BLS signature") - return - } - - message := signedConstraints.Message - - // NOTE: even if payload is sent with JSON, the signature digest is the SSZ encoding of the message - messageSSZ, err := message.MarshalSSZ() - if err != nil { - log.Errorf("could not marshal constraint message to json: %v", err) - api.RespondError(w, http.StatusInternalServerError, "could not marshal constraint message to json") - return - } - _, err = bls.VerifySignature(signature, blsPublicKey, messageSSZ) - if err != nil { - log.Errorf("error while veryfing signature: %v", err) - api.RespondError(w, http.StatusInternalServerError, "error while veryfing signature") - return - } - - // TODO: uncomment this code once we send messages signed with correct validator pubkey on the sidecar. - // We can for setup this for the devnet but it's not trivial so we'll skip it for now. - // if !ok { - // log.Error("Invalid BLS signature over constraint message") - // api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("Invalid BLS signature over constraint message %s", messageSSZ)) - // return - // } - - broadcastToChannels(api.constraintsConsumers, signedConstraints) - - // Add the constraint to the cache. - slotConstraints, _ := api.constraints.Get(message.Slot) - if slotConstraints == nil { - api.constraints.Put(message.Slot, &[]*SignedConstraints{signedConstraints}) - } else { - *slotConstraints = append(*slotConstraints, signedConstraints) - } - log.Infof("Added %d constraints for slot %d and broadcasted %d to channels", len(*payload), message.Slot, len(api.constraintsConsumers)) - } - - EmitBoltDemoEvent(fmt.Sprintf("received %d valid constraints, sending to builders... 
(path: %s)", len(*payload), req.URL.Path)) - - // respond to the HTTP request - api.RespondOK(w, nil) -} - -// -------------------- -// -// BLOCK BUILDER APIS -// -// -------------------- -func (api *RelayAPI) handleBuilderGetValidators(w http.ResponseWriter, req *http.Request) { - api.proposerDutiesLock.RLock() - resp := api.proposerDutiesResponse - api.proposerDutiesLock.RUnlock() - _, err := w.Write(*resp) - if err != nil { - api.log.WithError(err).Warn("failed to write response for builderGetValidators") - } -} - -func (api *RelayAPI) checkSubmissionFeeRecipient(w http.ResponseWriter, log *logrus.Entry, bidTrace *builderApiV1.BidTrace) (uint64, bool) { - api.proposerDutiesLock.RLock() - slotDuty := api.proposerDutiesMap[bidTrace.Slot] - api.proposerDutiesLock.RUnlock() - if slotDuty == nil { - log.Warn("could not find slot duty") - api.RespondError(w, http.StatusBadRequest, "could not find slot duty") - return 0, false - } else if !strings.EqualFold(slotDuty.Entry.Message.FeeRecipient.String(), bidTrace.ProposerFeeRecipient.String()) { - log.WithFields(logrus.Fields{ - "expectedFeeRecipient": slotDuty.Entry.Message.FeeRecipient.String(), - "actualFeeRecipient": bidTrace.ProposerFeeRecipient.String(), - }).Info("fee recipient does not match") - api.RespondError(w, http.StatusBadRequest, "fee recipient does not match") - return 0, false - } - return slotDuty.Entry.Message.GasLimit, true -} - -func (api *RelayAPI) checkSubmissionPayloadAttrs(w http.ResponseWriter, log *logrus.Entry, submission *common.BlockSubmissionInfo) (payloadAttributesHelper, bool) { - api.payloadAttributesLock.RLock() - attrs, ok := api.payloadAttributes[submission.BidTrace.ParentHash.String()] - api.payloadAttributesLock.RUnlock() - if !ok || submission.BidTrace.Slot != attrs.slot { - log.WithFields(logrus.Fields{ - "attributesFound": ok, - "payloadSlot": submission.BidTrace.Slot, - "attrsSlot": attrs.slot, - }).Warn("payload attributes not (yet) known") - api.RespondError(w, http.StatusBadRequest, "payload attributes not (yet) known") - return attrs, false - } - - if submission.PrevRandao.String() != attrs.payloadAttributes.PrevRandao { - msg := fmt.Sprintf("incorrect prev_randao - got: %s, expected: %s", submission.PrevRandao.String(), attrs.payloadAttributes.PrevRandao) - log.Info(msg) - api.RespondError(w, http.StatusBadRequest, msg) - return attrs, false - } - - if hasReachedFork(submission.BidTrace.Slot, api.capellaEpoch) { // Capella requires correct withdrawals - withdrawalsRoot, err := ComputeWithdrawalsRoot(submission.Withdrawals) - if err != nil { - log.WithError(err).Warn("could not compute withdrawals root from payload") - api.RespondError(w, http.StatusBadRequest, "could not compute withdrawals root") - return attrs, false - } - - if withdrawalsRoot != attrs.withdrawalsRoot { - msg := fmt.Sprintf("incorrect withdrawals root - got: %s, expected: %s", withdrawalsRoot.String(), attrs.withdrawalsRoot.String()) - log.Info(msg) - api.RespondError(w, http.StatusBadRequest, msg) - return attrs, false - } - } - - return attrs, true -} - -func (api *RelayAPI) checkSubmissionSlotDetails(w http.ResponseWriter, log *logrus.Entry, headSlot uint64, payload *common.VersionedSubmitBlockRequest, submission *common.BlockSubmissionInfo) bool { - if api.isDeneb(submission.BidTrace.Slot) && payload.Deneb == nil { - log.Info("rejecting submission - non deneb payload for deneb fork") - api.RespondError(w, http.StatusBadRequest, "not deneb payload") - return false - } - - if api.isCapella(submission.BidTrace.Slot) && 
payload.Capella == nil { - log.Info("rejecting submission - non capella payload for capella fork") - api.RespondError(w, http.StatusBadRequest, "not capella payload") - return false - } - - if submission.BidTrace.Slot <= headSlot { - log.Info("submitNewBlock failed: submission for past slot") - api.RespondError(w, http.StatusBadRequest, "submission for past slot") - return false - } - - // Timestamp check - expectedTimestamp := api.genesisInfo.Data.GenesisTime + (submission.BidTrace.Slot * common.SecondsPerSlot) - if submission.Timestamp != expectedTimestamp { - log.Warnf("incorrect timestamp. got %d, expected %d", submission.Timestamp, expectedTimestamp) - api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("incorrect timestamp. got %d, expected %d", submission.Timestamp, expectedTimestamp)) - return false - } - - return true -} - -func (api *RelayAPI) checkBuilderEntry(w http.ResponseWriter, log *logrus.Entry, builderPubkey phase0.BLSPubKey) (*blockBuilderCacheEntry, bool) { - builderEntry, ok := api.blockBuildersCache[builderPubkey.String()] - if !ok { - log.Warnf("unable to read builder: %s from the builder cache, using low-prio and no collateral", builderPubkey.String()) - builderEntry = &blockBuilderCacheEntry{ - status: common.BuilderStatus{ - IsHighPrio: false, - IsOptimistic: false, - IsBlacklisted: false, - }, - collateral: big.NewInt(0), - } - } - - if builderEntry.status.IsBlacklisted { - log.Info("builder is blacklisted") - time.Sleep(200 * time.Millisecond) - w.WriteHeader(http.StatusOK) - return builderEntry, false - } - - // In case only high-prio requests are accepted, fail others - if api.ffDisableLowPrioBuilders && !builderEntry.status.IsHighPrio { - log.Info("rejecting low-prio builder (ff-disable-low-prio-builders)") - time.Sleep(200 * time.Millisecond) - w.WriteHeader(http.StatusOK) - return builderEntry, false - } - - return builderEntry, true -} - -type bidFloorOpts struct { - w http.ResponseWriter - tx redis.Pipeliner - log *logrus.Entry - cancellationsEnabled bool - simResultC chan *blockSimResult - submission *common.BlockSubmissionInfo -} - -func (api *RelayAPI) checkFloorBidValue(opts bidFloorOpts) (*big.Int, bool) { - // Reject new submissions once the payload for this slot was delivered - TODO: store in memory as well - slotLastPayloadDelivered, err := api.redis.GetLastSlotDelivered(context.Background(), opts.tx) - if err != nil && !errors.Is(err, redis.Nil) { - opts.log.WithError(err).Error("failed to get delivered payload slot from redis") - } else if opts.submission.BidTrace.Slot <= slotLastPayloadDelivered { - opts.log.Info("rejecting submission because payload for this slot was already delivered") - api.RespondError(opts.w, http.StatusBadRequest, "payload for this slot was already delivered") - return nil, false - } - - // Grab floor bid value - floorBidValue, err := api.redis.GetFloorBidValue(context.Background(), opts.tx, opts.submission.BidTrace.Slot, opts.submission.BidTrace.ParentHash.String(), opts.submission.BidTrace.ProposerPubkey.String()) - if err != nil { - opts.log.WithError(err).Error("failed to get floor bid value from redis") - } else { - opts.log = opts.log.WithField("floorBidValue", floorBidValue.String()) - } - - // -------------------------------------------- - // Skip submission if below the floor bid value - // -------------------------------------------- - isBidBelowFloor := floorBidValue != nil && opts.submission.BidTrace.Value.ToBig().Cmp(floorBidValue) == -1 - isBidAtOrBelowFloor := floorBidValue != nil && 
opts.submission.BidTrace.Value.ToBig().Cmp(floorBidValue) < 1 - if opts.cancellationsEnabled && isBidBelowFloor { // with cancellations: if below floor -> delete previous bid - opts.simResultC <- &blockSimResult{false, false, nil, nil} - opts.log.Info("submission below floor bid value, with cancellation") - err := api.redis.DelBuilderBid(context.Background(), opts.tx, opts.submission.BidTrace.Slot, opts.submission.BidTrace.ParentHash.String(), opts.submission.BidTrace.ProposerPubkey.String(), opts.submission.BidTrace.BuilderPubkey.String()) - if err != nil { - opts.log.WithError(err).Error("failed processing cancellable bid below floor") - api.RespondError(opts.w, http.StatusInternalServerError, "failed processing cancellable bid below floor") - return nil, false - } - api.Respond(opts.w, http.StatusAccepted, "accepted bid below floor, skipped validation") - return nil, false - } else if !opts.cancellationsEnabled && isBidAtOrBelowFloor { // without cancellations: if at or below floor -> ignore - opts.simResultC <- &blockSimResult{false, false, nil, nil} - opts.log.Info("submission at or below floor bid value, without cancellation") - api.RespondMsg(opts.w, http.StatusAccepted, "accepted bid below floor, skipped validation") - return nil, false - } - return floorBidValue, true -} - -type redisUpdateBidOpts struct { - w http.ResponseWriter - tx redis.Pipeliner - log *logrus.Entry - cancellationsEnabled bool - receivedAt time.Time - floorBidValue *big.Int - payload *common.VersionedSubmitBlockRequest -} - -func (api *RelayAPI) updateRedisBid( - opts redisUpdateBidOpts) ( - *datastore.SaveBidAndUpdateTopBidResponse, - *builderApi.VersionedSubmitBlindedBlockResponse, bool, -) { - // Prepare the response data - getHeaderResponse, err := common.BuildGetHeaderResponse(opts.payload, api.blsSk, api.publicKey, api.opts.EthNetDetails.DomainBuilder) - if err != nil { - opts.log.WithError(err).Error("could not sign builder bid") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - - getPayloadResponse, err := common.BuildGetPayloadResponse(opts.payload) - if err != nil { - opts.log.WithError(err).Error("could not build getPayload response") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - - submission, err := common.GetBlockSubmissionInfo(opts.payload) - if err != nil { - opts.log.WithError(err).Error("could not get block submission info") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - - bidTrace := common.BidTraceV2WithBlobFields{ - BidTrace: *submission.BidTrace, - BlockNumber: submission.BlockNumber, - NumTx: uint64(len(submission.Transactions)), - NumBlobs: uint64(len(submission.Blobs)), - BlobGasUsed: submission.BlobGasUsed, - ExcessBlobGas: submission.ExcessBlobGas, - } - - // - // Save to Redis - // - updateBidResult, err := api.redis.SaveBidAndUpdateTopBid( - context.Background(), - opts.tx, - &bidTrace, - opts.payload, - getPayloadResponse, - getHeaderResponse, - opts.receivedAt, - opts.cancellationsEnabled, - opts.floorBidValue, - nil) - if err != nil { - opts.log.WithError(err).Error("could not save bid and update top bids") - api.RespondError(opts.w, http.StatusInternalServerError, "failed saving and updating bid") - return nil, nil, false - } - return &updateBidResult, getPayloadResponse, true -} - -func (api *RelayAPI) updateRedisBidWithProofs( - opts redisUpdateBidOpts, - proof *common.InclusionProof) ( - *datastore.SaveBidAndUpdateTopBidResponse, - 
*builderApi.VersionedSubmitBlindedBlockResponse, bool, -) { - api.boltLog.Info("Updating Redis bid with inclusion proof") - - // Prepare the response data - getHeaderResponse, err := common.BuildGetHeaderResponse(opts.payload, api.blsSk, api.publicKey, api.opts.EthNetDetails.DomainBuilder) - if err != nil { - opts.log.WithError(err).Error("could not sign builder bid") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - - newSlot := api.headSlot.Load() + 1 - - slotConstraints, _ := api.constraints.Get(newSlot) - if slotConstraints != nil { - transactionsRoot, err := getHeaderResponse.TransactionsRoot() - if err != nil { - api.log.WithError(err).Errorf("Failed to calculate transactions root for slot %d", newSlot) - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - constraints := make(HashToConstraintDecoded) - for _, signedConstraints := range *slotConstraints { - for _, constraint := range signedConstraints.Message.Constraints { - decoded := new(types.Transaction) - if err := decoded.UnmarshalBinary(constraint.Tx); err != nil { - api.log.WithError(err).Error("could not decode transaction") - api.RespondError(opts.w, http.StatusBadRequest, "could not decode transaction") - return nil, nil, false - } - api.log.Infof("Decoded tx hash %s", decoded.Hash().String()) - constraints[decoded.Hash()] = &ConstraintDecoded{Tx: decoded.WithoutBlobTxSidecar(), Index: constraint.Index} - } - } - - if len(constraints) > len(proof.TransactionHashes) { - api.log.Warnf("Constraints and proofs length mismatch for slot %d: %d > %d", newSlot, len(constraints), len(proof.TransactionHashes)) - api.RespondError(opts.w, http.StatusBadRequest, "constraints and proofs length mismatch") - return nil, nil, false - } - - err = verifyInclusionProof(api.log, transactionsRoot, proof, constraints) - if err != nil { - api.log.WithError(err).Error("Constraints proofs verification failed") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } else { - api.log.Infof("[BOLT]: constraints proofs verified for slot %d", newSlot) - } - } - - getPayloadResponse, err := common.BuildGetPayloadResponse(opts.payload) - if err != nil { - opts.log.WithError(err).Error("could not build getPayload response") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - - submission, err := common.GetBlockSubmissionInfo(opts.payload) - if err != nil { - opts.log.WithError(err).Error("could not get block submission info") - api.RespondError(opts.w, http.StatusBadRequest, err.Error()) - return nil, nil, false - } - - bidTrace := common.BidTraceV2WithBlobFields{ - BidTrace: *submission.BidTrace, - BlockNumber: submission.BlockNumber, - NumTx: uint64(len(submission.Transactions)), - NumBlobs: uint64(len(submission.Blobs)), - BlobGasUsed: submission.BlobGasUsed, - ExcessBlobGas: submission.ExcessBlobGas, - } - - // - // Save to Redis - // - updateBidResult, err := api.redis.SaveBidAndUpdateTopBid( - context.Background(), - opts.tx, - &bidTrace, - opts.payload, - getPayloadResponse, - getHeaderResponse, - opts.receivedAt, - opts.cancellationsEnabled, - opts.floorBidValue, - proof) - if err != nil { - opts.log.WithError(err).Error("could not save bid and update top bids") - api.RespondError(opts.w, http.StatusInternalServerError, "failed saving and updating bid") - return nil, nil, false - } - return &updateBidResult, getPayloadResponse, true -} - -func (api *RelayAPI) handleSubmitNewBlock(w 
http.ResponseWriter, req *http.Request) { - var pf common.Profile - var prevTime, nextTime time.Time - - headSlot := api.headSlot.Load() - receivedAt := time.Now().UTC() - prevTime = receivedAt - - args := req.URL.Query() - isCancellationEnabled := args.Get("cancellations") == "1" - - log := api.log.WithFields(logrus.Fields{ - "method": "submitNewBlock", - "contentLength": req.ContentLength, - "headSlot": headSlot, - "cancellationEnabled": isCancellationEnabled, - "timestampRequestStart": receivedAt.UnixMilli(), - }) - - // Log at start and end of request - log.Info("request initiated") - defer func() { - log.WithFields(logrus.Fields{ - "timestampRequestFin": time.Now().UTC().UnixMilli(), - "requestDurationMs": time.Since(receivedAt).Milliseconds(), - }).Info("request finished") - }() - - // If cancellations are disabled but builder requested it, return error - if isCancellationEnabled && !api.ffEnableCancellations { - log.Info("builder submitted with cancellations enabled, but feature flag is disabled") - api.RespondError(w, http.StatusBadRequest, "cancellations are disabled") - return - } - - var err error - var r io.Reader = req.Body - isGzip := req.Header.Get("Content-Encoding") == "gzip" - log = log.WithField("reqIsGzip", isGzip) - if isGzip { - r, err = gzip.NewReader(req.Body) - if err != nil { - log.WithError(err).Warn("could not create gzip reader") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - } - - limitReader := io.LimitReader(r, 10*1024*1024) // 10 MB - requestPayloadBytes, err := io.ReadAll(limitReader) - if err != nil { - log.WithError(err).Warn("could not read payload") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - nextTime = time.Now().UTC() - pf.PayloadLoad = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - payload := new(common.VersionedSubmitBlockRequest) - - // Check for SSZ encoding - contentType := req.Header.Get("Content-Type") - if contentType == "application/octet-stream" { - log = log.WithField("reqContentType", "ssz") - if err = payload.UnmarshalSSZ(requestPayloadBytes); err != nil { - log.WithError(err).Warn("could not decode payload - SSZ") - - // SSZ decoding failed. 
try JSON as fallback (some builders used octet-stream for json before) - if err2 := json.Unmarshal(requestPayloadBytes, payload); err2 != nil { - log.WithError(fmt.Errorf("%w / %w", err, err2)).Warn("could not decode payload - SSZ or JSON") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - log = log.WithField("reqContentType", "json") - } else { - log.Debug("received ssz-encoded payload") - } - } else { - log = log.WithField("reqContentType", "json") - if err := json.Unmarshal(requestPayloadBytes, payload); err != nil { - log.WithError(err).Warn("could not decode payload - JSON") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - } - - nextTime = time.Now().UTC() - pf.Decode = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - isLargeRequest := len(requestPayloadBytes) > fastTrackPayloadSizeLimit - // getting block submission info also validates bid trace and execution submission are not empty - submission, err := common.GetBlockSubmissionInfo(payload) - if err != nil { - log.WithError(err).Warn("missing fields in submit block request") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - log = log.WithFields(logrus.Fields{ - "timestampAfterDecoding": time.Now().UTC().UnixMilli(), - "slot": submission.BidTrace.Slot, - "builderPubkey": submission.BidTrace.BuilderPubkey.String(), - "blockHash": submission.BidTrace.BlockHash.String(), - "proposerPubkey": submission.BidTrace.ProposerPubkey.String(), - "parentHash": submission.BidTrace.ParentHash.String(), - "value": submission.BidTrace.Value.Dec(), - "numTx": len(submission.Transactions), - "payloadBytes": len(requestPayloadBytes), - "isLargeRequest": isLargeRequest, - }) - // deneb specific logging - if payload.Deneb != nil { - log = log.WithFields(logrus.Fields{ - "numBlobs": len(payload.Deneb.BlobsBundle.Blobs), - "blobGasUsed": payload.Deneb.ExecutionPayload.BlobGasUsed, - "excessBlobGas": payload.Deneb.ExecutionPayload.ExcessBlobGas, - }) - } - - ok := api.checkSubmissionSlotDetails(w, log, headSlot, payload, submission) - if !ok { - return - } - - builderPubkey := submission.BidTrace.BuilderPubkey - builderEntry, ok := api.checkBuilderEntry(w, log, builderPubkey) - if !ok { - return - } - - log = log.WithField("builderIsHighPrio", builderEntry.status.IsHighPrio) - - gasLimit, ok := api.checkSubmissionFeeRecipient(w, log, submission.BidTrace) - if !ok { - return - } - - // Don't accept blocks with 0 value - if submission.BidTrace.Value.ToBig().Cmp(ZeroU256.BigInt()) == 0 || len(submission.Transactions) == 0 { - log.Info("submitNewBlock failed: block with 0 value or no txs") - w.WriteHeader(http.StatusOK) - return - } - - // Sanity check the submission - err = SanityCheckBuilderBlockSubmission(payload) - if err != nil { - log.WithError(err).Info("block submission sanity checks failed") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - attrs, ok := api.checkSubmissionPayloadAttrs(w, log, submission) - if !ok { - return - } - - // Verify the signature - log = log.WithField("timestampBeforeSignatureCheck", time.Now().UTC().UnixMilli()) - signature := submission.Signature - ok, err = ssz.VerifySignature(submission.BidTrace, api.opts.EthNetDetails.DomainBuilder, builderPubkey[:], signature[:]) - log = log.WithField("timestampAfterSignatureCheck", time.Now().UTC().UnixMilli()) - if err != nil { - log.WithError(err).Warn("failed verifying builder signature") - api.RespondError(w, http.StatusBadRequest, "failed verifying builder 
signature") - return - } else if !ok { - log.Warn("invalid builder signature") - api.RespondError(w, http.StatusBadRequest, "invalid signature") - return - } - - log = log.WithField("timestampBeforeCheckingFloorBid", time.Now().UTC().UnixMilli()) - - // Create the redis pipeline tx - tx := api.redis.NewTxPipeline() - - // channel to send simulation result to the deferred function - simResultC := make(chan *blockSimResult, 1) - var eligibleAt time.Time // will be set once the bid is ready - - submission, err = common.GetBlockSubmissionInfo(payload) - if err != nil { - log.WithError(err).Warn("missing fields in submit block request") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - bfOpts := bidFloorOpts{ - w: w, - tx: tx, - log: log, - cancellationsEnabled: isCancellationEnabled, - simResultC: simResultC, - submission: submission, - } - floorBidValue, ok := api.checkFloorBidValue(bfOpts) - if !ok { - return - } - - log = log.WithField("timestampAfterCheckingFloorBid", time.Now().UTC().UnixMilli()) - - // Deferred saving of the builder submission to database (whenever this function ends) - defer func() { - savePayloadToDatabase := !api.ffDisablePayloadDBStorage - var simResult *blockSimResult - select { - case simResult = <-simResultC: - case <-time.After(10 * time.Second): - log.Warn("timed out waiting for simulation result") - simResult = &blockSimResult{false, false, nil, nil} - } - - submissionEntry, err := api.db.SaveBuilderBlockSubmission(payload, simResult.requestErr, simResult.validationErr, receivedAt, eligibleAt, simResult.wasSimulated, savePayloadToDatabase, pf, simResult.optimisticSubmission, nil) - if err != nil { - log.WithError(err).WithField("payload", payload).Error("saving builder block submission to database failed") - return - } - - err = api.db.UpsertBlockBuilderEntryAfterSubmission(submissionEntry, simResult.validationErr != nil) - if err != nil { - log.WithError(err).Error("failed to upsert block-builder-entry") - } - }() - - // --------------------------------- - // THE BID WILL BE SIMULATED SHORTLY - // --------------------------------- - - log = log.WithField("timestampBeforeCheckingTopBid", time.Now().UTC().UnixMilli()) - - // Get the latest top bid value from Redis - bidIsTopBid := false - topBidValue, err := api.redis.GetTopBidValue(context.Background(), tx, submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - if err != nil { - log.WithError(err).Error("failed to get top bid value from redis") - } else { - bidIsTopBid = submission.BidTrace.Value.ToBig().Cmp(topBidValue) == 1 - log = log.WithFields(logrus.Fields{ - "topBidValue": topBidValue.String(), - "newBidIsTopBid": bidIsTopBid, - }) - } - - log = log.WithField("timestampAfterCheckingTopBid", time.Now().UTC().UnixMilli()) - - nextTime = time.Now().UTC() - pf.Prechecks = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - // Simulate the block submission and save to db - fastTrackValidation := builderEntry.status.IsHighPrio && bidIsTopBid && !isLargeRequest - timeBeforeValidation := time.Now().UTC() - - log = log.WithFields(logrus.Fields{ - "timestampBeforeValidation": timeBeforeValidation.UTC().UnixMilli(), - "fastTrackValidation": fastTrackValidation, - }) - - // Construct simulation request - opts := blockSimOptions{ - isHighPrio: builderEntry.status.IsHighPrio, - fastTrack: fastTrackValidation, - log: log, - builder: builderEntry, - req: &common.BuilderBlockValidationRequest{ - 
VersionedSubmitBlockRequest: payload, - RegisteredGasLimit: gasLimit, - ParentBeaconBlockRoot: attrs.parentBeaconRoot, - }, - } - // With sufficient collateral, process the block optimistically. - if builderEntry.status.IsOptimistic && - builderEntry.collateral.Cmp(submission.BidTrace.Value.ToBig()) >= 0 && - submission.BidTrace.Slot == api.optimisticSlot.Load() { - go api.processOptimisticBlock(opts, simResultC) - } else { - // Simulate block (synchronously). - requestErr, validationErr := api.simulateBlock(context.Background(), opts) // success/error logging happens inside - simResultC <- &blockSimResult{requestErr == nil, false, requestErr, validationErr} - validationDurationMs := time.Since(timeBeforeValidation).Milliseconds() - log = log.WithFields(logrus.Fields{ - "timestampAfterValidation": time.Now().UTC().UnixMilli(), - "validationDurationMs": validationDurationMs, - }) - if requestErr != nil { // Request error - if os.IsTimeout(requestErr) { - api.RespondError(w, http.StatusGatewayTimeout, "validation request timeout") - } else { - api.RespondError(w, http.StatusBadRequest, requestErr.Error()) - } - return - } else { - if validationErr != nil { - api.RespondError(w, http.StatusBadRequest, validationErr.Error()) - return - } - } - } - - nextTime = time.Now().UTC() - pf.Simulation = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - // If cancellations are enabled, then abort now if this submission is not the latest one - if isCancellationEnabled { - // Ensure this request is still the latest one. This logic intentionally ignores the value of the bids and makes the current active bid the one - // that arrived at the relay last. This allows for builders to reduce the value of their bid (effectively cancel a high bid) by ensuring a lower - // bid arrives later. Even if the higher bid takes longer to simulate, by checking the receivedAt timestamp, this logic ensures that the low bid - // is not overwritten by the high bid. - // - // NOTE: this can lead to a rather tricky race condition. If a builder submits two blocks to the relay concurrently, then the randomness of network - // latency will make it impossible to predict which arrives first. Thus a high bid could unintentionally be overwritten by a low bid that happened - // to arrive a few microseconds later. If builders are submitting blocks at a frequency where they cannot reliably predict which bid will arrive at - // the relay first, they should instead use multiple pubkeys to avoid uninitentionally overwriting their own bids. 
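The check that follows implements the rule described in the comment above. Reduced to its essence, the decision is a timestamp comparison against the most recent submission already recorded for the same slot/builder/parent/proposer key (a minimal sketch with hypothetical names; the handler below supplies latestMs from the Redis lookup):

// isSupersededByNewerPayload reports whether a later submission from the same
// builder has already been recorded, in which case this (older) one must not
// overwrite the newer, possibly lower-value, bid.
func isSupersededByNewerPayload(receivedAt time.Time, latestMs int64) bool {
	return receivedAt.UnixMilli() < latestMs
}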
- latestPayloadReceivedAt, err := api.redis.GetBuilderLatestPayloadReceivedAt(context.Background(), tx, submission.BidTrace.Slot, submission.BidTrace.BuilderPubkey.String(), submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - if err != nil { - log.WithError(err).Error("failed getting latest payload receivedAt from redis") - } else if receivedAt.UnixMilli() < latestPayloadReceivedAt { - log.Infof("already have a newer payload: now=%d / prev=%d", receivedAt.UnixMilli(), latestPayloadReceivedAt) - api.RespondError(w, http.StatusBadRequest, "already using a newer payload") - return - } - } - - redisOpts := redisUpdateBidOpts{ - w: w, - tx: tx, - log: log, - cancellationsEnabled: isCancellationEnabled, - receivedAt: receivedAt, - floorBidValue: floorBidValue, - payload: payload, - } - updateBidResult, getPayloadResponse, ok := api.updateRedisBid(redisOpts) - if !ok { - return - } - - // Add fields to logs - log = log.WithFields(logrus.Fields{ - "timestampAfterBidUpdate": time.Now().UTC().UnixMilli(), - "wasBidSavedInRedis": updateBidResult.WasBidSaved, - "wasTopBidUpdated": updateBidResult.WasTopBidUpdated, - "topBidValue": updateBidResult.TopBidValue, - "prevTopBidValue": updateBidResult.PrevTopBidValue, - "profileRedisSavePayloadUs": updateBidResult.TimeSavePayload.Microseconds(), - "profileRedisUpdateTopBidUs": updateBidResult.TimeUpdateTopBid.Microseconds(), - "profileRedisUpdateFloorUs": updateBidResult.TimeUpdateFloor.Microseconds(), - }) - - if updateBidResult.WasBidSaved { - // Bid is eligible to win the auction - eligibleAt = time.Now().UTC() - log = log.WithField("timestampEligibleAt", eligibleAt.UnixMilli()) - - // Save to memcache in the background - if api.memcached != nil { - go func() { - err = api.memcached.SaveExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), getPayloadResponse) - if err != nil { - log.WithError(err).Error("failed saving execution payload in memcached") - } - }() - } - } - - nextTime = time.Now().UTC() - pf.RedisUpdate = uint64(nextTime.Sub(prevTime).Microseconds()) - pf.Total = uint64(nextTime.Sub(receivedAt).Microseconds()) - - // All done, log with profiling information - log.WithFields(logrus.Fields{ - "profileDecodeUs": pf.Decode, - "profilePrechecksUs": pf.Prechecks, - "profileSimUs": pf.Simulation, - "profileRedisUs": pf.RedisUpdate, - "profileTotalUs": pf.Total, - }).Info("received block from builder") - w.WriteHeader(http.StatusOK) -} - -func (api *RelayAPI) handleSubmitNewBlockWithProofs(w http.ResponseWriter, req *http.Request) { - var pf common.Profile - var prevTime, nextTime time.Time - - headSlot := api.headSlot.Load() - receivedAt := time.Now().UTC() - prevTime = receivedAt - - args := req.URL.Query() - isCancellationEnabled := args.Get("cancellations") == "1" - - log := api.log.WithFields(logrus.Fields{ - "method": "submitNewBlockWithPreconfs", - "contentLength": req.ContentLength, - "headSlot": headSlot, - "cancellationEnabled": isCancellationEnabled, - "timestampRequestStart": receivedAt.UnixMilli(), - }) - - // Log at start and end of request - log.Info("request initiated") - defer func() { - log.WithFields(logrus.Fields{ - "timestampRequestFin": time.Now().UTC().UnixMilli(), - "requestDurationMs": time.Since(receivedAt).Milliseconds(), - }).Info("request finished") - }() - - // If cancellations are disabled but builder requested it, return error - if isCancellationEnabled && !api.ffEnableCancellations { - log.Info("builder 
submitted with cancellations enabled, but feature flag is disabled") - api.RespondError(w, http.StatusBadRequest, "cancellations are disabled") - return - } - - var err error - var reader io.Reader = req.Body - isGzip := req.Header.Get("Content-Encoding") == "gzip" - log = log.WithField("reqIsGzip", isGzip) - if isGzip { - reader, err = gzip.NewReader(req.Body) - if err != nil { - log.WithError(err).Warn("could not create gzip reader") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - } - - limitReader := io.LimitReader(reader, 10*1024*1024) // 10 MB - requestPayloadBytes, err := io.ReadAll(limitReader) - if err != nil { - log.WithError(err).Warn("could not read payload") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - nextTime = time.Now().UTC() - pf.PayloadLoad = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - // BOLT: new payload type - payload := new(common.VersionedSubmitBlockRequestWithProofs) - - // Check for SSZ encoding - contentType := req.Header.Get("Content-Type") - if contentType == "application/octet-stream" { - // TODO: (BOLT) implement SSZ decoding - panic("SSZ decoding not implemented for preconfs yet") - } else { - log = log.WithField("reqContentType", "json") - if err := json.Unmarshal(requestPayloadBytes, payload); err != nil { - api.boltLog.WithError(err).Warn("Could not decode payload - JSON") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - } - - log.Infof("Received block bid with proofs from builder: %s", payload) - - // BOLT: Send an event to the web demo - slot, _ := payload.Inner.Slot() - message := fmt.Sprintf("received block bid with %d preconfirmations for slot %d", len(payload.Proofs.TransactionHashes), slot) - EmitBoltDemoEvent(message) - - nextTime = time.Now().UTC() - pf.Decode = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - isLargeRequest := len(requestPayloadBytes) > fastTrackPayloadSizeLimit - // getting block submission info also validates bid trace and execution submission are not empty - submission, err := common.GetBlockSubmissionInfo(payload.Inner) - if err != nil { - log.WithError(err).Warn("missing fields in submit block request") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - log = log.WithFields(logrus.Fields{ - "timestampAfterDecoding": time.Now().UTC().UnixMilli(), - "slot": submission.BidTrace.Slot, - "builderPubkey": submission.BidTrace.BuilderPubkey.String(), - "blockHash": submission.BidTrace.BlockHash.String(), - "proposerPubkey": submission.BidTrace.ProposerPubkey.String(), - "parentHash": submission.BidTrace.ParentHash.String(), - "value": submission.BidTrace.Value.Dec(), - "numTx": len(submission.Transactions), - "payloadBytes": len(requestPayloadBytes), - "isLargeRequest": isLargeRequest, - }) - // deneb specific logging - if payload.Inner.Deneb != nil { - log = log.WithFields(logrus.Fields{ - "numBlobs": len(payload.Inner.Deneb.BlobsBundle.Blobs), - "blobGasUsed": payload.Inner.Deneb.ExecutionPayload.BlobGasUsed, - "excessBlobGas": payload.Inner.Deneb.ExecutionPayload.ExcessBlobGas, - }) - } - - ok := api.checkSubmissionSlotDetails(w, log, headSlot, payload.Inner, submission) - if !ok { - return - } - - builderPubkey := submission.BidTrace.BuilderPubkey - builderEntry, ok := api.checkBuilderEntry(w, log, builderPubkey) - if !ok { - return - } - - log = log.WithField("builderIsHighPrio", builderEntry.status.IsHighPrio) - - gasLimit, ok := api.checkSubmissionFeeRecipient(w, log, 
submission.BidTrace) - if !ok { - return - } - - // Don't accept blocks with 0 value - if submission.BidTrace.Value.ToBig().Cmp(ZeroU256.BigInt()) == 0 || len(submission.Transactions) == 0 { - log.Info("submitNewBlock failed: block with 0 value or no txs") - w.WriteHeader(http.StatusOK) - return - } - - // Sanity check the submission - err = SanityCheckBuilderBlockSubmission(payload.Inner) - if err != nil { - log.WithError(err).Info("block submission sanity checks failed") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - attrs, ok := api.checkSubmissionPayloadAttrs(w, log, submission) - if !ok { - return - } - - // Verify the signature - log = log.WithField("timestampBeforeSignatureCheck", time.Now().UTC().UnixMilli()) - signature := submission.Signature - ok, err = ssz.VerifySignature(submission.BidTrace, api.opts.EthNetDetails.DomainBuilder, builderPubkey[:], signature[:]) - log = log.WithField("timestampAfterSignatureCheck", time.Now().UTC().UnixMilli()) - if err != nil { - log.WithError(err).Warn("failed verifying builder signature") - api.RespondError(w, http.StatusBadRequest, "failed verifying builder signature") - return - } else if !ok { - log.Warn("invalid builder signature") - api.RespondError(w, http.StatusBadRequest, "invalid signature") - return - } - - log = log.WithField("timestampBeforeCheckingFloorBid", time.Now().UTC().UnixMilli()) - - // Create the redis pipeline tx - tx := api.redis.NewTxPipeline() - - // channel to send simulation result to the deferred function - simResultC := make(chan *blockSimResult, 1) - var eligibleAt time.Time // will be set once the bid is ready - - submission, err = common.GetBlockSubmissionInfo(payload.Inner) - if err != nil { - log.WithError(err).Warn("missing fields in submit block request") - api.RespondError(w, http.StatusBadRequest, err.Error()) - return - } - - bfOpts := bidFloorOpts{ - w: w, - tx: tx, - log: log, - cancellationsEnabled: isCancellationEnabled, - simResultC: simResultC, - submission: submission, - } - floorBidValue, ok := api.checkFloorBidValue(bfOpts) - if !ok { - return - } - - log = log.WithField("timestampAfterCheckingFloorBid", time.Now().UTC().UnixMilli()) - - // Deferred saving of the builder submission to database (whenever this function ends) - defer func() { - savePayloadToDatabase := !api.ffDisablePayloadDBStorage - var simResult *blockSimResult - select { - case simResult = <-simResultC: - case <-time.After(10 * time.Second): - log.Warn("timed out waiting for simulation result") - simResult = &blockSimResult{false, false, nil, nil} - } - - submissionEntry, err := api.db.SaveBuilderBlockSubmission( - payload.Inner, - simResult.requestErr, - simResult.validationErr, - receivedAt, - eligibleAt, - simResult.wasSimulated, - savePayloadToDatabase, - pf, - simResult.optimisticSubmission, - payload.Proofs, // BOLT: add merkle proofs to the submission - ) - if err != nil { - log.WithError(err).WithField("payload", payload).Error("saving builder block submission to database failed") - return - } - - err = api.db.UpsertBlockBuilderEntryAfterSubmission(submissionEntry, simResult.validationErr != nil) - if err != nil { - log.WithError(err).Error("failed to upsert block-builder-entry") - } - }() - - // --------------------------------- - // THE BID WILL BE SIMULATED SHORTLY - // --------------------------------- - - log = log.WithField("timestampBeforeCheckingTopBid", time.Now().UTC().UnixMilli()) - - // Get the latest top bid value from Redis - bidIsTopBid := false - topBidValue, err := 
api.redis.GetTopBidValue(context.Background(), tx, submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - if err != nil { - log.WithError(err).Error("failed to get top bid value from redis") - } else { - bidIsTopBid = submission.BidTrace.Value.ToBig().Cmp(topBidValue) == 1 - log = log.WithFields(logrus.Fields{ - "topBidValue": topBidValue.String(), - "newBidIsTopBid": bidIsTopBid, - }) - } - - log = log.WithField("timestampAfterCheckingTopBid", time.Now().UTC().UnixMilli()) - - nextTime = time.Now().UTC() - pf.Prechecks = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - // Simulate the block submission and save to db - fastTrackValidation := builderEntry.status.IsHighPrio && bidIsTopBid && !isLargeRequest - timeBeforeValidation := time.Now().UTC() - - log = log.WithFields(logrus.Fields{ - "timestampBeforeValidation": timeBeforeValidation.UTC().UnixMilli(), - "fastTrackValidation": fastTrackValidation, - }) - - // Construct simulation request - opts := blockSimOptions{ - isHighPrio: builderEntry.status.IsHighPrio, - fastTrack: fastTrackValidation, - log: log, - builder: builderEntry, - req: &common.BuilderBlockValidationRequest{ - VersionedSubmitBlockRequest: payload.Inner, - RegisteredGasLimit: gasLimit, - ParentBeaconBlockRoot: attrs.parentBeaconRoot, - }, - } - // With sufficient collateral, process the block optimistically. - if builderEntry.status.IsOptimistic && - builderEntry.collateral.Cmp(submission.BidTrace.Value.ToBig()) >= 0 && - submission.BidTrace.Slot == api.optimisticSlot.Load() { - go api.processOptimisticBlock(opts, simResultC) - } else { - // Simulate block (synchronously). - requestErr, validationErr := api.simulateBlock(context.Background(), opts) // success/error logging happens inside - simResultC <- &blockSimResult{requestErr == nil, false, requestErr, validationErr} - validationDurationMs := time.Since(timeBeforeValidation).Milliseconds() - log = log.WithFields(logrus.Fields{ - "timestampAfterValidation": time.Now().UTC().UnixMilli(), - "validationDurationMs": validationDurationMs, - }) - if requestErr != nil { // Request error - if os.IsTimeout(requestErr) { - api.RespondError(w, http.StatusGatewayTimeout, "validation request timeout") - } else { - api.RespondError(w, http.StatusBadRequest, requestErr.Error()) - } - return - } else { - if validationErr != nil { - api.RespondError(w, http.StatusBadRequest, validationErr.Error()) - return - } - } - } - - nextTime = time.Now().UTC() - pf.Simulation = uint64(nextTime.Sub(prevTime).Microseconds()) - prevTime = nextTime - - // If cancellations are enabled, then abort now if this submission is not the latest one - if isCancellationEnabled { - // Ensure this request is still the latest one. This logic intentionally ignores the value of the bids and makes the current active bid the one - // that arrived at the relay last. This allows for builders to reduce the value of their bid (effectively cancel a high bid) by ensuring a lower - // bid arrives later. Even if the higher bid takes longer to simulate, by checking the receivedAt timestamp, this logic ensures that the low bid - // is not overwritten by the high bid. - // - // NOTE: this can lead to a rather tricky race condition. If a builder submits two blocks to the relay concurrently, then the randomness of network - // latency will make it impossible to predict which arrives first. 
Thus a high bid could unintentionally be overwritten by a low bid that happened - // to arrive a few microseconds later. If builders are submitting blocks at a frequency where they cannot reliably predict which bid will arrive at - // the relay first, they should instead use multiple pubkeys to avoid uninitentionally overwriting their own bids. - latestPayloadReceivedAt, err := api.redis.GetBuilderLatestPayloadReceivedAt(context.Background(), tx, submission.BidTrace.Slot, submission.BidTrace.BuilderPubkey.String(), submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String()) - if err != nil { - log.WithError(err).Error("failed getting latest payload receivedAt from redis") - } else if receivedAt.UnixMilli() < latestPayloadReceivedAt { - log.Infof("already have a newer payload: now=%d / prev=%d", receivedAt.UnixMilli(), latestPayloadReceivedAt) - api.RespondError(w, http.StatusBadRequest, "already using a newer payload") - return - } - } - redisOpts := redisUpdateBidOpts{ - w: w, - tx: tx, - log: log, - cancellationsEnabled: isCancellationEnabled, - receivedAt: receivedAt, - floorBidValue: floorBidValue, - payload: payload.Inner, - } - updateBidResult, getPayloadResponse, ok := api.updateRedisBidWithProofs(redisOpts, payload.Proofs) - if !ok { - return - } - - // Add fields to logs - log = log.WithFields(logrus.Fields{ - "timestampAfterBidUpdate": time.Now().UTC().UnixMilli(), - "wasBidSavedInRedis": updateBidResult.WasBidSaved, - "wasTopBidUpdated": updateBidResult.WasTopBidUpdated, - "topBidValue": updateBidResult.TopBidValue, - "prevTopBidValue": updateBidResult.PrevTopBidValue, - "profileRedisSavePayloadUs": updateBidResult.TimeSavePayload.Microseconds(), - "profileRedisUpdateTopBidUs": updateBidResult.TimeUpdateTopBid.Microseconds(), - "profileRedisUpdateFloorUs": updateBidResult.TimeUpdateFloor.Microseconds(), - }) - - if updateBidResult.WasBidSaved { - // Bid is eligible to win the auction - eligibleAt = time.Now().UTC() - log = log.WithField("timestampEligibleAt", eligibleAt.UnixMilli()) - - // Save to memcache in the background - if api.memcached != nil { - go func() { - err = api.memcached.SaveExecutionPayload(submission.BidTrace.Slot, submission.BidTrace.ProposerPubkey.String(), submission.BidTrace.BlockHash.String(), getPayloadResponse) - if err != nil { - log.WithError(err).Error("failed saving execution payload in memcached") - } - }() - } - } - - nextTime = time.Now().UTC() - pf.RedisUpdate = uint64(nextTime.Sub(prevTime).Microseconds()) - pf.Total = uint64(nextTime.Sub(receivedAt).Microseconds()) - - // All done, log with profiling information - log.WithFields(logrus.Fields{ - "profileDecodeUs": pf.Decode, - "profilePrechecksUs": pf.Prechecks, - "profileSimUs": pf.Simulation, - "profileRedisUs": pf.RedisUpdate, - "profileTotalUs": pf.Total, - }).Info("received block from builder") - w.WriteHeader(http.StatusOK) -} - -func (api *RelayAPI) handleSubscribeConstraints(w http.ResponseWriter, req *http.Request) { - w.Header().Set("Content-Type", "text/event-stream") - w.Header().Set("Cache-Control", "no-cache") - w.Header().Set("Connection", "keep-alive") - - api.log.Infof("New constraints consumer connected") - - // Add the new consumer - constraintsCh := make(chan *SignedConstraints, 256) - api.constraintsConsumers = append(api.constraintsConsumers, constraintsCh) - - // Remove the consumer and close the channel when the client disconnects - defer func() { - api.removeConstraintsConsumer(constraintsCh) - close(constraintsCh) - }() - - flusher, ok := 
w.(http.Flusher) - if !ok { - http.Error(w, "Streaming unsupported!", http.StatusInternalServerError) - return - } - - w.WriteHeader(http.StatusOK) - - // Monitor client disconnect - notify := req.Context().Done() - - ticker := time.NewTicker(3 * time.Second) - - for { - select { - case <-notify: - // Client disconnected - api.log.Info("Client disconnected from constraints stream") - return - case <-ticker.C: - // Send a keepalive to the client - // NOTE: the length of the message is intentional, do not make it shorter - fmt.Fprint(w, ": keepaliveeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee\n\n") - flusher.Flush() - case constraint := <-constraintsCh: - constraintJSON, err := json.Marshal([]*SignedConstraints{constraint}) - api.log.Infof("New constraint received from channel and ready to be sent to builders: %s", constraint) - - if err != nil { - api.log.Printf("failed to marshal constraint to json: %v", err) - continue - } - fmt.Fprintf(w, "data: %s\n\n", string(constraintJSON)) - - flusher.Flush() - api.log.Infof("Flushed constraints to builders") - } - } -} - -// --------------- -// -// INTERNAL APIS -// -// --------------- -func (api *RelayAPI) handleInternalBuilderStatus(w http.ResponseWriter, req *http.Request) { - vars := mux.Vars(req) - builderPubkey := vars["pubkey"] - builderEntry, err := api.db.GetBlockBuilderByPubkey(builderPubkey) - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - api.RespondError(w, http.StatusBadRequest, "builder not found") - return - } - - api.log.WithError(err).Error("could not get block builder") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - if req.Method == http.MethodGet { - api.RespondOK(w, builderEntry) - return - } else if req.Method == http.MethodPost || req.Method == http.MethodPut || req.Method == http.MethodPatch { - st := common.BuilderStatus{ - IsHighPrio: builderEntry.IsHighPrio, - IsBlacklisted: builderEntry.IsBlacklisted, - IsOptimistic: builderEntry.IsOptimistic, - } - trueStr := "true" - args := req.URL.Query() - if args.Get("high_prio") != "" { - st.IsHighPrio = args.Get("high_prio") == trueStr - } - if args.Get("blacklisted") != "" { - st.IsBlacklisted = args.Get("blacklisted") == trueStr - } - if args.Get("optimistic") != "" { - st.IsOptimistic = args.Get("optimistic") == trueStr - } - api.log.WithFields(logrus.Fields{ - "builderPubkey": builderPubkey, - "isHighPrio": st.IsHighPrio, - "isBlacklisted": st.IsBlacklisted, - "isOptimistic": st.IsOptimistic, - }).Info("updating builder status") - err := api.db.SetBlockBuilderStatus(builderPubkey, st) - if err != nil { - err := fmt.Errorf("error setting builder: %v status: %w", builderPubkey, err) - api.log.Error(err) - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - api.RespondOK(w, st) - } -} - -func (api *RelayAPI) handleInternalBuilderCollateral(w http.ResponseWriter, req *http.Request) { - vars := mux.Vars(req) - builderPubkey := vars["pubkey"] - if req.Method == http.MethodPost || req.Method == http.MethodPut { - args := req.URL.Query() - collateral := args.Get("collateral") - value := args.Get("value") - log := api.log.WithFields(logrus.Fields{ - "pubkey": builderPubkey, - "collateral": collateral, - "value": value, - }) - log.Infof("updating builder collateral") - if err := api.db.SetBlockBuilderCollateral(builderPubkey, collateral, value); err != nil { - fullErr := fmt.Errorf("unable to set collateral in db for pubkey: %v: %w", builderPubkey, err) - log.Error(fullErr.Error()) - 
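The constraints subscription above is a plain server-sent-events stream: each event is a "data: <json>" line followed by a blank line, and lines starting with ":" are keepalive comments. A builder-side consumer could be sketched roughly as follows (the endpoint URL is not shown in this diff and is assumed; this is an illustrative sketch, not part of the original file):

// Assumed imports: "bufio", "context", "encoding/json", "net/http", "strings"
func consumeConstraints(ctx context.Context, url string, out chan<- []*SignedConstraints) error {
	req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
	if err != nil {
		return err
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()

	scanner := bufio.NewScanner(resp.Body)
	// Allow for large events; constraint batches can exceed the default 64 KiB token size.
	scanner.Buffer(make([]byte, 0, 1<<20), 1<<20)
	for scanner.Scan() {
		line := scanner.Text()
		if line == "" || strings.HasPrefix(line, ":") {
			continue // frame separator or SSE keepalive comment
		}
		if payload, ok := strings.CutPrefix(line, "data: "); ok {
			var constraints []*SignedConstraints
			if err := json.Unmarshal([]byte(payload), &constraints); err != nil {
				return err
			}
			out <- constraints
		}
	}
	return scanner.Err()
}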
api.RespondError(w, http.StatusInternalServerError, fullErr.Error()) - return - } - api.RespondOK(w, NilResponse) - } -} - -// ----------- -// DATA APIS -// ----------- - -func (api *RelayAPI) handleDataProposerPayloadDelivered(w http.ResponseWriter, req *http.Request) { - var err error - args := req.URL.Query() - - filters := database.GetPayloadsFilters{ - Limit: 200, - } - - if args.Get("slot") != "" && args.Get("cursor") != "" { - api.RespondError(w, http.StatusBadRequest, "cannot specify both slot and cursor") - return - } else if args.Get("slot") != "" { - filters.Slot, err = strconv.ParseInt(args.Get("slot"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid slot argument") - return - } - } else if args.Get("cursor") != "" { - filters.Cursor, err = strconv.ParseInt(args.Get("cursor"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid cursor argument") - return - } - } - - if args.Get("block_hash") != "" { - _, err := utils.HexToHash(args.Get("block_hash")) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid block_hash argument") - return - } - filters.BlockHash = args.Get("block_hash") - } - - if args.Get("block_number") != "" { - filters.BlockNumber, err = strconv.ParseInt(args.Get("block_number"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid block_number argument") - return - } - } - - if args.Get("proposer_pubkey") != "" { - if err = checkBLSPublicKeyHex(args.Get("proposer_pubkey")); err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid proposer_pubkey argument") - return - } - filters.ProposerPubkey = args.Get("proposer_pubkey") - } - - if args.Get("builder_pubkey") != "" { - if err = checkBLSPublicKeyHex(args.Get("builder_pubkey")); err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid builder_pubkey argument") - return - } - filters.BuilderPubkey = args.Get("builder_pubkey") - } - - if args.Get("limit") != "" { - _limit, err := strconv.ParseUint(args.Get("limit"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid limit argument") - return - } - if _limit > filters.Limit { - api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("maximum limit is %d", filters.Limit)) - return - } - filters.Limit = _limit - } - - if args.Get("order_by") == "value" { - filters.OrderByValue = 1 - } else if args.Get("order_by") == "-value" { - filters.OrderByValue = -1 - } - - deliveredPayloads, err := api.db.GetRecentDeliveredPayloads(filters) - if err != nil { - api.log.WithError(err).Error("error getting recently delivered payloads") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - - response := make([]common.BidTraceV2JSON, len(deliveredPayloads)) - for i, payload := range deliveredPayloads { - response[i] = database.DeliveredPayloadEntryToBidTraceV2JSON(payload) - } - - api.RespondOK(w, response) -} - -func (api *RelayAPI) handleDataBuilderBidsReceived(w http.ResponseWriter, req *http.Request) { - var err error - args := req.URL.Query() - - filters := database.GetBuilderSubmissionsFilters{ - Limit: 500, - Slot: 0, - BlockHash: "", - BlockNumber: 0, - BuilderPubkey: "", - } - - if args.Get("cursor") != "" { - api.RespondError(w, http.StatusBadRequest, "cursor argument not supported") - return - } - - if args.Get("slot") != "" { - filters.Slot, err = strconv.ParseInt(args.Get("slot"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid slot argument") - return - } - } 
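Both data handlers repeat the same pattern for the optional "limit" query parameter: parse it if present and cap it at the handler's maximum. A small helper capturing that pattern might look like this (illustrative sketch only; the handlers in this file inline the logic):

// Assumed imports: "fmt", "net/url", "strconv"
func parseLimitArg(args url.Values, defaultLimit, maxLimit uint64) (uint64, error) {
	raw := args.Get("limit")
	if raw == "" {
		return defaultLimit, nil
	}
	limit, err := strconv.ParseUint(raw, 10, 64)
	if err != nil {
		return 0, fmt.Errorf("invalid limit argument")
	}
	if limit > maxLimit {
		return 0, fmt.Errorf("maximum limit is %d", maxLimit)
	}
	return limit, nil
}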
- - if args.Get("block_hash") != "" { - _, err := utils.HexToHash(args.Get("block_hash")) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid block_hash argument") - return - } - filters.BlockHash = args.Get("block_hash") - } - - if args.Get("block_number") != "" { - filters.BlockNumber, err = strconv.ParseInt(args.Get("block_number"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid block_number argument") - return - } - } - - if args.Get("builder_pubkey") != "" { - if err = checkBLSPublicKeyHex(args.Get("builder_pubkey")); err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid builder_pubkey argument") - return - } - filters.BuilderPubkey = args.Get("builder_pubkey") - } - - // at least one query arguments is required - if filters.Slot == 0 && filters.BlockHash == "" && filters.BlockNumber == 0 && filters.BuilderPubkey == "" { - api.RespondError(w, http.StatusBadRequest, "need to query for specific slot or block_hash or block_number or builder_pubkey") - return - } - - if args.Get("limit") != "" { - _limit, err := strconv.ParseInt(args.Get("limit"), 10, 64) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid limit argument") - return - } - if _limit > filters.Limit { - api.RespondError(w, http.StatusBadRequest, fmt.Sprintf("maximum limit is %d", filters.Limit)) - return - } - filters.Limit = _limit - } - - blockSubmissions, err := api.db.GetBuilderSubmissions(filters) - if err != nil { - api.log.WithError(err).Error("error getting recent builder submissions") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - - response := make([]common.BidTraceV2WithTimestampJSON, len(blockSubmissions)) - for i, payload := range blockSubmissions { - response[i] = database.BuilderSubmissionEntryToBidTraceV2WithTimestampJSON(payload) - } - - api.RespondOK(w, response) -} - -func (api *RelayAPI) handleDataValidatorRegistration(w http.ResponseWriter, req *http.Request) { - pkStr := req.URL.Query().Get("pubkey") - if pkStr == "" { - api.RespondError(w, http.StatusBadRequest, "missing pubkey argument") - return - } - - _, err := utils.HexToPubkey(pkStr) - if err != nil { - api.RespondError(w, http.StatusBadRequest, "invalid pubkey") - return - } - - registrationEntry, err := api.db.GetValidatorRegistration(pkStr) - if err != nil { - if errors.Is(err, sql.ErrNoRows) { - api.RespondError(w, http.StatusBadRequest, "no registration found for validator "+pkStr) - return - } - api.log.WithError(err).Error("error getting validator registration") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - - signedRegistration, err := registrationEntry.ToSignedValidatorRegistration() - if err != nil { - api.log.WithError(err).Error("error converting registration entry to signed validator registration") - api.RespondError(w, http.StatusInternalServerError, err.Error()) - return - } - - api.RespondOK(w, signedRegistration) -} - -func (api *RelayAPI) handleLivez(w http.ResponseWriter, req *http.Request) { - api.RespondMsg(w, http.StatusOK, "live") -} - -func (api *RelayAPI) handleReadyz(w http.ResponseWriter, req *http.Request) { - if api.IsReady() { - api.RespondMsg(w, http.StatusOK, "ready") - } else { - api.RespondMsg(w, http.StatusServiceUnavailable, "not ready") - } -} diff --git a/mev-boost-relay/services/api/service_test.go b/mev-boost-relay/services/api/service_test.go deleted file mode 100644 index 833195f70..000000000 --- a/mev-boost-relay/services/api/service_test.go +++ 
/dev/null @@ -1,1424 +0,0 @@ -package api - -import ( - "bufio" - "bytes" - "compress/gzip" - "context" - "encoding/json" - "errors" - "fmt" - "io" - "log" - "math/big" - "net/http" - "net/http/httptest" - "strings" - "testing" - "time" - - "github.com/alicebob/miniredis/v2" - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/holiman/uint256" - "github.com/sirupsen/logrus" - "github.com/stretchr/testify/require" - "gotest.tools/assert" -) - -const ( - testGasLimit = uint64(30000000) - testSlot = uint64(42) - testParentHash = "0xbd3291854dc822b7ec585925cda0e18f06af28fa2886e15f52d52dd4b6f94ed6" - testWithdrawalsRoot = "0x7f6d156912a4cb1e74ee37e492ad883f7f7ac856d987b3228b517e490aa0189e" - testPrevRandao = "0x9962816e9d0a39fd4c80935338a741dc916d1545694e41eb5a505e1a3098f9e4" - testBuilderPubkey = "0xfa1ed37c3553d0ce1e9349b2c5063cf6e394d231c8d3e0df75e9462257c081543086109ffddaacc0aa76f33dc9661c83" -) - -var ( - testAddress = bellatrix.ExecutionAddress([20]byte{0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19}) - testAddress2 = bellatrix.ExecutionAddress([20]byte{1, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19}) -) - -type testBackend struct { - t require.TestingT - relay *RelayAPI - datastore *datastore.Datastore - redis *datastore.RedisCache -} - -func newTestBackend(t require.TestingT, numBeaconNodes int) *testBackend { - redisClient, err := miniredis.Run() - require.NoError(t, err) - - redisCache, err := datastore.NewRedisCache("", redisClient.Addr(), "") - require.NoError(t, err) - - db := database.MockDB{} - - ds, err := datastore.NewDatastore(redisCache, nil, db) - require.NoError(t, err) - - sk, _, err := bls.GenerateNewKeypair() - require.NoError(t, err) - - mainnetDetails, err := common.NewEthNetworkDetails(common.EthNetworkMainnet) - require.NoError(t, err) - - opts := RelayAPIOpts{ - Log: common.TestLog, - ListenAddr: "localhost:12345", - BeaconClient: &beaconclient.MultiBeaconClient{}, - Datastore: ds, - Redis: redisCache, - DB: db, - EthNetDetails: *mainnetDetails, - SecretKey: sk, - ProposerAPI: true, - BlockBuilderAPI: true, - DataAPI: true, - InternalAPI: true, - } - - relay, err := NewRelayAPI(opts) - require.NoError(t, err) - - relay.genesisInfo = &beaconclient.GetGenesisResponse{ - Data: beaconclient.GetGenesisResponseData{ - GenesisTime: 1606824023, - }, - } - - backend := testBackend{ - t: t, - relay: relay, - datastore: ds, - redis: redisCache, - } - return &backend -} - -func (be *testBackend) requestBytes(method, path string, payload []byte, headers map[string]string) *httptest.ResponseRecorder { - var req *http.Request - var err error - - req, err = http.NewRequest(method, path, bytes.NewReader(payload)) - require.NoError(be.t, err) - - // Set headers - 
for k, v := range headers { - req.Header.Set(k, v) - } - - // lfg - rr := httptest.NewRecorder() - be.relay.getRouter().ServeHTTP(rr, req) - return rr -} - -func (be *testBackend) request(method, path string, payload any) *httptest.ResponseRecorder { - var req *http.Request - var err error - - if payload == nil { - req, err = http.NewRequest(method, path, bytes.NewReader(nil)) - } else { - payloadBytes, err2 := json.Marshal(payload) - require.NoError(be.t, err2) - req, err = http.NewRequest(method, path, bytes.NewReader(payloadBytes)) - } - require.NoError(be.t, err) - - // lfg - rr := httptest.NewRecorder() - be.relay.getRouter().ServeHTTP(rr, req) - return rr -} - -func (be *testBackend) requestWithUA(method, path, userAgent string, payload any) *httptest.ResponseRecorder { - var req *http.Request - var err error - - if payload == nil { - req, err = http.NewRequest(method, path, bytes.NewReader(nil)) - } else { - payloadBytes, err2 := json.Marshal(payload) - require.NoError(be.t, err2) - req, err = http.NewRequest(method, path, bytes.NewReader(payloadBytes)) - } - req.Header.Set("User-Agent", userAgent) - - require.NoError(be.t, err) - rr := httptest.NewRecorder() - be.relay.getRouter().ServeHTTP(rr, req) - return rr -} - -func TestWebserver(t *testing.T) { - t.Run("errors when webserver is already existing", func(t *testing.T) { - backend := newTestBackend(t, 1) - backend.relay.srvStarted.Store(true) - err := backend.relay.StartServer() - require.Error(t, err) - }) -} - -func TestWebserverRootHandler(t *testing.T) { - backend := newTestBackend(t, 1) - rr := backend.request(http.MethodGet, "/", nil) - require.Equal(t, http.StatusOK, rr.Code) -} - -func TestStatus(t *testing.T) { - backend := newTestBackend(t, 1) - path := "/eth/v1/builder/status" - rr := backend.request(http.MethodGet, path, common.ValidPayloadRegisterValidator) - require.Equal(t, http.StatusOK, rr.Code) -} - -func TestLivez(t *testing.T) { - backend := newTestBackend(t, 1) - path := "/livez" - rr := backend.request(http.MethodGet, path, nil) - require.Equal(t, http.StatusOK, rr.Code) - require.Equal(t, "{\"message\":\"live\"}\n", rr.Body.String()) -} - -func TestRegisterValidator(t *testing.T) { - path := "/eth/v1/builder/validators" - - t.Run("not a known validator", func(t *testing.T) { - backend := newTestBackend(t, 1) - - rr := backend.request(http.MethodPost, path, []builderApiV1.SignedValidatorRegistration{common.ValidPayloadRegisterValidator}) - require.Equal(t, http.StatusBadRequest, rr.Code) - }) -} - -func TestGetHeader(t *testing.T) { - // Setup backend with headSlot and genesisTime - backend := newTestBackend(t, 1) - backend.relay.genesisInfo = &beaconclient.GetGenesisResponse{ - Data: beaconclient.GetGenesisResponseData{ - GenesisTime: uint64(time.Now().UTC().Unix()), - }, - } - - // request params - slot := uint64(2) - backend.relay.headSlot.Store(slot) - parentHash := "0x13e606c7b3d1faad7e83503ce3dedce4c6bb89b0c28ffb240d713c7b110b9747" - proposerPubkey := "0x6ae5932d1e248d987d51b58665b81848814202d7b23b343d20f2a167d12f07dcb01ca41c42fdd60b7fca9c4b90890792" - builderPubkey := "0xfa1ed37c3553d0ce1e9349b2c5063cf6e394d231c8d3e0df75e9462257c081543086109ffddaacc0aa76f33dc9661c83" - bidValue := uint256.NewInt(99) - trace := &common.BidTraceV2WithBlobFields{ - BidTrace: builderApiV1.BidTrace{ - Value: bidValue, - }, - } - - // request path - path := fmt.Sprintf("/eth/v1/builder/header/%d/%s/%s", slot, parentHash, proposerPubkey) - - // Create a capella bid - opts := common.CreateTestBlockSubmissionOpts{ - Slot: 
slot, - ParentHash: parentHash, - ProposerPubkey: proposerPubkey, - Version: spec.DataVersionCapella, - } - payload, getPayloadResp, getHeaderResp := common.CreateTestBlockSubmission(t, builderPubkey, bidValue, &opts) - _, err := backend.redis.SaveBidAndUpdateTopBid(context.Background(), backend.redis.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), false, nil, nil) - require.NoError(t, err) - - // Check 1: regular capella request works and returns a bid - rr := backend.request(http.MethodGet, path, nil) - require.Equal(t, http.StatusOK, rr.Code) - resp := builderSpec.VersionedSignedBuilderBid{} - err = json.Unmarshal(rr.Body.Bytes(), &resp) - require.NoError(t, err) - value, err := resp.Value() - require.NoError(t, err) - require.Equal(t, spec.DataVersionCapella, resp.Version) - require.Equal(t, bidValue.String(), value.String()) - - // Create a deneb bid - path = fmt.Sprintf("/eth/v1/builder/header/%d/%s/%s", slot+1, parentHash, proposerPubkey) - opts = common.CreateTestBlockSubmissionOpts{ - Slot: slot + 1, - ParentHash: parentHash, - ProposerPubkey: proposerPubkey, - Version: spec.DataVersionDeneb, - } - payload, getPayloadResp, getHeaderResp = common.CreateTestBlockSubmission(t, builderPubkey, bidValue, &opts) - _, err = backend.redis.SaveBidAndUpdateTopBid(context.Background(), backend.redis.NewPipeline(), trace, payload, getPayloadResp, getHeaderResp, time.Now(), false, nil, nil) - require.NoError(t, err) - - // Check 2: regular deneb request works and returns a bid - rr = backend.request(http.MethodGet, path, nil) - require.Equal(t, http.StatusOK, rr.Code) - resp = builderSpec.VersionedSignedBuilderBid{} - err = json.Unmarshal(rr.Body.Bytes(), &resp) - require.NoError(t, err) - value, err = resp.Value() - require.NoError(t, err) - require.Equal(t, spec.DataVersionDeneb, resp.Version) - require.Equal(t, bidValue.String(), value.String()) - - // Check 3: Request returns 204 if sending a filtered user agent - rr = backend.requestWithUA(http.MethodGet, path, "mev-boost/v1.5.0 Go-http-client/1.1", nil) - require.Equal(t, http.StatusNoContent, rr.Code) -} - -func TestSubmitConstraints(t *testing.T) { - // Setup backend with headSlot and genesisTime - backend := newTestBackend(t, 1) - backend.relay.genesisInfo = &beaconclient.GetGenesisResponse{ - Data: beaconclient.GetGenesisResponseData{ - GenesisTime: uint64(time.Now().UTC().Unix()), - }, - } - - // request params - slot := uint64(128) - backend.relay.headSlot.Store(slot) - - // Setup mocked beacon client for proposer - beaconClient := beaconclient.NewMockBeaconInstance() - - // Proposer data - proposerSecretKeyEC, proposerPublicKeyEC, err := bls.GenerateNewKeypair() - require.NoError(t, err) - proposerPublicKey, err := utils.BlsPublicKeyToPublicKey(proposerPublicKeyEC) - require.NoError(t, err) - validatorIndex := uint64(1) - mockValidatorEntry := beaconclient.ValidatorResponseEntry{ - Index: validatorIndex, Balance: "1000000", Validator: beaconclient.ValidatorResponseValidatorData{Pubkey: proposerPublicKey.String()}, - } - - // Update beacon client, create MultiBeaconClient and refresh validators in the datastore - beaconClient.AddValidator(mockValidatorEntry) - logger := logrus.New() - loggerEntry := logrus.NewEntry(logger) - - mockMultiBeaconClient := beaconclient.NewMockMultiBeaconClient(loggerEntry, []beaconclient.IBeaconInstance{beaconClient}) - - backend.relay.datastore.RefreshKnownValidators(backend.relay.log, mockMultiBeaconClient, slot) - - // request path - path := "/eth/v1/builder/constraints" - - 
// txHash := _HexToHash("0xba40436abdc8adc037e2c92ea1099a5849053510c3911037ff663085ce44bc49") - tx := _HexToBytes("0x02f871018304a5758085025ff11caf82565f94388c818ca8b9251b393131c08a736a67ccb1929787a41bb7ee22b41380c001a0c8630f734aba7acb4275a8f3b0ce831cf0c7c487fd49ee7bcca26ac622a28939a04c3745096fa0130a188fa249289fd9e60f9d6360854820dba22ae779ea6f573f") - - constraintMessage := &ConstraintsMessage{ - ValidatorIndex: validatorIndex, - Slot: slot, - Constraints: []*Constraint{{ - Tx: tx, - Index: nil, - }}, - } - - constraintMessageSSZ, err := constraintMessage.MarshalSSZ() - require.NoError(t, err) - signatureEC := bls.Sign(proposerSecretKeyEC, constraintMessageSSZ) - constraintSignature := phase0.BLSSignature(bls.SignatureToBytes(signatureEC)[:]) - - // Build the constraint - signedConstraints := SignedConstraints{ - Message: constraintMessage, - Signature: constraintSignature, - } - - payload := []*SignedConstraints{&signedConstraints} - - t.Run("Constraints sent", func(t *testing.T) { - ch := make(chan *SignedConstraints, 256) - backend.relay.constraintsConsumers = []chan *SignedConstraints{ch} - rr := backend.request(http.MethodPost, path, payload) - require.Equal(t, http.StatusOK, rr.Code) - - constraintCache := backend.relay.constraints - actuals, _ := constraintCache.Get(slot) - require.NotNil(t, actuals) - actual := (*actuals)[0] - actualFromCh := <-backend.relay.constraintsConsumers[0] - - expected := signedConstraints - - require.Equal(t, expected.String(), actual.String(), actualFromCh.String()) - }) - - t.Run("Empty constraint list", func(t *testing.T) { - rr := backend.request(http.MethodPost, path, []*SignedConstraints{}) - require.Equal(t, http.StatusBadRequest, rr.Code) - }) -} - -func TestSubscribeToConstraints(t *testing.T) { - backend := newTestBackend(t, 1) - path := "/relay/v1/builder/constraints" - - // Create and start HTTP server. 
- // This will serve the endpoint to subscribe to constraints via SSE - go func() { - backend.relay.srv = &http.Server{ - Addr: backend.relay.opts.ListenAddr, - Handler: backend.relay.getRouter(), - - ReadTimeout: time.Duration(apiReadTimeoutMs) * time.Millisecond, - ReadHeaderTimeout: time.Duration(apiReadHeaderTimeoutMs) * time.Millisecond, - WriteTimeout: time.Duration(apiWriteTimeoutMs) * time.Millisecond, - IdleTimeout: time.Duration(apiIdleTimeoutMs) * time.Millisecond, - MaxHeaderBytes: apiMaxHeaderBytes, - } - - t.Logf("Server starting on %s", backend.relay.opts.ListenAddr) - err := backend.relay.srv.ListenAndServe() - if errors.Is(err, http.ErrServerClosed) { - t.Log("Server closed") - return - } - }() - - // Wait for the server to start - time.Sleep(500 * time.Millisecond) - - // Run the request in a goroutine so that it doesn't block the test, - // but it finishes as soon as the message is sent over the channel - go func() { - url := "http://" + backend.relay.opts.ListenAddr + path - req, err := http.NewRequest(http.MethodGet, url, nil) - if err != nil { - log.Fatalf("Failed to create request: %v", err) - } - - // Send the request - client := &http.Client{} - // NOTE: this response arrives after the first data is flushed - resp, err := client.Do(req) - assert.Equal(t, err, nil) - assert.Equal(t, resp.StatusCode, http.StatusOK) - defer resp.Body.Close() - - bufReader := bufio.NewReader(resp.Body) - for { - line, err := bufReader.ReadString('\n') - if err != nil { - if err == io.EOF { - fmt.Println("End of stream") - break - } - log.Fatalf("Error reading from response body: %v", err) - } - - if strings.HasPrefix(line, "data: ") { - data := strings.TrimPrefix(line, "data: ") - fmt.Printf("Received event: %s\n", data) - } - } - }() - - // Wait for the HTTP request goroutine to start and add the consumer - time.Sleep(1 * time.Second) - - // Now we can safely send the constraints, and we should get a response - // in the HTTP request defined in the goroutine above - backend.relay.constraintsConsumers[0] <- &SignedConstraints{} - time.Sleep(500 * time.Millisecond) - backend.relay.constraintsConsumers[0] <- &SignedConstraints{} - - // Wait for the HTTP request goroutine to process the constraints - time.Sleep(2 * time.Second) -} - -func TestBuilderApiGetValidators(t *testing.T) { - path := "/relay/v1/builder/validators" - - backend := newTestBackend(t, 1) - duties := []common.BuilderGetValidatorsResponseEntry{ - { - Slot: 1, - Entry: &common.ValidPayloadRegisterValidator, - }, - } - responseBytes, err := json.Marshal(duties) - require.NoError(t, err) - backend.relay.proposerDutiesResponse = &responseBytes - - rr := backend.request(http.MethodGet, path, nil) - require.Equal(t, http.StatusOK, rr.Code) - - resp := []common.BuilderGetValidatorsResponseEntry{} - err = json.Unmarshal(rr.Body.Bytes(), &resp) - require.NoError(t, err) - require.Len(t, resp, 1) - require.Equal(t, uint64(1), resp[0].Slot) - require.Equal(t, common.ValidPayloadRegisterValidator, *resp[0].Entry) -} - -func TestDataApiGetDataProposerPayloadDelivered(t *testing.T) { - path := "/relay/v1/data/bidtraces/proposer_payload_delivered" - - t.Run("Accept valid block_hash", func(t *testing.T) { - backend := newTestBackend(t, 1) - - validBlockHash := "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" - rr := backend.request(http.MethodGet, path+"?block_hash="+validBlockHash, nil) - require.Equal(t, http.StatusOK, rr.Code) - }) - - t.Run("Reject invalid block_hash", func(t *testing.T) { - backend := 
newTestBackend(t, 1) - - invalidBlockHashes := []string{ - // One character too long. - "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab", - // One character too short. - "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - // Missing the 0x prefix. - "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - // Has an invalid hex character ('z' at the end). - "0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaz", - } - - for _, invalidBlockHash := range invalidBlockHashes { - rr := backend.request(http.MethodGet, path+"?block_hash="+invalidBlockHash, nil) - require.Equal(t, http.StatusBadRequest, rr.Code) - require.Contains(t, rr.Body.String(), "invalid block_hash argument") - } - }) -} - -func TestBuilderSubmitBlockSSZ(t *testing.T) { - testCases := []struct { - name string - filepath string - sszLength int - }{ - { - name: "Capella", - filepath: "../../testdata/submitBlockPayloadCapella_Goerli.json.gz", - sszLength: 352239, - }, - { - name: "Deneb", - filepath: "../../testdata/submitBlockPayloadDeneb_Goerli.json.gz", - sszLength: 872081, - }, - } - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - requestPayloadJSONBytes := common.LoadGzippedBytes(t, testCase.filepath) - - req := new(common.VersionedSubmitBlockRequest) - err := json.Unmarshal(requestPayloadJSONBytes, req) - require.NoError(t, err) - - reqSSZ, err := req.MarshalSSZ() - require.NoError(t, err) - require.Len(t, reqSSZ, testCase.sszLength) - - test := new(common.VersionedSubmitBlockRequest) - err = test.UnmarshalSSZ(reqSSZ) - require.NoError(t, err) - }) - } -} - -func TestBuilderSubmitBlock(t *testing.T) { - type testHelper struct { - headSlot uint64 - submissionTimestamp int - parentHash string - feeRecipient string - withdrawalRoot string - prevRandao string - jsonReqSize int - sszReqSize int - jsonGzipReqSize int - sszGzipReqSize int - } - - testCases := []struct { - name string - filepath string - data testHelper - }{ - { - name: "Capella", - filepath: "../../testdata/submitBlockPayloadCapella_Goerli.json.gz", - data: testHelper{ - headSlot: 32, - submissionTimestamp: 1606824419, - parentHash: "0xbd3291854dc822b7ec585925cda0e18f06af28fa2886e15f52d52dd4b6f94ed6", - feeRecipient: "0x5cc0dde14e7256340cc820415a6022a7d1c93a35", - withdrawalRoot: "0xb15ed76298ff84a586b1d875df08b6676c98dfe9c7cd73fab88450348d8e70c8", - prevRandao: "0x9962816e9d0a39fd4c80935338a741dc916d1545694e41eb5a505e1a3098f9e4", - jsonReqSize: 704810, - sszReqSize: 352239, - jsonGzipReqSize: 207788, - sszGzipReqSize: 195923, - }, - }, - { - name: "Deneb", - filepath: "../../testdata/submitBlockPayloadDeneb_Goerli.json.gz", - data: testHelper{ - headSlot: 86, - submissionTimestamp: 1606825067, - parentHash: "0xb1bd772f909db1b6cbad8cf31745d3f2d692294998161369a5709c17a71f630f", - feeRecipient: "0x455E5AA18469bC6ccEF49594645666C587A3a71B", - withdrawalRoot: "0x3cb816ccf6bb079b4f462e81db1262064f321a4afa4ff32c1f7e0a1c603836af", - prevRandao: "0x6d414d3ffba7ba51155c3528739102c2889005940913b5d4c8031eed30764d4d", - jsonReqSize: 1744002, - sszReqSize: 872081, - jsonGzipReqSize: 385043, - sszGzipReqSize: 363271, - }, - }, - } - path := "/relay/v1/builder/blocks" - backend := newTestBackend(t, 1) - - for _, testCase := range testCases { - t.Run(testCase.name, func(t *testing.T) { - headSlot := testCase.data.headSlot - submissionSlot := headSlot + 1 - submissionTimestamp := testCase.data.submissionTimestamp - - // Payload attributes - payloadJSONFilename := 
testCase.filepath - parentHash := testCase.data.parentHash - feeRec, err := utils.HexToAddress(testCase.data.feeRecipient) - require.NoError(t, err) - withdrawalsRoot, err := utils.HexToHash(testCase.data.withdrawalRoot) - require.NoError(t, err) - prevRandao := testCase.data.prevRandao - - // Setup the test relay backend - backend.relay.headSlot.Store(headSlot) - backend.relay.capellaEpoch = 0 - backend.relay.denebEpoch = 2 - backend.relay.proposerDutiesMap = make(map[uint64]*common.BuilderGetValidatorsResponseEntry) - backend.relay.proposerDutiesMap[headSlot+1] = &common.BuilderGetValidatorsResponseEntry{ - Slot: headSlot, - Entry: &builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: feeRec, - }, - }, - } - backend.relay.payloadAttributes = make(map[string]payloadAttributesHelper) - backend.relay.payloadAttributes[parentHash] = payloadAttributesHelper{ - slot: submissionSlot, - parentHash: parentHash, - payloadAttributes: beaconclient.PayloadAttributes{ - PrevRandao: prevRandao, - }, - withdrawalsRoot: phase0.Root(withdrawalsRoot), - } - - // Prepare the request payload - req := new(common.VersionedSubmitBlockRequest) - requestPayloadJSONBytes := common.LoadGzippedBytes(t, payloadJSONFilename) - require.NoError(t, err) - err = json.Unmarshal(requestPayloadJSONBytes, req) - require.NoError(t, err) - - // Update - switch req.Version { //nolint:exhaustive - case spec.DataVersionCapella: - req.Capella.Message.Slot = submissionSlot - req.Capella.ExecutionPayload.Timestamp = uint64(submissionTimestamp) - case spec.DataVersionDeneb: - req.Deneb.Message.Slot = submissionSlot - req.Deneb.ExecutionPayload.Timestamp = uint64(submissionTimestamp) - default: - require.Fail(t, "unknown data version") - } - - // Send JSON encoded request - reqJSONBytes, err := json.Marshal(req) - require.NoError(t, err) - require.Len(t, reqJSONBytes, testCase.data.jsonReqSize) - reqJSONBytes2, err := json.Marshal(req) - require.NoError(t, err) - require.Equal(t, reqJSONBytes, reqJSONBytes2) - rr := backend.requestBytes(http.MethodPost, path, reqJSONBytes, nil) - require.Contains(t, rr.Body.String(), "invalid signature") - require.Equal(t, http.StatusBadRequest, rr.Code) - - // Send SSZ encoded request - reqSSZBytes, err := req.MarshalSSZ() - require.NoError(t, err) - require.Len(t, reqSSZBytes, testCase.data.sszReqSize) - rr = backend.requestBytes(http.MethodPost, path, reqSSZBytes, map[string]string{ - "Content-Type": "application/octet-stream", - }) - require.Contains(t, rr.Body.String(), "invalid signature") - require.Equal(t, http.StatusBadRequest, rr.Code) - - // Send JSON+GZIP encoded request - headers := map[string]string{ - "Content-Encoding": "gzip", - } - jsonGzip := gzipBytes(t, reqJSONBytes) - require.Len(t, jsonGzip, testCase.data.jsonGzipReqSize) - rr = backend.requestBytes(http.MethodPost, path, jsonGzip, headers) - require.Contains(t, rr.Body.String(), "invalid signature") - require.Equal(t, http.StatusBadRequest, rr.Code) - - // Send SSZ+GZIP encoded request - headers = map[string]string{ - "Content-Type": "application/octet-stream", - "Content-Encoding": "gzip", - } - - sszGzip := gzipBytes(t, reqSSZBytes) - require.Len(t, sszGzip, testCase.data.sszGzipReqSize) - rr = backend.requestBytes(http.MethodPost, path, sszGzip, headers) - require.Contains(t, rr.Body.String(), "invalid signature") - require.Equal(t, http.StatusBadRequest, rr.Code) - }) - } -} - -func TestCheckSubmissionFeeRecipient(t *testing.T) { - cases := []struct { - description string 
- slotDuty *common.BuilderGetValidatorsResponseEntry - payload *common.VersionedSubmitBlockRequest - expectOk bool - expectGasLimit uint64 - }{ - { - description: "success", - slotDuty: &common.BuilderGetValidatorsResponseEntry{ - Entry: &builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: testAddress, - GasLimit: testGasLimit, - }, - }, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - ProposerFeeRecipient: testAddress, - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: true, - expectGasLimit: testGasLimit, - }, - { - description: "failure_nil_slot_duty", - slotDuty: nil, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - expectGasLimit: 0, - }, - { - description: "failure_diff_fee_recipient", - slotDuty: &common.BuilderGetValidatorsResponseEntry{ - Entry: &builderApiV1.SignedValidatorRegistration{ - Message: &builderApiV1.ValidatorRegistration{ - FeeRecipient: testAddress, - GasLimit: testGasLimit, - }, - }, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - ProposerFeeRecipient: testAddress2, - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - expectGasLimit: 0, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, _, backend := startTestBackend(t) - backend.relay.proposerDutiesLock.RLock() - slot, err := tc.payload.Slot() - require.NoError(t, err) - backend.relay.proposerDutiesMap[slot] = tc.slotDuty - backend.relay.proposerDutiesLock.RUnlock() - - w := httptest.NewRecorder() - logger := logrus.New() - log := logrus.NewEntry(logger) - submission, err := common.GetBlockSubmissionInfo(tc.payload) - require.NoError(t, err) - gasLimit, ok := backend.relay.checkSubmissionFeeRecipient(w, log, submission.BidTrace) - require.Equal(t, tc.expectGasLimit, gasLimit) - require.Equal(t, tc.expectOk, ok) - }) - } -} - -func TestCheckSubmissionPayloadAttrs(t *testing.T) { - withdrawalsRoot, err := utils.HexToHash(testWithdrawalsRoot) - require.NoError(t, err) - prevRandao, err := utils.HexToHash(testPrevRandao) - require.NoError(t, err) - parentHash, err := utils.HexToHash(testParentHash) - require.NoError(t, err) - - cases := []struct { - description string - attrs payloadAttributesHelper - payload *common.VersionedSubmitBlockRequest - expectOk bool - }{ - { - description: "success", - attrs: payloadAttributesHelper{ - slot: testSlot, - parentHash: testParentHash, - withdrawalsRoot: phase0.Root(withdrawalsRoot), - payloadAttributes: beaconclient.PayloadAttributes{ - PrevRandao: testPrevRandao, - }, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: 
&capella.ExecutionPayload{ - PrevRandao: prevRandao, - Withdrawals: []*capella.Withdrawal{ - { - Index: 989694, - }, - }, - }, - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - ParentHash: parentHash, - }, - }, - }, - }, - expectOk: true, - }, - { - description: "failure_attrs_not_known", - attrs: payloadAttributesHelper{ - slot: testSlot, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot + 1, // submission for a future slot - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - }, - { - description: "failure_wrong_prev_randao", - attrs: payloadAttributesHelper{ - slot: testSlot, - payloadAttributes: beaconclient.PayloadAttributes{ - PrevRandao: testPrevRandao, - }, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - ParentHash: parentHash, - }, - ExecutionPayload: &capella.ExecutionPayload{ - PrevRandao: [32]byte(parentHash), // use a different hash to cause an error - }, - }, - }, - }, - expectOk: false, - }, - { - description: "failure_nil_withdrawals", - attrs: payloadAttributesHelper{ - slot: testSlot, - payloadAttributes: beaconclient.PayloadAttributes{ - PrevRandao: testPrevRandao, - }, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - ParentHash: parentHash, - }, - ExecutionPayload: &capella.ExecutionPayload{ - PrevRandao: [32]byte(prevRandao), - Withdrawals: nil, // set to nil to cause an error - }, - }, - }, - }, - expectOk: false, - }, - { - description: "failure_wrong_withdrawal_root", - attrs: payloadAttributesHelper{ - slot: testSlot, - parentHash: testParentHash, - withdrawalsRoot: phase0.Root(prevRandao), // use different root to cause an error - payloadAttributes: beaconclient.PayloadAttributes{ - PrevRandao: testPrevRandao, - }, - }, - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: &capella.ExecutionPayload{ - PrevRandao: [32]byte(prevRandao), - Withdrawals: []*capella.Withdrawal{ - { - Index: 989694, - }, - }, - }, - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - ParentHash: parentHash, - }, - }, - }, - }, - expectOk: false, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, _, backend := startTestBackend(t) - backend.relay.payloadAttributesLock.RLock() - backend.relay.payloadAttributes[testParentHash] = tc.attrs - backend.relay.payloadAttributesLock.RUnlock() - - w := httptest.NewRecorder() - logger := logrus.New() - log := logrus.NewEntry(logger) - submission, err := common.GetBlockSubmissionInfo(tc.payload) - require.NoError(t, err) - _, ok := backend.relay.checkSubmissionPayloadAttrs(w, log, submission) - require.Equal(t, tc.expectOk, ok) - }) - } -} - -func TestCheckSubmissionSlotDetails(t *testing.T) { - cases := []struct { - description string - payload 
*common.VersionedSubmitBlockRequest - expectOk bool - }{ - { - description: "success", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: &capella.ExecutionPayload{ - Timestamp: testSlot * common.SecondsPerSlot, - }, - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - }, - }, - }, - }, - expectOk: true, - }, - { - description: "non_capella_slot", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: &capella.ExecutionPayload{ - Timestamp: testSlot * common.SecondsPerSlot, - }, - Message: &builderApiV1.BidTrace{ - Slot: testSlot + 32, - }, - }, - }, - }, - expectOk: false, - }, - { - description: "non_deneb_slot", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.SubmitBlockRequest{ - ExecutionPayload: &deneb.ExecutionPayload{ - Timestamp: testSlot * common.SecondsPerSlot, - }, - BlobsBundle: &builderApiDeneb.BlobsBundle{}, - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - }, - }, - }, - }, - expectOk: false, - }, - { - description: "failure_past_slot", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot - 1, // use old slot to cause error - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - }, - { - description: "failure_wrong_timestamp", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: &capella.ExecutionPayload{ - Timestamp: testSlot*common.SecondsPerSlot - 1, // use wrong timestamp to cause error - }, - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - }, - }, - }, - }, - expectOk: false, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, _, backend := startTestBackend(t) - backend.relay.capellaEpoch = 1 - backend.relay.denebEpoch = 2 - headSlot := testSlot - 1 - w := httptest.NewRecorder() - logger := logrus.New() - log := logrus.NewEntry(logger) - submission, err := common.GetBlockSubmissionInfo(tc.payload) - require.NoError(t, err) - ok := backend.relay.checkSubmissionSlotDetails(w, log, headSlot, tc.payload, submission) - require.Equal(t, tc.expectOk, ok) - }) - } -} - -func TestCheckBuilderEntry(t *testing.T) { - builderPubkey, err := utils.HexToPubkey(testBuilderPubkey) - require.NoError(t, err) - diffPubkey := builderPubkey - diffPubkey[0] = 0xff - cases := []struct { - description string - entry *blockBuilderCacheEntry - pk phase0.BLSPubKey - expectOk bool - }{ - { - description: "success", - entry: &blockBuilderCacheEntry{ - status: common.BuilderStatus{ - IsHighPrio: true, - }, - }, - pk: builderPubkey, - expectOk: true, - }, - { - description: "failure_blacklisted", - entry: &blockBuilderCacheEntry{ - status: common.BuilderStatus{ - IsBlacklisted: true, // set blacklisted to true to cause failure - }, - }, - pk: builderPubkey, - expectOk: false, - }, 
- { - description: "failure_low_prio", - entry: &blockBuilderCacheEntry{ - status: common.BuilderStatus{ - IsHighPrio: false, // set low-prio to cause failure - }, - }, - pk: builderPubkey, - expectOk: false, - }, - { - description: "failure_nil_entry_low_prio", - entry: nil, - pk: diffPubkey, // set to different pubkey, so no entry is found - expectOk: false, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, _, backend := startTestBackend(t) - backend.relay.blockBuildersCache[tc.pk.String()] = tc.entry - backend.relay.ffDisableLowPrioBuilders = true - w := httptest.NewRecorder() - logger := logrus.New() - log := logrus.NewEntry(logger) - _, ok := backend.relay.checkBuilderEntry(w, log, builderPubkey) - require.Equal(t, tc.expectOk, ok) - }) - } -} - -func TestCheckFloorBidValue(t *testing.T) { - cases := []struct { - description string - payload *common.VersionedSubmitBlockRequest - cancellationsEnabled bool - floorValue string - expectOk bool - }{ - { - description: "success", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - Value: uint256.NewInt(1), - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: true, - }, - { - description: "failure_slot_already_delivered", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: 0, - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - }, - { - description: "failure_cancellations_below_floor", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - Value: uint256.NewInt(1), - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - cancellationsEnabled: true, - floorValue: "2", - }, - { - description: "failure_no_cancellations_at_floor", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - Value: uint256.NewInt(0), - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: false, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, _, backend := startTestBackend(t) - submission, err := common.GetBlockSubmissionInfo(tc.payload) - require.NoError(t, err) - err = backend.redis.SetFloorBidValue(submission.BidTrace.Slot, submission.BidTrace.ParentHash.String(), submission.BidTrace.ProposerPubkey.String(), tc.floorValue) - require.NoError(t, err) - - w := httptest.NewRecorder() - logger := logrus.New() - log := logrus.NewEntry(logger) - tx := backend.redis.NewTxPipeline() - simResultC := make(chan *blockSimResult, 1) - submission, err = common.GetBlockSubmissionInfo(tc.payload) - require.NoError(t, err) - bfOpts := bidFloorOpts{ - w: w, - tx: tx, - log: log, - cancellationsEnabled: tc.cancellationsEnabled, - simResultC: simResultC, - submission: submission, - } - 
floor, ok := backend.relay.checkFloorBidValue(bfOpts) - require.Equal(t, tc.expectOk, ok) - if ok { - require.NotNil(t, floor) - require.NotNil(t, log) - } - }) - } -} - -func TestUpdateRedis(t *testing.T) { - cases := []struct { - description string - cancellationsEnabled bool - floorValue string - payload *common.VersionedSubmitBlockRequest - expectOk bool - }{ - { - description: "success", - floorValue: "10", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - Value: uint256.NewInt(1), - }, - ExecutionPayload: &capella.ExecutionPayload{}, - }, - }, - }, - expectOk: true, - }, - { - description: "failure_no_payload", - floorValue: "10", - payload: nil, - expectOk: false, - }, - { - description: "failure_encode_failure_too_long_extra_data", - floorValue: "10", - payload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - Message: &builderApiV1.BidTrace{ - Slot: testSlot, - Value: uint256.NewInt(1), - }, - ExecutionPayload: &capella.ExecutionPayload{ - ExtraData: make([]byte, 33), // Max extra data length is 32. - }, - }, - }, - }, - expectOk: false, - }, - } - for _, tc := range cases { - t.Run(tc.description, func(t *testing.T) { - _, _, backend := startTestBackend(t) - w := httptest.NewRecorder() - logger := logrus.New() - log := logrus.NewEntry(logger) - tx := backend.redis.NewTxPipeline() - - floorValue := new(big.Int) - floorValue, ok := floorValue.SetString(tc.floorValue, 10) - require.True(t, ok) - rOpts := redisUpdateBidOpts{ - w: w, - tx: tx, - log: log, - cancellationsEnabled: tc.cancellationsEnabled, - floorBidValue: floorValue, - payload: tc.payload, - } - updateResp, getPayloadResp, ok := backend.relay.updateRedisBid(rOpts) - require.Equal(t, tc.expectOk, ok) - if ok { - require.NotNil(t, updateResp) - require.NotNil(t, getPayloadResp) - } - }) - } -} - -func TestCheckProposerSignature(t *testing.T) { - t.Run("Unsupported version", func(t *testing.T) { - _, _, backend := startTestBackend(t) - payload := new(common.VersionedSignedBlindedBeaconBlock) - payload.Version = spec.DataVersionBellatrix - ok, err := backend.relay.checkProposerSignature(payload, []byte{}) - require.Error(t, err, "unsupported consensus data version") - require.False(t, ok) - }) - - t.Run("Valid Capella Signature", func(t *testing.T) { - jsonBytes := common.LoadGzippedBytes(t, "../../testdata/signedBlindedBeaconBlockCapella_Goerli.json.gz") - payload := new(common.VersionedSignedBlindedBeaconBlock) - err := json.Unmarshal(jsonBytes, payload) - require.NoError(t, err) - // start backend with goerli network - _, _, backend := startTestBackend(t) - goerli, err := common.NewEthNetworkDetails(common.EthNetworkGoerli) - require.NoError(t, err) - backend.relay.opts.EthNetDetails = *goerli - // check signature - pubkey, err := utils.HexToPubkey("0xa8afcb5313602f936864b30600f568e04069e596ceed9b55e2a1c872c959ddcb90589636469c15d97e7565344d9ed4ad") - require.NoError(t, err) - ok, err := backend.relay.checkProposerSignature(payload, pubkey[:]) - require.NoError(t, err) - require.True(t, ok) - }) - - t.Run("Invalid Capella Signature", func(t *testing.T) { - jsonBytes := common.LoadGzippedBytes(t, "../../testdata/signedBlindedBeaconBlockCapella_Goerli.json.gz") - payload 
:= new(common.VersionedSignedBlindedBeaconBlock) - err := json.Unmarshal(jsonBytes, payload) - require.NoError(t, err) - // change signature - signature, err := utils.HexToSignature( - "0x942d85822e86a182b0a535361b379015a03e5ce4416863d3baa46b42eef06f070462742b79fbc77c0802699ba6d2ab00" + - "11740dad6bfcf05b1f15c5a11687ae2aa6a08c03ad1ff749d7a48e953d13b5d7c2bd1da4cfcf30ba6d918b587d6525f0", - ) - require.NoError(t, err) - payload.Capella.Signature = signature - // start backend with goerli network - _, _, backend := startTestBackend(t) - goerli, err := common.NewEthNetworkDetails(common.EthNetworkGoerli) - require.NoError(t, err) - backend.relay.opts.EthNetDetails = *goerli - // check signature - pubkey, err := utils.HexToPubkey("0xa8afcb5313602f936864b30600f568e04069e596ceed9b55e2a1c872c959ddcb90589636469c15d97e7565344d9ed4ad") - require.NoError(t, err) - ok, err := backend.relay.checkProposerSignature(payload, pubkey[:]) - require.NoError(t, err) - require.False(t, ok) - }) - - t.Run("Valid Deneb Signature", func(t *testing.T) { - jsonBytes := common.LoadGzippedBytes(t, "../../testdata/signedBlindedBeaconBlockDeneb_Goerli.json.gz") - payload := new(common.VersionedSignedBlindedBeaconBlock) - err := json.Unmarshal(jsonBytes, payload) - require.NoError(t, err) - // start backend with goerli network - _, _, backend := startTestBackend(t) - goerli, err := common.NewEthNetworkDetails(common.EthNetworkGoerli) - require.NoError(t, err) - backend.relay.opts.EthNetDetails = *goerli - // check signature - t.Log(payload.Deneb.Message.Slot) - pubkey, err := utils.HexToPubkey("0x8322b8af5c6d97e855cc75ad19d59b381a880630cded89268c14acb058cf3c5720ebcde5fa6087dcbb64dbd826936148") - require.NoError(t, err) - ok, err := backend.relay.checkProposerSignature(payload, pubkey[:]) - require.NoError(t, err) - require.True(t, ok) - }) - - t.Run("Invalid Deneb Signature", func(t *testing.T) { - jsonBytes := common.LoadGzippedBytes(t, "../../testdata/signedBlindedBeaconBlockDeneb_Goerli.json.gz") - payload := new(common.VersionedSignedBlindedBeaconBlock) - err := json.Unmarshal(jsonBytes, payload) - require.NoError(t, err) - // change signature - signature, err := utils.HexToSignature( - "0x942d85822e86a182b0a535361b379015a03e5ce4416863d3baa46b42eef06f070462742b79fbc77c0802699ba6d2ab00" + - "11740dad6bfcf05b1f15c5a11687ae2aa6a08c03ad1ff749d7a48e953d13b5d7c2bd1da4cfcf30ba6d918b587d6525f0", - ) - require.NoError(t, err) - payload.Deneb.Signature = signature - // start backend with goerli network - _, _, backend := startTestBackend(t) - goerli, err := common.NewEthNetworkDetails(common.EthNetworkGoerli) - require.NoError(t, err) - backend.relay.opts.EthNetDetails = *goerli - // check signature - pubkey, err := utils.HexToPubkey("0x8322b8af5c6d97e855cc75ad19d59b381a880630cded89268c14acb058cf3c5720ebcde5fa6087dcbb64dbd826936148") - require.NoError(t, err) - ok, err := backend.relay.checkProposerSignature(payload, pubkey[:]) - require.NoError(t, err) - require.False(t, ok) - }) -} - -func gzipBytes(t *testing.T, b []byte) []byte { - t.Helper() - var buf bytes.Buffer - zw := gzip.NewWriter(&buf) - _, err := zw.Write(b) - require.NoError(t, err) - require.NoError(t, zw.Close()) - return buf.Bytes() -} diff --git a/mev-boost-relay/services/api/test_utils.go b/mev-boost-relay/services/api/test_utils.go deleted file mode 100644 index 943ee1dcc..000000000 --- a/mev-boost-relay/services/api/test_utils.go +++ /dev/null @@ -1,61 +0,0 @@ -package api - -import ( - "github.com/attestantio/go-eth2-client/spec/bellatrix" - 
"github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/flashbots/go-boost-utils/utils" - "github.com/sirupsen/logrus" -) - -// testLog is used to log information in the test methods -var testLog = logrus.NewEntry(logrus.New()) - -// _HexToBytes converts a hexadecimal string to a byte array -func _HexToBytes(hex string) []byte { - res, err := hexutil.Decode(hex) - if err != nil { - panic(err) - } - return res -} - -// _HexToHash converts a hexadecimal string to an Ethereum hash -func _HexToHash(s string) (ret phase0.Hash32) { - ret, err := utils.HexToHash(s) - if err != nil { - testLog.Error(err, " _HexToHash: ", s) - panic(err) - } - return ret -} - -// _HexToAddress converts a hexadecimal string to an Ethereum address -func _HexToAddress(s string) (ret bellatrix.ExecutionAddress) { - ret, err := utils.HexToAddress(s) - if err != nil { - testLog.Error(err, " _HexToAddress: ", s) - panic(err) - } - return ret -} - -// _HexToPubkey converts a hexadecimal string to a BLS Public Key -func _HexToPubkey(s string) (ret phase0.BLSPubKey) { - ret, err := utils.HexToPubkey(s) - if err != nil { - testLog.Error(err, " _HexToPubkey: ", s) - panic(err) - } - return -} - -// _HexToSignature converts a hexadecimal string to a BLS Signature -func _HexToSignature(s string) (ret phase0.BLSSignature) { - ret, err := utils.HexToSignature(s) - if err != nil { - testLog.Error(err, " _HexToSignature: ", s) - panic(err) - } - return -} diff --git a/mev-boost-relay/services/api/transaction_ssz.go b/mev-boost-relay/services/api/transaction_ssz.go deleted file mode 100644 index 02192477d..000000000 --- a/mev-boost-relay/services/api/transaction_ssz.go +++ /dev/null @@ -1,65 +0,0 @@ -package api - -import ( - ssz "github.com/ferranbt/fastssz" - "github.com/flashbots/mev-boost-relay/common" -) - -// MaxBytesPerTransaction is the maximum length in bytes of a raw RLP-encoded transaction -var MaxBytesPerTransaction uint64 = 1_073_741_824 // 2**30 - -// Transaction is a wrapper type of `common.HexBytes` to implement the ssz.HashRoot interface -type Transaction common.HexBytes - -// HashTreeRoot calculates the hash tree root of the transaction, which -// is a list of basic types (byte). -// -// Reference: https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization -func (tx *Transaction) HashTreeRoot() ([32]byte, error) { - hasher := ssz.NewHasher() - tx.HashTreeRootWith(hasher) - root, err := hasher.HashRoot() - - return root, err -} - -func (tx *Transaction) HashTreeRootWith(hh ssz.HashWalker) error { - var err error - byteLen := uint64(len(*tx)) - - if byteLen > MaxBytesPerTransaction { - err = ssz.ErrIncorrectListSize - return err - } - - // Load the bytes of the transaction into the hasher - hh.AppendBytes32(*tx) - // Perform `mix_in_length(merkleize(pack(value), limit=chunk_count(type)), len(value))` - // Reference: https://github.com/ethereum/consensus-specs/blob/dev/ssz/simple-serialize.md#merkleization - // - // The `indx` parameters is set to `0` as we need to consider the whole hh.buf buffer for this. - // In an implementation of more complex types, this parameter would be used to indicate the starting - // index of the buffer to be merkleized. It is used a single buffer to do everything for - // optimization purposes. 
- hh.MerkleizeWithMixin(0, byteLen, (1073741824+31)/32) - - return nil -} - -func (tx *Transaction) GetTree() (*ssz.Node, error) { - w := &ssz.Wrapper{} - tx.HashTreeRootWith(w) - return w.Node(), nil -} - -func (tx Transaction) MarshalJSON() ([]byte, error) { - return common.HexBytes(tx).MarshalJSON() -} - -func (tx *Transaction) UnmarshalJSON(buf []byte) error { - return (*common.HexBytes)(tx).UnmarshalJSON(buf) -} - -func (tx Transaction) String() string { - return JSONStringify(tx) -} diff --git a/mev-boost-relay/services/api/types.go b/mev-boost-relay/services/api/types.go deleted file mode 100644 index 298b52a70..000000000 --- a/mev-boost-relay/services/api/types.go +++ /dev/null @@ -1,51 +0,0 @@ -package api - -import ( - "encoding/json" - "errors" - "fmt" - - "github.com/attestantio/go-eth2-client/spec/phase0" - gethCommon "github.com/ethereum/go-ethereum/common" - "github.com/ethereum/go-ethereum/core/types" - boostTypes "github.com/flashbots/go-boost-utils/types" -) - -var ( - ErrMissingRequest = errors.New("req is nil") - ErrMissingSecretKey = errors.New("secret key is nil") - ErrEmptyPayload = errors.New("nil payload") - - NilResponse = struct{}{} - ZeroU256 = boostTypes.IntToU256(0) -) - -type HTTPErrorResp struct { - Code int `json:"code"` - Message string `json:"message"` -} - -type HTTPMessageResp struct { - Message string `json:"message"` -} - -type ConstraintSubscriptionAuth struct { - PublicKey phase0.BLSPubKey `json:"publicKey"` - Slot uint64 `json:"slot"` -} - -func (c *ConstraintSubscriptionAuth) String() string { - buf, err := json.Marshal(c) - if err != nil { - return fmt.Sprintf("failed to marshal ConstraintSubscriptionAuth: %v", err) - } - return string(buf) -} - -type ( - HashToConstraintDecoded = map[gethCommon.Hash]*ConstraintDecoded - ConstraintDecoded struct { - Index *Index - Tx *types.Transaction - } -) diff --git a/mev-boost-relay/services/api/types_test.go b/mev-boost-relay/services/api/types_test.go deleted file mode 100644 index 07256f22f..000000000 --- a/mev-boost-relay/services/api/types_test.go +++ /dev/null @@ -1,148 +0,0 @@ -package api - -import ( - "testing" - - builderApiCapella "github.com/attestantio/go-builder-client/api/capella" - builderApiDeneb "github.com/attestantio/go-builder-client/api/deneb" - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - builderSpec "github.com/attestantio/go-builder-client/spec" - "github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/bellatrix" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/deneb" - "github.com/attestantio/go-eth2-client/spec/phase0" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/ssz" - "github.com/flashbots/go-boost-utils/types" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/common" - "github.com/holiman/uint256" - "github.com/stretchr/testify/require" -) - -func TestBuilderBlockRequestToSignedBuilderBid(t *testing.T) { - builderPk, err := utils.HexToPubkey("0xf9716c94aab536227804e859d15207aa7eaaacd839f39dcbdb5adc942842a8d2fb730f9f49fc719fdb86f1873e0ed1c2") - require.NoError(t, err) - - builderSk, err := utils.HexToSignature("0x8209b5391cd69f392b1f02dbc03bab61f574bb6bb54bf87b59e2a85bdc0756f7db6a71ce1b41b727a1f46ccc77b213bf0df1426177b5b29926b39956114421eaa36ec4602969f6f6370a44de44a6bce6dae2136e5fb594cce2a476354264d1ea") - require.NoError(t, err) - - cases := []struct { - name string - reqPayload 
*common.VersionedSubmitBlockRequest - }{ - { - name: "Capella", - reqPayload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionCapella, - Capella: &builderApiCapella.SubmitBlockRequest{ - ExecutionPayload: &capella.ExecutionPayload{ - ParentHash: phase0.Hash32{0x01}, - FeeRecipient: bellatrix.ExecutionAddress{0x02}, - StateRoot: phase0.Root{0x03}, - ReceiptsRoot: phase0.Root{0x04}, - LogsBloom: [256]byte{0x05}, - PrevRandao: phase0.Hash32{0x06}, - BlockNumber: 5001, - GasLimit: 5002, - GasUsed: 5003, - Timestamp: 5004, - ExtraData: []byte{0x07}, - BaseFeePerGas: types.IntToU256(123), - BlockHash: phase0.Hash32{0x09}, - Transactions: []bellatrix.Transaction{}, - }, - Message: &builderApiV1.BidTrace{ - Slot: 1, - ParentHash: phase0.Hash32{0x01}, - BlockHash: phase0.Hash32{0x09}, - BuilderPubkey: builderPk, - ProposerPubkey: phase0.BLSPubKey{0x03}, - ProposerFeeRecipient: bellatrix.ExecutionAddress{0x04}, - Value: uint256.NewInt(123), - GasLimit: 5002, - GasUsed: 5003, - }, - Signature: builderSk, - }, - }, - }, - }, - { - name: "Deneb", - reqPayload: &common.VersionedSubmitBlockRequest{ - VersionedSubmitBlockRequest: builderSpec.VersionedSubmitBlockRequest{ - Version: spec.DataVersionDeneb, - Deneb: &builderApiDeneb.SubmitBlockRequest{ - ExecutionPayload: &deneb.ExecutionPayload{ - ParentHash: phase0.Hash32{0x01}, - FeeRecipient: bellatrix.ExecutionAddress{0x02}, - StateRoot: phase0.Root{0x03}, - ReceiptsRoot: phase0.Root{0x04}, - LogsBloom: [256]byte{0x05}, - PrevRandao: phase0.Hash32{0x06}, - BlockNumber: 5001, - GasLimit: 5002, - GasUsed: 5003, - Timestamp: 5004, - ExtraData: []byte{0x07}, - BaseFeePerGas: uint256.NewInt(123), - BlockHash: phase0.Hash32{0x09}, - Transactions: []bellatrix.Transaction{}, - BlobGasUsed: 5005, - ExcessBlobGas: 5006, - }, - BlobsBundle: &builderApiDeneb.BlobsBundle{ - Commitments: []deneb.KZGCommitment{}, - Proofs: []deneb.KZGProof{}, - Blobs: []deneb.Blob{}, - }, - Message: &builderApiV1.BidTrace{ - Slot: 1, - ParentHash: phase0.Hash32{0x01}, - BlockHash: phase0.Hash32{0x09}, - BuilderPubkey: builderPk, - ProposerPubkey: phase0.BLSPubKey{0x03}, - ProposerFeeRecipient: bellatrix.ExecutionAddress{0x04}, - Value: uint256.NewInt(123), - GasLimit: 5002, - GasUsed: 5003, - }, - Signature: builderSk, - }, - }, - }, - }, - } - - for _, tc := range cases { - t.Run(tc.name, func(t *testing.T) { - sk, _, err := bls.GenerateNewKeypair() - require.NoError(t, err) - - pubkey, err := bls.PublicKeyFromSecretKey(sk) - require.NoError(t, err) - - publicKey, err := utils.BlsPublicKeyToPublicKey(pubkey) - require.NoError(t, err) - - signedBuilderBid, err := common.BuildGetHeaderResponse(tc.reqPayload, sk, &publicKey, ssz.DomainBuilder) - require.NoError(t, err) - - bidValue, err := signedBuilderBid.Value() - require.NoError(t, err) - respValue, err := tc.reqPayload.Value() - require.NoError(t, err) - - bidHash, err := signedBuilderBid.BlockHash() - require.NoError(t, err) - respHash, err := tc.reqPayload.BlockHash() - require.NoError(t, err) - - require.Equal(t, 0, bidValue.Cmp(respValue)) - require.Equal(t, respHash, bidHash) - }) - } -} diff --git a/mev-boost-relay/services/api/utils.go b/mev-boost-relay/services/api/utils.go deleted file mode 100644 index 9c4b98008..000000000 --- a/mev-boost-relay/services/api/utils.go +++ /dev/null @@ -1,187 +0,0 @@ -package api - -import ( - "encoding/json" - "fmt" - "net/http" - "strings" - - builderApi "github.com/attestantio/go-builder-client/api" - 
"github.com/attestantio/go-eth2-client/spec" - "github.com/attestantio/go-eth2-client/spec/capella" - "github.com/attestantio/go-eth2-client/spec/phase0" - eth2UtilCapella "github.com/attestantio/go-eth2-client/util/capella" - "github.com/flashbots/go-boost-utils/bls" - "github.com/flashbots/go-boost-utils/utils" - "github.com/flashbots/mev-boost-relay/common" - "github.com/pkg/errors" -) - -var ( - ErrBlockHashMismatch = errors.New("blockHash mismatch") - ErrParentHashMismatch = errors.New("parentHash mismatch") - - ErrUnsupportedPayload = errors.New("unsupported payload version") - ErrNoWithdrawals = errors.New("no withdrawals") - ErrPayloadMismatch = errors.New("beacon-block and payload version mismatch") - ErrHeaderHTRMismatch = errors.New("beacon-block and payload header mismatch") - ErrBlobMismatch = errors.New("beacon-block and payload blob contents mismatch") -) - -func SanityCheckBuilderBlockSubmission(payload *common.VersionedSubmitBlockRequest) error { - submission, err := common.GetBlockSubmissionInfo(payload) - if err != nil { - return err - } - if submission.BidTrace.BlockHash.String() != submission.ExecutionPayloadBlockHash.String() { - return ErrBlockHashMismatch - } - - if submission.BidTrace.ParentHash.String() != submission.ExecutionPayloadParentHash.String() { - return ErrParentHashMismatch - } - - return nil -} - -func ComputeWithdrawalsRoot(w []*capella.Withdrawal) (phase0.Root, error) { - if w == nil { - return phase0.Root{}, ErrNoWithdrawals - } - withdrawals := eth2UtilCapella.ExecutionPayloadWithdrawals{Withdrawals: w} - return withdrawals.HashTreeRoot() -} - -func EqBlindedBlockContentsToBlockContents(bb *common.VersionedSignedBlindedBeaconBlock, payload *builderApi.VersionedSubmitBlindedBlockResponse) error { - if bb.Version != payload.Version { - return errors.Wrap(ErrPayloadMismatch, fmt.Sprintf("beacon block version %d does not match payload version %d", bb.Version, payload.Version)) - } - - versionedPayload := &builderApi.VersionedExecutionPayload{ //nolint:exhaustivestruct - Version: payload.Version, - } - switch bb.Version { //nolint:exhaustive - case spec.DataVersionCapella: - bbHeaderHtr, err := bb.Capella.Message.Body.ExecutionPayloadHeader.HashTreeRoot() - if err != nil { - return err - } - - versionedPayload.Capella = payload.Capella - payloadHeader, err := utils.PayloadToPayloadHeader(versionedPayload) - if err != nil { - return err - } - - payloadHeaderHtr, err := payloadHeader.Capella.HashTreeRoot() - if err != nil { - return err - } - - if bbHeaderHtr != payloadHeaderHtr { - return ErrHeaderHTRMismatch - } - case spec.DataVersionDeneb: - block := bb.Deneb.Message - bbHeaderHtr, err := block.Body.ExecutionPayloadHeader.HashTreeRoot() - if err != nil { - return err - } - - versionedPayload.Deneb = payload.Deneb.ExecutionPayload - payloadHeader, err := utils.PayloadToPayloadHeader(versionedPayload) - if err != nil { - return err - } - - payloadHeaderHtr, err := payloadHeader.Deneb.HashTreeRoot() - if err != nil { - return err - } - - if bbHeaderHtr != payloadHeaderHtr { - return ErrHeaderHTRMismatch - } - - if len(bb.Deneb.Message.Body.BlobKZGCommitments) != len(payload.Deneb.BlobsBundle.Commitments) { - return errors.Wrap(ErrBlobMismatch, "mismatched number of KZG commitments") - } - - for i, commitment := range bb.Deneb.Message.Body.BlobKZGCommitments { - if commitment != payload.Deneb.BlobsBundle.Commitments[i] { - return errors.Wrap(ErrBlobMismatch, fmt.Sprintf("mismatched KZG commitment at index %d", i)) - } - } - default: - return 
ErrUnsupportedPayload - } - // block and payload are equal - return nil -} - -func checkBLSPublicKeyHex(pkHex string) error { - _, err := utils.HexToPubkey(pkHex) - return err -} - -func hasReachedFork(slot uint64, forkEpoch int64) bool { - if forkEpoch < 0 { - return false - } - currentEpoch := slot / common.SlotsPerEpoch - return currentEpoch >= uint64(forkEpoch) -} - -func verifyBlockSignature(block *common.VersionedSignedBlindedBeaconBlock, domain phase0.Domain, pubKey []byte) (bool, error) { - root, err := block.Root() - if err != nil { - return false, err - } - sig, err := block.Signature() - if err != nil { - return false, err - } - signingData := phase0.SigningData{ObjectRoot: root, Domain: domain} - msg, err := signingData.HashTreeRoot() - if err != nil { - return false, err - } - - return bls.VerifySignatureBytes(msg[:], sig[:], pubKey[:]) -} - -func broadcastToChannels[T any](constraintsConsumers []chan *T, constraint *T) { - for _, consumer := range constraintsConsumers { - consumer <- constraint - } -} - -func JSONStringify[T any](obj T) string { - out, err := json.Marshal(obj) - if err != nil { - return fmt.Sprintf("Error while marshalling: %v", err) - } - return string(out) -} - -func Find[T any](slice []*T, predicate func(arg *T) bool) *T { - for _, item := range slice { - if predicate(item) { - return item - } - } - return nil -} - -// EmitBoltDemoEvent sends a message to the web demo backend to log an event. -// This is only used for demo purposes and should be removed in production. -func EmitBoltDemoEvent(message string) { - event := strings.NewReader(fmt.Sprintf("{ \"message\": \"BOLT-RELAY: %s\"}", message)) - eventRes, err := http.Post("http://host.docker.internal:3001/events", "application/json", event) - if err != nil { - fmt.Printf("Failed to send web demo event: %v", err) - } - if eventRes != nil { - defer eventRes.Body.Close() - } -} diff --git a/mev-boost-relay/services/housekeeper/housekeeper.go b/mev-boost-relay/services/housekeeper/housekeeper.go deleted file mode 100644 index 419c2e48b..000000000 --- a/mev-boost-relay/services/housekeeper/housekeeper.go +++ /dev/null @@ -1,266 +0,0 @@ -// Package housekeeper contains the service doing all required regular tasks -// -// - Update known validators -// - Updating proposer duties -// - Saving metrics -// - Deleting old bids -// - ... 
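The task list above is driven by one slot loop: Start subscribes to beacon head events and processNewSlot does the per-slot work, dropping duplicate or older slots and warning about any missed ones. A minimal, self-contained sketch of that pattern (HeadEvent and the printed task body are hypothetical stand-ins, not the real beaconclient types):

```go
// Illustrative sketch only: the head-event-driven loop used by the housekeeper
// (subscribe to head events, drop duplicate/older slots, warn about missed
// slots, then run the per-slot tasks). HeadEvent and the printed task are
// hypothetical stand-ins for the real beaconclient types and task functions.
package main

import "fmt"

type HeadEvent struct{ Slot uint64 }

func main() {
	events := make(chan HeadEvent)

	// Pretend the beacon client emits head events, including a duplicate
	// and an out-of-order slot.
	go func() {
		for _, s := range []uint64{100, 101, 101, 104, 103} {
			events <- HeadEvent{Slot: s}
		}
		close(events)
	}()

	var headSlot uint64
	for ev := range events {
		if ev.Slot <= headSlot {
			continue // duplicate or older slot
		}
		if headSlot > 0 {
			for s := headSlot + 1; s < ev.Slot; s++ {
				fmt.Printf("missed slot: %d\n", s)
			}
		}
		headSlot = ev.Slot
		fmt.Printf("slot %d: update proposer duties, save stats, ...\n", headSlot)
	}
}
```

The real service keeps the same ordering guarantee by consuming the head-event channel in a single loop and spinning the heavier work (such as the proposer-duty update) off into goroutines.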
-package housekeeper - -import ( - "errors" - "net/http" - _ "net/http/pprof" - "sort" - "strconv" - "strings" - "time" - - builderApiV1 "github.com/attestantio/go-builder-client/api/v1" - "github.com/flashbots/mev-boost-relay/beaconclient" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/gorilla/mux" - "github.com/sirupsen/logrus" - uberatomic "go.uber.org/atomic" -) - -type HousekeeperOpts struct { - Log *logrus.Entry - Redis *datastore.RedisCache - DB database.IDatabaseService - BeaconClient beaconclient.IMultiBeaconClient - - PprofAPI bool - PprofListenAddress string -} - -type Housekeeper struct { - opts *HousekeeperOpts - log *logrus.Entry - - redis *datastore.RedisCache - db database.IDatabaseService - beaconClient beaconclient.IMultiBeaconClient - - pprofAPI bool - pprofListenAddress string - - isStarted uberatomic.Bool - isUpdatingProposerDuties uberatomic.Bool - proposerDutiesSlot uint64 - - headSlot uberatomic.Uint64 - - proposersAlreadySaved map[uint64]string // to avoid repeating redis writes -} - -var ErrServerAlreadyStarted = errors.New("server was already started") - -func NewHousekeeper(opts *HousekeeperOpts) *Housekeeper { - server := &Housekeeper{ - opts: opts, - log: opts.Log, - redis: opts.Redis, - db: opts.DB, - beaconClient: opts.BeaconClient, - pprofAPI: opts.PprofAPI, - pprofListenAddress: opts.PprofListenAddress, - proposersAlreadySaved: make(map[uint64]string), - } - - return server -} - -// Start starts the housekeeper service, blocking -func (hk *Housekeeper) Start() (err error) { - defer hk.isStarted.Store(false) - if hk.isStarted.Swap(true) { - return ErrServerAlreadyStarted - } - - // Get best beacon-node status by head slot, process current slot and start slot updates - bestSyncStatus, err := hk.beaconClient.BestSyncStatus() - if err != nil { - return err - } - - // Start pprof API, if requested - if hk.pprofAPI { - go hk.startPprofAPI() - } - - // Start initial tasks - go hk.updateValidatorRegistrationsInRedis() - - // Process the current slot - hk.processNewSlot(bestSyncStatus.HeadSlot) - - // Start regular slot updates - c := make(chan beaconclient.HeadEventData) - hk.beaconClient.SubscribeToHeadEvents(c) - for { - headEvent := <-c - hk.processNewSlot(headEvent.Slot) - } -} - -func (hk *Housekeeper) startPprofAPI() { - r := mux.NewRouter() - hk.log.Infof("Starting pprof API at %s", hk.pprofListenAddress) - r.PathPrefix("/debug/pprof/").Handler(http.DefaultServeMux) - srv := http.Server{ //nolint:gosec - Addr: hk.pprofListenAddress, - Handler: r, - } - err := srv.ListenAndServe() - if err != nil { - hk.log.WithError(err).Error("failed to start pprof API") - } -} - -func (hk *Housekeeper) processNewSlot(headSlot uint64) { - prevHeadSlot := hk.headSlot.Load() - if headSlot <= prevHeadSlot { - return - } - hk.headSlot.Store(headSlot) - - log := hk.log.WithFields(logrus.Fields{ - "headSlot": headSlot, - "headSlotPos": common.SlotPos(headSlot), - "prevHeadSlot": prevHeadSlot, - }) - - // Print any missed slots - if prevHeadSlot > 0 { - for s := prevHeadSlot + 1; s < headSlot; s++ { - log.WithField("missedSlot", s).Warnf("missed slot: %d", s) - } - } - - // Update proposer duties - go hk.updateProposerDuties(headSlot) - - // Set headSlot in redis (for the website) - err := hk.redis.SetStats(datastore.RedisStatsFieldLatestSlot, headSlot) - if err != nil { - log.WithError(err).Error("failed to set stats") - } - - currentEpoch := headSlot / 
common.SlotsPerEpoch - log.WithFields(logrus.Fields{ - "epoch": currentEpoch, - "slotStartNextEpoch": (currentEpoch + 1) * common.SlotsPerEpoch, - }).Infof("updated headSlot to %d", headSlot) -} - -func (hk *Housekeeper) updateProposerDuties(headSlot uint64) { - // Should only happen once at a time - if hk.isUpdatingProposerDuties.Swap(true) { - return - } - defer hk.isUpdatingProposerDuties.Store(false) - - slotsForHalfAnEpoch := common.SlotsPerEpoch / 2 - if headSlot%slotsForHalfAnEpoch != 0 && headSlot-hk.proposerDutiesSlot < slotsForHalfAnEpoch { - return - } - - epoch := headSlot / common.SlotsPerEpoch - - log := hk.log.WithFields(logrus.Fields{ - "epochFrom": epoch, - "epochTo": epoch + 1, - }) - log.Debug("updating proposer duties...") - - // Query current epoch - r, err := hk.beaconClient.GetProposerDuties(epoch) - if err != nil { - log.WithError(err).Error("failed to get proposer duties for all beacon nodes") - return - } - entries := r.Data - - // Query next epoch - r2, err := hk.beaconClient.GetProposerDuties(epoch + 1) - if err != nil { - log.WithError(err).Error("failed to get proposer duties for next epoch for all beacon nodes") - } else if r2 != nil { - entries = append(entries, r2.Data...) - } - - // Get registrations from database - pubkeys := []string{} - for _, entry := range entries { - pubkeys = append(pubkeys, entry.Pubkey) - } - validatorRegistrationEntries, err := hk.db.GetValidatorRegistrationsForPubkeys(pubkeys) - if err != nil { - log.WithError(err).Error("failed to get validator registrations") - return - } - - // Convert db entries to signed validator registration type - signedValidatorRegistrations := make(map[string]*builderApiV1.SignedValidatorRegistration) - for _, regEntry := range validatorRegistrationEntries { - signedEntry, err := regEntry.ToSignedValidatorRegistration() - if err != nil { - log.WithError(err).Error("failed to convert validator registration entry to signed validator registration") - continue - } - signedValidatorRegistrations[regEntry.Pubkey] = signedEntry - } - - // Prepare proposer duties - proposerDuties := []common.BuilderGetValidatorsResponseEntry{} - for _, duty := range entries { - reg := signedValidatorRegistrations[duty.Pubkey] - if reg != nil { - proposerDuties = append(proposerDuties, common.BuilderGetValidatorsResponseEntry{ - Slot: duty.Slot, - ValidatorIndex: duty.ValidatorIndex, - Entry: reg, - }) - } - } - - // Save duties to Redis - err = hk.redis.SetProposerDuties(proposerDuties) - if err != nil { - log.WithError(err).Error("failed to set proposer duties") - return - } - hk.proposerDutiesSlot = headSlot - - // Pretty-print - _duties := make([]string, len(proposerDuties)) - for i, duty := range proposerDuties { - _duties[i] = strconv.FormatUint(duty.Slot, 10) - } - sort.Strings(_duties) - log.WithField("numDuties", len(_duties)).Infof("proposer duties updated: %s", strings.Join(_duties, ", ")) -} - -// updateValidatorRegistrationsInRedis saves all latest validator registrations from the database to Redis -func (hk *Housekeeper) updateValidatorRegistrationsInRedis() { - regs, err := hk.db.GetLatestValidatorRegistrations(true) - if err != nil { - hk.log.WithError(err).Error("failed to get latest validator registrations") - return - } - - hk.log.Infof("updating %d validator registrations in Redis...", len(regs)) - timeStarted := time.Now() - - for _, reg := range regs { - err = hk.redis.SetValidatorRegistrationTimestampIfNewer(common.NewPubkeyHex(reg.Pubkey), reg.Timestamp) - if err != nil { - 
hk.log.WithError(err).Error("failed to set validator registration") - continue - } - } - hk.log.Infof("updating %d validator registrations in Redis done - %f sec", len(regs), time.Since(timeStarted).Seconds()) -} diff --git a/mev-boost-relay/services/website/html.go b/mev-boost-relay/services/website/html.go deleted file mode 100644 index 568706319..000000000 --- a/mev-boost-relay/services/website/html.go +++ /dev/null @@ -1,81 +0,0 @@ -package website - -import ( - _ "embed" - "math/big" - "text/template" - - "github.com/flashbots/mev-boost-relay/database" - "golang.org/x/text/cases" - "golang.org/x/text/language" - "golang.org/x/text/message" -) - -var ( - // Printer for pretty printing numbers - printer = message.NewPrinter(language.English) - - // Caser is used for casing strings - caser = cases.Title(language.English) -) - -type StatusHTMLData struct { //nolint:musttag - Network string - RelayPubkey string - ValidatorsTotal uint64 - ValidatorsRegistered uint64 - BellatrixForkVersion string - CapellaForkVersion string - GenesisForkVersion string - GenesisValidatorsRoot string - BuilderSigningDomain string - BeaconProposerSigningDomain string - HeadSlot uint64 - NumPayloadsDelivered uint64 - Payloads []*database.DeliveredPayloadEntry - - ValueLink string - ValueOrderIcon string - - ShowConfigDetails bool - LinkBeaconchain string - LinkEtherscan string - LinkDataAPI string - RelayURL string -} - -func weiToEth(wei string) string { - weiBigInt := new(big.Int) - weiBigInt.SetString(wei, 10) - ethValue := weiBigIntToEthBigFloat(weiBigInt) - return ethValue.String() -} - -func weiBigIntToEthBigFloat(wei *big.Int) (ethValue *big.Float) { - // wei / 10^18 - fbalance := new(big.Float) - fbalance.SetString(wei.String()) - ethValue = new(big.Float).Quo(fbalance, big.NewFloat(1e18)) - return -} - -func prettyInt(i uint64) string { - return printer.Sprintf("%d", i) -} - -func caseIt(s string) string { - return caser.String(s) -} - -var funcMap = template.FuncMap{ - "weiToEth": weiToEth, - "prettyInt": prettyInt, - "caseIt": caseIt, -} - -//go:embed website.html -var htmlContent string - -func ParseIndexTemplate() (*template.Template, error) { - return template.New("index").Funcs(funcMap).Parse(htmlContent) -} diff --git a/mev-boost-relay/services/website/website.go b/mev-boost-relay/services/website/website.go deleted file mode 100644 index 81fb869e1..000000000 --- a/mev-boost-relay/services/website/website.go +++ /dev/null @@ -1,276 +0,0 @@ -// Package website contains the service delivering the website -package website - -import ( - "bytes" - "errors" - "net/http" - _ "net/http/pprof" - "os" - "strconv" - "sync" - "text/template" - "time" - - "github.com/NYTimes/gziphandler" - "github.com/ethereum/go-ethereum/common/hexutil" - "github.com/thedevbirb/flashbots-go-utils/httplogger" - "github.com/flashbots/mev-boost-relay/common" - "github.com/flashbots/mev-boost-relay/database" - "github.com/flashbots/mev-boost-relay/datastore" - "github.com/go-redis/redis/v9" - "github.com/gorilla/mux" - "github.com/sirupsen/logrus" - "github.com/tdewolff/minify" - "github.com/tdewolff/minify/html" - uberatomic "go.uber.org/atomic" -) - -var ( - ErrServerAlreadyStarted = errors.New("server was already started") - EnablePprof = os.Getenv("PPROF") == "1" -) - -type WebserverOpts struct { - ListenAddress string - RelayPubkeyHex string - NetworkDetails *common.EthNetworkDetails - Redis *datastore.RedisCache - DB *database.DatabaseService - Log *logrus.Entry - - ShowConfigDetails bool - LinkBeaconchain string - 
LinkEtherscan string - LinkDataAPI string - RelayURL string -} - -type Webserver struct { - opts *WebserverOpts - log *logrus.Entry - - redis *datastore.RedisCache - db *database.DatabaseService - - srv *http.Server - srvStarted uberatomic.Bool - - indexTemplate *template.Template - statusHTMLData StatusHTMLData - rootResponseLock sync.RWMutex - - htmlDefault *[]byte - htmlByValueDesc *[]byte - htmlByValueAsc *[]byte - - minifier *minify.M -} - -func NewWebserver(opts *WebserverOpts) (*Webserver, error) { - var err error - - minifier := minify.New() - minifier.AddFunc("text/css", html.Minify) - minifier.AddFunc("text/html", html.Minify) - - server := &Webserver{ - opts: opts, - log: opts.Log, - redis: opts.Redis, - db: opts.DB, - - htmlDefault: &[]byte{}, - htmlByValueDesc: &[]byte{}, - htmlByValueAsc: &[]byte{}, - - minifier: minifier, - } - - server.indexTemplate, err = ParseIndexTemplate() - if err != nil { - return nil, err - } - - server.statusHTMLData = StatusHTMLData{ - Network: opts.NetworkDetails.Name, - RelayPubkey: opts.RelayPubkeyHex, - ValidatorsTotal: 0, - ValidatorsRegistered: 0, - BellatrixForkVersion: opts.NetworkDetails.BellatrixForkVersionHex, - CapellaForkVersion: opts.NetworkDetails.CapellaForkVersionHex, - GenesisForkVersion: opts.NetworkDetails.GenesisForkVersionHex, - GenesisValidatorsRoot: opts.NetworkDetails.GenesisValidatorsRootHex, - BuilderSigningDomain: hexutil.Encode(opts.NetworkDetails.DomainBuilder[:]), - BeaconProposerSigningDomain: hexutil.Encode(opts.NetworkDetails.DomainBeaconProposerBellatrix[:]), - HeadSlot: 0, - NumPayloadsDelivered: 0, - Payloads: []*database.DeliveredPayloadEntry{}, - ValueLink: "", - ValueOrderIcon: "", - ShowConfigDetails: opts.ShowConfigDetails, - LinkBeaconchain: opts.LinkBeaconchain, - LinkEtherscan: opts.LinkEtherscan, - LinkDataAPI: opts.LinkDataAPI, - RelayURL: opts.RelayURL, - } - - return server, nil -} - -func (srv *Webserver) StartServer() (err error) { - if srv.srvStarted.Swap(true) { - return ErrServerAlreadyStarted - } - - // Start background task to regularly update status HTML data - go func() { - for { - srv.updateHTML() - time.Sleep(10 * time.Second) - } - }() - - srv.srv = &http.Server{ - Addr: srv.opts.ListenAddress, - Handler: srv.getRouter(), - - ReadTimeout: 600 * time.Millisecond, - ReadHeaderTimeout: 400 * time.Millisecond, - WriteTimeout: 3 * time.Second, - IdleTimeout: 3 * time.Second, - } - - err = srv.srv.ListenAndServe() - if errors.Is(err, http.ErrServerClosed) { - return nil - } - return err -} - -func (srv *Webserver) getRouter() http.Handler { - r := mux.NewRouter() - r.HandleFunc("/", srv.handleRoot).Methods(http.MethodGet) - if EnablePprof { - srv.log.Info("pprof API enabled") - r.PathPrefix("/debug/pprof/").Handler(http.DefaultServeMux) - } - - loggedRouter := httplogger.LoggingMiddlewareLogrus(srv.log, r) - withGz := gziphandler.GzipHandler(loggedRouter) - return withGz -} - -func (srv *Webserver) updateHTML() { - _numRegistered, err := srv.db.NumRegisteredValidators() - if err != nil { - srv.log.WithError(err).Error("error getting number of registered validators in updateStatusHTMLData") - } - - payloads, err := srv.db.GetRecentDeliveredPayloads(database.GetPayloadsFilters{Limit: 30}) - if err != nil { - srv.log.WithError(err).Error("error getting recent payloads") - } - - payloadsByValueDesc, err := srv.db.GetRecentDeliveredPayloads(database.GetPayloadsFilters{Limit: 30, OrderByValue: -1}) - if err != nil { - srv.log.WithError(err).Error("error getting recent payloads") - } - - 
payloadsByValueAsc, err := srv.db.GetRecentDeliveredPayloads(database.GetPayloadsFilters{Limit: 30, OrderByValue: 1}) - if err != nil { - srv.log.WithError(err).Error("error getting recent payloads") - } - - _numPayloadsDelivered, err := srv.db.GetNumDeliveredPayloads() - if err != nil { - srv.log.WithError(err).Error("error getting number of delivered payloads") - } - - _latestSlot, err := srv.redis.GetStats(datastore.RedisStatsFieldLatestSlot) - if err != nil && !errors.Is(err, redis.Nil) { - srv.log.WithError(err).Error("error getting latest slot") - } - _latestSlotInt, _ := strconv.ParseUint(_latestSlot, 10, 64) - if len(payloads) > 0 && payloads[0].Slot > _latestSlotInt { - _latestSlotInt = payloads[0].Slot - } - - _validatorsTotal, err := srv.redis.GetStats(datastore.RedisStatsFieldValidatorsTotal) - if err != nil && !errors.Is(err, redis.Nil) { - srv.log.WithError(err).Error("error getting latest stats: validators_total") - } - _validatorsTotalInt, _ := strconv.ParseUint(_validatorsTotal, 10, 64) - - srv.statusHTMLData.ValidatorsTotal = _validatorsTotalInt - srv.statusHTMLData.ValidatorsRegistered = _numRegistered - srv.statusHTMLData.NumPayloadsDelivered = _numPayloadsDelivered - srv.statusHTMLData.HeadSlot = _latestSlotInt - - // Now generate the HTML - htmlDefault := bytes.Buffer{} - htmlByValueDesc := bytes.Buffer{} - htmlByValueAsc := bytes.Buffer{} - - // default view - srv.statusHTMLData.Payloads = payloads - srv.statusHTMLData.ValueLink = "/?order_by=-value" - srv.statusHTMLData.ValueOrderIcon = "" - if err := srv.indexTemplate.Execute(&htmlDefault, srv.statusHTMLData); err != nil { - srv.log.WithError(err).Error("error rendering template") - } - - // descending order view - srv.statusHTMLData.Payloads = payloadsByValueDesc - srv.statusHTMLData.ValueLink = "/?order_by=value" - srv.statusHTMLData.ValueOrderIcon = " " - if err := srv.indexTemplate.Execute(&htmlByValueDesc, srv.statusHTMLData); err != nil { - srv.log.WithError(err).Error("error rendering template (by value)") - } - - // ascending order view - srv.statusHTMLData.Payloads = payloadsByValueAsc - srv.statusHTMLData.ValueLink = "/" - srv.statusHTMLData.ValueOrderIcon = " " - if err := srv.indexTemplate.Execute(&htmlByValueAsc, srv.statusHTMLData); err != nil { - srv.log.WithError(err).Error("error rendering template (by -value)") - } - - // Minify - htmlDefaultBytes, err := srv.minifier.Bytes("text/html", htmlDefault.Bytes()) - if err != nil { - srv.log.WithError(err).Error("error minifying htmlDefault") - } - htmlValueDescBytes, err := srv.minifier.Bytes("text/html", htmlByValueDesc.Bytes()) - if err != nil { - srv.log.WithError(err).Error("error minifying htmlByValueDesc") - } - htmlValueDescAsc, err := srv.minifier.Bytes("text/html", htmlByValueAsc.Bytes()) - if err != nil { - srv.log.WithError(err).Error("error minifying htmlByValueAsc") - } - - // Swap the html pointers - srv.rootResponseLock.Lock() - srv.htmlDefault = &htmlDefaultBytes - srv.htmlByValueDesc = &htmlValueDescBytes - srv.htmlByValueAsc = &htmlValueDescAsc - srv.rootResponseLock.Unlock() -} - -func (srv *Webserver) handleRoot(w http.ResponseWriter, req *http.Request) { - var err error - - srv.rootResponseLock.RLock() - defer srv.rootResponseLock.RUnlock() - if req.URL.Query().Get("order_by") == "-value" { - _, err = w.Write(*srv.htmlByValueDesc) - } else if req.URL.Query().Get("order_by") == "value" { - _, err = w.Write(*srv.htmlByValueAsc) - } else { - _, err = w.Write(*srv.htmlDefault) - } - if err != nil { - srv.log.WithError(err).Error("error 
writing template") - } -} diff --git a/mev-boost-relay/services/website/website.html b/mev-boost-relay/services/website/website.html deleted file mode 100644 index 977af627b..000000000 --- a/mev-boost-relay/services/website/website.html +++ /dev/null @@ -1,300 +0,0 @@
[website.html (300 deleted lines; markup not recoverable): relay status page template titled "Flashbots MEV-Boost Relay - {{ .Network | caseIt }}". Header "Flashbots Boost Relay - {{ .Network | caseIt }}"; optional relay-URL block showing {{ .RelayURL }} with a "Copy Relay URL to clipboard" button; optional configuration section listing {{ .RelayPubkey }}, {{ .CapellaForkVersion }}, {{ .BellatrixForkVersion }}, {{ .GenesisForkVersion }}, {{ .GenesisValidatorsRoot }}, {{ .BuilderSigningDomain }} and {{ .BeaconProposerSigningDomain }}; a Stats table with "Validators active or in queue" {{ .ValidatorsTotal | prettyInt }}, "Validators registered (all time)" {{ .ValidatorsRegistered | prettyInt }} and "Latest slot" {{ .HeadSlot | prettyInt }}; a "Recently Delivered Payloads" table that ranges over .Payloads showing epoch, slot, block number, value in ETH ({{ .Value | weiToEth }}, sortable via {{ .ValueLink }} and {{ .ValueOrderIcon }}), num txs, blobs and block hash, with optional beaconchain/etherscan links; a "{{ .NumPayloadsDelivered | prettyInt }} payloads delivered" line; and Data API / Bulk Data / Docs links.]
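All of the helpers referenced in this template come from html.go: ParseIndexTemplate installs weiToEth, prettyInt and caseIt in a template.FuncMap before parsing the embedded page, and updateHTML re-executes the parsed template with fresh StatusHTMLData roughly every 10 seconds. A minimal sketch of that wiring, assuming a hypothetical one-line template in place of the real website.html:

```go
// Illustrative sketch only: how the FuncMap helpers from html.go plug into
// text/template. The one-line template and the sample data are hypothetical;
// weiToEth mirrors the deleted implementation (wei / 10^18).
package main

import (
	"math/big"
	"os"
	"text/template"

	"golang.org/x/text/cases"
	"golang.org/x/text/language"
	"golang.org/x/text/message"
)

var (
	printer = message.NewPrinter(language.English)
	caser   = cases.Title(language.English)
)

func weiToEth(wei string) string {
	weiBigInt := new(big.Int)
	weiBigInt.SetString(wei, 10)
	fbalance := new(big.Float)
	fbalance.SetString(weiBigInt.String())
	return new(big.Float).Quo(fbalance, big.NewFloat(1e18)).String()
}

func main() {
	funcMap := template.FuncMap{
		"weiToEth":  weiToEth,
		"prettyInt": func(i uint64) string { return printer.Sprintf("%d", i) },
		"caseIt":    func(s string) string { return caser.String(s) },
	}

	// Hypothetical mini-template standing in for website.html.
	const page = "{{ .Network | caseIt }}: slot {{ .HeadSlot | prettyInt }}, last payload {{ .Value | weiToEth }} ETH\n"

	data := struct {
		Network  string
		HeadSlot uint64
		Value    string // wei, as a decimal string
	}{Network: "mainnet", HeadSlot: 8935745, Value: "27496006531531316"}

	tmpl := template.Must(template.New("index").Funcs(funcMap).Parse(page))
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
```

Only the inline template and the sample data here are invented; the FuncMap names and the wei-to-ETH conversion follow the deleted html.go.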
diff --git a/mev-boost-relay/static/s3/index.html b/mev-boost-relay/static/s3/index.html deleted file mode 100644 index b53686f29..000000000 --- a/mev-boost-relay/static/s3/index.html +++ /dev/null @@ -1,238 +0,0 @@
[static/s3/index.html (238 deleted lines; markup not recoverable): a "Flashbots Boost Relay - Data" page that links to the file listing in XML format.]
- - - - \ No newline at end of file diff --git a/mev-boost-relay/staticcheck.conf b/mev-boost-relay/staticcheck.conf deleted file mode 100644 index 528438b97..000000000 --- a/mev-boost-relay/staticcheck.conf +++ /dev/null @@ -1 +0,0 @@ -checks = ["all"] diff --git a/mev-boost-relay/testdata/executionPayloadAndBlobsBundleDeneb_Goerli.json.gz b/mev-boost-relay/testdata/executionPayloadAndBlobsBundleDeneb_Goerli.json.gz deleted file mode 100644 index 699558694..000000000 Binary files a/mev-boost-relay/testdata/executionPayloadAndBlobsBundleDeneb_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/executionPayloadCapella_Goerli.json.gz b/mev-boost-relay/testdata/executionPayloadCapella_Goerli.json.gz deleted file mode 100644 index 312803e58..000000000 Binary files a/mev-boost-relay/testdata/executionPayloadCapella_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/getHeaderResponseCapella_Mainnet.json b/mev-boost-relay/testdata/getHeaderResponseCapella_Mainnet.json deleted file mode 100644 index cdb6af380..000000000 --- a/mev-boost-relay/testdata/getHeaderResponseCapella_Mainnet.json +++ /dev/null @@ -1,27 +0,0 @@ -{ - "version": "capella", - "data": { - "message": { - "header": { - "parent_hash": "0xfa98f9121134c059d3c180815cb3a0455870a4b8254c729d490970ed9ba6a34e", - "fee_recipient": "0xb636A68F834B4D75aF9EDC5FB0138bB4758eD293", - "state_root": "0x2deedbe6351b82bfea1129e9bd26207bca377d0168a4447ce1340690460d3631", - "receipts_root": "0xedf52431d776813fb4e7a2f0d250bfd05e3b34bde334e7d258478961f9cc42b4", - "logs_bloom": "0xc026020000926000000004d8d01100311060000200001815000e040088010182102c45600001205000300009500400002464000483102a640a000eccc0241c00908040002c49020c121e120a004a0020a88442002048b040013811168814001801084010530200020441118010081c0010210e4201c0650880204210080812007000028288c0202a20010884900100000008541102191808252081d8140201000e08001048240251400001200014c00400894005208622100404c08000b010023020400300402401200229c0840a14485080d04046c82210a0030422000160880050000020000214201a08a22a0000200400c63c6f11205100224000008c0280", - "prev_randao": "0x3c581f8dd1fcf1c34680e1c53131ce715e1e13d93e4c12bf1609a7a38678e7ec", - "block_number": "8935745", - "gas_limit": "30000000", - "gas_used": "5856634", - "timestamp": "1683133332", - "extra_data": "0x496c6c756d696e61746520446d6f63726174697a6520447374726962757465", - "base_fee_per_gas": "48921829295", - "block_hash": "0x5ff3ec146a9ce847be4341371b7dedaaf25f24bf906a77d47ebac7242082382e", - "transactions_root": "0x9a1a61a9c22b306f22e1347185b47eb6f8e082709a13371e442ff28d76e597b1", - "withdrawals_root": "0x2772adcdf63fd67f16166938713b64da0adf5f64e82d3be7d972107ad0e7ea0f" - }, - "value": "27496006531531316", - "pubkey": "0xafa4c6985aa049fb79dd37010438cfebeb0f2bd42b115b89dd678dab0670c1de38da0c4e9138c9290a398ecd9a0b3110" - }, - "signature": "0x942d85822e86a182b0a535361b379015a03e5ce4416863d3baa46b42eef06f070462742b79fbc77c0802699ba6d2ab0011740dad6bfcf05b1f15c5a11687ae2aa6a08c03ad1ff749d7a48e953d13b5d7c2bd1da4cfcf30ba6d918b587d6525f0" - } -} \ No newline at end of file diff --git a/mev-boost-relay/testdata/getHeaderResponseCapella_Mainnet.ssz b/mev-boost-relay/testdata/getHeaderResponseCapella_Mainnet.ssz deleted file mode 100644 index db0572742..000000000 Binary files a/mev-boost-relay/testdata/getHeaderResponseCapella_Mainnet.ssz and /dev/null differ diff --git a/mev-boost-relay/testdata/getHeaderResponseDeneb_Goerli.json b/mev-boost-relay/testdata/getHeaderResponseDeneb_Goerli.json deleted file mode 100644 index 
a89f13f1e..000000000 --- a/mev-boost-relay/testdata/getHeaderResponseDeneb_Goerli.json +++ /dev/null @@ -1,37 +0,0 @@ -{ - "version": "deneb", - "data": { - "message": { - "header": { - "parent_hash": "0xab0deb2741c1fad04e79b3a46080a65faf9947f19d597ce200067015c6e918da", - "fee_recipient": "0xb636A68F834B4D75aF9EDC5FB0138bB4758eD293", - "state_root": "0xfd956133d063dbd2fc494fa135025411ebf244048b16551ceda91ae72ecf318c", - "receipts_root": "0x1881520b7ccfcf14adc70b5ec3769c855a8b229f199f4bb5bfbb5b9e9c56dd1b", - "logs_bloom": "0x80010013100000200a0c000001010448408808010200404400020002000605004200900604042000500008400000004000c65240243321040018875052640000000a0046a40010080008061de2228001c00000100340209080088004000000a0013200014680400000001880800048520d041c040000000c001000150088080894040a20000008892000028903000041c0010480801410c6807131104440000003089051110000000b0301c08001000983170d03010002c000440224260001204080c00318004040000220164014030824400000008000000000000200042118051003000810804d2802000080800801020010c88200004508080000120e0000", - "prev_randao": "0xbf95f66969756951e11ef4ae6c1b9b57e1f3f0e6c63b3d3efd20135ae27ab0ef", - "block_number": "10402031", - "gas_limit": "30000000", - "gas_used": "21146510", - "timestamp": "1705708476", - "extra_data": "0x496c6c756d696e61746520446d6f63726174697a6520447374726962757465", - "base_fee_per_gas": "296", - "block_hash": "0xc3c5eb1a2725c1065488ba4491f3597b633c945a08301ae9f07603993334a318", - "transactions_root": "0x297d8366756cec251945166e19d1f54e67002a28422558e76ead5db3677798f9", - "withdrawals_root": "0x70ab14824381c971b39cad93b2066b8e9389c89d683ccdb3b56a2ff911b4ff68", - "blob_gas_used": "786432", - "excess_blob_gas": "86769664" - }, - "blob_kzg_commitments": [ - "0x81a51fd61ec96b1aea13f02cd97f1f9fee16c9cde185913753a3f5a5041cee44a15ee8d44ea9dc91721fc7fc660bbdb4", - "0x94b5bad83bfc5ae5497642091c7b7772e3093dde8b08e1f82c9b6038088a8f8c31e5cc4d6e791a2ff1f6d68d69454641", - "0xaa61b98d8d0712768c4cd135592d59ac78c74e0a4af9a6ef15930dfc79956aea82dbe5678076a3864741be4bcd85bac2", - "0x856e43253dab176c81619eb4e559b2f7d609b2fed3cb1d14fe310e9d57fc32b1c155128a328e09b22eaf6599857ce6af", - "0x9314c48e5550108cbb324ad4c977162df4c352ead7f7622ddffdfac2d6ead14c78c5809a6f3e03563e9892c4c7bffdb4", - "0x88d99876da8d398e067c25612b5154deec197b5b88d7f0acee3f977ddc992aa5fdf59e8d126ef4fad8545fbb671aa5d5" - ], - "value": "37851860177112740", - "pubkey": "0xafa4c6985aa049fb79dd37010438cfebeb0f2bd42b115b89dd678dab0670c1de38da0c4e9138c9290a398ecd9a0b3110" - }, - "signature": "0xae22615ca70b0a93b58f43fc36a6fb50226ef510e81332566f430143a8ec5fb05c580cbf4c2e623fdec09878b955ff6709189acd859d15e8656860968bb5c7753936fe45634fd61b18a58bb6a4969aa126437dd19e984e5a726edf1c8f556ba0" - } -} diff --git a/mev-boost-relay/testdata/getHeaderResponseDeneb_Goerli.ssz b/mev-boost-relay/testdata/getHeaderResponseDeneb_Goerli.ssz deleted file mode 100644 index cd2dd69f4..000000000 Binary files a/mev-boost-relay/testdata/getHeaderResponseDeneb_Goerli.ssz and /dev/null differ diff --git a/mev-boost-relay/testdata/signedBeaconBlockCapella_Goerli.json.gz b/mev-boost-relay/testdata/signedBeaconBlockCapella_Goerli.json.gz deleted file mode 100644 index 803c58bcf..000000000 Binary files a/mev-boost-relay/testdata/signedBeaconBlockCapella_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/signedBeaconBlockContentsDeneb_Goerli.json.gz b/mev-boost-relay/testdata/signedBeaconBlockContentsDeneb_Goerli.json.gz deleted file mode 100644 index 902bbea61..000000000 Binary files 
a/mev-boost-relay/testdata/signedBeaconBlockContentsDeneb_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/signedBlindedBeaconBlockCapella_Goerli.json.gz b/mev-boost-relay/testdata/signedBlindedBeaconBlockCapella_Goerli.json.gz deleted file mode 100644 index ded4a04ea..000000000 Binary files a/mev-boost-relay/testdata/signedBlindedBeaconBlockCapella_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/signedBlindedBeaconBlockDeneb_Goerli.json.gz b/mev-boost-relay/testdata/signedBlindedBeaconBlockDeneb_Goerli.json.gz deleted file mode 100644 index 75ae8ed0c..000000000 Binary files a/mev-boost-relay/testdata/signedBlindedBeaconBlockDeneb_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/submitBlockPayload.json.gz b/mev-boost-relay/testdata/submitBlockPayload.json.gz deleted file mode 100644 index 473086a63..000000000 Binary files a/mev-boost-relay/testdata/submitBlockPayload.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/submitBlockPayloadCapella_Goerli.json.gz b/mev-boost-relay/testdata/submitBlockPayloadCapella_Goerli.json.gz deleted file mode 100644 index 20be4f0c6..000000000 Binary files a/mev-boost-relay/testdata/submitBlockPayloadCapella_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/submitBlockPayloadCapella_Goerli.ssz.gz b/mev-boost-relay/testdata/submitBlockPayloadCapella_Goerli.ssz.gz deleted file mode 100644 index 0cfe19136..000000000 Binary files a/mev-boost-relay/testdata/submitBlockPayloadCapella_Goerli.ssz.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/submitBlockPayloadDeneb_Goerli.json.gz b/mev-boost-relay/testdata/submitBlockPayloadDeneb_Goerli.json.gz deleted file mode 100644 index 5337ef06e..000000000 Binary files a/mev-boost-relay/testdata/submitBlockPayloadDeneb_Goerli.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/submitBlockPayloadDeneb_Goerli.ssz.gz b/mev-boost-relay/testdata/submitBlockPayloadDeneb_Goerli.ssz.gz deleted file mode 100644 index 10b5b6fb8..000000000 Binary files a/mev-boost-relay/testdata/submitBlockPayloadDeneb_Goerli.ssz.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/valreg0.json b/mev-boost-relay/testdata/valreg0.json deleted file mode 100644 index c41e2cb71..000000000 --- a/mev-boost-relay/testdata/valreg0.json +++ /dev/null @@ -1,11 +0,0 @@ -[ - { - "message": { - "fee_recipient": "0x7ba68795465c94160ff2771a61052049ef1e74d1", - "gas_limit": "30000000", - "timestamp": "1656684360", - "pubkey": "0xb824bf6975d4da5d69193a72bd0bb13af673003f1d4e4e0950f6b232c904c2c32e4164363e5b2796df933364dfc8ed7f" - }, - "signature": "0x831d8cb1581b4507d0c0070446b08a550b5c266e2319c1f85db618a7b85b3b0217aa3fe752ad574b322b0ab5f1ac500d0d9ca485bc00338377423fac02c1041c4ff08abc3d6cd15a57d0d7d57b30c387f4019a4c1440c093fe6d81ed63fa970d" - } -] \ No newline at end of file diff --git a/mev-boost-relay/testdata/valreg1.json b/mev-boost-relay/testdata/valreg1.json deleted file mode 100644 index 2af73440f..000000000 --- a/mev-boost-relay/testdata/valreg1.json +++ /dev/null @@ -1 +0,0 @@ 
-[{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb824bf6975d4da5d69193a72bd0bb13af673003f1d4e4e0950f6b232c904c2c32e4164363e5b2796df933364dfc8ed7f"},"signature":"0x831d8cb1581b4507d0c0070446b08a550b5c266e2319c1f85db618a7b85b3b0217aa3fe752ad574b322b0ab5f1ac500d0d9ca485bc00338377423fac02c1041c4ff08abc3d6cd15a57d0d7d57b30c387f4019a4c1440c093fe6d81ed63fa970d"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb0636afc77a28e690372ef45555af924059eb1922cce3bba999ba198f55d608fc4c435c6961d4c4495fc42c95d59284b"},"signature":"0xb9fc78056ae5cfa1f7c60e8728b2de9692106ad7e6994ea66f29e2a710f4335bef5f86df8426dd276cf67354c7216ec1018e0f5738e45c2c63670df7eecd56fd568a82c68f0bc7659ac309032f84e716791eb8b290f29d5f76e7db3a1883267a"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8f504818e48f93ba079453816da4ee3eb7a81eb56974eb3304acf63f7edcd813300f28fee13a3a1aa932fa8a84d5216b"},"signature":"0xa4d6e55b5501ed66ca2bce59f7bf54e2bc0adac13ae54b38a97418541307036d5bb01203798d9ff6878ca40d5841cee209cd8604e8c6674bd8a47f860fa503262d4652f2335b644d852f867031cdeb3c023d5824bf5bf2c7e2a0d04c3bfcfbb5"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb78fb0704ae889f3beba2245bd9d0cf8d53c60ea0578b095bb0c666e214b6354b2a49c01f5fe2addeeef8e93872e1695"},"signature":"0x81cc0c421fee915853e17cdd369eef207b4a7718c76fa89cad6e7bf940c2249e8c8dc6b4195a1d99391b11f64c5fa90815c7b41ca5ebad86e569873b235daa5ef118c067874ba2efcc10c4a9dee5d7c511cecab5450ef20ddd7489291a20ecd8"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8fc984782a118acfa9153f4d6d220775a9307a27d5176e202cbafb5726b1b8635a90acf0131f46c274e58fcc4e0d39dd"},"signature":"0x922f2463bd26a0accb517fa1a80c7ea833f15bf6e527c61a21529b3b38600b8c6acbf78237e9709b86c7a2a54bf5fc140f474f3b13ad3cda4572f46d59f6c7a48f799b949a0b4a7688912d941329ce7edea585287e97b09a7e29635fcbbf959e"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x80ecd2020439bc2d3434935b22d46c148beca7fde3743f9537d7902c1a4844bcc3acb2c0aa611c27aee7f2f2f6a0fcf0"},"signature":"0xb7ade653aa3366f1d909181219409fef3d8b0139098dabf70c36414c7a68d063d384e1bab95e8d984c9d889af68ca8ad1832cfd1f0d89e2ad9d957540dd5287ec84cec38a7c27eeb2899eb9230626fac024597316ecd1d1a00463fdc337255dc"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb638ab65a153eb91740ee89f406d85d0b6963bf448ee1346a9a2f84363006d936a2e35dc61d2607ea2ce0d0599f9b282"},"signature":"0x859dc54b5d3034e848836291bd8a3abe4afde2fe0c00cfc13a5be0f746511493e190488ce61c4fa9d4a7a668c92d3dfb0eaf3262fa5567e662a537a0d266032577531e923dbb904824fb1af12a58d48d96f5bc031005514d73f4f3f922430f36"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x875fb8e115b9a76eb0369d1f2c09dee69f312580f9a5155c525ad218d71e2ebe5c8524ad1c8aca064313aadf2874171a"},"signature":"0x840533ba20a59ec304d5b60a74cd2f7c57742724f8eb3bd2b14122bbb7935100cc088dc299acda4b26b0c8652e6b42b2070fe3bb7b9b8d822e41387afcd23f82023e98ff2f3bda6d8dbf8de7126d258fc205aa615274f772b6f4858f3e82a3bb"},{"message":{"fee_
recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x910d0af00b1aa7611ea2afb6d6d2c75fd6d426c4f8f79baf21e947af348bd30d68e63495a340a7fb7409aa742bd8e981"},"signature":"0x80f5df5cf9799d82c480e04e12163c21888eb5d5ad1dbab38b0d3665a3d709c71543624721e460879264b783892bedf20497949ef852b55ffad6a17ecdc85af88c63f0af3e0314ceb68e63589e5337d8eb0eb79ae78de2a206cb6cbc60293844"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x93ed47c2d251f8efba82f78a99d065f20c2442f5520e216466901036db06f81b0274d4d798eb7217062cb210fbd7c1b6"},"signature":"0x836f176850d689112b1822826fb52338d77eadd74748322552b1309c0c1f245760bf0da8f6c25592f92e5ca7ddbb91b916219a7600a97a05de3fa692bc3c364ef8642cb122b8573818e31b40c29988d76018108aa008d73cc5fb320ef3a4b1e6"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa24576baf57a4cd1952106e0af5e512009c41fba7684dfa2d029537450b1287c90c8d5a1f1d5ebc19cde277a6eba0253"},"signature":"0x8751f1043a8b2789092af2ff8995144d2aa5dc6cbbe578616833befc46c4db3e480ac0b11bc560b730b4979d4655900f09535ca65f7b24710ebf3cf6843242fdb1a926c8eb0863ca0f21c1b81c9c8036930365e62c0ec2a12a5d945e0f0b9a29"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb3dfc30a4821aab01c2117a59b6ae9bd75715e06bb4ce6554c8493305306fcac086791833092e5c06a8730bb855a2bf6"},"signature":"0x872b18d46bcd79b47cefbfdb4f004a80eeb11a0e7a8fd8f5e0bf5aa3deb7c4169a5267622c2f258300c1cd88b4b3aeb603ac2e70e22df15ab6429d1d0476ecb5978e89118d8ec405fd12aa30b1b17413f8e926cf735bf9019c87909c307ae6f7"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x97c7681f9403be1548ca906b118103699e652f72ef8244827303b5b7f467e828e92293c2cd0488542ebf0ad5715819d4"},"signature":"0xb3f6601696a70784cf2dfb6b320879bdc44a03c69615c60c02eb82f3605c00cde58ece9f4dbe875b52e845c5ff3e95da150bd8d52d8b4a46d21a1348fae6232880228b06c157fdee8843c7d24a39e974c9e74f55967284877c4f51c1aa659176"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8aa7aa44ef011a4517c44c09cf87badccd7cf5f87c13682d8e2661c2e9910c14f5d96185220b3d7fa55392419de17439"},"signature":"0x834ceb32b1d988c031c4ed31de5922e0447df9fff3706bed2425c2d8680e927f84ec57f3c2042f3e7467e59e70f25197185dae76e591b8e7d50ee00549e67da3eeda1b2ceafb9390a5a20eb31845f4aacdc60e4c9c0db035c2a8bd53568ff5fe"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x92243f83d4b7efb214b8ca011da0e2be2fda9c7498253cb701b66d5e1637a4d5f9ff03f23774aaef97a4bf6fa7c25797"},"signature":"0x83bdc4271e546e7cdaed4278535f135dd69b2d38f46d4b60c00e5216f77065bcc793640409481eff4e9755a04c8245d00631b3ab65736110304e3da8940c9c095089671abc5848f9d14eb3f03b1550ce79506428356cd4f7fbba195d2a92880f"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8afbc0e46fa761e574d9a9e47ab6302aa2ca7a9d9ab54b2fcccf560e5c9365e71338f39043e0d3c1743691865e5b0be0"},"signature":"0x9954fec2856a0640067f48aeb8044daabc1157d528d8e66e2c51dfb1d3982f98410cc205068bc40e8983b111dde844c808d15141a48b9557216c69ec3b1f53b1381e71d74674f35b1145440ac4031f0501b66c7fc673693f48c469deef442b5e"},{"message":{"fee_recipient":"0x7ba68
795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x83a680ca2b042e359fd0b2ed71740ab84f3f8a5f3f74199c53a186741b7285c90f6c21d37bf06c3f5724507461da9239"},"signature":"0xa409cbafb8d13361830658b02e7a7df73a20dcf0bc4a0d16a1ec9a6b067f5a610bb59612a7e19868f34ae21919c0cea3133069236d7ba0ffcc0597fbf5ec1844656ac7c9f9afdaeae20adb7fa4974c51e1b018d9496886fafa3230967b8d6648"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x808ec46ef2fdcfa1968b5094bb3a95c5c5453029154315f3fcf80443d69cc703a4758e7dca792f6bb23caf52a1550e41"},"signature":"0xaf918d6ea9e063811672cdf674f6e934e632d98c56caf061a012d88c280c87f824c0561f1e453c358c3e13c29affb9d60b5399ab9b89232eea92bbadc5619c36e39180f210af11c884c09d520ce09eb2c573fffe6a952377173687ba7c6dc26c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x9749940077ddfb9f30c8fb4d2efef16845430210f54294a7d2012fbfae103de10deeb79fe012154992a656bf395f45cd"},"signature":"0xa3de51b87ee13ae48f4c632c29131582afd9e0de8e5e932a7e100dc2024d4040508ed7dfc5e578730bf584d0d92ddd1316a78a20c351806f8bcd5bc24712e137b9c2ba87961f289b6453f2f000c92c772dbfb1cfdc132d017e4ee319fdb97abb"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa60f3eee5158c78b5301392f2541659e77c3397422493f544cc2152c45bc2a284fc01af250ed6b6f3e0803cac9a661d2"},"signature":"0x86b080939a0c1e64fb4563f5612f94e4e8e1beb28de9e78ae2b028ad24c8129f5a2657267a7c9195e4f25bd58b6d0ea405e1ef13021c25cff5e86c02019c67842335fc6b3b07e73f038ea53f484a867b7e484efab811dc09e30bccc5232b3995"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb11f2493c3fb5d114a8203fc2ffba4b8cfe2451484e1a6e2831b926637a3fd5916a98153d6acb9ba0d4e92f6dad1e14b"},"signature":"0xb528d7082655c250ed8fe92785d37b6f4b7fb8974335a2ed33e908b52e8e21ec296fa8cc6cd2bb7a6e10d91f2484416811aecdbb9e9ea5c51ead0e182b2a2a8c012e225c3fbf38fd08501c7952384ddf788b2fe7bbc0b7d20fcdd4086fc2aa62"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa82de37417774486b6fe6720a130c94a90a5a3ea5fb43900880a5fae1ea1ec26463752a0d7741e8d32320f93ca9ba0a3"},"signature":"0x97aabcd8957999721cde87b161c4a433cce28b42bf04b43f6ff34d207a7cac5a53b1bb9027c6792111a6429dc1b794c8083ac919a2341504b49ce7238f124a560c544a9d8495c6880eec50dc56aaaaef4ce185583b6970055bad5797b7a2de0d"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x9309873d12c1823e909a7491eadb1a76156bdaf6ef75671a58b02ecb47fc2f10f8661f6541c45d5e8e046f8986c0447b"},"signature":"0xb7d1995ece07b18770b3a00410e279ea1ebdfb74f4a880f2f9aabbd8592992f12c72ac05d320561b9d3a2aed2d5bc9300fd527df9184caed50f1b83f0588c47f015431e4d5720abefbe03530401ecb0a5b08cc25263f0fc7c2fb1f4ef6d4f21d"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb316a4f7c2a4998ce02749335ab6356001ef4f232f1f26b5d21ee464ca54464f6ca753fc97929fb83134f409a4964b69"},"signature":"0xb83c5c90998a6bcbf823605ae94af6522a46b20ca56c7f5e5c12c789a364a4e46a0b93eb2d1cd09cd95d7da93351c813124ec7c7f717b00ed14d3458ee911e06cd4b9f90fd3de278f8a6c7840b8d95b659c301e8b345ae36bfbad174181b6f20"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a
61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa3e049b0f30243843e11a4783674d97128f65ef60d58274975dd708c9242cccc2ad94371930a3190564c1b8e493d17b7"},"signature":"0x81afe2cfe3f1e70eec6362fb56204a25811f5821c2698e8257c8e30f344dfb357abfce1975f2a8b1133d24e6f724ba250ae7d110fe44f1111947b49ec389ca837f5710a813396274845a758e93d4a818591272393badb26e85d00712d0ae77ed"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb11883d60b54bea8a473033347dfe6888d4687872da1d702942578ae7fb40047e8ccabdb546ba61bea2573bd1e3d8664"},"signature":"0xad27b62bf5dc649790ae0eac96fc1b4b21c4367dbfbc6e6845ac59ed88f7f92a141c32b59d0923654656d46c77a5ba6607565af79e9a04a6967e9cd93e6fb2afd9cc6cf9b552375991f4ce443fce81f2dfefd78b2c9f10616b3dd6e6692de696"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x9861744497f24a924df49b499e2d9ccedcab3ea481d47721d67d820dc3a966952c739b2666a7a65d592c0c2a889421c6"},"signature":"0xaed989e2a7c329710de5a6b9f69b7003acc8968944a8ef8ff0cb67779089b7cb365b4c491b55a95dc81aebbcd8a1ab45178c61d3525ab5844727dc35b9519ec98fc0f362be9395abf6534b3424e090679fb77da71a9bdcd71524bf80b211a0ff"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xac93956a4fdad1ca676c514fbd98efb10723ea64921ea6b69414884cf660613ca13c7ff8f32883d987b5f4c1a6ad6fd6"},"signature":"0x9734c8f7a54e5be9cc9d17cc323aaf7e6ae73ab881354e936ee3f25c4020b21a8eda7baf06f19cdae1205c5daa1e31a6060d9dd1ae937c82cbdf7b1ac87905b7c24016e94f786a4be63881075f3834a4d2fc553a26f95d57d7dd478a52591fa3"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb072e8389a6a908dbd3175d7f4fecb862d4f6b5963aaf894f5b8c4b7bd4b17efa0d1d14bd73acd695549bb6fa845cc67"},"signature":"0x848c17d983c6f58ea2bc32535d4c0fd9a5ef88da5c8c7fffadeb23f5236c667f8d8240fcbcd32e99d90e7e59506c688315be95775a091db7d23ac12d6926df2bb686914080eac27442231253e1c4eca1247364eb0f1e20e0ce3ef8730d3dbf53"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8e2f6dc60776d86b5e91f07b198aba00229156436de1b10b9930a92a1eec30ad13255add7082ae3bf3aac012fab47754"},"signature":"0xb89d691055daf658741a0865fdd86658f10bde0d9aebfc3362e55a16cd575d0a9d00fb71d0b2702f425454654ac3404319399ab1ee7e52914a6e5e710518db326c5fcf7789a40c311522fa5a6c1fa94c83e1a4393ef4e94174a2ed98fe787a6c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x98b3ad8e922c5bcf925c48ff792dc4dc60a98bff9dcf62ff5809aa93fe45cf2431e1cac72d25d5c3d643b1b2310242c1"},"signature":"0x8aa42e1b19dd96274b742c1e336a169207bce5ef9384960bd4a828ea8ed049397cad96edf0a79cdad4d83567e273cb0d0c571c226e90a39c79e7faf930a7fa49154f09a5a36eb7c3c7e6d030719c04bdaa3c9e4a642e7ea9a7b34763c1d95ce7"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8e973921826dc77265efe7d08941a7cb121f4f1b9f6746bbffdade078f3e1d682e43472776e5289df418a915baf4a035"},"signature":"0x8598c07cc90130c347820c73d2e9a40e23e4843af1a79708f73543d7ea0036dd1119510dc5adce8764a1f1f2c2bac19605439b68d5eb17725383e8ff8646cf7c06cf4e73d3dbc5461aca83d1b82c531b674ec39c9fe91f22eeef68e82289fa06"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","
gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x86959f3e4fb6ef90db7ecb36afbb4eca6635aa2059dc400bced20f26a79407d2721296714943b202c13ef801ab63f604"},"signature":"0xb55a935e319aedb0b071dd9987f59fa1d59cf53e5fe7fbfbed472d9b1d1977023b055131dee01bb10e27099aacf2f61d0dfc074c0944257f5788536601a8180c3d6c4dd3b7c455075c93da4e3fbddf2fd73e08b0f2b9aed5c471aec7024c9c66"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb376d7450dfcfca23b0f3291939b69bb7968474b1967b5f42c81073d5a524a21d8302ae0946250e88819fea3ed540225"},"signature":"0xab99b4dbe0dced97a9e1167366b62efc2e26101ef81230dd650465283fda77eb3982e4e29d29e7508db2fe1041ca7eb619e341d85b6d215fe76741aecaf47fdfaa35a9df59c59c1b58f3e2730b8c827b9a04a8f1dc88b3f40b7e0646678fbc35"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xac6490196f8f18fa43d76dee7f2ec21a0bf9c7f8e07cfa49b83621ed8f9e3046db11834255261aee729bdcfc4ef085b2"},"signature":"0x98f641febe943e05185811e05b87fc543e1b860c85b9c3baa8d12d969830725dc18b7b2c8970bee4ed4dcda9c09c5c531828c89af5eba245ffb010885aa6be9a88ff8f175b096ef04b5bf702f28c73851e50c17e99f307229d4dd71d5e1e6348"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb0c9c585ec8dc111f71a5f11193aa6a22a85c16db4a1ebb0b06eb7f6407e2dd7f6b430da1be4f338fa0f417c2dc33485"},"signature":"0xadce95d818639ec99acbc92a5c3a1f24517c07a88221de330d18f8603681b20dc4916d6d1726be76a75989532c39fe3708b3bb46ef7e4225b1ecef27ec18428bef1409a2cdbcd7daf2b310419fd4bf1ea3b93aa5c277a23f76cf2f2363d66945"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa4fc19951bdd8ad2088b8ac17a9f33ed861b4a0cc23e0a85583b5850a1d424b89dfdbd04c5751c5d7e9d1d1654d1a0fe"},"signature":"0xa7dc19ec5ac0360ef2448c99208977bc3e4d672ed6a68b3a2f2661d87006378f2bd776348bd8c1a434dcc523fe999646064a27c378b0d0fe090d410d84b6f51471959a0673741d62d6010567a2a0ece86e994805c4e3f11e4cade3b070aded5f"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x95245ad23e52a9dcc3e6cd0361eb977f2320032c6a108aadc5de36a721c08b02d3144ad28fb04a4aa56a865606eb3aa0"},"signature":"0xa00fbd84ef21c56d861047f8f477261baaedad0372c119d8010cee203f6a81a64dd9ce69ea948b2d228d96731f8160c203018238bbd801846d0e7472a2db5ee7ce1b4f41c56e95e40c91c15d812df3927785d5d312da36c4cc281506851ec0b3"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x86c1327d789e4962c4fc5de603b984a11c2323c180b8dfdcc73b3181a518cffa2accb88a3c1edd40f3d91570b59891e2"},"signature":"0xb906752e8b9097c01bac82cd6b4e55c9450ae4bb11b9e7433c95dfe8918ea048a4b104ffa4bb74a217e3ebe2b033554f092017273b42efecbd93c88e1719a4d8c068d9873472cd80a04c7baf8d4fd105ab15d53a8dda4ccce4c9df74acda170f"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb035fb895afb783114e867247530deb447d4e858e2c0c4444f5ced2f179e824bdde900c458b920e0b90ac0e564092bbe"},"signature":"0x9865910561da6dc86ef103b2e645cfbb161b54a326dc46834b3f914a17be89e5f518c76fd4ff42c9332902feb6a0efec04dcc623a8cb155ec656c5297d7def8143faa3f96cb70fef8ed8276588e855cf7e900173274bedd41b42c4df086655de"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"3000000
0","timestamp":"1656684360","pubkey":"0x99dc15d3c4deaf27f43bb5b242c95d5a8c3d529e030fad06eecf560519892b1625640dbf785cdf45c74c02c040e5eeb2"},"signature":"0x955f6840c7ed9cb891e585301f1824911277f177a431965f6ad8a069a0f7bd7a124457084f308a781f16f19c98fe2f230f62972c984df7fa22c9a0ce13917f08eff5d3e55d4f09492b2ff206428d4bc2e37ff68a058f98d222b3696a5780f27b"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x872720349661e821dcdbce34a558208b24e8258f9adfb2c1a06e874cfd67c344479b7165bf61d73d82d91313584bd5dd"},"signature":"0x888c73759f247d8895a3d14e059df142d8650152fb003acb8d9a3a5b1ec52301bec42052a5dc1d23673b1b3afdea3654112750ab3a223ceede68b7581f40b51b05c0e83544f902394b79abfd9dc829746cc0760db5c9c75aefce5801cd2a4484"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x82bf3e8b5f99d4d2bdef1b205c39813eecd790bf42e526f3c78ed247c45877f1190a1b70c8eede5263ddf9c29fa5c5fd"},"signature":"0x8f1f8572f643de0dac88f63b1210c02f839c62798ad2b4d893ce16f378b18ded9cc4f7dbebe663b1c57ce3c46ffa79d704864dbafb3a344e9241a547fbae74afd5789cbc2ca4307703bb7527b22dced2598dc2fa094c49c68c67df78c537944d"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb78749795e73f02251a7af9fc14c5af38e9ca290dab694f71a4c0ed8db37d7790ff0ffde6a72ea83e3d4da8d564b21f6"},"signature":"0x81e3c6e27e889fb8c76d27b36bf292eb5590e06dbe5d4e32e020a7b9df5ef1fd3a4c9a794101bd3bf3b3967498959eab181cf32ce4b7f922eaf8669ee468a50bc86bb628dfcf1a773562cacc34e356d330deeb7af3b1324bfc40ea608095afae"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x843121165c934fc6d76b5ee2211aa1508a0edfa6e9cf139769408046783f254088dbd3ea833e7da79ea72f92ede0b196"},"signature":"0x8b7dab1fa90a9636d5edf63686a6c89e6a7ff1bce857b755be30ce3636f77117956c641a11455c6818a1083bc86d189f038eb0230e545008412d55cf7f6239957eac8731dae302f3497bce5deb40998abc637ba6eb696c6c4620692bfe5f6ebe"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xab916f5f215b9a5849bc6d071364ffe99a466bbc55b4f1ba6d2f1af252de05818647c2f58c8f0069c643aac40c283530"},"signature":"0xa29560445a7c4221517a3976f3a861069df3e4b6f216ab8f0c1841b0f293b0110d81921f9419983dc131158b90a2ffdc0ac79b1d117bf21ea99d119ec6c4c90ec2da2cfd64e1f013116b5d681e2bebc44f09738fe50a11241f113fbdeb8ca818"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x950eeac7ea0f7931b76c01e92b482db7c8f59b5f38f856270c5125836f9347bf5f22eae5e22d6106969b44f435e9545b"},"signature":"0xa19d602176225adc79cb4a81088d34279a1d40fb698e391101ae03b87a18c5f95af37ffffe8ee387089cddbd6559a80d0355c0a01fe2f04deded349741667ca042b639dedbf578eafd68d9ef91c84784c226b693b7ba7ca170255dff8ea7005b"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa449480b877bb44aba50664c7bc38a8aa3d2bcd6e3ad944599bff79fcf35575f69fc1254b629a4349b2603804e608a3c"},"signature":"0xae03e20e0a58d8bd84f4bd282576b56b71a0755c56e63844af39c91aae5e163163a4750c503c7a647332dee8e647467618c751bbc8aecd987540c6dd32a555190fa45660f974c5f5f009e6092c1a5d8ab2d750874bd4e252b668f80bd4042950"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"165
6684360","pubkey":"0x85bc2b1fcd3291e940826df38e3e8536b543025154c1d62702e28de8c290b491fe4dad4530d9072e89484379329af199"},"signature":"0x95b756507c980ca33cf8eca6a59ef3bdcdbcc9a215dbcea3c9a1ca8a77adb519594e6d8e50085604309e54047ec290bf0e7f330fbbfba85338967ea4c20bc1631645c6c3061b67acb10d419ddd737754477be3d21ed116014a418d6ddd362598"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xad8644ca37dbce321f2bc02a3ec48d9b329efcf970a3faa6c0233ef4c954519102e67186d07e3cada9245e06a036cf28"},"signature":"0xa9a676089897e42f4583bb1d126ca72e172bf7ccf7f72c92ad1a102aad0c8d0925bac5f1191cc7cf44081b97b7976cd90dba64ce2dca9220bcc821e6448c8b08704e09a7f4c15e8929d62da9979570ea40fcbce04af8b3bf2217dacd24eaf3a2"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x98a515f1e73e48c450229c4c343c6d7cbac7897105892e4604ed1a18f5e9a8b6f1e10762d6e8d455cf2f623b9ad754e6"},"signature":"0xa2364cf28646bf00c13ac44df1cdc15494930e5341f425ba239097b17b892abe3e9b7b846498ab1c44bc1cc2945d062c14843ed963e5744cea2588b4825d88e2f2b37c019cca2881bde6b2cb733ac88b49de9ea26c96485876e290a9eef9b885"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x818d5dbc7be5f2347224cfb03c450d6f86651e1ec54db749f9f4c84b3fcf0abf43ad67a27f943d59b94ff39fce0eead3"},"signature":"0xaa75920a996bffe0e63201b38c1e789254cc0c74d70946d8f154db95f67c0cd381846db1159d4e366a1a0ffb246de1530f39aad9b74ab752c661dd69a3d9e8e01a5cd02c60128733759dadedc0eb4b14f0da639cdf54444126635436c88266e6"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa6457bc4f81d694c1f56b05401b812f8c47fb20978b8c4a26861f3e00c993f6b3079120ffab44c96974bf19383dd74a5"},"signature":"0x8ebc5027083f19613c2c2e666b67fc8a36810603286a38f9a5b9e14fcc9ed2d6316f40a340bf057c224416279bb976df02f6de1c79b645a6c10fa158de84b789e84883f620a9a5690d5afd8aa603f0e76dbfd90cc83e5f025b6f0b2b272bd954"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb596dc5537da72bdb58ac7949e25b660084b3302ccd279d03b3d5ff253ec134ad825ee7651228937da8fd53956a55e6a"},"signature":"0xa60ae25e9d2abda6cbc2b97c26fc67cb70ac756e5b143f1832778fffd9be21857ad4f563c3cfc1178467ec8df1c42f7c16fa54c2a66e209500c439a298976c9810b40d29913373eadfd7136bca2950fbf77bc0f0e2b7d1caa4379bfe44daf0ef"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x93181dad972e4cd44d6e978f23add13ad2dbd0f61ecd6800bac31801aa1974602a6c13acefdc963c863c763e9ab83016"},"signature":"0x8f01f447c0ef5f89779d600185727d98f4351150a2c736f13681d2893fd59460cd9fe7093bc7ad6ef2b779d18d3d4386159143714a0b6b682e0578d55fe36e52e213ab4e5d1cf7e8205235d21c68d3e2910987432c141d2b31ccb04f936582d1"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa20b2748beb3c518e0017ac143231a0125a94491772081234447cfa69fd60e0bfa1b88ff1f4458982f6622a2268b289a"},"signature":"0x8bf66d7f2f3b737418cd548568bd23a4e40c3e33cdbfd15454e62b203dc1be928025ce7027d2392364f75afaa49b467d0fe2ea8d1cf3a4858234fe0b4eab4e6f4c8e59d122c13a3d81bb835029c9a068213da00889862b6ed9f4d60f91d6e6a8"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"
0x91aa07bf720f405ba4c476f4bd68e0645c0ca5f56a4db67111b7661aef259afc224bb72e438777da7739c55fb61ee65c"},"signature":"0xb1acaa9ede10889199d8836b585523bd9efb7db94a7036d26e6ddaf736430bfca980a7fcb2425906ca9ff1fe62245aa50b560a25787a4ddeb9fe147f8dd97d2ab40fdf4d5c6fce5316986d79a39faa36048d13cfbf0ae144d75eac8989633eda"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa1c407178b869b7fa3f9d67bf3da1d362974c77fba4c036bcb7e2330a54e0e6074170e01d8103db375eff2aa86a67bbb"},"signature":"0xab26c88c2de433cb43366f518c24b6e86405e8cfc256b706c9b0f04dd0a3d4ac4fd392d63b798a3b6e9c0c4967b65a410e51475aea859c1300b2e7bd43674583cd1f4a49356a1ebe1880f49f6e8ad1c138420421596ea16a1e1ed1fc6accb43d"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xafe3edc40e72a656c3b7b6af3ee114fe773a9d54d34efb3177c2e54b4612a09f2f334bf723c024b485d37f85e7dc691e"},"signature":"0x9115ce88a5c5d2929c579b99b23feda6f7931e8ffb40da5b7d5a1d9dfb2432b27c09372ffc9713833dc0a57acf85c33d15b85d8b06aa33728d53424e197bc64da39717eca9c0de15b2989bfa07611306c2db2dd252b7ed149ac239b0ed73bbdb"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa5044213f379146597199b0d46f93f624518bfda6907fd220e99b76bfbe874c842399dd3c7933abad93aca96f23ed634"},"signature":"0xb9616cae346ba4f0e84d6fcb20cc01fe59865b1d5527090274bf960d3c34b37c3cd5d8a6ee22551090ed118bbad9b21f0bfeaf2e4e7b05010a095fe800a03d981a440cb9f30e7d4894112fef28f1aa2e50ed914f581aaa788cf54f8e5dfba1b6"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x925dbe689d83889080f1f3e92b5a2128abf1c245976f76d422f2655aebc5222ac5bd15e7997685f2819cac4b75ece8bf"},"signature":"0x99a6bdb93bc950d74e80227ae563490f883dd94acc2f950771ce48eeaddc9776c19d88b73e9428950ccde57a9b39b670146bc50bd055e817e729bd501530526577973db32550943bd980e750c76927f0b15827dd09304b5aaa492c8666aa761d"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x95fa0a99e13b4c3a7fb9d94d2280c42dfef5f661093586a86e84904cfac6f52fc4c71ea7d671143b7bc164d81cad6853"},"signature":"0x98f875e46a4e897bb80e79d935a1b160b060d035d80c9c9e44377abb8acc52ce7666cbc9f596d7ab408ddf72276619230ef2f4555a847f04380aaf177d2fb39c84318795b62b681c7b54711184f7cc368f0bacfc56296b47c9a3e7ec11211019"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x86a0f4fc2b7a8ad4d0c60da873b19fe2cc9375d7a9443f340a0c0ccb23c38924fadcf2c69322a041dc21768cb76bd22f"},"signature":"0x96b349d712fc5cc63994ab30b97128bee80fd757c6bb4ee8d822c5110df9b7cb96bffb11a6c483660b89d66573e7293906f497b5117562b15794cf84a49b1320f26a6b9a32aa2925322ffd486fe82132f66c50b934b82978f0c8dd355b804471"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x801e6221189673ba717efa3050d4be4673c75babb54d276c4192ecd7c8a858a30f7672416a31be7c4c461edd1e727aba"},"signature":"0xb8c7645564f15b8fd2a4a507a47952445729f02e8da10d58ceea056b7223f5bdb0f1979ab396ec4621f99f739796ebfc07cb5905008dd53f25e91727550827ede61b7f4bde0ac1b5592e5841b27a836a913c015e9405e8ce553148cca448f05f"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x87ef988d41cfae75b
76bf8eb6d9c93e680875d4ac3a204079d096be00f54b57b8d5d710f83ed204f13904fd3df53790b"},"signature":"0x90d81393d114769a0946f4b84b3cb890735c05c508ea6c289f7fff9a81f11c5ca9aeb99e45588fd798b30082bd9a98a80b4bdcc8e9d58248b263bc2d5778ec729ff766928c6afdc85719286400a95ce8dd41feec4e7286f077ca8ab3d325a8ed"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8a8be3ae22b6522c5aa5b73f3c1ae648f02cb176fb64702e083709622a374204c242ee7b9f028aa113ea6a1ed51d5d34"},"signature":"0x8ed9dfd47b958aaf2fe3eba38729d70cfa2ef22c47f0a671692fbeade4cfe887a79523fdaa8bac16c50dc8a654bd5ded15816fe29f341983ffe234baf254365ac96ec5f6e49f2efbe792be84f6093461b946fc76069e79a03f6828a70aa16c45"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8228be2506ff4bb201b83272ee5bf35b52f0a8548a86daa46e628b1a8b284e82b7e4bf1730fa250e9e599bbc051c653d"},"signature":"0xa3be4fb2d216733e864653c9bcb172a43f279c4438820736a77bb502d8bbe285f162c063ffec2e9e21409b9e7df4c7d012b058d214a7416a1f3c99c5c70f70792c0d831c5d944a591b4c19ce082c07c2d64a8084806dad9bc6e4008d8cbb830c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa4d8c74e6a77f71b942c5ae43b56244e442e415ece3a4c0bf535df51ea74fa88829da43de1a97c1873be4eac3665b719"},"signature":"0xa08bbe60cd0c02c773778e1610f8de5dcbac1badfd5a6f98fe16a084a11b2d575cf436ff74f2d53368b3f767418f85db10f3163a339b3f5fa5d35fe03e4fffdae76eaef56b89dd89b1f3ddac8ced6bd42f2e3e0eb3498e5b09d493dbe28829d5"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8657d3103ebc7a8e034e3d3e23f50500ac078b3a3121c2190a842855ed58c3e979dab44b0043344899b81ea5e24c9f34"},"signature":"0x9811d0dc0dc12ae7ffcf677a8deef7804e1507258db0f467631cf615e52e6af388c2dbd1503380a8172dbef5cd9035790a0e1891ee635ad3824519bcc467c20c36afe2748bdb74b6775e5249a25c1c8ded782e48d5b069400a093c3ca8775f0e"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xacb271a03c1a5b024ec47f736fe072de2ddf2ce139b793240f1710dbe0c2ab5f14bf76d5c33e5f3e42bc32315a0f8b72"},"signature":"0xb4b12a78a4ef5739684207238ba68e57c581d19578806d04450260409443fe11cf724afda82556bf23d19403130448e519084c066d04130d408dd7307249d5ba6337a2a1f2e1dc9ecc43993f4c57f3ef6d70454cc04883700ede75ab371c4eeb"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb89ddb673f7338a49fa8935c915d1f1094f28f6a9e31f2bf28df2ea56bde3c35e63c8fe877bce61a46ad9fd98b46624d"},"signature":"0x82b3babae3428e66928628460d725e75e1cd35db37f9293d594b40a955cbb4e8ac5d03eb4fa205c6bf8a03e3170049cc09d82b65182125b9192591e437903af16108e68cb0f504fcdae08baf1998512126649d3061f270c2f668015b9c647a39"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xae4209338f7e863d4e4cc86a2f9ad47bd4bc8f7b7811b9f9ec22dc20df0dfe532c2f7ac265e6b29b957dc08128543a6b"},"signature":"0xa8eddc07f6fd2f29c9394e3b3a466d54cd37543db5488910fc4a31d6fc4243cde389fbbdcc4bd5a653ab287a3aca495b018ebf0bb764dee39e6018edbb3910dd07dd92825170ce70046d62a9cbe257d0bf370afc2e860a36eb6b889a9ce4f938"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb2feea7a56076a435a2ea0e4894396e92796
829e01e52676a651b0f816c0786a2d10e9fd83cf21660094c121e913452b"},"signature":"0x88dad373212848dbf07541b172e5aa5a75d2ef058c3156e3d89ac352c12b862b8620a5f9001b11008f595375f716d9ca0075c8de63d3b4068a99b67109c3e86506874a3bafdffbb0d3539109bff76f3abb3f26b5a136d4193b1256e5144049d7"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x97cbba0cf71a93cb10db0db27eeb3172c8fc78b216e238587e608f0396ee38a686f0f148724bc1021ee103fd76f8e1d9"},"signature":"0x8560ee35ddd9e963710e3967e6a448c981b0bfb04a6d8e5c07559eef2fe66e60e20e9537e203dfba4e793bdced2b197e133dbd7befa3ac6982e0e4617088e09ad7295562b12e9302a432c1f1605c36c31a1abe37b3ff6ab8cdba8c3589bba280"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x85a03cb372aafbf044fb9ae397ed561fc27558815668c6d7fe637c18fe09927fa6b6e881c72e93956c7f96aee3a7047b"},"signature":"0x8f7c1174ef884748d42b0ecf60553b4ac9c43b7ae8d96510959488979484961387818a6ee4a8dc32c3caec2c81b8ef21042004b6e2b16cbc354bd84a5490b133a86ea8e7cae56a97f70875b95043bca9c3d295171b35e629e61326480263a0ff"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x939e5f6bd2443af622f2612e8e9fec029404f75f5c8438f3e03b9f76b42ab12bdfb45eeade25dfe49f37763104f8bea4"},"signature":"0x8a691860a1d6a4552cbd3cdcc07bf92eaaca19f39f07348816e0f37d9a82196de1df026fcbea1366aea592d1030dbc9b193384f36cb364aa7aa11bd68e596874f72e7ed95b0754f547e69ec553702b607095944b71961135246714a89efb1a25"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb44ee568fe538b9815fab4c66047ad8f98564402a9eaa46e4819dbebb715f90730209f1b3eb070367a166b196ad87aa7"},"signature":"0x91b7664b59c1acb65a72b801511195027fa18c77db34c49e14b4a2c5e20b48f645c05483ca15a50214aaf75b94752350042677fac2b25be0c41d6cc802c560f910b135403fe6ade2435a4a3325f1876ccabcf6b05b6ea8c642ab727b775965ca"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa99564ff22412ea7e4c11a1551bdc5a301e32201bbc9276c38899aa46d1d52f306aa59b0edd70b5978883225957f88b8"},"signature":"0xa997f55d47f6e8675703fc331b29c7cfa7334bfacd1b5756e19313024174a7542f6c468a708fdb51c870f86f0d01e1eb00c11b71724928f27ad0c6ad7f8c6c482397daf6e2313e6ebedd28b182f79486c403abc8fcf5d3167a9ed666f42c50b4"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa6be2296d9ebd0ba3c624e0f6ef9d417887f8ede1ea73514cc3d504a43a5967a6b4512fc7a5d558378cc9e1b73c3bca1"},"signature":"0x854801e757a5fa9fd40a7a9afaa310c30d12f2191d3827ef3c13b867c88a308b70e2ec8caa8c551ea675963efbb1d808037f6e8339c9bb915f26db27d461be98b5038a554d0354ea11d957ca245e020feebce4cb3fb9961c4356831461de748c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x93d5775a8807678ef2a9030ad491879b3d6fcc604fbd2942a695c4c19409a690c457b624ce4d5d771d10184c72eb1559"},"signature":"0x97a3569abc1fc38d331e0428ede763413bceb51a32ccf04e663173ca81f851dfd1b845a3ba8ddb07ba956baaf440ba4f0b4ce2b1af2c27e2b98b174ddeb71740439ce32887d90d5d946113914561d715a1648bdd6369d65a0621badf1510b6bd"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa558fcb01365cb290a561b0000c9827f770fda53cf572fb998fa764
45a8d37685db1f65384b0f7ac0b8a9f26c1299e81"},"signature":"0xb0d1985fbfadc88341876b9e9bbad580ce7a10a9d86d61a82f842b5c1832c14425e20df97e84307634a7eab903812a750fcdcea69dba94e0b88394818102264e14c3afca8ac16abd7e43f1ae5a99204aba9c9a1ce72462700dcebb8e4141e63b"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb4404158db032d5ba90da512d646bd57d0f689ac65d19709c8adac8099df30136ece305c204ae97bb1a25a15d8db65a9"},"signature":"0xb616f04070b57cc9d354a8c642653e04fef360bfb4958221b1db3a26a308a206b6d0ee7d209f1608f0f58b703729c685069847fcaa2bfa9f4f90f6bcf4ed74d62651309220d09180e9845cc1c0ba08f4db2f24ddcaabb309b40c7554234b6b0f"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8eb4c9fbb062e542cb6e8dc2c63eb2b7439d8557e8ed531a25cb5a6b774428cffd02c03d9989d77e2b5401467949a3d1"},"signature":"0xabdead51cec94c5ccc6655b8b3f12b773679b326120495fc3beaa5022a48fdd15425d5b2df819f782481e6a1f9f668e4139c5e6429ee1b7817c8d4e609dc4dae7109b3b1035f234de08bde10eaa113ad0a23eeeb6aa884828591efff2492397c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xaaec769a5a29e9ead91b3c81eca399dd81aa72e55abb304878794ca0dfae580aa5883866e096c49c4ccc87ca9e948ced"},"signature":"0xa2d9d5e6cad3e6a69b88b7c0633d2823b05cb3ee88f6980e9d210fab3cf38848bf1cc53c4bb1ca8643d16f231b5f8e520ae04afe08d67aea2076e7bde2884977de9799dfe32587c24ed8e569c7ac29b2a5227bb65cb83fccf039c823b816267e"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa796dedab984874872bf9b29f63925d09a4a1d7d2acbbfc089cffdf138dbe370b7ba31fceb197f34e3a84b18964259f6"},"signature":"0xb5bcfbd6e1a277039cb8ba48479f12571015921ce83d511cc95bcd9b6e4769e2085b18cbb22be319427964c2777b09541528224efa7801e7959f7de61e8baa24e3f9c3d3234b7883ca7a5a6f3150b40daa4633e133aea61cf88ef0693aad94ad"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb4a6c12abb50950de8ffc0e45b6e9aedc608a225685842e41348f670e9d51f0c2d1e7c371296add864a901572240fc49"},"signature":"0x811424fd233b84c1e3d7acb39c1a721707c0dd2ee68c2ddf3e523fcd4130210be1b5819e63ed0b5751942f024d15ec64042d5da617b4b867faaecfb8abaa1a967ab12672dfdd17509dd916cf434567d820f93aa7d815b3e778491b5499b70a71"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa9218a98523a43bc84063ab4924d0812e1585d0be936b34b00e67605439fc91f4e0d4962b01e2b608cef9d92b5b0b42d"},"signature":"0x8dc0ed9e762fa3fdbbb091a52a8749bd1308d1f6365af7436f0c92071f4f4fdba0c346f7abccc5f0df63970a99fb3fbd0c4a8962181d276156e6d4258923d05090f171cacc71b91a7bef89f29b4a937dc249150b31887958ace0bc8c102baf3c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb62fa6bc50cb0e254738ad6b51b5e90ef7bc4e6d884688ce35e8ae104aaaf9a1d70bbd05694236d363191dd724e452d8"},"signature":"0xb0227384ecafc721c4495571f666d0f0e8e6016fe8236088e5114b3ae268dc9f3e0ee5a8201136045ab3150b3ea79d23176172f7917a921b8468d868aa996206afe1c47324fe288d059c962adda70cddf819a6a65df501ab14b5a009c707df85"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x82eecd9b6238ef12ee305921664d9c0a6b976d78473fbe27b8d13ef4c600f428ace1e20005
47c7292b585cd63dee4696"},"signature":"0xa91eba8907d6346640c9e9072739583fc4866d10588f516520df711a35f5b3df1a01024a87a4a0aeed429ba5755b50d708fddd03b2ff84c3d233911280c1f5056fb9abdf687757ef484d69d7b8d33f18b94501e25d96e24edea180a8797c3a75"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xac7a7e8111cd78a914fcde4be9c022dca17a0451f9a3a6fee13c5ffc8ab6f6ecb6465632a92420a85522672f9a99528a"},"signature":"0x96eff79821e8a1d930922e08e33e28875da7c38daa4394970e29ab4fe60c0940f073749ff92a9de1b06137f7a705fcd811f7851e0b4672b05c92cff49505bb6ada16fa6cb40c1a65aeff3d2be8f872d2acd2f7a050a467f77146f6db012c1450"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x9003204660a4f3d8750ec4a1c68f922d572961613505fe053e88c38bb052ff3582032583bb900ce654763ba117cea5e5"},"signature":"0xa85e2c3a8d12096c3c2517a0cb50b0ec95e7d53374b76c912c6abbe7cd80c78b4aa970fbdae550e98c154e33755892af0b35fdcab3e34bf4fa1921c5d9857f5d6cfeeae80cf4b20cd3000db4e41c698309c5bdc59e586cc6b14f69303bb63b5c"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb260f6429b4aced574471c97da4767f80ac108cec91e07001e8675d954a6c294c4b5044e3297c6ddb725b7683dd60f59"},"signature":"0x85989347c66cb1050bbc183fe6c781f6102c091ed7ea2a269d47e11234bc8d34b2cb1d157e00ddbc70cfd8855ecf9e2e0a16c0c00dee729bfb6c8f56449efdcbd4f898581dfb5669cb4579249cc15319c54242dd189cbdf18cf8a40604e3f3cf"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa32a4bed50fa78650153cb32d981cd2d208585e31207d5be4288c0467e3121e75bb8821baa04b619d914a13417ea2a55"},"signature":"0x96026f7135f1705f608e378207cf9b3211f671e21c33c3cab8f43b395926bb8087392750cf360b95aab72965f2af52671825d19823deb7d872de181adb30602527fa0c099423c36bf66ad2bc07e8cbb53acf32969a4883ce011b949f23269157"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x87824d3113333d1630dbe9bfc43f9d599424955525181116fd0e585625334d83447787e9d3c82e06dec051505a2bd2dd"},"signature":"0x8452bc9592467ec488412288f4610e9ff802675ef9d3f42d0c42bf45a43b231ff163f49ca65d6aa5a48db50277c3b3620e219de92aa5aab8b6773590a06f0919770179767fd9802efb8846cf4bc0089203363ad9ff380d140b51392b6c3a9dc9"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb315ef8eafad3dd939629d584ef382b71b17c0dc711e4f3d067ea11cc266feedd2c9ce40b7ce574dbe7f6fd32f94a942"},"signature":"0x8512673158f2e1349a83b132222aa894c5ec609a8d643101ede5d1eae845e698448432ac7576bd9edd7c1c584aaade900b73f9bac9019237cc3e74b49045a939410d3a8a5d363c11e2542ed0354aab255247165079c0c3e97c14a14fbebef68b"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa60091c586bebe33f1e55d2b30e0d15dcf7b1375962b522c027c66893e33e6db0b10312ff013a0d3a1dfb0afcfa07797"},"signature":"0xa895a2e0b7aa498c54355238689251221d369f3dd230f7d556cef1529d95292de6cb131ffd83eb145852ccde319b75620d20944ac309258cce79f2ea067ae5828f4edfa1a16653cf217d4ebecdcd4b3340c27c2a43f983bff96bd7ec47024dd0"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xb20d6593d22c95b327d8d7fce2b3001ae382a5cc1506325348e042623a6c7e25945c75a2728ffee3782d6937adec5
a8e"},"signature":"0xae9e0cda93802e80562d21eb17d0eb0711cede9ee90f3104146caf60804c70184080b52adcb01412d5eea333bb8c8cd512bd5a63107b9746d1ad2fc96618cb099a0e1cd187aa3081f063432bc165b0c5d1669468311763ba2979e89f1dd960b8"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa58768a1cce5355cc08506bdb4cf74cde1b6ab1dbeaa002a324b86b04053fb3ce6fc191f57022b962da33f30aec230e1"},"signature":"0xa8fb67e6e6d6b8a588a030a9f2c9ac19c7f30fd51996fdd82282567beb01ce1c1da5c9e180fb76ce7af7113d08d11ba112b6268c7caf70519cb15d236b280926ef26ff9a2f1c16d9ae22c5419e62742b43b60b81585ce66ca56fa6fccea1dd43"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0xa34b29790868d0a0bc8d92335a3e3dfc37128491d7153d86c905e7a421282a84877eff963e4e6bde6353a6541250feb8"},"signature":"0x8d5a7381a1da559b658cf8c30886f1eca7204560ffd70ab0d344dd8967c65998f1783d274469d6dfb6d7d091ce89edac0b0ff06a2ae3f44c38301e4e3eb48b28319d92531f00627a9fa52cfecb8aad24f58e914bce7989e4e8b9bfecd1a9bba6"},{"message":{"fee_recipient":"0x7ba68795465c94160ff2771a61052049ef1e74d1","gas_limit":"30000000","timestamp":"1656684360","pubkey":"0x8756fa80964a8cabb4b51502c4799b79fb65797a1b031cd9ee871854a66923f932fd87089d2c977a6c92b3dc8234761b"},"signature":"0xb7351ef9cda5a268a92fd15fd94f087aacf0d5806e92d0290fe2d9d81d567f6d43cbbe72e3cc96ef40f16ba0e4d1fd65003259cfbf3cb4d2f7b6e6f4f2d31e97d0e07d9dc6ec075ca99860abb6a30662eb5ba6f2c442c86c3b1c7a6d73bc796e"}] \ No newline at end of file diff --git a/mev-boost-relay/testdata/valreg2.json.gz b/mev-boost-relay/testdata/valreg2.json.gz deleted file mode 100644 index 46b9ec428..000000000 Binary files a/mev-boost-relay/testdata/valreg2.json.gz and /dev/null differ diff --git a/mev-boost-relay/testdata/website-htmldata.json b/mev-boost-relay/testdata/website-htmldata.json deleted file mode 100644 index 5593dbe12..000000000 --- a/mev-boost-relay/testdata/website-htmldata.json +++ /dev/null @@ -1,802 +0,0 @@ -{ - "Network": "Sepolia", - "RelayPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ValidatorsTotal": 1973, - "ValidatorsRegistered": 355, - "CapellaForkVersion": "0x90000072", - "BellatrixForkVersion": "0x90000071", - "GenesisForkVersion": "0x90000069", - "GenesisValidatorsRoot": "0xd8ea171f3c94aea21ebc42a1ed61052acf3f9209c00e4efbaaddac09ed9b8078", - "BuilderSigningDomain": "0x00000001d3010778cd08ee514b08fe67b6c503b510987a4ce43f42306d97c67c", - "BeaconProposerSigningDomain": "0x0000000036fa50131482fe2af396daf210839ea6dcaaaa6372e95478610d7e08", - "HeadSlot": 668155, - "NumPayloadsDelivered": 19557, - "ValueLink": "/", - "ValueDesc": " ", - "ShowConfigDetails": false, - "LinkEtherscan": "https://etherscan.io", - "LinkBeaconchain": "https://beaconcha.in", - "RelayURL": "https://0xac6e77dfe25ecd6110b8e780608cce0dab71fdd5ebea22a16c0205200f2f8e2e3ad3b71d3499c54ad14d6c21b41a37ae@boost-relay.flashbots.net", - "Payloads": [ - { - "ID": 20457, - "InsertedAt": "2022-09-20T01:18:48.145623Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658594, - "Epoch": 20581, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xa5d72ac4cdcd847d67cb5a68c6141cde99a91303ca84165bbdc6fd7f643422faec783de60739e1b2753088280c90a68b", - "ProposerFeeRecipient": 
"0xf24a01ae29dec4629dfb4170647c4ed4efc392cd", - "ParentHash": "0x66726f99de797693ba86cf9cad776aa459ee30d4e16dc7c45c1685e285b986a1", - "BlockHash": "0xff738f03dcfbe6544db177bfe336bdf778733708ec7c0ca58c639bd68e6831b8", - "BlockNumber": 1923149, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20459, - "InsertedAt": "2022-09-20T01:20:12.294826Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658601, - "Epoch": 20581, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xab7eff4ef8696db334bce564bc273af0412bb4de547056326dff2037e1eca7abde039a51953948dd61d3d15925cd92f6", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0xa04ec73daf7c8dd103a6abe4712afc331fb46e382980a4f78f7a26a20420c482", - "BlockHash": "0xeb433a5bf3371f4692d1bf772212d08a0925193469b7c5afb92e87d4e6137aa7", - "BlockNumber": 1923154, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20440, - "InsertedAt": "2022-09-20T01:10:25.430078Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658552, - "Epoch": 20579, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xa52c15840b89d92897d1e140b2b8468a88886c5e1092861e598b3a433b340ded5b35b3d632a9879820fd56f20ca3a68b", - "ProposerFeeRecipient": "0xdeadfeea52c15840b89d92897d1e140b2b8468a8", - "ParentHash": "0xf0368870f7cf75a13035c7fddadc7ca9ad32af2f1e80a553cc6c7755e257ebfb", - "BlockHash": "0x2c4c9509422946e8844e9fc2cbddb85eaaa97af1b7047036e251888e853f7420", - "BlockNumber": 1923113, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20442, - "InsertedAt": "2022-09-20T01:11:00.290907Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658555, - "Epoch": 20579, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xb2235bdf60dde5d0d78c72cb69e6e09153b0154efdbab97e1bc91f18d3cec4f660a80311fe6a1acd419a448ab65b18f1", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0x72e0e47c359a98d1c5b0e3ef390c07c21746bcfb902e761225cc520d36316a2c", - "BlockHash": "0xf3c07430ad0c4819a8fe26ee6aec13279387037bb8c517c9393c199e88dd64d0", - "BlockNumber": 1923116, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20445, - "InsertedAt": "2022-09-20T01:13:37.649101Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658568, - "Epoch": 20580, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x870286c76b360f9dd862e7900e9af235d4ade37c0970b88a23b56e27307b8bb2a4ffa4c19176581d39faa7d87711ff6b", - "ProposerFeeRecipient": "0xdeadfee870286c76b360f9dd862e7900e9af235d", - "ParentHash": 
"0x2cfced18d4e9d073818469d604cd816c68bffff0895482da4261b94ee8b25f59", - "BlockHash": "0x1b79d18afd05acff986ce9796dd2b149d014a726083efb90da0cb883220fbd2f", - "BlockNumber": 1923126, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20455, - "InsertedAt": "2022-09-20T01:18:36.212291Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658593, - "Epoch": 20581, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8d474636a638e7b398566a39b3f939a314f1cf88e64d81db0f556ca60951ec1dca1b93e3906a6654ed9ba06f2c31d4ea", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0x1d9bbea8977ad06ca10471d807a0b6fadd0b3328312ff945459709e7516f49ff", - "BlockHash": "0x66726f99de797693ba86cf9cad776aa459ee30d4e16dc7c45c1685e285b986a1", - "BlockNumber": 1923148, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20468, - "InsertedAt": "2022-09-20T01:33:24.208189Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658667, - "Epoch": 20583, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xa58d2fb1c2612d28c54fafa7f2e1e6c336c24435abdb53e1be9dce9aebecbf7468a348b872549535ac18aa003f83ea87", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0xd19d2522b1ceae42d7e0c2e3a5a279fadb211544ac9751ae6ef211fae11f9267", - "BlockHash": "0x36f20d0c17ef8b989a99fb6b92cade4aedb07c81ebc9cb55c68b4005b96afcd8", - "BlockNumber": 1923212, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20470, - "InsertedAt": "2022-09-20T01:33:49.691416Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658669, - "Epoch": 20583, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xb8137fd57ce7d3cfaf8bdbaa28704734d567d0e7a2d87fb84716722c524bb93acb2c1284249027f3c87bccc264c01f4e", - "ProposerFeeRecipient": "0xdeadfeeb8137fd57ce7d3cfaf8bdbaa28704734d", - "ParentHash": "0xe61ced1880cf89141d7ba4780047bc228af0571c39a861c452b571b5da35c40d", - "BlockHash": "0x04c6d30667dc1aac89dda668904dc103b19a5b069d832cab60000711d116a112", - "BlockNumber": 1923214, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20473, - "InsertedAt": "2022-09-20T01:35:24.199006Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658677, - "Epoch": 20583, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xb5036d4c241685bcd67156e4ab0eba42b97f639947d54b17af2c88fbcc5fc57359c7df4bc7f8df955a524fb1501a6fda", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0xec9c6383241fe03c072489b0202aa1651e2b48770bd9f053b28ff6a491a979e4", - 
"BlockHash": "0xe1b42616b9b553022482345909a2bbd3f9ed9df2d96aee8bd859ad2322ccf093", - "BlockNumber": 1923220, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20475, - "InsertedAt": "2022-09-20T01:35:36.281184Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658678, - "Epoch": 20583, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x908d762396519ce3c409551b3b5915033cdfe521a586d5c17f49c1d2faa6cb59fa51e1fb74f200487bea87a1d6f37477", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0xe1b42616b9b553022482345909a2bbd3f9ed9df2d96aee8bd859ad2322ccf093", - "BlockHash": "0x43df37d5d04d61d06e3e94b5b24c88095cdd978d582d0b98bd13294082b18d3d", - "BlockNumber": 1923221, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20476, - "InsertedAt": "2022-09-20T01:36:13.597011Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658681, - "Epoch": 20583, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8eafbb7002f5bc4cea23e7b1ba1ec10558de447c7b3e209b77f4df7b042804a07bb27c85d76aea591fa5693542c070de", - "ProposerFeeRecipient": "0xdeadfee8eafbb7002f5bc4cea23e7b1ba1ec1055", - "ParentHash": "0x10ade2a03359b62a984e972c975893a9d5a88ab2fbc5a512995a72134d1d2783", - "BlockHash": "0xd9ab4f0bf4c8d543bf3c44eb4adfab7b089c79a10993705cadd6b2f100e36c41", - "BlockNumber": 1923224, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20480, - "InsertedAt": "2022-09-20T01:42:00.252051Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658710, - "Epoch": 20584, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x84d3e2a06e16ced26094b356a16a4fb6aad50ad9ab23ef804a5852a33ef0bff76f3c5fbf7beb062376c2e669cb598679", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0x3c4d5c438010a0974541e2a394ff84eaf96e895a500b4ac054d63d060f9341d3", - "BlockHash": "0x275928129b8077ac862cb7bb6eb913b2c307aebb49920c541aa0c6b0fc380a10", - "BlockNumber": 1923249, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20481, - "InsertedAt": "2022-09-20T01:45:00.247864Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658725, - "Epoch": 20585, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8a9f7e8d45f11c4bfb0921c6008f3c79ff923452bcfa7769beb3222f1f37dcb861be979e6eae187f06cf26af05e8ee5b", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0x2f55267c92897b7444b38a8953635afe7ce59a77021964e6a340c86c5d27aeb2", - "BlockHash": 
"0xbee4dbec7acb9d14bb29fc3f603af733ddbd32a20c367e2cb9e66e5c9ef97f0b", - "BlockNumber": 1923262, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20462, - "InsertedAt": "2022-09-20T01:27:12.315529Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658636, - "Epoch": 20582, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xa626b9c78e4ff01b48653961a65db6887a113273fec0f5b96bad34cc2c7647402f84568bb96b55d5da1a84cfb9eb4891", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0x242608743367fa308338285efd5443646a7c367a8a8c3611ab03e0bf5606e7c3", - "BlockHash": "0x5b3612f4c39dfae7efacfb7157cbca74da2c2c5167481d76a8bc9a839fec3347", - "BlockNumber": 1923182, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20497, - "InsertedAt": "2022-09-20T02:06:01.53772Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658830, - "Epoch": 20588, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xb09c1f4dfa1f003396dfb6f6201a4b2c9d3f9c7242085639b3e2566b46d68942d06b984226f2f45396adef285a428515", - "ProposerFeeRecipient": "0xdeadfeeb09c1f4dfa1f003396dfb6f6201a4b2c9", - "ParentHash": "0xa89b4c6db07e4a565bb5751d5dee0e4356b692ee7ac21f37354d09752cd4b225", - "BlockHash": "0xf20a409c4d9c388594f2604b09b3c83caa7dd1d0f2141873601521b51a1cf480", - "BlockNumber": 1923351, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20503, - "InsertedAt": "2022-09-20T02:10:48.285762Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658854, - "Epoch": 20589, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xab7c058199294c02e1edf9b790004f971cb8c41ae7efd25592705970141cdd5318e8eb187959f1ac8bf45c59f1ead0d9", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0xf349623345da4c8e7c93befe4b5635b3ad550dfc19bd5bd5e943701474182592", - "BlockHash": "0x324cb9a43d8a3a0249aae5cfdf18c3616da40bb5f275355259ace3b293e46de9", - "BlockNumber": 1923374, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20505, - "InsertedAt": "2022-09-20T02:12:24.229414Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658862, - "Epoch": 20589, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xb01a30d439def99e676c097e5f4b2aa249aa4d184eaace81819a698cb37d33f5a24089339916ee0acb539f0e62936d83", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0x0f9ce226f2d2089be21e8780bc54f1e2670f94bec7e3bb6ce13a3928357ba2b0", - "BlockHash": "0x47e7afb54637a42ed2d9987caf1d93f8b269796c83d7c1fab2110802412d103d", - 
"BlockNumber": 1923382, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20484, - "InsertedAt": "2022-09-20T01:48:24.909757Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658742, - "Epoch": 20585, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x921b2546b8ae2dfe9c29c8bed6f7485298898e9a7e5ba47a2c027f8f75420183f5abdcfe3ec3bb068c6848d0e2b8c699", - "ProposerFeeRecipient": "0x455e5aa18469bc6ccef49594645666c587a3a71b", - "ParentHash": "0x42c0069391565842e30584baca23c9c80895fc8d683c51e771fdd3bf5d53ca84", - "BlockHash": "0x5a6fb538c240eddeeb87ee735539c560fc199a2b73497f2ef7fcf3f0dead8a5c", - "BlockNumber": 1923277, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20487, - "InsertedAt": "2022-09-20T01:53:48.287633Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658769, - "Epoch": 20586, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8bb045e7482b7abe670d72eb2f7afe4207b5a3d488364ff7bb4266f8784ea41893553a4bf7d01e78c99ed9008e2c13bb", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0x3789f0b3494c6288d4b8313a2fd1e4fdf19271ed0227141917d15a74d88397d8", - "BlockHash": "0xa0ef1576c27c52cc34210a53ddcc53a3cb5f96c16feb63589fb91c1a04c7f549", - "BlockNumber": 1923299, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20491, - "InsertedAt": "2022-09-20T01:57:01.653252Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658785, - "Epoch": 20587, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x90273bb88f2d4d23f9d7dd2fad356f7c0626b4ff52569f274ca62f8fba65fbded0121e7cc0981272da155f36e9be8bae", - "ProposerFeeRecipient": "0xdeadfee90273bb88f2d4d23f9d7dd2fad356f7c0", - "ParentHash": "0x19d17bbb6115974008214ac275975a94ac89878e6ecfed19e84bc39b1eba526d", - "BlockHash": "0xa9757ce7d099da899104ec4294d68f23beae7a33a3987743d2ed1e378553b159", - "BlockNumber": 1923313, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20492, - "InsertedAt": "2022-09-20T01:57:12.195495Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658786, - "Epoch": 20587, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xa2d7c628a47e4e948332b2faf6ed63316090b6fedd4d9c92cc2c12d93ea0615b79d133058579b9a6ff48a4e9918848fa", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0xa9757ce7d099da899104ec4294d68f23beae7a33a3987743d2ed1e378553b159", - "BlockHash": "0x6f7d3a79f00d53d20cba4cf8fe4e6f365c2066f7c824e5f0d37984209a7c3b80", - "BlockNumber": 1923314, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 
1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20521, - "InsertedAt": "2022-09-20T02:26:49.609054Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658934, - "Epoch": 20591, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x90f4476224b64c2a5333198a4300ece8b3a59ae315469b23fd98dadcdceaaf38642d2076e9cd0bfacc515306f807819f", - "ProposerFeeRecipient": "0xdeadfee90f4476224b64c2a5333198a4300ece8b", - "ParentHash": "0x630ba3f153fb544cc215fa3b423cd363b370149caf436074a2d128aaec95870b", - "BlockHash": "0x127bba22f89ad353b34d7be58f1486635f503d377c014379d208245c7fe71b6e", - "BlockNumber": 1923442, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20506, - "InsertedAt": "2022-09-20T02:12:37.686632Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658863, - "Epoch": 20589, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8be8d356bbf35ccd5980848662b5d6361eef583b535da90cef6c07904ccfb5963aaa230ac30ad63441f60e807434497f", - "ProposerFeeRecipient": "0xdeadfee8be8d356bbf35ccd5980848662b5d6361", - "ParentHash": "0x47e7afb54637a42ed2d9987caf1d93f8b269796c83d7c1fab2110802412d103d", - "BlockHash": "0x1dd8496db756c26ca2567fefd15c36377733cd7157905429a6bef6f0123b6771", - "BlockNumber": 1923383, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20507, - "InsertedAt": "2022-09-20T02:14:00.311409Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658870, - "Epoch": 20589, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xaec5e915f23d327ceb37612ced6a3fbdcb3153ae075fa37c32146a7aac038fb65e03a87612b9a8c2a89188fa98c0a630", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0xc585fdeb62c36ec5e521b4a3e48e0fad02570ef695ea69948bb34eb01b491133", - "BlockHash": "0xa0db83fa8816787e71b457b6c09ab9511d5c104f532a01110857da0fb7064550", - "BlockNumber": 1923388, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20509, - "InsertedAt": "2022-09-20T02:17:48.287069Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658889, - "Epoch": 20590, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8d797819318cdf7b26405d1a327d80d4c289e56f830b28d4e303bcb019aeb0b3d69bfed58adcde8a2445dd5281b86af1", - "ProposerFeeRecipient": "0x1268ad189526ac0b386faf06effc46779c340ee6", - "ParentHash": "0x7b1b53f1e49577f686c81b03403637622dab62ceadd36fff4035ab99aa28504d", - "BlockHash": "0xa95eab49ef7dafd41c08609464d3c8a22c499f4884e02490f9948614dc637ff7", - "BlockNumber": 1923404, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - 
"ID": 20510, - "InsertedAt": "2022-09-20T02:19:25.632485Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658897, - "Epoch": 20590, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x82b8c013f24fe64b8e0337ae8b6a682cae336b8404eafc1404744f80f765efdb8b2873d1d3f31141e8dfe4d93346ac56", - "ProposerFeeRecipient": "0xdeadfee82b8c013f24fe64b8e0337ae8b6a682ca", - "ParentHash": "0xc2d6fbf21d0a58f4cd29450fb8534229a1a4ea5c4102a8cf750130b9ea2c28b7", - "BlockHash": "0x897ac440c485f8a74840760c95ecb7593d8b5203e921d7a9362563784ffdedb8", - "BlockNumber": 1923411, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20511, - "InsertedAt": "2022-09-20T02:20:00.348943Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658900, - "Epoch": 20590, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x876561bba29e656b7122f1cb51a02dff1ac7d470217d8a4799c01e61816c4660eea91843a5a42502ddf842d2daeb0586", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0x1a33cca741bfdb9c383ee3a1da5937f094d9cc607399c2dee6f2212501ed7f26", - "BlockHash": "0xa6d19da69114a9161798e13c06cc326668bc20400a88ee3cc16c0677e8922b8a", - "BlockNumber": 1923413, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20515, - "InsertedAt": "2022-09-20T02:21:01.617723Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658905, - "Epoch": 20590, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x8c38ab2a9558ac41c6ef736a5560e5960102e92f710efac3f631367a3f6d7227e0813579f349e661116bb29b2163b296", - "ProposerFeeRecipient": "0xdeadfee8c38ab2a9558ac41c6ef736a5560e5960", - "ParentHash": "0x8658d567e92c24b5220dbf528c2fd16b38479498f2edd1c6d91911a10e3c0f34", - "BlockHash": "0xd91e95e34017b904e2e166f02efe1533c7a8a72be29224b04f55a7ebe1bbaf2d", - "BlockNumber": 1923417, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20516, - "InsertedAt": "2022-09-20T02:22:01.655873Z", - "ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 658910, - "Epoch": 20590, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0x97a16c696787a99fd243193ef8edc43285d9d9b5911a27d057186a0b80b2593236d1dd48baaba1e9a0467114aeb776e8", - "ProposerFeeRecipient": "0xdeadfee97a16c696787a99fd243193ef8edc4328", - "ParentHash": "0x9600db57903c3c110e00d09630d3805e7103ed95d2e3dc126021149a4b24aa84", - "BlockHash": "0xf30d507c037ff0238035b4ae76f5285632acbb0e473bd4888eb4a792b594edc3", - "BlockNumber": 1923421, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - }, - { - "ID": 20541, - "InsertedAt": "2022-09-20T02:49:48.215125Z", - 
"ExecutionPayloadID": { - "Int64": 0, - "Valid": false - }, - "SignedBlindedBeaconBlock": { - "String": "", - "Valid": false - }, - "Slot": 659049, - "Epoch": 20595, - "BuilderPubkey": "0x845bd072b7cd566f02faeb0a4033ce9399e42839ced64e8b2adcfc859ed1e8e1a5a293336a49feac6d9a5edb779be53a", - "ProposerPubkey": "0xb0a771b9a0dd7e352d46c8efcc1834e610dd097711bf7117678a99d386890c93b9b901872d4dcacb6dcbcf3aea0883ea", - "ProposerFeeRecipient": "0x670b24610df99b1685aeac0dfd5307b92e0cf4d7", - "ParentHash": "0x66b7be6c8d869ce982161059224c3bc5803e3169706230c021000597a6ae09e1", - "BlockHash": "0x576ce9f291e535b3f5bca4493ab425a9d0578c87c286a9c222e7ec53fb334aef", - "BlockNumber": 1923542, - "GasUsed": 21000, - "GasLimit": 30000000, - "NumTx": 1, - "NumBlobs": 1, - "BlobGasUsed": 21000, - "Value": "21000" - } - ] -} \ No newline at end of file