feat(CI): Docker compose for all components (#1044)
Co-authored-by: Mariano Nicolini <[email protected]>
Co-authored-by: samoht9277 <[email protected]>
3 people authored and PatStiles committed Nov 6, 2024
1 parent 25d686c commit be07fec
Showing 18 changed files with 830 additions and 21 deletions.
14 changes: 14 additions & 0 deletions .dockerignore
@@ -0,0 +1,14 @@
docker/*
**/.DS_Store
**/.idea
out
cache
**/build
**/target
**/aligned_verification_data
**/broadcast
volume
nonce_*.bin
docker-compose.yaml
.github/**
**.md
56 changes: 56 additions & 0 deletions .github/workflows/send-proofs-docker.yml
@@ -0,0 +1,56 @@
name: "[CI] Send proofs to network"

on:
pull_request:
types:
- opened
- synchronize
paths-ignore:
- '**.md'

concurrency:
group: pull_request-${{ github.event.pull_request.number }}
cancel-in-progress: true

jobs:
network-test-docker-compose:
name: "Test network with Docker Compose"
runs-on: aligned-runner-ci
permissions:
contents: read
packages: write
pull-requests: write

steps:
- name: Log in to GitHub Container Registry
uses: docker/login-action@v2
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Checkout
uses: actions/checkout@v4
with:
submodules: recursive

- name: Build containers
run: make docker_build

- name: Start containers and initialize network
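# The sleep gives anvil and the freshly started services time to finish initializing.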
run: make docker_up && sleep 15

- name: Send proofs batches
run: make docker_batcher_send_all_proofs_burst

- name: Verify all sent proofs
run: make docker_verify_proof_submission_success

- name: Stop containers
continue-on-error: true
if: always()
run: make docker_down

- name: Ensure admin permissions in _work
if: always()
run: sudo chown admin:admin -R /home/admin/actions-runner/_work/
218 changes: 218 additions & 0 deletions Makefile
@@ -698,6 +698,224 @@ tracker_dump_db:
docker exec -t tracker-postgres-container pg_dumpall -c -U tracker_user > dump.$$(date +\%Y\%m\%d_\%H\%M\%S).sql
@echo "Dumped database successfully to /operator_tracker"

DOCKER_RPC_URL=http://anvil:8545
PROOF_GENERATOR_ADDRESS=0x66f9664f97F2b50F62D13eA064982f936dE76657
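# The anvil host name above resolves to the local devnet node on the Docker Compose network.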

docker_build_base_image:
docker compose -f docker-compose.yaml --profile aligned_base build

docker_build_aggregator:
docker compose -f docker-compose.yaml --profile aggregator build

docker_build_operator:
docker compose -f docker-compose.yaml --profile operator build

docker_build_batcher:
docker compose -f docker-compose.yaml --profile batcher build

docker_restart_aggregator:
docker compose -f docker-compose.yaml --profile aggregator down
docker compose -f docker-compose.yaml --profile aggregator up -d --remove-orphans --force-recreate

docker_restart_operator:
docker compose -f docker-compose.yaml --profile operator down
docker compose -f docker-compose.yaml --profile operator up -d --remove-orphans --force-recreate

docker_restart_batcher:
docker compose -f docker-compose.yaml --profile batcher down
docker compose -f docker-compose.yaml --profile batcher up -d --remove-orphans --force-recreate

docker_build:
docker compose -f docker-compose.yaml --profile aligned_base build
docker compose -f docker-compose.yaml --profile eigenlayer-cli build
docker compose -f docker-compose.yaml --profile foundry build
docker compose -f docker-compose.yaml --profile base build
docker compose -f docker-compose.yaml --profile operator build
docker compose -f docker-compose.yaml --profile batcher build
docker compose -f docker-compose.yaml --profile aggregator build

# Bring-up order: base profile first, wait for anvil to report healthy, then the aggregator,
# the one-shot operator funding/registration jobs, the operator, the payment service
# funding job, and finally the batcher.
docker_up:
docker compose -f docker-compose.yaml --profile base up -d --remove-orphans --force-recreate
@until [ "$$(docker inspect $$(docker ps | grep anvil | awk '{print $$1}') | jq -r '.[0].State.Health.Status')" = "healthy" ]; do sleep .5; done; sleep 2
docker compose -f docker-compose.yaml --profile aggregator up -d --remove-orphans --force-recreate
docker compose -f docker-compose.yaml run --rm fund-operator
docker compose -f docker-compose.yaml run --rm register-operator-eigenlayer
docker compose -f docker-compose.yaml run --rm mint-mock-tokens
docker compose -f docker-compose.yaml run --rm operator-deposit-into-mock-strategy
docker compose -f docker-compose.yaml run --rm operator-whitelist-devnet
docker compose -f docker-compose.yaml run --rm operator-register-with-aligned-layer
docker compose -f docker-compose.yaml --profile operator up -d --remove-orphans --force-recreate
docker compose -f docker-compose.yaml run --rm user-fund-payment-service-devnet
docker compose -f docker-compose.yaml --profile batcher up -d --remove-orphans --force-recreate
@echo "Up and running"

docker_down:
docker compose -f docker-compose.yaml --profile batcher down
docker compose -f docker-compose.yaml --profile operator down
docker compose -f docker-compose.yaml --profile base down
@echo "Everything down"
docker ps

DOCKER_BURST_SIZE=2
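# Well-known Anvil development key (account 0); for local devnet use only.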
DOCKER_PROOFS_PRIVATE_KEY=0xac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80

docker_batcher_send_sp1_burst:
@echo "Sending SP1 fibonacci task to Batcher..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') aligned submit \
--private_key $(DOCKER_PROOFS_PRIVATE_KEY) \
--proving_system SP1 \
--proof ./scripts/test_files/sp1/sp1_fibonacci.proof \
--vm_program ./scripts/test_files/sp1/sp1_fibonacci.elf \
--repetitions $(DOCKER_BURST_SIZE) \
--proof_generator_addr $(PROOF_GENERATOR_ADDRESS) \
--rpc_url $(DOCKER_RPC_URL)

docker_batcher_send_risc0_burst:
@echo "Sending Risc0 fibonacci task to Batcher..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') aligned submit \
--private_key $(DOCKER_PROOFS_PRIVATE_KEY) \
--proving_system Risc0 \
--proof ./scripts/test_files/risc_zero/fibonacci_proof_generator/risc_zero_fibonacci.proof \
--vm_program ./scripts/test_files/risc_zero/fibonacci_proof_generator/fibonacci_id.bin \
--public_input ./scripts/test_files/risc_zero/fibonacci_proof_generator/risc_zero_fibonacci.pub \
--repetitions $(DOCKER_BURST_SIZE) \
--proof_generator_addr $(PROOF_GENERATOR_ADDRESS) \
--rpc_url $(DOCKER_RPC_URL)

docker_batcher_send_plonk_bn254_burst:
@echo "Sending Groth16Bn254 1!=0 task to Batcher..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') aligned submit \
--private_key $(DOCKER_PROOFS_PRIVATE_KEY) \
--proving_system GnarkPlonkBn254 \
--proof ./scripts/test_files/gnark_plonk_bn254_script/plonk.proof \
--public_input ./scripts/test_files/gnark_plonk_bn254_script/plonk_pub_input.pub \
--vk ./scripts/test_files/gnark_plonk_bn254_script/plonk.vk \
--proof_generator_addr $(PROOF_GENERATOR_ADDRESS) \
--rpc_url $(DOCKER_RPC_URL) \
--repetitions $(DOCKER_BURST_SIZE)

docker_batcher_send_plonk_bls12_381_burst:
@echo "Sending Groth16 BLS12-381 1!=0 task to Batcher..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') aligned submit \
--private_key $(DOCKER_PROOFS_PRIVATE_KEY) \
--proving_system GnarkPlonkBls12_381 \
--proof ./scripts/test_files/gnark_plonk_bls12_381_script/plonk.proof \
--public_input ./scripts/test_files/gnark_plonk_bls12_381_script/plonk_pub_input.pub \
--vk ./scripts/test_files/gnark_plonk_bls12_381_script/plonk.vk \
--proof_generator_addr $(PROOF_GENERATOR_ADDRESS) \
--repetitions $(DOCKER_BURST_SIZE) \
--rpc_url $(DOCKER_RPC_URL)

docker_batcher_send_groth16_burst:
@echo "Sending Groth16 BLS12-381 1!=0 task to Batcher..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') aligned submit \
--private_key $(DOCKER_PROOFS_PRIVATE_KEY) \
--proving_system Groth16Bn254 \
--proof ./scripts/test_files/gnark_groth16_bn254_script/groth16.proof \
--public_input ./scripts/test_files/gnark_groth16_bn254_script/plonk_pub_input.pub \
--vk ./scripts/test_files/gnark_groth16_bn254_script/groth16.vk \
--proof_generator_addr $(PROOF_GENERATOR_ADDRESS) \
--repetitions $(DOCKER_BURST_SIZE) \
--rpc_url $(DOCKER_RPC_URL)

# Update target as new proofs are supported.
docker_batcher_send_all_proofs_burst:
@$(MAKE) docker_batcher_send_sp1_burst
@$(MAKE) docker_batcher_send_risc0_burst
@$(MAKE) docker_batcher_send_plonk_bn254_burst
@$(MAKE) docker_batcher_send_plonk_bls12_381_burst
@$(MAKE) docker_batcher_send_groth16_burst

docker_batcher_send_infinite_groth16:
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') \
sh -c ' \
mkdir -p scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs; \
counter=1; \
timer=3; \
while true; do \
echo "Generating proof $${counter} != 0"; \
gnark_groth16_bn254_infinite_script $${counter}; \
aligned submit \
--rpc_url $(DOCKER_RPC_URL) \
--repetitions $(DOCKER_BURST_SIZE) \
--proving_system Groth16Bn254 \
--proof scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs/ineq_$${counter}_groth16.proof \
--public_input scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs/ineq_$${counter}_groth16.pub \
--vk scripts/test_files/gnark_groth16_bn254_infinite_script/infinite_proofs/ineq_$${counter}_groth16.vk \
--proof_generator_addr $(PROOF_GENERATOR_ADDRESS); \
sleep $${timer}; \
counter=$$((counter + 1)); \
done \
'

docker_verify_proofs_onchain:
@echo "Verifying proofs..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') \
sh -c ' \
for proof in ./aligned_verification_data/*.cbor; do \
echo "Verifying $${proof}"; \
aligned verify-proof-onchain \
--aligned-verification-data $${proof} \
--rpc_url $(DOCKER_RPC_URL); \
done \
'

DOCKER_PROOFS_WAIT_TIME=30
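# docker_batcher_send_all_proofs_burst submits 5 proof types with DOCKER_BURST_SIZE (2)
# repetitions each, so the check below expects 10 verification data files.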

docker_verify_proof_submission_success:
@echo "Verifying proofs were successfully submitted..."
docker exec $(shell docker ps | grep batcher | awk '{print $$1}') \
sh -c ' \
if [ -z "$$(ls -A ./aligned_verification_data)" ]; then echo "ERROR: There are no proofs on aligned_verification_data/ directory" && exit 1; fi; \
echo "Waiting $(DOCKER_PROOFS_WAIT_TIME) seconds before starting proof verification. \n"; \
sleep $(DOCKER_PROOFS_WAIT_TIME); \
for proof in ./aligned_verification_data/*.cbor; do \
echo "Verifying proof $${proof} \n"; \
verification=$$(aligned verify-proof-onchain \
--aligned-verification-data $${proof} \
--rpc_url $$(echo $(DOCKER_RPC_URL)) 2>&1); \
if echo "$$verification" | grep -q not; then \
echo "ERROR: Proof verification failed for $${proof}"; \
exit 1; \
elif echo "$$verification" | grep -q verified; then \
echo "Proof verification succeeded for $${proof}"; \
fi; \
echo "---------------------------------------------------------------------------------------------------"; \
done; \
if [ $$(ls -1 ./aligned_verification_data/*.cbor | wc -l) -ne 10 ]; then \
echo "ERROR: Some proofs were verified successfully, but some proofs are missing in the aligned_verification_data/ directory"; \
exit 1; \
fi; \
echo "All proofs verified successfully!"; \
'

docker_attach_foundry:
docker exec -ti $(shell docker ps | grep anvil | awk '{print $$1}') /bin/bash

docker_attach_anvil:
docker exec -ti $(shell docker ps | grep anvil | awk '{print $$1}') /bin/bash

docker_attach_aggregator:
docker exec -ti $(shell docker ps | grep aggregator | awk '{print $$1}') /bin/bash

docker_attach_operator:
docker exec -ti $(shell docker ps | grep operator | awk '{print $$1}') /bin/bash

docker_attach_batcher:
docker exec -ti $(shell docker ps | grep batcher | awk '{print $$1}') /bin/bash

docker_logs_anvil:
docker compose -f docker-compose.yaml logs anvil -f

docker_logs_aggregator:
docker compose -f docker-compose.yaml logs aggregator -f

docker_logs_operator:
docker compose -f docker-compose.yaml logs operator -f

docker_logs_batcher:
docker compose -f docker-compose.yaml logs batcher -f

__TELEMETRY__:
# Collector, Jaeger and Elixir API
telemetry_full_start: open_telemetry_start telemetry_start
13 changes: 0 additions & 13 deletions anvil.Dockerfile

This file was deleted.

8 changes: 8 additions & 0 deletions batcher/aligned-batcher/.env.docker
@@ -0,0 +1,8 @@
AWS_SECRET_ACCESS_KEY=test
AWS_REGION=us-east-2
AWS_ACCESS_KEY_ID=test
AWS_BUCKET_NAME=aligned.storage
UPLOAD_ENDPOINT=http://localstack:4566
DOWNLOAD_ENDPOINT=http://localstack:4566/aligned.storage
RUST_LOG=info
RUST_BACKTRACE=1
8 changes: 0 additions & 8 deletions compose.yaml

This file was deleted.

32 changes: 32 additions & 0 deletions config-files/config-aggregator-docker.yaml
@@ -0,0 +1,32 @@
# Common variables for all the services
# 'production' only prints info and above. 'development' also prints debug
environment: "production"
aligned_layer_deployment_config_file_path: "./contracts/script/output/devnet/alignedlayer_deployment_output.json"
eigen_layer_deployment_config_file_path: "./contracts/script/output/devnet/eigenlayer_deployment_output.json"
eth_rpc_url: "http://anvil:8545"
eth_rpc_url_fallback: "http://anvil:8545"
eth_ws_url: "ws://anvil:8545"
eth_ws_url_fallback: "ws://anvil:8545"
eigen_metrics_ip_port_address: "localhost:9090"

## ECDSA Configurations
ecdsa:
private_key_store_path: "config-files/anvil.aggregator.ecdsa.key.json"
private_key_store_password: ""

## BLS Configurations
bls:
private_key_store_path: "config-files/anvil.aggregator.bls.key.json"
private_key_store_password: ""

## Aggregator Configurations
aggregator:
server_ip_port_address: 0.0.0.0:8090
bls_public_key_compendium_address: 0x322813Fd9A801c5507c9de605d63CEA4f2CE6c44
avs_service_manager_address: 0xc3e53F4d16Ae77Db1c982e75a937B9f60FE63690
enable_metrics: true
metrics_ip_port_address: 0.0.0.0:9091
telemetry_ip_port_address: localhost:4001
garbage_collector_period: 2m #The period of the GC process. Suggested value for Prod: '168h' (7 days)
garbage_collector_tasks_age: 20 #The age of tasks that will be removed by the GC, in blocks. Suggested value for prod: '216000' (30 days)
garbage_collector_tasks_interval: 10 #The interval of queried blocks to get an old batch. Suggested value for prod: '900' (3 hours)
28 changes: 28 additions & 0 deletions config-files/config-batcher-docker.yaml
@@ -0,0 +1,28 @@
# Common variables for all the services
# 'production' only prints info and above. 'development' also prints debug
environment: "production"
aligned_layer_deployment_config_file_path: "./contracts/script/output/devnet/alignedlayer_deployment_output.json"
eigen_layer_deployment_config_file_path: "./contracts/script/output/devnet/eigenlayer_deployment_output.json"
eth_rpc_url: "http://anvil:8545"
eth_rpc_url_fallback: "http://anvil:8545"
eth_ws_url: "ws://anvil:8545"
eth_ws_url_fallback: "ws://anvil:8545"
eigen_metrics_ip_port_address: "localhost:9090"

## ECDSA Configurations
ecdsa:
private_key_store_path: "config-files/anvil.batcher.ecdsa.key.json"
private_key_store_password: ""

## Batcher configurations
batcher:
block_interval: 3
batch_size_interval: 10
max_proof_size: 67108864 # 64 MiB
max_batch_size: 268435456 # 256 MiB
eth_ws_reconnects: 99999999999999
pre_verification_is_enabled: true
metrics_port: 9093
non_paying:
address: 0xa0Ee7A142d267C1f36714E4a8F75612F20a79720 # Anvil address 9
replacement_private_key: ac0974bec39a17e36ba4a6b4d238ff944bacb478cbed5efcae784d7bf4f2ff80 # Anvil address 0