diff --git a/.github/workflows/container-image.yml b/.github/workflows/container-image.yml new file mode 100644 index 0000000..e39cef3 --- /dev/null +++ b/.github/workflows/container-image.yml @@ -0,0 +1,16 @@ +name: Container Image + +on: + push: + branches: + - dev + - feature/server_only + workflow_dispatch: + +jobs: + call-build-and-push: + name: Call + uses: CirclesUBI/.github/.github/workflows/build-and-push.yml@main + with: + image-name: pathfinder2 + secrets: inherit diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 2301282..6fe3804 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -1,9 +1,8 @@ name: BuildAndTest on: - push: pull_request: - branches: [ "main" ] + branches: [ "dev" ] env: CARGO_TERM_COLOR: always @@ -23,17 +22,20 @@ jobs: ~/.cargo/git target key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.toml') }} - - name: Setup PATH - run: echo ~/.foundry/bin/ >> $GITHUB_PATH + - name: Install Foundry uses: foundry-rs/foundry-toolchain@v1 + + - name: Format + run: cargo fmt --check --verbose + - name: Lint + run: cargo clippy --all --all-features -- -D warnings + - name: Build run: cargo build --verbose + - name: Download safes run: wget -q -c https://rpc.circlesubi.id/pathfinder-db/capacity_graph.db + - name: Run tests run: cargo test --verbose - - name: Lint - run: cargo clippy --all --all-features -- -D warnings - - name: Format - run: cargo fmt --check --verbose diff --git a/.gitignore b/.gitignore index 088ba6b..35ae214 100644 --- a/.gitignore +++ b/.gitignore @@ -8,3 +8,6 @@ Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk + +# Capacity graph runtime state +capacity_graph.db diff --git a/Cargo.toml b/Cargo.toml index e2ae8bb..3634df7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,3 +12,4 @@ json = "^0.12.4" num-bigint = "^0.4.3" serde = { version = "1.0.149", features = ["serde_derive"] } serde_json = "1.0.89" +regex = "1.8.1" diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..0ab8283 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,15 @@ +FROM rust:latest AS build + +WORKDIR /build +COPY . . + +RUN cargo install --path . +RUN cargo build --release + +FROM rust AS app + +WORKDIR /app +COPY --from=build /build/target/release . +RUN chmod +x ./server + +ENTRYPOINT ["./server"] diff --git a/README.md b/README.md index 26708a3..33d87ac 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,20 @@ -## Pathfinder2 +# Pathfinder2 -Pathfinder is a collection of tools related to -computing transitive transfers in the +Pathfinder is a collection of tools related to +computing transitive transfers in the [CirclesUBI](https://joincircles.net) trust graph. -### Building +## Building -This is a rust project, so assuming `cargo` is installed, `cargo build` -creates two binaries: The server (default) and the cli. +This is a Rust project, so assuming `cargo` is installed, `cargo build` creates three binaries: +the `server` (default), the `cli`, and the `convert` tool. -Both need a file that contains the trust graph edges to work. +All three need a file that contains the trust graph edges to work. A reasonably up to date edge database file can be obtained from -https://chriseth.github.io/pathfinder2/edges.dat +- https://circlesubi.github.io/pathfinder2/edges.dat -#### Using the Server +### Using the Server `cargo run --release <host>:<port>` will start a JSON-RPC server listening on the given port.
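As a concrete sketch of that invocation (not taken verbatim from the diff): the listen address is the first positional argument, and the `src/bin/server.rs` change further down in this diff adds two optional positional arguments for the request-queue size and the worker-thread count, defaulting to 10 and 4. The address and values below are illustrative only.

```shell
# Build all release binaries (server, cli, convert).
cargo build --release

# Start the JSON-RPC server; the server binary is the default target.
# Arguments: <listen address> [<request queue size> [<worker threads>]]
cargo run --release 0.0.0.0:8080 10 4
```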
@@ -29,18 +29,52 @@ Number of worker threads: 4 Size of request queue: 10 -#### Using the CLI - -The CLI will load an edge database file and compute the transitive transfers -from one source to one destination. You can limit the number of hops to explore -and the maximum amount of circles to transfer. - +#### Run with test data +1) Download the balances and trust data as a [binary dump from 2023-05-23](graph_at_20230523_15_00.db) +2) Start the server with `cargo run --release <host>:<port>` +3) Import the data with the curl command below +4) Query the server with the curl command below + +The data can be imported into a running pathfinder2 server with the following command: +```shell +curl -X POST \ + -H "Content-Type: application/json" \ + -d '{ + "id": "timestamp_value", + "method": "load_safes_binary", + "params": { + "file": "/path/to/graph_at_20230523_15_00.db" + } +}' \ + "http://<host>:<port>" +``` +Afterwards, the server can be queried with the following command: +```shell +curl -X POST \ + -H "Content-Type: application/json" \ + -d '{ + "id": "timestamp_value", + "method": "compute_transfer", + "params": { + "from": "0x000...", + "to": "0x000...", + "value": "999999999999", + "iterative": false, + "prune": true + } +}' \ + "http://<host>:<port>" +``` + +### Using the CLI + +The CLI will load an edge database file and compute the transitive transfers from one source to one destination. You can limit the number of hops to explore and the maximum amount of circles to transfer. The options are: `cargo run --release --bin cli <from> <to> <edges.dat> [<max_hops> [<max_value>]] [--dot <dot_file>]` -For example +For example: `cargo run --release --bin cli 0x9BA1Bcd88E99d6E1E03252A70A63FEa83Bf1208c 0x42cEDde51198D1773590311E2A340DC06B24cB37 edges.dat 3 1000000000000000000` @@ -48,12 +82,12 @@ Computes a transfer of at most `1000000000000000000`, exploring 3 hops. If you specify `--dot <dot_file>`, a graphviz/dot representation of the transfer graph is written to the given file. -#### Conversion Tool +### Conversion Tool -The conversion tool can convert between different ways of representing the edge and trust relations in the circles system. +The conversion tool can convert between different ways of representing the edge and trust relations in the Circles system. All data formats are described in https://hackmd.io/Gg04t7gjQKeDW2Q6Jchp0Q -It can read an edge database both in CSV and binary formatand a "safe database" in json and binary format. +It can read an edge database both in CSV and binary format and a "safe database" in JSON and binary format. The output is always an edge database in either binary or CSV format.
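As a small aside on the `--dot <dot_file>` option described above: the file the CLI writes is plain Graphviz/dot text, so it should be renderable with the stock `dot` tool. A minimal sketch, assuming Graphviz is installed and using the hypothetical output name `transfers.dot`:

```shell
# Render the transfer graph written by `--dot transfers.dot` as an image.
dot -Tpng transfers.dot -o transfers.png
```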
Example: diff --git a/download_safes.py b/download_safes.py index 0c52e1e..630e289 100755 --- a/download_safes.py +++ b/download_safes.py @@ -15,7 +15,7 @@ }""".replace('\n', ' ') #API='https://graph.circles.garden/subgraphs/name/CirclesUBI/circles-subgraph' -API='https://api.thegraph.com/subgraphs/name/circlesubi/circles' +API='https://api.thegraph.com/subgraphs/name/circlesubi/circles-ubi' lastID = 0 diff --git a/graph_at_20230523_15_00.db b/graph_at_20230523_15_00.db new file mode 100644 index 0000000..adadedc Binary files /dev/null and b/graph_at_20230523_15_00.db differ diff --git a/src/bin/server.rs b/src/bin/server.rs index f3d5524..6b0b2ec 100644 --- a/src/bin/server.rs +++ b/src/bin/server.rs @@ -6,5 +6,18 @@ fn main() { let listen_at = env::args() .nth(1) .unwrap_or_else(|| "127.0.0.1:8080".to_string()); - server::start_server(&listen_at, 10, 4); + + let queue_size = env::args() + .nth(2) + .unwrap_or_else(|| "10".to_string()) + .parse::<usize>() + .unwrap(); + + let thread_count = env::args() + .nth(3) + .unwrap_or_else(|| "4".to_string()) + .parse::<u64>() + .unwrap(); + + server::start_server(&listen_at, queue_size, thread_count); } diff --git a/src/io.rs b/src/io.rs index 8969d21..b29057b 100644 --- a/src/io.rs +++ b/src/io.rs @@ -46,7 +46,7 @@ pub fn read_edges_csv(path: &String) -> Result<EdgeDB, io::Error> { pub fn write_edges_binary(edges: &EdgeDB, path: &String) -> Result<(), io::Error> { let mut file = File::create(path)?; - let address_index = write_address_index(&mut file, edges)?; + let address_index = write_address_index(&mut file, addresses_from_edges(edges))?; write_edges(&mut file, edges, &address_index) } @@ -123,6 +123,46 @@ pub fn import_from_safes_binary(path: &str) -> Result<DB, io::Error> { Ok(DB::new(safes, token_owner)) } +pub fn export_safes_to_binary(db: &DB, path: &str) -> Result<(), io::Error> { + let mut file = File::create(path)?; + + let address_index = write_address_index(&mut file, addresses_from_safes(db.safes()))?; + + // organizations + let organizations = db.safes().iter().filter(|s| s.1.organization); + write_u32(&mut file, organizations.clone().count() as u32)?; + for (user, _) in organizations { + write_address(&mut file, user, &address_index)?; + } + + // trust edges + let trust_edges = db.safes().iter().flat_map(|(user, safe)| { + safe.limit_percentage + .iter() + .map(|(other, percentage)| (*user, other, percentage)) + }); + write_u32(&mut file, trust_edges.clone().count() as u32)?; + for (user, send_to, percentage) in trust_edges { + write_address(&mut file, &user, &address_index)?; + write_address(&mut file, send_to, &address_index)?; + write_u8(&mut file, *percentage)?; + } + + // balances + let balances = db.safes().iter().flat_map(|(user, safe)| { + safe.balances + .iter() + .map(|(token_owner, amount)| (*user, token_owner, amount)) + }); + write_u32(&mut file, balances.clone().count() as u32)?; + for (user, token_owner, amount) in balances { + write_address(&mut file, &user, &address_index)?; + write_address(&mut file, token_owner, &address_index)?; + write_u256(&mut file, amount)?; + } + Ok(()) +} + fn read_address_index(file: &mut File) -> Result<HashMap<u32, Address>, io::Error> { let address_count = read_u32(file)?; let mut addresses = HashMap::new(); @@ -134,10 +174,7 @@ Ok(addresses) } -fn write_address_index( - file: &mut File, - edges: &EdgeDB, -) -> Result<HashMap<Address, u32>, io::Error> { +fn addresses_from_edges(edges: &EdgeDB) -> BTreeSet<Address> {
let mut addresses = BTreeSet::new(); for Edge { from, to, token, .. @@ -147,6 +184,37 @@ fn write_address_index( addresses.insert(*to); addresses.insert(*token); } + addresses +} + +fn addresses_from_safes(safes: &BTreeMap<Address, Safe>) -> BTreeSet<Address> {
+ let mut addresses = BTreeSet::new(); + for ( + user, + Safe { + token_address, + balances, + limit_percentage, + organization: _, + }, + ) in safes + { + addresses.insert(*user); + addresses.insert(*token_address); + for other in balances.keys() { + addresses.insert(*other); + } + for other in limit_percentage.keys() { + addresses.insert(*other); + } + } + addresses +} + +fn write_address_index( + file: &mut File, + addresses: BTreeSet<Address>,
+) -> Result<HashMap<Address, u32>, io::Error> { write_u32(file, addresses.len() as u32)?; let mut index = HashMap::new(); for (i, addr) in addresses.into_iter().enumerate() { diff --git a/src/safe_db/db.rs b/src/safe_db/db.rs index 1abc7da..e331b96 100644 --- a/src/safe_db/db.rs +++ b/src/safe_db/db.rs @@ -21,6 +21,10 @@ impl DB { db } + pub fn safes(&self) -> &BTreeMap<Address, Safe> { + &self.safes + } + pub fn edges(&self) -> &EdgeDB { &self.edges } diff --git a/src/server.rs b/src/server.rs index ca01750..8ab7c10 100644 --- a/src/server.rs +++ b/src/server.rs @@ -3,11 +3,15 @@ use crate::io::{import_from_safes_binary, read_edges_binary, read_edges_csv}; use crate::types::edge::EdgeDB; use crate::types::{Address, Edge, U256}; use json::JsonValue; +use num_bigint::BigUint; +use regex::Regex; use std::error::Error; +use std::fmt::{Debug, Display, Formatter}; use std::io::Read; use std::io::{BufRead, BufReader, Write}; use std::net::{TcpListener, TcpStream}; use std::ops::Deref; +use std::str::FromStr; use std::sync::mpsc::TrySendError; use std::sync::{mpsc, Arc, Mutex, RwLock}; use std::thread; @@ -18,6 +22,52 @@ struct JsonRpcRequest { params: JsonValue, } +struct InputValidationError(String); +impl Error for InputValidationError {} + +impl Debug for InputValidationError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "Error: {}", self.0) + } +} +impl Display for InputValidationError { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "Error: {}", self.0) + } +} + +fn validate_and_parse_ethereum_address(address: &str) -> Result<Address, Box<dyn Error>> { + let re = Regex::new(r"^0x[0-9a-fA-F]{40}$").unwrap(); + if re.is_match(address) { + Ok(Address::from(address)) + } else { + Err(Box::new(InputValidationError(format!( + "Invalid Ethereum address: {}", + address + )))) + } +} + +fn validate_and_parse_u256(value_str: &str) -> Result<U256, Box<dyn Error>> { + match BigUint::from_str(value_str) { + Ok(parsed_value) => { + if parsed_value > U256::MAX.into() { + Err(Box::new(InputValidationError(format!( + "Value {} is too large. Maximum value is {}.", + parsed_value, + U256::MAX + )))) + } else { + Ok(U256::from_bigint_truncating(parsed_value)) + } + } + Err(e) => Err(Box::new(InputValidationError(format!( + "Invalid value: {}. Couldn't parse value: {}", + value_str, e + )))), + } +} + pub fn start_server(listen_at: &str, queue_size: usize, threads: u64) { let edges: Arc<RwLock<Arc<EdgeDB>>> = Arc::new(RwLock::new(Arc::new(EdgeDB::default()))); @@ -137,22 +187,28 @@ fn compute_transfer( mut socket: TcpStream, ) -> Result<(), Box<dyn Error>> { socket.write_all(chunked_header().as_bytes())?; + + let parsed_value_param = match request.params["value"].as_str() { + Some(value_str) => validate_and_parse_u256(value_str)?, + None => U256::MAX, + }; + + let from_address = validate_and_parse_ethereum_address(&request.params["from"].to_string())?; + let to_address = validate_and_parse_ethereum_address(&request.params["to"].to_string())?; + let max_distances = if request.params["iterative"].as_bool().unwrap_or_default() { vec![Some(1), Some(2), None] } else { vec![None] }; + let max_transfers = request.params["max_transfers"].as_u64(); for max_distance in max_distances { let (flow, transfers) = graph::compute_flow( - &Address::from(request.params["from"].to_string().as_str()), - &Address::from(request.params["to"].to_string().as_str()), + &from_address, + &to_address, edges, - if request.params.has_key("value") { - U256::from(request.params["value"].to_string().as_str()) - } else { - U256::MAX - }, + parsed_value_param, max_distance, max_transfers, );
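As an end-to-end sketch of the new input validation in `src/server.rs` (the host, port, and `from` address below are placeholders, and the exact error payload returned for rejected input is not shown in this diff): `from` and `to` must be 0x-prefixed 40-hex-digit addresses, and `value` is read with `as_str()`, so it should be sent as a decimal string.

```shell
# Hypothetical smoke test against a locally running server:
# the malformed "from" address should be rejected by
# validate_and_parse_ethereum_address, while "value" is parsed
# from a decimal string by validate_and_parse_u256.
curl -X POST \
  -H "Content-Type: application/json" \
  -d '{
  "id": "1",
  "method": "compute_transfer",
  "params": {
    "from": "0xnot_a_valid_address",
    "to": "0x42cEDde51198D1773590311E2A340DC06B24cB37",
    "value": "1000000000000000000",
    "iterative": false,
    "prune": true
  }
}' \
  "http://localhost:8080"
```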