Skip to content

Commit

Permalink
feat(multichain): enhance search api (#1151)
Browse files Browse the repository at this point in the history
* feat: add paginated address prefix search

* fix: address prefix search

* feat: add quick search for dapps

* fix: filter out chains with invalid chain_id

* fix: adjust order_by

* fix: proto fields

* feat: add logs for failed search

* feat: add setting for default page size
  • Loading branch information
lok52 authored Dec 18, 2024
1 parent e24adcd commit 57c1884
Show file tree
Hide file tree
Showing 20 changed files with 460 additions and 61 deletions.
15 changes: 11 additions & 4 deletions multichain-aggregator/Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 2 additions & 1 deletion multichain-aggregator/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -55,10 +55,11 @@ alloy-primitives = "0.8"
anyhow = "1.0"
async-std = { version = "1", features = ["attributes", "tokio1"] }
async-trait = "0.1"
blockscout-chains = { git = "https://github.com/blockscout/blockscout-rs", version = "0.1.0" }
blockscout-chains = { git = "https://github.com/blockscout/blockscout-rs", version = "0.2.0" }
config = "0.13"
env-collector = { git = "https://github.com/blockscout/blockscout-rs", version = "0.1.1" }
pretty_assertions = "1.3"
regex = "1.10"
reqwest = "0.12"
thiserror = "1.0"
url = { version = "2.4" }
13 changes: 8 additions & 5 deletions multichain-aggregator/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,13 @@ Service-specific environment variables. Common environment variables are listed

[anchor]: <> (anchors.envs.start)

| Variable | Req&#x200B;uir&#x200B;ed | Description | Default value |
| -------------------------------------------------- | ------------------------ | ---------------------------------- | -------------- |
| `MULTICHAIN_AGGREGATOR__DATABASE__CONNECT__URL` | true | Postgres connect URL to service DB | |
| `MULTICHAIN_AGGREGATOR__DATABASE__CREATE_DATABASE` | | Create database if doesn't exist | `false` |
| `MULTICHAIN_AGGREGATOR__DATABASE__RUN_MIGRATIONS` | | Run database migrations | `false` |
| Variable | Req&#x200B;uir&#x200B;ed | Description | Default value |
| -------------------------------------------------------- | ------------------------ | ----------------------------------- | ------------- |
| `MULTICHAIN_AGGREGATOR__DATABASE__CONNECT__URL` | true | Postgres connect URL to service DB | |
| `MULTICHAIN_AGGREGATOR__DATABASE__CREATE_DATABASE` | | Create database if doesn't exist | `false` |
| `MULTICHAIN_AGGREGATOR__DATABASE__RUN_MIGRATIONS` | | Run database migrations | `false` |
| `MULTICHAIN_AGGREGATOR__SERVICE__DAPP_CLIENT__URL` | true | URL of the dapp marketplace service, e.g. `http://localhost:8080/api/v1` | |
| `MULTICHAIN_AGGREGATOR__SERVICE__API__DEFAULT_PAGE_SIZE` | | Default number of items per page in paginated API responses | `50` |
| `MULTICHAIN_AGGREGATOR__SERVICE__API__MAX_PAGE_SIZE` | | Maximum page size a client may request from paginated API endpoints | `100` |

[anchor]: <> (anchors.envs.end)
2 changes: 1 addition & 1 deletion multichain-aggregator/justfile
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ stop-test-postgres:

run:
MULTICHAIN_AGGREGATOR__DATABASE__CONNECT__URL={{DATABASE_URL}} \
cargo run --bin multichain-aggregator-server
dotenvy -f multichain-aggregator-server/config/base.env cargo run --bin multichain-aggregator-server

generate-entities:
sea-orm-cli generate entity --lib -o multichain-aggregator-entity/src
Expand Down
4 changes: 4 additions & 0 deletions multichain-aggregator/multichain-aggregator-logic/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,9 +12,13 @@ tracing = { workspace = true }
sea-orm = { workspace = true }
alloy-primitives = { workspace = true }
regex = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
thiserror = { workspace = true }
tonic = { workspace = true }
tokio = { workspace = true }
reqwest = { workspace = true }
url = { workspace = true }

[dev-dependencies]
blockscout-service-launcher = { workspace = true }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,46 @@
use crate::error::ServiceError;
use serde::Deserialize;
use url::Url;

/// HTTP client for the external dapp marketplace service, used by quick
/// search to include marketplace dapps in multichain search results.
pub struct DappClient {
// Shared reqwest client; reused across requests for connection pooling.
http: reqwest::Client,
// Base URL of the marketplace service. NOTE(review): `search_dapps`
// replaces the path component, so only scheme/host/port are effective.
url: Url,
}

/// A single marketplace dapp entry as returned by the dapp service.
/// Deserialized from camelCase JSON (e.g. `shortDescription`).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Dapp {
// Marketplace-assigned dapp identifier.
pub id: String,
// Human-readable dapp name.
pub title: String,
// Logo URL as provided by the marketplace (kept as a plain string).
pub logo: String,
pub short_description: String,
}

/// Search-result wrapper pairing a dapp with the chain it is deployed on.
/// Deserialized from camelCase JSON (`{"dapp": {...}, "chainId": "..."}`).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct DappWithChainId {
pub dapp: Dapp,
// Chain id as a string, exactly as the marketplace reports it; parsing
// into a numeric ChainId happens downstream (invalid ids are filtered).
pub chain_id: String,
}

impl DappClient {
    /// Creates a client targeting the dapp marketplace service at `url`.
    pub fn new(url: Url) -> Self {
        let http = reqwest::Client::new();
        Self { http, url }
    }

    /// Searches marketplace dapps matching `query`.
    ///
    /// Issues `GET /api/v1/marketplace/dapps:search?query=<query>` against the
    /// configured host. Note that `set_path` replaces any path present in the
    /// configured base URL, so only its scheme/host/port are used.
    ///
    /// # Errors
    /// Returns `ServiceError::Internal` if the request fails to send, the
    /// server responds with a non-success HTTP status, or the response body is
    /// not valid JSON for `Vec<DappWithChainId>`.
    pub async fn search_dapps(&self, query: &str) -> Result<Vec<DappWithChainId>, ServiceError> {
        let mut url = self.url.clone();
        url.set_path("/api/v1/marketplace/dapps:search");
        url.query_pairs_mut().append_pair("query", query);

        self.http
            .get(url)
            .send()
            .await
            .map_err(|e| ServiceError::Internal(e.into()))?
            // Fail early on 4xx/5xx so callers see an HTTP error instead of a
            // confusing JSON-decode error from an HTML/plain-text error body.
            .error_for_status()
            .map_err(|e| ServiceError::Internal(e.into()))?
            .json::<Vec<DappWithChainId>>()
            .await
            .map_err(|e| ServiceError::Internal(e.into()))
    }
}
5 changes: 4 additions & 1 deletion multichain-aggregator/multichain-aggregator-logic/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod api_key_manager;
pub mod dapp_client;
pub mod error;
mod import;
mod proto;
Expand All @@ -7,4 +8,6 @@ pub mod search;
mod types;

pub use import::batch_import;
pub use types::{api_keys::ApiKey, batch_import_request::BatchImportRequest, chains::Chain};
pub use types::{
api_keys::ApiKey, batch_import_request::BatchImportRequest, chains::Chain, ChainId,
};
Original file line number Diff line number Diff line change
@@ -1,12 +1,13 @@
use crate::{
error::{ParseError, ServiceError},
types::addresses::Address,
types::{addresses::Address, ChainId},
};
use alloy_primitives::Address as AddressAlloy;
use entity::addresses::{ActiveModel, Column, Entity, Model};
use regex::Regex;
use sea_orm::{
prelude::Expr, sea_query::OnConflict, ActiveValue::NotSet, ColumnTrait, ConnectionTrait, DbErr,
EntityTrait, Iterable, QueryFilter, QuerySelect,
prelude::Expr, sea_query::OnConflict, ActiveValue::NotSet, ConnectionTrait, DbErr, EntityTrait,
IntoSimpleExpr, Iterable, QueryFilter, QueryOrder, QuerySelect,
};
use std::sync::OnceLock;

Expand All @@ -15,6 +16,11 @@ fn words_regex() -> &'static Regex {
RE.get_or_init(|| Regex::new(r"[a-zA-Z0-9]+").unwrap())
}

/// Returns the lazily-compiled regex that recognizes a (possibly
/// `0x`-prefixed) hexadecimal string of 3 to 40 hex digits — i.e. a
/// plausible prefix of a 20-byte address hash.
fn hex_regex() -> &'static Regex {
    static HEX_REGEX: OnceLock<Regex> = OnceLock::new();
    HEX_REGEX.get_or_init(|| {
        // 3..=40 hex chars: short enough to require some signal, long enough
        // to cover a complete 40-hex-digit address.
        Regex::new(r"^(0x)?[0-9a-fA-F]{3,40}$").expect("static hex pattern is valid")
    })
}

pub async fn upsert_many<C>(db: &C, addresses: Vec<Address>) -> Result<(), DbErr>
where
C: ConnectionTrait,
Expand Down Expand Up @@ -44,32 +50,45 @@ where
Ok(())
}

pub async fn find_by_address<C>(
db: &C,
address: alloy_primitives::Address,
) -> Result<Vec<Address>, ServiceError>
pub async fn search_by_query<C>(db: &C, q: &str) -> Result<Vec<Address>, ServiceError>
where
C: ConnectionTrait,
{
let res = Entity::find()
.filter(Column::Hash.eq(address.as_slice()))
.all(db)
.await?
.into_iter()
.map(Address::try_from)
.collect::<Result<Vec<_>, _>>()?;

Ok(res)
search_by_query_paginated(db, q, None, 100)
.await
.map(|(addresses, _)| addresses)
}

pub async fn search_by_query<C>(db: &C, q: &str) -> Result<Vec<Address>, ServiceError>
pub async fn search_by_query_paginated<C>(
db: &C,
q: &str,
page_token: Option<(AddressAlloy, ChainId)>,
limit: u64,
) -> Result<(Vec<Address>, Option<(AddressAlloy, ChainId)>), ServiceError>
where
C: ConnectionTrait,
{
let mut query = Entity::find();
let page_token = page_token.unwrap_or((AddressAlloy::ZERO, ChainId::MIN));
let mut query = Entity::find()
.filter(
Expr::tuple([
Column::Hash.into_simple_expr(),
Column::ChainId.into_simple_expr(),
])
.gte(Expr::tuple([
page_token.0.as_slice().into(),
page_token.1.into(),
])),
)
.order_by_asc(Column::Hash)
.order_by_asc(Column::ChainId)
.limit(limit + 1);

if let Ok(address) = try_parse_address(q) {
query = query.filter(Column::Hash.eq(address.as_slice()));
if hex_regex().is_match(q) {
query = query.filter(Expr::cust_with_expr(
"encode(hash, 'hex') LIKE $1",
format!("{}%", q.to_lowercase().strip_prefix("0x").unwrap_or(q)),
));
} else {
let ts_query = prepare_ts_query(q);
query = query.filter(Expr::cust_with_expr(
Expand All @@ -80,14 +99,20 @@ where
));
}

let res = query
.limit(50)
let addresses = query
.all(db)
.await?
.into_iter()
.map(Address::try_from)
.collect::<Result<Vec<_>, _>>()?;
Ok(res)

match addresses.get(limit as usize) {
Some(a) => Ok((
addresses[0..limit as usize].to_vec(),
Some((a.hash, a.chain_id)),
)),
None => Ok((addresses, None)),
}
}

fn non_primary_columns() -> impl Iterator<Item = Column> {
Expand Down
57 changes: 49 additions & 8 deletions multichain-aggregator/multichain-aggregator-logic/src/search.rs
Original file line number Diff line number Diff line change
@@ -1,15 +1,17 @@
use crate::{
dapp_client::DappClient,
error::ServiceError,
repository::{addresses, block_ranges, hashes},
types::{
chains::Chain,
dapp::MarketplaceDapp,
search_results::{ChainSearchResult, SearchResults},
ChainId,
},
};
use sea_orm::DatabaseConnection;
use std::collections::BTreeMap;
use tokio::try_join;
use tokio::join;

macro_rules! populate_search_results {
($target:expr, $explorers:expr, $from:expr, $field:ident) => {
Expand All @@ -28,27 +30,66 @@ macro_rules! populate_search_results {

pub async fn quick_search(
db: &DatabaseConnection,
dapp_client: &DappClient,
query: String,
chains: &[Chain],
) -> Result<SearchResults, ServiceError> {
let raw_query = query.trim();

let ((blocks, transactions), block_numbers, addresses) = try_join!(
let (hashes, block_numbers, addresses, dapps) = join!(
hashes::search_by_query(db, raw_query),
block_ranges::search_by_query(db, raw_query),
addresses::search_by_query(db, raw_query)
)?;
addresses::search_by_query(db, raw_query),
dapp_client.search_dapps(raw_query),
);

let explorers: BTreeMap<ChainId, String> = chains
.iter()
.filter_map(|c| c.explorer_url.as_ref().map(|url| (c.id, url.clone())))
.collect();

let mut results = SearchResults::default();
populate_search_results!(results, explorers, addresses, addresses);
populate_search_results!(results, explorers, blocks, blocks);
populate_search_results!(results, explorers, transactions, transactions);
populate_search_results!(results, explorers, block_numbers, block_numbers);

match hashes {
Ok((blocks, transactions)) => {
populate_search_results!(results, explorers, blocks, blocks);
populate_search_results!(results, explorers, transactions, transactions);
}
Err(err) => {
tracing::error!(error = ?err, "failed to search hashes");
}
}

match block_numbers {
Ok(block_numbers) => {
populate_search_results!(results, explorers, block_numbers, block_numbers);
}
Err(err) => {
tracing::error!(error = ?err, "failed to search block numbers");
}
}

match addresses {
Ok(addresses) => {
populate_search_results!(results, explorers, addresses, addresses);
}
Err(err) => {
tracing::error!(error = ?err, "failed to search addresses");
}
}

match dapps {
Ok(dapps) => {
let dapps: Vec<MarketplaceDapp> = dapps
.into_iter()
.filter_map(|d| d.try_into().ok())
.collect();
populate_search_results!(results, explorers, dapps, dapps);
}
Err(err) => {
tracing::error!(error = ?err, "failed to search dapps");
}
}

Ok(results)
}
Loading

0 comments on commit 57c1884

Please sign in to comment.