diff --git a/stats/stats-server/src/read_service.rs b/stats/stats-server/src/read_service.rs index 3b1d611c5..57cb5d6fc 100644 --- a/stats/stats-server/src/read_service.rs +++ b/stats/stats-server/src/read_service.rs @@ -17,7 +17,7 @@ use stats::{ range::UniversalRange, types::Timespan, utils::{day_start, MarkedDbConnection}, - RequestedPointsLimit, ResolutionKind, UpdateError, + RequestedPointsLimit, ResolutionKind, ChartError, }; use stats_proto::blockscout::stats::v1 as proto_v1; use tonic::{Request, Response, Status}; @@ -60,10 +60,10 @@ impl From for ReadLimits { } } -fn map_update_error(err: UpdateError) -> Status { +fn map_update_error(err: ChartError) -> Status { match &err { - UpdateError::ChartNotFound(_) => Status::not_found(err.to_string()), - UpdateError::IntervalTooLarge { limit: _ } => Status::invalid_argument(err.to_string()), + ChartError::ChartNotFound(_) => Status::not_found(err.to_string()), + ChartError::IntervalTooLarge { limit: _ } => Status::invalid_argument(err.to_string()), _ => { tracing::error!(err = ?err, "internal read error"); Status::internal(err.to_string()) @@ -100,10 +100,10 @@ impl ReadService { range: UniversalRange>, points_limit: Option, query_time: DateTime, - ) -> Result { + ) -> Result { let migrations = BlockscoutMigrations::query_from_db(self.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; let context = UpdateContext::from_params_now_or_override(UpdateParameters { db: &self.db, blockscout: &self.blockscout, @@ -143,7 +143,7 @@ impl ReadService { range: UniversalRange>, points_limit: Option, query_time: DateTime, - ) -> Result { + ) -> Result { let data = self .query_with_handle(query_handle, range, points_limit, query_time) .await?; diff --git a/stats/stats/src/charts/chart.rs b/stats/stats/src/charts/chart.rs index b3ea22ff1..8713a897f 100644 --- a/stats/stats/src/charts/chart.rs +++ b/stats/stats/src/charts/chart.rs @@ -17,9 +17,8 @@ use super::{ 
query_dispatch::{ChartTypeSpecifics, QuerySerialized, QuerySerializedDyn}, }; -// todo: rename to `ChartError` or similar #[derive(Error, Debug)] -pub enum UpdateError { +pub enum ChartError { #[error("blockscout database error: {0}")] BlockscoutDB(DbErr), #[error("stats database error: {0}")] @@ -32,12 +31,12 @@ pub enum UpdateError { Internal(String), } -impl From for UpdateError { +impl From for ChartError { fn from(read: ReadError) -> Self { match read { - ReadError::DB(db) => UpdateError::StatsDB(db), - ReadError::ChartNotFound(err) => UpdateError::ChartNotFound(err), - ReadError::IntervalTooLarge(limit) => UpdateError::IntervalTooLarge { limit }, + ReadError::DB(db) => ChartError::StatsDB(db), + ReadError::ChartNotFound(err) => ChartError::ChartNotFound(err), + ReadError::IntervalTooLarge(limit) => ChartError::IntervalTooLarge { limit }, } } } diff --git a/stats/stats/src/charts/counters/average_block_time.rs b/stats/stats/src/charts/counters/average_block_time.rs index bb9b05c81..67c76a1f9 100644 --- a/stats/stats/src/charts/counters/average_block_time.rs +++ b/stats/stats/src/charts/counters/average_block_time.rs @@ -12,7 +12,7 @@ use crate::{ range::UniversalRange, types::TimespanValue, utils::NANOS_PER_SEC, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; use blockscout_db::entity::blocks; @@ -52,12 +52,12 @@ struct BlockTimestamp { async fn query_average_block_time( cx: &UpdateContext<'_>, offset: u64, -) -> Result>, UpdateError> { +) -> Result>, ChartError> { let query = average_block_time_statement(offset); let block_timestamps = BlockTimestamp::find_by_statement(query) .all(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; Ok(calculate_average_block_time(block_timestamps)) } @@ -69,12 +69,12 @@ impl RemoteQueryBehaviour for AverageBlockTimeQuery { async fn query_data( cx: &UpdateContext<'_>, _range: 
UniversalRange>, - ) -> Result, UpdateError> { + ) -> Result, ChartError> { match query_average_block_time(cx, OFFSET_BLOCKS).await? { Some(avg_block_time) => Ok(avg_block_time), None => query_average_block_time(cx, 0) .await? - .ok_or(UpdateError::Internal( + .ok_or(ChartError::Internal( "No blocks were returned to calculate average block time".into(), )), } diff --git a/stats/stats/src/charts/counters/mock.rs b/stats/stats/src/charts/counters/mock.rs index 0576f2443..701e2aace 100644 --- a/stats/stats/src/charts/counters/mock.rs +++ b/stats/stats/src/charts/counters/mock.rs @@ -11,7 +11,7 @@ use crate::{ }, range::UniversalRange, types::timespans::DateValue, - ChartProperties, Named, UpdateError, + ChartProperties, Named, ChartError, }; use chrono::{DateTime, NaiveDate, Utc}; @@ -32,7 +32,7 @@ where async fn query_data( cx: &UpdateContext<'_>, _range: UniversalRange>, - ) -> Result { + ) -> Result { if cx.time >= PointDateTime::get() { Ok(DateValue:: { timespan: PointDateTime::get().date_naive(), diff --git a/stats/stats/src/charts/counters/total_blocks.rs b/stats/stats/src/charts/counters/total_blocks.rs index 5e006e1d2..24cdc94a1 100644 --- a/stats/stats/src/charts/counters/total_blocks.rs +++ b/stats/stats/src/charts/counters/total_blocks.rs @@ -12,7 +12,7 @@ use crate::{ range::UniversalRange, types::timespans::DateValue, utils::MarkedDbConnection, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; use blockscout_db::entity::blocks; @@ -36,7 +36,7 @@ impl RemoteQueryBehaviour for TotalBlocksQueryBehaviour { async fn query_data( cx: &UpdateContext<'_>, _range: UniversalRange>, - ) -> Result { + ) -> Result { let data = blocks::Entity::find() .select_only() .column_as(Expr::col(blocks::Column::Number).count(), "number") @@ -45,8 +45,8 @@ impl RemoteQueryBehaviour for TotalBlocksQueryBehaviour { .into_model::() .one(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)? 
- .ok_or_else(|| UpdateError::Internal("query returned nothing".into()))?; + .map_err(ChartError::BlockscoutDB)? + .ok_or_else(|| ChartError::Internal("query returned nothing".into()))?; let data = DateValue:: { timespan: data.timestamp.date(), @@ -85,7 +85,7 @@ pub static TOTAL_BLOCKS_ESTIMATION_CACHE_LIVENESS_SEC: OnceLock = OnceLock: static CACHED_BLOCKS_ESTIMATION: OnceLock>> = OnceLock::new(); impl ValueEstimation for CachedBlocksEstimation { - async fn estimate(blockscout: &MarkedDbConnection) -> Result, UpdateError> { + async fn estimate(blockscout: &MarkedDbConnection) -> Result, ChartError> { async fn cached_blocks_estimation( blockscout: &DatabaseConnection, db_id: &str, @@ -115,7 +115,7 @@ impl ValueEstimation for CachedBlocksEstimation { let now = Utc::now(); let value = cached_blocks_estimation(blockscout.connection.as_ref(), &blockscout.db_name) .await - .map_err(UpdateError::BlockscoutDB)? + .map_err(ChartError::BlockscoutDB)? .map(|b| { let b = b as f64 * 0.9; b as i64 diff --git a/stats/stats/src/charts/counters/total_contracts.rs b/stats/stats/src/charts/counters/total_contracts.rs index 30b342fd7..c5eeeab67 100644 --- a/stats/stats/src/charts/counters/total_contracts.rs +++ b/stats/stats/src/charts/counters/total_contracts.rs @@ -8,7 +8,7 @@ use crate::{ }, range::UniversalRange, types::timespans::DateValue, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; use blockscout_db::entity::addresses; @@ -24,13 +24,13 @@ impl RemoteQueryBehaviour for TotalContractsQueryBehaviour { async fn query_data( cx: &UpdateContext<'_>, _range: UniversalRange>, - ) -> Result { + ) -> Result { let value = addresses::Entity::find() .filter(addresses::Column::ContractCode.is_not_null()) .filter(addresses::Column::InsertedAt.lte(cx.time)) .count(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; let timespan = cx.time.date_naive(); 
Ok(DateValue:: { timespan, diff --git a/stats/stats/src/charts/counters/yesterday_txns.rs b/stats/stats/src/charts/counters/yesterday_txns.rs index 4cfa9a31e..f16f39cc8 100644 --- a/stats/stats/src/charts/counters/yesterday_txns.rs +++ b/stats/stats/src/charts/counters/yesterday_txns.rs @@ -10,7 +10,7 @@ use crate::{ range::UniversalRange, types::TimespanValue, utils::day_start, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; use chrono::{DateTime, Days, NaiveDate, Utc}; use entity::sea_orm_active_enums::ChartType; @@ -24,11 +24,11 @@ impl RemoteQueryBehaviour for YesterdayTxnsQuery { async fn query_data( cx: &UpdateContext<'_>, _range: UniversalRange>, - ) -> Result { + ) -> Result { let today = cx.time.date_naive(); let yesterday = today .checked_sub_days(Days::new(1)) - .ok_or(UpdateError::Internal( + .ok_or(ChartError::Internal( "Update time is incorrect: ~ minimum possible date".into(), ))?; let yesterday_range = day_start(&yesterday)..day_start(&today); @@ -39,7 +39,7 @@ impl RemoteQueryBehaviour for YesterdayTxnsQuery { let data = Self::Output::find_by_statement(query) .one(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)? + .map_err(ChartError::BlockscoutDB)? 
// no transactions for yesterday .unwrap_or(TimespanValue::with_zero_value(yesterday)); Ok(data) diff --git a/stats/stats/src/charts/db_interaction/read.rs b/stats/stats/src/charts/db_interaction/read.rs index 271ed4724..7b55f639a 100644 --- a/stats/stats/src/charts/db_interaction/read.rs +++ b/stats/stats/src/charts/db_interaction/read.rs @@ -10,7 +10,7 @@ use crate::{ timespans::{DateValue, Month, Week, Year}, ExtendedTimespanValue, Timespan, TimespanDuration, TimespanValue, }, - ChartProperties, MissingDatePolicy, UpdateError, + ChartProperties, MissingDatePolicy, ChartError, }; use blockscout_db::entity::blocks; @@ -482,7 +482,7 @@ pub async fn last_accurate_point( force_full: bool, approximate_trailing_points: u64, policy: MissingDatePolicy, -) -> Result>, UpdateError> +) -> Result>, ChartError> where ChartProps: ChartProperties + ?Sized, ChartProps::Resolution: Ord + Clone + Debug, @@ -499,7 +499,7 @@ where .into_model() .one(db) .await - .map_err(UpdateError::StatsDB)?; + .map_err(ChartError::StatsDB)?; let metadata = get_chart_metadata(db, &ChartProps::key()).await?; match recorded_min_blockscout_block { @@ -536,7 +536,7 @@ where } }); let Some(last_accurate_point) = last_accurate_point else { - return Err(UpdateError::Internal("Failure while reading chart data: did not return accurate data (with `fill_missing_dates`=true)".into())); + return Err(ChartError::Internal("Failure while reading chart data: did not return accurate data (with `fill_missing_dates`=true)".into())); }; if let Some(block) = recorded_min_blockscout_block.min_blockscout_block { @@ -656,10 +656,10 @@ impl RemoteQueryBehaviour for QueryAllBlockTimestampRange { async fn query_data( cx: &UpdateContext<'_>, _range: UniversalRange>, - ) -> Result { + ) -> Result { let start_timestamp = get_min_date_blockscout(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)? + .map_err(ChartError::BlockscoutDB)? 
.and_utc(); Ok(start_timestamp..cx.time) } diff --git a/stats/stats/src/charts/lines/gas_used_growth.rs b/stats/stats/src/charts/lines/gas_used_growth.rs index 6b9cc3bfe..cdce9900d 100644 --- a/stats/stats/src/charts/lines/gas_used_growth.rs +++ b/stats/stats/src/charts/lines/gas_used_growth.rs @@ -21,7 +21,7 @@ use crate::{ define_and_impl_resolution_properties, types::timespans::{DateValue, Month, Week, Year}, utils::sql_with_range_filter_opt, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartError, ChartProperties, MissingDatePolicy, Named, }; use chrono::{DateTime, NaiveDate, Utc}; @@ -61,7 +61,7 @@ pub struct IncrementsFromPartialSum; impl MapFunction>> for IncrementsFromPartialSum { type Output = Vec>; - fn function(inner_data: Vec>) -> Result { + fn function(inner_data: Vec>) -> Result { Ok(inner_data .into_iter() .scan(Decimal::ZERO, |state, mut next| { diff --git a/stats/stats/src/charts/lines/mock.rs b/stats/stats/src/charts/lines/mock.rs index 2dcecb1c3..ac6b5a151 100644 --- a/stats/stats/src/charts/lines/mock.rs +++ b/stats/stats/src/charts/lines/mock.rs @@ -12,7 +12,7 @@ use crate::{ missing_date::fit_into_range, range::{Incrementable, UniversalRange}, types::{timespans::DateValue, Timespan, TimespanValue}, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; use chrono::{DateTime, Duration, NaiveDate, Utc}; @@ -100,7 +100,7 @@ where async fn query_data( cx: &UpdateContext<'_>, range: UniversalRange>, - ) -> Result>, UpdateError> { + ) -> Result>, ChartError> { let full_data = mocked_lines(DateRange::get(), ValueRange::get()); Ok(mock_trim_lines(full_data, cx.time, range, Policy::get())) } @@ -146,7 +146,7 @@ where async fn query_data( cx: &UpdateContext<'_>, range: UniversalRange>, - ) -> Result { + ) -> Result { Ok(mock_trim_lines(Data::get(), cx.time, range, Policy::get())) } } diff --git a/stats/stats/src/charts/lines/native_coin_holders_growth.rs 
b/stats/stats/src/charts/lines/native_coin_holders_growth.rs index ef5536c89..f9b82e997 100644 --- a/stats/stats/src/charts/lines/native_coin_holders_growth.rs +++ b/stats/stats/src/charts/lines/native_coin_holders_growth.rs @@ -21,7 +21,7 @@ use crate::{ }, define_and_impl_resolution_properties, types::timespans::{DateValue, Month, Week, Year}, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; use blockscout_db::entity::address_coin_balances_daily; @@ -96,7 +96,7 @@ impl UpdateBehaviour<(), (), NaiveDate> for Update { last_accurate_point: Option>, min_blockscout_block: i64, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { update_sequentially_with_support_table( cx, chart_id, @@ -115,7 +115,7 @@ pub async fn update_sequentially_with_support_table( last_accurate_point: Option>, min_blockscout_block: i64, remote_fetch_timer: &mut AggregateTimer, -) -> Result<(), UpdateError> { +) -> Result<(), ChartError> { tracing::info!(chart =% Properties::key(), "start sequential update"); let all_days = match last_accurate_point { Some(last_row) => get_unique_ordered_days( @@ -124,14 +124,14 @@ pub async fn update_sequentially_with_support_table( remote_fetch_timer, ) .await - .map_err(UpdateError::BlockscoutDB)?, + .map_err(ChartError::BlockscoutDB)?, None => { clear_support_table(cx.db.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::StatsDB)?; get_unique_ordered_days(cx.blockscout.connection.as_ref(), None, remote_fetch_timer) .await - .map_err(ChartError::BlockscoutDB)?
} }; @@ -151,21 +151,21 @@ pub async fn update_sequentially_with_support_table( .connection .begin() .await - .map_err(UpdateError::StatsDB)?; + .map_err(ChartError::StatsDB)?; let data: Vec = calculate_days_using_support_table( &db_tx, cx.blockscout.connection.as_ref(), days.iter().copied(), ) .await - .map_err(|e| UpdateError::Internal(e.to_string()))? + .map_err(|e| ChartError::Internal(e.to_string()))? .into_iter() .map(|result| result.active_model(chart_id, Some(min_blockscout_block))) .collect(); insert_data_many(&db_tx, data) .await - .map_err(UpdateError::StatsDB)?; - db_tx.commit().await.map_err(UpdateError::StatsDB)?; + .map_err(ChartError::StatsDB)?; + db_tx.commit().await.map_err(ChartError::StatsDB)?; } Ok(()) } @@ -174,7 +174,7 @@ async fn calculate_days_using_support_table( db: &C1, blockscout: &C2, days: impl IntoIterator, -) -> Result>, UpdateError> +) -> Result>, ChartError> where C1: ConnectionTrait, C2: ConnectionTrait, @@ -182,7 +182,7 @@ where let mut result = vec![]; let new_holders_by_date = get_holder_changes_by_date(blockscout, days) .await - .map_err(|e| UpdateError::Internal(format!("cannot get new holders: {e}")))?; + .map_err(|e| ChartError::Internal(format!("cannot get new holders: {e}")))?; for (date, holders) in new_holders_by_date { // this check shouldnt be triggered if data in blockscout is correct, @@ -190,7 +190,7 @@ where let addresses = holders.iter().map(|h| &h.address).collect::>(); if addresses.len() != holders.len() { tracing::error!(addresses = ?addresses, date = ?date, "duplicate addresses in holders"); - return Err(UpdateError::Internal( + return Err(ChartError::Internal( "duplicate addresses in holders".to_string(), )); }; @@ -203,10 +203,10 @@ where update_current_holders(db, holders) .await - .map_err(|e| UpdateError::Internal(format!("cannot update holders: {e}")))?; + .map_err(|e| ChartError::Internal(format!("cannot update holders: {e}")))?; let new_count = count_current_holders(db) .await - .map_err(|e| 
UpdateError::Internal(format!("cannot count holders: {e}")))?; + .map_err(|e| ChartError::Internal(format!("cannot count holders: {e}")))?; result.push(DateValue:: { timespan: date, value: new_count.to_string(), diff --git a/stats/stats/src/charts/lines/new_accounts.rs b/stats/stats/src/charts/lines/new_accounts.rs index 2c2f9f30f..24f5da7a6 100644 --- a/stats/stats/src/charts/lines/new_accounts.rs +++ b/stats/stats/src/charts/lines/new_accounts.rs @@ -24,7 +24,7 @@ use crate::{ range::{data_source_query_range_to_db_statement_range, UniversalRange}, types::timespans::{Month, Week, Year}, utils::sql_with_range_filter_opt, - ChartProperties, Named, UpdateError, + ChartError, ChartProperties, Named, }; use chrono::{DateTime, NaiveDate, Utc}; @@ -101,7 +101,7 @@ impl RemoteQueryBehaviour for NewAccountsQueryBehaviour { async fn query_data( cx: &UpdateContext<'_>, range: UniversalRange>, - ) -> Result>, UpdateError> { + ) -> Result>, ChartError> { let statement_range = data_source_query_range_to_db_statement_range::(cx, range) .await?; @@ -112,7 +112,7 @@ impl RemoteQueryBehaviour for NewAccountsQueryBehaviour { let mut data = DateValue::::find_by_statement(query) .all(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; // make sure that it's sorted data.sort_by_key(|d| d.timespan); if let Some(range) = statement_range { diff --git a/stats/stats/src/charts/mod.rs b/stats/stats/src/charts/mod.rs index c7abc1d27..510e7b7e9 100644 --- a/stats/stats/src/charts/mod.rs +++ b/stats/stats/src/charts/mod.rs @@ -6,5 +6,5 @@ pub mod query_dispatch; pub mod types; pub use chart::{ chart_properties_portrait, ChartKey, ChartObject, ChartProperties, ChartPropertiesObject, - MissingDatePolicy, Named, ResolutionKind, UpdateError, + MissingDatePolicy, Named, ResolutionKind, ChartError, }; diff --git a/stats/stats/src/charts/query_dispatch.rs b/stats/stats/src/charts/query_dispatch.rs index 169b67f6f..997bd7b12 100644 
--- a/stats/stats/src/charts/query_dispatch.rs +++ b/stats/stats/src/charts/query_dispatch.rs @@ -18,7 +18,7 @@ use crate::{ use super::{ types::{ExtendedTimespanValue, Timespan, TimespanValue}, - ChartProperties, UpdateError, + ChartProperties, ChartError, }; /// Data query trait with unified data format (for external use) @@ -39,7 +39,7 @@ pub trait QuerySerialized { range: UniversalRange>, points_limit: Option, fill_missing_dates: bool, - ) -> Pin> + Send + 'a>>; + ) -> Pin> + Send + 'a>>; } /// [`QuerySerialized`] but for dynamic dispatch @@ -142,7 +142,7 @@ where range: UniversalRange>, points_limit: Option, fill_missing_dates: bool, - ) -> Pin> + Send + 'a>> + ) -> Pin> + Send + 'a>> { let cx = cx.clone(); Box::pin(async move { diff --git a/stats/stats/src/data_processing.rs b/stats/stats/src/data_processing.rs index 99195854a..73cc1e3d1 100644 --- a/stats/stats/src/data_processing.rs +++ b/stats/stats/src/data_processing.rs @@ -3,7 +3,7 @@ use chrono::NaiveDate; use crate::{ charts::types::timespans::DateValue, types::{Timespan, TimespanValue}, - UpdateError, + ChartError, }; use std::{ mem, @@ -17,7 +17,7 @@ use std::{ pub fn cumsum( mut data: Vec>, mut prev_sum: Value, -) -> Result>, UpdateError> +) -> Result>, ChartError> where Value: AddAssign + Clone, TimespanValue: Default, @@ -40,7 +40,7 @@ where pub fn deltas( mut data: Vec>, mut prev_value: Value, -) -> Result>, UpdateError> +) -> Result>, ChartError> where Value: SubAssign + Clone, TimespanValue: Default, @@ -61,7 +61,7 @@ where pub fn sum( data: &[TimespanValue], mut partial_sum: Value, -) -> Result, UpdateError> +) -> Result, ChartError> where Resolution: Timespan + Clone + Ord, Value: AddAssign + Clone, diff --git a/stats/stats/src/data_source/kinds/auxiliary/cumulative.rs b/stats/stats/src/data_source/kinds/auxiliary/cumulative.rs index 6fa509bc1..ca77c9c99 100644 --- a/stats/stats/src/data_source/kinds/auxiliary/cumulative.rs +++ b/stats/stats/src/data_source/kinds/auxiliary/cumulative.rs @@ 
-10,7 +10,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, types::TimespanValue, - UpdateError, + ChartError, }; /// Auxiliary source for cumulative chart. @@ -43,7 +43,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -52,7 +52,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let delta_data = Delta::query_data(cx, range, dependency_data_fetch_timer).await?; let data = cumsum::(delta_data, Value::zero())?; Ok(data) diff --git a/stats/stats/src/data_source/kinds/data_manipulation/delta.rs b/stats/stats/src/data_source/kinds/data_manipulation/delta.rs index d07762d70..9aee44706 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/delta.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/delta.rs @@ -15,7 +15,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, types::TimespanValue, - UpdateError, + ChartError, }; /// Calculate delta data from cumulative dependency. 
@@ -52,7 +52,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -61,7 +61,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let mut request_range = range.clone(); request_range.start = request_range.start.map(|s| { s.checked_sub_signed(TimeDelta::days(1)) diff --git a/stats/stats/src/data_source/kinds/data_manipulation/filter_deducible.rs b/stats/stats/src/data_source/kinds/data_manipulation/filter_deducible.rs index 73ce9c9ad..28728ffa4 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/filter_deducible.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/filter_deducible.rs @@ -12,7 +12,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, types::TimespanValue, - ChartProperties, MissingDatePolicy, UpdateError, + ChartProperties, MissingDatePolicy, ChartError, }; /// Pass only essential points from `D`, removing ones that can be deduced @@ -45,7 +45,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -54,7 +54,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let data = DS::query_data(cx, range, dependency_data_fetch_timer).await?; Ok(match Properties::missing_date_policy() { MissingDatePolicy::FillZero => { diff --git a/stats/stats/src/data_source/kinds/data_manipulation/last_point.rs b/stats/stats/src/data_source/kinds/data_manipulation/last_point.rs index e502f83d2..c3fcb11ad 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/last_point.rs +++ 
b/stats/stats/src/data_source/kinds/data_manipulation/last_point.rs @@ -13,7 +13,7 @@ use crate::{ range::UniversalRange, types::{Timespan, TimespanValue, ZeroTimespanValue}, utils::day_start, - UpdateError, + ChartError, }; pub struct LastPoint(PhantomData) @@ -42,7 +42,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -51,7 +51,7 @@ where cx: &UpdateContext<'_>, _range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let data = DS::query_data( cx, (day_start(&cx.time.date_naive())..cx.time).into(), diff --git a/stats/stats/src/data_source/kinds/data_manipulation/map/mod.rs b/stats/stats/src/data_source/kinds/data_manipulation/map/mod.rs index a4449250d..a0d114b64 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/map/mod.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/map/mod.rs @@ -10,7 +10,7 @@ use sea_orm::{DatabaseConnection, DbErr}; use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, - UpdateError, + ChartError, }; mod parse; @@ -29,7 +29,7 @@ where pub trait MapFunction { type Output: Send; - fn function(inner_data: Input) -> Result; + fn function(inner_data: Input) -> Result; } impl DataSource for Map @@ -52,7 +52,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -61,7 +61,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let inner_data = ::query_data(cx, range, dependency_data_fetch_timer).await?; F::function(inner_data) diff --git 
a/stats/stats/src/data_source/kinds/data_manipulation/map/parse.rs b/stats/stats/src/data_source/kinds/data_manipulation/map/parse.rs index bac618b32..c1a9b88ca 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/map/parse.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/map/parse.rs @@ -1,7 +1,7 @@ use std::{fmt::Display, marker::PhantomData, str::FromStr}; use crate::{ - data_source::kinds::data_manipulation::map::MapFunction, types::TimespanValue, UpdateError, + data_source::kinds::data_manipulation::map::MapFunction, types::TimespanValue, ChartError, }; use super::Map; @@ -19,19 +19,19 @@ where fn function( inner_data: Vec>, - ) -> Result>, UpdateError> { + ) -> Result>, ChartError> { inner_data .into_iter() .map(|p| { let val_parsed = p.value.parse::().map_err(|e| { - UpdateError::Internal(format!("failed to parse values of dependency: {e}")) + ChartError::Internal(format!("failed to parse values of dependency: {e}")) })?; Ok(TimespanValue { timespan: p.timespan, value: val_parsed, }) }) - .collect::, UpdateError>>() + .collect::, ChartError>>() } } @@ -45,9 +45,9 @@ where fn function( inner_data: TimespanValue, - ) -> Result { + ) -> Result { let val_parsed = inner_data.value.parse::().map_err(|e| { - UpdateError::Internal(format!("failed to parse values of dependency: {e}")) + ChartError::Internal(format!("failed to parse values of dependency: {e}")) })?; Ok(TimespanValue { timespan: inner_data.timespan, diff --git a/stats/stats/src/data_source/kinds/data_manipulation/map/strip_extension.rs b/stats/stats/src/data_source/kinds/data_manipulation/map/strip_extension.rs index 80de37e0f..f0accab13 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/map/strip_extension.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/map/strip_extension.rs @@ -1,6 +1,6 @@ use crate::{ types::{ExtendedTimespanValue, TimespanValue}, - UpdateError, + ChartError, }; use super::{Map, MapFunction}; @@ -16,7 +16,7 @@ where V: Send, { type 
Output = Vec>; - fn function(inner_data: Vec>) -> Result { + fn function(inner_data: Vec>) -> Result { Ok(inner_data.into_iter().map(|p| p.into()).collect()) } } @@ -27,7 +27,7 @@ where V: Send, { type Output = TimespanValue; - fn function(inner_data: ExtendedTimespanValue) -> Result { + fn function(inner_data: ExtendedTimespanValue) -> Result { Ok(inner_data.into()) } } diff --git a/stats/stats/src/data_source/kinds/data_manipulation/map/to_string.rs b/stats/stats/src/data_source/kinds/data_manipulation/map/to_string.rs index ccf6a3d11..bf6b2dc98 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/map/to_string.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/map/to_string.rs @@ -1,4 +1,4 @@ -use crate::{types::TimespanValue, UpdateError}; +use crate::{types::TimespanValue, ChartError}; use super::{Map, MapFunction}; @@ -12,7 +12,7 @@ where type Output = Vec>; fn function( inner_data: Vec>, - ) -> Result { + ) -> Result { Ok(inner_data.into_iter().map(|p| p.into()).collect()) } } @@ -23,7 +23,7 @@ where TimespanValue: Into>, { type Output = TimespanValue; - fn function(inner_data: TimespanValue) -> Result { + fn function(inner_data: TimespanValue) -> Result { Ok(inner_data.into()) } } diff --git a/stats/stats/src/data_source/kinds/data_manipulation/resolutions/average.rs b/stats/stats/src/data_source/kinds/data_manipulation/resolutions/average.rs index 0560c591e..1b96272c6 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/resolutions/average.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/resolutions/average.rs @@ -12,7 +12,7 @@ use crate::{ }, range::UniversalRange, types::{ConsistsOf, Timespan, TimespanValue}, - UpdateError, + ChartError, }; use super::extend_to_timespan_boundaries; @@ -55,7 +55,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled 
recursively Ok(()) } @@ -64,7 +64,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let time_range_for_lower_res = extend_to_timespan_boundaries::(range); let high_res_averages = Average::query_data( cx, diff --git a/stats/stats/src/data_source/kinds/data_manipulation/resolutions/last_value.rs b/stats/stats/src/data_source/kinds/data_manipulation/resolutions/last_value.rs index 1789384a8..8e14e9c74 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/resolutions/last_value.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/resolutions/last_value.rs @@ -13,7 +13,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, types::{ConsistsOf, Timespan, TimespanValue}, - UpdateError, + ChartError, }; use super::{extend_to_timespan_boundaries, reduce_each_timespan}; @@ -45,7 +45,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -54,7 +54,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let time_range_for_lower_res = extend_to_timespan_boundaries::(range); let high_res_data = DS::query_data(cx, time_range_for_lower_res, dependency_data_fetch_timer).await?; diff --git a/stats/stats/src/data_source/kinds/data_manipulation/resolutions/sum.rs b/stats/stats/src/data_source/kinds/data_manipulation/resolutions/sum.rs index 24bd2ed27..c6939d1d6 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/resolutions/sum.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/resolutions/sum.rs @@ -14,7 +14,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, types::{ConsistsOf, Timespan, TimespanValue}, - UpdateError, + ChartError, 
}; use super::{extend_to_timespan_boundaries, reduce_each_timespan}; @@ -46,7 +46,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -55,7 +55,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let time_range_for_lower_res = extend_to_timespan_boundaries::(range); let high_res_data = DS::query_data(cx, time_range_for_lower_res, dependency_data_fetch_timer).await?; diff --git a/stats/stats/src/data_source/kinds/data_manipulation/sum_point.rs b/stats/stats/src/data_source/kinds/data_manipulation/sum_point.rs index b8de19092..ba40c12f0 100644 --- a/stats/stats/src/data_source/kinds/data_manipulation/sum_point.rs +++ b/stats/stats/src/data_source/kinds/data_manipulation/sum_point.rs @@ -14,7 +14,7 @@ use crate::{ data_source::{source::DataSource, UpdateContext}, range::UniversalRange, types::{Timespan, TimespanValue}, - UpdateError, + ChartError, }; /// Sum all dependency's data. 
@@ -49,7 +49,7 @@ where Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // just an adapter; inner is handled recursively Ok(()) } @@ -58,7 +58,7 @@ where cx: &UpdateContext<'_>, _range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { // it's possible to not request full data range and use last accurate point; // can be updated to work similarly to cumulative let full_data = diff --git a/stats/stats/src/data_source/kinds/local_db/mod.rs b/stats/stats/src/data_source/kinds/local_db/mod.rs index e73437f7f..2cabd4282 100644 --- a/stats/stats/src/data_source/kinds/local_db/mod.rs +++ b/stats/stats/src/data_source/kinds/local_db/mod.rs @@ -35,7 +35,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, metrics, range::UniversalRange, - UpdateError, + ChartError, }; use super::auxiliary::PartialCumulative; @@ -141,7 +141,7 @@ where async fn update_itself_inner( cx: &UpdateContext<'_>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { let metadata = get_chart_metadata(cx.db.connection.as_ref(), &ChartProps::key()).await?; if let Some(last_updated_at) = metadata.last_updated_at { if postgres_timestamps_eq(cx.time, last_updated_at) { @@ -165,7 +165,7 @@ where let chart_id = metadata.id; let min_blockscout_block = get_min_block_blockscout(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; let last_accurate_point = last_accurate_point::( chart_id, min_blockscout_block, @@ -229,7 +229,7 @@ where Create::create(db, init_time).await } - async fn update_itself(cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(cx: &UpdateContext<'_>) -> Result<(), ChartError> { // set up metrics + write some logs let mut dependency_data_fetch_timer = 
AggregateTimer::new(); @@ -260,7 +260,7 @@ where cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { let _timer = dependency_data_fetch_timer.start_interval(); // maybe add `fill_missing_dates` parameter to current function as well in the future // to get rid of "Note" in the `DataSource`'s method documentation @@ -329,7 +329,7 @@ mod tests { tests::{init_db::init_marked_db_all, mock_blockscout::fill_mock_blockscout_data}, types::{timespans::DateValue, TimespanValue}, update_group::{SyncUpdateGroup, UpdateGroup}, - ChartProperties, Named, UpdateError, + ChartProperties, Named, ChartError, }; type WasTriggeredStorage = Arc>; @@ -368,7 +368,7 @@ mod tests { _last_accurate_point: Option>, min_blockscout_block: i64, _dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { Self::record_trigger().await; // insert smth for dependency to work well let data = DateValue:: { @@ -378,7 +378,7 @@ mod tests { let value = data.active_model(chart_id, Some(min_blockscout_block)); insert_data_many(cx.db.connection.as_ref(), vec![value]) .await - .map_err(UpdateError::StatsDB)?; + .map_err(ChartError::StatsDB)?; Ok(()) } } diff --git a/stats/stats/src/data_source/kinds/local_db/parameter_traits.rs b/stats/stats/src/data_source/kinds/local_db/parameter_traits.rs index 4f3b88bf0..69a62a545 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameter_traits.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameter_traits.rs @@ -9,7 +9,7 @@ use crate::{ data_source::{DataSource, UpdateContext}, range::UniversalRange, types::TimespanValue, - RequestedPointsLimit, UpdateError, + RequestedPointsLimit, ChartError, }; /// In most cases, [`super::DefaultCreate`] is enough. 
@@ -38,18 +38,18 @@ where last_accurate_point: Option>, min_blockscout_block: i64, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> impl Future> + Send; + ) -> impl Future> + Send; /// Update only chart metadata. fn update_metadata( db: &DatabaseConnection, chart_id: i32, update_time: DateTime, - ) -> impl Future> + Send { + ) -> impl Future> + Send { async move { set_last_updated_at(chart_id, db, update_time) .await - .map_err(UpdateError::StatsDB) + .map_err(ChartError::StatsDB) } } } @@ -65,5 +65,5 @@ pub trait QueryBehaviour { range: UniversalRange>, points_limit: Option, fill_missing_dates: bool, - ) -> impl Future> + Send; + ) -> impl Future> + Send; } diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/query.rs b/stats/stats/src/data_source/kinds/local_db/parameters/query.rs index 476be1856..85268f1f7 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/query.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/query.rs @@ -9,7 +9,7 @@ use crate::{ range::UniversalRange, types::{timespans::DateValue, ExtendedTimespanValue, Timespan}, utils::MarkedDbConnection, - ChartProperties, RequestedPointsLimit, UpdateError, + ChartProperties, RequestedPointsLimit, ChartError, }; /// Usually the choice for line charts @@ -32,7 +32,7 @@ where range: UniversalRange>, points_limit: Option, fill_missing_dates: bool, - ) -> Result { + ) -> Result { // In DB we store data with date precision. Also, `get_line_chart_data` // works with inclusive range. Therefore, we need to convert the range and // get date without time. @@ -74,7 +74,7 @@ impl QueryBehaviour for DefaultQueryLast { _range: UniversalRange>, _points_limit: Option, _fill_missing_dates: bool, - ) -> Result { + ) -> Result { let value = get_counter_data( cx.db.connection.as_ref(), &C::name(), @@ -82,7 +82,7 @@ impl QueryBehaviour for DefaultQueryLast { C::missing_date_policy(), ) .await? 
- .ok_or(UpdateError::Internal(format!( + .ok_or(ChartError::Internal(format!( "no data for counter '{}' was found", C::name() )))?; @@ -92,7 +92,7 @@ impl QueryBehaviour for DefaultQueryLast { #[trait_variant::make(Send)] pub trait ValueEstimation { - async fn estimate(blockscout: &MarkedDbConnection) -> Result, UpdateError>; + async fn estimate(blockscout: &MarkedDbConnection) -> Result, ChartError>; } pub struct QueryLastWithEstimationFallback(PhantomData<(E, C)>) @@ -112,7 +112,7 @@ where _range: UniversalRange>, _points_limit: Option, _fill_missing_dates: bool, - ) -> Result { + ) -> Result { let value = match get_counter_data( cx.db.connection.as_ref(), &C::name(), @@ -142,7 +142,7 @@ mod tests { data_source::{types::BlockscoutMigrations, UpdateContext, UpdateParameters}, tests::init_db::init_marked_db_all, types::timespans::DateValue, - MissingDatePolicy, Named, UpdateError, + MissingDatePolicy, Named, ChartError, }; #[tokio::test] @@ -173,7 +173,7 @@ mod tests { impl ValueEstimation for TestFallback { async fn estimate( _blockscout: &MarkedDbConnection, - ) -> Result, UpdateError> { + ) -> Result, ChartError> { Ok(expected_estimate()) } } diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/mod.rs b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/mod.rs index 13ac7c346..9b1d9ab0d 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/mod.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/mod.rs @@ -19,7 +19,7 @@ use crate::{ }, range::UniversalRange, types::{ExtendedTimespanValue, Timespan, TimespanDuration, TimespanValue}, - ChartProperties, UpdateError, + ChartProperties, ChartError, }; pub mod parameter_traits; @@ -61,7 +61,7 @@ where last_accurate_point: Option>, min_blockscout_block: i64, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { let now = cx.time; let update_from = 
last_accurate_point .clone() @@ -72,7 +72,7 @@ where get_min_date_blockscout(cx.blockscout.connection.as_ref()) .await .map(|time| time.date()) - .map_err(UpdateError::BlockscoutDB)?, + .map_err(ChartError::BlockscoutDB)?, ), }; @@ -129,7 +129,7 @@ where async fn get_previous_step_last_point( cx: &UpdateContext<'_>, this_step_start: Resolution, -) -> Result, UpdateError> +) -> Result, ChartError> where Resolution: Timespan + Clone, Query: QueryBehaviour>>, @@ -158,7 +158,7 @@ where )); if last_point_range_values.len() > 1 { // return error because it's likely that date in `previous_step_last_point` is incorrect - return Err(UpdateError::Internal("Retrieved 2 points from previous step; probably an issue with range construction and handling".to_owned())); + return Err(ChartError::Internal("Retrieved 2 points from previous step; probably an issue with range construction and handling".to_owned())); } Ok(previous_step_last_point) } @@ -171,7 +171,7 @@ async fn batch_update_values_step last_accurate_point: TimespanValue, range: BatchRange, dependency_data_fetch_timer: &mut AggregateTimer, -) -> Result +) -> Result where MainDep: DataSource, ResolutionDep: DataSource, @@ -235,12 +235,12 @@ fn generate_batch_ranges( start: Resolution, end: DateTime, max_step: TimespanDuration, -) -> Result>, UpdateError> +) -> Result>, ChartError> where Resolution: Timespan + Ord + Clone, { if max_step.repeats() == 0 { - return Err(UpdateError::Internal( + return Err(ChartError::Internal( "Zero maximum batch step is not allowed".into(), )); } diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameter_traits.rs b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameter_traits.rs index d2277ccb5..d4150c3c9 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameter_traits.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameter_traits.rs @@ -5,7 +5,7 @@ use 
sea_orm::DatabaseConnection; use crate::{ types::{Timespan, TimespanValue}, - UpdateError, + ChartError, }; pub trait BatchStepBehaviour @@ -25,5 +25,5 @@ where last_accurate_point: TimespanValue, main_data: MainInput, resolution_data: ResolutionInput, - ) -> impl Future> + std::marker::Send; + ) -> impl Future> + std::marker::Send; } diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/cumulative.rs b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/cumulative.rs index f8242e729..c01894e68 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/cumulative.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/cumulative.rs @@ -9,7 +9,7 @@ use sea_orm::DatabaseConnection; use crate::{ data_source::kinds::local_db::parameters::update::batching::parameter_traits::BatchStepBehaviour, types::{Timespan, TimespanValue}, - ChartProperties, UpdateError, + ChartProperties, ChartError, }; use super::PassVecStep; @@ -37,9 +37,9 @@ where last_accurate_point: TimespanValue, main_data: Vec>, _resolution_data: (), - ) -> Result { + ) -> Result { let partial_sum = last_accurate_point.value.parse::().map_err(|e| { - UpdateError::Internal(format!( + ChartError::Internal(format!( "failed to parse value in chart '{}': {e}", ChartProps::key() )) diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mock.rs b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mock.rs index 846acfd52..0d0cc61ec 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mock.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mock.rs @@ -5,7 +5,7 @@ use sea_orm::DatabaseConnection; use crate::{ data_source::kinds::local_db::parameters::update::batching::parameter_traits::BatchStepBehaviour, - tests::recorder::Recorder, 
types::timespans::DateValue, UpdateError, + tests::recorder::Recorder, types::timespans::DateValue, ChartError, }; use super::PassVecStep; @@ -40,7 +40,7 @@ where last_accurate_point: DateValue, main_data: Vec>, resolution_data: (), - ) -> Result { + ) -> Result { StepsRecorder::record(StepInput { chart_id, update_time, diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mod.rs b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mod.rs index ce27c16d3..41e274fd8 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mod.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/update/batching/parameters/mod.rs @@ -8,7 +8,7 @@ use crate::{ timespans::{Month, Week, Year}, Timespan, TimespanDuration, TimespanValue, }, - UpdateError, + ChartError, }; use super::parameter_traits::BatchStepBehaviour; @@ -42,7 +42,7 @@ where _last_accurate_point: TimespanValue, main_data: Vec>, _resolution_data: (), - ) -> Result { + ) -> Result { let found = main_data.len(); // note: right away cloning another chart will not result in exact copy, // because if the other chart is `FillPrevious`, then omitted starting point @@ -56,7 +56,7 @@ where .map(|value| value.active_model(chart_id, Some(min_blockscout_block))); insert_data_many(db, values) .await - .map_err(UpdateError::StatsDB)?; + .map_err(ChartError::StatsDB)?; Ok(found) } } diff --git a/stats/stats/src/data_source/kinds/local_db/parameters/update/point.rs b/stats/stats/src/data_source/kinds/local_db/parameters/update/point.rs index 774bc348c..f3b3c4a16 100644 --- a/stats/stats/src/data_source/kinds/local_db/parameters/update/point.rs +++ b/stats/stats/src/data_source/kinds/local_db/parameters/update/point.rs @@ -7,7 +7,7 @@ use crate::{ data_source::{kinds::local_db::UpdateBehaviour, DataSource, UpdateContext}, range::UniversalRange, types::{Timespan, TimespanValue}, - UpdateError, + ChartError, }; /// Store 
output of the `MainDep` right in the local db @@ -24,13 +24,13 @@ where _last_accurate_point: Option>, min_blockscout_block: i64, remote_fetch_timer: &mut AggregateTimer, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { // range doesn't make sense there; thus is not used let data = MainDep::query_data(cx, UniversalRange::full(), remote_fetch_timer).await?; let value = data.active_model(chart_id, Some(min_blockscout_block)); insert_data_many(cx.db.connection.as_ref(), vec![value]) .await - .map_err(UpdateError::StatsDB)?; + .map_err(ChartError::StatsDB)?; Ok(()) } } diff --git a/stats/stats/src/data_source/kinds/remote_db/mod.rs b/stats/stats/src/data_source/kinds/remote_db/mod.rs index beb69e8d5..5a47180d4 100644 --- a/stats/stats/src/data_source/kinds/remote_db/mod.rs +++ b/stats/stats/src/data_source/kinds/remote_db/mod.rs @@ -30,7 +30,7 @@ use sea_orm::{DatabaseConnection, DbErr}; use crate::{ data_source::{source::DataSource, types::UpdateContext}, range::UniversalRange, - UpdateError, + ChartError, }; pub use query::{ @@ -48,7 +48,7 @@ pub trait RemoteQueryBehaviour { fn query_data( cx: &UpdateContext<'_>, range: UniversalRange>, - ) -> impl Future> + Send; + ) -> impl Future> + Send; } impl DataSource for RemoteDatabaseSource { @@ -71,12 +71,12 @@ impl DataSource for RemoteDatabaseSource { cx: &UpdateContext<'_>, range: UniversalRange>, remote_fetch_timer: &mut AggregateTimer, - ) -> Result<::Output, UpdateError> { + ) -> Result<::Output, ChartError> { let _interval = remote_fetch_timer.start_interval(); Q::query_data(cx, range).await } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { Ok(()) } } diff --git a/stats/stats/src/data_source/kinds/remote_db/query/all.rs b/stats/stats/src/data_source/kinds/remote_db/query/all.rs index acdf2436d..0583240e4 100644 --- a/stats/stats/src/data_source/kinds/remote_db/query/all.rs +++ 
b/stats/stats/src/data_source/kinds/remote_db/query/all.rs @@ -13,7 +13,7 @@ use crate::{ }, range::{data_source_query_range_to_db_statement_range, UniversalRange}, types::TimespanValue, - UpdateError, + ChartError, }; pub trait StatementFromRange { @@ -54,7 +54,7 @@ where async fn query_data( cx: &UpdateContext<'_>, range: UniversalRange>, - ) -> Result>, UpdateError> { + ) -> Result>, ChartError> { // to not overcomplicate the queries let query_range = data_source_query_range_to_db_statement_range::(cx, range).await?; @@ -62,7 +62,7 @@ where let mut data = TimespanValue::::find_by_statement(query) .all(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; // linear time for sorted sequences data.sort_unstable_by(|a, b| a.timespan.cmp(&b.timespan)); // can't use sort_*_by_key: https://github.com/rust-lang/rust/issues/34162 diff --git a/stats/stats/src/data_source/kinds/remote_db/query/each.rs b/stats/stats/src/data_source/kinds/remote_db/query/each.rs index 13333b9eb..ea8dcf397 100644 --- a/stats/stats/src/data_source/kinds/remote_db/query/each.rs +++ b/stats/stats/src/data_source/kinds/remote_db/query/each.rs @@ -14,7 +14,7 @@ use crate::{ }, range::{exclusive_range_to_inclusive, UniversalRange}, types::{Timespan, TimespanValue}, - UpdateError, + ChartError, }; pub trait StatementFromTimespan { @@ -51,7 +51,7 @@ where async fn query_data( cx: &UpdateContext<'_>, range: UniversalRange>, - ) -> Result>, UpdateError> { + ) -> Result>, ChartError> { let query_range = if let Some(r) = range.clone().try_into_exclusive() { r } else { @@ -65,7 +65,7 @@ where let point_value = ValueWrapper::::find_by_statement(query) .one(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)?; + .map_err(ChartError::BlockscoutDB)?; if let Some(ValueWrapper { value }) = point_value { let timespan = resolution_from_range(point_range); collected_data.push(TimespanValue { timespan, value }); diff 
--git a/stats/stats/src/data_source/kinds/remote_db/query/one.rs b/stats/stats/src/data_source/kinds/remote_db/query/one.rs index aca5d1a68..1a1dcedc2 100644 --- a/stats/stats/src/data_source/kinds/remote_db/query/one.rs +++ b/stats/stats/src/data_source/kinds/remote_db/query/one.rs @@ -10,7 +10,7 @@ use crate::{ }, range::UniversalRange, types::TimespanValue, - UpdateError, + ChartError, }; pub trait StatementForOne { @@ -42,13 +42,13 @@ where async fn query_data( cx: &UpdateContext<'_>, _range: UniversalRange>, - ) -> Result, UpdateError> { + ) -> Result, ChartError> { let query = S::get_statement(&cx.blockscout_applied_migrations); let data = TimespanValue::::find_by_statement(query) .one(cx.blockscout.connection.as_ref()) .await - .map_err(UpdateError::BlockscoutDB)? - .ok_or_else(|| UpdateError::Internal("query returned nothing".into()))?; + .map_err(ChartError::BlockscoutDB)? + .ok_or_else(|| ChartError::Internal("query returned nothing".into()))?; Ok(data) } } diff --git a/stats/stats/src/data_source/source.rs b/stats/stats/src/data_source/source.rs index 4c72e4760..bb1960986 100644 --- a/stats/stats/src/data_source/source.rs +++ b/stats/stats/src/data_source/source.rs @@ -7,7 +7,7 @@ use sea_orm::{DatabaseConnection, DbErr}; use tracing::instrument; use tynm::type_name; -use crate::{range::UniversalRange, UpdateError}; +use crate::{range::UniversalRange, ChartError}; use super::types::UpdateContext; @@ -116,7 +116,7 @@ pub trait DataSource { #[instrument(skip_all, level = tracing::Level::DEBUG, fields(source_mutex_id = Self::mutex_id()))] fn update_recursively( cx: &UpdateContext<'_>, - ) -> impl Future> + Send { + ) -> impl Future> + Send { async move { // Couldn't figure out how to control level per-argument basis (in instrumentation) // so this event is used insted, since the name is usually quite verbose @@ -148,7 +148,7 @@ pub trait DataSource { /// Update only thise data source's data (values + metadat) fn update_itself( cx: &UpdateContext<'_>, - ) -> 
impl Future> + Send; + ) -> impl Future> + Send; /// Retrieve chart data. /// If `range` is `Some`, should return data within the range. Otherwise - all data. @@ -165,7 +165,7 @@ pub trait DataSource { cx: &UpdateContext<'_>, range: UniversalRange>, dependency_data_fetch_timer: &mut AggregateTimer, - ) -> impl Future> + Send; + ) -> impl Future> + Send; } // Base case for recursive type @@ -197,12 +197,12 @@ impl DataSource for () { HashSet::new() } - async fn update_recursively(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_recursively(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { // stop recursion Ok(()) } - async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), UpdateError> { + async fn update_itself(_cx: &UpdateContext<'_>) -> Result<(), ChartError> { unreachable!("not called by `update_recursively` and must not be called by anything else") } @@ -210,7 +210,7 @@ impl DataSource for () { _cx: &UpdateContext<'_>, _range: UniversalRange>, _remote_fetch_timer: &mut AggregateTimer, - ) -> Result { + ) -> Result { Ok(()) } } diff --git a/stats/stats/src/data_source/tests.rs b/stats/stats/src/data_source/tests.rs index 480d7cbf7..569537f0d 100644 --- a/stats/stats/src/data_source/tests.rs +++ b/stats/stats/src/data_source/tests.rs @@ -38,7 +38,7 @@ use crate::{ types::timespans::{DateValue, Month, Week, Year}, update_group::{SyncUpdateGroup, UpdateGroup}, utils::{produce_filter_and_values, sql_with_range_filter_opt}, - ChartProperties, MissingDatePolicy, Named, UpdateError, + ChartProperties, MissingDatePolicy, Named, ChartError, }; pub struct NewContractsQuery; @@ -217,7 +217,7 @@ impl BatchStepBehaviour>, ()> _last_accurate_point: DateValue, _main_data: Vec>, _resolution_data: (), - ) -> Result { + ) -> Result { // do something (just an example, not intended for running) todo!(); // save data diff --git a/stats/stats/src/lib.rs b/stats/stats/src/lib.rs index fa477611b..f53b02971 100644 --- a/stats/stats/src/lib.rs +++ 
b/stats/stats/src/lib.rs @@ -23,5 +23,5 @@ pub use charts::{ RequestedPointsLimit, }, lines, query_dispatch, types, ChartKey, ChartObject, ChartProperties, ChartPropertiesObject, - MissingDatePolicy, Named, ResolutionKind, UpdateError, + MissingDatePolicy, Named, ResolutionKind, ChartError, }; diff --git a/stats/stats/src/range.rs b/stats/stats/src/range.rs index 9efb733ed..612b013a3 100644 --- a/stats/stats/src/range.rs +++ b/stats/stats/src/range.rs @@ -5,7 +5,7 @@ use chrono::{DateTime, Utc}; use crate::{ data_source::{kinds::remote_db::RemoteQueryBehaviour, UpdateContext}, types::{Timespan, TimespanDuration}, - UpdateError, + ChartError, }; #[derive(Debug, Clone, PartialEq, Eq)] @@ -298,7 +298,7 @@ impl Decrementable for i32 { pub async fn data_source_query_range_to_db_statement_range( cx: &UpdateContext<'_>, data_source_range: UniversalRange>, -) -> Result>>, UpdateError> +) -> Result>>, ChartError> where AllRangeSource: RemoteQueryBehaviour>>, { diff --git a/stats/stats/src/update_group.rs b/stats/stats/src/update_group.rs index ec8793c19..010de9005 100644 --- a/stats/stats/src/update_group.rs +++ b/stats/stats/src/update_group.rs @@ -43,7 +43,7 @@ use tokio::sync::{Mutex, MutexGuard}; use crate::{ charts::{chart_properties_portrait::imports::ChartKey, ChartObject}, data_source::UpdateParameters, - UpdateError, + ChartError, }; #[derive(Error, Debug, PartialEq)] @@ -95,7 +95,7 @@ pub trait UpdateGroup: core::fmt::Debug { &self, params: UpdateParameters<'a>, enabled_charts: &HashSet, - ) -> Result<(), UpdateError>; + ) -> Result<(), ChartError>; } /// Construct update group that implemants [`UpdateGroup`]. 
The main purpose of the @@ -500,12 +500,12 @@ impl SyncUpdateGroup { db: &DatabaseConnection, creation_time_override: Option>, enabled_charts: &HashSet, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { let (_joint_guard, enabled_members) = self.lock_enabled_dependencies(enabled_charts).await; self.inner .create_charts(db, creation_time_override, &enabled_members) .await - .map_err(UpdateError::StatsDB) + .map_err(ChartError::StatsDB) } /// Ignores unknown names @@ -513,7 +513,7 @@ impl SyncUpdateGroup { &self, params: UpdateParameters<'a>, enabled_charts: &HashSet, - ) -> Result<(), UpdateError> { + ) -> Result<(), ChartError> { let (_joint_guard, enabled_members) = self.lock_enabled_dependencies(enabled_charts).await; tracing::info!( update_group = self.name(), diff --git a/stats/stats/src/utils.rs b/stats/stats/src/utils.rs index 8220dffda..1d1f937d5 100644 --- a/stats/stats/src/utils.rs +++ b/stats/stats/src/utils.rs @@ -12,6 +12,9 @@ pub fn day_start(date: &NaiveDate) -> DateTime { .and_utc() } +/// Database connection with a mark of what database it is. +/// Used to separate caching for different databases to +/// prevent data clashes when running unit tests concurrently. #[derive(Debug, Clone)] pub struct MarkedDbConnection { pub connection: Arc,