diff --git a/Cargo.lock b/Cargo.lock
index beb9fee3a2..db0b0e1fda 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1131,6 +1131,7 @@ dependencies = [
  "chrono",
  "common-models",
  "common-utils",
+ "config",
  "database-models",
  "sea-orm",
  "sea-query",
@@ -2225,6 +2226,7 @@ dependencies = [
  "dependent-models",
  "enums",
  "fitness-models",
+ "itertools 0.13.0",
  "media-models",
  "mime_guess",
  "nanoid",
diff --git a/apps/backend/src/common.rs b/apps/backend/src/common.rs
index 824b9fb401..03fd996ebd 100644
--- a/apps/backend/src/common.rs
+++ b/apps/backend/src/common.rs
@@ -69,7 +69,7 @@ pub async fn create_app_services(
         s3_client,
         config.file_storage.s3_bucket_name.clone(),
     ));
-    let cache_service = CacheService::new(&db);
+    let cache_service = CacheService::new(&db, config.clone());
    let supporting_service = Arc::new(
        SupportingService::new(
            &db,
diff --git a/apps/frontend/app/routes/_dashboard.fitness.exercises.item.$id._index.tsx b/apps/frontend/app/routes/_dashboard.fitness.exercises.item.$id._index.tsx
index 10688affbc..5865853ed1 100644
--- a/apps/frontend/app/routes/_dashboard.fitness.exercises.item.$id._index.tsx
+++ b/apps/frontend/app/routes/_dashboard.fitness.exercises.item.$id._index.tsx
@@ -539,25 +539,25 @@ export default function Page() {
 						};
 					});
 					invariant(data);
-					return (
+					return data.filter((d) => d.value).length > 0 ? (
 							{changeCase(best)}
-					);
+					) : null;
 				})}
diff --git a/apps/frontend/app/routes/_dashboard.settings.imports-and-exports._index.tsx b/apps/frontend/app/routes/_dashboard.settings.imports-and-exports._index.tsx
index ddddf926e0..712b6c6ac8 100644
--- a/apps/frontend/app/routes/_dashboard.settings.imports-and-exports._index.tsx
+++ b/apps/frontend/app/routes/_dashboard.settings.imports-and-exports._index.tsx
@@ -81,10 +81,11 @@ export const action = async ({ request }: ActionFunctionArgs) => {
 	formData.delete("source");
 	const values = await match(source)
 		.with(
-			ImportSource.Storygraph,
+			ImportSource.Hevy,
 			ImportSource.Imdb,
-			ImportSource.Goodreads,
 			ImportSource.OpenScale,
+			ImportSource.Goodreads,
+			ImportSource.Storygraph,
 			() => ({
 				genericCsv: processSubmission(formData, genericCsvImportFormSchema),
 			}),
@@ -270,9 +271,10 @@ export default function Page() {
 			),
 		)
 		.with(
+			ImportSource.Hevy,
+			ImportSource.Imdb,
 			ImportSource.OpenScale,
 			ImportSource.Goodreads,
-			ImportSource.Imdb,
 			ImportSource.Storygraph,
 			() => (
 				<>
diff --git a/crates/enums/src/lib.rs b/crates/enums/src/lib.rs
index 12a333f2e1..0c9108f032 100644
--- a/crates/enums/src/lib.rs
+++ b/crates/enums/src/lib.rs
@@ -200,6 +200,7 @@ pub enum ImportSource {
     Igdb,
     Imdb,
     Plex,
+    Hevy,
     Trakt,
     Movary,
     Jellyfin,
@@ -565,17 +566,17 @@ pub enum IntegrationProvider {
 }
 
 #[derive(
-    Debug,
-    Clone,
-    Copy,
-    PartialEq,
     Eq,
-    EnumIter,
-    DeriveActiveEnum,
-    Deserialize,
-    Serialize,
     Enum,
+    Copy,
+    Clone,
+    Debug,
     Display,
+    EnumIter,
+    PartialEq,
+    Serialize,
+    Deserialize,
+    DeriveActiveEnum,
 )]
 #[sea_orm(
     rs_type = "String",
diff --git a/crates/migrations/src/m20240903_add_changes_for_user_to_collection_removal.rs b/crates/migrations/src/m20240903_add_changes_for_user_to_collection_removal.rs
index da95d0ed8f..8b539013f9 100644
--- a/crates/migrations/src/m20240903_add_changes_for_user_to_collection_removal.rs
+++ b/crates/migrations/src/m20240903_add_changes_for_user_to_collection_removal.rs
@@ -1,6 +1,6 @@
 use sea_orm_migration::prelude::*;
 
-use crate::m20231017_create_user_to_entity::CONSTRAINT_SQL;
+use super::m20231017_create_user_to_entity::CONSTRAINT_SQL;
 
 #[derive(DeriveMigrationName)]
 pub struct Migration;
diff --git a/crates/migrations/src/m20240926_add_columns_for_open_sourcing_pro_version.rs b/crates/migrations/src/m20240926_add_columns_for_open_sourcing_pro_version.rs
index 523dc81ef2..12bb9f29db 100644
--- a/crates/migrations/src/m20240926_add_columns_for_open_sourcing_pro_version.rs
+++ b/crates/migrations/src/m20240926_add_columns_for_open_sourcing_pro_version.rs
@@ -1,6 +1,6 @@
 use sea_orm_migration::prelude::*;
 
-use crate::{
+use super::{
     m20231016_create_collection_to_entity::{CONSTRAINT_SQL, ENTITY_ID_SQL, ENTITY_LOT_SQL},
     m20240904_create_monitored_entity::MONITORED_ENTITY_VIEW_CREATION_SQL,
 };
diff --git a/crates/migrations/src/m20241019_changes_for_issue_929.rs b/crates/migrations/src/m20241019_changes_for_issue_929.rs
index 0544bb07a1..e702e1c863 100644
--- a/crates/migrations/src/m20241019_changes_for_issue_929.rs
+++ b/crates/migrations/src/m20241019_changes_for_issue_929.rs
@@ -1,6 +1,6 @@
 use sea_orm_migration::prelude::*;
 
-use crate::m20240904_create_monitored_entity::MONITORED_ENTITY_VIEW_CREATION_SQL;
+use super::m20240904_create_monitored_entity::MONITORED_ENTITY_VIEW_CREATION_SQL;
 
 #[derive(DeriveMigrationName)]
 pub struct Migration;
diff --git a/crates/migrations/src/m20241126_changes_for_issue_1113.rs b/crates/migrations/src/m20241126_changes_for_issue_1113.rs
index 3a18b387a8..e8da7f4460 100644
--- a/crates/migrations/src/m20241126_changes_for_issue_1113.rs
+++ b/crates/migrations/src/m20241126_changes_for_issue_1113.rs
@@ -1,6 +1,6 @@
 use sea_orm_migration::prelude::*;
 
-use crate::{
+use super::{
     m20230822_create_exercise::EXERCISE_NAME_INDEX,
     m20240827_create_daily_user_activity::create_daily_user_activity_table,
 };
diff --git a/crates/models/database/src/functions.rs b/crates/models/database/src/functions.rs
index 4f4035887a..752512ac44 100644
--- a/crates/models/database/src/functions.rs
+++ b/crates/models/database/src/functions.rs
@@ -5,7 +5,7 @@ use sea_orm::{
     ActiveModelTrait, ActiveValue, ColumnTrait, ConnectionTrait, EntityTrait, QueryFilter,
 };
 
-use crate::{prelude::UserToEntity, user_to_entity};
+use super::{prelude::UserToEntity, user_to_entity};
 
 pub async fn get_user_to_entity_association<C: ConnectionTrait>(
     db: &C,
diff --git a/crates/models/database/src/review.rs b/crates/models/database/src/review.rs
index 0964ba387e..4eb462039b 100644
--- a/crates/models/database/src/review.rs
+++ b/crates/models/database/src/review.rs
@@ -11,7 +11,7 @@ use rust_decimal::Decimal;
 use sea_orm::{entity::prelude::*, ActiveValue};
 use serde::{Deserialize, Serialize};
 
-use crate::functions::associate_user_with_entity;
+use super::functions::associate_user_with_entity;
 
 #[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
 #[sea_orm(table_name = "review")]
diff --git a/crates/models/database/src/seen.rs b/crates/models/database/src/seen.rs
index bc3d893b70..088b61025d 100644
--- a/crates/models/database/src/seen.rs
+++ b/crates/models/database/src/seen.rs
@@ -15,7 +15,7 @@ use rust_decimal_macros::dec;
 use sea_orm::{entity::prelude::*, ActiveValue};
 use serde::{Deserialize, Serialize};
 
-use crate::functions::associate_user_with_entity;
+use super::functions::associate_user_with_entity;
 
 #[derive(Clone, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize, SimpleObject, Educe)]
 #[graphql(name = "Seen")]
diff --git a/crates/models/media/src/lib.rs b/crates/models/media/src/lib.rs
index 1455cd1e86..0a30fa432f 100644
--- a/crates/models/media/src/lib.rs
+++ b/crates/models/media/src/lib.rs
@@ -611,12 +611,14 @@ pub struct ImportOrExportPersonItem {
 
 #[derive(Debug, Serialize, Deserialize, Clone, Schematic)]
 #[serde(rename_all = "snake_case")]
 pub struct ImportOrExportExerciseItem {
+    /// The unique identifier of the exercise.
+    pub id: String,
     /// The name of the exercise.
     pub name: String,
-    /// The review history for the user.
-    pub reviews: Vec<ImportOrExportItemReview>,
     /// The collections this entity was added to.
     pub collections: Vec<String>,
+    /// The review history for the user.
+    pub reviews: Vec<ImportOrExportItemReview>,
 }
 
 #[derive(
diff --git a/crates/providers/src/igdb.rs b/crates/providers/src/igdb.rs
index 50560ec15b..6e909712c4 100644
--- a/crates/providers/src/igdb.rs
+++ b/crates/providers/src/igdb.rs
@@ -547,7 +547,6 @@ impl IgdbService {
         let access_token = self.get_access_token().await;
         cc.set_with_expiry(
             ApplicationCacheKey::IgdbSettings,
-            None,
             ApplicationCacheValue::IgdbSettings {
                 access_token: access_token.clone(),
             },
diff --git a/crates/providers/src/listennotes.rs b/crates/providers/src/listennotes.rs
index 80aca4e33d..0b1ff82f0e 100644
--- a/crates/providers/src/listennotes.rs
+++ b/crates/providers/src/listennotes.rs
@@ -218,7 +218,6 @@ impl ListennotesService {
         }
         cc.set_with_expiry(
             ApplicationCacheKey::ListennotesSettings,
-            None,
             ApplicationCacheValue::ListennotesSettings {
                 genres: genres.clone(),
             },
diff --git a/crates/providers/src/tmdb.rs b/crates/providers/src/tmdb.rs
index 02f00a819c..eda378604a 100644
--- a/crates/providers/src/tmdb.rs
+++ b/crates/providers/src/tmdb.rs
@@ -1345,7 +1345,6 @@ async fn get_settings(
     };
     cc.set_with_expiry(
         ApplicationCacheKey::TmdbSettings,
-        None,
         ApplicationCacheValue::TmdbSettings(settings.clone()),
     )
     .await
diff --git a/crates/services/cache/Cargo.toml b/crates/services/cache/Cargo.toml
index db078fa18b..94dbc0c0a7 100644
--- a/crates/services/cache/Cargo.toml
+++ b/crates/services/cache/Cargo.toml
@@ -8,6 +8,7 @@ async-graphql = { workspace = true }
 chrono = { workspace = true }
 common-models = { path = "../../models/common" }
 common-utils = { path = "../../utils/common" }
+config = { path = "../../config" }
 database-models = { path = "../../models/database" }
 sea-orm = { workspace = true }
 sea-query = { workspace = true }
diff --git a/crates/services/cache/src/lib.rs b/crates/services/cache/src/lib.rs
index 727c2784e4..478896301e 100644
--- a/crates/services/cache/src/lib.rs
+++ b/crates/services/cache/src/lib.rs
@@ -1,3 +1,5 @@
+use std::sync::Arc;
+
 use async_graphql::Result;
 use chrono::{Duration, Utc};
 use common_models::{ApplicationCacheKey, ApplicationCacheValue};
@@ -9,22 +11,41 @@ use uuid::Uuid;
 
 pub struct CacheService {
     db: DatabaseConnection,
+    config: Arc<config::AppConfig>,
 }
 
 impl CacheService {
-    pub fn new(db: &DatabaseConnection) -> Self {
-        Self { db: db.clone() }
+    pub fn new(db: &DatabaseConnection, config: Arc<config::AppConfig>) -> Self {
+        Self {
+            config,
+            db: db.clone(),
+        }
     }
 }
 
 impl CacheService {
+    fn get_expiry_for_key(&self, key: &ApplicationCacheKey) -> Option<i64> {
+        match key {
+            ApplicationCacheKey::UserAnalyticsParameters { .. } => Some(8),
+            ApplicationCacheKey::UserAnalytics { .. } => Some(2),
+            ApplicationCacheKey::IgdbSettings
+            | ApplicationCacheKey::ListennotesSettings
+            | ApplicationCacheKey::ServerKeyValidated
+            | ApplicationCacheKey::TmdbSettings => None,
+            ApplicationCacheKey::MetadataRecentlyConsumed { .. } => Some(1),
+            ApplicationCacheKey::ProgressUpdateCache { .. } => {
+                Some(self.config.server.progress_update_threshold)
+            }
+        }
+    }
+
     pub async fn set_with_expiry(
         &self,
         key: ApplicationCacheKey,
-        expiry_hours: Option<i64>,
         value: ApplicationCacheValue,
     ) -> Result<Uuid> {
         let now = Utc::now();
+        let expiry_hours = self.get_expiry_for_key(&key);
         let to_insert = application_cache::ActiveModel {
             key: ActiveValue::Set(key),
             value: ActiveValue::Set(value),
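
Editorial note, not part of the diff: the hunk above centralizes the expiry policy in `get_expiry_for_key`, which is why every call site in this diff drops one argument. A minimal call-site sketch under that assumption, using the types from the hunks above:

```rust
use common_models::{ApplicationCacheKey, ApplicationCacheValue};

// Sketch only: the caller no longer picks an expiry; CacheService derives it
// from the key, so the whole policy lives in one exhaustive match.
async fn mark_server_key_validated(cs: &CacheService) -> async_graphql::Result<()> {
    cs.set_with_expiry(
        ApplicationCacheKey::ServerKeyValidated,
        ApplicationCacheValue::Empty,
    )
    .await?;
    Ok(())
}
```

A useful side effect of the exhaustive match: adding a new `ApplicationCacheKey` variant will not compile until an expiry is chosen for it.
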
diff --git a/crates/services/exporter/Cargo.toml b/crates/services/exporter/Cargo.toml
index 963e05af9f..1ab50c39f4 100644
--- a/crates/services/exporter/Cargo.toml
+++ b/crates/services/exporter/Cargo.toml
@@ -13,6 +13,7 @@ database-models = { path = "../../models/database" }
 database-utils = { path = "../../utils/database" }
 dependent-models = { path = "../../models/dependent" }
 enums = { path = "../../enums" }
+itertools = { workspace = true }
 fitness-models = { path = "../../models/fitness" }
 media-models = { path = "../../models/media" }
 mime_guess = { workspace = true }
diff --git a/crates/services/exporter/src/lib.rs b/crates/services/exporter/src/lib.rs
index 7414bf22b3..142c9ad834 100644
--- a/crates/services/exporter/src/lib.rs
+++ b/crates/services/exporter/src/lib.rs
@@ -19,6 +19,7 @@ use database_utils::{
 use dependent_models::{ImportOrExportWorkoutItem, ImportOrExportWorkoutTemplateItem};
 use enums::EntityLot;
 use fitness_models::UserMeasurementsListInput;
+use itertools::Itertools;
 use media_models::{
     ImportOrExportExerciseItem, ImportOrExportItemRating, ImportOrExportItemReview,
     ImportOrExportMetadataGroupItem, ImportOrExportMetadataItem, ImportOrExportMetadataItemSeen,
@@ -373,27 +374,37 @@ impl ExporterService {
         user_id: &String,
         writer: &mut JsonStreamWriter<File>,
     ) -> Result<()> {
-        let exercises = Exercise::find()
-            .filter(exercise::Column::CreatedByUserId.eq(user_id))
+        let exercises = UserToEntity::find()
+            .select_only()
+            .column(exercise::Column::Id)
+            .column(exercise::Column::Name)
+            .filter(user_to_entity::Column::UserId.eq(user_id))
+            .filter(user_to_entity::Column::ExerciseId.is_not_null())
+            .left_join(Exercise)
+            .into_tuple::<(String, String)>()
             .all(&self.0.db)
             .await
             .unwrap();
-        for e in exercises {
-            let reviews = item_reviews(user_id, &e.id, EntityLot::Exercise, false, &self.0)
+        for (exercise_id, exercise_name) in exercises {
+            let reviews = item_reviews(user_id, &exercise_id, EntityLot::Exercise, false, &self.0)
                 .await?
                 .into_iter()
                 .map(|r| self.get_review_export_item(r))
-                .collect();
+                .collect_vec();
             let collections =
-                entity_in_collections(&self.0.db, user_id, &e.id, EntityLot::Exercise)
+                entity_in_collections(&self.0.db, user_id, &exercise_id, EntityLot::Exercise)
                     .await?
                     .into_iter()
                     .map(|c| c.name)
-                    .collect();
+                    .collect_vec();
+            if reviews.is_empty() && collections.is_empty() {
+                continue;
+            }
             let exp = ImportOrExportExerciseItem {
-                name: e.name,
-                collections,
                 reviews,
+                collections,
+                id: exercise_id,
+                name: exercise_name,
             };
             writer.serialize_value(&exp).unwrap();
         }
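
To make the exporter change above concrete: each exported exercise entry now carries its id, and entries with neither reviews nor collections are skipped. A hedged sketch of one serialized entry (values invented for illustration; the id format is whatever `generate_exercise_id` produces, which this diff does not specify):

```rust
fn sample_export_entry() -> serde_json::Value {
    serde_json::json!({
        "id": "<generated exercise id>", // placeholder, not a real id
        "name": "Bench Press (Barbell)",
        "collections": ["Favourites"],
        "reviews": []
    })
}
```
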
diff --git a/crates/services/importer/src/audiobookshelf.rs b/crates/services/importer/src/audiobookshelf.rs
index e604c1411a..ce1b577638 100644
--- a/crates/services/importer/src/audiobookshelf.rs
+++ b/crates/services/importer/src/audiobookshelf.rs
@@ -20,9 +20,7 @@ use reqwest::{
 use serde_json::json;
 use specific_models::audiobookshelf as audiobookshelf_models;
 
-use crate::utils;
-
-use super::{ImportFailStep, ImportFailedItem};
+use super::{utils, ImportFailStep, ImportFailedItem};
 
 pub async fn import(
     input: DeployUrlAndKeyImportInput,
diff --git a/crates/services/importer/src/goodreads.rs b/crates/services/importer/src/goodreads.rs
index f497399a35..d2304e4340 100644
--- a/crates/services/importer/src/goodreads.rs
+++ b/crates/services/importer/src/goodreads.rs
@@ -15,9 +15,7 @@ use rust_decimal::Decimal;
 use rust_decimal_macros::dec;
 use serde::Deserialize;
 
-use crate::utils;
-
-use super::{ImportFailStep, ImportFailedItem};
+use super::{utils, ImportFailStep, ImportFailedItem};
 
 #[derive(Debug, Deserialize)]
 struct Book {
diff --git a/crates/services/importer/src/hevy.rs b/crates/services/importer/src/hevy.rs
new file mode 100644
index 0000000000..91a2a7ac54
--- /dev/null
+++ b/crates/services/importer/src/hevy.rs
@@ -0,0 +1,178 @@
+use std::{collections::HashMap, sync::Arc};
+
+use async_graphql::Result;
+use chrono::NaiveDateTime;
+use common_utils::ryot_log;
+use csv::Reader;
+use database_models::exercise;
+use dependent_models::{ImportCompletedItem, ImportResult};
+use enums::ExerciseLot;
+use fitness_models::{
+    SetLot, UserExerciseInput, UserWorkoutInput, UserWorkoutSetRecord, WorkoutSetStatistic,
+};
+use importer_models::{ImportFailStep, ImportFailedItem};
+use indexmap::IndexMap;
+use itertools::Itertools;
+use media_models::DeployGenericCsvImportInput;
+use rust_decimal::Decimal;
+use rust_decimal_macros::dec;
+use serde::{Deserialize, Serialize};
+use supporting_service::SupportingService;
+
+use super::utils;
+
+#[derive(Debug, Serialize, Deserialize, Clone, Default)]
+struct Entry {
+    title: String,
+    set_index: u8,
+    rpe: Option<Decimal>,
+    set_type: String,
+    end_time: String,
+    start_time: String,
+    reps: Option<Decimal>,
+    exercise_title: String,
+    #[serde(alias = "weight_kg", alias = "weight_lbs")]
+    weight: Option<Decimal>,
+    #[serde(alias = "duration_seconds")]
+    duration: Option<Decimal>,
+    #[serde(alias = "distance_km", alias = "distance_miles")]
+    distance: Option<Decimal>,
+    description: Option<String>,
+    exercise_notes: Option<String>,
+}
+
+pub async fn import(
+    input: DeployGenericCsvImportInput,
+    ss: &Arc<SupportingService>,
+    user_id: &str,
+) -> Result<ImportResult> {
+    let mut completed = vec![];
+    let mut failed = vec![];
+    let mut unique_exercises: HashMap<String, exercise::Model> = HashMap::new();
+    let entries_reader = Reader::from_path(&input.csv_path)?
+        .deserialize::<Entry>()
+        .map(|r| r.unwrap())
+        .collect_vec();
+
+    let mut workouts_to_entries = IndexMap::new();
+    for entry in entries_reader.clone() {
+        workouts_to_entries
+            .entry((entry.start_time.clone(), entry.end_time.clone()))
+            .or_insert(vec![])
+            .push(entry);
+    }
+
+    let mut exercises_to_workouts = IndexMap::new();
+
+    for (workout_number, entries) in workouts_to_entries {
+        let mut exercises = IndexMap::new();
+        for entry in entries {
+            exercises
+                .entry(entry.exercise_title.clone())
+                .or_insert(vec![])
+                .push(entry);
+        }
+        exercises_to_workouts.insert(workout_number, exercises);
+    }
+
+    for (workout_identifier, workout) in exercises_to_workouts {
+        let first_exercise = workout.first().unwrap().1.first().unwrap();
+        let mut collected_exercises = vec![];
+        for (exercise_name, exercises) in workout.clone() {
+            let mut collected_sets = vec![];
+            let valid_ex = exercises.first().unwrap();
+            let exercise_lot = if valid_ex.duration.is_some() && valid_ex.distance.is_some() {
+                ExerciseLot::DistanceAndDuration
+            } else if valid_ex.duration.is_some() {
+                ExerciseLot::Duration
+            } else if valid_ex.reps.is_some() && valid_ex.weight.is_some() {
+                ExerciseLot::RepsAndWeight
+            } else if valid_ex.reps.is_some() {
+                ExerciseLot::Reps
+            } else {
+                failed.push(ImportFailedItem {
+                    lot: None,
+                    step: ImportFailStep::InputTransformation,
+                    identifier: format!(
+                        "Workout #{:#?}, Set #{}",
+                        workout_identifier, valid_ex.set_index
+                    ),
+                    error: Some(format!(
+                        "Could not determine exercise lot: {}",
+                        serde_json::to_string(&valid_ex).unwrap()
+                    )),
+                });
+                continue;
+            };
+            let exercise_id = utils::associate_with_existing_or_new_exercise(
+                user_id,
+                &exercise_name,
+                exercise_lot,
+                ss,
+                &mut unique_exercises,
+            )
+            .await?;
+            ryot_log!(debug, "Importing exercise with id = {}", exercise_id);
+            for set in exercises {
+                let weight = set.weight.map(|d| if d == dec!(0) { dec!(1) } else { d });
+                let set_lot = match set.set_type.as_str() {
+                    "warmup" => SetLot::WarmUp,
+                    "failure" => SetLot::Failure,
+                    "dropset" => SetLot::Drop,
+                    _ => SetLot::Normal,
+                };
+                collected_sets.push(UserWorkoutSetRecord {
+                    note: None,
+                    lot: set_lot,
+                    rpe: set.rpe,
+                    rest_time: None,
+                    confirmed_at: None,
+                    statistic: WorkoutSetStatistic {
+                        weight,
+                        reps: set.reps,
+                        duration: set.duration.and_then(|r| r.checked_div(dec!(60))),
+                        distance: set.distance.and_then(|d| d.checked_div(dec!(1000))),
+                        ..Default::default()
+                    },
+                });
+            }
+            collected_exercises.push(UserExerciseInput {
+                exercise_id,
+                assets: None,
+                sets: collected_sets,
+                notes: first_exercise
+                    .exercise_notes
+                    .clone()
+                    .map(|n| vec![n])
+                    .unwrap_or_default(),
+            });
+        }
+        let start_time = parse_date_string(&first_exercise.start_time);
+        let end_time = parse_date_string(&first_exercise.end_time);
+        completed.push(ImportCompletedItem::Workout(UserWorkoutInput {
+            assets: None,
+            supersets: vec![],
+            template_id: None,
+            repeated_from: None,
+            create_workout_id: None,
+            update_workout_id: None,
+            exercises: collected_exercises,
+            update_workout_template_id: None,
+            name: first_exercise.title.clone(),
+            comment: first_exercise.description.clone(),
+            end_time: utils::get_date_time_with_offset(end_time, &ss.timezone),
+            start_time: utils::get_date_time_with_offset(start_time, &ss.timezone),
+        }));
+    }
+    completed.extend(
+        unique_exercises
+            .values()
+            .cloned()
+            .map(ImportCompletedItem::Exercise),
+    );
+    Ok(ImportResult { failed, completed })
+}
+
+fn parse_date_string(input: &str) -> NaiveDateTime {
+    NaiveDateTime::parse_from_str(&input, "%d %b %Y, %H:%M").unwrap()
+}
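
A hedged illustration of the input this importer expects, not taken from Hevy's documentation: the column names below are inferred from the `Entry` struct's serde field names and aliases, and the timestamp format is the one `parse_date_string` parses. A real export may order or name columns differently; csv matches by header, so ordering should not matter.

```rust
// Were this dropped into the hevy module as a test, it would round-trip one
// Hevy-style row through the Entry struct above.
#[cfg(test)]
mod sketch {
    use super::Entry;

    #[test]
    fn parses_a_hevy_style_row() {
        let data = concat!(
            "title,start_time,end_time,exercise_title,set_index,set_type,",
            "weight_kg,reps,duration_seconds,distance_km,rpe,description,exercise_notes\n",
            "Evening workout,\"07 Jan 2025, 18:30\",\"07 Jan 2025, 19:15\",",
            "Bench Press (Barbell),0,normal,60,8,,,8,,\n",
        );
        let entries = csv::Reader::from_reader(data.as_bytes())
            .deserialize::<Entry>()
            .map(|r| r.unwrap())
            .collect::<Vec<_>>();
        assert_eq!(entries.len(), 1);
        // reps and weight present, duration/distance empty => RepsAndWeight lot
        assert!(entries[0].reps.is_some() && entries[0].weight.is_some());
        assert!(entries[0].duration.is_none() && entries[0].distance.is_none());
    }
}
```
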
%H:%M").unwrap() +} diff --git a/crates/services/importer/src/lib.rs b/crates/services/importer/src/lib.rs index 2f4d7d1878..87972744a9 100644 --- a/crates/services/importer/src/lib.rs +++ b/crates/services/importer/src/lib.rs @@ -1,15 +1,19 @@ -use std::sync::Arc; +use std::{collections::HashMap, sync::Arc}; use async_graphql::Result; use background::ApplicationJob; use chrono::{DateTime, Duration, NaiveDateTime, Offset, TimeZone, Utc}; use common_models::BackgroundJob; use common_utils::ryot_log; -use database_models::{import_report, prelude::ImportReport}; +use database_models::{ + exercise, import_report, + prelude::{Exercise, ImportReport}, +}; use dependent_utils::{ - commit_metadata, deploy_background_job, get_google_books_service, get_openlibrary_service, - get_tmdb_non_media_service, process_import, + commit_metadata, deploy_background_job, generate_exercise_id, get_google_books_service, + get_openlibrary_service, get_tmdb_non_media_service, process_import, }; +use enums::{ExerciseLot, ExerciseSource}; use enums::{ImportSource, MediaSource}; use importer_models::{ImportFailStep, ImportFailedItem}; use media_models::{DeployImportJobInput, ImportOrExportMetadataItem}; @@ -24,6 +28,7 @@ use traits::TraceOk; mod audiobookshelf; mod generic_json; mod goodreads; +mod hevy; mod igdb; mod imdb; mod jellyfin; @@ -78,6 +83,7 @@ impl ImporterService { ImportSource::StrongApp => { strong_app::import(input.strong_app.unwrap(), &self.0, &user_id).await } + ImportSource::Hevy => hevy::import(input.generic_csv.unwrap(), &self.0, &user_id).await, ImportSource::Mediatracker => mediatracker::import(input.url_and_key.unwrap()).await, ImportSource::Myanimelist => myanimelist::import(input.mal.unwrap()).await, ImportSource::Goodreads => { @@ -198,4 +204,40 @@ pub mod utils { } identifier.map(|id| (id, source)) } + + pub async fn associate_with_existing_or_new_exercise( + user_id: &str, + exercise_name: &String, + exercise_lot: ExerciseLot, + ss: &Arc, + unique_exercises: &mut HashMap, + ) -> Result { + let existing_exercise = Exercise::find() + .filter(exercise::Column::Lot.eq(exercise_lot)) + .filter(exercise::Column::Name.eq(exercise_name)) + .one(&ss.db) + .await?; + let generated_id = generate_exercise_id(&exercise_name, exercise_lot, user_id); + let exercise_id = match existing_exercise { + Some(db_ex) if db_ex.source == ExerciseSource::Github || db_ex.id == generated_id => { + db_ex.id + } + _ => match unique_exercises.get(exercise_name) { + Some(mem_ex) => mem_ex.id.clone(), + None => { + unique_exercises.insert( + exercise_name.clone(), + exercise::Model { + lot: exercise_lot, + id: generated_id.clone(), + name: exercise_name.to_owned(), + ..Default::default() + }, + ); + generated_id + } + }, + }; + Ok(exercise_id) + } } diff --git a/crates/services/importer/src/storygraph.rs b/crates/services/importer/src/storygraph.rs index 0800b0dd21..457470937e 100644 --- a/crates/services/importer/src/storygraph.rs +++ b/crates/services/importer/src/storygraph.rs @@ -15,9 +15,7 @@ use rust_decimal::Decimal; use rust_decimal_macros::dec; use serde::{Deserialize, Serialize}; -use crate::utils; - -use super::{ImportFailStep, ImportFailedItem, ImportOrExportMetadataItem}; +use super::{utils, ImportFailStep, ImportFailedItem, ImportOrExportMetadataItem}; #[derive(Debug, Serialize, Deserialize)] #[serde(untagged)] diff --git a/crates/services/importer/src/strong_app.rs b/crates/services/importer/src/strong_app.rs index ccd53a32bb..58e236c8a8 100644 --- a/crates/services/importer/src/strong_app.rs +++ 
diff --git a/crates/services/importer/src/storygraph.rs b/crates/services/importer/src/storygraph.rs
index 0800b0dd21..457470937e 100644
--- a/crates/services/importer/src/storygraph.rs
+++ b/crates/services/importer/src/storygraph.rs
@@ -15,9 +15,7 @@ use rust_decimal::Decimal;
 use rust_decimal_macros::dec;
 use serde::{Deserialize, Serialize};
 
-use crate::utils;
-
-use super::{ImportFailStep, ImportFailedItem, ImportOrExportMetadataItem};
+use super::{utils, ImportFailStep, ImportFailedItem, ImportOrExportMetadataItem};
 
 #[derive(Debug, Serialize, Deserialize)]
 #[serde(untagged)]
diff --git a/crates/services/importer/src/strong_app.rs b/crates/services/importer/src/strong_app.rs
index ccd53a32bb..58e236c8a8 100644
--- a/crates/services/importer/src/strong_app.rs
+++ b/crates/services/importer/src/strong_app.rs
@@ -4,10 +4,9 @@ use async_graphql::Result;
 use chrono::{Duration, NaiveDateTime};
 use common_utils::ryot_log;
 use csv::ReaderBuilder;
-use database_models::{exercise, prelude::Exercise};
+use database_models::exercise;
 use dependent_models::{ImportCompletedItem, ImportResult};
-use dependent_utils::generate_exercise_id;
-use enums::{ExerciseLot, ExerciseSource};
+use enums::ExerciseLot;
 use fitness_models::{
     SetLot, UserExerciseInput, UserWorkoutInput, UserWorkoutSetRecord, WorkoutSetStatistic,
 };
@@ -17,7 +16,6 @@ use itertools::Itertools;
 use media_models::DeployStrongAppImportInput;
 use rust_decimal::Decimal;
 use rust_decimal_macros::dec;
-use sea_orm::{ColumnTrait, EntityTrait, QueryFilter};
 use serde::{Deserialize, Serialize};
 use supporting_service::SupportingService;
 
@@ -144,34 +142,14 @@ async fn import_exercises(
             });
             continue;
         };
-        let existing_exercise = Exercise::find()
-            .filter(exercise::Column::Lot.eq(exercise_lot))
-            .filter(exercise::Column::Name.eq(&exercise_name))
-            .one(&ss.db)
-            .await?;
-        let generated_id = generate_exercise_id(&exercise_name, exercise_lot, user_id);
-        let exercise_id = match existing_exercise {
-            Some(db_ex)
-                if db_ex.source == ExerciseSource::Github || db_ex.id == generated_id =>
-            {
-                db_ex.id
-            }
-            _ => match unique_exercises.get(&exercise_name) {
-                Some(mem_ex) => mem_ex.id.clone(),
-                None => {
-                    unique_exercises.insert(
-                        exercise_name.clone(),
-                        exercise::Model {
-                            lot: exercise_lot,
-                            name: exercise_name,
-                            id: generated_id.clone(),
-                            ..Default::default()
-                        },
-                    );
-                    generated_id
-                }
-            },
-        };
+        let exercise_id = utils::associate_with_existing_or_new_exercise(
+            user_id,
+            &exercise_name,
+            exercise_lot,
+            ss,
+            &mut unique_exercises,
+        )
+        .await?;
         ryot_log!(debug, "Importing exercise with id = {}", exercise_id);
         for set in exercises {
             if let Some(note) = set.notes {
@@ -185,6 +163,11 @@ async fn import_exercises(
                 _ => SetLot::Normal,
             };
             collected_sets.push(UserWorkoutSetRecord {
+                rpe: None,
+                note: None,
+                lot: set_lot,
+                rest_time: None,
+                confirmed_at: None,
                 statistic: WorkoutSetStatistic {
                     weight,
                     reps: set.reps,
                     duration: set.duration.and_then(|r| r.checked_div(dec!(60))),
                     distance: set.distance.and_then(|d| d.checked_div(dec!(1000))),
                     ..Default::default()
                 },
-                rpe: None,
-                note: None,
-                lot: set_lot,
-                rest_time: None,
-                confirmed_at: None,
             });
         }
         collected_exercises.push(UserExerciseInput {
diff --git a/crates/services/miscellaneous/src/lib.rs b/crates/services/miscellaneous/src/lib.rs
index 772070bbce..eb69c734fb 100644
--- a/crates/services/miscellaneous/src/lib.rs
+++ b/crates/services/miscellaneous/src/lib.rs
@@ -3140,7 +3140,6 @@ ORDER BY RANDOM() LIMIT 10;
         if is_server_key_validated {
             cs.set_with_expiry(
                 ApplicationCacheKey::ServerKeyValidated,
-                None,
                 ApplicationCacheValue::Empty,
             )
             .await?;
diff --git a/crates/services/statistics/src/lib.rs b/crates/services/statistics/src/lib.rs
index e32c23f900..845f66a9f5 100644
--- a/crates/services/statistics/src/lib.rs
+++ b/crates/services/statistics/src/lib.rs
@@ -66,7 +66,6 @@ impl StatisticsService {
             .cache_service
             .set_with_expiry(
                 cache_key,
-                Some(8),
                 ApplicationCacheValue::UserAnalyticsParameters(response.clone()),
             )
             .await?;
@@ -379,7 +378,6 @@ impl StatisticsService {
             .cache_service
             .set_with_expiry(
                 cache_key,
-                Some(2),
                 ApplicationCacheValue::UserAnalytics(response.clone()),
             )
             .await?;
diff --git a/crates/utils/dependent/src/lib.rs b/crates/utils/dependent/src/lib.rs
index 3e693d8d99..575bab68b9 100644
--- a/crates/utils/dependent/src/lib.rs
+++ b/crates/utils/dependent/src/lib.rs
@@ -1264,7 +1264,6 @@ pub async fn mark_entity_as_recently_consumed(
             user_id: user_id.to_owned(),
             entity_id: entity_id.to_owned(),
         },
-        Some(1),
         ApplicationCacheValue::Empty,
     )
     .await?;
@@ -1527,11 +1526,7 @@ pub async fn progress_update(
     let id = seen.id.clone();
     if seen.state == SeenState::Completed && respect_cache {
         ss.cache_service
-            .set_with_expiry(
-                cache,
-                Some(ss.config.server.progress_update_threshold),
-                ApplicationCacheValue::Empty,
-            )
+            .set_with_expiry(cache, ApplicationCacheValue::Empty)
             .await?;
     }
     if seen.state == SeenState::Completed {
@@ -2116,6 +2111,8 @@ where
     let source_result = import.clone();
     let total = import.completed.len();
 
+    let mut need_to_schedule_user_for_workout_revision = false;
+
     for (idx, item) in import.completed.into_iter().enumerate() {
         ryot_log!(
             debug,
@@ -2315,6 +2312,7 @@ where
                 }
             }
             ImportCompletedItem::Workout(workout) => {
+                need_to_schedule_user_for_workout_revision = true;
                 if let Err(err) = create_or_update_workout(workout, user_id, ss).await {
                     import.failed.push(ImportFailedItem {
                         lot: None,
@@ -2366,6 +2364,10 @@ where
         .await?;
     }
 
+    if need_to_schedule_user_for_workout_revision {
+        schedule_user_for_workout_revision(user_id, ss).await?;
+    }
+
     let details = ImportResultResponse {
         failed_items: import.failed,
         import: ImportDetails { total },
diff --git a/docs/content/importing.md b/docs/content/importing.md
index cb8b9eda2f..473a054a6d 100644
--- a/docs/content/importing.md
+++ b/docs/content/importing.md
@@ -175,6 +175,19 @@ Exercise" or "Merge Exercise" actions to map the exercise to an existing one.
 - Scroll down to the "General" section and click on "Export data".
 - Upload the csv file in the input.
 
+## Hevy
+
+You can import your workouts from [Hevy](https://www.hevy.com). Exercises will be created
+using the same strategy as the [Strong app](#strong-app) importer.
+
+### Steps
+
+- Login to your Hevy account on the app and go to the "Profile" page.
+- Click on the cog icon on the top right and select "Export & Import Data" under
+  "Preferences".
+- Click on "Export" and then click on the button that says "Export Workouts".
+- Upload the csv file in the input.
+
 ## IMDb
 
 You can import your watchlist from [IMDb](https://www.imdb.com). They will be added to
diff --git a/docs/includes/export-schema.ts b/docs/includes/export-schema.ts
index a086bf6d67..7377e3f1f7 100644
--- a/docs/includes/export-schema.ts
+++ b/docs/includes/export-schema.ts
@@ -59,6 +59,8 @@ export interface ImportOrExportItemRating {
 export interface ImportOrExportExerciseItem {
   /** The collections this entity was added to. */
   collections: string[];
+  /** The unique identifier of the exercise. */
+  id: string;
   /** The name of the exercise. */
   name: string;
   /** The review history for the user. */
diff --git a/libs/generated/src/graphql/backend/graphql.ts b/libs/generated/src/graphql/backend/graphql.ts
index 2ffc0de62e..5a72a4b41c 100644
--- a/libs/generated/src/graphql/backend/graphql.ts
+++ b/libs/generated/src/graphql/backend/graphql.ts
@@ -869,6 +869,7 @@ export enum ImportSource {
   Audiobookshelf = 'AUDIOBOOKSHELF',
   GenericJson = 'GENERIC_JSON',
   Goodreads = 'GOODREADS',
+  Hevy = 'HEVY',
   Igdb = 'IGDB',
   Imdb = 'IMDB',
   Jellyfin = 'JELLYFIN',
diff --git a/libs/generated/src/graphql/backend/types.generated.ts b/libs/generated/src/graphql/backend/types.generated.ts
index e3aef8ad43..3efe9d0b3f 100644
--- a/libs/generated/src/graphql/backend/types.generated.ts
+++ b/libs/generated/src/graphql/backend/types.generated.ts
@@ -892,6 +892,7 @@ export enum ImportSource {
   Audiobookshelf = 'AUDIOBOOKSHELF',
   GenericJson = 'GENERIC_JSON',
   Goodreads = 'GOODREADS',
+  Hevy = 'HEVY',
   Igdb = 'IGDB',
   Imdb = 'IMDB',
   Jellyfin = 'JELLYFIN',