diff --git a/services/appflowy-collaborate/src/group/manager.rs b/services/appflowy-collaborate/src/group/manager.rs
index 20b38ab0a..ebcfd1c4d 100644
--- a/services/appflowy-collaborate/src/group/manager.rs
+++ b/services/appflowy-collaborate/src/group/manager.rs
@@ -207,7 +207,7 @@ where
       )
       .await?,
     );
-    self.state.insert_group(object_id, group.clone()).await;
+    self.state.insert_group(object_id, group).await;
     Ok(())
   }
 }
diff --git a/src/api/workspace.rs b/src/api/workspace.rs
index fae130cea..cb850521c 100644
--- a/src/api/workspace.rs
+++ b/src/api/workspace.rs
@@ -1927,6 +1927,7 @@ async fn post_database_row_handler(
   biz::collab::ops::insert_database_row(
     &state.collab_access_control_storage,
+    &state.pg_pool,
     &workspace_id,
     &db_id,
     uid,
diff --git a/src/biz/collab/mod.rs b/src/biz/collab/mod.rs
index dc877de24..8378f840f 100644
--- a/src/biz/collab/mod.rs
+++ b/src/biz/collab/mod.rs
@@ -1,3 +1,4 @@
 pub mod folder_view;
 pub mod ops;
 pub mod publish_outline;
+pub mod utils;
diff --git a/src/biz/collab/ops.rs b/src/biz/collab/ops.rs
index f3c30bc41..807d6e788 100644
--- a/src/biz/collab/ops.rs
+++ b/src/biz/collab/ops.rs
@@ -5,19 +5,13 @@ use app_error::AppError;
 use appflowy_collaborate::collab::storage::CollabAccessControlStorage;
 use chrono::DateTime;
 use chrono::Utc;
-use collab::core::collab::DataSource;
 use collab::preclude::Collab;
-use collab_database::database::DatabaseBody;
 use collab_database::entity::FieldType;
-use collab_database::fields::Field;
-use collab_database::fields::TypeOptions;
-use collab_database::rows::Cell;
 use collab_database::rows::CreateRowParams;
 use collab_database::rows::DatabaseRowBody;
 use collab_database::rows::Row;
 use collab_database::rows::RowDetail;
 use collab_database::views::OrderObjectPosition;
-use collab_database::workspace_database::NoPersistenceDatabaseCollabService;
 use collab_database::workspace_database::WorkspaceDatabase;
 use collab_database::workspace_database::WorkspaceDatabaseBody;
 use collab_entity::CollabType;
@@ -30,8 +24,8 @@ use database::collab::{CollabStorage, GetCollabOrigin};
 use database::publish::select_published_view_ids_for_workspace;
 use database::publish::select_workspace_id_for_publish_namespace;
 use database_entity::dto::CollabParams;
+use database_entity::dto::QueryCollab;
 use database_entity::dto::QueryCollabResult;
-use database_entity::dto::{QueryCollab, QueryCollabParams};
 use shared_entity::dto::workspace_dto::AFDatabase;
 use shared_entity::dto::workspace_dto::AFDatabaseField;
 use shared_entity::dto::workspace_dto::AFDatabaseRow;
@@ -58,6 +52,7 @@ use database_entity::dto::{
   UpdateCollabMemberParams,
 };
 
+use crate::biz::collab::utils::field_by_name_uniq;
 use crate::biz::workspace::ops::broadcast_update;
 
 use super::folder_view::collab_folder_to_folder_view;
@@ -66,6 +61,16 @@ use super::folder_view::section_items_to_recent_folder_view;
 use super::folder_view::section_items_to_trash_folder_view;
 use super::folder_view::to_dto_folder_view_miminal;
 use super::publish_outline::collab_folder_to_published_outline;
+use super::utils::collab_from_doc_state;
+use super::utils::convert_database_cells_human_readable;
+use super::utils::encode_collab_v1_bytes;
+use super::utils::field_by_id_name_uniq;
+use super::utils::get_database_body;
+use super::utils::get_latest_collab;
+use super::utils::get_latest_collab_encoded;
+use super::utils::new_cell_from_value;
+use super::utils::selection_name_by_id;
+use super::utils::type_options_serde;
 
 /// Create a new collab member
 /// If the collab member already exists, return [AppError::RecordAlreadyExists]
@@ -360,46 +365,6 @@ pub async fn get_latest_collab_folder(
   Ok(folder)
 }
 
-pub async fn get_latest_collab_encoded(
-  collab_storage: &CollabAccessControlStorage,
-  collab_origin: GetCollabOrigin,
-  workspace_id: &str,
-  oid: &str,
-  collab_type: CollabType,
-) -> Result<EncodedCollab, AppError> {
-  collab_storage
-    .get_encode_collab(
-      collab_origin,
-      QueryCollabParams {
-        workspace_id: workspace_id.to_string(),
-        inner: QueryCollab {
-          object_id: oid.to_string(),
-          collab_type,
-        },
-      },
-      true,
-    )
-    .await
-}
-
-pub async fn get_latest_collab(
-  storage: &CollabAccessControlStorage,
-  origin: GetCollabOrigin,
-  workspace_id: &str,
-  oid: &str,
-  collab_type: CollabType,
-) -> Result<Collab, AppError> {
-  let ec = get_latest_collab_encoded(storage, origin, workspace_id, oid, collab_type).await?;
-  let collab: Collab = Collab::new_with_source(CollabOrigin::Server, oid, ec.into(), vec![], false)
-    .map_err(|e| {
-      AppError::Internal(anyhow::anyhow!(
-        "Failed to create collab from encoded collab: {:?}",
-        e
-      ))
-    })?;
-  Ok(collab)
-}
-
 pub async fn get_published_view(
   collab_storage: &CollabAccessControlStorage,
   publish_namespace: String,
@@ -506,138 +471,133 @@ pub async fn list_database_row_ids(
   Ok(db_rows)
 }
 
-fn new_cell_from_value(cell_value: serde_json::Value, field: &Field) -> Cell {
-  // Based on the type of the field, handle each value differently
-  // This should be as forgiving/generic/all-purpose as much as possible
-  // to support different use cases.
-  let field_type = FieldType::from(field.field_type);
-  match field_type {
-    FieldType::RichText => {
-      //
-      todo!()
-    },
-    FieldType::Number => todo!(),
-    FieldType::DateTime => todo!(),
-    FieldType::SingleSelect => todo!(),
-    FieldType::MultiSelect => todo!(),
-    FieldType::Checkbox => todo!(),
-    FieldType::URL => todo!(),
-    FieldType::Checklist => todo!(),
-    FieldType::LastEditedTime => todo!(),
-    FieldType::CreatedTime => todo!(),
-    FieldType::Relation => todo!(),
-    FieldType::Summary => todo!(),
-    FieldType::Translate => todo!(),
-    FieldType::Time => todo!(),
-    FieldType::Media => todo!(),
-  }
-}
-
 pub async fn insert_database_row(
   collab_storage: &CollabAccessControlStorage,
+  pg_pool: &PgPool,
   workspace_uuid_str: &str,
   database_uuid_str: &str,
   uid: i64,
   cell_value_by_id: HashMap<String, serde_json::Value>,
 ) -> Result<(), AppError> {
+  let mut db_txn = pg_pool.begin().await?;
+
   // get database types and type options
   let (mut db_collab, db_body) =
     get_database_body(collab_storage, workspace_uuid_str, database_uuid_str).await?;
 
-  let field_by_id = db_body
-    .fields
-    .get_all_fields(&db_collab.transact())
-    .into_iter()
-    .fold(HashMap::new(), |mut acc, field| {
-      acc.insert(field.id.clone(), field);
-      acc
-    });
+  let all_fields = db_body.fields.get_all_fields(&db_collab.transact());
+  let field_by_id = all_fields.iter().fold(HashMap::new(), |mut acc, field| {
+    acc.insert(field.id.clone(), field.clone());
+    acc
+  });
+  let field_by_name = field_by_name_uniq(all_fields);
   let new_db_row_id = uuid::Uuid::new_v4().to_string();
-  let mut new_row = Row::new(new_db_row_id.clone(), database_uuid_str);
-  {
-    for (id, cell_value) in cell_value_by_id {
+
+  let mut new_db_row_collab =
+    Collab::new_with_origin(CollabOrigin::Empty, new_db_row_id.clone(), vec![], false);
+  let new_db_row_body = {
+    let database_body = DatabaseRowBody::create(
+      new_db_row_id.clone().into(),
+      &mut new_db_row_collab,
+      Row::empty(new_db_row_id.clone().into(), database_uuid_str),
+    );
+    let mut txn = new_db_row_collab.transact_mut();
+    for (id, serde_val) in cell_value_by_id {
       let field = match field_by_id.get(&id) {
         Some(f) => f,
-        None => {
-          tracing::warn!(
-            "field not found: {} for database: {}",
-            id,
-            database_uuid_str
-          );
-          continue;
+        // try use field name if id not found
+        None => match field_by_name.get(&id) {
+          Some(f) => f,
+          None => {
+            tracing::warn!(
+              "field not found: {} for database: {}",
+              id,
+              database_uuid_str
+            );
+            continue;
+          },
         },
       };
-
-      let new_cell: Cell = new_cell_from_value(cell_value, field);
-      new_row.cells.insert(id.clone(), new_cell);
+      let new_cell = new_cell_from_value(serde_val, field);
+      if let Some(new_cell) = new_cell {
+        database_body.update(&mut txn, |row_update| {
+          row_update.update_cells(|cells_update| {
+            cells_update.insert_cell(&field.id, new_cell);
+          });
+        });
+      }
     }
-  }
-
-  let mut new_db_row_collab =
-    Collab::new_with_origin(CollabOrigin::Empty, new_db_row_id.clone(), vec![], false);
-  let new_db_row_body = DatabaseRowBody::create(
-    new_db_row_id.clone().into(),
-    &mut new_db_row_collab,
-    new_row,
-  );
+    database_body
+  };
 
   let db_row_ec_v1 = encode_collab_v1_bytes(&new_db_row_collab, CollabType::DatabaseRow)?;
 
   // insert row
   collab_storage
-    .queue_insert_or_update_collab(
+    .insert_new_collab_with_transaction(
       workspace_uuid_str,
       &uid,
       CollabParams {
        object_id: new_db_row_id.clone(),
        encoded_collab_v1: db_row_ec_v1.into(),
-        collab_type: CollabType::Database,
+        collab_type: CollabType::DatabaseRow,
        embeddings: None,
      },
-      true,
+      &mut db_txn,
+      "inserting new database row from server",
    )
    .await?;
 
-  // create a new row
+  let ts_now = chrono::Utc::now().timestamp();
+  let row_order = db_body
+    .create_row(CreateRowParams {
+      id: new_db_row_id.into(),
+      database_id: database_uuid_str.to_string(),
+      cells: new_db_row_body
+        .cells(&new_db_row_collab.transact())
+        .unwrap_or_default(),
+      height: 30,
+      visibility: true,
+      row_position: OrderObjectPosition::End,
+      created_at: ts_now,
+      modified_at: ts_now,
+    })
+    .await
+    .map_err(|e| AppError::Internal(anyhow::anyhow!("Failed to create row: {:?}", e)))?;
+
+  // For each database view, add the new row order
   let db_collab_update = {
-    let txn = db_collab.transact_mut();
-    let ts_now = chrono::Utc::now().timestamp();
-    let _ = db_body
-      .create_row(CreateRowParams {
-        id: uuid::Uuid::new_v4().to_string().into(),
-        database_id: database_uuid_str.to_string(),
-        cells: new_db_row_body
-          .cells(&new_db_row_collab.transact())
-          .unwrap_or_default(),
-        height: 30,
-        visibility: true,
-        row_position: OrderObjectPosition::End,
-        created_at: ts_now,
-        modified_at: ts_now,
-      })
-      .await
-      .map_err(|e| AppError::Internal(anyhow::anyhow!("Failed to create row: {:?}", e)))?;
+    let mut txn = db_collab.transact_mut();
+    let mut db_views = db_body.views.get_all_views(&txn);
+    for db_view in db_views.iter_mut() {
+      db_view.row_orders.push(row_order.clone());
+    }
+    db_body.views.clear(&mut txn);
+    for view in db_views {
+      db_body.views.insert_view(&mut txn, view);
+    }
     txn.encode_update_v1()
  };
-  let db_ec_v1 = encode_collab_v1_bytes(&db_collab, CollabType::Database)?;
+  let updated_db_collab = encode_collab_v1_bytes(&db_collab, CollabType::Database)?;
 
-  // insert database with new row
   collab_storage
-    .queue_insert_or_update_collab(
+    .insert_new_collab_with_transaction(
      workspace_uuid_str,
      &uid,
      CollabParams {
        object_id: database_uuid_str.to_string(),
-        encoded_collab_v1: db_ec_v1.into(),
+        encoded_collab_v1: updated_db_collab.into(),
        collab_type: CollabType::Database,
        embeddings: None,
      },
-      true,
+      &mut db_txn,
+      "inserting updated database from server",
    )
    .await?;
 
-  broadcast_update(&collab_storage, workspace_uuid_str, db_collab_update).await?;
+  db_txn.commit().await?;
+  broadcast_update(collab_storage, database_uuid_str, db_collab_update).await?;
   Ok(())
 }
@@ -693,32 +653,9 @@ pub async fn list_database_row_details(
   let (database_collab, db_body) =
     get_database_body(collab_storage, &workspace_uuid_str, &database_uuid_str).await?;
 
-  // create a map of field id to field.
-  // ensure that the field name is unique.
-  // if the field name is repeated, it will be appended with the field id,
-  // under practical usage circumstances, no other collision should occur
-  let field_by_id: HashMap<String, Field> = {
-    let all_fields = db_body.fields.get_all_fields(&database_collab.transact());
-
-    let mut uniq_name_set: HashSet<String> = HashSet::with_capacity(all_fields.len());
-    let mut field_by_id: HashMap<String, Field> = HashMap::with_capacity(all_fields.len());
-
-    for mut field in all_fields {
-      // if the name already exists, append the field id to the name
-      if uniq_name_set.contains(&field.name) {
-        let new_name = format!("{}-{}", field.name, field.id);
-        field.name.clone_from(&new_name);
-      }
-      uniq_name_set.insert(field.name.clone());
-      field_by_id.insert(field.id.clone(), field);
-    }
-    field_by_id
-  };
-
-  let mut selection_name_by_id: HashMap<String, String> = HashMap::new();
-  for field in field_by_id.values() {
-    add_to_selection_from_field(&mut selection_name_by_id, field);
-  }
+  let all_fields = db_body.fields.get_all_fields(&database_collab.transact());
+  let selection_name_by_id = selection_name_by_id(&all_fields);
+  let field_by_name_uniq = field_by_id_name_uniq(all_fields);
 
   let query_collabs: Vec<QueryCollab> = row_ids
     .iter()
@@ -733,13 +670,31 @@ pub async fn list_database_row_details(
     .into_iter()
     .flat_map(|(id, result)| match result {
       QueryCollabResult::Success { encode_collab_v1 } => {
-        let ec = EncodedCollab::decode_from_bytes(&encode_collab_v1).unwrap();
+        let ec = match EncodedCollab::decode_from_bytes(&encode_collab_v1) {
+          Ok(ec) => ec,
+          Err(err) => {
+            tracing::error!("Failed to decode encoded collab: {:?}", err);
+            return None;
+          },
+        };
         let collab =
-          Collab::new_with_source(CollabOrigin::Server, &id, ec.into(), vec![], false).unwrap();
-        let row_detail = RowDetail::from_collab(&collab).unwrap();
+          match Collab::new_with_source(CollabOrigin::Server, &id, ec.into(), vec![], false) {
+            Ok(collab) => collab,
+            Err(err) => {
+              tracing::error!("Failed to create collab: {:?}", err);
+              return None;
+            },
+          };
+        let row_detail = match RowDetail::from_collab(&collab) {
+          Some(row_detail) => row_detail,
+          None => {
+            tracing::error!("Failed to get row detail from collab: {:?}", collab);
+            return None;
+          },
+        };
         let cells = convert_database_cells_human_readable(
           row_detail.row.cells,
-          &field_by_id,
+          &field_by_name_uniq,
           &selection_name_by_id,
         );
         Some(AFDatabaseRowDetail { id, cells })
@@ -753,224 +708,3 @@
 
   Ok(database_row_details)
 }
-
-fn convert_database_cells_human_readable(
-  db_cells: HashMap<String, HashMap<String, yrs::any::Any>>,
-  field_by_id: &HashMap<String, Field>,
-  selection_name_by_id: &HashMap<String, String>,
-) -> HashMap<String, HashMap<String, serde_json::Value>> {
-  let mut human_readable_records: HashMap<String, HashMap<String, serde_json::Value>> =
-    HashMap::with_capacity(db_cells.len());
-
-  for (field_id, cell) in db_cells {
-    let field = match field_by_id.get(&field_id) {
-      Some(field) => field,
-      None => {
-        tracing::error!("Failed to get field by id: {}", field_id);
-        continue;
-      },
-    };
-    let field_type = FieldType::from(field.field_type);
-
-    let mut human_readable_cell: HashMap<String, serde_json::Value> =
-      HashMap::with_capacity(cell.len());
-    for (key, value) in cell {
-      let serde_value: serde_json::Value = match key.as_str() {
-        "created_at" | "last_modified" => match value.cast::<i64>() {
-          Ok(timestamp) => chrono::DateTime::from_timestamp(timestamp, 0)
-            .unwrap_or_default()
-            .to_rfc3339()
-            .into(),
-          Err(err) => {
-            tracing::error!("Failed to cast timestamp: {:?}", err);
-            serde_json::Value::Null
-          },
-        },
-        "field_type" => format!("{:?}", field_type).into(),
-        "data" => {
-          match field_type {
-            FieldType::DateTime => {
-              if let yrs::any::Any::String(value_str) = value {
-                let int_value = value_str.parse::<i64>().unwrap_or_default();
-                chrono::DateTime::from_timestamp(int_value, 0)
-                  .unwrap_or_default()
-                  .to_rfc3339()
-                  .into()
-              } else {
-                serde_json::to_value(value).unwrap_or_default()
-              }
-            },
-            FieldType::Checklist => {
-              if let yrs::any::Any::String(value_str) = value {
-                serde_json::from_str(&value_str).unwrap_or_default()
-              } else {
-                serde_json::to_value(value).unwrap_or_default()
-              }
-            },
-            FieldType::Media => {
-              if let yrs::any::Any::Array(arr) = value {
-                let mut acc = Vec::with_capacity(arr.len());
-                for v in arr.as_ref() {
-                  if let yrs::any::Any::String(value_str) = v {
-                    let serde_value = serde_json::from_str(value_str).unwrap_or_default();
-                    acc.push(serde_value);
-                  }
-                }
-                serde_json::Value::Array(acc)
-              } else {
-                serde_json::to_value(value).unwrap_or_default()
-              }
-            },
-            FieldType::SingleSelect => {
-              if let yrs::any::Any::String(ref value_str) = value {
-                selection_name_by_id
-                  .get(value_str.as_ref())
-                  .map(|v| v.to_string())
-                  .map(serde_json::Value::String)
-                  .unwrap_or_else(|| value.to_string().into())
-              } else {
-                serde_json::to_value(value).unwrap_or_default()
-              }
-            },
-            FieldType::MultiSelect => {
-              if let yrs::any::Any::String(value_str) = value {
-                value_str
-                  .split(',')
-                  .filter_map(|v| selection_name_by_id.get(v).map(|v| v.to_string()))
-                  .fold(String::new(), |mut acc, s| {
-                    if !acc.is_empty() {
-                      acc.push(',');
-                    }
-                    acc.push_str(&s);
-                    acc
-                  })
-                  .into()
-              } else {
-                serde_json::to_value(value).unwrap_or_default()
-              }
-            },
-            // Handle different field types formatting as needed
-            _ => serde_json::to_value(value).unwrap_or_default(),
-          }
-        },
-        _ => serde_json::to_value(value).unwrap_or_default(),
-      };
-      human_readable_cell.insert(key, serde_value);
-    }
-    human_readable_records.insert(field.name.clone(), human_readable_cell);
-  }
-  human_readable_records
-}
-
-fn add_to_selection_from_field(name_by_id: &mut HashMap<String, String>, field: &Field) {
-  let field_type = FieldType::from(field.field_type);
-  match field_type {
-    FieldType::SingleSelect => {
-      add_to_selection_from_type_options(name_by_id, &field.type_options, &field_type);
-    },
-    FieldType::MultiSelect => {
-      add_to_selection_from_type_options(name_by_id, &field.type_options, &field_type)
-    },
-    _ => (),
-  }
-}
-
-fn add_to_selection_from_type_options(
-  name_by_id: &mut HashMap<String, String>,
-  type_options: &TypeOptions,
-  field_type: &FieldType,
-) {
-  if let Some(type_opt) = type_options.get(&field_type.type_id()) {
-    if let Some(yrs::Any::String(arc_str)) = type_opt.get("content") {
-      if let Ok(serde_value) = serde_json::from_str::<serde_json::Value>(arc_str) {
-        if let Some(selections) = serde_value.get("options").and_then(|v| v.as_array()) {
-          for selection in selections {
-            if let serde_json::Value::Object(selection) = selection {
-              if let (Some(id), Some(name)) = (
-                selection.get("id").and_then(|v| v.as_str()),
-                selection.get("name").and_then(|v| v.as_str()),
-              ) {
-                name_by_id.insert(id.to_owned(), name.to_owned());
-              }
-            }
-          }
-        }
-      }
-    }
-  };
-}
-
-async fn get_database_body(
-  collab_storage: &CollabAccessControlStorage,
-  workspace_uuid_str: &str,
-  database_uuid_str: &str,
-) -> Result<(Collab, DatabaseBody), AppError> {
-  let db_collab = get_latest_collab(
-    collab_storage,
-    GetCollabOrigin::Server,
-    workspace_uuid_str,
-    database_uuid_str,
-    CollabType::Database,
-  )
-  .await?;
-  let db_body = DatabaseBody::from_collab(
-    &db_collab,
-    Arc::new(NoPersistenceDatabaseCollabService),
-    None,
-  )
-  .ok_or_else(|| {
-    AppError::Internal(anyhow::anyhow!(
-      "Failed to create database body from collab, db_collab_id: {}",
-      database_uuid_str,
-    ))
-  })?;
-  Ok((db_collab, db_body))
-}
-
-pub fn collab_from_doc_state(doc_state: Vec<u8>, object_id: &str) -> Result<Collab, AppError> {
-  let collab = Collab::new_with_source(
-    CollabOrigin::Server,
-    object_id,
-    DataSource::DocStateV1(doc_state),
-    vec![],
-    false,
-  )
-  .map_err(|e| AppError::Unhandled(e.to_string()))?;
-  Ok(collab)
-}
-
-fn type_options_serde(
-  type_options: &TypeOptions,
-  field_type: &FieldType,
-) -> HashMap<String, serde_json::Value> {
-  let type_option = match type_options.get(&field_type.type_id()) {
-    Some(type_option) => type_option,
-    None => return HashMap::new(),
-  };
-
-  let mut result = HashMap::with_capacity(type_option.len());
-  for (key, value) in type_option {
-    match field_type {
-      FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Media => {
-        if let yrs::Any::String(arc_str) = value {
-          if let Ok(serde_value) = serde_json::from_str::<serde_json::Value>(arc_str) {
-            result.insert(key.clone(), serde_value);
-          }
-        }
-      },
-      _ => {
-        result.insert(key.clone(), serde_json::to_value(value).unwrap_or_default());
-      },
-    }
-  }
-
-  result
-}
-
-fn encode_collab_v1_bytes(collab: &Collab, collab_type: CollabType) -> Result<Vec<u8>, AppError> {
-  let bs = collab
-    .encode_collab_v1(|collab| collab_type.validate_require_data(collab))
-    .map_err(|e| AppError::Unhandled(e.to_string()))?
-    .encode_to_bytes()?;
-  Ok(bs)
-}
diff --git a/src/biz/collab/utils.rs b/src/biz/collab/utils.rs
new file mode 100644
index 000000000..a0fd91fec
--- /dev/null
+++ b/src/biz/collab/utils.rs
@@ -0,0 +1,414 @@
+use app_error::AppError;
+use appflowy_collaborate::collab::storage::CollabAccessControlStorage;
+use collab::core::collab::DataSource;
+use collab::preclude::Collab;
+use collab_database::database::DatabaseBody;
+use collab_database::entity::FieldType;
+use collab_database::fields::Field;
+use collab_database::fields::TypeOptions;
+use collab_database::rows::new_cell_builder;
+use collab_database::rows::Cell;
+use collab_database::template::entity::CELL_DATA;
+use collab_database::workspace_database::NoPersistenceDatabaseCollabService;
+use collab_entity::CollabType;
+use collab_entity::EncodedCollab;
+use collab_folder::CollabOrigin;
+use database::collab::CollabStorage;
+use database::collab::GetCollabOrigin;
+use database_entity::dto::QueryCollab;
+use database_entity::dto::QueryCollabParams;
+use std::collections::HashMap;
+use std::collections::HashSet;
+use std::sync::Arc;
+
+pub fn convert_database_cells_human_readable(
+  db_cells: HashMap<String, HashMap<String, yrs::any::Any>>,
+  field_by_id: &HashMap<String, Field>,
+  selection_name_by_id: &HashMap<String, String>,
+) -> HashMap<String, HashMap<String, serde_json::Value>> {
+  let mut human_readable_records: HashMap<String, HashMap<String, serde_json::Value>> =
+    HashMap::with_capacity(db_cells.len());
+
+  for (field_id, cell) in db_cells {
+    let field = match field_by_id.get(&field_id) {
+      Some(field) => field,
+      None => {
+        tracing::error!("Failed to get field by id: {}, cell: {:?}", field_id, cell);
+        continue;
+      },
+    };
+    let field_type = FieldType::from(field.field_type);
+
+    let mut human_readable_cell: HashMap<String, serde_json::Value> =
+      HashMap::with_capacity(cell.len());
+    for (key, value) in cell {
+      let serde_value: serde_json::Value = match key.as_str() {
+        "created_at" | "last_modified" => match value.cast::<i64>() {
+          Ok(timestamp) => chrono::DateTime::from_timestamp(timestamp, 0)
+            .unwrap_or_default()
+            .to_rfc3339()
+            .into(),
+          Err(err) => {
+            tracing::error!("Failed to cast timestamp: {:?}", err);
+            serde_json::Value::Null
+          },
+        },
+        "field_type" => format!("{:?}", field_type).into(),
+        "data" => {
+          match field_type {
+            FieldType::DateTime => {
+              if let yrs::any::Any::String(value_str) = value {
+                let int_value = value_str.parse::<i64>().unwrap_or_default();
+                chrono::DateTime::from_timestamp(int_value, 0)
+                  .unwrap_or_default()
+                  .to_rfc3339()
+                  .into()
+              } else {
+                serde_json::to_value(value).unwrap_or_default()
+              }
+            },
+            FieldType::Checklist => {
+              if let yrs::any::Any::String(value_str) = value {
+                serde_json::from_str(&value_str).unwrap_or_default()
+              } else {
+                serde_json::to_value(value).unwrap_or_default()
+              }
+            },
+            FieldType::Media => {
+              if let yrs::any::Any::Array(arr) = value {
+                let mut acc = Vec::with_capacity(arr.len());
+                for v in arr.as_ref() {
+                  if let yrs::any::Any::String(value_str) = v {
+                    let serde_value = serde_json::from_str(value_str).unwrap_or_default();
+                    acc.push(serde_value);
+                  }
+                }
+                serde_json::Value::Array(acc)
+              } else {
+                serde_json::to_value(value).unwrap_or_default()
+              }
+            },
+            FieldType::SingleSelect => {
+              if let yrs::any::Any::String(ref value_str) = value {
+                selection_name_by_id
+                  .get(value_str.as_ref())
+                  .map(|v| v.to_string())
+                  .map(serde_json::Value::String)
+                  .unwrap_or_else(|| value.to_string().into())
+              } else {
+                serde_json::to_value(value).unwrap_or_default()
+              }
+            },
+            FieldType::MultiSelect => {
+              if let yrs::any::Any::String(value_str) = value {
+                value_str
+                  .split(',')
+                  .filter_map(|v| selection_name_by_id.get(v).map(|v| v.to_string()))
+                  .fold(String::new(), |mut acc, s| {
+                    if !acc.is_empty() {
+                      acc.push(',');
+                    }
+                    acc.push_str(&s);
+                    acc
+                  })
+                  .into()
+              } else {
+                serde_json::to_value(value).unwrap_or_default()
+              }
+            },
+            // Handle different field types formatting as needed
+            _ => serde_json::to_value(value).unwrap_or_default(),
+          }
+        },
+        _ => serde_json::to_value(value).unwrap_or_default(),
+      };
+      human_readable_cell.insert(key, serde_value);
+    }
+    human_readable_records.insert(field.name.clone(), human_readable_cell);
+  }
+  human_readable_records
+}
+
+pub fn selection_name_by_id(fields: &[Field]) -> HashMap<String, String> {
+  let mut selection_name_by_id: HashMap<String, String> = HashMap::new();
+  for field in fields {
+    add_to_selection_from_field(&mut selection_name_by_id, field);
+  }
+  selection_name_by_id
+}
+
+/// create a map of field name to field
+/// if the field name is repeated, it will be appended with the field id,
+pub fn field_by_name_uniq(mut fields: Vec<Field>) -> HashMap<String, Field> {
+  fields.sort_by_key(|a| a.id.clone());
+  let mut uniq_name_set: HashSet<String> = HashSet::with_capacity(fields.len());
+  let mut field_by_name: HashMap<String, Field> = HashMap::with_capacity(fields.len());
+
+  for field in fields {
+    // if the name already exists, append the field id to the name
+    let name = if uniq_name_set.contains(&field.name) {
+      format!("{}-{}", field.name, field.id)
+    } else {
+      field.name.clone()
+    };
+    uniq_name_set.insert(name.clone());
+    field_by_name.insert(name, field);
+  }
+  field_by_name
+}
+
+/// create a map of field id to field name, and ensure that the field name is unique.
+/// if the field name is repeated, it will be appended with the field id,
+/// under practical usage circumstances, no other collision should occur
+pub fn field_by_id_name_uniq(mut fields: Vec<Field>) -> HashMap<String, Field> {
+  fields.sort_by_key(|a| a.id.clone());
+  let mut uniq_name_set: HashSet<String> = HashSet::with_capacity(fields.len());
+  let mut field_by_id: HashMap<String, Field> = HashMap::with_capacity(fields.len());
+
+  for mut field in fields {
+    // if the name already exists, append the field id to the name
+    if uniq_name_set.contains(&field.name) {
+      let new_name = format!("{}-{}", field.name, field.id);
+      field.name.clone_from(&new_name);
+    }
+    uniq_name_set.insert(field.name.clone());
+    field_by_id.insert(field.id.clone(), field);
+  }
+  field_by_id
+}
+
+fn add_to_selection_from_field(name_by_id: &mut HashMap<String, String>, field: &Field) {
+  let field_type = FieldType::from(field.field_type);
+  match field_type {
+    FieldType::SingleSelect => {
+      add_to_selection_from_type_options(name_by_id, &field.type_options, &field_type);
+    },
+    FieldType::MultiSelect => {
+      add_to_selection_from_type_options(name_by_id, &field.type_options, &field_type)
+    },
+    _ => (),
+  }
+}
+
+fn add_to_selection_from_type_options(
+  name_by_id: &mut HashMap<String, String>,
+  type_options: &TypeOptions,
+  field_type: &FieldType,
+) {
+  if let Some(type_opt) = type_options.get(&field_type.type_id()) {
+    if let Some(yrs::Any::String(arc_str)) = type_opt.get("content") {
+      if let Ok(serde_value) = serde_json::from_str::<serde_json::Value>(arc_str) {
+        if let Some(selections) = serde_value.get("options").and_then(|v| v.as_array()) {
+          for selection in selections {
+            if let serde_json::Value::Object(selection) = selection {
+              if let (Some(id), Some(name)) = (
+                selection.get("id").and_then(|v| v.as_str()),
+                selection.get("name").and_then(|v| v.as_str()),
+              ) {
+                name_by_id.insert(id.to_owned(), name.to_owned());
+              }
+            }
+          }
+        }
+      }
+    }
+  };
+}
+
+pub fn type_options_serde(
+  type_options: &TypeOptions,
+  field_type: &FieldType,
+) -> HashMap<String, serde_json::Value> {
+  let type_option = match type_options.get(&field_type.type_id()) {
+    Some(type_option) => type_option,
+    None => return HashMap::new(),
+  };
+
+  let mut result = HashMap::with_capacity(type_option.len());
+  for (key, value) in type_option {
+    match field_type {
+      FieldType::SingleSelect | FieldType::MultiSelect | FieldType::Media => {
+        if let yrs::Any::String(arc_str) = value {
+          if let Ok(serde_value) = serde_json::from_str::<serde_json::Value>(arc_str) {
+            result.insert(key.clone(), serde_value);
+          }
+        }
+      },
+      _ => {
+        result.insert(key.clone(), serde_json::to_value(value).unwrap_or_default());
+      },
+    }
+  }
+
+  result
+}
+
+pub fn collab_from_doc_state(doc_state: Vec<u8>, object_id: &str) -> Result<Collab, AppError> {
+  let collab = Collab::new_with_source(
+    CollabOrigin::Server,
+    object_id,
+    DataSource::DocStateV1(doc_state),
+    vec![],
+    false,
+  )
+  .map_err(|e| AppError::Unhandled(e.to_string()))?;
+  Ok(collab)
+}
+
+pub async fn get_database_body(
+  collab_storage: &CollabAccessControlStorage,
+  workspace_uuid_str: &str,
+  database_uuid_str: &str,
+) -> Result<(Collab, DatabaseBody), AppError> {
+  let db_collab = get_latest_collab(
+    collab_storage,
+    GetCollabOrigin::Server,
+    workspace_uuid_str,
+    database_uuid_str,
+    CollabType::Database,
+  )
+  .await?;
+  let db_body = DatabaseBody::from_collab(
+    &db_collab,
+    Arc::new(NoPersistenceDatabaseCollabService),
+    None,
+  )
+  .ok_or_else(|| {
+    AppError::Internal(anyhow::anyhow!(
+      "Failed to create database body from collab, db_collab_id: {}",
+      database_uuid_str,
+    ))
+  })?;
+  Ok((db_collab, db_body))
+}
+
+pub fn encode_collab_v1_bytes(
+  collab: &Collab,
+  collab_type: CollabType,
+) -> Result<Vec<u8>, AppError> {
+  let bs = collab
+    .encode_collab_v1(|collab| collab_type.validate_require_data(collab))
+    .map_err(|e| AppError::Unhandled(e.to_string()))?
+    .encode_to_bytes()?;
+  Ok(bs)
+}
+
+pub async fn get_latest_collab_encoded(
+  collab_storage: &CollabAccessControlStorage,
+  collab_origin: GetCollabOrigin,
+  workspace_id: &str,
+  oid: &str,
+  collab_type: CollabType,
+) -> Result<EncodedCollab, AppError> {
+  collab_storage
+    .get_encode_collab(
+      collab_origin,
+      QueryCollabParams {
+        workspace_id: workspace_id.to_string(),
+        inner: QueryCollab {
+          object_id: oid.to_string(),
+          collab_type,
+        },
+      },
+      true,
+    )
+    .await
+}
+
+pub async fn get_latest_collab(
+  storage: &CollabAccessControlStorage,
+  origin: GetCollabOrigin,
+  workspace_id: &str,
+  oid: &str,
+  collab_type: CollabType,
+) -> Result<Collab, AppError> {
+  let ec = get_latest_collab_encoded(storage, origin, workspace_id, oid, collab_type).await?;
+  let collab: Collab = Collab::new_with_source(CollabOrigin::Server, oid, ec.into(), vec![], false)
+    .map_err(|e| {
+      AppError::Internal(anyhow::anyhow!(
+        "Failed to create collab from encoded collab: {:?}",
+        e
+      ))
+    })?;
+  Ok(collab)
+}
+
+pub fn new_cell_from_value(cell_value: serde_json::Value, field: &Field) -> Option<Cell> {
+  let field_type = FieldType::from(field.field_type);
+  let cell_value: Option<yrs::any::Any> = match field_type {
+    FieldType::Relation | FieldType::Media => {
+      if let serde_json::Value::Array(arr) = cell_value {
+        let mut acc = Vec::with_capacity(arr.len());
+        for v in arr {
+          if let serde_json::Value::String(value_str) = v {
+            acc.push(yrs::any::Any::String(value_str.into()));
+          }
+        }
+        Some(yrs::any::Any::Array(acc.into()))
+      } else {
+        tracing::warn!("invalid media/relation value: {:?}", cell_value);
+        None
+      }
+    },
+    FieldType::RichText => {
+      if let serde_json::Value::String(value_str) = cell_value {
+        Some(yrs::any::Any::String(value_str.into()))
+      } else {
+        None
+      }
+    },
+    FieldType::Checkbox => {
+      let is_yes = match cell_value {
+        serde_json::Value::Null => false,
+        serde_json::Value::Bool(b) => b,
+        serde_json::Value::Number(n) => n.is_i64() && n.as_i64().unwrap() >= 1,
+        serde_json::Value::String(s) => s.to_lowercase() == "yes",
+        _ => {
+          tracing::warn!("invalid checklist value: {:?}", cell_value);
+          false
+        },
+      };
+      if is_yes {
+        Some(yrs::any::Any::String("Yes".into()))
+      } else {
+        None
+      }
+    },
+    FieldType::Number => match cell_value {
+      serde_json::Value::Number(n) => Some(yrs::any::Any::String(n.to_string().into())),
+      serde_json::Value::String(s) => Some(yrs::any::Any::String(s.into())),
+      _ => {
+        tracing::warn!("invalid number value: {:?}", cell_value);
+        None
+      },
+    },
+    FieldType::SingleSelect
+    | FieldType::MultiSelect
+    | FieldType::Checklist
+    | FieldType::URL
+    | FieldType::Summary
+    | FieldType::Translate
+    | FieldType::DateTime => match serde_json::to_string(&cell_value) {
+      Ok(s) => Some(yrs::any::Any::String(s.into())),
+      Err(err) => {
+        tracing::error!("Failed to serialize cell value: {:?}", err);
+        None
+      },
+    },
+    FieldType::LastEditedTime | FieldType::CreatedTime | FieldType::Time => {
+      // should not be possible
+      tracing::error!(
+        "attempt to insert into invalid field: {:?}, value: {}",
+        field_type,
+        cell_value
+      );
+      None
+    },
+  };
+
+  cell_value.map(|v| {
+    let mut new_cell = new_cell_builder(field_type);
+    new_cell.insert(CELL_DATA.to_string(), v);
+    new_cell
+  })
+}
diff --git a/src/biz/workspace/page_view.rs b/src/biz/workspace/page_view.rs
index 795d0d89d..ed03dcafb 100644
--- a/src/biz/workspace/page_view.rs
+++ b/src/biz/workspace/page_view.rs
@@ -46,11 +46,9 @@ use crate::biz::collab::folder_view::{
   parse_extra_field_as_json, to_dto_view_icon, to_dto_view_layout, to_folder_view_icon,
   to_space_permission,
 };
-use crate::biz::collab::ops::{collab_from_doc_state, get_latest_workspace_database};
-use crate::biz::collab::{
-  folder_view::view_is_space,
-  ops::{get_latest_collab_encoded, get_latest_collab_folder},
-};
+use crate::biz::collab::ops::get_latest_workspace_database;
+use crate::biz::collab::utils::{collab_from_doc_state, get_latest_collab_encoded};
+use crate::biz::collab::{folder_view::view_is_space, ops::get_latest_collab_folder};
 
 use super::ops::broadcast_update;
 
diff --git a/src/biz/workspace/publish_dup.rs b/src/biz/workspace/publish_dup.rs
index 6f5ee329b..a4dd70faf 100644
--- a/src/biz/workspace/publish_dup.rs
+++ b/src/biz/workspace/publish_dup.rs
@@ -43,8 +43,8 @@ use yrs::{Map, MapRef};
 
 use crate::biz::collab::folder_view::to_folder_view_icon;
 use crate::biz::collab::folder_view::to_folder_view_layout;
-use crate::biz::collab::ops::collab_from_doc_state;
-use crate::biz::collab::ops::get_latest_collab_encoded;
+use crate::biz::collab::utils::collab_from_doc_state;
+use crate::biz::collab::utils::get_latest_collab_encoded;
 
 use super::ops::broadcast_update;
 
diff --git a/tests/workspace/publish.rs b/tests/workspace/publish.rs
index a48a3c486..7fa764df4 100644
--- a/tests/workspace/publish.rs
+++ b/tests/workspace/publish.rs
@@ -1,6 +1,6 @@
 use app_error::ErrorCode;
 use appflowy_cloud::biz::collab::folder_view::collab_folder_to_folder_view;
-use appflowy_cloud::biz::collab::ops::collab_from_doc_state;
+use appflowy_cloud::biz::collab::utils::collab_from_doc_state;
 use client_api::entity::{
   AFRole, GlobalComment, PatchPublishedCollab, PublishCollabItem, PublishCollabMetadata,
   PublishInfoMeta,