feat: insert database row into selected database #1029

Merged · 39 commits · Dec 11, 2024

Commits
b7de5e9
feat: insert database row into selected database
speed2exe Nov 28, 2024
1600baf
feat: inserting database row
speed2exe Nov 28, 2024
739fef0
Merge branch 'main' into feat/insert-database-row
speed2exe Nov 28, 2024
fc0698d
feat: add impl for cell
speed2exe Nov 29, 2024
e2d68ae
feat: insert row selection and mulitselection
speed2exe Nov 30, 2024
7a3edeb
feat: support datetime field type
speed2exe Dec 2, 2024
6d5e4ed
fix: other fields like rich text
speed2exe Dec 2, 2024
d4986b4
feat: add database field server impl
speed2exe Dec 2, 2024
5c2ac03
feat: add client api and tests
speed2exe Dec 3, 2024
7122bfc
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 3, 2024
b8e09c1
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 4, 2024
99c0be5
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 4, 2024
5d24f6f
feat: use to json value impl from collab
speed2exe Dec 4, 2024
6f1ba48
feat: use add database cell impl from collab
speed2exe Dec 4, 2024
67737cf
feat: update to latest collab
speed2exe Dec 5, 2024
84c0444
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 5, 2024
f681279
chore: upgrade collab and fix tests
speed2exe Dec 5, 2024
c6b3981
chore: review issues
speed2exe Dec 6, 2024
879e5c7
chore: merge with main
speed2exe Dec 6, 2024
058e940
fix: code review feedback
speed2exe Dec 6, 2024
9cb8ea1
feat: filter only allow supported field types
speed2exe Dec 6, 2024
11743f7
feat: support more field types
speed2exe Dec 7, 2024
edb4769
feat: support created at and last modified
speed2exe Dec 9, 2024
4092301
chore: merge with main
speed2exe Dec 9, 2024
974b697
feat: add timestamp cell for created at and modified at fields
speed2exe Dec 9, 2024
26d80b9
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 9, 2024
a9d0619
chore: linting and formatting
speed2exe Dec 9, 2024
fdb87d2
fix: add created at and last modified
speed2exe Dec 9, 2024
1c767ea
fix: ci add service dependency of appflowy cloud on admin frontend
speed2exe Dec 9, 2024
a85afa1
chore: trigger ci
speed2exe Dec 9, 2024
85ea7eb
feat: add logging for admin frontend client signin
speed2exe Dec 9, 2024
dca6345
fix: server logs if error
speed2exe Dec 9, 2024
2a5b643
fix: create admin confirmation without email
speed2exe Dec 9, 2024
fd015ee
fix: ci add service dependency of appflowy cloud on admin frontend
speed2exe Dec 9, 2024
ad733c2
fix: server logs if error
speed2exe Dec 9, 2024
e7739e4
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 10, 2024
166d239
chore: update collab
speed2exe Dec 10, 2024
59a865c
chore: merge with main
speed2exe Dec 10, 2024
fa644cd
Merge branch 'main' into feat/insert-database-row
speed2exe Dec 10, 2024
20 changes: 10 additions & 10 deletions Cargo.lock

Some generated files are not rendered by default.

18 changes: 9 additions & 9 deletions Cargo.toml
@@ -47,7 +47,7 @@ chrono = { version = "0.4.37", features = [
derive_more = { version = "0.99" }
secrecy.workspace = true
rand = { version = "0.8", features = ["std_rng"] }
anyhow = "1.0.79"
anyhow.workspace = true
thiserror = "1.0.56"
reqwest = { workspace = true, features = [
"json",
@@ -248,7 +248,7 @@ serde = { version = "1.0.195", features = ["derive"] }
bytes = "1.5.0"
workspace-template = { path = "libs/workspace-template" }
uuid = { version = "1.6.1", features = ["v4", "v5"] }
anyhow = "1.0.79"
anyhow = "1.0.94"
actix = "0.13.3"
actix-web = { version = "4.5.1", default-features = false, features = [
"openssl",
@@ -312,13 +312,13 @@ lto = false # Disable Link-Time Optimization
[patch.crates-io]
# It's diffcult to resovle different version with the same crate used in AppFlowy Frontend and the Client-API crate.
# So using patch to workaround this issue.
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab-importer = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "c4e23b6d6b1ba320c74b40e557069e99dd0377e7" }
collab = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }
collab-entity = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }
collab-folder = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }
collab-document = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }
collab-user = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }
collab-database = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }
collab-importer = { git = "https://github.com/AppFlowy-IO/AppFlowy-Collab", rev = "128f3a81ea86a58e355615b1c00edbf861867886" }

[features]
history = []
2 changes: 1 addition & 1 deletion admin_frontend/Cargo.toml
@@ -12,7 +12,7 @@ gotrue-entity = { path = "../libs/gotrue-entity" }
database-entity = { path = "../libs/database-entity" }
shared-entity = { path = "../libs/shared-entity" }

anyhow = "1.0.79"
anyhow.workspace = true
axum = { version = "0.7", features = ["json"] }
tokio = { version = "1.36", features = ["rt-multi-thread", "macros"] }
askama = "0.12"
4 changes: 2 additions & 2 deletions libs/client-api/src/http_chat.rs
@@ -320,7 +320,7 @@ impl Stream for QuestionStream {
fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
let this = self.project();

return match ready!(this.stream.as_mut().poll_next(cx)) {
match ready!(this.stream.as_mut().poll_next(cx)) {
Some(Ok(value)) => match value {
Value::Object(mut value) => {
if let Some(metadata) = value.remove(STREAM_METADATA_KEY) {
@@ -344,6 +344,6 @@ impl Stream for QuestionStream {
},
Some(Err(err)) => Poll::Ready(Some(Err(err))),
None => Poll::Ready(None),
};
}
}
}
52 changes: 50 additions & 2 deletions libs/client-api/src/http_collab.rs
@@ -4,8 +4,8 @@ use app_error::AppError;
use bytes::Bytes;
use chrono::{DateTime, Utc};
use client_api_entity::workspace_dto::{
AFDatabase, AFDatabaseField, AFDatabaseRow, AFDatabaseRowDetail, DatabaseRowUpdatedItem,
ListDatabaseRowDetailParam, ListDatabaseRowUpdatedParam,
AFDatabase, AFDatabaseField, AFDatabaseRow, AFDatabaseRowDetail, AFInsertDatabaseField,
DatabaseRowUpdatedItem, ListDatabaseRowDetailParam, ListDatabaseRowUpdatedParam,
};
use client_api_entity::{
BatchQueryCollabParams, BatchQueryCollabResult, CollabParams, CreateCollabParams,
@@ -210,6 +210,28 @@ impl Client {
AppResponse::from_response(resp).await?.into_data()
}

// Adds a database field to the specified database.
// Returns the field id of the newly created field.
pub async fn add_database_field(
&self,
workspace_id: &str,
database_id: &str,
insert_field: &AFInsertDatabaseField,
) -> Result<String, AppResponseError> {
let url = format!(
"{}/api/workspace/{}/database/{}/fields",
self.base_url, workspace_id, database_id
);
let resp = self
.http_client_with_auth(Method::POST, &url)
.await?
.json(insert_field)
.send()
.await?;
log_request_id(&resp);
AppResponse::from_response(resp).await?.into_data()
}

pub async fn list_database_row_ids_updated(
&self,
workspace_id: &str,
@@ -250,6 +272,32 @@ impl Client {
AppResponse::from_response(resp).await?.into_data()
}

/// Example payload:
/// {
/// "Name": "some_data", # using column name
/// "_pIkG": "some other data" # using field_id (can be obtained from [get_database_fields])
/// }
/// Upon success, returns the row id for the newly created row.
pub async fn add_database_item(
&self,
workspace_id: &str,
database_id: &str,
payload: &serde_json::Value,
) -> Result<String, AppResponseError> {
let url = format!(
"{}/api/workspace/{}/database/{}/row",
self.base_url, workspace_id, database_id
);
let resp = self
.http_client_with_auth(Method::POST, &url)
.await?
.json(&payload)
.send()
.await?;
log_request_id(&resp);
AppResponse::from_response(resp).await?.into_data()
}

#[instrument(level = "debug", skip_all, err)]
pub async fn post_realtime_msg(
&self,
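
For orientation, here is a rough sketch of how a caller might exercise the two endpoints this file adds. It is not code from this PR: the workspace and database ids are placeholders, the numeric field_type value is an assumption (the ids are defined by collab-database's FieldType enum, not here), and error handling is reduced to anyhow.

use client_api::Client;
use client_api_entity::workspace_dto::AFInsertDatabaseField;
use serde_json::json;

// Hypothetical usage of the new client methods (assumes an authenticated `client`).
async fn add_field_and_row(
  client: &Client,
  workspace_id: &str,
  database_id: &str,
) -> anyhow::Result<()> {
  // POST /api/workspace/{workspace_id}/database/{database_id}/fields
  // Returns the id of the newly created field.
  let field_id = client
    .add_database_field(
      workspace_id,
      database_id,
      &AFInsertDatabaseField {
        name: "Priority".to_string(),
        field_type: 3, // assumed SingleSelect id; check collab-database's FieldType
        type_option_data: None,
      },
    )
    .await?;

  // POST /api/workspace/{workspace_id}/database/{database_id}/row
  // Cells can be keyed by column name or by field id; returns the new row id.
  let mut cells = serde_json::Map::new();
  cells.insert("Name".to_string(), json!("some_data")); // by column name
  cells.insert(field_id, json!("High")); // by the field id returned above
  let payload = serde_json::Value::Object(cells);
  let row_id = client
    .add_database_item(workspace_id, database_id, &payload)
    .await?;

  println!("created row {row_id}");
  Ok(())
}
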
2 changes: 1 addition & 1 deletion libs/client-api/src/http_file.rs
@@ -290,7 +290,7 @@ impl WSClientConnectURLProvider for Client {
///
/// # Returns
/// A `Result` containing the base64-encoded MD5 hash on success, or an error if the file cannot be read.

///
/// Asynchronously calculates the MD5 hash of a file using efficient buffer handling and returns it as a base64-encoded string.
///
/// # Arguments
2 changes: 1 addition & 1 deletion libs/collab-stream/src/pubsub.rs
@@ -50,7 +50,7 @@ impl CollabStreamPub {

#[instrument(level = "debug", skip_all, err)]
pub async fn publish(&mut self, message: PubSubMessage) -> Result<(), StreamError> {
self.conn.publish(ACTIVE_COLLAB_CHANNEL, message).await?;
let () = self.conn.publish(ACTIVE_COLLAB_CHANNEL, message).await?;
Ok(())
}
}
8 changes: 4 additions & 4 deletions libs/collab-stream/src/stream.rs
@@ -2,7 +2,7 @@ use crate::error::StreamError;
use crate::model::{MessageId, StreamBinary, StreamMessage, StreamMessageByStreamKey};
use redis::aio::ConnectionManager;
use redis::streams::{StreamMaxlen, StreamReadOptions};
use redis::{pipe, AsyncCommands, RedisError};
use redis::{pipe, AsyncCommands, Pipeline, RedisError};

pub struct CollabStream {
connection_manager: ConnectionManager,
@@ -34,9 +34,9 @@ impl CollabStream {
let mut pipe = pipe();
for message in messages {
let tuple = message.into_tuple_array();
pipe.xadd(&self.stream_key, "*", tuple.as_slice());
let _: &mut Pipeline = pipe.xadd(&self.stream_key, "*", tuple.as_slice());
}
pipe.query_async(&mut self.connection_manager).await?;
let () = pipe.query_async(&mut self.connection_manager).await?;
Ok(())
}

@@ -90,7 +90,7 @@ impl CollabStream {
}

pub async fn clear(&mut self) -> Result<(), RedisError> {
self
let () = self
.connection_manager
.xtrim(&self.stream_key, StreamMaxlen::Equals(0))
.await?;
16 changes: 8 additions & 8 deletions libs/collab-stream/src/stream_group.rs
@@ -7,7 +7,7 @@ use redis::streams::{
StreamClaimOptions, StreamClaimReply, StreamMaxlen, StreamPendingData, StreamPendingReply,
StreamReadOptions,
};
use redis::{pipe, AsyncCommands, ErrorKind, RedisResult};
use redis::{pipe, AsyncCommands, ErrorKind, Pipeline, RedisResult};
use tokio_util::sync::CancellationToken;
use tracing::{error, info, trace, warn};

@@ -119,7 +119,7 @@ impl StreamGroup {
.into_iter()
.map(|m| m.to_string())
.collect::<Vec<String>>();
self
let () = self
.connection_manager
.xack(&self.stream_key, &self.group_name, &message_ids)
.await?;
@@ -164,7 +164,7 @@ impl StreamGroup {
let message = message.into();
let tuple = message.into_tuple_array();
if let Some(len) = self.config.max_len {
pipe
let _: &mut Pipeline = pipe
.cmd("XADD")
.arg(&self.stream_key)
.arg("MAXLEN")
@@ -177,7 +177,7 @@
}
}

pipe.query_async(&mut self.connection_manager).await?;
let () = pipe.query_async(&mut self.connection_manager).await?;
if let Err(err) = self.set_expiration().await {
error!("set expiration fail: {:?}", err);
}
@@ -197,13 +197,13 @@
let tuple = message.into_tuple_array();
match self.config.max_len {
Some(max_len) => {
self
let () = self
.connection_manager
.xadd_maxlen(&self.stream_key, StreamMaxlen::Approx(max_len), "*", &tuple)
.await?;
},
None => {
self
let () = self
.connection_manager
.xadd(&self.stream_key, "*", tuple.as_slice())
.await?;
@@ -369,7 +369,7 @@ impl StreamGroup {
/// Use the `XTRIM` command to truncate the Redis stream to a maximum length of zero, effectively
/// removing all entries from the stream.
pub async fn clear(&mut self) -> Result<(), StreamError> {
self
let () = self
.connection_manager
.xtrim(&self.stream_key, StreamMaxlen::Equals(0))
.await?;
@@ -393,7 +393,7 @@
};

if should_set_expiration {
self
let () = self
.connection_manager
.expire(&self.stream_key, expire_time)
.await?;
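
Most of the repetitive churn in the two collab-stream files is one mechanical fix, most likely prompted by the toolchain bump to Rust 1.83 further down (rust-toolchain.toml): the dependency_on_unit_never_type_fallback lint flags redis commands whose generic return type was previously left to fallback, so each call now pins the type explicitly, either with `let () = ...` or with a throwaway typed binding such as `let _: &mut Pipeline = ...`. A standalone sketch of the pattern, with an illustrative key name rather than the crate's real stream keys:

use redis::{aio::ConnectionManager, streams::StreamMaxlen, AsyncCommands, RedisResult};

// Sketch: pinning the generic return type of redis-rs commands.
async fn clear_stream(conn: &mut ConnectionManager, stream_key: &str) -> RedisResult<()> {
  // `xtrim` is generic over its return value; binding to the unit pattern
  // selects `()` instead of relying on type fallback.
  let () = conn
    .xtrim(stream_key, StreamMaxlen::Equals(0))
    .await?;

  // An equivalent spelling uses a turbofish annotation instead:
  // conn.xtrim::<_, ()>(stream_key, StreamMaxlen::Equals(0)).await?;
  Ok(())
}
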
1 change: 0 additions & 1 deletion libs/database/src/collab/collab_db_ops.rs
@@ -43,7 +43,6 @@ use uuid::Uuid;
/// * There's a database operation failure.
/// * There's an attempt to insert a row with an existing `object_id` but a different `workspace_id`.
///

#[inline]
#[instrument(level = "trace", skip(tx, params), fields(oid=%params.object_id), err)]
pub async fn insert_into_af_collab(
7 changes: 7 additions & 0 deletions libs/shared-entity/src/dto/workspace_dto.rs
@@ -376,3 +376,10 @@ pub struct AFDatabaseField {
pub type_option: HashMap<String, serde_json::Value>,
pub is_primary: bool,
}

#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct AFInsertDatabaseField {
pub name: String,
pub field_type: i64, // FieldType ID
pub type_option_data: Option<serde_json::Value>, // TypeOptionData
}
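
As a rough illustration only: with the derives above, AFInsertDatabaseField serializes to the JSON body the new /fields endpoint accepts. The field names come from the struct; the concrete values, and the meaning of field_type = 2, are assumptions, since the numeric ids belong to collab-database's FieldType rather than this file.

use serde_json::json;
use shared_entity::dto::workspace_dto::AFInsertDatabaseField;

fn main() {
  let field = AFInsertDatabaseField {
    name: "Due date".to_string(),
    field_type: 2, // assumed DateTime id
    type_option_data: Some(json!({ "date_format": 0, "time_format": 0 })), // assumed shape
  };
  // Prints the serialized request body, e.g.
  // {"name":"Due date","field_type":2,"type_option_data":{"date_format":0,"time_format":0}}
  println!("{}", serde_json::to_string(&field).unwrap());
}
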
2 changes: 1 addition & 1 deletion rust-toolchain.toml
@@ -1,2 +1,2 @@
[toolchain]
channel = "1.78.0"
channel = "1.83.0"
6 changes: 3 additions & 3 deletions services/appflowy-collaborate/src/collab/cache/mem_cache.rs
@@ -32,7 +32,7 @@ impl CollabMemCache {
pub async fn insert_collab_meta(&self, meta: CollabMetadata) -> Result<(), AppError> {
let key = collab_meta_key(&meta.object_id);
let value = serde_json::to_string(&meta)?;
self
let () = self
.connection_manager
.clone()
.set_ex(key, value, ONE_MONTH)
@@ -188,7 +188,7 @@ impl CollabMemCache {
// for executing a subsequent transaction (with MULTI/EXEC). If any of the watched keys are
// altered by another client before the current client executes EXEC, the transaction will be
// aborted by Redis (the EXEC will return nil indicating the transaction was not processed).
redis::cmd("WATCH")
let () = redis::cmd("WATCH")
.arg(&cache_object_id)
.query_async::<_, ()>(&mut conn)
.await?;
@@ -228,7 +228,7 @@
.ignore()
.expire(&cache_object_id, expiration_seconds.unwrap_or(SEVEN_DAYS) as i64) // Setting the expiration to 7 days
.ignore();
pipeline.query_async(&mut conn).await?;
let () = pipeline.query_async(&mut conn).await?;
}
Ok::<(), redis::RedisError>(())
}
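
The hunks above keep the cache's existing optimistic-locking scheme and only add unit-type annotations. For readers unfamiliar with the WATCH comment quoted in the diff, a stripped-down sketch of that WATCH/MULTI/EXEC flow with redis-rs follows; the key name, value type, and lack of a retry loop are illustrative, not the cache's actual logic:

use redis::AsyncCommands;

// Sketch: optimistic locking with WATCH. If the watched key is modified by
// another client before EXEC, Redis discards the queued commands and EXEC
// returns nil, which deserializes to `None` below.
async fn bump_if_unchanged(
  conn: &mut redis::aio::ConnectionManager,
  key: &str,
) -> redis::RedisResult<bool> {
  let () = redis::cmd("WATCH").arg(key).query_async(conn).await?;

  let current: Option<i64> = conn.get(key).await?;
  let next = current.unwrap_or(0) + 1;

  // Queue the write inside MULTI/EXEC.
  let mut pipeline = redis::pipe();
  let _: &mut redis::Pipeline = pipeline.atomic().set(key, next).ignore();

  // `Some(())` means the transaction committed; `None` means it was aborted
  // because the watched key changed.
  let committed: Option<()> = pipeline.query_async(conn).await?;
  Ok(committed.is_some())
}
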
2 changes: 1 addition & 1 deletion services/appflowy-collaborate/src/group/manager.rs
@@ -179,7 +179,7 @@ where
self.edit_state_max_secs,
indexer,
)?);
self.state.insert_group(object_id, group.clone());
self.state.insert_group(object_id, group);
Ok(())
}
}