diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml
index 494217b9..0dbd76bd 100644
--- a/.github/workflows/backend.yml
+++ b/.github/workflows/backend.yml
@@ -10,6 +10,7 @@ on:
 
 env:
   CARGO_TERM_COLOR: always
+
 jobs:
   check:
     name: Check
@@ -44,3 +45,5 @@ jobs:
       with:
         command: fmt
         args: --manifest-path ./backend/Cargo.toml
+
+
diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml
new file mode 100644
index 00000000..eea70d99
--- /dev/null
+++ b/.github/workflows/master.yml
@@ -0,0 +1,51 @@
+name: master
+
+on:
+  push:
+    branches:
+      - "*"
+    paths:
+      - "**.rs"
+      - .github/workflows/master.yml
+
+env:
+  CARGO_TERM_COLOR: always
+
+jobs:
+  check:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check out
+        uses: actions/checkout@v3
+      - name: Install Rust
+        uses: actions-rs/toolchain@v1
+        with:
+          profile: minimal
+          toolchain: nightly-x86_64-unknown-linux-gnu
+          override: true
+          components: rustfmt, clippy
+      - name: Set up cargo cache
+        uses: actions/cache@v3
+        continue-on-error: false
+        with:
+          path: |
+            ~/.cargo/bin/
+            ~/.cargo/registry/index/
+            ~/.cargo/registry/cache/
+            ~/.cargo/git/db/
+            target/
+          key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
+          restore-keys: ${{ runner.os }}-cargo-
+      - name: Lint
+        run: |
+          cargo fmt --all -- --check
+          cargo clippy -- -D warnings
+      - name: Check
+        run: cargo check
+      - name: Run Backend Test
+        run: cargo test -p backend
+      - name: Run Frontend Test
+        run: cargo test -p frontend
+      # the judger needs to build its plugins first (which takes a long time)
+      # - name: Run Judger test
+      #   run: cd judger && just test
diff --git a/backend/.gitignore b/backend/.gitignore
index afad4683..de1b2a38 100644
--- a/backend/.gitignore
+++ b/backend/.gitignore
@@ -7,4 +7,5 @@
 /cert.pem
 /a.log
 /judger.proto
-/backend.proto
\ No newline at end of file
+/backend.proto
+/ws-Cargo.toml
\ No newline at end of file
diff --git a/backend/Cargo.toml b/backend/Cargo.toml
index a070dac0..7d7d53d3 100644
--- a/backend/Cargo.toml
+++ b/backend/Cargo.toml
@@ -85,9 +85,3 @@ features = ["mutex", "spin_mutex", "rwlock"]
 
 [build-dependencies]
 tonic-build = { workspace = true }
-
-[features]
-default = ["single-instance"]
-unsecured-log = []
-single-instance = []
-testsuit = []
diff --git a/backend/Dockerfile b/backend/Dockerfile
index 022462ff..acf75abb 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -14,6 +14,9 @@
 WORKDIR /complier/proto
 COPY backend.proto .
 COPY judger.proto .
 
+WORKDIR /compiler
+COPY ws-Cargo.toml Cargo.toml
+
 WORKDIR /complier/backend
 COPY . .
diff --git a/backend/build.rs b/backend/build.rs
index 890c79cc..cd09f129 100644
--- a/backend/build.rs
+++ b/backend/build.rs
@@ -1,5 +1,19 @@
 fn main() -> Result<(), Box<dyn std::error::Error>> {
-    tonic_build::compile_protos("../proto/backend.proto")?;
-    tonic_build::compile_protos("../proto/judger.proto")?;
+    tonic_build::configure()
+        .build_client(false)
+        .type_attribute(
+            "oj.backend.SortBy",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .compile(&["../proto/backend.proto"], &["../proto"])?;
+    // tonic_build::compile_protos("../proto/backend.proto")?;
+    // tonic_build::compile_protos("../proto/judger.proto")?;
+    tonic_build::configure()
+        .build_server(false)
+        .type_attribute(
+            "oj.backend.SortBy",
+            "#[derive(serde::Serialize, serde::Deserialize)]",
+        )
+        .compile(&["../proto/judger.proto"], &["../proto"])?;
     Ok(())
 }
diff --git a/backend/src/controller/duplicate.rs b/backend/src/controller/duplicate.rs
index b47443b6..7549785f 100644
--- a/backend/src/controller/duplicate.rs
+++ b/backend/src/controller/duplicate.rs
@@ -3,9 +3,7 @@
 use tracing::Span;
 use uuid::Uuid;
 
 pub struct DupController {
-    #[cfg(feature = "single-instance")]
     dup_i32: Cache<(i32, Uuid), i32>,
-    #[cfg(feature = "single-instance")]
     dup_str: Cache<(i32, Uuid), String>,
 }
@@ -13,29 +11,24 @@ impl DupController {
     #[tracing::instrument(parent=span, name="duplicate_construct",level = "info",skip_all)]
     pub fn new(span: &Span) -> Self {
         Self {
-            #[cfg(feature = "single-instance")]
             dup_i32: Cache::new(150),
-            #[cfg(feature = "single-instance")]
             dup_str: Cache::new(150),
         }
     }
     /// store a request_id with result i32
     pub fn store_i32(&self, spliter: i32, uuid: Uuid, result: i32) {
         tracing::trace!(request_id=?uuid);
-        #[cfg(feature = "single-instance")]
         self.dup_i32.insert((spliter, uuid), result);
     }
     /// store a request_id with result String
     pub fn store_str(&self, spliter: i32, uuid: Uuid, result: String) {
         tracing::trace!(request_id=?uuid);
-        #[cfg(feature = "single-instance")]
         self.dup_str.insert((spliter, uuid), result);
     }
     /// attempt to get result of i32
     #[tracing::instrument(level = "debug", skip(self))]
     pub fn check_i32(&self, spliter: i32, uuid: &Uuid) -> Option<i32> {
         tracing::trace!(request_id=?uuid);
-        #[cfg(feature = "single-instance")]
         if let Some(x) = self.dup_i32.get(&(spliter, *uuid)) {
             log::debug!("duplicated request_id: {}, result: {}", uuid, x);
             return Some(x);
@@ -46,7 +39,6 @@ impl DupController {
     #[tracing::instrument(level = "debug", skip(self))]
     pub fn check_str(&self, spliter: i32, uuid: &Uuid) -> Option<String> {
         tracing::trace!(request_id=?uuid);
-        #[cfg(feature = "single-instance")]
         if let Some(x) = self.dup_str.get(&(spliter, *uuid)) {
             log::debug!("duplicated request_id: {}, result: {}", uuid, x);
             return Some(x);
diff --git a/backend/src/controller/token.rs b/backend/src/controller/token.rs
index 0358fa32..ac4f08a0 100644
--- a/backend/src/controller/token.rs
+++ b/backend/src/controller/token.rs
@@ -62,7 +62,6 @@ impl From for CachedToken {
 }
 
 pub struct TokenController {
-    #[cfg(feature = "single-instance")]
     cache: Cache,
     rng: Mutex<Hc128Rng>,
     cache_meter: RateMetrics<30>,
@@ -72,10 +71,8 @@ impl TokenController {
     #[tracing::instrument(parent = span,name="token_construct_controller",level = "info",skip_all)]
     pub fn new(span: &Span) -> Arc<Self> {
         log::debug!("Setup TokenController");
-        #[cfg(feature = "single-instance")]
         let cache = Cache::new(500);
         let self_ = Arc::new(Self {
-            #[cfg(feature = "single-instance")]
             cache,
             rng: Mutex::new(Hc128Rng::from_entropy()),
             cache_meter: RateMetrics::new("hitrate_token"),
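A note on the build.rs hunk above: `type_attribute` attaches extra derives to the prost-generated `oj.backend.SortBy` enum, which is what allows it to live inside the now serde-serializable pager state further down in this patch. A rough, self-contained stand-in for the generated type; the variant names and the bincode backend are assumptions, not the exact generated code:

```rust
// Hand-written stand-in for the prost-generated SortBy enum once the
// type_attribute above has added the serde derives (illustrative only).
use serde::{Deserialize, Serialize};

#[derive(Clone, Copy, Debug, PartialEq, Serialize, Deserialize)]
#[repr(i32)]
enum SortBy {
    UploadDate = 0,
    CreateDate = 1,
    Score = 2,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Round-trip the enum through a serde backend, as the pager's
    // encode/decode path presumably does (the "bincode_deserialize" trace
    // message later in this patch hints at bincode).
    let sort = SortBy::Score;
    let bytes = bincode::serialize(&sort)?;
    let back: SortBy = bincode::deserialize(&bytes)?;
    assert_eq!(sort, back);
    println!("{back:?}");
    Ok(())
}
```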
@@ -137,7 +134,6 @@ impl TokenController {
 
         let token: CachedToken;
 
-        #[cfg(feature = "single-instance")]
         let cache_result = {
             match self.cache.get(&rand) {
                 Some(cc) => {
@@ -151,8 +147,6 @@ impl TokenController {
                 None => None,
             }
         };
-        #[cfg(not(feature = "single-instance"))]
-        let cache_result: Option<CachedToken> = None;
 
         let token = match cache_result {
             Some(token) => {
@@ -172,7 +166,6 @@ impl TokenController {
                 tracing::trace!(user_id = token.user_id, "cache_missed");
                 self.cache_meter.unset();
 
-                #[cfg(feature = "single-instance")]
                 self.cache.insert(rand, token.clone());
 
                 token
@@ -199,7 +192,6 @@ impl TokenController {
             .exec(db)
             .await?;
 
-        #[cfg(feature = "single-instance")]
         self.cache.remove(&rand);
 
         Ok(Some(()))
diff --git a/backend/src/endpoint/util/pager/impls.rs b/backend/src/endpoint/util/pager/impls.rs
index 51ad0a5e..51a5db00 100644
--- a/backend/src/endpoint/util/pager/impls.rs
+++ b/backend/src/endpoint/util/pager/impls.rs
@@ -42,7 +42,7 @@ impl PagerTrait for problem::Entity {
             _ => problem::Column::Id,
         }
     }
-    fn get_key_of(model: &Self::Model, sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, sort: &SortBy) -> String {
         match sort {
             SortBy::UploadDate => model.update_at.to_string(),
             SortBy::CreateDate => model.create_at.to_string(),
@@ -90,7 +90,7 @@ impl PagerTrait for test::Entity {
             _ => test::Column::Id,
         }
     }
-    fn get_key_of(model: &Self::Model, sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, sort: &SortBy) -> String {
         match sort {
             SortBy::Score => (model.score).to_string(),
             _ => model.id.to_string(),
@@ -128,7 +128,7 @@ impl PagerTrait for contest::Entity {
             _ => contest::Column::Id,
         }
     }
-    fn get_key_of(model: &Self::Model, sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, sort: &SortBy) -> String {
         match sort {
             SortBy::CreateDate => model.create_at.to_string(),
             SortBy::UploadDate => model.update_at.to_string(),
@@ -170,7 +170,7 @@ impl PagerTrait for user::Entity {
             _ => user::Column::Id,
         }
     }
-    fn get_key_of(model: &Self::Model, sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, sort: &SortBy) -> String {
         match sort {
             SortBy::CreateDate => model.create_at.to_string(),
             SortBy::Score => model.score.to_string(),
@@ -215,7 +215,7 @@ impl PagerTrait for submit::Entity {
             _ => submit::Column::Id,
         }
     }
-    fn get_key_of(model: &Self::Model, sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, sort: &SortBy) -> String {
         match sort {
             SortBy::Committed => match model.committed {
                 true => "1".to_string(),
@@ -253,7 +253,7 @@ impl PagerTrait for education::Entity {
     fn sort_column(_sort: &SortBy) -> education::Column {
         education::Column::Id
     }
-    fn get_key_of(model: &Self::Model, _sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, _sort: &SortBy) -> String {
         model.id.to_string()
     }
     fn get_id(model: &Self::Model) -> i32 {
@@ -276,7 +276,7 @@ impl PagerTrait for chat::Entity {
 
     type ParentMarker = HasParent;
 
-    fn get_key_of(model: &Self::Model, _sort: &SortBy) -> String {
+    fn sort_value(model: &Self::Model, _sort: &SortBy) -> String {
         model.id.to_string()
     }
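The `get_key_of` → `sort_value` rename above matches how the returned string is actually used: it becomes the keyset cursor for column-sorted pages, paired with the last primary key. A minimal sketch of that seek condition over plain in-memory data; the names and comparison shape are illustrative, not the project's `PaginateColBuilder` API:

```rust
// Keyset ("seek") pagination in miniature: resume strictly after the last
// seen (sort_value, id) pair instead of skipping rows with a growing OFFSET.
#[derive(Debug)]
struct Row {
    id: i32,
    sort_value: String, // stringified sort key, as sort_value() returns
}

/// Rows that come after the cursor in (sort_value, id) order.
fn after_cursor<'a>(rows: &'a [Row], last_value: &str, last_id: i32) -> Vec<&'a Row> {
    rows.iter()
        .filter(|r| {
            r.sort_value.as_str() > last_value
                || (r.sort_value.as_str() == last_value && r.id > last_id)
        })
        .collect()
}

fn main() {
    let rows = vec![
        Row { id: 1, sort_value: "10".into() },
        Row { id: 2, sort_value: "10".into() },
        Row { id: 3, sort_value: "25".into() },
    ];
    // The cursor is the last row of the previous page: value "10", id 1.
    let next_page = after_cursor(&rows, "10", 1);
    println!("{next_page:?}"); // rows 2 and 3
}
```

Compared with an ever-growing OFFSET, resuming from the last (value, id) pair keeps each page query cheap and stable when rows are inserted between requests.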
diff --git a/backend/src/endpoint/util/pager/mod.rs b/backend/src/endpoint/util/pager/mod.rs
index 52536db1..9a40731a 100644
--- a/backend/src/endpoint/util/pager/mod.rs
+++ b/backend/src/endpoint/util/pager/mod.rs
@@ -54,45 +54,27 @@ where
     const COL_SELECT: &'static [Self::Column];
     type ParentMarker: PagerMarker;
 
-    fn sort(select: Select<Self>, sort: &SortBy, rev: bool) -> Select<Self> {
-        let desc = match rev {
-            true => Order::Asc,
-            false => Order::Desc,
-        };
-        select.order_by(Self::sort_column(sort), desc)
-    }
-    fn get_key_of(model: &Self::Model, sort: &SortBy) -> String;
+    fn sort_value(model: &Self::Model, sort: &SortBy) -> String;
     fn sort_column(sort: &SortBy) -> Self::Column;
     fn get_id(model: &Self::Model) -> i32;
     fn query_filter(select: Select<Self>, auth: &Auth) -> Result<Select<Self>, Error>;
 }
 
-#[derive(Serialize, Deserialize)]
-enum RawSearchDep {
-    Text(String),
-    Column(i32, bool, String),
-    Parent(i32),
-}
+#[derive(Clone, Debug, Default, Serialize, Deserialize)]
+pub struct LastValue(bool, String);
 
-#[derive(Serialize, Deserialize)]
-struct RawPager {
-    type_number: i32,
-    sort: RawSearchDep,
-    last_rev: bool,
-    last_pk: Option<i32>,
-}
-
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Serialize, Deserialize)]
 pub enum SearchDep {
     Text(String),
-    Column(SortBy, bool, String),
+    Column(SortBy, LastValue),
     Parent(i32),
+    ParentSort(i32, SortBy, LastValue),
 }
 
 impl SearchDep {
-    fn update_last_col(&mut self, data: String) {
-        if let Self::Column(_a, _b, c) = self {
-            *c = data;
+    fn update_last_col(&mut self, data: LastValue) {
+        if let Self::Column(_, val) = self {
+            *val = data;
         } else {
             unreachable!()
         }
@@ -100,8 +82,9 @@ impl SearchDep {
     }
 }
 /// An instance of paginator itself
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Serialize, Deserialize)]
 pub struct Pager {
+    type_number: i32,
     sort: SearchDep,
     last_pk: Option<i32>,
     last_rev: bool,
@@ -114,8 +97,8 @@ where
     E: EntityTrait + PagerTrait>,
 {
     fn parent_search(ppk: i32) -> Self;
+    fn parent_sorted_search(ppk: i32, sort: SortBy) -> Self;
     fn from_raw(s: String, server: &Server) -> Result<Pager<E>, Error>;
-    fn into_raw(self, server: &Server) -> String;
     async fn fetch(
         &mut self,
         limit: u64,
@@ -131,7 +114,6 @@ where
     E: EntityTrait + PagerTrait,
 {
     fn from_raw(s: String, server: &Server) -> Result<Pager<E>, Error>;
-    fn into_raw(self, server: &Server) -> String;
     async fn fetch(
         &mut self,
         limit: u64,
@@ -151,32 +133,22 @@ where
     #[instrument]
     fn parent_search(ppk: i32) -> Self {
         Self {
+            type_number: E::TYPE_NUMBER,
             sort: SearchDep::Parent(ppk),
             _entity: PhantomData,
             last_pk: None,
-            last_rev: false,
+            last_rev: true,
         }
     }
-    #[instrument(name = "pagination_deserialize", level = "trace", skip(server))]
-    fn into_raw(self, server: &Server) -> String {
-        let raw = RawPager {
+    #[instrument]
+    fn parent_sorted_search(ppk: i32, sort: SortBy) -> Self {
+        Self {
             type_number: E::TYPE_NUMBER,
-            sort: match self.sort {
-                SearchDep::Text(s) => RawSearchDep::Text(s),
-                SearchDep::Column(sort_by, rev, last_val) => {
-                    RawSearchDep::Column(sort_by as i32, rev, last_val)
-                }
-                SearchDep::Parent(x) => RawSearchDep::Parent(x),
-            },
-            last_rev: self.last_rev,
-            last_pk: self.last_pk,
-        };
-        let byte = server.crypto.encode(raw);
-
-        base64::Engine::encode(
-            &base64::engine::general_purpose::STANDARD_NO_PAD,
-            byte.unwrap(),
-        )
+            sort: SearchDep::ParentSort(ppk, sort, LastValue::default()),
+            _entity: PhantomData,
+            last_pk: None,
+            last_rev: false,
+        }
     }
     #[instrument(skip_all, name = "pagination_deserialize", level = "trace")]
     fn from_raw(s: String, server: &Server) -> Result<Pager<E>, Error> {
@@ -185,29 +157,14 @@ where
             tracing::trace!(err=?e,"base64_deserialize");
             Error::PaginationError("Not base64")
         })?;
-        let pager = server.crypto.decode::<RawPager>(byte).map_err(|e| {
+        let pager = server.crypto.decode::<Pager<E>>(byte).map_err(|e| {
             tracing::debug!(err=?e,"bincode_deserialize");
             Error::PaginationError("Malformated pager")
         })?;
         if pager.type_number != E::TYPE_NUMBER {
             return Err(Error::PaginationError("Pager type number mismatch"));
         }
-        let sort = match pager.sort {
-            RawSearchDep::Text(x) => SearchDep::Text(x),
-            RawSearchDep::Column(sort_by, rev, last_val) => {
-                let sort_by = sort_by
-                    .try_into()
-                    .map_err(|_| Error::PaginationError("Pager reconstruction failed"))?;
-                SearchDep::Column(sort_by, rev, last_val)
-            }
-            RawSearchDep::Parent(x) => SearchDep::Parent(x),
-        };
-        Ok(Pager {
-            sort,
-            _entity: PhantomData,
-            last_pk: pager.last_pk,
-            last_rev: pager.last_rev,
-        })
+        Ok(pager)
     }
     #[instrument(skip(self, auth))]
     async fn fetch(
@@ -217,14 +174,29 @@ where
         rev: bool,
         auth: &Auth,
     ) -> Result<Vec<E::Model>, Error> {
+        Self::check_bound(limit, offset)?;
         let models = match &self.sort {
-            SearchDep::Text(txt) => {
-                let mut query = E::query_filter(E::find(), auth)?;
-                let mut condition = E::COL_TEXT[0].like(txt.as_str());
-                for col in E::COL_TEXT[1..].iter() {
-                    condition = condition.or(col.like(txt.as_str()));
+            SearchDep::Text(txt) => self.text_search_inner(limit, offset, rev, auth).await?,
+            SearchDep::Column(sort, last_val) => {
+                self.column_search_inner(limit, offset, rev, auth).await?
+            }
+            SearchDep::Parent(p_pk) => {
+                let db = DB.get().unwrap();
+                // TODO: select ID only
+                let query = E::ParentMarker::related_filter(auth).await?;
+                let parent = query
+                    .filter(E::ParentMarker::COL_ID.eq(*p_pk))
+                    .columns([E::ParentMarker::COL_ID])
+                    .one(db)
+                    .await?;
+
+                if parent.is_none() {
+                    return Ok(vec![]);
                 }
-                query = query.filter(condition);
+
+                let mut query = parent.unwrap().find_related(E::default());
+
+                query = E::query_filter(query, auth)?;
 
                 if let Some(last) = self.last_pk {
                     let paginate = PaginatePkBuilder::default()
@@ -238,6 +210,7 @@ where
                 } else {
                     query = order_by_bool(query, E::COL_ID, rev);
                 }
+                query = query.offset(offset).limit(limit);
 
                 query
                     .columns(E::COL_SELECT.to_vec())
@@ -246,44 +219,12 @@ where
                    .all(DB.get().unwrap())
                    .await?
             }
-            SearchDep::Column(sort, inner_rev, last_val) => {
-                let mut query = E::query_filter(E::find(), auth)?;
-                let rev = rev ^ inner_rev;
-
-                let col = E::sort_column(sort);
-
-                if let Some(last) = self.last_pk {
-                    PaginateColBuilder::default()
-                        .include(self.last_rev ^ rev)
-                        .rev(rev)
-                        .pk(E::COL_ID)
-                        .col(col)
-                        .last_id(last)
-                        .last_value(last_val)
-                        .build()
-                        .unwrap();
-                } else {
-                    query = order_by_bool(query, E::COL_ID, rev);
-                    query = order_by_bool(query, col, rev);
-                }
-
-                query = query.offset(offset).limit(limit);
-                let models = query
-                    .columns(E::COL_SELECT.to_vec())
-                    .limit(limit)
-                    .offset(offset)
-                    .all(DB.get().unwrap())
-                    .await?;
-
-                if let Some(model) = models.last() {
-                    self.sort.update_last_col(E::get_key_of(model, sort));
-                }
-
-                models
-            }
-            SearchDep::Parent(p_pk) => {
+            SearchDep::ParentSort(p_pk, sort, last_val) => {
                 let db = DB.get().unwrap();
                 // TODO: select ID only
+                let LastValue(inner_rev, last_val) = last_val;
+                let rev = rev ^ inner_rev;
+
                 let query = E::ParentMarker::related_filter(auth).await?;
                 let parent = query
                     .filter(E::ParentMarker::COL_ID.eq(*p_pk))
@@ -300,11 +241,13 @@ where
                 query = E::query_filter(query, auth)?;
 
                 if let Some(last) = self.last_pk {
-                    let paginate = PaginatePkBuilder::default()
+                    let paginate = PaginateColBuilder::default()
                         .include(self.last_rev ^ rev)
                         .rev(rev)
                         .pk(E::COL_ID)
-                        .last(last)
+                        .last_id(last)
+                        .col(E::sort_column(sort))
+                        .last_value(&last_val)
                         .build()
                         .unwrap();
                     query = paginate.apply(query);
@@ -313,12 +256,19 @@ where
                 }
 
                 query = query.offset(offset).limit(limit);
-                query
+                let models = query
                     .columns(E::COL_SELECT.to_vec())
                     .limit(limit)
                     .offset(offset)
                     .all(DB.get().unwrap())
-                    .await?
+                    .await?;
+
+                if let Some(model) = models.last() {
+                    self.sort
+                        .update_last_col(LastValue(rev, E::sort_value(model, sort)));
+                }
+
+                models
             }
         };
         if let Some(model) = models.last() {
@@ -334,26 +284,6 @@ where
     E: PagerTrait,
 {
     #[instrument(name = "pagination_deserialize", level = "trace", skip(server))]
-    fn into_raw(self, server: &Server) -> String {
-        let raw = RawPager {
-            type_number: E::TYPE_NUMBER,
-            sort: match self.sort {
-                SearchDep::Text(s) => RawSearchDep::Text(s),
-                SearchDep::Column(sort_by, rev, last_val) => {
-                    RawSearchDep::Column(sort_by as i32, rev, last_val)
-                }
-                SearchDep::Parent(x) => RawSearchDep::Parent(x),
-            },
-            last_pk: self.last_pk,
-            last_rev: self.last_rev,
-        };
-        let byte = server.crypto.encode(raw);
-
-        base64::Engine::encode(
-            &base64::engine::general_purpose::STANDARD_NO_PAD,
-            byte.unwrap(),
-        )
-    }
     #[instrument(skip_all, name = "pagination_deserialize", level = "trace")]
     fn from_raw(s: String, server: &Server) -> Result<Pager<E>, Error> {
         let byte = base64::Engine::decode(&base64::engine::general_purpose::STANDARD_NO_PAD, s)
@@ -361,31 +291,20 @@ where
             tracing::trace!(err=?e,"base64_deserialize");
             Error::PaginationError("Not base64")
         })?;
-        let pager = server.crypto.decode::<RawPager>(byte).map_err(|e| {
+        let pager = server.crypto.decode::<Pager<E>>(byte).map_err(|e| {
             tracing::debug!(err=?e,"bincode_deserialize");
             Error::PaginationError("Malformated pager")
         })?;
         if pager.type_number != E::TYPE_NUMBER {
             return Err(Error::PaginationError("Pager type number mismatch"));
         }
-        let sort = match pager.sort {
-            RawSearchDep::Text(x) => SearchDep::Text(x),
-            RawSearchDep::Column(sort_by, rev, last_val) => {
-                let sort_by = sort_by
-                    .try_into()
-                    .map_err(|_| Error::PaginationError("Pager reconstruction failed"))?;
-                SearchDep::Column(sort_by, rev, last_val)
-            }
-            RawSearchDep::Parent(_) => {
-                return Err(Error::PaginationError("Pager reconstruction failed"));
+        match pager.sort {
+            SearchDep::Parent(_) | SearchDep::ParentSort(_, _, _) => {
+                return Err(Error::PaginationError("Pager type number mismatch"))
             }
-        };
-        Ok(Pager {
-            sort,
-            _entity: PhantomData,
-            last_pk: pager.last_pk,
-            last_rev: pager.last_rev,
-        })
+            _ => (),
+        }
+        Ok(pager)
     }
     #[instrument(skip(self, auth))]
     async fn fetch(
@@ -395,74 +314,14 @@ where
         rev: bool,
         auth: &Auth,
     ) -> Result<Vec<E::Model>, Error> {
+        Self::check_bound(limit, offset)?;
         let models = match &self.sort {
-            SearchDep::Text(txt) => {
-                let mut query = E::query_filter(E::find(), auth)?;
-                let mut condition = E::COL_TEXT[0].like(txt.as_str());
-                for col in E::COL_TEXT[1..].iter() {
-                    condition = condition.or(col.like(txt.as_str()));
-                }
-                query = query.filter(condition);
-
-                if let Some(last) = self.last_pk {
-                    let paginate = PaginatePkBuilder::default()
-                        .include(self.last_rev ^ rev)
-                        .rev(rev)
-                        .pk(E::COL_ID)
-                        .last(last)
-                        .build()
-                        .unwrap();
-                    query = paginate.apply(query);
-                } else {
-                    query = order_by_bool(query, E::COL_ID, rev);
-                }
-                query = query.offset(offset).limit(limit);
-                query
-                    .columns(E::COL_SELECT.to_vec())
-                    .limit(limit)
-                    .offset(offset)
-                    .all(DB.get().unwrap())
-                    .await?
-            }
-            SearchDep::Column(sort, inner_rev, last_val) => {
-                let mut query = E::query_filter(E::find(), auth)?;
-                let rev = rev ^ inner_rev;
-
-                let col = E::sort_column(sort);
-
-                if let Some(last) = self.last_pk {
-                    PaginateColBuilder::default()
-                        .include(self.last_rev ^ rev)
-                        .rev(rev)
-                        .pk(E::COL_ID)
-                        .col(col)
-                        .last_id(last)
-                        .last_value(last_val)
-                        .build()
-                        .unwrap();
-                } else {
-                    query = order_by_bool(query, E::COL_ID, rev);
-                    query = order_by_bool(query, col, rev);
-                }
-
-                query = query.offset(offset).limit(limit);
-                let models = query
-                    .columns(E::COL_SELECT.to_vec())
-                    .limit(limit)
-                    .offset(offset)
-                    .all(DB.get().unwrap())
-                    .await?;
-
-                if let Some(model) = models.last() {
-                    self.sort.update_last_col(E::get_key_of(model, sort));
-                }
-
-                models
-            }
-            SearchDep::Parent(_p_pk) => {
-                unreachable!()
-            }
-        };
+            SearchDep::Text(_) => self.text_search_inner(limit, offset, rev, auth).await,
+            SearchDep::Column(_, _) => self.column_search_inner(limit, offset, rev, auth).await,
+            _ => Err(Error::Unreachable(
+                "Pager can not have parent search",
+            )),
+        }?;
         if let Some(model) = models.last() {
             self.last_pk = Some(E::get_id(model));
         }
@@ -470,23 +329,132 @@ where
     }
 }
 
-impl Pager {
+impl Pager
+where
+    E: PagerTrait,
+{
     #[instrument(level = "debug")]
     pub fn sort_search(sort: SortBy, rev: bool) -> Self {
         Self {
-            sort: SearchDep::Column(sort, rev, "".to_string()),
+            type_number: E::TYPE_NUMBER,
+            sort: SearchDep::Column(sort, LastValue(rev, "".to_string())),
             _entity: PhantomData,
             last_pk: None,
             last_rev: false,
         }
     }
+    #[instrument(name = "pagination_deserialize", level = "trace", skip(server))]
+    pub fn into_raw(self, server: &Server) -> String {
+        let byte = server.crypto.encode(self);
+
+        base64::Engine::encode(
+            &base64::engine::general_purpose::STANDARD_NO_PAD,
+            byte.unwrap(),
+        )
+    }
     #[instrument(level = "debug")]
     pub fn text_search(sort: String) -> Self {
         Self {
+            type_number: E::TYPE_NUMBER,
             sort: SearchDep::Text(sort),
             _entity: PhantomData,
             last_pk: None,
             last_rev: false,
         }
     }
+    #[instrument(skip(self, auth))]
+    async fn text_search_inner(
+        &mut self,
+        limit: u64,
+        offset: u64,
+        rev: bool,
+        auth: &Auth,
+    ) -> Result<Vec<E::Model>, Error> {
+        if let SearchDep::Text(txt) = &self.sort {
+            let mut query = E::query_filter(E::find(), auth)?;
+            let mut condition = E::COL_TEXT[0].like(txt.as_str());
+            for col in E::COL_TEXT[1..].iter() {
+                condition = condition.or(col.like(txt.as_str()));
+            }
+            query = query.filter(condition);
+
+            if let Some(last) = self.last_pk {
+                let paginate = PaginatePkBuilder::default()
+                    .include(self.last_rev ^ rev)
+                    .rev(rev)
+                    .pk(E::COL_ID)
+                    .last(last)
+                    .build()
+                    .unwrap();
+                query = paginate.apply(query);
+            } else {
+                query = order_by_bool(query, E::COL_ID, rev);
+            }
+            query = query.offset(offset).limit(limit);
+            Ok(query
+                .columns(E::COL_SELECT.to_vec())
+                .limit(limit)
+                .offset(offset)
+                .all(DB.get().unwrap())
+                .await?)
+        } else {
+            Err(Error::Unreachable("text_search_inner"))
+        }
+    }
+    #[instrument(skip(self, auth))]
+    async fn column_search_inner(
+        &mut self,
+        limit: u64,
+        offset: u64,
+        rev: bool,
+        auth: &Auth,
+    ) -> Result<Vec<E::Model>, Error> {
+        if let SearchDep::Column(sort, last_val) = &self.sort {
+            let mut query = E::query_filter(E::find(), auth)?;
+            let LastValue(inner_rev, last_val) = last_val;
+            let rev = rev ^ inner_rev;
+
+            let col = E::sort_column(sort);
+
+            if let Some(last) = self.last_pk {
+                PaginateColBuilder::default()
+                    .include(self.last_rev ^ rev)
+                    .rev(rev)
+                    .pk(E::COL_ID)
+                    .col(col)
+                    .last_id(last)
+                    .last_value(last_val)
+                    .build()
+                    .unwrap();
+            } else {
+                query = order_by_bool(query, E::COL_ID, rev);
+                query = order_by_bool(query, col, rev);
+            }
+
+            query = query.offset(offset).limit(limit);
+            let models = query
+                .columns(E::COL_SELECT.to_vec())
+                .limit(limit)
+                .offset(offset)
+                .all(DB.get().unwrap())
+                .await?;
+
+            if let Some(model) = models.last() {
+                self.sort
+                    .update_last_col(LastValue(rev, E::sort_value(model, sort)));
+            }
+
+            Ok(models)
+        } else {
+            Err(Error::Unreachable("column_search_inner"))
+        }
+    }
+
+    fn check_bound(limit: u64, offset: u64) -> Result<(), Error> {
+        if limit > PAGE_MAX_SIZE || offset > PAGE_MAX_OFFSET {
+            Err(Error::NumberTooLarge)
+        } else {
+            Ok(())
+        }
+    }
 }
diff --git a/backend/src/macro_tool.rs b/backend/src/macro_tool.rs
index 9b713162..9ea06f54 100644
--- a/backend/src/macro_tool.rs
+++ b/backend/src/macro_tool.rs
@@ -1,9 +1,9 @@
 #[macro_export]
-#[cfg(feature = "unsecured-log")]
+#[cfg(debug_assertions)]
 macro_rules! report_internal {
     ($level:ident,$pattern:literal) => {{
         tracing::$level!($pattern);
-        tonic::Status::internal($error.to_string())
+        tonic::Status::internal($pattern.to_string())
     }};
     ($level:ident,$pattern:literal, $error:expr) => {{
         tracing::$level!($pattern, $error);
@@ -12,6 +12,7 @@ macro_rules! report_internal {
 }
 
 #[macro_export]
+#[cfg(not(debug_assertions))]
 macro_rules! report_internal {
     ($level:ident,$pattern:literal) => {{
         tracing::$level!($pattern);
diff --git a/backend/src/server.rs b/backend/src/server.rs
index 945dd3e5..2d7edf3d 100644
--- a/backend/src/server.rs
+++ b/backend/src/server.rs
@@ -59,7 +59,6 @@ impl Server {
             _otel_guard: otel_guard,
         })
     }
-    #[cfg(not(feature="testsuit"))]
     pub async fn start(self: Arc<Self>) {
         transport::Server::builder()
             .accept_http1(true)
@@ -76,21 +75,4 @@ impl Server {
             .await
             .unwrap();
     }
-    #[cfg(feature="testsuit")]
-    pub async fn start(self: Arc<Self>) {
-        transport::Server::builder()
-            .accept_http1(true)
-            .max_frame_size(Some(MAX_FRAME_SIZE))
-            .add_service(ProblemSetServer::new(self.clone()))
-            .add_service(EducationSetServer::new(self.clone()))
-            .add_service(UserSetServer::new(self.clone()))
-            .add_service(TokenSetServer::new(self.clone()))
-            .add_service(ContestSetServer::new(self.clone()))
-            .add_service(TestcaseSetServer::new(self.clone()))
-            .add_service(SubmitSetServer::new(self.clone()))
-            .add_service(ChatSetServer::new(self.clone()))
-            .serve(self.config.bind_address.clone().parse().unwrap())
-            .await
-            .unwrap();
-    }
 }
diff --git a/frontend/justfile b/frontend/justfile
index ed7663de..d695bf63 100644
--- a/frontend/justfile
+++ b/frontend/justfile
@@ -11,7 +11,7 @@ release-docker:
     just prepare
     just release
     cp ../proto/*.proto .
-    cp ../cert/*.pem .
+    cp ../Cargo.toml ws-Cargo.toml
     docker build . -t mdoj-gateway
 
 release:
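On the macro_tool.rs hunks above: the `unsecured-log` feature gate is replaced by `cfg(debug_assertions)`, so the verbose flavour of `report_internal!` is compiled only into debug builds while release builds fall back to the opaque one. A standalone sketch of that pattern; the error type and message policy here are illustrative, not the project's `tonic::Status` handling:

```rust
// Two definitions of the same macro, selected by build profile: debug builds
// expose the detail, release builds return a generic message.
#[cfg(debug_assertions)]
macro_rules! internal_error {
    ($msg:literal) => {{
        eprintln!("internal error: {}", $msg);
        format!("internal: {}", $msg) // debug builds leak the detail
    }};
}

#[cfg(not(debug_assertions))]
macro_rules! internal_error {
    ($msg:literal) => {{
        eprintln!("internal error: {}", $msg);
        String::from("internal error") // release builds stay opaque
    }};
}

fn main() {
    let status = internal_error!("database unreachable");
    println!("{status}");
}
```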
diff --git a/judger/.gitignore b/judger/.gitignore
index de74b930..7781b444 100644
--- a/judger/.gitignore
+++ b/judger/.gitignore
@@ -5,4 +5,5 @@
 /config.toml
 /judger.proto
 /config
-/plugins-out
\ No newline at end of file
+/plugins-out
+/ws-Cargo.toml
\ No newline at end of file
diff --git a/judger/Dockerfile b/judger/Dockerfile
index dcb3331a..3bc62968 100644
--- a/judger/Dockerfile
+++ b/judger/Dockerfile
@@ -13,6 +13,9 @@ RUN rustup target add ${ARCH}-unknown-linux-musl
 
 WORKDIR /complier/proto
 COPY judger.proto .
 
+WORKDIR /compiler
+COPY ws-Cargo.toml Cargo.toml
+
 WORKDIR /complier/judger
 COPY . .
diff --git a/judger/justfile b/judger/justfile
index 0d57d943..c70df57e 100644
--- a/judger/justfile
+++ b/judger/justfile
@@ -2,6 +2,7 @@ release-docker:
     MUSL_TARGET=$(uname -m)-linux-musl
     cd plugins/rlua-54 && sh ./build.sh
     cp ../proto/judger.proto .
+    cp ../Cargo.toml ws-Cargo.toml
     docker build . --build-arg ARCH=$(uname -m) -t mdoj-judger
 
 build-plugin:
@@ -10,7 +11,6 @@ build-plugin:
 release-plugin:
     just build-plugin
     cd plugins
     sh export-all.sh
-
 build-nsjail:
     cd nsjail-docker && make nsjail-3.1
diff --git a/proto/backend.proto b/proto/backend.proto
index c315a15d..7ad0cf20 100644
--- a/proto/backend.proto
+++ b/proto/backend.proto
@@ -171,6 +171,7 @@ service SubmitSet {
   rpc Create(CreateSubmitRequest) returns (SubmitId);
   rpc Remove(SubmitId) returns (google.protobuf.Empty);
 
+  // list submits by problem, sorted by primary key (desc)
   rpc ListByProblem(ListByRequest) returns (ListSubmitResponse);
 
   // are not guarantee to yield status
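Stepping back to the pager changes earlier in this patch: with the pager itself deriving `Serialize`/`Deserialize`, the new `into_raw` can hand the whole struct to `server.crypto.encode` and emit unpadded base64, and `from_raw` reverses the path before checking `type_number`. A rough standalone sketch of that shape, minus the project's crypto layer (the field set and the bincode backend are assumptions):

```rust
// Opaque pagination cursor: serde struct -> bincode bytes -> unpadded base64.
use base64::Engine;
use serde::{Deserialize, Serialize};

#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct CursorState {
    type_number: i32,
    last_pk: Option<i32>,
    last_rev: bool,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let state = CursorState { type_number: 4, last_pk: Some(42), last_rev: false };

    // Encode, roughly what into_raw() does (crypto step omitted).
    let bytes = bincode::serialize(&state)?;
    let raw = base64::engine::general_purpose::STANDARD_NO_PAD.encode(&bytes);

    // Decode, roughly what from_raw() does before the type_number check.
    let bytes = base64::engine::general_purpose::STANDARD_NO_PAD.decode(&raw)?;
    let back: CursorState = bincode::deserialize(&bytes)?;
    assert_eq!(state, back);
    println!("cursor: {raw}");
    Ok(())
}
```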