diff --git a/rust/lakesoul-datafusion/src/catalog/lakesoul_catalog.rs b/rust/lakesoul-datafusion/src/catalog/lakesoul_catalog.rs index 7d6d4d2dc..b0fa12b5f 100644 --- a/rust/lakesoul-datafusion/src/catalog/lakesoul_catalog.rs +++ b/rust/lakesoul-datafusion/src/catalog/lakesoul_catalog.rs @@ -4,7 +4,7 @@ use crate::catalog::LakeSoulNamespace; use datafusion::catalog::schema::SchemaProvider; -use datafusion::catalog::{CatalogProvider, MemoryCatalogProvider}; +use datafusion::catalog::CatalogProvider; use datafusion::prelude::SessionContext; use lakesoul_metadata::MetaDataClientRef; use std::any::Any; diff --git a/rust/lakesoul-datafusion/src/catalog/lakesoul_namespace.rs b/rust/lakesoul-datafusion/src/catalog/lakesoul_namespace.rs index fac3a2571..4e38883ed 100644 --- a/rust/lakesoul-datafusion/src/catalog/lakesoul_namespace.rs +++ b/rust/lakesoul-datafusion/src/catalog/lakesoul_namespace.rs @@ -11,9 +11,7 @@ use datafusion::prelude::SessionContext; use lakesoul_io::datasource::file_format::LakeSoulParquetFormat; use lakesoul_io::datasource::listing::LakeSoulListingTable; use lakesoul_metadata::MetaDataClientRef; -use proto::proto::entity::Namespace; use std::any::Any; -use std::collections::HashSet; use std::fmt::{Debug, Formatter}; use std::sync::Arc; use tokio::runtime::Handle; @@ -140,7 +138,7 @@ impl SchemaProvider for LakeSoulNamespace { fn table_exist(&self, name: &str) -> bool { // table name is primary key for `table_name_id` - self.table_names().into_iter().find(|s| s == name).is_some() + self.table_names().into_iter().any(|s| s == name) } } diff --git a/rust/lakesoul-datafusion/src/catalog/mod.rs b/rust/lakesoul-datafusion/src/catalog/mod.rs index 9cd4b77ec..ff255e1f3 100644 --- a/rust/lakesoul-datafusion/src/catalog/mod.rs +++ b/rust/lakesoul-datafusion/src/catalog/mod.rs @@ -20,6 +20,9 @@ use crate::error::Result; // pub mod lakesoul_sink; // pub mod lakesoul_source; mod lakesoul_catalog; +// This re-export is used in catalog_tests, but rustc still flags it as unused_imports;
+// that looks like a lint false positive, so gate the re-export behind #[cfg(test)] as a workaround. +#[cfg(test)] pub use lakesoul_catalog::*; mod lakesoul_namespace; pub use lakesoul_namespace::*; diff --git a/rust/lakesoul-datafusion/src/lib.rs b/rust/lakesoul-datafusion/src/lib.rs index e75f1c301..654a205ac 100644 --- a/rust/lakesoul-datafusion/src/lib.rs +++ b/rust/lakesoul-datafusion/src/lib.rs @@ -1,7 +1,9 @@ // SPDX-FileCopyrightText: 2023 LakeSoul Contributors // // SPDX-License-Identifier: Apache-2.0 - +#![allow(dead_code)] +#![allow(clippy::type_complexity)] +// TODO: remove the two allow attributes above once development is finished. extern crate core; mod catalog; diff --git a/rust/lakesoul-datafusion/src/test/catalog_tests.rs b/rust/lakesoul-datafusion/src/test/catalog_tests.rs index 47f18f3c4..bb29a37db 100644 --- a/rust/lakesoul-datafusion/src/test/catalog_tests.rs +++ b/rust/lakesoul-datafusion/src/test/catalog_tests.rs @@ -2,21 +2,20 @@ // // SPDX-License-Identifier: Apache-2.0 +#[cfg(test)] mod catalog_tests { - use crate::catalog::LakeSoulTableProperty; - use crate::catalog::{LakeSoulCatalog, LakeSoulNamespace}; + use crate::catalog::{LakeSoulCatalog, LakeSoulNamespace, LakeSoulTableProperty}; use crate::lakesoul_table::LakeSoulTable; use crate::serialize::arrow_java::ArrowJavaSchema; use arrow::array::{ArrayRef, Int32Array, RecordBatch}; use arrow::datatypes::{DataType, Field, Schema, SchemaRef}; use datafusion::assert_batches_eq; use datafusion::catalog::schema::SchemaProvider; - use datafusion::catalog::{CatalogList, CatalogProvider}; use lakesoul_io::lakesoul_io_config::create_session_context; - use lakesoul_io::lakesoul_io_config::{LakeSoulIOConfig, LakeSoulIOConfigBuilder}; + use lakesoul_io::lakesoul_io_config::LakeSoulIOConfigBuilder; use lakesoul_metadata::{MetaDataClient, MetaDataClientRef}; - use proto::proto::entity::{Namespace, TableInfo, TableNameId}; - use rand::distributions::{Alphanumeric, Standard}; + use proto::proto::entity::{Namespace, TableInfo}; + use rand::distributions::Alphanumeric; use rand::{thread_rng, 
Rng, SeedableRng}; use rand_chacha::ChaCha8Rng; use std::env; @@ -100,13 +99,11 @@ mod catalog_tests { let rt = Runtime::new().unwrap(); rt.block_on(async { let client = Arc::new(MetaDataClient::from_env().await.unwrap()); - let mut config = LakeSoulIOConfigBuilder::new().build(); // insert data; let batch = create_batch_i32( vec!["range", "hash", "value"], vec![&[20201101, 20201101, 20201101, 20201102], &[1, 2, 3, 4], &[1, 2, 3, 4]], ); - let table_name = "test_table_01"; let pks = vec!["range".to_string(), "hash".to_string()]; let schema = SchemaRef::new(Schema::new( ["range", "hash", "value"] diff --git a/rust/lakesoul-io/src/datasource/parquet_source.rs b/rust/lakesoul-io/src/datasource/parquet_source.rs index 7c427a6b4..77fcc8062 100644 --- a/rust/lakesoul-io/src/datasource/parquet_source.rs +++ b/rust/lakesoul-io/src/datasource/parquet_source.rs @@ -346,7 +346,7 @@ pub fn merge_stream( let merge_stream = SortedStreamMerger::new_from_streams( streams, merge_schema, - primary_keys.iter().map(String::clone).collect(), + primary_keys.iter().cloned().collect(), batch_size, merge_ops, ) diff --git a/rust/lakesoul-io/src/filter/parser.rs b/rust/lakesoul-io/src/filter/parser.rs index 57661fbb8..732381179 100644 --- a/rust/lakesoul-io/src/filter/parser.rs +++ b/rust/lakesoul-io/src/filter/parser.rs @@ -163,7 +163,7 @@ fn qualified_expr(expr_str: &str, schema: SchemaRef) -> Option<(Expr, Arc let mut expr: Option<(Expr, Arc)> = None; let mut root = "".to_owned(); let mut sub_fields: &Fields = schema.fields(); - for expr_substr in expr_str.split('.').into_iter() { + for expr_substr in expr_str.split('.') { root = if root.is_empty() { expr_substr.to_owned() } else {