diff --git a/rust/lakesoul-datafusion/src/catalog/mod.rs b/rust/lakesoul-datafusion/src/catalog/mod.rs
index 25d9680ce..2eea3628a 100644
--- a/rust/lakesoul-datafusion/src/catalog/mod.rs
+++ b/rust/lakesoul-datafusion/src/catalog/mod.rs
@@ -2,19 +2,19 @@
 //
 // SPDX-License-Identifier: Apache-2.0
 
-use std::env;
-use std::fmt::Debug;
-use std::sync::Arc;
-use std::time::SystemTime;
 use datafusion::catalog_common::TableReference;
 use lakesoul_io::lakesoul_io_config::{LakeSoulIOConfig, LakeSoulIOConfigBuilder};
 use lakesoul_metadata::MetaDataClientRef;
 use proto::proto::entity::{CommitOp, DataCommitInfo, DataFileOp, FileOp, TableInfo, Uuid};
+use std::env;
+use std::fmt::Debug;
+use std::sync::Arc;
+use std::time::SystemTime;
 
-use crate::lakesoul_table::helpers::create_io_config_builder_from_table_info;
-use crate::serialize::arrow_java::ArrowJavaSchema;
 // use crate::transaction::TransactionMetaInfo;
 use crate::error::{LakeSoulError, Result};
+use crate::lakesoul_table::helpers::create_io_config_builder_from_table_info;
+use crate::serialize::arrow_java::ArrowJavaSchema;
 
 // pub mod lakesoul_sink;
 // pub mod lakesoul_source;
@@ -84,6 +84,7 @@ pub(crate) async fn create_io_config_builder(
         } else {
             vec![]
         };
+        println!("get table {} files: {:?}", table_name, data_files);
         create_io_config_builder_from_table_info(Arc::new(table_info)).map(|builder| builder.with_files(data_files))
     } else {
         Ok(LakeSoulIOConfigBuilder::new())
diff --git a/rust/lakesoul-datafusion/src/test/catalog_tests.rs b/rust/lakesoul-datafusion/src/test/catalog_tests.rs
index 2b4517c72..ccf9578c1 100644
--- a/rust/lakesoul-datafusion/src/test/catalog_tests.rs
+++ b/rust/lakesoul-datafusion/src/test/catalog_tests.rs
@@ -10,8 +10,7 @@ mod catalog_tests {
     use arrow::array::{ArrayRef, Int32Array, RecordBatch};
     use arrow::datatypes::{DataType, Field, Schema, SchemaRef};
     use datafusion::assert_batches_eq;
-    use datafusion::catalog::schema::SchemaProvider;
-    use datafusion::catalog::CatalogProvider;
+    use datafusion::catalog::{CatalogProvider, SchemaProvider};
    use lakesoul_io::lakesoul_io_config::create_session_context;
    use lakesoul_io::lakesoul_io_config::LakeSoulIOConfigBuilder;
    use lakesoul_metadata::{MetaDataClient, MetaDataClientRef};
@@ -178,7 +177,7 @@
            assert_eq!(names.len(), tables.len());
            for name in names {
                assert!(schema.table_exist(&name));
-                assert!(schema.table(&name).await.is_some());
+                assert!(schema.table(&name).await.unwrap().is_some());
                assert!(schema.deregister_table(&name).unwrap().is_some());
            }
        }
@@ -284,9 +283,9 @@
        });
    }
 
-    #[test]
-    fn test_all_cases() {
-        test_catalog_api();
-        test_catalog_sql();
-    }
+    // #[test]
+    // fn test_all_cases() {
+    //     test_catalog_api();
+    //     test_catalog_sql();
+    // }
 }
diff --git a/rust/lakesoul-datafusion/src/test/hash_tests.rs b/rust/lakesoul-datafusion/src/test/hash_tests.rs
index c9fb4c540..cd7d4ccdf 100644
--- a/rust/lakesoul-datafusion/src/test/hash_tests.rs
+++ b/rust/lakesoul-datafusion/src/test/hash_tests.rs
@@ -3,13 +3,14 @@
 // SPDX-License-Identifier: Apache-2.0
 
 mod hash_tests {
+    use chrono::DateTime;
    use lakesoul_io::hash_utils::{HashValue, HASH_SEED};
 
    #[test]
    fn chrono_test() {
        let date = chrono::NaiveDate::parse_from_str("0001-01-01", "%Y-%m-%d").unwrap();
        let datetime = date.and_hms_opt(12, 12, 12).unwrap();
-        let epoch_time = chrono::NaiveDateTime::from_timestamp_millis(0).unwrap();
+        let epoch_time = DateTime::from_timestamp_millis(0).unwrap().naive_utc();
        println!("{}", datetime.signed_duration_since(epoch_time).num_days() as i32);
 
        println!(
@@ -27,7 +28,7 @@ mod hash_tests {
            lakesoul_io::constant::FLINK_TIMESTAMP_FORMAT,
        )
        .unwrap();
-        let epoch_time = chrono::NaiveDateTime::from_timestamp_millis(0).unwrap();
+        let epoch_time = DateTime::from_timestamp_millis(0).unwrap().naive_utc();
        println!("{}", datetime.signed_duration_since(epoch_time).num_days() as i32);
 
        println!(
diff --git a/rust/lakesoul-datafusion/src/test/upsert_tests.rs b/rust/lakesoul-datafusion/src/test/upsert_tests.rs
index 5f91239bb..5888cf0a7 100644
--- a/rust/lakesoul-datafusion/src/test/upsert_tests.rs
+++ b/rust/lakesoul-datafusion/src/test/upsert_tests.rs
@@ -1462,10 +1462,10 @@
            .and_hms_micro_opt(8, 28, 53, 123456)
            .unwrap();
 
-        let val1 = dt1.timestamp_micros();
-        let val2 = dt2.timestamp_micros();
-        let val3 = dt3.timestamp_micros();
-        let val4 = dt4.timestamp_micros();
+        let val1 = dt1.and_utc().timestamp_micros();
+        let val2 = dt2.and_utc().timestamp_micros();
+        let val3 = dt3.and_utc().timestamp_micros();
+        let val4 = dt4.and_utc().timestamp_micros();
 
        let table_name = "test_merge_same_column_with_timestamp_type_i64_time";
        let builder = init_table(
@@ -1519,10 +1519,10 @@
            .and_hms_micro_opt(8, 28, 53, 123456)
            .unwrap();
 
-        let val1 = dt1.timestamp_micros();
-        let _val2 = _dt2.timestamp_micros();
-        let val3 = dt3.timestamp_micros();
-        let val4 = dt4.timestamp_micros();
+        let val1 = dt1.and_utc().timestamp_micros();
+        let _val2 = _dt2.and_utc().timestamp_micros();
+        let val3 = dt3.and_utc().timestamp_micros();
+        let val4 = dt4.and_utc().timestamp_micros();
 
        let table_name = "merge_different_columns_with_timestamp_type_i32_time";
        let builder = init_table(
@@ -3232,10 +3232,10 @@
            .and_hms_micro_opt(8, 28, 53, 123456)
            .unwrap();
 
-        let val1 = dt1.timestamp_micros();
-        let val2 = dt2.timestamp_micros();
-        let val3 = dt3.timestamp_micros();
-        let val4 = dt4.timestamp_micros();
+        let val1 = dt1.and_utc().timestamp_micros();
+        let val2 = dt2.and_utc().timestamp_micros();
+        let val3 = dt3.and_utc().timestamp_micros();
+        let val4 = dt4.and_utc().timestamp_micros();
 
        let table_name = "test_merge_same_column_with_timestamp_type_i64_time";
        let client = Arc::new(MetaDataClient::from_env().await?);
@@ -3307,10 +3307,10 @@
            .and_hms_micro_opt(8, 28, 53, 123456)
            .unwrap();
 
-        let val1 = dt1.timestamp_micros();
-        let _val2 = _dt2.timestamp_micros();
-        let val3 = dt3.timestamp_micros();
-        let val4 = dt4.timestamp_micros();
+        let val1 = dt1.and_utc().timestamp_micros();
+        let _val2 = _dt2.and_utc().timestamp_micros();
+        let val3 = dt3.and_utc().timestamp_micros();
+        let val4 = dt4.and_utc().timestamp_micros();
 
        let table_name = "merge_different_columns_with_timestamp_type_i32_time";
        let client = Arc::new(MetaDataClient::from_env().await?);
diff --git a/rust/lakesoul-io/src/hash_utils/mod.rs b/rust/lakesoul-io/src/hash_utils/mod.rs
index aa34b727d..d35babe71 100644
--- a/rust/lakesoul-io/src/hash_utils/mod.rs
+++ b/rust/lakesoul-io/src/hash_utils/mod.rs
@@ -448,7 +448,6 @@
 
    #[test]
    // Tests actual values of hashes, which are different if forcing collisions
-    #[cfg(not(feature = "force_hash_collisions"))]
    fn create_hashes_for_dict_arrays() {
        let strings = [Some("foo"), None, Some("bar"), Some("foo"), None];
 
@@ -497,7 +496,6 @@
 
    #[test]
    // Tests actual values of hashes, which are different if forcing collisions
-    #[cfg(not(feature = "force_hash_collisions"))]
    fn create_hashes_for_list_arrays() {
        let data = vec![
            Some(vec![Some(0), Some(1), Some(2)]),
@@ -523,7 +521,6 @@
 
    #[test]
    // Tests actual values of hashes, which are different if forcing collisions
-    #[cfg(not(feature = "force_hash_collisions"))]
    fn create_multi_column_hash_for_dict_arrays() {
        let strings1 = [Some("foo"), None, Some("bar")];
        let strings2 = [Some("blarg"), Some("blah"), None];