Skip to content

Commit

Permalink
fix annoying warnings without RUSTFLAGS
Browse files Browse the repository at this point in the history
Signed-off-by: mag1c1an1 <mag1cian@icloud.com>
  • Loading branch information
mag1c1an1 committed Jan 23, 2024
1 parent df252f0 commit 4fcdb69
Show file tree
Hide file tree
Showing 7 changed files with 15 additions and 15 deletions.
2 changes: 1 addition & 1 deletion rust/lakesoul-datafusion/src/catalog/lakesoul_catalog.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

use crate::catalog::LakeSoulNamespace;
use datafusion::catalog::schema::SchemaProvider;
use datafusion::catalog::{CatalogProvider, MemoryCatalogProvider};
use datafusion::catalog::CatalogProvider;
use datafusion::prelude::SessionContext;
use lakesoul_metadata::MetaDataClientRef;
use std::any::Any;
Expand Down
4 changes: 1 addition & 3 deletions rust/lakesoul-datafusion/src/catalog/lakesoul_namespace.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,7 @@ use datafusion::prelude::SessionContext;
use lakesoul_io::datasource::file_format::LakeSoulParquetFormat;
use lakesoul_io::datasource::listing::LakeSoulListingTable;
use lakesoul_metadata::MetaDataClientRef;
use proto::proto::entity::Namespace;
use std::any::Any;
use std::collections::HashSet;
use std::fmt::{Debug, Formatter};
use std::sync::Arc;
use tokio::runtime::Handle;
Expand Down Expand Up @@ -140,7 +138,7 @@ impl SchemaProvider for LakeSoulNamespace {

fn table_exist(&self, name: &str) -> bool {
// table name is the primary key of `table_name_id`
self.table_names().into_iter().find(|s| s == name).is_some()
self.table_names().into_iter().any(|s| s == name)
}
}

Expand Down
3 changes: 3 additions & 0 deletions rust/lakesoul-datafusion/src/catalog/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,9 @@ use crate::error::Result;
// pub mod lakesoul_sink;
// pub mod lakesoul_source;
mod lakesoul_catalog;
// These items are used in catalog_tests, but rustc still reports unused_imports;
// this appears to be a lint false positive. Gating the re-export on #[cfg(test)] is a workaround.
#[cfg(test)]
pub use lakesoul_catalog::*;
mod lakesoul_namespace;
pub use lakesoul_namespace::*;
Expand Down
4 changes: 3 additions & 1 deletion rust/lakesoul-datafusion/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
// SPDX-FileCopyrightText: 2023 LakeSoul Contributors
//
// SPDX-License-Identifier: Apache-2.0

#![allow(dead_code)]
#![allow(clippy::type_complexity)]
// TODO: remove the attributes above once development is finished.
extern crate core;

mod catalog;
Expand Down
13 changes: 5 additions & 8 deletions rust/lakesoul-datafusion/src/test/catalog_tests.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,20 @@
//
// SPDX-License-Identifier: Apache-2.0

#[cfg(test)]
mod catalog_tests {
use crate::catalog::LakeSoulTableProperty;
use crate::catalog::{LakeSoulCatalog, LakeSoulNamespace};
use crate::catalog::{LakeSoulCatalog, LakeSoulNamespace, LakeSoulTableProperty};
use crate::lakesoul_table::LakeSoulTable;
use crate::serialize::arrow_java::ArrowJavaSchema;
use arrow::array::{ArrayRef, Int32Array, RecordBatch};
use arrow::datatypes::{DataType, Field, Schema, SchemaRef};
use datafusion::assert_batches_eq;
use datafusion::catalog::schema::SchemaProvider;
use datafusion::catalog::{CatalogList, CatalogProvider};
use lakesoul_io::lakesoul_io_config::create_session_context;
use lakesoul_io::lakesoul_io_config::{LakeSoulIOConfig, LakeSoulIOConfigBuilder};
use lakesoul_io::lakesoul_io_config::LakeSoulIOConfigBuilder;
use lakesoul_metadata::{MetaDataClient, MetaDataClientRef};
use proto::proto::entity::{Namespace, TableInfo, TableNameId};
use rand::distributions::{Alphanumeric, Standard};
use proto::proto::entity::{Namespace, TableInfo};
use rand::distributions::Alphanumeric;
use rand::{thread_rng, Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;
use std::env;
Expand Down Expand Up @@ -100,13 +99,11 @@ mod catalog_tests {
let rt = Runtime::new().unwrap();
rt.block_on(async {
let client = Arc::new(MetaDataClient::from_env().await.unwrap());
let mut config = LakeSoulIOConfigBuilder::new().build();
// insert data;
let batch = create_batch_i32(
vec!["range", "hash", "value"],
vec![&[20201101, 20201101, 20201101, 20201102], &[1, 2, 3, 4], &[1, 2, 3, 4]],
);
let table_name = "test_table_01";
let pks = vec!["range".to_string(), "hash".to_string()];
let schema = SchemaRef::new(Schema::new(
["range", "hash", "value"]
Expand Down
2 changes: 1 addition & 1 deletion rust/lakesoul-io/src/datasource/parquet_source.rs
Original file line number Diff line number Diff line change
Expand Up @@ -346,7 +346,7 @@ pub fn merge_stream(
let merge_stream = SortedStreamMerger::new_from_streams(
streams,
merge_schema,
primary_keys.iter().map(String::clone).collect(),
primary_keys.iter().cloned().collect(),
batch_size,
merge_ops,
)
Expand Down
2 changes: 1 addition & 1 deletion rust/lakesoul-io/src/filter/parser.rs
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ fn qualified_expr(expr_str: &str, schema: SchemaRef) -> Option<(Expr, Arc<Field>
let mut expr: Option<(Expr, Arc<Field>)> = None;
let mut root = "".to_owned();
let mut sub_fields: &Fields = schema.fields();
for expr_substr in expr_str.split('.').into_iter() {
for expr_substr in expr_str.split('.') {
root = if root.is_empty() {
expr_substr.to_owned()
} else {
Expand Down

0 comments on commit 4fcdb69

Please sign in to comment.