chore: clippy #1106

Merged 8 commits on Jan 27, 2023
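Most of the changes below apply the inlined format-args style that newer clippy suggests (presumably the `uninlined_format_args` lint): variables are captured by name inside the format string instead of being passed as trailing positional arguments. A minimal sketch of the pattern, not taken from the diff itself (variable names here are made up for illustration):

```rust
fn main() {
    let table_uri = "./tests/data/delta-0.8.0";
    let version: i64 = 7;

    // Old style flagged by clippy: positional `{}` with trailing arguments.
    println!("opening {} at version {}", table_uri, version);

    // Inlined style: identifiers are captured by name in the format string
    // (stabilized in Rust 1.58). The output is identical.
    println!("opening {table_uri} at version {version}");

    // Format specifiers still apply after the captured name, e.g. the
    // zero-padded 20-digit names used for commit and checkpoint files.
    println!("{version:020}.json");
}
```

Only the format strings change, so the PR is a pure style chore with no behavioural difference.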
267 changes: 190 additions & 77 deletions delta-inspect/Cargo.lock

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions delta-inspect/src/main.rs
@@ -66,15 +66,15 @@ async fn main() -> anyhow::Result<()> {
};

if files_matches.is_present("full_uri") {
- table.get_file_uris().for_each(|f| println!("{}", f));
+ table.get_file_uris().for_each(|f| println!("{f}"));
} else {
- table.get_files_iter().for_each(|f| println!("{}", f));
+ table.get_files_iter().for_each(|f| println!("{f}"));
};
}
Some(("info", info_matches)) => {
let table_uri = info_matches.value_of("uri").unwrap();
let table = deltalake::open_table(table_uri).await?;
- println!("{}", table);
+ println!("{table}");
}
Some(("vacuum", vacuum_matches)) => {
let dry_run = !vacuum_matches.is_present("no_dry_run");
@@ -91,9 +91,9 @@ async fn main() -> anyhow::Result<()> {
)
.await?;
if dry_run {
- println!("Files to deleted: {:#?}", files);
+ println!("Files to deleted: {files:#?}");
} else {
- println!("Files deleted: {:#?}", files);
+ println!("Files deleted: {files:#?}");
}
}
_ => unreachable!(),
42 changes: 12 additions & 30 deletions dynamodb_lock/src/lib.rs
@@ -701,29 +701,20 @@ mod tests {

#[test]
fn lock_options_default_test() {
- std::env::set_var(
- dynamo_lock_options::DYNAMO_LOCK_TABLE_NAME,
- "some_table".to_string(),
- );
- std::env::set_var(
- dynamo_lock_options::DYNAMO_LOCK_OWNER_NAME,
- "some_owner".to_string(),
- );
+ std::env::set_var(dynamo_lock_options::DYNAMO_LOCK_TABLE_NAME, "some_table");
+ std::env::set_var(dynamo_lock_options::DYNAMO_LOCK_OWNER_NAME, "some_owner");
std::env::set_var(
dynamo_lock_options::DYNAMO_LOCK_PARTITION_KEY_VALUE,
- "some_pk".to_string(),
- );
- std::env::set_var(
- dynamo_lock_options::DYNAMO_LOCK_LEASE_DURATION,
- "40".to_string(),
+ "some_pk",
);
+ std::env::set_var(dynamo_lock_options::DYNAMO_LOCK_LEASE_DURATION, "40");
std::env::set_var(
dynamo_lock_options::DYNAMO_LOCK_REFRESH_PERIOD_MILLIS,
- "2000".to_string(),
+ "2000",
);
std::env::set_var(
dynamo_lock_options::DYNAMO_LOCK_ADDITIONAL_TIME_TO_WAIT_MILLIS,
- "3000".to_string(),
+ "3000",
);

let options = DynamoDbOptions::default();
@@ -767,29 +758,20 @@ mod tests {

#[test]
fn lock_options_mixed_test() {
- std::env::set_var(
- dynamo_lock_options::DYNAMO_LOCK_TABLE_NAME,
- "some_table".to_string(),
- );
- std::env::set_var(
- dynamo_lock_options::DYNAMO_LOCK_OWNER_NAME,
- "some_owner".to_string(),
- );
+ std::env::set_var(dynamo_lock_options::DYNAMO_LOCK_TABLE_NAME, "some_table");
+ std::env::set_var(dynamo_lock_options::DYNAMO_LOCK_OWNER_NAME, "some_owner");
std::env::set_var(
dynamo_lock_options::DYNAMO_LOCK_PARTITION_KEY_VALUE,
- "some_pk".to_string(),
- );
- std::env::set_var(
- dynamo_lock_options::DYNAMO_LOCK_LEASE_DURATION,
- "40".to_string(),
+ "some_pk",
);
+ std::env::set_var(dynamo_lock_options::DYNAMO_LOCK_LEASE_DURATION, "40");
std::env::set_var(
dynamo_lock_options::DYNAMO_LOCK_REFRESH_PERIOD_MILLIS,
- "2000".to_string(),
+ "2000",
);
std::env::set_var(
dynamo_lock_options::DYNAMO_LOCK_ADDITIONAL_TIME_TO_WAIT_MILLIS,
- "3000".to_string(),
+ "3000",
);

let options = DynamoDbOptions::from_map(hashmap! {
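The dropped `.to_string()` calls in the tests above rely on `std::env::set_var` being generic over `AsRef<OsStr>`, so a `&str` literal is accepted directly and the intermediate `String` allocation is the kind of needless work clippy reports. A small sketch under that reading (the key constant below is a stand-in, not the real `dynamo_lock_options` item):

```rust
use std::env;

// Stand-in for dynamo_lock_options::DYNAMO_LOCK_TABLE_NAME.
const LOCK_TABLE_KEY: &str = "DYNAMO_LOCK_TABLE_NAME";

fn main() {
    // Both calls compile because set_var takes K: AsRef<OsStr>, V: AsRef<OsStr>.
    env::set_var(LOCK_TABLE_KEY, "some_table".to_string()); // allocates a String first
    env::set_var(LOCK_TABLE_KEY, "some_table"); // passes the literal directly

    assert_eq!(env::var(LOCK_TABLE_KEY).unwrap(), "some_table");
}
```

Note that in the Rust 2024 edition `set_var` became an `unsafe fn`; the 2023-era test code in this PR predates that change.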
5 changes: 2 additions & 3 deletions glibc_version/src/lib.rs
@@ -33,7 +33,7 @@ mod imp {
let version_str = ldd_output_to_version_str(output_str)?;

parse_glibc_version(version_str)
- .ok_or_else(|| format!("Invalid version string from ldd output: {}", version_str,))
+ .ok_or_else(|| format!("Invalid version string from ldd output: {version_str}",))
}

fn ldd_output_to_version_str(output_str: &str) -> Result<&str, String> {
@@ -42,8 +42,7 @@ mod imp {
Ok(captures.get(1).unwrap().as_str())
} else {
Err(format!(
- "ERROR: failed to detect glibc version. ldd output: {}",
- output_str,
+ "ERROR: failed to detect glibc version. ldd output: {output_str}",
))
}
}
10 changes: 4 additions & 6 deletions python/src/filesystem.rs
@@ -53,7 +53,7 @@ impl DeltaFileSystemHandler {
fn normalize_path(&self, path: String) -> PyResult<String> {
let suffix = if path.ends_with('/') { "/" } else { "" };
let path = Path::parse(path).unwrap();
- Ok(format!("{}{}", path, suffix))
+ Ok(format!("{path}{suffix}"))
}

fn copy_file(&self, src: String, dest: String) -> PyResult<()> {
@@ -87,7 +87,7 @@ impl DeltaFileSystemHandler {
}

fn equals(&self, other: &DeltaFileSystemHandler) -> PyResult<bool> {
- Ok(format!("{:?}", self) == format!("{:?}", other))
+ Ok(format!("{self:?}") == format!("{other:?}"))
}

fn get_file_info<'py>(&self, paths: Vec<String>, py: Python<'py>) -> PyResult<Vec<&'py PyAny>> {
@@ -313,14 +313,12 @@ impl ObjectInputFile {
fn check_position(&self, position: i64, action: &str) -> PyResult<()> {
if position < 0 {
return Err(PyIOError::new_err(format!(
- "Cannot {} for negative position.",
- action
+ "Cannot {action} for negative position."
)));
}
if position > self.content_length {
return Err(PyIOError::new_err(format!(
- "Cannot {} past end of file.",
- action
+ "Cannot {action} past end of file."
)));
}
Ok(())
7 changes: 2 additions & 5 deletions python/src/lib.rs
@@ -63,7 +63,7 @@ impl PyDeltaTableError {
}

fn from_chrono(err: chrono::ParseError) -> pyo3::PyErr {
- PyDeltaTableError::new_err(format!("Parse date and time string failed: {}", err))
+ PyDeltaTableError::new_err(format!("Parse date and time string failed: {err}"))
}

fn from_checkpoint(err: deltalake::checkpoints::CheckpointError) -> pyo3::PyErr {
@@ -514,10 +514,7 @@ fn filestats_to_expression<'py>(
schema
.field_with_name(column_name)
.map_err(|_| {
- PyDeltaTableError::new_err(format!(
- "Column not found in schema: {}",
- column_name
- ))
+ PyDeltaTableError::new_err(format!("Column not found in schema: {column_name}"))
})?
.data_type()
.clone(),
5 changes: 2 additions & 3 deletions python/src/schema.rs
@@ -135,8 +135,7 @@ impl PrimitiveType {
if data_type.starts_with("decimal") {
if try_parse_decimal_type(&data_type).is_none() {
Err(PyValueError::new_err(format!(
- "invalid decimal type: {}",
- data_type
+ "invalid decimal type: {data_type}"
)))
} else {
Ok(Self {
@@ -685,7 +684,7 @@ impl Field {
.metadata(py)?
.call_method0(py, "__repr__")?
.extract(py)?;
- format!(", metadata={}", metadata_repr)
+ format!(", metadata={metadata_repr}")
};
Ok(format!(
"Field({}, {}, nullable={}{})",
2 changes: 1 addition & 1 deletion rust/examples/read_delta_table.rs
@@ -2,6 +2,6 @@
async fn main() -> Result<(), deltalake::DeltaTableError> {
let table_path = "./tests/data/delta-0.8.0";
let table = deltalake::open_table(table_path).await?;
- println!("{}", table);
+ println!("{table}");
Ok(())
}
2 changes: 1 addition & 1 deletion rust/src/action/mod.rs
@@ -50,7 +50,7 @@ fn decode_path(raw_path: &str) -> Result<String, ActionError> {
percent_decode(raw_path.as_bytes())
.decode_utf8()
.map(|c| c.to_string())
- .map_err(|e| ActionError::InvalidField(format!("Decode path failed for action: {}", e)))
+ .map_err(|e| ActionError::InvalidField(format!("Decode path failed for action: {e}")))
}

/// Struct used to represent minValues and maxValues in add action statistics.
24 changes: 8 additions & 16 deletions rust/src/action/parquet_read/mod.rs
@@ -30,8 +30,7 @@ fn populate_hashmap_with_option_from_parquet_map(

fn gen_action_type_error(action: &str, field: &str, expected_type: &str) -> ActionError {
ActionError::InvalidField(format!(
- "type for {} in {} action should be {}",
- field, action, expected_type
+ "type for {field} in {action} action should be {expected_type}"
))
}

@@ -83,8 +82,7 @@ impl Add {
)
.map_err(|estr| {
ActionError::InvalidField(format!(
- "Invalid partitionValues for add action: {}",
- estr,
+ "Invalid partitionValues for add action: {estr}",
))
})?;
}
@@ -104,8 +102,7 @@ impl Add {
populate_hashmap_with_option_from_parquet_map(&mut tags, tags_map)
.map_err(|estr| {
ActionError::InvalidField(format!(
- "Invalid tags for add action: {}",
- estr,
+ "Invalid tags for add action: {estr}",
))
})?;
re.tags = Some(tags);
@@ -352,8 +349,7 @@ impl MetaData {
)
.map_err(|estr| {
ActionError::InvalidField(format!(
- "Invalid configuration for metaData action: {}",
- estr,
+ "Invalid configuration for metaData action: {estr}",
))
})?;
}
@@ -377,8 +373,7 @@ impl MetaData {
)
.map_err(|estr| {
ActionError::InvalidField(format!(
- "Invalid format.options for metaData action: {}",
- estr,
+ "Invalid format.options for metaData action: {estr}",
))
})?;
re.format.options = options;
@@ -443,8 +438,7 @@ impl Remove {
)
.map_err(|estr| {
ActionError::InvalidField(format!(
- "Invalid partitionValues for remove action: {}",
- estr,
+ "Invalid partitionValues for remove action: {estr}",
))
})?;
re.partition_values = Some(partition_values);
@@ -457,8 +451,7 @@ impl Remove {
populate_hashmap_with_option_from_parquet_map(&mut tags, tags_map)
.map_err(|estr| {
ActionError::InvalidField(format!(
- "Invalid tags for remove action: {}",
- estr,
+ "Invalid tags for remove action: {estr}",
))
})?;
re.tags = Some(tags);
@@ -598,8 +591,7 @@ impl Action {
"cdc" => Action::cdc(AddCDCFile::from_parquet_record(col_data)?),
name => {
return Err(ActionError::InvalidField(format!(
- "Unexpected action from checkpoint: {}",
- name,
+ "Unexpected action from checkpoint: {name}",
)));
}
})
2 changes: 1 addition & 1 deletion rust/src/checkpoints.rs
@@ -140,7 +140,7 @@ async fn create_checkpoint_for(
let size = parquet_bytes.len() as i64;
let checkpoint = CheckPoint::new(version, size, None);

- let file_name = format!("{:020}.checkpoint.parquet", version);
+ let file_name = format!("{version:020}.checkpoint.parquet");
let checkpoint_path = storage.log_path().child(file_name);

debug!("Writing checkpoint to {:?}.", checkpoint_path);
8 changes: 4 additions & 4 deletions rust/src/delta.rs
@@ -551,7 +551,7 @@ impl DeltaTable {

/// Return the uri of commit version.
pub fn commit_uri_from_version(&self, version: DeltaDataTypeVersion) -> Path {
- let version = format!("{:020}.json", version);
+ let version = format!("{version:020}.json");
Path::from_iter(["_delta_log", &version])
}

@@ -563,7 +563,7 @@ impl DeltaTable {

match check_point.parts {
None => {
- let path = log_path.child(&*format!("{}.checkpoint.parquet", checkpoint_prefix));
+ let path = log_path.child(&*format!("{checkpoint_prefix}.checkpoint.parquet"));
checkpoint_data_paths.push(path);
}
Some(parts) => {
@@ -1221,7 +1221,7 @@ impl fmt::Display for DeltaTable {
writeln!(f, "\tversion: {}", self.version())?;
match self.state.current_metadata() {
Some(metadata) => {
- writeln!(f, "\tmetadata: {}", metadata)?;
+ writeln!(f, "\tmetadata: {metadata}")?;
}
None => {
writeln!(f, "\tmetadata: None")?;
@@ -1383,7 +1383,7 @@ impl<'a> DeltaTransaction<'a> {
// Write delta log entry as temporary file to storage. For the actual commit,
// the temporary file is moved (atomic rename) to the delta log folder within `commit` function.
let token = Uuid::new_v4().to_string();
- let file_name = format!("_commit_{}.json.tmp", token);
+ let file_name = format!("_commit_{token}.json.tmp");
let path = Path::from_iter(["_delta_log", &file_name]);

self.delta_table.storage.put(&path, log_entry).await?;