Enable clone_on_ref_ptr clippy lint on expr crate #11238

Merged: 1 commit, Jul 4, 2024
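For context on the change itself: the `clippy::clone_on_ref_ptr` lint flags `.clone()` calls made directly on reference-counted pointers (`Arc`, `Rc`) and asks for the explicit `Arc::clone(&x)` / `Rc::clone(&x)` form, so that cheap pointer clones are not mistaken for deep copies. The sketch below is a minimal illustration of the pattern this PR applies across the `expr` crate; the `Plan` type is hypothetical and not part of DataFusion.

```rust
// Enabled at the crate root, as this PR does for datafusion/expr:
#![deny(clippy::clone_on_ref_ptr)]

use std::sync::Arc;

// Hypothetical payload type, used only to illustrate the lint.
struct Plan {
    name: String,
}

fn main() {
    let plan = Arc::new(Plan {
        name: "scan".to_string(),
    });

    // Denied by the lint: `plan.clone()` reads like a deep copy of `Plan`.
    // let other = plan.clone();

    // Preferred form: makes explicit that only the `Arc` is cloned,
    // i.e. a reference-count bump, not a copy of the underlying `Plan`.
    let other = Arc::clone(&plan);

    assert_eq!(Arc::strong_count(&plan), 2);
    println!("{} / {}", plan.name, other.name);
}
```

Both forms behave identically at runtime; the explicit form only makes the cheap pointer clone visible at the call site, which is the motivation tracked in https://github.com/apache/datafusion/issues/11143.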
2 changes: 1 addition & 1 deletion datafusion/expr/src/expr_rewriter/mod.rs
@@ -214,7 +214,7 @@ pub fn coerce_plan_expr_for_schema(
LogicalPlan::Projection(Projection { expr, input, .. }) => {
let new_exprs =
coerce_exprs_for_schema(expr.clone(), input.schema(), schema)?;
let projection = Projection::try_new(new_exprs, input.clone())?;
let projection = Projection::try_new(new_exprs, Arc::clone(input))?;
Ok(LogicalPlan::Projection(projection))
}
_ => {
2 changes: 1 addition & 1 deletion datafusion/expr/src/expr_schema.rs
@@ -520,7 +520,7 @@ pub fn cast_subquery(subquery: Subquery, cast_to_type: &DataType) -> Result<Subq
.cast_to(cast_to_type, projection.input.schema())?;
LogicalPlan::Projection(Projection::try_new(
vec![cast_expr],
projection.input.clone(),
Arc::clone(&projection.input),
)?)
}
_ => {
2 changes: 2 additions & 0 deletions datafusion/expr/src/lib.rs
@@ -14,6 +14,8 @@
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
// Make cheap clones clear: https://github.com/apache/datafusion/issues/11143
#![deny(clippy::clone_on_ref_ptr)]

//! [DataFusion](https://github.com/apache/datafusion)
//! is an extensible query execution framework that uses
23 changes: 15 additions & 8 deletions datafusion/expr/src/logical_plan/builder.rs
@@ -1223,17 +1223,17 @@ pub fn build_join_schema(
JoinType::Inner => {
// left then right
let left_fields = left_fields
.map(|(q, f)| (q.cloned(), f.clone()))
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect::<Vec<_>>();
let right_fields = right_fields
.map(|(q, f)| (q.cloned(), f.clone()))
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect::<Vec<_>>();
left_fields.into_iter().chain(right_fields).collect()
}
JoinType::Left => {
// left then right, right set to nullable in case of not matched scenario
let left_fields = left_fields
.map(|(q, f)| (q.cloned(), f.clone()))
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect::<Vec<_>>();
left_fields
.into_iter()
@@ -1243,7 +1243,7 @@
JoinType::Right => {
// left then right, left set to nullable in case of not matched scenario
let right_fields = right_fields
.map(|(q, f)| (q.cloned(), f.clone()))
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect::<Vec<_>>();
nullify_fields(left_fields)
.into_iter()
@@ -1259,11 +1259,15 @@
}
JoinType::LeftSemi | JoinType::LeftAnti => {
// Only use the left side for the schema
left_fields.map(|(q, f)| (q.cloned(), f.clone())).collect()
left_fields
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect()
}
JoinType::RightSemi | JoinType::RightAnti => {
// Only use the right side for the schema
right_fields.map(|(q, f)| (q.cloned(), f.clone())).collect()
right_fields
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect()
}
};
let func_dependencies = left.functional_dependencies().join(
@@ -1578,7 +1582,7 @@ impl TableSource for LogicalTableSource {
}

fn schema(&self) -> SchemaRef {
self.table_schema.clone()
Arc::clone(&self.table_schema)
}

fn supports_filters_pushdown(
@@ -1692,7 +1696,10 @@ pub fn unnest_with_options(
}
None => {
dependency_indices.push(index);
Ok(vec![(original_qualifier.cloned(), original_field.clone())])
Ok(vec![(
original_qualifier.cloned(),
Arc::clone(original_field),
)])
}
}
})
30 changes: 15 additions & 15 deletions datafusion/expr/src/logical_plan/plan.rs
@@ -762,9 +762,9 @@ impl LogicalPlan {
// If inputs are not pruned do not change schema
// TODO this seems wrong (shouldn't we always use the schema of the input?)
let schema = if schema.fields().len() == input_schema.fields().len() {
schema.clone()
Arc::clone(&schema)
} else {
input_schema.clone()
Arc::clone(input_schema)
};
Ok(LogicalPlan::Union(Union { inputs, schema }))
}
@@ -850,7 +850,7 @@ impl LogicalPlan {
..
}) => Ok(LogicalPlan::Dml(DmlStatement::new(
table_name.clone(),
table_schema.clone(),
Arc::clone(table_schema),
op.clone(),
Arc::new(inputs.swap_remove(0)),
))),
@@ -863,13 +863,13 @@
}) => Ok(LogicalPlan::Copy(CopyTo {
input: Arc::new(inputs.swap_remove(0)),
output_url: output_url.clone(),
file_type: file_type.clone(),
file_type: Arc::clone(file_type),
options: options.clone(),
partition_by: partition_by.clone(),
})),
LogicalPlan::Values(Values { schema, .. }) => {
Ok(LogicalPlan::Values(Values {
schema: schema.clone(),
schema: Arc::clone(schema),
values: expr
.chunks_exact(schema.fields().len())
.map(|s| s.to_vec())
@@ -1028,9 +1028,9 @@ impl LogicalPlan {
let input_schema = inputs[0].schema();
// If inputs are not pruned do not change schema.
let schema = if schema.fields().len() == input_schema.fields().len() {
schema.clone()
Arc::clone(schema)
} else {
input_schema.clone()
Arc::clone(input_schema)
};
Ok(LogicalPlan::Union(Union {
inputs: inputs.into_iter().map(Arc::new).collect(),
@@ -1074,7 +1074,7 @@
assert_eq!(inputs.len(), 1);
Ok(LogicalPlan::Analyze(Analyze {
verbose: a.verbose,
schema: a.schema.clone(),
schema: Arc::clone(&a.schema),
input: Arc::new(inputs.swap_remove(0)),
}))
}
@@ -1088,7 +1088,7 @@
verbose: e.verbose,
plan: Arc::new(inputs.swap_remove(0)),
stringified_plans: e.stringified_plans.clone(),
schema: e.schema.clone(),
schema: Arc::clone(&e.schema),
logical_optimization_succeeded: e.logical_optimization_succeeded,
}))
}
@@ -1370,7 +1370,7 @@ impl LogicalPlan {
param_values: &ParamValues,
) -> Result<LogicalPlan> {
self.transform_up_with_subqueries(|plan| {
let schema = plan.schema().clone();
let schema = Arc::clone(plan.schema());
plan.map_expressions(|e| {
e.infer_placeholder_types(&schema)?.transform_up(|e| {
if let Expr::Placeholder(Placeholder { id, .. }) = e {
@@ -2260,7 +2260,7 @@ impl Window {
let fields: Vec<(Option<TableReference>, Arc<Field>)> = input
.schema()
.iter()
.map(|(q, f)| (q.cloned(), f.clone()))
.map(|(q, f)| (q.cloned(), Arc::clone(f)))
.collect();
let input_len = fields.len();
let mut window_fields = fields;
@@ -3385,7 +3385,7 @@ digraph {
vec![col("a")],
Arc::new(LogicalPlan::EmptyRelation(EmptyRelation {
produce_one_row: false,
schema: empty_schema.clone(),
schema: Arc::clone(&empty_schema),
})),
empty_schema,
);
@@ -3500,9 +3500,9 @@
);
let scan = Arc::new(LogicalPlan::TableScan(TableScan {
table_name: TableReference::bare("tab"),
source: source.clone(),
source: Arc::clone(&source) as Arc<dyn TableSource>,
projection: None,
projected_schema: schema.clone(),
projected_schema: Arc::clone(&schema),
filters: vec![],
fetch: None,
}));
@@ -3532,7 +3532,7 @@
table_name: TableReference::bare("tab"),
source,
projection: None,
projected_schema: unique_schema.clone(),
projected_schema: Arc::clone(&unique_schema),
filters: vec![],
fetch: None,
}));
8 changes: 4 additions & 4 deletions datafusion/expr/src/type_coercion/binary.rs
@@ -1048,16 +1048,16 @@ fn temporal_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Option<DataTyp
match (lhs_tz.as_ref(), rhs_tz.as_ref()) {
// UTC and "+00:00" are the same by definition. Most other timezones
// do not have a 1-1 mapping between timezone and an offset from UTC
("UTC", "+00:00") | ("+00:00", "UTC") => Some(lhs_tz.clone()),
(lhs, rhs) if lhs == rhs => Some(lhs_tz.clone()),
("UTC", "+00:00") | ("+00:00", "UTC") => Some(Arc::clone(lhs_tz)),
(lhs, rhs) if lhs == rhs => Some(Arc::clone(lhs_tz)),
// can't cast across timezones
_ => {
return None;
}
}
}
(Some(lhs_tz), None) => Some(lhs_tz.clone()),
(None, Some(rhs_tz)) => Some(rhs_tz.clone()),
(Some(lhs_tz), None) => Some(Arc::clone(lhs_tz)),
(None, Some(rhs_tz)) => Some(Arc::clone(rhs_tz)),
(None, None) => None,
};

12 changes: 6 additions & 6 deletions datafusion/expr/src/type_coercion/functions.rs
@@ -598,7 +598,7 @@ fn coerced_from<'a>(
Arc::new(f_into.as_ref().clone().with_data_type(data_type));
Some(FixedSizeList(new_field, *size_from))
}
Some(_) => Some(FixedSizeList(f_into.clone(), *size_from)),
Some(_) => Some(FixedSizeList(Arc::clone(f_into), *size_from)),
_ => None,
}
}
Expand All @@ -607,7 +607,7 @@ fn coerced_from<'a>(
(Timestamp(unit, Some(tz)), _) if tz.as_ref() == TIMEZONE_WILDCARD => {
match type_from {
Timestamp(_, Some(from_tz)) => {
Some(Timestamp(unit.clone(), Some(from_tz.clone())))
Some(Timestamp(unit.clone(), Some(Arc::clone(from_tz))))
}
Null | Date32 | Utf8 | LargeUtf8 | Timestamp(_, None) => {
// In the absence of any other information assume the time zone is "+00" (UTC).
@@ -715,12 +715,12 @@ mod tests {
fn test_fixed_list_wildcard_coerce() -> Result<()> {
let inner = Arc::new(Field::new("item", DataType::Int32, false));
let current_types = vec![
DataType::FixedSizeList(inner.clone(), 2), // able to coerce for any size
DataType::FixedSizeList(Arc::clone(&inner), 2), // able to coerce for any size
];

let signature = Signature::exact(
vec![DataType::FixedSizeList(
inner.clone(),
Arc::clone(&inner),
FIXED_SIZE_LIST_WILDCARD,
)],
Volatility::Stable,
@@ -731,15 +731,15 @@

// make sure it can't coerce to a different size
let signature = Signature::exact(
vec![DataType::FixedSizeList(inner.clone(), 3)],
vec![DataType::FixedSizeList(Arc::clone(&inner), 3)],
Volatility::Stable,
);
let coerced_data_types = data_types(&current_types, &signature);
assert!(coerced_data_types.is_err());

// make sure it works with the same type.
let signature = Signature::exact(
vec![DataType::FixedSizeList(inner.clone(), 2)],
vec![DataType::FixedSizeList(Arc::clone(&inner), 2)],
Volatility::Stable,
);
let coerced_data_types = data_types(&current_types, &signature).unwrap();
9 changes: 6 additions & 3 deletions datafusion/expr/src/udaf.rs
@@ -106,8 +106,8 @@ impl AggregateUDF {
Self::new_from_impl(AggregateUDFLegacyWrapper {
name: name.to_owned(),
signature: signature.clone(),
return_type: return_type.clone(),
accumulator: accumulator.clone(),
return_type: Arc::clone(return_type),
accumulator: Arc::clone(accumulator),
})
}

@@ -133,7 +133,10 @@
///
/// If you implement [`AggregateUDFImpl`] directly you should return aliases directly.
pub fn with_aliases(self, aliases: impl IntoIterator<Item = &'static str>) -> Self {
Self::new_from_impl(AliasedAggregateUDFImpl::new(self.inner.clone(), aliases))
Self::new_from_impl(AliasedAggregateUDFImpl::new(
Arc::clone(&self.inner),
aliases,
))
}

/// creates an [`Expr`] that calls the aggregate function.
8 changes: 4 additions & 4 deletions datafusion/expr/src/udf.rs
@@ -87,8 +87,8 @@ impl ScalarUDF {
Self::new_from_impl(ScalarUdfLegacyWrapper {
name: name.to_owned(),
signature: signature.clone(),
return_type: return_type.clone(),
fun: fun.clone(),
return_type: Arc::clone(return_type),
fun: Arc::clone(fun),
})
}

@@ -114,7 +114,7 @@
///
/// If you implement [`ScalarUDFImpl`] directly you should return aliases directly.
pub fn with_aliases(self, aliases: impl IntoIterator<Item = &'static str>) -> Self {
Self::new_from_impl(AliasedScalarUDFImpl::new(self.inner.clone(), aliases))
Self::new_from_impl(AliasedScalarUDFImpl::new(Arc::clone(&self.inner), aliases))
}

/// Returns a [`Expr`] logical expression to call this UDF with specified
@@ -199,7 +199,7 @@
/// Returns a `ScalarFunctionImplementation` that can invoke the function
/// during execution
pub fn fun(&self) -> ScalarFunctionImplementation {
let captured = self.inner.clone();
let captured = Arc::clone(&self.inner);
Arc::new(move |args| captured.invoke(args))
}

6 changes: 3 additions & 3 deletions datafusion/expr/src/udwf.rs
@@ -90,8 +90,8 @@ impl WindowUDF {
Self::new_from_impl(WindowUDFLegacyWrapper {
name: name.to_owned(),
signature: signature.clone(),
return_type: return_type.clone(),
partition_evaluator_factory: partition_evaluator_factory.clone(),
return_type: Arc::clone(return_type),
partition_evaluator_factory: Arc::clone(partition_evaluator_factory),
})
}

@@ -117,7 +117,7 @@
///
/// If you implement [`WindowUDFImpl`] directly you should return aliases directly.
pub fn with_aliases(self, aliases: impl IntoIterator<Item = &'static str>) -> Self {
Self::new_from_impl(AliasedWindowUDFImpl::new(self.inner.clone(), aliases))
Self::new_from_impl(AliasedWindowUDFImpl::new(Arc::clone(&self.inner), aliases))
}

/// creates a [`Expr`] that calls the window function given
2 changes: 1 addition & 1 deletion datafusion/expr/src/utils.rs
@@ -1199,7 +1199,7 @@ pub fn only_or_err<T>(slice: &[T]) -> Result<&T> {
/// merge inputs schema into a single schema.
pub fn merge_schema(inputs: Vec<&LogicalPlan>) -> DFSchema {
if inputs.len() == 1 {
inputs[0].schema().clone().as_ref().clone()
inputs[0].schema().as_ref().clone()
} else {
inputs.iter().map(|input| input.schema()).fold(
DFSchema::empty(),