Skip to content

Commit

Permalink
Minor: clean up error entries (#13521)
Browse the repository at this point in the history
* Minor: clean up error entries

* Minor: clean up error entries
  • Loading branch information
comphead authored Nov 22, 2024
1 parent 4e5e765 commit c0ca4b4
Show file tree
Hide file tree
Showing 9 changed files with 30 additions and 38 deletions.
27 changes: 13 additions & 14 deletions datafusion/core/src/dataframe/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -50,7 +50,8 @@ use arrow::datatypes::{DataType, Field};
use arrow_schema::{Schema, SchemaRef};
use datafusion_common::config::{CsvOptions, JsonOptions};
use datafusion_common::{
plan_err, Column, DFSchema, DataFusionError, ParamValues, SchemaError, UnnestOptions,
exec_err, not_impl_err, plan_err, Column, DFSchema, DataFusionError, ParamValues,
SchemaError, UnnestOptions,
};
use datafusion_expr::dml::InsertOp;
use datafusion_expr::{case, is_null, lit, SortExpr};
Expand Down Expand Up @@ -869,16 +870,16 @@ impl DataFrame {
for result in describe_record_batch.iter() {
let array_ref = match result {
Ok(df) => {
let batchs = df.clone().collect().await;
match batchs {
Ok(batchs)
if batchs.len() == 1
&& batchs[0]
let batches = df.clone().collect().await;
match batches {
Ok(batches)
if batches.len() == 1
&& batches[0]
.column_by_name(field.name())
.is_some() =>
{
let column =
batchs[0].column_by_name(field.name()).unwrap();
batches[0].column_by_name(field.name()).unwrap();

if column.data_type().is_null() {
Arc::new(StringArray::from(vec!["null"]))
Expand All @@ -901,9 +902,7 @@ impl DataFrame {
{
Arc::new(StringArray::from(vec!["null"]))
}
Err(other_err) => {
panic!("{other_err}")
}
Err(e) => return exec_err!("{}", e),
};
array_datas.push(array_ref);
}
Expand Down Expand Up @@ -1564,10 +1563,10 @@ impl DataFrame {
writer_options: Option<CsvOptions>,
) -> Result<Vec<RecordBatch>, DataFusionError> {
if options.insert_op != InsertOp::Append {
return Err(DataFusionError::NotImplemented(format!(
return not_impl_err!(
"{} is not implemented for DataFrame::write_csv.",
options.insert_op
)));
);
}

let format = if let Some(csv_opts) = writer_options {
Expand Down Expand Up @@ -1625,10 +1624,10 @@ impl DataFrame {
writer_options: Option<JsonOptions>,
) -> Result<Vec<RecordBatch>, DataFusionError> {
if options.insert_op != InsertOp::Append {
return Err(DataFusionError::NotImplemented(format!(
return not_impl_err!(
"{} is not implemented for DataFrame::write_json.",
options.insert_op
)));
);
}

let format = if let Some(json_opts) = writer_options {
Expand Down
5 changes: 3 additions & 2 deletions datafusion/core/src/dataframe/parquet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ use super::{
};

use datafusion_common::config::TableParquetOptions;
use datafusion_common::not_impl_err;
use datafusion_expr::dml::InsertOp;

impl DataFrame {
Expand Down Expand Up @@ -59,10 +60,10 @@ impl DataFrame {
writer_options: Option<TableParquetOptions>,
) -> Result<Vec<RecordBatch>, DataFusionError> {
if options.insert_op != InsertOp::Append {
return Err(DataFusionError::NotImplemented(format!(
return not_impl_err!(
"{} is not implemented for DataFrame::write_parquet.",
options.insert_op
)));
);
}

let format = if let Some(parquet_opts) = writer_options {
Expand Down
6 changes: 2 additions & 4 deletions datafusion/core/src/datasource/file_format/avro.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ use std::sync::Arc;
use arrow::datatypes::Schema;
use arrow::datatypes::SchemaRef;
use async_trait::async_trait;
use datafusion_common::internal_err;
use datafusion_common::parsers::CompressionTypeVariant;
use datafusion_common::DataFusionError;
use datafusion_common::GetExt;
use datafusion_common::DEFAULT_AVRO_EXTENSION;
use datafusion_physical_expr::PhysicalExpr;
Expand Down Expand Up @@ -105,9 +105,7 @@ impl FileFormat for AvroFormat {
let ext = self.get_ext();
match file_compression_type.get_variant() {
CompressionTypeVariant::UNCOMPRESSED => Ok(ext),
_ => Err(DataFusionError::Internal(
"Avro FileFormat does not support compression.".into(),
)),
_ => internal_err!("Avro FileFormat does not support compression."),
}
}

Expand Down
6 changes: 2 additions & 4 deletions datafusion/core/src/datasource/file_format/parquet.rs
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ use datafusion_common::file_options::parquet_writer::ParquetWriterOptions;
use datafusion_common::parsers::CompressionTypeVariant;
use datafusion_common::stats::Precision;
use datafusion_common::{
internal_datafusion_err, not_impl_err, DataFusionError, GetExt,
internal_datafusion_err, internal_err, not_impl_err, DataFusionError, GetExt,
DEFAULT_PARQUET_EXTENSION,
};
use datafusion_common_runtime::SpawnedTask;
Expand Down Expand Up @@ -323,9 +323,7 @@ impl FileFormat for ParquetFormat {
let ext = self.get_ext();
match file_compression_type.get_variant() {
CompressionTypeVariant::UNCOMPRESSED => Ok(ext),
_ => Err(DataFusionError::Internal(
"Parquet FileFormat does not support compression.".into(),
)),
_ => internal_err!("Parquet FileFormat does not support compression."),
}
}

Expand Down
6 changes: 3 additions & 3 deletions datafusion/core/src/datasource/file_format/write/demux.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,7 @@ use datafusion_common::cast::{
as_boolean_array, as_date32_array, as_date64_array, as_int32_array, as_int64_array,
as_string_array, as_string_view_array,
};
use datafusion_common::{exec_datafusion_err, DataFusionError};
use datafusion_common::{exec_datafusion_err, not_impl_err, DataFusionError};
use datafusion_common_runtime::SpawnedTask;
use datafusion_execution::TaskContext;

Expand Down Expand Up @@ -438,10 +438,10 @@ fn compute_partition_keys_by_row<'a>(
)
}
_ => {
return Err(DataFusionError::NotImplemented(format!(
return not_impl_err!(
"it is not yet supported to write to hive partitions with datatype {}",
dtype
)))
)
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1165,7 +1165,7 @@ mod tests {
})
.collect::<Vec<_>>()
})
.map_err(|e| e.to_string().leak() as &'static str);
.map_err(|e| e.strip_backtrace().leak() as &'static str);

assert_eq!(results_by_name, case.expected_result, "{}", case.name);
}
Expand Down
6 changes: 2 additions & 4 deletions datafusion/core/src/datasource/physical_plan/statistics.rs
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ use arrow::{
};
use arrow_array::RecordBatch;
use arrow_schema::SchemaRef;
use datafusion_common::{DataFusionError, Result};
use datafusion_common::{plan_err, DataFusionError, Result};
use datafusion_physical_expr::{expressions::Column, PhysicalSortExpr};
use datafusion_physical_expr_common::sort_expr::LexOrdering;

Expand Down Expand Up @@ -232,9 +232,7 @@ impl MinMaxStatistics {

// check that sort columns are non-nullable
if field.is_nullable() {
return Err(DataFusionError::Plan(
"cannot sort by nullable column".to_string(),
));
return plan_err!("cannot sort by nullable column");
}

Ok(SortColumn {
Expand Down
6 changes: 2 additions & 4 deletions datafusion/sql/src/select.rs
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ use crate::utils::{
};

use datafusion_common::tree_node::{TreeNode, TreeNodeRecursion};
use datafusion_common::{not_impl_err, plan_err, DataFusionError, Result};
use datafusion_common::{not_impl_err, plan_err, Result};
use datafusion_common::{RecursionUnnestOption, UnnestOptions};
use datafusion_expr::expr::{Alias, PlannedReplaceSelectItem, WildcardOptions};
use datafusion_expr::expr_rewriter::{
Expand Down Expand Up @@ -657,9 +657,7 @@ impl<'a, S: ContextProvider> SqlToRel<'a, S> {
} = options;

if opt_rename.is_some() {
Err(DataFusionError::NotImplemented(
"wildcard * with RENAME not supported ".to_string(),
))
not_impl_err!("wildcard * with RENAME not supported ")
} else {
Ok(())
}
Expand Down
4 changes: 2 additions & 2 deletions docs/source/library-user-guide/catalogs.md
Original file line number Diff line number Diff line change
Expand Up @@ -73,9 +73,9 @@ impl SchemaProvider for MemorySchemaProvider {
table: Arc<dyn TableProvider>,
) -> Result<Option<Arc<dyn TableProvider>>> {
if self.table_exist(name.as_str()) {
return Err(DataFusionError::Execution(format!(
return exec_err!(
"The table {name} already exists"
)));
);
}
Ok(self.tables.insert(name, table))
}
Expand Down

0 comments on commit c0ca4b4

Please sign in to comment.