Skip to content

Commit

Permalink
chore: fmt and clippy
Browse files Browse the repository at this point in the history
  • Loading branch information
coastalwhite committed Aug 14, 2024
1 parent 6d6acce commit c321dc1
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
6 changes: 3 additions & 3 deletions crates/polars-io/src/parquet/read/mmap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,8 @@ use bytes::Bytes;
use polars_core::datatypes::PlHashMap;
use polars_error::PolarsResult;
use polars_parquet::read::{
column_iter_to_arrays, get_field_columns, BasicDecompressor, ColumnChunkMetaData,
Filter, PageReader,
column_iter_to_arrays, get_field_columns, BasicDecompressor, ColumnChunkMetaData, Filter,
PageReader,
};
use polars_utils::mmap::{MemReader, MemSlice};

Expand Down Expand Up @@ -63,7 +63,7 @@ fn _mmap_single_column<'a>(

// similar to arrow2 serializer, except this accepts a slice instead of a vec.
// this allows us to memory map
pub(super) fn to_deserializer<'a>(
pub(super) fn to_deserializer(
columns: Vec<(&ColumnChunkMetaData, MemSlice)>,
field: Field,
filter: Option<Filter>,
Expand Down
2 changes: 1 addition & 1 deletion crates/polars-parquet/src/arrow/read/row_group.rs
Original file line number Diff line number Diff line change
Expand Up @@ -160,7 +160,7 @@ pub fn to_deserializer(
/// This operation is single-threaded. For readers with stronger invariants
/// (e.g. implement [`Clone`]) you can use [`read_columns`] to read multiple columns at once
/// and convert them to [`ArrayIter`] via [`to_deserializer`].
pub fn read_columns_many<'a, R: Read + Seek>(
pub fn read_columns_many<R: Read + Seek>(
reader: &mut R,
row_group: &RowGroupMetaData,
fields: Vec<Field>,
Expand Down

0 comments on commit c321dc1

Please sign in to comment.