chore(rust): Revert "Use auto_doc_cfg" (pola-rs#6164)
ritchie46 authored Jan 11, 2023
1 parent febcdda commit 557f986
Showing 47 changed files with 170 additions and 10 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/docs-deploy.yaml
@@ -38,7 +38,7 @@ jobs:

- name: Build Rust documentation
env:
-RUSTDOCFLAGS: --cfg docsrs
+RUSTFLAGS: --cfg docsrs
run: cargo doc --features=docs-selection --package polars

- name: Prepare deployment
2 changes: 1 addition & 1 deletion polars/polars-algo/src/lib.rs
@@ -1,4 +1,4 @@
-#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![cfg_attr(docsrs, feature(doc_cfg))]
mod algo;
pub use algo::*;
pub mod prelude;
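
For readers unfamiliar with the two nightly rustdoc features being swapped here: doc_auto_cfg derives the "Available on crate feature ... only" docs banner automatically from existing #[cfg(...)] attributes, whereas plain doc_cfg (restored by this revert) needs an explicit doc(cfg(...)) attribute on each feature-gated item. A minimal sketch of the restored pattern, assuming a crate with a "temporal" feature declared in its Cargo.toml (the module body is purely illustrative):

// lib.rs: opt into the nightly doc_cfg feature only when the docsrs cfg is
// set (e.g. via RUSTFLAGS: --cfg docsrs, as in the workflow change above),
// so ordinary stable builds are unaffected.
#![cfg_attr(docsrs, feature(doc_cfg))]

// The item is compiled only when the feature is enabled ...
#[cfg(feature = "temporal")]
// ... and the docs banner must be requested explicitly; doc_auto_cfg would
// have inferred it from the #[cfg] attribute above.
#[cfg_attr(docsrs, doc(cfg(feature = "temporal")))]
pub mod temporal {
    pub fn placeholder() {}
}
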
3 changes: 3 additions & 0 deletions polars/polars-core/src/chunked_array/mod.rs
@@ -30,15 +30,18 @@ mod from;
pub(crate) mod list;
pub(crate) mod logical;
#[cfg(feature = "object")]
+#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
pub mod object;
#[cfg(feature = "random")]
+#[cfg_attr(docsrs, doc(cfg(feature = "random")))]
mod random;
pub mod strings;
#[cfg(any(
feature = "temporal",
feature = "dtype-datetime",
feature = "dtype-date"
))]
+#[cfg_attr(docsrs, doc(cfg(feature = "temporal")))]
pub mod temporal;
mod trusted_len;
pub mod upstream_traits;
3 changes: 3 additions & 0 deletions polars/polars-core/src/chunked_array/ndarray.rs
@@ -9,6 +9,7 @@ where
{
/// If data is aligned in a single chunk and has no Null values a zero copy view is returned
/// as an `ndarray`
+#[cfg_attr(docsrs, doc(cfg(feature = "ndarray")))]
pub fn to_ndarray(&self) -> PolarsResult<ArrayView1<T::Native>> {
let slice = self.cont_slice()?;
Ok(aview1(slice))
@@ -17,6 +18,7 @@

impl ListChunked {
/// If all nested `Series` have the same length, a 2 dimensional `ndarray::Array` is returned.
+#[cfg_attr(docsrs, doc(cfg(feature = "ndarray")))]
pub fn to_ndarray<N>(&self) -> PolarsResult<Array2<N::Native>>
where
N: PolarsNumericType,
@@ -94,6 +96,7 @@ impl DataFrame {
/// [2.0, 8.0],
/// [3.0, 6.0]], shape=[3, 2], strides=[2, 1], layout=C (0x1), const ndim=2/
/// ```
+#[cfg_attr(docsrs, doc(cfg(feature = "ndarray")))]
pub fn to_ndarray<N>(&self) -> PolarsResult<Array2<N::Native>>
where
N: PolarsNumericType,
6 changes: 6 additions & 0 deletions polars/polars-core/src/chunked_array/ops/mod.rs
@@ -476,6 +476,7 @@ pub trait ChunkUnique<T: PolarsDataType> {

/// The most occurring value(s). Can return multiple Values
#[cfg(feature = "mode")]
+#[cfg_attr(docsrs, doc(cfg(feature = "mode")))]
fn mode(&self) -> PolarsResult<ChunkedArray<T>> {
Err(PolarsError::InvalidOperation(
"mode is not implemented for this dtype".into(),
@@ -727,6 +728,7 @@ pub trait ChunkPeaks {

/// Check if element is member of list array
#[cfg(feature = "is_in")]
+#[cfg_attr(docsrs, doc(cfg(feature = "is_in")))]
pub trait IsIn {
/// Check if elements of this array are in the right Series, or List values of the right Series.
fn is_in(&self, _other: &Series) -> PolarsResult<BooleanChunked> {
@@ -748,6 +750,7 @@ pub trait ArgAgg {

/// Repeat the values `n` times.
#[cfg(feature = "repeat_by")]
+#[cfg_attr(docsrs, doc(cfg(feature = "repeat_by")))]
pub trait RepeatBy {
/// Repeat the values `n` times, where `n` is determined by the values in `by`.
fn repeat_by(&self, _by: &IdxCa) -> ListChunked {
@@ -756,6 +759,7 @@ pub trait RepeatBy {
}

#[cfg(feature = "is_first")]
+#[cfg_attr(docsrs, doc(cfg(feature = "is_first")))]
/// Mask the first unique values as `true`
pub trait IsFirst<T: PolarsDataType> {
fn is_first(&self) -> PolarsResult<BooleanChunked> {
@@ -766,6 +770,7 @@ pub trait IsFirst<T: PolarsDataType> {
}

#[cfg(feature = "is_first")]
+#[cfg_attr(docsrs, doc(cfg(feature = "is_first")))]
/// Mask the last unique values as `true`
pub trait IsLast<T: PolarsDataType> {
fn is_last(&self) -> PolarsResult<BooleanChunked> {
@@ -776,6 +781,7 @@ pub trait IsLast<T: PolarsDataType> {
}

#[cfg(feature = "concat_str")]
+#[cfg_attr(docsrs, doc(cfg(feature = "concat_str")))]
/// Concat the values into a string array.
pub trait StrConcat {
/// Concat the values into a string array.
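
Each addition in this file follows the same two-attribute shape: the first attribute gates compilation, the second only affects the generated documentation. A self-contained sketch of that pairing, using an invented Frobnicate trait and a made-up "special" feature in place of the real polars traits and features:

// Assumes a feature named "special" is declared under [features] in Cargo.toml.
#[cfg(feature = "special")]
#[cfg_attr(docsrs, doc(cfg(feature = "special")))]
pub trait Frobnicate {
    /// A default body that errors, mirroring the error-returning defaults on
    /// the ChunkedArray op traits above, so types can opt in incrementally.
    fn frobnicate(&self) -> Result<(), String> {
        Err("frobnicate is not implemented for this type".to_string())
    }
}
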
2 changes: 2 additions & 0 deletions polars/polars-core/src/datatypes/mod.rs
@@ -119,11 +119,13 @@ impl PolarsDataType for ListType {
}

#[cfg(feature = "object")]
+#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
pub struct ObjectType<T>(T);
#[cfg(feature = "object")]
pub type ObjectChunked<T> = ChunkedArray<ObjectType<T>>;

#[cfg(feature = "object")]
+#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
impl<T: PolarsObject> PolarsDataType for ObjectType<T> {
fn get_dtype() -> DataType {
DataType::Object(T::type_name())
2 changes: 2 additions & 0 deletions polars/polars-core/src/frame/asof_join/groups.rs
@@ -619,6 +619,7 @@ fn dispatch_join<T: PolarsNumericType>(
}

impl DataFrame {
+#[cfg_attr(docsrs, doc(cfg(feature = "asof_join")))]
#[allow(clippy::too_many_arguments)]
#[doc(hidden)]
pub fn _join_asof_by(
@@ -703,6 +704,7 @@ impl DataFrame {
/// This is similar to a left-join except that we match on nearest key rather than equal keys.
/// The keys must be sorted to perform an asof join. This is a special implementation of an asof join
/// that searches for the nearest keys within a subgroup set by `by`.
+#[cfg_attr(docsrs, doc(cfg(feature = "asof_join")))]
#[allow(clippy::too_many_arguments)]
pub fn join_asof_by<I, S>(
&self,
1 change: 1 addition & 0 deletions polars/polars-core/src/frame/asof_join/mod.rs
@@ -196,6 +196,7 @@ impl DataFrame {

/// This is similar to a left-join except that we match on nearest key rather than equal keys.
/// The keys must be sorted to perform an asof join
+#[cfg_attr(docsrs, doc(cfg(feature = "asof_join")))]
pub fn join_asof(
&self,
other: &DataFrame,
5 changes: 5 additions & 0 deletions polars/polars-core/src/frame/mod.rs
@@ -2778,6 +2778,7 @@ impl DataFrame {

/// Aggregate the column horizontally to their min values.
#[cfg(feature = "zip_with")]
+#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn hmin(&self) -> PolarsResult<Option<Series>> {
let min_fn = |acc: &Series, s: &Series| {
let mask = acc.lt(s)? & acc.is_not_null() | s.is_null();
@@ -2807,6 +2808,7 @@

/// Aggregate the column horizontally to their max values.
#[cfg(feature = "zip_with")]
+#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn hmax(&self) -> PolarsResult<Option<Series>> {
let max_fn = |acc: &Series, s: &Series| {
let mask = acc.gt(s)? & acc.is_not_null() | s.is_null();
@@ -3303,6 +3305,7 @@

/// Split into multiple DataFrames partitioned by groups
#[cfg(feature = "partition_by")]
+#[cfg_attr(docsrs, doc(cfg(feature = "partition_by")))]
pub fn partition_by(&self, cols: impl IntoVec<String>) -> PolarsResult<Vec<DataFrame>> {
let cols = cols.into_vec();
self._partition_by_impl(&cols, false)
@@ -3311,6 +3314,7 @@
/// Split into multiple DataFrames partitioned by groups
/// Order of the groups are maintained.
#[cfg(feature = "partition_by")]
+#[cfg_attr(docsrs, doc(cfg(feature = "partition_by")))]
pub fn partition_by_stable(&self, cols: impl IntoVec<String>) -> PolarsResult<Vec<DataFrame>> {
let cols = cols.into_vec();
self._partition_by_impl(&cols, true)
@@ -3319,6 +3323,7 @@
/// Unnest the given `Struct` columns. This means that the fields of the `Struct` type will be
/// inserted as columns.
#[cfg(feature = "dtype-struct")]
+#[cfg_attr(docsrs, doc(cfg(feature = "dtype-struct")))]
pub fn unnest<I: IntoVec<String>>(&self, cols: I) -> PolarsResult<DataFrame> {
let cols = cols.into_vec();
self.unnest_impl(cols.into_iter().collect())
7 changes: 7 additions & 0 deletions polars/polars-core/src/frame/row.rs
@@ -18,6 +18,7 @@ impl<'a> Row<'a> {

impl DataFrame {
/// Get a row from a DataFrame. Use of this is discouraged as it will likely be slow.
+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn get_row(&self, idx: usize) -> PolarsResult<Row> {
let values = self
.columns
@@ -30,6 +31,7 @@
/// Amortize allocations by reusing a row.
/// The caller is responsible to make sure that the row has at least the capacity for the number
/// of columns in the DataFrame
+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn get_row_amortized<'a>(&'a self, idx: usize, row: &mut Row<'a>) -> PolarsResult<()> {
for (s, any_val) in self.columns.iter().zip(&mut row.0) {
*any_val = s.get(idx)?;
@@ -44,6 +46,7 @@
/// # Safety
/// Does not do any bounds checking.
#[inline]
+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub unsafe fn get_row_amortized_unchecked<'a>(&'a self, idx: usize, row: &mut Row<'a>) {
self.columns
.iter()
@@ -55,12 +58,14 @@

/// Create a new DataFrame from rows. This should only be used when you have row wise data,
/// as this is a lot slower than creating the `Series` in a columnar fashion
+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn from_rows_and_schema(rows: &[Row], schema: &Schema) -> PolarsResult<Self> {
Self::from_rows_iter_and_schema(rows.iter(), schema)
}

/// Create a new DataFrame from an iterator over rows. This should only be used when you have row wise data,
/// as this is a lot slower than creating the `Series` in a columnar fashion
+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn from_rows_iter_and_schema<'a, I>(mut rows: I, schema: &Schema) -> PolarsResult<Self>
where
I: Iterator<Item = &'a Row<'a>>,
@@ -103,6 +108,7 @@ impl DataFrame {

/// Create a new DataFrame from rows. This should only be used when you have row wise data,
/// as this is a lot slower than creating the `Series` in a columnar fashion
+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn from_rows(rows: &[Row]) -> PolarsResult<Self> {
let schema = rows_to_schema_first_non_null(rows, Some(50));
let has_nulls = schema
@@ -171,6 +177,7 @@
}
}

+#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
/// Transpose a DataFrame. This is a very expensive operation.
pub fn transpose(&self) -> PolarsResult<DataFrame> {
let height = self.height();
3 changes: 3 additions & 0 deletions polars/polars-core/src/functions.rs
@@ -136,6 +136,7 @@ impl<'a> IterBroadCast<'a> {
/// The concatenated strings are separated by a `delimiter`.
/// If no `delimiter` is needed, an empty &str should be passed as argument.
#[cfg(feature = "concat_str")]
+#[cfg_attr(docsrs, doc(cfg(feature = "concat_str")))]
pub fn concat_str(s: &[Series], delimiter: &str) -> PolarsResult<Utf8Chunked> {
if s.is_empty() {
return Err(PolarsError::NoData(
@@ -211,6 +212,7 @@

/// Concat `[DataFrame]`s horizontally.
#[cfg(feature = "horizontal_concat")]
+#[cfg_attr(docsrs, doc(cfg(feature = "horizontal_concat")))]
/// Concat horizontally and extend with null values if lengths don't match
pub fn hor_concat_df(dfs: &[DataFrame]) -> PolarsResult<DataFrame> {
let max_len = dfs
@@ -251,6 +253,7 @@ pub fn hor_concat_df(dfs: &[DataFrame]) -> PolarsResult<DataFrame> {

/// Concat `[DataFrame]`s diagonally.
#[cfg(feature = "diagonal_concat")]
+#[cfg_attr(docsrs, doc(cfg(feature = "diagonal_concat")))]
/// Concat diagonally thereby combining different schemas.
pub fn diag_concat_df(dfs: &[DataFrame]) -> PolarsResult<DataFrame> {
// TODO! replace with lazy only?
3 changes: 2 additions & 1 deletion polars/polars-core/src/lib.rs
@@ -1,4 +1,4 @@
-#![cfg_attr(docsrs, feature(doc_auto_cfg))]
+#![cfg_attr(docsrs, feature(doc_cfg))]
extern crate core;

#[macro_use]
@@ -17,6 +17,7 @@ mod named_from;
pub mod prelude;
pub mod schema;
#[cfg(feature = "serde")]
+#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub mod serde;
pub mod series;
pub mod testing;
1 change: 1 addition & 0 deletions polars/polars-core/src/series/implementations/object.rs
@@ -63,6 +63,7 @@ where
.map(|ca| ca.into_series())
}
}
+#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
impl<T> SeriesTrait for SeriesWrap<ObjectChunked<T>>
where
T: PolarsObject,
9 changes: 9 additions & 0 deletions polars/polars-core/src/series/mod.rs
@@ -370,6 +370,7 @@ impl Series {
/// Create a new ChunkedArray with values from self where the mask evaluates `true` and values
/// from `other` where the mask evaluates `false`
#[cfg(feature = "zip_with")]
+#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn zip_with(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
let (lhs, rhs) = coerce_lhs_rhs(self, other)?;
lhs.zip_with_same_type(mask, rhs.as_ref())
@@ -510,6 +511,7 @@
}

#[cfg(feature = "dot_product")]
+#[cfg_attr(docsrs, doc(cfg(feature = "dot_product")))]
pub fn dot(&self, other: &Series) -> Option<f64> {
(self * other).sum::<f64>()
}
@@ -534,6 +536,7 @@
}

/// Get an array with the cumulative max computed at every element
+#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cummax(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
@@ -546,6 +549,7 @@
}

/// Get an array with the cumulative min computed at every element
+#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cummin(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
@@ -561,6 +565,7 @@
///
/// If the [`DataType`] is one of `{Int8, UInt8, Int16, UInt16}` the `Series` is
/// first cast to `Int64` to prevent overflow issues.
+#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
#[allow(unused_variables)]
pub fn cumsum(&self, reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
@@ -609,6 +614,7 @@
///
/// If the [`DataType`] is one of `{Int8, UInt8, Int16, UInt16, Int32, UInt32}` the `Series` is
/// first cast to `Int64` to prevent overflow issues.
+#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
#[allow(unused_variables)]
pub fn cumprod(&self, reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
@@ -649,6 +655,7 @@
///
/// If the [`DataType`] is one of `{Int8, UInt8, Int16, UInt16}` the `Series` is
/// first cast to `Int64` to prevent overflow issues.
+#[cfg_attr(docsrs, doc(cfg(feature = "product")))]
pub fn product(&self) -> Series {
#[cfg(feature = "product")]
{
@@ -681,6 +688,7 @@
}

#[cfg(feature = "rank")]
+#[cfg_attr(docsrs, doc(cfg(feature = "rank")))]
pub fn rank(&self, options: RankOptions) -> Series {
rank(self, options.method, options.descending)
}
@@ -799,6 +807,7 @@
}

#[cfg(feature = "abs")]
+#[cfg_attr(docsrs, doc(cfg(feature = "abs")))]
/// convert numerical values to their absolute value
pub fn abs(&self) -> PolarsResult<Series> {
let a = self.to_physical_repr();
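
The cum_agg and product methods above follow a slightly different shape: the method itself is always compiled and only its body is feature-gated, so there is no #[cfg] on the item for doc_auto_cfg to infer from, and the explicit doc(cfg(...)) attribute is what advertises the required feature in the docs. A rough sketch of that shape on an invented MySeries type (assumes a "cum_agg" feature declared in Cargo.toml):

pub struct MySeries(Vec<f64>);

impl MySeries {
    /// Cumulative max: compiled unconditionally, but only functional when the
    /// cum_agg feature is enabled.
    #[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
    pub fn cummax(&self, _reverse: bool) -> MySeries {
        #[cfg(feature = "cum_agg")]
        {
            // The real feature-gated implementation would go here.
            MySeries(self.0.clone())
        }
        #[cfg(not(feature = "cum_agg"))]
        {
            panic!("activate 'cum_agg' feature")
        }
    }
}
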
1 change: 1 addition & 0 deletions polars/polars-core/src/series/ops/diff.rs
@@ -2,6 +2,7 @@ use crate::prelude::*;
use crate::series::ops::NullBehavior;

impl Series {
+#[cfg_attr(docsrs, doc(cfg(feature = "diff")))]
pub fn diff(&self, n: usize, null_behavior: NullBehavior) -> Series {
use DataType::*;
let s = match self.dtype() {
5 changes: 5 additions & 0 deletions polars/polars-core/src/series/ops/mod.rs
@@ -1,15 +1,20 @@
#[cfg(feature = "diff")]
+#[cfg_attr(docsrs, doc(cfg(feature = "diff")))]
pub mod diff;
mod downcast;
#[cfg(feature = "ewma")]
+#[cfg_attr(docsrs, doc(cfg(feature = "ewma")))]
mod ewm;
mod extend;
#[cfg(feature = "moment")]
+#[cfg_attr(docsrs, doc(cfg(feature = "moment")))]
pub mod moment;
mod null;
#[cfg(feature = "pct_change")]
+#[cfg_attr(docsrs, doc(cfg(feature = "pct_change")))]
pub mod pct_change;
#[cfg(feature = "round_series")]
+#[cfg_attr(docsrs, doc(cfg(feature = "round_series")))]
mod round;
mod to_list;
mod unique;