Skip to content

Commit

Permalink
Use auto_doc_cfg
Browse files Browse the repository at this point in the history
This removes the need to manually annotate each feature-gated item with a
documentation cfg attribute: `doc_auto_cfg` infers the feature flags automatically.

rust-lang/rust#90502

Fixes pola-rs#6123
  • Loading branch information
n8henrie committed Jan 9, 2023
1 parent 45ad9e3 commit b2588ec
Show file tree
Hide file tree
Showing 47 changed files with 10 additions and 170 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/docs-deploy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ jobs:

- name: Build Rust documentation
env:
RUSTFLAGS: --cfg docsrs
RUSTDOCFLAGS: --cfg docsrs
run: cargo doc --features=docs-selection --package polars

- name: Prepare deployment
Expand Down
2 changes: 1 addition & 1 deletion polars/polars-algo/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
mod algo;
pub use algo::*;
pub mod prelude;
3 changes: 0 additions & 3 deletions polars/polars-core/src/chunked_array/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -30,18 +30,15 @@ mod from;
pub(crate) mod list;
pub(crate) mod logical;
#[cfg(feature = "object")]
#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
pub mod object;
#[cfg(feature = "random")]
#[cfg_attr(docsrs, doc(cfg(feature = "random")))]
mod random;
pub mod strings;
#[cfg(any(
feature = "temporal",
feature = "dtype-datetime",
feature = "dtype-date"
))]
#[cfg_attr(docsrs, doc(cfg(feature = "temporal")))]
pub mod temporal;
mod trusted_len;
pub mod upstream_traits;
Expand Down
3 changes: 0 additions & 3 deletions polars/polars-core/src/chunked_array/ndarray.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,6 @@ where
{
/// If data is aligned in a single chunk and has no Null values a zero copy view is returned
/// as an `ndarray`
#[cfg_attr(docsrs, doc(cfg(feature = "ndarray")))]
pub fn to_ndarray(&self) -> PolarsResult<ArrayView1<T::Native>> {
let slice = self.cont_slice()?;
Ok(aview1(slice))
Expand All @@ -18,7 +17,6 @@ where

impl ListChunked {
/// If all nested `Series` have the same length, a 2 dimensional `ndarray::Array` is returned.
#[cfg_attr(docsrs, doc(cfg(feature = "ndarray")))]
pub fn to_ndarray<N>(&self) -> PolarsResult<Array2<N::Native>>
where
N: PolarsNumericType,
Expand Down Expand Up @@ -96,7 +94,6 @@ impl DataFrame {
/// [2.0, 8.0],
/// [3.0, 6.0]], shape=[3, 2], strides=[2, 1], layout=C (0x1), const ndim=2/
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "ndarray")))]
pub fn to_ndarray<N>(&self) -> PolarsResult<Array2<N::Native>>
where
N: PolarsNumericType,
Expand Down
6 changes: 0 additions & 6 deletions polars/polars-core/src/chunked_array/ops/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -476,7 +476,6 @@ pub trait ChunkUnique<T: PolarsDataType> {

/// The most occurring value(s). Can return multiple Values
#[cfg(feature = "mode")]
#[cfg_attr(docsrs, doc(cfg(feature = "mode")))]
fn mode(&self) -> PolarsResult<ChunkedArray<T>> {
Err(PolarsError::InvalidOperation(
"mode is not implemented for this dtype".into(),
Expand Down Expand Up @@ -728,7 +727,6 @@ pub trait ChunkPeaks {

/// Check if element is member of list array
#[cfg(feature = "is_in")]
#[cfg_attr(docsrs, doc(cfg(feature = "is_in")))]
pub trait IsIn {
/// Check if elements of this array are in the right Series, or List values of the right Series.
fn is_in(&self, _other: &Series) -> PolarsResult<BooleanChunked> {
Expand All @@ -750,7 +748,6 @@ pub trait ArgAgg {

/// Repeat the values `n` times.
#[cfg(feature = "repeat_by")]
#[cfg_attr(docsrs, doc(cfg(feature = "repeat_by")))]
pub trait RepeatBy {
/// Repeat the values `n` times, where `n` is determined by the values in `by`.
fn repeat_by(&self, _by: &IdxCa) -> ListChunked {
Expand All @@ -759,7 +756,6 @@ pub trait RepeatBy {
}

#[cfg(feature = "is_first")]
#[cfg_attr(docsrs, doc(cfg(feature = "is_first")))]
/// Mask the first unique values as `true`
pub trait IsFirst<T: PolarsDataType> {
fn is_first(&self) -> PolarsResult<BooleanChunked> {
Expand All @@ -770,7 +766,6 @@ pub trait IsFirst<T: PolarsDataType> {
}

#[cfg(feature = "is_first")]
#[cfg_attr(docsrs, doc(cfg(feature = "is_first")))]
/// Mask the last unique values as `true`
pub trait IsLast<T: PolarsDataType> {
fn is_last(&self) -> PolarsResult<BooleanChunked> {
Expand All @@ -781,7 +776,6 @@ pub trait IsLast<T: PolarsDataType> {
}

#[cfg(feature = "concat_str")]
#[cfg_attr(docsrs, doc(cfg(feature = "concat_str")))]
/// Concat the values into a string array.
pub trait StrConcat {
/// Concat the values into a string array.
Expand Down
2 changes: 0 additions & 2 deletions polars/polars-core/src/datatypes/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -119,13 +119,11 @@ impl PolarsDataType for ListType {
}

#[cfg(feature = "object")]
#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
pub struct ObjectType<T>(T);
#[cfg(feature = "object")]
pub type ObjectChunked<T> = ChunkedArray<ObjectType<T>>;

#[cfg(feature = "object")]
#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
impl<T: PolarsObject> PolarsDataType for ObjectType<T> {
fn get_dtype() -> DataType {
DataType::Object(T::type_name())
Expand Down
2 changes: 0 additions & 2 deletions polars/polars-core/src/frame/asof_join/groups.rs
Original file line number Diff line number Diff line change
Expand Up @@ -619,7 +619,6 @@ fn dispatch_join<T: PolarsNumericType>(
}

impl DataFrame {
#[cfg_attr(docsrs, doc(cfg(feature = "asof_join")))]
#[allow(clippy::too_many_arguments)]
#[doc(hidden)]
pub fn _join_asof_by(
Expand Down Expand Up @@ -704,7 +703,6 @@ impl DataFrame {
/// This is similar to a left-join except that we match on nearest key rather than equal keys.
/// The keys must be sorted to perform an asof join. This is a special implementation of an asof join
/// that searches for the nearest keys within a subgroup set by `by`.
#[cfg_attr(docsrs, doc(cfg(feature = "asof_join")))]
#[allow(clippy::too_many_arguments)]
pub fn join_asof_by<I, S>(
&self,
Expand Down
1 change: 0 additions & 1 deletion polars/polars-core/src/frame/asof_join/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,6 @@ impl DataFrame {

/// This is similar to a left-join except that we match on nearest key rather than equal keys.
/// The keys must be sorted to perform an asof join
#[cfg_attr(docsrs, doc(cfg(feature = "asof_join")))]
pub fn join_asof(
&self,
other: &DataFrame,
Expand Down
5 changes: 0 additions & 5 deletions polars/polars-core/src/frame/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2778,7 +2778,6 @@ impl DataFrame {

/// Aggregate the column horizontally to their min values.
#[cfg(feature = "zip_with")]
#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn hmin(&self) -> PolarsResult<Option<Series>> {
let min_fn = |acc: &Series, s: &Series| {
let mask = acc.lt(s)? & acc.is_not_null() | s.is_null();
Expand Down Expand Up @@ -2808,7 +2807,6 @@ impl DataFrame {

/// Aggregate the column horizontally to their max values.
#[cfg(feature = "zip_with")]
#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn hmax(&self) -> PolarsResult<Option<Series>> {
let max_fn = |acc: &Series, s: &Series| {
let mask = acc.gt(s)? & acc.is_not_null() | s.is_null();
Expand Down Expand Up @@ -3305,7 +3303,6 @@ impl DataFrame {

/// Split into multiple DataFrames partitioned by groups
#[cfg(feature = "partition_by")]
#[cfg_attr(docsrs, doc(cfg(feature = "partition_by")))]
pub fn partition_by(&self, cols: impl IntoVec<String>) -> PolarsResult<Vec<DataFrame>> {
let cols = cols.into_vec();
self._partition_by_impl(&cols, false)
Expand All @@ -3314,7 +3311,6 @@ impl DataFrame {
/// Split into multiple DataFrames partitioned by groups
/// Order of the groups are maintained.
#[cfg(feature = "partition_by")]
#[cfg_attr(docsrs, doc(cfg(feature = "partition_by")))]
pub fn partition_by_stable(&self, cols: impl IntoVec<String>) -> PolarsResult<Vec<DataFrame>> {
let cols = cols.into_vec();
self._partition_by_impl(&cols, true)
Expand All @@ -3323,7 +3319,6 @@ impl DataFrame {
/// Unnest the given `Struct` columns. This means that the fields of the `Struct` type will be
/// inserted as columns.
#[cfg(feature = "dtype-struct")]
#[cfg_attr(docsrs, doc(cfg(feature = "dtype-struct")))]
pub fn unnest<I: IntoVec<String>>(&self, cols: I) -> PolarsResult<DataFrame> {
let cols = cols.into_vec();
self.unnest_impl(cols.into_iter().collect())
Expand Down
7 changes: 0 additions & 7 deletions polars/polars-core/src/frame/row.rs
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,6 @@ impl<'a> Row<'a> {

impl DataFrame {
/// Get a row from a DataFrame. Use of this is discouraged as it will likely be slow.
#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn get_row(&self, idx: usize) -> PolarsResult<Row> {
let values = self
.columns
Expand All @@ -31,7 +30,6 @@ impl DataFrame {
/// Amortize allocations by reusing a row.
/// The caller is responsible to make sure that the row has at least the capacity for the number
/// of columns in the DataFrame
#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn get_row_amortized<'a>(&'a self, idx: usize, row: &mut Row<'a>) -> PolarsResult<()> {
for (s, any_val) in self.columns.iter().zip(&mut row.0) {
*any_val = s.get(idx)?;
Expand All @@ -46,7 +44,6 @@ impl DataFrame {
/// # Safety
/// Does not do any bounds checking.
#[inline]
#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub unsafe fn get_row_amortized_unchecked<'a>(&'a self, idx: usize, row: &mut Row<'a>) {
self.columns
.iter()
Expand All @@ -58,14 +55,12 @@ impl DataFrame {

/// Create a new DataFrame from rows. This should only be used when you have row wise data,
/// as this is a lot slower than creating the `Series` in a columnar fashion
#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn from_rows_and_schema(rows: &[Row], schema: &Schema) -> PolarsResult<Self> {
Self::from_rows_iter_and_schema(rows.iter(), schema)
}

/// Create a new DataFrame from an iterator over rows. This should only be used when you have row wise data,
/// as this is a lot slower than creating the `Series` in a columnar fashion
#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn from_rows_iter_and_schema<'a, I>(mut rows: I, schema: &Schema) -> PolarsResult<Self>
where
I: Iterator<Item = &'a Row<'a>>,
Expand Down Expand Up @@ -108,7 +103,6 @@ impl DataFrame {

/// Create a new DataFrame from rows. This should only be used when you have row wise data,
/// as this is a lot slower than creating the `Series` in a columnar fashion
#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
pub fn from_rows(rows: &[Row]) -> PolarsResult<Self> {
let schema = rows_to_schema_first_non_null(rows, Some(50));
let has_nulls = schema
Expand Down Expand Up @@ -177,7 +171,6 @@ impl DataFrame {
}
}

#[cfg_attr(docsrs, doc(cfg(feature = "rows")))]
/// Transpose a DataFrame. This is a very expensive operation.
pub fn transpose(&self) -> PolarsResult<DataFrame> {
let height = self.height();
Expand Down
3 changes: 0 additions & 3 deletions polars/polars-core/src/functions.rs
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,6 @@ impl<'a> IterBroadCast<'a> {
/// The concatenated strings are separated by a `delimiter`.
/// If no `delimiter` is needed, an empty &str should be passed as argument.
#[cfg(feature = "concat_str")]
#[cfg_attr(docsrs, doc(cfg(feature = "concat_str")))]
pub fn concat_str(s: &[Series], delimiter: &str) -> PolarsResult<Utf8Chunked> {
if s.is_empty() {
return Err(PolarsError::NoData(
Expand Down Expand Up @@ -212,7 +211,6 @@ pub fn concat_str(s: &[Series], delimiter: &str) -> PolarsResult<Utf8Chunked> {

/// Concat `[DataFrame]`s horizontally.
#[cfg(feature = "horizontal_concat")]
#[cfg_attr(docsrs, doc(cfg(feature = "horizontal_concat")))]
/// Concat horizontally and extend with null values if lengths don't match
pub fn hor_concat_df(dfs: &[DataFrame]) -> PolarsResult<DataFrame> {
let max_len = dfs
Expand Down Expand Up @@ -253,7 +251,6 @@ pub fn hor_concat_df(dfs: &[DataFrame]) -> PolarsResult<DataFrame> {

/// Concat `[DataFrame]`s diagonally.
#[cfg(feature = "diagonal_concat")]
#[cfg_attr(docsrs, doc(cfg(feature = "diagonal_concat")))]
/// Concat diagonally thereby combining different schemas.
pub fn diag_concat_df(dfs: &[DataFrame]) -> PolarsResult<DataFrame> {
// TODO! replace with lazy only?
Expand Down
3 changes: 1 addition & 2 deletions polars/polars-core/src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
extern crate core;

#[macro_use]
Expand All @@ -17,7 +17,6 @@ mod named_from;
pub mod prelude;
pub mod schema;
#[cfg(feature = "serde")]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub mod serde;
pub mod series;
pub mod testing;
Expand Down
1 change: 0 additions & 1 deletion polars/polars-core/src/series/implementations/object.rs
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,6 @@ where
.map(|ca| ca.into_series())
}
}
#[cfg_attr(docsrs, doc(cfg(feature = "object")))]
impl<T> SeriesTrait for SeriesWrap<ObjectChunked<T>>
where
T: PolarsObject,
Expand Down
9 changes: 0 additions & 9 deletions polars/polars-core/src/series/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -370,7 +370,6 @@ impl Series {
/// Create a new ChunkedArray with values from self where the mask evaluates `true` and values
/// from `other` where the mask evaluates `false`
#[cfg(feature = "zip_with")]
#[cfg_attr(docsrs, doc(cfg(feature = "zip_with")))]
pub fn zip_with(&self, mask: &BooleanChunked, other: &Series) -> PolarsResult<Series> {
let (lhs, rhs) = coerce_lhs_rhs(self, other)?;
lhs.zip_with_same_type(mask, rhs.as_ref())
Expand Down Expand Up @@ -511,7 +510,6 @@ impl Series {
}

#[cfg(feature = "dot_product")]
#[cfg_attr(docsrs, doc(cfg(feature = "dot_product")))]
pub fn dot(&self, other: &Series) -> Option<f64> {
(self * other).sum::<f64>()
}
Expand All @@ -536,7 +534,6 @@ impl Series {
}

/// Get an array with the cumulative max computed at every element
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cummax(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
Expand All @@ -549,7 +546,6 @@ impl Series {
}

/// Get an array with the cumulative min computed at every element
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
pub fn cummin(&self, _reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
{
Expand All @@ -565,7 +561,6 @@ impl Series {
///
/// If the [`DataType`] is one of `{Int8, UInt8, Int16, UInt16}` the `Series` is
/// first cast to `Int64` to prevent overflow issues.
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
#[allow(unused_variables)]
pub fn cumsum(&self, reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
Expand Down Expand Up @@ -614,7 +609,6 @@ impl Series {
///
/// If the [`DataType`] is one of `{Int8, UInt8, Int16, UInt16, Int32, UInt32}` the `Series` is
/// first cast to `Int64` to prevent overflow issues.
#[cfg_attr(docsrs, doc(cfg(feature = "cum_agg")))]
#[allow(unused_variables)]
pub fn cumprod(&self, reverse: bool) -> Series {
#[cfg(feature = "cum_agg")]
Expand Down Expand Up @@ -655,7 +649,6 @@ impl Series {
///
/// If the [`DataType`] is one of `{Int8, UInt8, Int16, UInt16}` the `Series` is
/// first cast to `Int64` to prevent overflow issues.
#[cfg_attr(docsrs, doc(cfg(feature = "product")))]
pub fn product(&self) -> Series {
#[cfg(feature = "product")]
{
Expand Down Expand Up @@ -688,7 +681,6 @@ impl Series {
}

#[cfg(feature = "rank")]
#[cfg_attr(docsrs, doc(cfg(feature = "rank")))]
pub fn rank(&self, options: RankOptions) -> Series {
rank(self, options.method, options.descending)
}
Expand Down Expand Up @@ -807,7 +799,6 @@ impl Series {
}

#[cfg(feature = "abs")]
#[cfg_attr(docsrs, doc(cfg(feature = "abs")))]
/// convert numerical values to their absolute value
pub fn abs(&self) -> PolarsResult<Series> {
let a = self.to_physical_repr();
Expand Down
1 change: 0 additions & 1 deletion polars/polars-core/src/series/ops/diff.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ use crate::prelude::*;
use crate::series::ops::NullBehavior;

impl Series {
#[cfg_attr(docsrs, doc(cfg(feature = "diff")))]
pub fn diff(&self, n: usize, null_behavior: NullBehavior) -> Series {
use DataType::*;
let s = match self.dtype() {
Expand Down
5 changes: 0 additions & 5 deletions polars/polars-core/src/series/ops/mod.rs
Original file line number Diff line number Diff line change
@@ -1,20 +1,15 @@
#[cfg(feature = "diff")]
#[cfg_attr(docsrs, doc(cfg(feature = "diff")))]
pub mod diff;
mod downcast;
#[cfg(feature = "ewma")]
#[cfg_attr(docsrs, doc(cfg(feature = "ewma")))]
mod ewm;
mod extend;
#[cfg(feature = "moment")]
#[cfg_attr(docsrs, doc(cfg(feature = "moment")))]
pub mod moment;
mod null;
#[cfg(feature = "pct_change")]
#[cfg_attr(docsrs, doc(cfg(feature = "pct_change")))]
pub mod pct_change;
#[cfg(feature = "round_series")]
#[cfg_attr(docsrs, doc(cfg(feature = "round_series")))]
mod round;
mod to_list;
mod unique;
Expand Down
Loading

0 comments on commit b2588ec

Please sign in to comment.