merged incoming changes from main branch into index_dtype_api_deprecation
ShisuiUzumaki committed Jan 5, 2023
2 parents 8888596 + c989cdb commit 01d1fc6
Showing 361 changed files with 8,014 additions and 4,960 deletions.
2 changes: 1 addition & 1 deletion .github/actions/setup-conda/action.yml
@@ -18,7 +18,7 @@ runs:
- name: Set Arrow version in ${{ inputs.environment-file }} to ${{ inputs.pyarrow-version }}
run: |
grep -q ' - pyarrow' ${{ inputs.environment-file }}
sed -i"" -e "s/ - pyarrow<10/ - pyarrow=${{ inputs.pyarrow-version }}/" ${{ inputs.environment-file }}
sed -i"" -e "s/ - pyarrow/ - pyarrow=${{ inputs.pyarrow-version }}/" ${{ inputs.environment-file }}
cat ${{ inputs.environment-file }}
shell: bash
if: ${{ inputs.pyarrow-version }}
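The loosened sed pattern above matches any bare "- pyarrow" entry in the conda environment file instead of only "- pyarrow<10". A minimal sketch of the substitution, not part of the commit and assuming a requested pyarrow-version of 9:

echo "  - pyarrow" | sed -e "s/ - pyarrow/ - pyarrow=9/"
# prints "  - pyarrow=9", i.e. the entry is pinned to the requested version
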
4 changes: 2 additions & 2 deletions .github/workflows/code-checks.yml
@@ -39,8 +39,8 @@ jobs:
with:
extra_args: --verbose --all-files

docstring_typing_pylint:
name: Docstring validation, typing, and pylint
docstring_typing_manual_hooks:
name: Docstring validation, typing, and other manual pre-commit hooks
runs-on: ubuntu-22.04
defaults:
run:
3 changes: 3 additions & 0 deletions .github/workflows/codeql.yml
@@ -8,6 +8,9 @@ concurrency:
group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
cancel-in-progress: true

permissions:
contents: read

jobs:
analyze:
runs-on: ubuntu-22.04
7 changes: 7 additions & 0 deletions .github/workflows/docbuild-and-upload.yml
@@ -15,6 +15,7 @@ on:
env:
ENV_FILE: environment.yml
PANDAS_CI: 1
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

permissions:
contents: read
@@ -45,6 +46,12 @@ jobs:
- name: Build Pandas
uses: ./.github/actions/build_pandas

- name: Set up maintainers cache
uses: actions/cache@v3
with:
path: maintainers.json
key: maintainers

- name: Build website
run: python web/pandas_web.py web/pandas --target-path=web/build

3 changes: 2 additions & 1 deletion .github/workflows/macos-windows.yml
@@ -16,6 +16,7 @@ env:
PANDAS_CI: 1
PYTEST_TARGET: pandas
PATTERN: "not slow and not db and not network and not single_cpu"
ERROR_ON_WARNINGS: "1"


permissions:
@@ -52,7 +53,7 @@ jobs:
uses: ./.github/actions/setup-conda
with:
environment-file: ci/deps/${{ matrix.env_file }}
pyarrow-version: ${{ matrix.os == 'macos-latest' && '6' || '' }}
pyarrow-version: ${{ matrix.os == 'macos-latest' && '9' || '' }}

- name: Build Pandas
uses: ./.github/actions/build_pandas
54 changes: 0 additions & 54 deletions .github/workflows/scorecards.yml

This file was deleted.

24 changes: 17 additions & 7 deletions .github/workflows/ubuntu.yml
@@ -29,7 +29,7 @@ jobs:
matrix:
env_file: [actions-38.yaml, actions-39.yaml, actions-310.yaml]
pattern: ["not single_cpu", "single_cpu"]
pyarrow_version: ["7", "8", "9"]
pyarrow_version: ["7", "8", "9", "10"]
include:
- name: "Downstream Compat"
env_file: actions-38-downstream_compat.yaml
@@ -38,6 +38,7 @@
- name: "Minimum Versions"
env_file: actions-38-minimum_versions.yaml
pattern: "not slow and not network and not single_cpu"
error_on_warnings: "0"
- name: "Locale: it_IT"
env_file: actions-38.yaml
pattern: "not slow and not network and not single_cpu"
@@ -62,33 +63,42 @@
env_file: actions-310.yaml
pattern: "not slow and not network and not single_cpu"
pandas_copy_on_write: "1"
error_on_warnings: "0"
- name: "Data Manager"
env_file: actions-38.yaml
pattern: "not slow and not network and not single_cpu"
pandas_data_manager: "array"
error_on_warnings: "0"
- name: "Pypy"
env_file: actions-pypy-38.yaml
pattern: "not slow and not network and not single_cpu"
test_args: "--max-worker-restart 0"
error_on_warnings: "0"
- name: "Numpy Dev"
env_file: actions-310-numpydev.yaml
pattern: "not slow and not network and not single_cpu"
test_args: "-W error::DeprecationWarning:numpy -W error::FutureWarning:numpy"
error_on_warnings: "0"
exclude:
- env_file: actions-39.yaml
pyarrow_version: "6"
- env_file: actions-39.yaml
- env_file: actions-38.yaml
pyarrow_version: "7"
- env_file: actions-310.yaml
pyarrow_version: "6"
- env_file: actions-310.yaml
- env_file: actions-38.yaml
pyarrow_version: "8"
- env_file: actions-38.yaml
pyarrow_version: "9"
- env_file: actions-39.yaml
pyarrow_version: "7"
- env_file: actions-39.yaml
pyarrow_version: "8"
- env_file: actions-39.yaml
pyarrow_version: "9"
fail-fast: false
name: ${{ matrix.name || format('{0} pyarrow={1} {2}', matrix.env_file, matrix.pyarrow_version, matrix.pattern) }}
env:
ENV_FILE: ci/deps/${{ matrix.env_file }}
PATTERN: ${{ matrix.pattern }}
EXTRA_APT: ${{ matrix.extra_apt || '' }}
ERROR_ON_WARNINGS: ${{ matrix.error_on_warnings || '1' }}
LANG: ${{ matrix.lang || '' }}
LC_ALL: ${{ matrix.lc_all || '' }}
PANDAS_DATA_MANAGER: ${{ matrix.pandas_data_manager || 'block' }}
3 changes: 3 additions & 0 deletions .github/workflows/wheels.yml
@@ -30,6 +30,9 @@ concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true

permissions:
contents: read

jobs:
build_wheels:
name: Build wheel for ${{ matrix.python[0] }}-${{ matrix.buildplat[1] }}
52 changes: 40 additions & 12 deletions .pre-commit-config.yaml
@@ -1,7 +1,17 @@
minimum_pre_commit_version: 2.15.0
exclude: ^LICENSES/|\.(html|csv|svg)$
# reserve "manual" for mypy and pyright
default_stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg, post-checkout, post-commit, post-merge, post-rewrite]
# reserve "manual" for relatively slow hooks which we still want to run in CI
default_stages: [
commit,
merge-commit,
push,
prepare-commit-msg,
commit-msg,
post-checkout,
post-commit,
post-merge,
post-rewrite
]
ci:
autofix_prs: false
repos:
@@ -24,7 +34,7 @@ repos:
types_or: [python, rst, markdown]
additional_dependencies: [tomli]
- repo: https://github.com/MarcoGorelli/cython-lint
rev: v0.9.1
rev: v0.10.1
hooks:
- id: cython-lint
- id: double-quote-cython-strings
@@ -34,9 +44,11 @@ repos:
- id: debug-statements
- id: end-of-file-fixer
exclude: \.txt$
stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg, post-checkout, post-commit, post-merge, post-rewrite]
stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg,
post-checkout, post-commit, post-merge, post-rewrite]
- id: trailing-whitespace
stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg, post-checkout, post-commit, post-merge, post-rewrite]
stages: [commit, merge-commit, push, prepare-commit-msg, commit-msg,
post-checkout, post-commit, post-merge, post-rewrite]
- repo: https://github.com/cpplint/cpplint
rev: 1.6.1
hooks:
@@ -46,7 +58,14 @@ repos:
# this particular codebase (e.g. src/headers, src/klib). However,
# we can lint all header files since they aren't "generated" like C files are.
exclude: ^pandas/_libs/src/(klib|headers)/
args: [--quiet, '--extensions=c,h', '--headers=h', --recursive, '--filter=-readability/casting,-runtime/int,-build/include_subdir']
args: [
--quiet,
'--extensions=c,h',
'--headers=h',
--recursive,
--linelength=88,
'--filter=-readability/casting,-runtime/int,-build/include_subdir,-readability/fn_size'
]
- repo: https://github.com/PyCQA/flake8
rev: 6.0.0
hooks:
@@ -58,12 +77,12 @@ repos:
- flake8-bugbear==22.7.1
- pandas-dev-flaker==0.5.0
- repo: https://github.com/pycqa/pylint
rev: v2.15.6
rev: v2.15.9
hooks:
- id: pylint
stages: [manual]
- repo: https://github.com/pycqa/pylint
rev: v2.15.6
rev: v2.15.9
hooks:
- id: pylint
alias: redefined-outer-name
@@ -76,15 +95,14 @@ repos:
|^pandas/util/_test_decorators\.py # keep excluded
|^pandas/_version\.py # keep excluded
|^pandas/conftest\.py # keep excluded
|^pandas/core/generic\.py
args: [--disable=all, --enable=redefined-outer-name]
stages: [manual]
- repo: https://github.com/PyCQA/isort
rev: 5.10.1
rev: 5.11.4
hooks:
- id: isort
- repo: https://github.com/asottile/pyupgrade
rev: v3.2.2
rev: v3.3.1
hooks:
- id: pyupgrade
args: [--py38-plus]
@@ -107,6 +125,7 @@ repos:
hooks:
- id: yesqa
additional_dependencies: *flake8_dependencies
stages: [manual]
- repo: local
hooks:
# NOTE: we make `black` a local hook because if it's installed from
@@ -214,7 +233,6 @@ repos:
exclude: ^pandas/tests/extension/base/base\.py
- id: pip-to-conda
name: Generate pip dependency from conda
description: This hook checks if the conda environment.yml and requirements-dev.txt are equal
language: python
entry: python scripts/generate_pip_deps_from_conda.py
files: ^(environment.yml|requirements-dev.txt)$
@@ -311,6 +329,16 @@ repos:
files: ^pandas
exclude: ^(pandas/tests|pandas/_version.py|pandas/io/clipboard)
language: python
stages: [manual]
additional_dependencies:
- autotyping==22.9.0
- libcst==0.4.7
- id: check-test-naming
name: check that test names start with 'test'
entry: python -m scripts.check_test_naming
types: [python]
files: ^pandas/tests
language: python
exclude: |
(?x)
^pandas/tests/generic/test_generic.py # GH50380
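Taken together with the renamed docstring_typing_manual_hooks job above, the "manual" stage now collects the relatively slow hooks (pylint, yesqa, autotyping and friends) that CI still runs. A usage sketch, not part of the diff, assuming pre-commit is installed locally:

pre-commit run --all-files                      # ordinary run; manual-stage hooks are skipped
pre-commit run --hook-stage manual --all-files  # run the manual-stage hooks explicitly
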
2 changes: 1 addition & 1 deletion Dockerfile
@@ -8,6 +8,6 @@ RUN apt-get install -y build-essential
RUN apt-get install -y libhdf5-dev

RUN python -m pip install --upgrade pip
RUN python -m pip install --use-deprecated=legacy-resolver \
RUN python -m pip install \
-r https://raw.githubusercontent.com/pandas-dev/pandas/main/requirements-dev.txt
CMD ["/bin/bash"]
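With the legacy-resolver flag dropped, the development image simply pip-installs requirements-dev.txt using pip's default resolver. A build-and-run sketch, not part of the diff, assuming it is executed from the repository root:

docker build -t pandas-dev .
docker run -it --rm pandas-dev   # CMD drops you into /bin/bash
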
1 change: 0 additions & 1 deletion README.md
@@ -11,7 +11,6 @@
[![Package Status](https://img.shields.io/pypi/status/pandas.svg)](https://pypi.org/project/pandas/)
[![License](https://img.shields.io/pypi/l/pandas.svg)](https://github.com/pandas-dev/pandas/blob/main/LICENSE)
[![Coverage](https://codecov.io/github/pandas-dev/pandas/coverage.svg?branch=main)](https://codecov.io/gh/pandas-dev/pandas)
[![OpenSSF Scorecard](https://api.securityscorecards.dev/projects/github.com/pandas-dev/pandas/badge)](https://api.securityscorecards.dev/projects/github.com/pandas-dev/pandas)
[![Downloads](https://static.pepy.tech/personalized-badge/pandas?period=month&units=international_system&left_color=black&right_color=orange&left_text=PyPI%20downloads%20per%20month)](https://pepy.tech/project/pandas)
[![Slack](https://img.shields.io/badge/join_Slack-information-brightgreen.svg?logo=slack)](https://pandas.pydata.org/docs/dev/development/community.html?highlight=slack#community-slack)
[![Powered by NumFOCUS](https://img.shields.io/badge/powered%20by-NumFOCUS-orange.svg?style=flat&colorA=E1523D&colorB=007D8A)](https://numfocus.org)
2 changes: 1 addition & 1 deletion asv_bench/asv.conf.json
@@ -41,7 +41,7 @@
// pip (with all the conda available packages installed first,
// followed by the pip installed packages).
"matrix": {
"numpy": [],
"numpy": ["1.23.5"], // https://github.com/pandas-dev/pandas/pull/50356
"Cython": ["0.29.32"],
"matplotlib": [],
"sqlalchemy": [],
41 changes: 41 additions & 0 deletions asv_bench/benchmarks/array.py
@@ -90,5 +90,46 @@ def time_setitem_list(self, multiple_chunks):
    def time_setitem_slice(self, multiple_chunks):
        self.array[::10] = "foo"

    def time_setitem_null_slice(self, multiple_chunks):
        self.array[:] = "foo"

    def time_tolist(self, multiple_chunks):
        self.array.tolist()


class ArrowExtensionArray:

    params = [
        [
            "boolean[pyarrow]",
            "float64[pyarrow]",
            "int64[pyarrow]",
            "string[pyarrow]",
            "timestamp[ns][pyarrow]",
        ],
        [False, True],
    ]
    param_names = ["dtype", "hasna"]

    def setup(self, dtype, hasna):
        N = 100_000
        if dtype == "boolean[pyarrow]":
            data = np.random.choice([True, False], N, replace=True)
        elif dtype == "float64[pyarrow]":
            data = np.random.randn(N)
        elif dtype == "int64[pyarrow]":
            data = np.arange(N)
        elif dtype == "string[pyarrow]":
            data = tm.rands_array(10, N)
        elif dtype == "timestamp[ns][pyarrow]":
            data = pd.date_range("2000-01-01", freq="s", periods=N)
        else:
            raise NotImplementedError

        arr = pd.array(data, dtype=dtype)
        if hasna:
            arr[::2] = pd.NA
        self.arr = arr

    def time_to_numpy(self, dtype, hasna):
        self.arr.to_numpy()
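The new ArrowExtensionArray benchmark presumably relies on the module-level numpy/pandas/testing imports (np, pd, tm) already present at the top of asv_bench/benchmarks/array.py, which this hunk does not show. A sketch of exercising it locally with airspeed velocity, not part of the commit and assuming asv is installed and invoked from the asv_bench directory:

asv run --quick --bench array.ArrowExtensionArray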