Skip to content

Commit

Permalink
chore: fix merge conflicts
Browse files Browse the repository at this point in the history
  • Loading branch information
ncclementi committed Sep 20, 2024
2 parents 0565567 + c62efce commit 9f7d943
Show file tree
Hide file tree
Showing 84 changed files with 1,609 additions and 1,259 deletions.
103 changes: 103 additions & 0 deletions .github/labeler.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
# Configuration for actions/labeler (v5 schema).
# Each top-level key is a label name; the label is applied to a PR when any
# changed file matches one of the listed globs.

# backends
# One label per Ibis backend, keyed off its source directory.
bigquery:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/bigquery/**"

clickhouse:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/clickhouse/**"

datafusion:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/datafusion/**"

druid:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/druid/**"

duckdb:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/duckdb/**"

exasol:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/exasol/**"

flink:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/flink/**"

impala:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/impala/**"

mssql:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/mssql/**"

mysql:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/mysql/**"

oracle:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/oracle/**"

polars:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/polars/**"

postgres:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/postgres/**"

pyspark:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/pyspark/**"

risingwave:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/risingwave/**"

snowflake:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/snowflake/**"

sqlite:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/sqlite/**"

trino:
  - changed-files:
    - any-glob-to-any-file: "ibis/backends/trino/**"

# miscellaneous labels
tests:
  - changed-files:
    - any-glob-to-any-file: "**/tests/**"

# multiple glob entries are OR-ed: any single match applies the label
nix:
  - changed-files:
    - any-glob-to-any-file: "**/*.nix"
    - any-glob-to-any-file: "poetry.lock"

datatypes:
  - changed-files:
    - any-glob-to-any-file: "ibis/expr/datatypes/**"

ci:
  - changed-files:
    - any-glob-to-any-file: ".github/**"

# note: globs intentionally overlap with the `nix` label above; a PR touching
# *.nix or poetry.lock receives both labels
dependencies:
  - changed-files:
    - any-glob-to-any-file: "**/*.nix"
    - any-glob-to-any-file: "poetry.lock"
    - any-glob-to-any-file: "flake.lock"
    - any-glob-to-any-file: "requirements-dev.txt"
    - any-glob-to-any-file: "conda/*.yml"

docs:
  - changed-files:
    - any-glob-to-any-file: "**/*.qmd"
    - any-glob-to-any-file: "**/*.md"
6 changes: 5 additions & 1 deletion .github/renovate.json
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@
"addLabels": ["druid"]
},
{
"matchPackagePatterns": ["pymysql", "mariadb"],
"matchPackagePatterns": ["mysqlclient", "mariadb"],
"addLabels": ["mysql"]
},
{
Expand All @@ -78,6 +78,10 @@
"matchPackagePatterns": ["pyspark"],
"addLabels": ["pyspark"]
},
{
"matchPackagePatterns": ["risingwave"],
"addLabels": ["risingwave"]
},
{
"matchPackagePatterns": ["snowflake-connector-python"],
"addLabels": ["snowflake"]
Expand Down
19 changes: 19 additions & 0 deletions .github/workflows/algolia/configure-algolia-api.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,15 @@
index_name = os.environ["ALGOLIA_INDEX"]


# Mapping of search input -> list of synonym terms, registered with Algolia
# as "oneWaySynonym" records: searching the key also matches the synonyms,
# but not vice versa.
ONE_WAY_SYNONYMS = {
    # A list of search terms that have (historically) not returned results
    # that we can map to existing search terms that we know are good
    "md5": ["hashbytes"],
    "fetchdf": ["to_pandas", "to_polars", "to_pyarrow"],
    "unique": ["distinct"],
}


def main():
client = SearchClient.create(app_id, api_key)
index = client.init_index(index_name)
Expand All @@ -30,6 +39,16 @@ def main():

index.set_settings(override_default_settings)

for input_, synonyms in ONE_WAY_SYNONYMS.items():
index.save_synonym(
{
"objectID": input_,
"type": "oneWaySynonym",
"input": input_,
"synonyms": synonyms,
}
)


if __name__ == "__main__":
main()
2 changes: 1 addition & 1 deletion .github/workflows/docs-preview.yml
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,7 @@ jobs:
path: docs/**/.jupyter_cache

- name: build docs
run: nix develop --ignore-environment --keep HOME -c just docs-build-all
run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just docs-build-all

- name: install netlify cli
run: npm install -g netlify-cli
Expand Down
15 changes: 10 additions & 5 deletions .github/workflows/ibis-backends.yml
Original file line number Diff line number Diff line change
Expand Up @@ -123,6 +123,7 @@ jobs:
- sqlite
- name: datafusion
title: DataFusion
serial: true
extras:
- datafusion
- name: polars
Expand All @@ -140,6 +141,7 @@ jobs:
- polars
sys-deps:
- libgeos-dev
- default-libmysqlclient-dev
- name: postgres
title: PostgreSQL
extras:
Expand Down Expand Up @@ -270,6 +272,7 @@ jobs:
- mysql
sys-deps:
- libgeos-dev
- default-libmysqlclient-dev
- os: windows-latest
backend:
name: clickhouse
Expand Down Expand Up @@ -650,24 +653,27 @@ jobs:
run: docker compose logs

test_pyspark:
name: PySpark ${{ matrix.pyspark-version }} ubuntu-latest python-${{ matrix.python-version }}
name: PySpark ${{ matrix.pyspark-minor-version }} ubuntu-latest python-${{ matrix.python-version }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
- python-version: "3.10"
pyspark-version: "3.3.3"
pyspark-minor-version: "3.3"
deps:
- "'pandas@<2'"
- "'numpy@<1.24'"
- python-version: "3.11"
pyspark-version: "3.5"
pyspark-version: "3.5.2"
pyspark-minor-version: "3.5"
deps:
- "'pandas@>2'"
- "'numpy@>1.24'"
- python-version: "3.12"
pyspark-version: "3.5"
pyspark-version: "3.5.2"
pyspark-minor-version: "3.5"
deps:
- "'pandas@>2'"
- "'numpy@>1.24'"
Expand Down Expand Up @@ -722,8 +728,7 @@ jobs:

- name: install iceberg
shell: bash
if: matrix.pyspark-version == '3.5'
run: pushd "$(poetry run python -c "import pyspark; print(pyspark.__file__.rsplit('/', 1)[0])")/jars" && curl -LO https://search.maven.org/remotecontent?filepath=org/apache/iceberg/iceberg-spark-runtime-3.5_2.12/1.5.2/iceberg-spark-runtime-3.5_2.12-1.5.2.jar
run: just download-iceberg-jar ${{ matrix.pyspark-minor-version }}

- name: run tests
run: just ci-check -m pyspark
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/ibis-docs-main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,8 @@ jobs:

- name: checkout
uses: actions/checkout@v4
with:
fetch-depth: 0

- name: restore cache of the previously rendered notebooks
uses: actions/cache/restore@v4
Expand All @@ -67,10 +69,10 @@ jobs:
path: docs/**/.jupyter_cache

- name: build api docs
run: nix develop --ignore-environment -c just docs-apigen --verbose
run: nix develop '.#ibis311' --ignore-environment -c just docs-apigen --verbose

- name: build docs
run: nix develop --ignore-environment --keep HOME -c just docs-render
run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just docs-render

- name: cache rendered notebooks
uses: actions/cache/save@v4
Expand All @@ -79,7 +81,7 @@ jobs:
path: docs/**/.jupyter_cache

- name: build jupyterlite
run: nix develop --ignore-environment --keep HOME -c just build-jupyterlite
run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just build-jupyterlite

- name: check that all frozen computations were done before push
run: git diff --exit-code --stat
Expand Down
6 changes: 3 additions & 3 deletions .github/workflows/ibis-docs-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,13 +67,13 @@ jobs:
path: docs/**/.jupyter_cache

- name: generate api docs
run: nix develop --ignore-environment -c just docs-apigen --verbose
run: nix develop '.#ibis311' --ignore-environment -c just docs-apigen --verbose

- name: build docs
run: nix develop --ignore-environment --keep HOME -c just docs-render
run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just docs-render

- name: build jupyterlite
run: nix develop --ignore-environment --keep HOME -c just build-jupyterlite
run: nix develop '.#ibis311' --ignore-environment --keep HOME -c just build-jupyterlite

- name: check that all frozen computations were done before push
run: git diff --exit-code --stat
Expand Down
12 changes: 12 additions & 0 deletions .github/workflows/labeler.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# Applies labels to pull requests based on the globs in .github/labeler.yml
# (the default config path read by actions/labeler).
name: PR Labeler
on:
  # pull_request_target runs in the context of the base branch with a token
  # that has write access, so labels can be applied to PRs from forks.
  # NOTE(review): safe here only because no PR-controlled code is checked out
  # or executed — keep it that way if this workflow grows.
  - pull_request_target

jobs:
  labeler:
    permissions:
      contents: read # read the labeler config from the repo
      pull-requests: write # required to add labels to the PR
    runs-on: ubuntu-latest
    steps:
      - uses: actions/labeler@v5
4 changes: 4 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -218,3 +218,7 @@ Ibis is an open source project and welcomes contributions from anyone in the com
Join our community by interacting on GitHub or chatting with us on [Zulip](https://ibis-project.zulipchat.com/).

For more information visit https://ibis-project.org/.

## Governance

The Ibis project is an [independently governed](https://github.com/ibis-project/governance/blob/main/governance.md) open source community project to build and maintain the portable Python dataframe library. Ibis has contributors across a range of data companies and institutions.
16 changes: 8 additions & 8 deletions compose.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,7 @@ services:
- trino

minio:
image: bitnami/minio:2024.9.9
image: bitnami/minio:2024.9.13
environment:
MINIO_ROOT_USER: accesskey
MINIO_ROOT_PASSWORD: secretkey
Expand Down Expand Up @@ -161,7 +161,7 @@ services:
test:
- CMD-SHELL
- trino --output-format null --execute 'show schemas in hive; show schemas in memory'
image: trinodb/trino:457
image: trinodb/trino:458
ports:
- 8080:8080
networks:
Expand Down Expand Up @@ -204,7 +204,7 @@ services:
- druid

druid-coordinator:
image: apache/druid:30.0.0
image: apache/druid:30.0.1
hostname: coordinator
container_name: coordinator
volumes:
Expand All @@ -229,7 +229,7 @@ services:
- druid

druid-broker:
image: apache/druid:30.0.0
image: apache/druid:30.0.1
hostname: broker
container_name: broker
volumes:
Expand Down Expand Up @@ -257,7 +257,7 @@ services:
- druid

druid-historical:
image: apache/druid:30.0.0
image: apache/druid:30.0.1
hostname: historical
container_name: historical
volumes:
Expand All @@ -284,7 +284,7 @@ services:
- druid

druid-middlemanager:
image: apache/druid:30.0.0
image: apache/druid:30.0.1
hostname: middlemanager
container_name: middlemanager
volumes:
Expand Down Expand Up @@ -312,7 +312,7 @@ services:
- druid

druid:
image: apache/druid:30.0.0
image: apache/druid:30.0.1
hostname: router
container_name: router
volumes:
Expand Down Expand Up @@ -557,7 +557,7 @@ services:
- impala

risingwave:
image: ghcr.io/risingwavelabs/risingwave:v1.10.1
image: ghcr.io/risingwavelabs/risingwave:v2.0.0
command: "standalone --meta-opts=\" \
--advertise-addr 0.0.0.0:5690 \
--backend mem \
Expand Down
2 changes: 1 addition & 1 deletion conda/environment-arm64-flink.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ dependencies:
- pyarrow-hotfix >=0.4
- pydata-google-auth
- pydruid >=0.6.5
- pymysql >=1
- mysqlclient >=2.2.4
- pyspark >=3
- python-dateutil >=2.8.2
- python-duckdb >=0.8.1
Expand Down
2 changes: 1 addition & 1 deletion conda/environment-arm64.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ dependencies:
- pyarrow-hotfix >=0.4
- pydata-google-auth
- pydruid >=0.6.5
- pymysql >=1
- mysqlclient >=2.2.4
- pyodbc >=4.0.39
- pyspark >=3
- python-dateutil >=2.8.2
Expand Down
2 changes: 1 addition & 1 deletion conda/environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ dependencies:
- pyarrow-hotfix >=0.4
- pydata-google-auth
- pydruid >=0.6.5
- pymysql >=1
- mysqlclient >=2.2.4
- pyodbc >=4.0.39
- pyspark >=3
- python >=3.10
Expand Down
Loading

0 comments on commit 9f7d943

Please sign in to comment.