Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore(deps): lock file maintenance #9489

Merged
merged 4 commits into from
Jul 1, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions .github/workflows/ibis-backends.yml
Original file line number Diff line number Diff line change
Expand Up @@ -531,7 +531,7 @@ jobs:
title: Dask
deps:
required:
- "numpy@1.23.2"
- "numpy@1.23.5"
- "pyarrow@10.0.1"
optional:
- "dask[array,dataframe]@2022.9.1"
Expand All @@ -542,7 +542,7 @@ jobs:
title: PostgreSQL
deps:
required:
- "numpy@1.23.2"
- "numpy@1.23.5"
- "pyarrow@10.0.1"
optional:
- "psycopg2@2.8.4"
Expand All @@ -560,7 +560,7 @@ jobs:
title: PostgreSQL
deps:
required:
- "numpy@1.23.2"
- "numpy@1.23.5"
- "pyarrow@10.0.1"
optional:
- "psycopg2@2.8.4"
Expand All @@ -577,7 +577,7 @@ jobs:
title: PostgreSQL
deps:
required:
- "numpy@1.23.2"
- "numpy@1.23.5"
- "pyarrow@10.0.1"
optional:
- "psycopg2@2.8.4"
Expand All @@ -594,7 +594,7 @@ jobs:
title: Dask
deps:
required:
- "numpy@1.23.2"
- "numpy@1.23.5"
- "pyarrow@10.0.1"
optional:
- "dask[array,dataframe]@2022.9.1"
Expand Down
4 changes: 2 additions & 2 deletions ibis/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,8 @@ def connect(*args, **kwargs):
proxy._from_url = backend._from_url
proxy._to_sqlglot = backend._to_sqlglot
# Add any additional methods that should be exposed at the top level
for name in getattr(backend, "_top_level_methods", ()):
setattr(proxy, name, getattr(backend, name))
for attr in getattr(backend, "_top_level_methods", ()):
setattr(proxy, attr, getattr(backend, attr))

return proxy

Expand Down
3 changes: 2 additions & 1 deletion ibis/backends/dask/convert.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
from __future__ import annotations

import dask.dataframe as dd
import numpy as np
import pandas as pd
import pandas.api.types as pdt

Expand All @@ -25,7 +26,7 @@ def convert_column(cls, obj, dtype):

@classmethod
def convert_default(cls, s, dtype, pandas_type):
if pandas_type == object:
if pandas_type == np.object_:
func = lambda x: x if x is pd.NA else dt.normalize(dtype, x)
meta = (s.name, pandas_type)
return s.map(func, na_action="ignore", meta=meta).astype(pandas_type)
Expand Down
10 changes: 5 additions & 5 deletions ibis/backends/dask/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -113,14 +113,14 @@ def elementwise(cls, func, operands, name, dtype):
def partitionwise(cls, func, operands, name, dtype):
cols = {}
kwargs = {}
for name, operand in operands.items():
for opname, operand in operands.items():
if isinstance(operand, (tuple, list)):
for i, v in enumerate(operand):
cols[f"{name}_{i}"] = v
kwargs[name] = tuple(f"{name}_{i}" for i in range(len(operand)))
cols[f"{opname}_{i}"] = v
kwargs[opname] = tuple(f"{opname}_{i}" for i in range(len(operand)))
else:
cols[name] = operand
kwargs[name] = name
cols[opname] = operand
kwargs[opname] = opname

def mapper(df):
unpacked = {}
Expand Down
3 changes: 2 additions & 1 deletion ibis/backends/pandas/convert.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import numpy as np
import pandas as pd
import pandas.api.types as pdt

Expand All @@ -25,7 +26,7 @@ def convert_column(cls, obj, dtype):

@classmethod
def convert_default(cls, s, dtype, pandas_type):
if pandas_type == object:
if pandas_type == np.object_:
func = lambda x: x if x is pd.NA else dt.normalize(dtype, x)
return s.map(func, na_action="ignore").astype(pandas_type)
else:
Expand Down
8 changes: 4 additions & 4 deletions ibis/backends/sql/rewrites.py
Original file line number Diff line number Diff line change
Expand Up @@ -255,11 +255,11 @@ def extract_ctes(node):
dont_count = (ops.Field, ops.CountStar, ops.CountDistinctStar)

g = Graph.from_bfs(node, filter=~InstanceOf(dont_count))
for node, dependents in g.invert().items():
if isinstance(node, ops.View) or (
len(dependents) > 1 and isinstance(node, cte_types)
for op, dependents in g.invert().items():
if isinstance(op, ops.View) or (
len(dependents) > 1 and isinstance(op, cte_types)
):
result.append(node)
result.append(op)

return result

Expand Down
2 changes: 1 addition & 1 deletion ibis/backends/tests/test_client.py
Original file line number Diff line number Diff line change
Expand Up @@ -1216,7 +1216,7 @@ def test_has_operation_no_geo(con, op):
# filter out builtins that are types, except for tuples on ClickHouse
# and duckdb because tuples are used to represent lists of expressions
if isinstance(obj, type)
if (obj != tuple or backend not in ("clickhouse", "duckdb"))
if (obj is not tuple or backend not in ("clickhouse", "duckdb"))
if (backend != "pyspark" or vparse(pd.__version__) < vparse("2"))
],
)
Expand Down
4 changes: 2 additions & 2 deletions ibis/expr/types/joins.py
Original file line number Diff line number Diff line change
Expand Up @@ -369,9 +369,9 @@ def cross_join(
rname: str = "{name}_right",
):
left = self.join(right, how="cross", predicates=(), lname=lname, rname=rname)
for right in rest:
for table in rest:
left = left.join(
right, how="cross", predicates=(), lname=lname, rname=rname
table, how="cross", predicates=(), lname=lname, rname=rname
)
return left

Expand Down
4 changes: 2 additions & 2 deletions ibis/expr/types/relations.py
Original file line number Diff line number Diff line change
Expand Up @@ -1881,8 +1881,8 @@ def difference(self, table: Table, *rest: Table, distinct: bool = True) -> Table
└───────┘
"""
node = ops.Difference(self, table, distinct=distinct)
for table in rest:
node = ops.Difference(node, table, distinct=distinct)
for expr in rest:
node = ops.Difference(node, expr, distinct=distinct)
return node.to_expr()

@deprecated(as_of="9.0", instead="use table.as_scalar() instead")
Expand Down
4 changes: 0 additions & 4 deletions ibis/expr/types/strings.py
Original file line number Diff line number Diff line change
Expand Up @@ -409,8 +409,6 @@ def capitalize(self) -> StringValue:
"""
return ops.Capitalize(self).to_expr()

initcap = capitalize

@util.deprecated(
instead="use the `capitalize` method", as_of="9.0", removed_in="10.0"
)
Expand Down Expand Up @@ -635,8 +633,6 @@ def repeat(self, n: int | ir.IntegerValue) -> StringValue:
"""
return ops.Repeat(self, n).to_expr()

__mul__ = __rmul__ = repeat

def translate(self, from_str: StringValue, to_str: StringValue) -> StringValue:
"""Replace `from_str` characters in `self` with characters in `to_str`.

Expand Down
8 changes: 3 additions & 5 deletions ibis/tests/benchmarks/test_benchmarks.py
Original file line number Diff line number Diff line change
Expand Up @@ -704,8 +704,6 @@

N = 20_000_000

con = duckdb.connect()

path = str(tmp_path_factory.mktemp("duckdb") / "data.ddb")
sql = (
lambda var, table, n=N: f"""
Expand All @@ -719,9 +717,9 @@
"""
)

with duckdb.connect(path) as con:
con.execute(sql("x", table="t1"))
con.execute(sql("y", table="t2"))
with duckdb.connect(path) as cur:
cur.execute(sql("x", table="t1"))
cur.execute(sql("y", table="t2"))

Check warning on line 722 in ibis/tests/benchmarks/test_benchmarks.py

View check run for this annotation

Codecov / codecov/patch

ibis/tests/benchmarks/test_benchmarks.py#L721-L722

Added lines #L721 - L722 were not covered by tests
return path


Expand Down
12 changes: 6 additions & 6 deletions ibis/tests/expr/test_sql_builtins.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def test_zero_ifnull(functional_alltypes):

iresult = functional_alltypes.int_col.fill_null(0)

assert type(dresult.op()) == ops.Coalesce
assert type(dresult.op()) is ops.Coalesce
assert type(dresult) == ir.FloatingColumn

# Impala upconverts all ints to bigint. Hmm.
Expand All @@ -113,8 +113,8 @@ def test_ceil_floor(functional_alltypes, lineitem):
fresult = functional_alltypes.double_col.floor()
assert isinstance(cresult, ir.IntegerColumn)
assert isinstance(fresult, ir.IntegerColumn)
assert type(cresult.op()) == ops.Ceil
assert type(fresult.op()) == ops.Floor
assert type(cresult.op()) is ops.Ceil
assert type(fresult.op()) is ops.Floor

cresult = ibis.literal(1.2345).ceil()
fresult = ibis.literal(1.2345).floor()
Expand All @@ -134,7 +134,7 @@ def test_ceil_floor(functional_alltypes, lineitem):
def test_sign(functional_alltypes, lineitem):
result = functional_alltypes.double_col.sign()
assert isinstance(result, ir.FloatingColumn)
assert type(result.op()) == ops.Sign
assert type(result.op()) is ops.Sign

result = ibis.literal(1.2345).sign()
assert isinstance(result, ir.FloatingScalar)
Expand Down Expand Up @@ -171,8 +171,8 @@ def test_round(functional_alltypes, lineitem):

def _check_unary_op(expr, fname, ex_op, ex_type):
result = getattr(expr, fname)()
assert type(result.op()) == ex_op
assert type(result) == ex_type
assert type(result.op()) is ex_op
assert type(result) is ex_type


def test_coalesce_instance_method(sql_table):
Expand Down
8 changes: 4 additions & 4 deletions ibis/tests/expr/test_struct.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,13 +72,13 @@ def test_unpack_from_table(t):
def test_lift_join(t, s):
join = t.join(s, t.d == s.a.g)
result = join.a_right.lift()
with join_tables(join) as (t, s):
with join_tables(join) as (tt, ss):
expected = ops.JoinChain(
first=t,
first=tt,
rest=[
ops.JoinLink("inner", s, [t.d == s.a.g]),
ops.JoinLink("inner", ss, [tt.d == ss.a.g]),
],
values={"f": s.a.f, "g": s.a.g},
values={"f": ss.a.f, "g": ss.a.g},
)
assert result.op() == expected

Expand Down
Loading