revert: test(fixtures): ensure that most connections are cleaned up when the session ends (#10693) (#10717)
cpcloud authored Jan 24, 2025
1 parent c7270d3 commit 470e6fd
Showing 20 changed files with 49 additions and 130 deletions.
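Summary of the pattern restored throughout this commit: each backend conftest previously split connection setup into a session-scoped `backend` fixture plus a `con` fixture that read `backend.connection`; the revert folds these back into a single session-scoped `con` fixture with no explicit teardown. A minimal sketch of the restored shape, where `TestConf` stands in for each backend's `BackendTest` subclass (see the per-file diffs below):

    import pytest

    @pytest.fixture(scope="session")
    def con(data_dir, tmp_path_factory, worker_id):
        # load_data populates the backend's test database once per
        # pytest-xdist worker and returns the BackendTest instance;
        # tests only need its .connection. There is no teardown hook,
        # so the connection lives until the process exits.
        return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection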
9 changes: 2 additions & 7 deletions ibis/backends/clickhouse/tests/conftest.py
@@ -178,13 +178,8 @@ def add_catalog_and_schema(node):
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(tmp_path_factory, data_dir, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 @pytest.fixture(scope="session")
4 changes: 1 addition & 3 deletions ibis/backends/conftest.py
@@ -470,9 +470,7 @@ def ddl_backend(request, data_dir, tmp_path_factory, worker_id):
 @pytest.fixture(scope="session")
 def ddl_con(ddl_backend):
     """Instance of Client, already connected to the db (if applies)."""
-    connection = ddl_backend.connection
-    yield connection
-    connection.disconnect()
+    return ddl_backend.connection
 
 
 @pytest.fixture(
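For context on the `ddl_con` change above: a session-scoped yield fixture treats everything after `yield` as teardown, run once after the session's last test; that is where the reverted code called `disconnect()`. Returning the connection instead drops the teardown hook. A sketch of the two shapes, using a hypothetical `make_connection` helper:

    import pytest

    @pytest.fixture(scope="session")
    def con_with_teardown():
        connection = make_connection()  # hypothetical helper
        yield connection  # value injected into tests
        connection.disconnect()  # teardown: runs after the last test

    @pytest.fixture(scope="session")
    def con_without_teardown():
        return make_connection()  # no teardown; cleaned up at process exit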
9 changes: 2 additions & 7 deletions ibis/backends/datafusion/tests/conftest.py
@@ -74,13 +74,8 @@ def add_catalog_and_schema(node):
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 @pytest.fixture(scope="session")
9 changes: 2 additions & 7 deletions ibis/backends/duckdb/tests/conftest.py
@@ -138,13 +138,8 @@ def add_catalog_and_schema(node):
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 @pytest.fixture(scope="session")
9 changes: 2 additions & 7 deletions ibis/backends/mssql/tests/conftest.py
@@ -60,10 +60,5 @@ def connect(*, tmpdir, worker_id, **kw):
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
9 changes: 2 additions & 7 deletions ibis/backends/mysql/tests/conftest.py
@@ -75,10 +75,5 @@ def connect(*, tmpdir, worker_id, **kw):
 
 
 @pytest.fixture(scope="session")
-def backend(tmp_path_factory, data_dir, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(tmp_path_factory, data_dir, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
9 changes: 2 additions & 7 deletions ibis/backends/oracle/tests/conftest.py
@@ -133,13 +133,8 @@ def format_table(name: str) -> str:
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 def init_oracle_database(
9 changes: 2 additions & 7 deletions ibis/backends/polars/tests/conftest.py
@@ -44,13 +44,8 @@ def assert_series_equal(cls, left, right, *args, **kwargs) -> None:
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 @pytest.fixture(scope="session")
9 changes: 2 additions & 7 deletions ibis/backends/postgres/tests/conftest.py
@@ -69,13 +69,8 @@ def connect(*, tmpdir, worker_id, **kw):
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(tmp_path_factory, data_dir, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 @pytest.fixture(scope="module")
29 changes: 9 additions & 20 deletions ibis/backends/pyspark/tests/conftest.py
@@ -19,7 +19,6 @@
 from ibis.backends.pyspark.datatypes import PySparkSchema
 from ibis.backends.tests.base import BackendTest, ServiceBackendTest
 from ibis.backends.tests.data import json_types, topk, win
-from ibis.backends.tests.errors import PySparkConnectException
 from ibis.conftest import IS_SPARK_REMOTE, SPARK_REMOTE
 
 if TYPE_CHECKING:
@@ -379,12 +378,11 @@ def connect(*, tmpdir, worker_id, **kw):
         return con
 
 @pytest.fixture(scope="session")
-def backend_streaming(data_dir, tmp_path_factory, worker_id):
-    return TestConfForStreaming.load_data(data_dir, tmp_path_factory, worker_id)
-
-@pytest.fixture(scope="session")
-def con_streaming(backend_streaming):
-    return backend_streaming.connection
+def con_streaming(data_dir, tmp_path_factory, worker_id):
+    backend_test = TestConfForStreaming.load_data(
+        data_dir, tmp_path_factory, worker_id
+    )
+    return backend_test.connection
 
 @pytest.fixture(autouse=True, scope="function")
 def stop_active_jobs(con_streaming):
@@ -398,22 +396,13 @@ def write_to_memory(self, expr, table_name):
         df = self._session.sql(expr.compile())
         df.writeStream.format("memory").queryName(table_name).start()
 
-    def __del__(self):
-        if not SPARK_REMOTE:
-            try:  # noqa: SIM105
-                self.connection.disconnect()
-            except (AttributeError, PySparkConnectException):
-                pass
-
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    backend_test = TestConf.load_data(data_dir, tmp_path_factory, worker_id)
+    con = backend_test.connection
+
+    return con
 
 
 class IbisWindow:
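The `stop_active_jobs` fixture is truncated in this view. For orientation only, stopping leftover streaming queries between tests typically looks like the sketch below; it uses the standard PySpark `StreamingQueryManager` API, but the body shown is an assumption, not the file's exact code:

    @pytest.fixture(autouse=True, scope="function")
    def stop_active_jobs(con_streaming):
        yield
        # After each test, stop any streaming queries still running so
        # they don't leak state into the next test.
        for stream in con_streaming._session.streams.active:
            stream.stop()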
9 changes: 2 additions & 7 deletions ibis/backends/risingwave/tests/conftest.py
@@ -68,13 +68,8 @@ def connect(*, tmpdir, worker_id, port: int | None = None, **kw):
 
 
 @pytest.fixture(scope="session")
-def backend(tmp_path_factory, data_dir, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(tmp_path_factory, data_dir, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 @pytest.fixture(scope="module")
9 changes: 2 additions & 7 deletions ibis/backends/snowflake/tests/conftest.py
@@ -215,10 +215,5 @@ def connect(*, tmpdir, worker_id, **kw) -> BaseBackend:
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
9 changes: 2 additions & 7 deletions ibis/backends/sqlite/tests/conftest.py
@@ -55,10 +55,5 @@ def functional_alltypes(self) -> ir.Table:
 
 
 @pytest.fixture(scope="session")
-def backend(data_dir, tmp_path_factory, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(data_dir, tmp_path_factory, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
6 changes: 0 additions & 6 deletions ibis/backends/tests/base.py
@@ -63,12 +63,6 @@ class BackendTest(abc.ABC):
     tpc_absolute_tolerance: float | None = None
     "Absolute tolerance for floating point comparisons with pytest.approx in TPC correctness tests."
 
-    def __del__(self):
-        try:  # noqa: SIM105
-            self.connection.disconnect()
-        except AttributeError:
-            pass
-
     @property
     @abc.abstractmethod
     def deps(self) -> Iterable[str]:
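Background on the deleted `__del__` hooks (here and in the PySpark conftest above): a finalizer runs whenever the instance happens to be garbage-collected, possibly during interpreter shutdown when attributes and imported modules may already be gone, which is why the hook swallowed `AttributeError` and still could not guarantee cleanup. A more predictable alternative, shown purely as a sketch (not something ibis does), is `weakref.finalize`, which also fires at normal interpreter exit:

    import weakref

    class BackendTestSketch:
        def __init__(self, connection):
            self.connection = connection
            # Calls connection.disconnect() when the instance is collected
            # or, at the latest, at clean interpreter exit; unlike __del__,
            # a finalize callback cannot resurrect the object.
            self._finalizer = weakref.finalize(self, connection.disconnect)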
5 changes: 1 addition & 4 deletions ibis/backends/tests/errors.py
@@ -55,16 +55,13 @@
     from pyspark.errors.exceptions.base import ParseException as PySparkParseException
     from pyspark.errors.exceptions.base import PySparkValueError
     from pyspark.errors.exceptions.base import PythonException as PySparkPythonException
-    from pyspark.errors.exceptions.connect import (
-        SparkConnectException as PySparkConnectException,
-    )
     from pyspark.errors.exceptions.connect import (
         SparkConnectGrpcException as PySparkConnectGrpcException,
     )
 except ImportError:
     PySparkParseException = PySparkAnalysisException = PySparkArithmeticException = (
         PySparkPythonException
-    ) = PySparkConnectException = PySparkConnectGrpcException = PySparkValueError = None
+    ) = PySparkConnectGrpcException = PySparkValueError = None
 
 try:
     from google.api_core.exceptions import BadRequest as GoogleBadRequest
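The chained assignment restored above is this file's pattern for optional backend dependencies: one `try` block imports every exception alias, and the `except ImportError` branch binds them all to `None` in a single statement, so `pytest.mark` declarations such as `raises=PySparkParseException` never raise `NameError` when pyspark is not installed. A stripped-down sketch of the pattern:

    try:
        from pyspark.errors.exceptions.base import (
            ParseException as PySparkParseException,
        )
        from pyspark.errors.exceptions.base import PySparkValueError
    except ImportError:
        # Bind every alias in one chained assignment so markers can
        # reference the names unconditionally.
        PySparkParseException = PySparkValueError = None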
11 changes: 6 additions & 5 deletions ibis/backends/tests/test_array.py
@@ -22,10 +22,11 @@
     GoogleBadRequest,
     MySQLOperationalError,
     PolarsComputeError,
+    PsycoPg2ArraySubscriptError,
+    PsycoPg2IndeterminateDatatype,
     PsycoPg2InternalError,
     PsycoPg2ProgrammingError,
-    PsycoPgInvalidTextRepresentation,
-    PsycoPgSyntaxError,
+    PsycoPg2SyntaxError,
     Py4JJavaError,
     PyAthenaDatabaseError,
@@ -1117,7 +1118,7 @@ def test_unnest_struct(con):
 
 
 @builtin_array
-@pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError)
+@pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError)
 @pytest.mark.notimpl(["risingwave"], raises=PsycoPg2InternalError)
 @pytest.mark.notimpl(
     ["trino"], reason="inserting maps into structs doesn't work", raises=TrinoUserError
@@ -1208,7 +1209,7 @@ def test_zip_null(con, fn):
 
 
 @builtin_array
-@pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError)
+@pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError)
 @pytest.mark.notimpl(["risingwave"], raises=PsycoPg2ProgrammingError)
 @pytest.mark.notimpl(["datafusion"], raises=Exception, reason="not yet supported")
 @pytest.mark.notimpl(
@@ -1768,7 +1769,7 @@ def test_table_unnest_column_expr(backend):
 @pytest.mark.notimpl(["datafusion", "polars"], raises=com.OperationNotDefinedError)
 @pytest.mark.notimpl(["trino"], raises=TrinoUserError)
 @pytest.mark.notimpl(["athena"], raises=PyAthenaOperationalError)
-@pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError)
+@pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError)
 @pytest.mark.notimpl(["risingwave"], raises=PsycoPg2ProgrammingError)
 @pytest.mark.notyet(
     ["risingwave"], raises=PsycoPg2InternalError, reason="not supported in risingwave"
@@ -1889,7 +1890,7 @@ def test_array_agg_bool(con, data, agg, baseline_func):
 
 @pytest.mark.notyet(
     ["postgres"],
-    raises=PsycoPgInvalidTextRepresentation,
+    raises=PsycoPg2ArraySubscriptError,
     reason="all dimensions must match in size",
 )
 @pytest.mark.notimpl(["risingwave", "flink"], raises=com.OperationNotDefinedError)
4 changes: 2 additions & 2 deletions ibis/backends/tests/test_client.py
@@ -32,7 +32,7 @@
     ImpalaHiveServer2Error,
     OracleDatabaseError,
     PsycoPg2InternalError,
-    PsycoPgUndefinedObject,
+    PsycoPg2UndefinedObject,
     Py4JJavaError,
     PyAthenaDatabaseError,
     PyODBCProgrammingError,
@@ -725,7 +725,7 @@ def test_list_database_contents(con):
 @pytest.mark.notyet(["databricks"], raises=DatabricksServerOperationError)
 @pytest.mark.notyet(["bigquery"], raises=com.UnsupportedBackendType)
 @pytest.mark.notyet(
-    ["postgres"], raises=PsycoPgUndefinedObject, reason="no unsigned int types"
+    ["postgres"], raises=PsycoPg2UndefinedObject, reason="no unsigned int types"
 )
 @pytest.mark.notyet(
     ["oracle"], raises=OracleDatabaseError, reason="no unsigned int types"
4 changes: 2 additions & 2 deletions ibis/backends/tests/test_generic.py
@@ -25,7 +25,7 @@
     OracleDatabaseError,
     PolarsInvalidOperationError,
     PsycoPg2InternalError,
-    PsycoPgSyntaxError,
+    PsycoPg2SyntaxError,
     Py4JJavaError,
     PyAthenaDatabaseError,
     PyAthenaOperationalError,
@@ -1736,7 +1736,7 @@ def hash_256(col):
         pytest.mark.notimpl(["flink"], raises=Py4JJavaError),
         pytest.mark.notimpl(["druid"], raises=PyDruidProgrammingError),
         pytest.mark.notimpl(["oracle"], raises=OracleDatabaseError),
-        pytest.mark.notimpl(["postgres"], raises=PsycoPgSyntaxError),
+        pytest.mark.notimpl(["postgres"], raises=PsycoPg2SyntaxError),
         pytest.mark.notimpl(["risingwave"], raises=PsycoPg2InternalError),
         pytest.mark.notimpl(["snowflake"], raises=AssertionError),
         pytest.mark.never(
8 changes: 4 additions & 4 deletions ibis/backends/tests/test_struct.py
@@ -13,7 +13,7 @@
     DatabricksServerOperationError,
     PolarsColumnNotFoundError,
     PsycoPg2InternalError,
-    PsycoPgSyntaxError,
+    PsycoPg2SyntaxError,
     Py4JJavaError,
     PyAthenaDatabaseError,
     PyAthenaOperationalError,
@@ -138,7 +138,7 @@ def test_collect_into_struct(alltypes):
 
 
 @pytest.mark.notimpl(
-    ["postgres"], reason="struct literals not implemented", raises=PsycoPgSyntaxError
+    ["postgres"], reason="struct literals not implemented", raises=PsycoPg2SyntaxError
 )
 @pytest.mark.notimpl(
     ["risingwave"],
@@ -155,7 +155,7 @@ def test_field_access_after_case(con):
 
 
 @pytest.mark.notimpl(
-    ["postgres"], reason="struct literals not implemented", raises=PsycoPgSyntaxError
+    ["postgres"], reason="struct literals not implemented", raises=PsycoPg2SyntaxError
 )
 @pytest.mark.notimpl(["flink"], raises=IbisError, reason="not implemented in ibis")
 @pytest.mark.parametrize(
@@ -242,7 +242,7 @@ def test_keyword_fields(con, nullable):
 
 @pytest.mark.notyet(
     ["postgres"],
-    raises=PsycoPgSyntaxError,
+    raises=PsycoPg2SyntaxError,
     reason="sqlglot doesn't implement structs for postgres correctly",
 )
 @pytest.mark.notyet(
9 changes: 2 additions & 7 deletions ibis/backends/trino/tests/conftest.py
@@ -162,13 +162,8 @@ def awards_players(self):
 
 
 @pytest.fixture(scope="session")
-def backend(tmp_path_factory, data_dir, worker_id):
-    return TestConf.load_data(data_dir, tmp_path_factory, worker_id)
-
-
-@pytest.fixture(scope="session")
-def con(backend):
-    return backend.connection
+def con(tmp_path_factory, data_dir, worker_id):
+    return TestConf.load_data(data_dir, tmp_path_factory, worker_id).connection
 
 
 def generate_tpc_tables(suite_name, *, data_dir):
