diff --git a/cassandra/cqlengine/connection.py b/cassandra/cqlengine/connection.py
index d98020b8a8..2dc132734e 100644
--- a/cassandra/cqlengine/connection.py
+++ b/cassandra/cqlengine/connection.py
@@ -324,7 +324,7 @@ def setup(
     :param int consistency: The global default :class:`~.ConsistencyLevel` - default is the same as :attr:`.Session.default_consistency_level`
     :param bool lazy_connect: True if should not connect until first use
     :param bool retry_connect: True if we should retry to connect even if there was a connection failure initially
-    :param \*\*kwargs: Pass-through keyword arguments for :class:`cassandra.cluster.Cluster`
+    :param kwargs: Pass-through keyword arguments for :class:`cassandra.cluster.Cluster`
     """
 
     from cassandra.cqlengine import models
diff --git a/cassandra/cqlengine/query.py b/cassandra/cqlengine/query.py
index 11f664ec02..070fd5a612 100644
--- a/cassandra/cqlengine/query.py
+++ b/cassandra/cqlengine/query.py
@@ -206,8 +206,8 @@ def add_callback(self, fn, *args, **kwargs):
 
         :param fn: Callable object
         :type fn: callable
-        :param \*args: Positional arguments to be passed to the callback at the time of execution
-        :param \*\*kwargs: Named arguments to be passed to the callback at the time of execution
+        :param args: Positional arguments to be passed to the callback at the time of execution
+        :param kwargs: Named arguments to be passed to the callback at the time of execution
         """
         if not callable(fn):
             raise ValueError("Value for argument 'fn' is {0} and is not a callable object.".format(type(fn)))
@@ -277,8 +277,8 @@ class ContextQuery(object):
 
     A Context manager to allow a Model to switch context easily. Presently, the context only specifies a keyspace for model IO.
 
-    :param \*args: One or more models. A model should be a class type, not an instance.
-    :param \*\*kwargs: (optional) Context parameters: can be *keyspace* or *connection*
+    :param args: One or more models. A model should be a class type, not an instance.
+    :param kwargs: (optional) Context parameters: can be *keyspace* or *connection*
 
     For example:
 
diff --git a/cassandra/datastax/cloud/__init__.py b/cassandra/datastax/cloud/__init__.py
index ecb4a73fd4..f6829ad68f 100644
--- a/cassandra/datastax/cloud/__init__.py
+++ b/cassandra/datastax/cloud/__init__.py
@@ -23,7 +23,7 @@
 
 _HAS_SSL = True
 try:
-    from ssl import SSLContext, PROTOCOL_TLS, CERT_REQUIRED
+    from ssl import SSLContext, PROTOCOL_TLS_CLIENT, CERT_REQUIRED
 except:
     _HAS_SSL = False
 
@@ -170,7 +170,7 @@ def parse_metadata_info(config, http_data):
 
 
 def _ssl_context_from_cert(ca_cert_location, cert_location, key_location):
-    ssl_context = SSLContext(PROTOCOL_TLS)
+    ssl_context = SSLContext(PROTOCOL_TLS_CLIENT)
     ssl_context.load_verify_locations(ca_cert_location)
     ssl_context.verify_mode = CERT_REQUIRED
     ssl_context.load_cert_chain(certfile=cert_location, keyfile=key_location)
diff --git a/cassandra/util.py b/cassandra/util.py
index dd5c58b01d..486284a058 100644
--- a/cassandra/util.py
+++ b/cassandra/util.py
@@ -34,7 +34,7 @@
 from cassandra import DriverException
 
 DATETIME_EPOC = datetime.datetime(1970, 1, 1)
-UTC_DATETIME_EPOC = datetime.datetime.utcfromtimestamp(0)
+UTC_DATETIME_EPOC = datetime.datetime.fromtimestamp(0, tz=datetime.timezone.utc)
 
 _nan = float('nan')
 
diff --git a/pytest.ini b/pytest.ini
index 0846273427..e95bb2dd87 100644
--- a/pytest.ini
+++ b/pytest.ini
@@ -3,3 +3,16 @@ log_format = %(asctime)s.%(msecs)03d %(levelname)s [%(module)s:%(lineno)s]: %(me
 log_level = DEBUG
 log_date_format = %Y-%m-%d %H:%M:%S
 xfail_strict=true
+
+filterwarnings =
+    error
+    ignore::pytest.PytestCollectionWarning
+    ignore::ResourceWarning
+    ignore:distutils Version classes are deprecated:DeprecationWarning:eventlet.support.greenlets
+    ignore:X509Extension support in pyOpenSSL is deprecated.:DeprecationWarning
+    ignore:CRL support in pyOpenSSL is deprecated:DeprecationWarning
+    ignore:sign\(\) is deprecated:DeprecationWarning
+    ignore:verify\(\) is deprecated:DeprecationWarning
+    ignore:pkg_resources is deprecated as an API:DeprecationWarning:gevent.events
+    ignore:pkg_resources.declare_namespace:DeprecationWarning:gevent.events
+    ignore:"@coroutine" decorator is deprecated since Python 3.8:DeprecationWarning:asynctest.*
\ No newline at end of file
diff --git a/tests/integration/cqlengine/__init__.py b/tests/integration/cqlengine/__init__.py
index 5b7d16c535..a8ba17c01c 100644
--- a/tests/integration/cqlengine/__init__.py
+++ b/tests/integration/cqlengine/__init__.py
@@ -77,7 +77,7 @@ def wrapped_function(*args, **kwargs):
             # DeMonkey Patch our code
             cassandra.cqlengine.connection.execute = original_function
             # Check to see if we have a pre-existing test case to work from.
-            if len(args) is 0:
+            if len(args) == 0:
                 test_case = unittest.TestCase("__init__")
             else:
                 test_case = args[0]
diff --git a/tests/integration/cqlengine/query/test_queryoperators.py b/tests/integration/cqlengine/query/test_queryoperators.py
index fd148bafcf..fbf666cf21 100644
--- a/tests/integration/cqlengine/query/test_queryoperators.py
+++ b/tests/integration/cqlengine/query/test_queryoperators.py
@@ -154,6 +154,6 @@ def test_named_table_pk_token_function(self):
         query = named.all().limit(1)
         first_page = list(query)
         last = first_page[-1]
-        self.assertTrue(len(first_page) is 1)
+        self.assertTrue(len(first_page) == 1)
         next_page = list(query.filter(pk__token__gt=functions.Token(last.key)))
-        self.assertTrue(len(next_page) is 1)
+        self.assertTrue(len(next_page) == 1)
diff --git a/tests/integration/standard/test_cluster.py b/tests/integration/standard/test_cluster.py
index 43356dbd82..0bb5888544 100644
--- a/tests/integration/standard/test_cluster.py
+++ b/tests/integration/standard/test_cluster.py
@@ -1,3 +1,4 @@
+
 # Copyright DataStax, Inc.
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -150,7 +151,7 @@ def test_raise_error_on_control_connection_timeout(self):
         get_node(1).pause()
         cluster = TestCluster(contact_points=['127.0.0.1'], connect_timeout=1)
 
-        with self.assertRaisesRegex(NoHostAvailable, "OperationTimedOut\('errors=Timed out creating connection \(1 seconds\)"):
+        with self.assertRaisesRegex(NoHostAvailable, r"OperationTimedOut\('errors=Timed out creating connection \(1 seconds\)"):
            cluster.connect()
 
        cluster.shutdown()
diff --git a/tests/integration/standard/test_connection.py b/tests/integration/standard/test_connection.py
index 0220ffbb1a..3f7b5f9d3f 100644
--- a/tests/integration/standard/test_connection.py
+++ b/tests/integration/standard/test_connection.py
@@ -181,7 +181,7 @@ def wait_for_connections(self, host, cluster):
         while(retry < 300):
             retry += 1
             connections = self.fetch_connections(host, cluster)
-            if len(connections) is not 0:
+            if len(connections) != 0:
                 return connections
             time.sleep(.1)
         self.fail("No new connections found")
@@ -191,7 +191,7 @@ def wait_for_no_connections(self, host, cluster):
         while(retry < 100):
             retry += 1
             connections = self.fetch_connections(host, cluster)
-            if len(connections) is 0:
+            if len(connections) == 0:
                 return
             time.sleep(.5)
         self.fail("Connections never cleared")
diff --git a/tests/integration/standard/test_metadata.py b/tests/integration/standard/test_metadata.py
index c561491ab4..46f976facc 100644
--- a/tests/integration/standard/test_metadata.py
+++ b/tests/integration/standard/test_metadata.py
@@ -1646,7 +1646,7 @@ def test_function_no_parameters(self):
 
         with self.VerifiedFunction(self, **kwargs) as vf:
             fn_meta = self.keyspace_function_meta[vf.signature]
-            self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*%s\(\) .*" % kwargs['name'])
+            self.assertRegex(fn_meta.as_cql_query(), r"CREATE FUNCTION.*%s\(\) .*" % kwargs['name'])
 
     def test_functions_follow_keyspace_alter(self):
         """
@@ -1694,12 +1694,12 @@ def test_function_cql_called_on_null(self):
         kwargs['called_on_null_input'] = True
         with self.VerifiedFunction(self, **kwargs) as vf:
             fn_meta = self.keyspace_function_meta[vf.signature]
-            self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) CALLED ON NULL INPUT RETURNS .*")
+            self.assertRegex(fn_meta.as_cql_query(), r"CREATE FUNCTION.*\) CALLED ON NULL INPUT RETURNS .*")
 
         kwargs['called_on_null_input'] = False
         with self.VerifiedFunction(self, **kwargs) as vf:
             fn_meta = self.keyspace_function_meta[vf.signature]
-            self.assertRegex(fn_meta.as_cql_query(), "CREATE FUNCTION.*\) RETURNS NULL ON NULL INPUT RETURNS .*")
+            self.assertRegex(fn_meta.as_cql_query(), r"CREATE FUNCTION.*\) RETURNS NULL ON NULL INPUT RETURNS .*")
 
     @requires_java_udf
diff --git a/tests/integration/standard/test_query.py b/tests/integration/standard/test_query.py
index fdab4e7a0a..10844c4ea3 100644
--- a/tests/integration/standard/test_query.py
+++ b/tests/integration/standard/test_query.py
@@ -168,7 +168,7 @@ def test_client_ip_in_trace(self):
         client_ip = trace.client
 
         # Ip address should be in the local_host range
-        pat = re.compile("127.0.0.\d{1,3}")
+        pat = re.compile(r"127.0.0.\d{1,3}")
 
         # Ensure that ip is set
         self.assertIsNotNone(client_ip, "Client IP was not set in trace with C* >= 2.2")
diff --git a/tests/integration/standard/test_scylla_cloud.py b/tests/integration/standard/test_scylla_cloud.py
index d1a22f8826..57392e7936 100644
--- a/tests/integration/standard/test_scylla_cloud.py
+++ b/tests/integration/standard/test_scylla_cloud.py
@@ -1,4 +1,5 @@
 import logging
+import warnings
 import os.path
 from unittest import TestCase
 from ccmlib.utils.ssl_utils import generate_ssl_stores
@@ -11,7 +12,9 @@
 from cassandra.io.libevreactor import LibevConnection
 supported_connection_classes = [LibevConnection, TwistedConnection]
 try:
-    from cassandra.io.asyncorereactor import AsyncoreConnection
+    with warnings.catch_warnings():
+        warnings.filterwarnings("ignore", category=DeprecationWarning, message="The asyncore module is deprecated")
+        from cassandra.io.asyncorereactor import AsyncoreConnection
     supported_connection_classes += [AsyncoreConnection]
 except ImportError:
     pass
diff --git a/tests/unit/advanced/test_graph.py b/tests/unit/advanced/test_graph.py
index a98a48c82f..bc8b54351c 100644
--- a/tests/unit/advanced/test_graph.py
+++ b/tests/unit/advanced/test_graph.py
@@ -257,6 +257,7 @@ def test_with_graph_protocol(self):
 
     def test_init_unknown_kwargs(self):
         with warnings.catch_warnings(record=True) as w:
+            warnings.simplefilter("always")
             GraphOptions(unknown_param=42)
         self.assertEqual(len(w), 1)
         self.assertRegex(str(w[0].message), r"^Unknown keyword.*GraphOptions.*")
diff --git a/tests/unit/advanced/test_insights.py b/tests/unit/advanced/test_insights.py
index 4f1dd7ac12..ede8e4a70e 100644
--- a/tests/unit/advanced/test_insights.py
+++ b/tests/unit/advanced/test_insights.py
@@ -14,6 +14,7 @@
 
 import unittest
+import pytest
 
 import logging
 
 from mock import sentinel
@@ -103,6 +104,7 @@ def superclass_sentinel_serializer(obj):
 
 
 class TestConfigAsDict(unittest.TestCase):
     # graph/query.py
+    @pytest.mark.filterwarnings("ignore:Unknown keyword argument received for GraphOptions:UserWarning")
     def test_graph_options(self):
         self.maxDiff = None
diff --git a/tests/unit/advanced/test_policies.py b/tests/unit/advanced/test_policies.py
index b8e4a4e757..4e1148956d 100644
--- a/tests/unit/advanced/test_policies.py
+++ b/tests/unit/advanced/test_policies.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 import unittest
+import pytest
 
 from mock import Mock
@@ -29,6 +30,7 @@ def get_host(self, addr):
         return self.hosts.get(addr)
 
 
+@pytest.mark.filterwarnings("ignore:DSELoadBalancingPolicy will be removed:DeprecationWarning")
 class DSELoadBalancingPolicyTest(unittest.TestCase):
 
     def test_no_target(self):
diff --git a/tests/unit/cython/bytesio_testhelper.pyx b/tests/unit/cython/bytesio_testhelper.pyx
index 7ba91bc4c0..37e76ab332 100644
--- a/tests/unit/cython/bytesio_testhelper.pyx
+++ b/tests/unit/cython/bytesio_testhelper.pyx
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# cython: language_level=3
+
 from cassandra.bytesio cimport BytesIOReader
 
 def test_read1(assert_equal, assert_raises):
diff --git a/tests/unit/cython/types_testhelper.pyx b/tests/unit/cython/types_testhelper.pyx
index 55fd310837..7f59b8419b 100644
--- a/tests/unit/cython/types_testhelper.pyx
+++ b/tests/unit/cython/types_testhelper.pyx
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# cython: language_level=3
+
 import calendar
 import datetime
 import time
diff --git a/tests/unit/cython/utils_testhelper.pyx b/tests/unit/cython/utils_testhelper.pyx
index fe67691aa8..10127f3b47 100644
--- a/tests/unit/cython/utils_testhelper.pyx
+++ b/tests/unit/cython/utils_testhelper.pyx
@@ -12,6 +12,8 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# cython: language_level=3
+
 import datetime
 
 from cassandra.cython_utils cimport datetime_from_timestamp
diff --git a/tests/unit/io/utils.py b/tests/unit/io/utils.py
index 09175ce8c1..08373309c1 100644
--- a/tests/unit/io/utils.py
+++ b/tests/unit/io/utils.py
@@ -123,7 +123,7 @@ def submit_and_wait_for_completion(unit_test, create_timer, start, end, incremen
         pending_callbacks.append(callback)
 
     # wait for all the callbacks associated with the timers to be invoked
-    while len(pending_callbacks) is not 0:
+    while len(pending_callbacks) != 0:
         for callback in pending_callbacks:
             if callback.was_invoked():
                 pending_callbacks.remove(callback)
@@ -233,7 +233,7 @@ def make_error_body(self, code, msg):
     def make_msg(self, header, body=binary_type()):
         return header + uint32_pack(len(body)) + body
 
-    def test_successful_connection(self):
+    def _test_successful_connection(self):
         c = self.make_connection()
 
         # let it write the OptionsMessage
@@ -255,6 +255,9 @@ def test_successful_connection(self):
         self.assertTrue(c.connected_event.is_set())
         return c
 
+    def test_successful_connection(self):
+        self._test_successful_connection()
+
     def test_eagain_on_buffer_size(self):
         self._check_error_recovery_on_buffer_size(errno.EAGAIN)
 
@@ -272,7 +275,7 @@ def test_sslwantwrite_on_buffer_size(self):
                                                    error_class=ssl.SSLError)
 
     def _check_error_recovery_on_buffer_size(self, error_code, error_class=socket_error):
-        c = self.test_successful_connection()
+        c = self._test_successful_connection()
 
         # current data, used by the recv side_effect
         message_chunks = None
diff --git a/tests/unit/test_cluster.py b/tests/unit/test_cluster.py
index 16487397c2..981cad26e3 100644
--- a/tests/unit/test_cluster.py
+++ b/tests/unit/test_cluster.py
@@ -12,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
 import unittest
+import pytest
 import logging
 import six
@@ -275,6 +276,9 @@ def test_default_exec_parameters(self):
         self.assertEqual(cluster.profile_manager.default.row_factory, named_tuple_factory)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Setting the consistency level at the session level will be removed in 4.0:DeprecationWarning")
     def test_default_legacy(self):
         cluster = Cluster(load_balancing_policy=RoundRobinPolicy(), default_retry_policy=DowngradingConsistencyRetryPolicy())
         self.assertEqual(cluster._config_mode, _ConfigMode.LEGACY)
@@ -322,6 +326,8 @@ def test_serial_consistency_level_validation(self):
         ep = ExecutionProfile(RoundRobinPolicy(), serial_consistency_level=42)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
     def test_statement_params_override_legacy(self):
         cluster = Cluster(load_balancing_policy=RoundRobinPolicy(), default_retry_policy=DowngradingConsistencyRetryPolicy())
         self.assertEqual(cluster._config_mode, _ConfigMode.LEGACY)
@@ -343,6 +349,7 @@ def test_statement_params_override_legacy(self):
         self._verify_response_future_profile(rf, expected_profile)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
     def test_statement_params_override_profile(self):
         non_default_profile = ExecutionProfile(RoundRobinPolicy(), *[object() for _ in range(2)])
         cluster = Cluster(execution_profiles={'non-default': non_default_profile})
@@ -367,6 +374,9 @@ def test_statement_params_override_profile(self):
         self._verify_response_future_profile(rf, expected_profile)
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
+    @pytest.mark.filterwarnings("ignore:Setting the consistency level at the session level will be removed in 4.0:DeprecationWarning")
     def test_no_profile_with_legacy(self):
         # don't construct with both
         self.assertRaises(ValueError, Cluster, load_balancing_policy=RoundRobinPolicy(), execution_profiles={'a': ExecutionProfile()})
@@ -393,6 +403,7 @@ def test_no_profile_with_legacy(self):
         self.assertRaises(ValueError, session.execute_async, "query", execution_profile='some name here')
 
     @mock_session_pools
+    @pytest.mark.filterwarnings("ignore:Setting the consistency level at the session level will be removed in 4.0:DeprecationWarning")
     def test_no_legacy_with_profile(self):
         cluster_init = Cluster(execution_profiles={'name': ExecutionProfile()})
         cluster_add = Cluster()
@@ -513,6 +524,7 @@ def _check_warning_on_no_lbp_with_contact_points(self, cluster_kwargs):
         self.assertIn('please specify a load-balancing policy', warning_message)
         self.assertIn("contact_points = ['127.0.0.1']", warning_message)
 
+    @pytest.mark.filterwarnings("ignore:Legacy execution parameters will be removed in 4.0:DeprecationWarning")
     def test_no_warning_on_contact_points_with_lbp_legacy_mode(self):
         """
         Test that users aren't warned when they instantiate a Cluster object
diff --git a/tests/unit/test_exception.py b/tests/unit/test_exception.py
index b39b22239c..5c8e8d9ecf 100644
--- a/tests/unit/test_exception.py
+++ b/tests/unit/test_exception.py
@@ -29,7 +29,7 @@ def extract_consistency(self, msg):
         :param msg: message with consistency value
         :return: String representing consistency value
         """
-        match = re.search("'consistency':\s+'([\w\s]+)'", msg)
+        match = re.search(r"'consistency':\s+'([\w\s]+)'", msg)
         return match and match.group(1)
 
     def test_timeout_consistency(self):
diff --git a/tests/unit/test_metadata.py b/tests/unit/test_metadata.py
index b0a8b63b16..10d8293b50 100644
--- a/tests/unit/test_metadata.py
+++ b/tests/unit/test_metadata.py
@@ -849,9 +849,9 @@ def test_strip_frozen(self):
         argument_to_expected_results = [
             ('int', 'int'),
             ('tuple<text>', 'tuple<text>'),
-            (r'map<"!@#$%^&*()[]\ frozen >>>", int>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),  # A valid UDT name
+            (r'map<"!@#$%^&*()[]\\ frozen >>>", int>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),  # A valid UDT name
             ('frozen<tuple<text>>', 'tuple<text>'),
-            (r'frozen<map<"!@#$%^&*()[]\ frozen >>>", int>>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),
+            (r'frozen<map<"!@#$%^&*()[]\\ frozen >>>", int>>', r'map<"!@#$%^&*()[]\ frozen >>>", int>'),
             ('frozen<map<frozen<tuple<int, frozen<list<text>>, int>>, frozen<map<int, frozen<tuple<int>>>>>>',
              'map<tuple<int, list<text>, int>, map<int, tuple<int>>>'),
         ]
diff --git a/tests/unit/test_policies.py b/tests/unit/test_policies.py
index db9eae6324..1e15a4bd37 100644
--- a/tests/unit/test_policies.py
+++ b/tests/unit/test_policies.py
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 import unittest
+import pytest
 
 from itertools import islice, cycle
 from mock import Mock, patch, call
@@ -1134,6 +1135,7 @@ def test_unavailable(self):
         self.assertEqual(consistency, None)
 
 
+@pytest.mark.filterwarnings("ignore:DowngradingConsistencyRetryPolicy:DeprecationWarning")
 class DowngradingConsistencyRetryPolicyTest(unittest.TestCase):
 
     def test_read_timeout(self):
diff --git a/tests/unit/test_response_future.py b/tests/unit/test_response_future.py
index 82da9e0049..ac38b0c7a2 100644
--- a/tests/unit/test_response_future.py
+++ b/tests/unit/test_response_future.py
@@ -82,7 +82,7 @@ def test_result_message(self):
         expected_result = (object(), object())
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
     def test_unknown_result_class(self):
@@ -128,7 +128,7 @@ def test_other_result_message_kind(self):
         rf.send_request()
         result = Mock(spec=ResultMessage, kind=999, results=[1, 2, 3])
         rf._set_result(None, None, None, result)
-        self.assertEqual(rf.result()[0], result)
+        self.assertEqual(rf.result().one(), result)
 
     def test_heartbeat_defunct_deadlock(self):
         """
@@ -396,7 +396,7 @@ def test_first_pool_shutdown(self):
         expected_result = (object(), object())
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
     def test_timeout_getting_connection_from_pool(self):
@@ -420,7 +420,7 @@ def test_timeout_getting_connection_from_pool(self):
         expected_result = (object(), object())
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        self.assertEqual(rf.result()[0], expected_result)
+        self.assertEqual(rf.result().one(), expected_result)
 
         # make sure the exception is recorded correctly
         self.assertEqual(rf._errors, {'ip1': exc})
@@ -438,7 +438,7 @@ def test_callback(self):
 
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
         callback.assert_called_once_with([expected_result], arg, **kwargs)
 
@@ -488,7 +488,7 @@ def test_multiple_callbacks(self):
 
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
 
-        result = rf.result()[0]
+        result = rf.result().one()
         self.assertEqual(result, expected_result)
 
         callback.assert_called_once_with([expected_result], arg, **kwargs)
@@ -561,7 +561,7 @@ def test_add_callbacks(self):
                          errback=self.assertIsInstance, errback_args=(Exception,))
 
         rf._set_result(None, None, None, self.make_mock_response(expected_result[0], expected_result[1]))
-        self.assertEqual(rf.result()[0], expected_result)
+        self.assertEqual(rf.result().one(), expected_result)
 
         callback.assert_called_once_with([expected_result], arg, **kwargs)
 
diff --git a/tests/unit/test_resultset.py b/tests/unit/test_resultset.py
index 97002d90d7..5163ac962d 100644
--- a/tests/unit/test_resultset.py
+++ b/tests/unit/test_resultset.py
@@ -14,6 +14,7 @@
 from cassandra.query import named_tuple_factory, dict_factory, tuple_factory
 
 import unittest
+import pytest
 
 from mock import Mock, PropertyMock, patch
 
@@ -51,6 +52,7 @@ def test_iter_paged_with_empty_pages(self):
         itr = iter(rs)
         self.assertListEqual(list(itr), expected)
 
+    @pytest.mark.filterwarnings("ignore:ResultSet indexing support will be removed in 4.0:DeprecationWarning")
     def test_list_non_paged(self):
         # list access on RS for backwards-compatibility
         expected = list(range(10))
@@ -78,6 +80,7 @@ def test_has_more_pages(self):
         self.assertTrue(rs.has_more_pages)
         self.assertFalse(rs.has_more_pages)
 
+    @pytest.mark.filterwarnings("ignore:ResultSet indexing support will be removed in 4.0:DeprecationWarning")
     def test_iterate_then_index(self):
         # RuntimeError if indexing with no pages
         expected = list(range(10))
@@ -113,6 +116,7 @@ def test_iterate_then_index(self):
         self.assertFalse(rs)
         self.assertFalse(list(rs))
 
+    @pytest.mark.filterwarnings("ignore:ResultSet indexing support will be removed in 4.0:DeprecationWarning")
     def test_index_list_mode(self):
         # no pages
         expected = list(range(10))
@@ -152,7 +156,7 @@ def test_eq(self):
 
         # results can be iterated or indexed once we're materialized
         self.assertListEqual(list(rs), expected)
-        self.assertEqual(rs[9], expected[9])
+        self.assertEqual(list(rs)[9], expected[9])
         self.assertTrue(rs)
 
         # pages
@@ -165,7 +169,7 @@ def test_eq(self):
 
         # results can be iterated or indexed once we're materialized
         self.assertListEqual(list(rs), expected)
-        self.assertEqual(rs[9], expected[9])
+        self.assertEqual(list(rs)[9], expected[9])
         self.assertTrue(rs)
 
     def test_bool(self):
diff --git a/tests/unit/test_types.py b/tests/unit/test_types.py
index af3b327ef8..7724a46201 100644
--- a/tests/unit/test_types.py
+++ b/tests/unit/test_types.py
@@ -195,7 +195,7 @@ def test_empty_value(self):
 
     def test_datetype(self):
         now_time_seconds = time.time()
-        now_datetime = datetime.datetime.utcfromtimestamp(now_time_seconds)
+        now_datetime = datetime.datetime.fromtimestamp(now_time_seconds, tz=utc_timezone)
 
         # Cassandra timestamps in millis
         now_timestamp = now_time_seconds * 1e3
@@ -206,7 +206,7 @@ def test_datetype(self):
         # deserialize
         # epoc
         expected = 0
-        self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime.utcfromtimestamp(expected))
+        self.assertEqual(DateType.deserialize(int64_pack(1000 * expected), 0), datetime.datetime.fromtimestamp(expected, tz=utc_timezone).replace(tzinfo=None))
 
         # beyond 32b
         expected = 2 ** 33
@@ -335,7 +335,7 @@ def test_month_rounding_creation_failure(self):
         @jira_ticket PYTHON-912
         """
         feb_stamp = ms_timestamp_from_datetime(
-            datetime.datetime(2018, 2, 25, 18, 59, 59, 0)
+            datetime.datetime(2018, 2, 25, 18, 59, 59, 0, tzinfo=utc_timezone)
         )
 
         dr = DateRange(OPEN_BOUND, DateRangeBound(feb_stamp, DateRangePrecision.MONTH))
@@ -344,7 +344,7 @@ def test_month_rounding_creation_failure(self):
 
         # Leap year
         feb_stamp_leap_year = ms_timestamp_from_datetime(
-            datetime.datetime(2016, 2, 25, 18, 59, 59, 0)
+            datetime.datetime(2016, 2, 25, 18, 59, 59, 0, tzinfo=utc_timezone)
         )
 
         dr = DateRange(OPEN_BOUND, DateRangeBound(feb_stamp_leap_year, DateRangePrecision.MONTH))
@@ -372,7 +372,7 @@ def test_deserialize_single_value(self):
         self.assertEqual(
             DateRangeType.deserialize(serialized, 5),
             util.DateRange(value=util.DateRangeBound(
-                value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000),
+                value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000, tzinfo=utc_timezone),
                 precision='HOUR')
             )
         )
@@ -387,11 +387,11 @@ def test_deserialize_closed_range(self):
             DateRangeType.deserialize(serialized, 5),
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 0, 0),
+                    value=datetime.datetime(2017, 2, 1, 0, 0, tzinfo=utc_timezone),
                     precision='DAY'
                 ),
                 upper_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, 404000, tzinfo=utc_timezone),
                     precision='MILLISECOND'
                 )
             )
@@ -406,7 +406,7 @@ def test_deserialize_open_high(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 0),
+                    value=datetime.datetime(2017, 2, 1, 15, 0, tzinfo=utc_timezone),
                     precision='HOUR'
                 ),
                 upper_bound=util.OPEN_BOUND
@@ -423,7 +423,7 @@ def test_deserialize_open_low(self):
             util.DateRange(
                 lower_bound=util.OPEN_BOUND,
                 upper_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 20, 1000),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 20, 1000, tzinfo=utc_timezone),
                     precision='MINUTE'
                 )
             )
@@ -444,7 +444,7 @@ def test_serialize_single_value(self):
             deserialized,
             util.DateRange(
                 value=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 12),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, tzinfo=utc_timezone),
                     precision='SECOND'
                 )
             )
@@ -461,11 +461,11 @@ def test_serialize_closed_range(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15, 42, 12),
+                    value=datetime.datetime(2017, 2, 1, 15, 42, 12, tzinfo=utc_timezone),
                     precision='SECOND'
                 ),
                 upper_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 12, 31),
+                    value=datetime.datetime(2017, 12, 31, tzinfo=utc_timezone),
                     precision='YEAR'
                 )
             )
@@ -480,7 +480,7 @@ def test_serialize_open_high(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1),
+                    value=datetime.datetime(2017, 2, 1, tzinfo=utc_timezone),
                     precision='DAY'
                 ),
                 upper_bound=util.OPEN_BOUND
@@ -496,7 +496,7 @@ def test_serialize_open_low(self):
             deserialized,
             util.DateRange(
                 lower_bound=util.DateRangeBound(
-                    value=datetime.datetime(2017, 2, 1, 15),
+                    value=datetime.datetime(2017, 2, 1, 15, tzinfo=utc_timezone),
                     precision='HOUR'
                 ),
                 upper_bound=util.OPEN_BOUND
@@ -557,8 +557,8 @@ def test_serialize_zero_datetime(self):
         @test_category data_types
         """
         DateRangeType.serialize(util.DateRange(
-            lower_bound=(datetime.datetime(1970, 1, 1), 'YEAR'),
-            upper_bound=(datetime.datetime(1970, 1, 1), 'YEAR')
+            lower_bound=(datetime.datetime(1970, 1, 1, tzinfo=utc_timezone), 'YEAR'),
+            upper_bound=(datetime.datetime(1970, 1, 1, tzinfo=utc_timezone), 'YEAR')
         ), 5)
 
     def test_deserialize_zero_datetime(self):