From e24acabb94a00456e2018fe59dc80edbd5742eec Mon Sep 17 00:00:00 2001
From: Dmitry Kropachev
Date: Thu, 19 Dec 2024 20:13:00 -0400
Subject: [PATCH] Stop ResultSet indexing in tests

ResultSet indexing is marked as deprecated, so we need to stop using it.
It also spams the following warning in tests:

DeprecationWarning: ResultSet indexing support will be removed in 4.0.
Consider using ResultSet.one() to get a single row.
---
 tests/integration/__init__.py                 |   2 +-
 .../statements/test_base_statement.py         |   2 +-
 tests/integration/cqlengine/test_ttl.py       |   2 +-
 tests/integration/standard/test_cluster.py    |  14 +--
 .../standard/test_custom_protocol_handler.py  |   8 +-
 .../standard/test_cython_protocol_handlers.py |   4 +-
 tests/integration/standard/test_metadata.py   |  10 +-
 .../standard/test_prepared_statements.py      |  14 +--
 tests/integration/standard/test_query.py      |  92 ++++++++--------
 tests/integration/standard/test_routing.py    |   2 +-
 .../standard/test_row_factories.py            |  10 +-
 tests/integration/standard/test_types.py      | 102 +++++++++---------
 tests/integration/standard/test_udts.py       |  48 ++++-----
 tests/integration/upgrade/test_upgrade.py     |   6 +-
 14 files changed, 159 insertions(+), 157 deletions(-)

diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py
index 53683c32bd..633fee729c 100644
--- a/tests/integration/__init__.py
+++ b/tests/integration/__init__.py
@@ -87,7 +87,7 @@ def get_server_versions():
 
     c = TestCluster()
     s = c.connect()
-    row = s.execute('SELECT cql_version, release_version FROM system.local')[0]
+    row = s.execute('SELECT cql_version, release_version FROM system.local').one()
 
     cass_version = _tuple_version(row.release_version)
     cql_version = _tuple_version(row.cql_version)
diff --git a/tests/integration/cqlengine/statements/test_base_statement.py b/tests/integration/cqlengine/statements/test_base_statement.py
index 0c95504b13..b49e45e226 100644
--- a/tests/integration/cqlengine/statements/test_base_statement.py
+++ b/tests/integration/cqlengine/statements/test_base_statement.py
@@ -61,7 +61,7 @@ def tearDownClass(cls):
     def _verify_statement(self, original):
         st = SelectStatement(self.table_name)
         result = execute(st)
-        response = result[0]
+        response = result.one()
 
         for assignment in original.assignments:
             self.assertEqual(response[assignment.field], assignment.value)
diff --git a/tests/integration/cqlengine/test_ttl.py b/tests/integration/cqlengine/test_ttl.py
index 186521afd6..1d951e7c65 100644
--- a/tests/integration/cqlengine/test_ttl.py
+++ b/tests/integration/cqlengine/test_ttl.py
@@ -169,7 +169,7 @@ def get_default_ttl(self, table_name):
         except InvalidRequest:
             default_ttl = session.execute("SELECT default_time_to_live FROM system.schema_columnfamilies "
                                           "WHERE keyspace_name = 'cqlengine_test' AND columnfamily_name = '{0}'".format(table_name))
-        return default_ttl[0]['default_time_to_live']
+        return default_ttl.one()['default_time_to_live']
 
     def test_default_ttl_not_set(self):
         session = get_session()
diff --git a/tests/integration/standard/test_cluster.py b/tests/integration/standard/test_cluster.py
index e506596bf7..7ffb257831 100644
--- a/tests/integration/standard/test_cluster.py
+++ b/tests/integration/standard/test_cluster.py
@@ -899,7 +899,7 @@ def test_profile_load_balancing(self):
 
         # use a copied instance and override the row factory
        # assert last returned value can be accessed as a namedtuple so we can prove something different
-        named_tuple_row = rs[0]
+        named_tuple_row = rs.one()
         self.assertIsInstance(named_tuple_row, tuple)
         self.assertTrue(named_tuple_row.release_version)
 
@@ -910,13 
+910,13 @@ def test_profile_load_balancing(self): rs = session.execute(query, execution_profile=tmp_profile) queried_hosts.add(rs.response_future._current_host) self.assertEqual(queried_hosts, expected_hosts) - tuple_row = rs[0] + tuple_row = rs.one() self.assertIsInstance(tuple_row, tuple) with self.assertRaises(AttributeError): tuple_row.release_version # make sure original profile is not impacted - self.assertTrue(session.execute(query, execution_profile='node1')[0].release_version) + self.assertTrue(session.execute(query, execution_profile='node1').one().release_version) def test_setting_lbp_legacy(self): cluster = TestCluster() @@ -1390,7 +1390,7 @@ def test_simple_nested(self): with cluster.connect() as session: self.assertFalse(cluster.is_shutdown) self.assertFalse(session.is_shutdown) - self.assertTrue(session.execute('select release_version from system.local')[0]) + self.assertTrue(session.execute('select release_version from system.local').one()) self.assertTrue(session.is_shutdown) self.assertTrue(cluster.is_shutdown) @@ -1408,7 +1408,7 @@ def test_cluster_no_session(self): session = cluster.connect() self.assertFalse(cluster.is_shutdown) self.assertFalse(session.is_shutdown) - self.assertTrue(session.execute('select release_version from system.local')[0]) + self.assertTrue(session.execute('select release_version from system.local').one()) self.assertTrue(session.is_shutdown) self.assertTrue(cluster.is_shutdown) @@ -1428,7 +1428,7 @@ def test_session_no_cluster(self): self.assertFalse(cluster.is_shutdown) self.assertFalse(session.is_shutdown) self.assertFalse(unmanaged_session.is_shutdown) - self.assertTrue(session.execute('select release_version from system.local')[0]) + self.assertTrue(session.execute('select release_version from system.local').one()) self.assertTrue(session.is_shutdown) self.assertFalse(cluster.is_shutdown) self.assertFalse(unmanaged_session.is_shutdown) @@ -1551,7 +1551,7 @@ def test_valid_protocol_version_beta_options_connect(self): cluster = Cluster(protocol_version=cassandra.ProtocolVersion.V6, allow_beta_protocol_version=True) session = cluster.connect() self.assertEqual(cluster.protocol_version, cassandra.ProtocolVersion.V6) - self.assertTrue(session.execute("select release_version from system.local")[0]) + self.assertTrue(session.execute("select release_version from system.local").one()) cluster.shutdown() diff --git a/tests/integration/standard/test_custom_protocol_handler.py b/tests/integration/standard/test_custom_protocol_handler.py index 3a3d50ed39..9d9f90e631 100644 --- a/tests/integration/standard/test_custom_protocol_handler.py +++ b/tests/integration/standard/test_custom_protocol_handler.py @@ -70,20 +70,20 @@ def test_custom_raw_uuid_row_results(self): session = cluster.connect(keyspace="custserdes") result = session.execute("SELECT schema_version FROM system.local") - uuid_type = result[0][0] + uuid_type = result.one()[0] self.assertEqual(type(uuid_type), uuid.UUID) # use our custom protocol handlder session.client_protocol_handler = CustomTestRawRowType result_set = session.execute("SELECT schema_version FROM system.local") - raw_value = result_set[0][0] + raw_value = result_set.one()[0] self.assertTrue(isinstance(raw_value, bytes)) self.assertEqual(len(raw_value), 16) # Ensure that we get normal uuid back when we re-connect session.client_protocol_handler = ProtocolHandler result_set = session.execute("SELECT schema_version FROM system.local") - uuid_type = result_set[0][0] + uuid_type = result_set.one()[0] 
self.assertEqual(type(uuid_type), uuid.UUID) cluster.shutdown() @@ -113,7 +113,7 @@ def test_custom_raw_row_results_all_types(self): # verify data params = get_all_primitive_params(0) - results = session.execute("SELECT {0} FROM alltypes WHERE primkey=0".format(columns_string))[0] + results = session.execute("SELECT {0} FROM alltypes WHERE primkey=0".format(columns_string)).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) # Ensure we have covered the various primitive types diff --git a/tests/integration/standard/test_cython_protocol_handlers.py b/tests/integration/standard/test_cython_protocol_handlers.py index 9cb5914f16..9e85edb914 100644 --- a/tests/integration/standard/test_cython_protocol_handlers.py +++ b/tests/integration/standard/test_cython_protocol_handlers.py @@ -231,14 +231,14 @@ def test_null_types(self): table = "%s.%s" % (self.keyspace_name, self.function_table_name) create_table_with_all_types(table, s, 10) - begin_unset = max(s.execute('select primkey from %s' % (table,))[0]['primkey']) + 1 + begin_unset = max(s.execute('select primkey from %s' % (table,)).one()['primkey']) + 1 keys_null = range(begin_unset, begin_unset + 10) # scatter some emptry rows in here insert = "insert into %s (primkey) values (%%s)" % (table,) execute_concurrent_with_args(s, insert, ((k,) for k in keys_null)) - result = s.execute("select * from %s" % (table,))[0] + result = s.execute("select * from %s" % (table,)).one() from numpy.ma import masked, MaskedArray result_keys = result.pop('primkey') diff --git a/tests/integration/standard/test_metadata.py b/tests/integration/standard/test_metadata.py index 944dd8ab20..ba9eeae433 100644 --- a/tests/integration/standard/test_metadata.py +++ b/tests/integration/standard/test_metadata.py @@ -163,8 +163,8 @@ def test_schema_metadata_disable(self): query = "SELECT * FROM system.local" no_schema_rs = no_schema_session.execute(query) no_token_rs = no_token_session.execute(query) - self.assertIsNotNone(no_schema_rs[0]) - self.assertIsNotNone(no_token_rs[0]) + self.assertIsNotNone(no_schema_rs.one()) + self.assertIsNotNone(no_token_rs.one()) no_schema.shutdown() no_token.shutdown() @@ -1819,14 +1819,14 @@ def test_init_cond(self): for init_cond in (-1, 0, 1): cql_init = encoder.cql_encode_all_types(init_cond) with self.VerifiedAggregate(self, **self.make_aggregate_kwargs('sum_int', 'int', init_cond=cql_init)) as va: - sum_res = s.execute("SELECT %s(v) AS sum FROM t" % va.function_kwargs['name'])[0].sum + sum_res = s.execute("SELECT %s(v) AS sum FROM t" % va.function_kwargs['name']).one().sum self.assertEqual(sum_res, int(init_cond) + sum(expected_values)) # list for init_cond in ([], ['1', '2']): cql_init = encoder.cql_encode_all_types(init_cond) with self.VerifiedAggregate(self, **self.make_aggregate_kwargs('extend_list', 'list', init_cond=cql_init)) as va: - list_res = s.execute("SELECT %s(v) AS list_res FROM t" % va.function_kwargs['name'])[0].list_res + list_res = s.execute("SELECT %s(v) AS list_res FROM t" % va.function_kwargs['name']).one().list_res self.assertListEqual(list_res[:len(init_cond)], init_cond) self.assertEqual(set(i for i in list_res[len(init_cond):]), set(str(i) for i in expected_values)) @@ -1837,7 +1837,7 @@ def test_init_cond(self): for init_cond in ({}, {1: 2, 3: 4}, {5: 5}): cql_init = encoder.cql_encode_all_types(init_cond) with self.VerifiedAggregate(self, **self.make_aggregate_kwargs('update_map', 'map', init_cond=cql_init)) as va: - map_res = s.execute("SELECT %s(v) AS map_res FROM t" % 
va.function_kwargs['name'])[0].map_res + map_res = s.execute("SELECT %s(v) AS map_res FROM t" % va.function_kwargs['name']).one().map_res self.assertDictContainsSubset(expected_map_values, map_res) init_not_updated = dict((k, init_cond[k]) for k in set(init_cond) - expected_key_set) self.assertDictContainsSubset(init_not_updated, map_res) diff --git a/tests/integration/standard/test_prepared_statements.py b/tests/integration/standard/test_prepared_statements.py index bbde2affcb..5ccc0732fa 100644 --- a/tests/integration/standard/test_prepared_statements.py +++ b/tests/integration/standard/test_prepared_statements.py @@ -229,7 +229,7 @@ def test_none_values(self): bound = prepared.bind((1,)) results = self.session.execute(bound) - self.assertEqual(results[0].v, None) + self.assertEqual(results.one().v, None) def test_unset_values(self): """ @@ -272,7 +272,7 @@ def test_unset_values(self): for params, expected in bind_expected: self.session.execute(insert, params) results = self.session.execute(select, (0,)) - self.assertEqual(results[0], expected) + self.assertEqual(results.one(), expected) self.assertRaises(ValueError, self.session.execute, select, (UNSET_VALUE, 0, 0)) @@ -297,7 +297,7 @@ def test_no_meta(self): bound = prepared.bind(None) bound.consistency_level = ConsistencyLevel.ALL results = self.session.execute(bound) - self.assertEqual(results[0].v, 0) + self.assertEqual(results.one().v, 0) def test_none_values_dicts(self): """ @@ -322,7 +322,7 @@ def test_none_values_dicts(self): bound = prepared.bind({'k': 1}) results = self.session.execute(bound) - self.assertEqual(results[0].v, None) + self.assertEqual(results.one().v, None) def test_async_binding(self): """ @@ -346,7 +346,7 @@ def test_async_binding(self): future = self.session.execute_async(prepared, (873,)) results = future.result() - self.assertEqual(results[0].v, None) + self.assertEqual(results.one().v, None) def test_async_binding_dicts(self): """ @@ -369,7 +369,7 @@ def test_async_binding_dicts(self): future = self.session.execute_async(prepared, {'k': 873}) results = future.result() - self.assertEqual(results[0].v, None) + self.assertEqual(results.one().v, None) def test_raise_error_on_prepared_statement_execution_dropped_table(self): """ @@ -616,7 +616,7 @@ def _test_updated_conditional(self, session, value): def check_result_and_metadata(expected): self.assertEqual( - session.execute(prepared_statement, (value, value, value))[0], + session.execute(prepared_statement, (value, value, value)).one(), expected ) self.assertEqual(prepared_statement.result_metadata_id, first_id) diff --git a/tests/integration/standard/test_query.py b/tests/integration/standard/test_query.py index bc05051318..fd9ad1f2e4 100644 --- a/tests/integration/standard/test_query.py +++ b/tests/integration/standard/test_query.py @@ -263,7 +263,7 @@ def _wait_for_trace_to_delete(self, trace_id): def _is_trace_present(self, trace_id): select_statement = SimpleStatement("SElECT duration FROM system_traces.sessions WHERE session_id = {0}".format(trace_id), consistency_level=ConsistencyLevel.ALL) ssrs = self.session.execute(select_statement) - if not len(ssrs.current_rows) or ssrs[0].duration is None: + if not len(ssrs.current_rows) or ssrs.one().duration is None: return False return True @@ -331,7 +331,7 @@ def test_basic_json_query(self): self.session.execute(insert_query) results = self.session.execute(json_query) self.assertEqual(results.column_names, ["[json]"]) - self.assertEqual(results[0][0], '{"k": 1, "v": 1}') + self.assertEqual(results.one()[0], 
'{"k": 1, "v": 1}') def test_host_targeting_query(self): """ @@ -496,9 +496,9 @@ def test_prepared_metadata_generation(self): future = session.execute_async(select_statement) results = future.result() if base_line is None: - base_line = results[0]._asdict().keys() + base_line = results.one()._asdict().keys() else: - self.assertEqual(base_line, results[0]._asdict().keys()) + self.assertEqual(base_line, results.one()._asdict().keys()) cluster.shutdown() @@ -566,7 +566,7 @@ def test_prepare_batch_statement(self): select_results = session.execute(SimpleStatement("SELECT * FROM %s WHERE k = 1" % table, consistency_level=ConsistencyLevel.ALL)) - first_row = select_results[0][:2] + first_row = select_results.one()[:2] self.assertEqual((1, 2), first_row) def test_prepare_batch_statement_after_alter(self): @@ -815,7 +815,7 @@ def test_conditional_update(self): result = future.result() self.assertEqual(future.message.serial_consistency_level, ConsistencyLevel.SERIAL) self.assertTrue(result) - self.assertFalse(result[0].applied) + self.assertFalse(result.one().applied) statement = SimpleStatement( "UPDATE test3rf.test SET v=1 WHERE k=0 IF v=0", @@ -825,7 +825,7 @@ def test_conditional_update(self): result = future.result() self.assertEqual(future.message.serial_consistency_level, ConsistencyLevel.LOCAL_SERIAL) self.assertTrue(result) - self.assertTrue(result[0].applied) + self.assertTrue(result.one().applied) def test_conditional_update_with_prepared_statements(self): self.session.execute("INSERT INTO test3rf.test (k, v) VALUES (0, 0)") @@ -837,7 +837,7 @@ def test_conditional_update_with_prepared_statements(self): result = future.result() self.assertEqual(future.message.serial_consistency_level, ConsistencyLevel.SERIAL) self.assertTrue(result) - self.assertFalse(result[0].applied) + self.assertFalse(result.one().applied) statement = self.session.prepare( "UPDATE test3rf.test SET v=1 WHERE k=0 IF v=0") @@ -847,7 +847,7 @@ def test_conditional_update_with_prepared_statements(self): result = future.result() self.assertEqual(future.message.serial_consistency_level, ConsistencyLevel.LOCAL_SERIAL) self.assertTrue(result) - self.assertTrue(result[0].applied) + self.assertTrue(result.one().applied) def test_conditional_update_with_batch_statements(self): self.session.execute("INSERT INTO test3rf.test (k, v) VALUES (0, 0)") @@ -858,7 +858,7 @@ def test_conditional_update_with_batch_statements(self): result = future.result() self.assertEqual(future.message.serial_consistency_level, ConsistencyLevel.SERIAL) self.assertTrue(result) - self.assertFalse(result[0].applied) + self.assertFalse(result.one().applied) statement = BatchStatement(serial_consistency_level=ConsistencyLevel.LOCAL_SERIAL) statement.add("UPDATE test3rf.test SET v=1 WHERE k=0 IF v=0") @@ -867,7 +867,7 @@ def test_conditional_update_with_batch_statements(self): result = future.result() self.assertEqual(future.message.serial_consistency_level, ConsistencyLevel.LOCAL_SERIAL) self.assertTrue(result) - self.assertTrue(result[0].applied) + self.assertTrue(result.one().applied) def test_bad_consistency_level(self): statement = SimpleStatement("foo") @@ -1243,23 +1243,23 @@ def test_mv_filtering(self): query_statement = SimpleStatement("SELECT * FROM {0}.alltimehigh WHERE game='Coup'".format(self.keyspace_name), consistency_level=ConsistencyLevel.QUORUM) results = self.session.execute(query_statement) - self.assertEqual(results[0].game, 'Coup') - self.assertEqual(results[0].year, 2015) - self.assertEqual(results[0].month, 5) - 
self.assertEqual(results[0].day, 1) - self.assertEqual(results[0].score, 4000) - self.assertEqual(results[0].user, "pcmanus") + self.assertEqual(results.one().game, 'Coup') + self.assertEqual(results.one().year, 2015) + self.assertEqual(results.one().month, 5) + self.assertEqual(results.one().day, 1) + self.assertEqual(results.one().score, 4000) + self.assertEqual(results.one().user, "pcmanus") # Test prepared statement and daily high filtering prepared_query = self.session.prepare("SELECT * FROM {0}.dailyhigh WHERE game=? AND year=? AND month=? and day=?".format(self.keyspace_name)) bound_query = prepared_query.bind(("Coup", 2015, 6, 2)) results = self.session.execute(bound_query) - self.assertEqual(results[0].game, 'Coup') - self.assertEqual(results[0].year, 2015) - self.assertEqual(results[0].month, 6) - self.assertEqual(results[0].day, 2) - self.assertEqual(results[0].score, 2000) - self.assertEqual(results[0].user, "pcmanus") + self.assertEqual(results.one().game, 'Coup') + self.assertEqual(results.one().year, 2015) + self.assertEqual(results.one().month, 6) + self.assertEqual(results.one().day, 2) + self.assertEqual(results.one().score, 2000) + self.assertEqual(results.one().user, "pcmanus") self.assertEqual(results[1].game, 'Coup') self.assertEqual(results[1].year, 2015) @@ -1272,12 +1272,12 @@ def test_mv_filtering(self): prepared_query = self.session.prepare("SELECT * FROM {0}.monthlyhigh WHERE game=? AND year=? AND month=? and score >= ? and score <= ?".format(self.keyspace_name)) bound_query = prepared_query.bind(("Coup", 2015, 6, 2500, 3500)) results = self.session.execute(bound_query) - self.assertEqual(results[0].game, 'Coup') - self.assertEqual(results[0].year, 2015) - self.assertEqual(results[0].month, 6) - self.assertEqual(results[0].day, 20) - self.assertEqual(results[0].score, 3500) - self.assertEqual(results[0].user, "jbellis") + self.assertEqual(results.one().game, 'Coup') + self.assertEqual(results.one().year, 2015) + self.assertEqual(results.one().month, 6) + self.assertEqual(results.one().day, 20) + self.assertEqual(results.one().score, 3500) + self.assertEqual(results.one().user, "jbellis") self.assertEqual(results[1].game, 'Coup') self.assertEqual(results[1].year, 2015) @@ -1297,12 +1297,12 @@ def test_mv_filtering(self): query_statement = SimpleStatement("SELECT * FROM {0}.filtereduserhigh WHERE game='Chess'".format(self.keyspace_name), consistency_level=ConsistencyLevel.QUORUM) results = self.session.execute(query_statement) - self.assertEqual(results[0].game, 'Chess') - self.assertEqual(results[0].year, 2015) - self.assertEqual(results[0].month, 6) - self.assertEqual(results[0].day, 21) - self.assertEqual(results[0].score, 3500) - self.assertEqual(results[0].user, "jbellis") + self.assertEqual(results.one().game, 'Chess') + self.assertEqual(results.one().year, 2015) + self.assertEqual(results.one().month, 6) + self.assertEqual(results.one().day, 21) + self.assertEqual(results.one().score, 3500) + self.assertEqual(results.one().user, "jbellis") self.assertEqual(results[1].game, 'Chess') self.assertEqual(results[1].year, 2015) @@ -1478,12 +1478,12 @@ def test_lower_protocol(self): def _check_set_keyspace_in_statement(self, session): simple_stmt = SimpleStatement("SELECT * from {}".format(self.table_name), keyspace=self.ks_name) results = session.execute(simple_stmt) - self.assertEqual(results[0], (1, 1)) + self.assertEqual(results.one(), (1, 1)) simple_stmt = SimpleStatement("SELECT * from {}".format(self.table_name)) simple_stmt.keyspace = self.ks_name results = 
session.execute(simple_stmt) - self.assertEqual(results[0], (1, 1)) + self.assertEqual(results.one(), (1, 1)) @greaterthanorequalcass40 @@ -1537,14 +1537,14 @@ def test_prepared_with_keyspace_explicit(self): prepared_statement = self.session.prepare(query, keyspace=self.ks_name) results = self.session.execute(prepared_statement, (1, )) - self.assertEqual(results[0], (1, 1)) + self.assertEqual(results.one(), (1, 1)) prepared_statement_alternative = self.session.prepare(query, keyspace=self.alternative_ks) self.assertNotEqual(prepared_statement.query_id, prepared_statement_alternative.query_id) results = self.session.execute(prepared_statement_alternative, (2,)) - self.assertEqual(results[0], (2, 2)) + self.assertEqual(results.one(), (2, 2)) def test_reprepare_after_host_is_down(self): """ @@ -1574,10 +1574,10 @@ def test_reprepare_after_host_is_down(self): self.assertEqual(1, mock_handler.get_message_count('debug', 'Preparing all known prepared statements')) results = self.session.execute(prepared_statement, (1,), execution_profile="only_first") - self.assertEqual(results[0], (1, )) + self.assertEqual(results.one(), (1, )) results = self.session.execute(prepared_statement_alternative, (2,), execution_profile="only_first") - self.assertEqual(results[0], (2, )) + self.assertEqual(results.one(), (2, )) def test_prepared_not_found(self): """ @@ -1601,7 +1601,7 @@ def test_prepared_not_found(self): for _ in range(10): results = session.execute(prepared_statement, (1, )) - self.assertEqual(results[0], (1,)) + self.assertEqual(results.one(), (1,)) def test_prepared_in_query_keyspace(self): """ @@ -1620,12 +1620,12 @@ def test_prepared_in_query_keyspace(self): query = "SELECT k from {}.{} WHERE k = ?".format(self.ks_name, self.table_name) prepared_statement = session.prepare(query) results = session.execute(prepared_statement, (1,)) - self.assertEqual(results[0], (1,)) + self.assertEqual(results.one(), (1,)) query = "SELECT k from {}.{} WHERE k = ?".format(self.alternative_ks, self.table_name) prepared_statement = session.prepare(query) results = session.execute(prepared_statement, (2,)) - self.assertEqual(results[0], (2,)) + self.assertEqual(results.one(), (2,)) def test_prepared_in_query_keyspace_and_explicit(self): """ @@ -1642,9 +1642,9 @@ def test_prepared_in_query_keyspace_and_explicit(self): query = "SELECT k from {}.{} WHERE k = ?".format(self.ks_name, self.table_name) prepared_statement = self.session.prepare(query, keyspace="system") results = self.session.execute(prepared_statement, (1,)) - self.assertEqual(results[0], (1,)) + self.assertEqual(results.one(), (1,)) query = "SELECT k from {}.{} WHERE k = ?".format(self.ks_name, self.table_name) prepared_statement = self.session.prepare(query, keyspace=self.alternative_ks) results = self.session.execute(prepared_statement, (1,)) - self.assertEqual(results[0], (1,)) + self.assertEqual(results.one(), (1,)) diff --git a/tests/integration/standard/test_routing.py b/tests/integration/standard/test_routing.py index 47697ee9c8..7d6651cf8b 100644 --- a/tests/integration/standard/test_routing.py +++ b/tests/integration/standard/test_routing.py @@ -48,7 +48,7 @@ def insert_select_token(self, insert, select, key_values): my_token = s.cluster.metadata.token_map.token_class.from_key(bound.routing_key) - cass_token = s.execute(select, key_values)[0][0] + cass_token = s.execute(select, key_values).one()[0] token = s.cluster.metadata.token_map.token_class(cass_token) self.assertEqual(my_token, token) diff --git 
a/tests/integration/standard/test_row_factories.py b/tests/integration/standard/test_row_factories.py index 6855e8a410..413a6bf50b 100644 --- a/tests/integration/standard/test_row_factories.py +++ b/tests/integration/standard/test_row_factories.py @@ -65,7 +65,7 @@ def test_sanitizing(self): query = "SELECT v1 AS duplicate, v2 AS duplicate, v3 AS duplicate from {0}.{1}".format(self.ks_name, self.function_table_name) rs = self.session.execute(query) - row = rs[0] + row = rs.one() self.assertTrue(hasattr(row, 'duplicate')) self.assertTrue(hasattr(row, 'duplicate_')) self.assertTrue(hasattr(row, 'duplicate__')) @@ -93,8 +93,9 @@ def _results_from_row_factory(self, row_factory): def test_tuple_factory(self): result = self._results_from_row_factory(tuple_factory) self.assertIsInstance(result, ResultSet) - self.assertIsInstance(result[0], tuple) + self.assertIsInstance(result.one(), tuple) + result = result.all() for row in result: self.assertEqual(row[0], row[1]) @@ -106,7 +107,7 @@ def test_tuple_factory(self): def test_named_tuple_factory(self): result = self._results_from_row_factory(named_tuple_factory) self.assertIsInstance(result, ResultSet) - result = list(result) + result = result.all() for row in result: self.assertEqual(row.k, row.v) @@ -119,8 +120,9 @@ def test_named_tuple_factory(self): def _test_dict_factory(self, row_factory, row_type): result = self._results_from_row_factory(row_factory) self.assertIsInstance(result, ResultSet) - self.assertIsInstance(result[0], row_type) + self.assertIsInstance(result.one(), row_type) + result = result.all() for row in result: self.assertEqual(row['k'], row['v']) diff --git a/tests/integration/standard/test_types.py b/tests/integration/standard/test_types.py index 2377129e9d..56b2914cec 100644 --- a/tests/integration/standard/test_types.py +++ b/tests/integration/standard/test_types.py @@ -62,7 +62,7 @@ def test_can_insert_blob_type_as_string(self): s.execute(query, params) - results = s.execute("SELECT * FROM blobstring")[0] + results = s.execute("SELECT * FROM blobstring").one() for expected, actual in zip(params, results): self.assertEqual(expected, actual) @@ -77,7 +77,7 @@ def test_can_insert_blob_type_as_bytearray(self): params = ['key1', bytearray(b'blob1')] s.execute("INSERT INTO blobbytes (a, b) VALUES (%s, %s)", params) - results = s.execute("SELECT * FROM blobbytes")[0] + results = s.execute("SELECT * FROM blobbytes").one() for expected, actual in zip(params, results): self.assertEqual(expected, actual) @@ -104,7 +104,7 @@ def test_des_bytes_type_array(self): params = ['key1', bytearray(b'blob1')] s.execute("INSERT INTO blobbytes2 (a, b) VALUES (%s, %s)", params) - results = s.execute("SELECT * FROM blobbytes2")[0] + results = s.execute("SELECT * FROM blobbytes2").one() for expected, actual in zip(params, results): self.assertEqual(expected, actual) finally: @@ -139,7 +139,7 @@ def test_can_insert_primitive_datatypes(self): s.execute("INSERT INTO alltypes ({0}) VALUES ({1})".format(columns_string, placeholders), params) # verify data - results = s.execute("SELECT {0} FROM alltypes WHERE zz=0".format(columns_string))[0] + results = s.execute("SELECT {0} FROM alltypes WHERE zz=0".format(columns_string)).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) @@ -156,7 +156,7 @@ def test_can_insert_primitive_datatypes(self): s.execute("INSERT INTO alltypes ({0}) VALUES ({1})".format(single_columns_string, placeholders), single_params) # verify data - result = s.execute("SELECT {0} FROM alltypes WHERE 
zz=%s".format(single_columns_string), (key,))[0][1] + result = s.execute("SELECT {0} FROM alltypes WHERE zz=%s".format(single_columns_string), (key,)).one()[1] compare_value = data_sample if isinstance(data_sample, ipaddress.IPv4Address) or isinstance(data_sample, ipaddress.IPv6Address): @@ -170,20 +170,20 @@ def test_can_insert_primitive_datatypes(self): s.execute(insert.bind(params)) # verify data - results = s.execute("SELECT {0} FROM alltypes WHERE zz=0".format(columns_string))[0] + results = s.execute("SELECT {0} FROM alltypes WHERE zz=0".format(columns_string)).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) # verify data with prepared statement query select = s.prepare("SELECT {0} FROM alltypes WHERE zz=?".format(columns_string)) - results = s.execute(select.bind([0]))[0] + results = s.execute(select.bind([0])).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) # verify data with with prepared statement, use dictionary with no explicit columns select = s.prepare("SELECT * FROM alltypes") results = s.execute(select, - execution_profile=s.execution_profile_clone_update(EXEC_PROFILE_DEFAULT, row_factory=ordered_dict_factory))[0] + execution_profile=s.execution_profile_clone_update(EXEC_PROFILE_DEFAULT, row_factory=ordered_dict_factory)).one() for expected, actual in zip(params, results.values()): self.assertEqual(actual, expected) @@ -232,7 +232,7 @@ def test_can_insert_collection_datatypes(self): s.execute("INSERT INTO allcoltypes ({0}) VALUES ({1})".format(columns_string, placeholders), params) # verify data - results = s.execute("SELECT {0} FROM allcoltypes WHERE zz=0".format(columns_string))[0] + results = s.execute("SELECT {0} FROM allcoltypes WHERE zz=0".format(columns_string)).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) @@ -248,13 +248,13 @@ def test_can_insert_collection_datatypes(self): s.execute(insert.bind(params)) # verify data - results = s.execute("SELECT {0} FROM allcoltypes WHERE zz=0".format(columns_string))[0] + results = s.execute("SELECT {0} FROM allcoltypes WHERE zz=0".format(columns_string)).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) # verify data with prepared statement query select = s.prepare("SELECT {0} FROM allcoltypes WHERE zz=?".format(columns_string)) - results = s.execute(select.bind([0]))[0] + results = s.execute(select.bind([0])).one() for expected, actual in zip(params, results): self.assertEqual(actual, expected) @@ -262,7 +262,7 @@ def test_can_insert_collection_datatypes(self): select = s.prepare("SELECT * FROM allcoltypes") results = s.execute(select, execution_profile=s.execution_profile_clone_update(EXEC_PROFILE_DEFAULT, - row_factory=ordered_dict_factory))[0] + row_factory=ordered_dict_factory)).one() for expected, actual in zip(params, results.values()): self.assertEqual(actual, expected) @@ -298,12 +298,12 @@ def test_can_insert_empty_strings_and_nulls(self): # verify all types initially null with simple statement columns_string = ','.join(col_names) s.execute("INSERT INTO all_empty (zz) VALUES (2)") - results = s.execute("SELECT {0} FROM all_empty WHERE zz=2".format(columns_string))[0] + results = s.execute("SELECT {0} FROM all_empty WHERE zz=2".format(columns_string)).one() self.assertTrue(all(x is None for x in results)) # verify all types initially null with prepared statement select = s.prepare("SELECT {0} FROM all_empty WHERE zz=?".format(columns_string)) - results = 
s.execute(select.bind([2]))[0] + results = s.execute(select.bind([2])).one() self.assertTrue(all(x is None for x in results)) # insert empty strings for string-like fields @@ -313,12 +313,12 @@ def test_can_insert_empty_strings_and_nulls(self): s.execute("INSERT INTO all_empty (zz, {0}) VALUES (3, {1})".format(columns_string, placeholders), expected_values.values()) # verify string types empty with simple statement - results = s.execute("SELECT {0} FROM all_empty WHERE zz=3".format(columns_string))[0] + results = s.execute("SELECT {0} FROM all_empty WHERE zz=3".format(columns_string)).one() for expected, actual in zip(expected_values.values(), results): self.assertEqual(actual, expected) # verify string types empty with prepared statement - results = s.execute(s.prepare("SELECT {0} FROM all_empty WHERE zz=?".format(columns_string)), [3])[0] + results = s.execute(s.prepare("SELECT {0} FROM all_empty WHERE zz=?".format(columns_string)), [3]).one() for expected, actual in zip(expected_values.values(), results): self.assertEqual(actual, expected) @@ -350,13 +350,13 @@ def test_can_insert_empty_strings_and_nulls(self): # check via simple statement query = "SELECT {0} FROM all_empty WHERE zz=5".format(columns_string) - results = s.execute(query)[0] + results = s.execute(query).one() for col in results: self.assertEqual(None, col) # check via prepared statement select = s.prepare("SELECT {0} FROM all_empty WHERE zz=?".format(columns_string)) - results = s.execute(select.bind([5]))[0] + results = s.execute(select.bind([5])).one() for col in results: self.assertEqual(None, col) @@ -367,11 +367,11 @@ def test_can_insert_empty_strings_and_nulls(self): insert = s.prepare("INSERT INTO all_empty (zz, {0}) VALUES (5, {1})".format(columns_string, placeholders)) s.execute(insert, null_values) - results = s.execute(query)[0] + results = s.execute(query).one() for col in results: self.assertEqual(None, col) - results = s.execute(select.bind([5]))[0] + results = s.execute(select.bind([5])).one() for col in results: self.assertEqual(None, col) @@ -385,7 +385,7 @@ def test_can_insert_empty_values_for_int32(self): execute_until_pass(s, "INSERT INTO empty_values (a, b) VALUES ('a', blobAsInt(0x))") try: Int32Type.support_empty_values = True - results = execute_until_pass(s, "SELECT b FROM empty_values WHERE a='a'")[0] + results = execute_until_pass(s, "SELECT b FROM empty_values WHERE a='a'").one() self.assertIs(EMPTY, results.b) finally: Int32Type.support_empty_values = False @@ -410,13 +410,13 @@ def test_timezone_aware_datetimes_are_timestamps(self): # test non-prepared statement s.execute("INSERT INTO tz_aware (a, b) VALUES ('key1', %s)", [dt]) - result = s.execute("SELECT b FROM tz_aware WHERE a='key1'")[0].b + result = s.execute("SELECT b FROM tz_aware WHERE a='key1'").one().b self.assertEqual(dt.utctimetuple(), result.utctimetuple()) # test prepared statement insert = s.prepare("INSERT INTO tz_aware (a, b) VALUES ('key2', ?)") s.execute(insert.bind([dt])) - result = s.execute("SELECT b FROM tz_aware WHERE a='key2'")[0].b + result = s.execute("SELECT b FROM tz_aware WHERE a='key2'").one().b self.assertEqual(dt.utctimetuple(), result.utctimetuple()) def test_can_insert_tuples(self): @@ -438,20 +438,20 @@ def test_can_insert_tuples(self): # test non-prepared statement complete = ('foo', 123, True) s.execute("INSERT INTO tuple_type (a, b) VALUES (0, %s)", parameters=(complete,)) - result = s.execute("SELECT b FROM tuple_type WHERE a=0")[0] + result = s.execute("SELECT b FROM tuple_type WHERE a=0").one() 
self.assertEqual(complete, result.b) partial = ('bar', 456) partial_result = partial + (None,) s.execute("INSERT INTO tuple_type (a, b) VALUES (1, %s)", parameters=(partial,)) - result = s.execute("SELECT b FROM tuple_type WHERE a=1")[0] + result = s.execute("SELECT b FROM tuple_type WHERE a=1").one() self.assertEqual(partial_result, result.b) # test single value tuples subpartial = ('zoo',) subpartial_result = subpartial + (None, None) s.execute("INSERT INTO tuple_type (a, b) VALUES (2, %s)", parameters=(subpartial,)) - result = s.execute("SELECT b FROM tuple_type WHERE a=2")[0] + result = s.execute("SELECT b FROM tuple_type WHERE a=2").one() self.assertEqual(subpartial_result, result.b) # test prepared statement @@ -464,9 +464,9 @@ def test_can_insert_tuples(self): self.assertRaises(ValueError, s.execute, prepared, parameters=(0, (1, 2, 3, 4, 5, 6))) prepared = s.prepare("SELECT b FROM tuple_type WHERE a=?") - self.assertEqual(complete, s.execute(prepared, (3,))[0].b) - self.assertEqual(partial_result, s.execute(prepared, (4,))[0].b) - self.assertEqual(subpartial_result, s.execute(prepared, (5,))[0].b) + self.assertEqual(complete, s.execute(prepared, (3,)).one().b) + self.assertEqual(partial_result, s.execute(prepared, (4,)).one().b) + self.assertEqual(subpartial_result, s.execute(prepared, (5,)).one().b) c.shutdown() @@ -506,7 +506,7 @@ def test_can_insert_tuples_with_varying_lengths(self): s.execute("INSERT INTO tuple_lengths (k, v_%s) VALUES (0, %s)", (i, created_tuple)) - result = s.execute("SELECT v_%s FROM tuple_lengths WHERE k=0", (i,))[0] + result = s.execute("SELECT v_%s FROM tuple_lengths WHERE k=0", (i,)).one() self.assertEqual(tuple(created_tuple), result['v_%s' % i]) c.shutdown() @@ -534,7 +534,7 @@ def test_can_insert_tuples_all_primitive_datatypes(self): values.append(get_sample(data_type)) expected = tuple(values + [None] * (type_count - len(values))) s.execute("INSERT INTO tuple_primitive (k, v) VALUES (%s, %s)", (i, tuple(values))) - result = s.execute("SELECT v FROM tuple_primitive WHERE k=%s", (i,))[0] + result = s.execute("SELECT v FROM tuple_primitive WHERE k=%s", (i,)).one() self.assertEqual(result.v, expected) c.shutdown() @@ -589,7 +589,7 @@ def test_can_insert_tuples_all_collection_datatypes(self): created_tuple = tuple([[get_sample(datatype)]]) s.execute("INSERT INTO tuple_non_primative (k, v_%s) VALUES (0, %s)", (i, created_tuple)) - result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,))[0] + result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,)).one() self.assertEqual(created_tuple, result['v_%s' % i]) i += 1 @@ -598,7 +598,7 @@ def test_can_insert_tuples_all_collection_datatypes(self): created_tuple = tuple([sortedset([get_sample(datatype)])]) s.execute("INSERT INTO tuple_non_primative (k, v_%s) VALUES (0, %s)", (i, created_tuple)) - result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,))[0] + result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,)).one() self.assertEqual(created_tuple, result['v_%s' % i]) i += 1 @@ -612,7 +612,7 @@ def test_can_insert_tuples_all_collection_datatypes(self): s.execute("INSERT INTO tuple_non_primative (k, v_%s) VALUES (0, %s)", (i, created_tuple)) - result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,))[0] + result = s.execute("SELECT v_%s FROM tuple_non_primative WHERE k=0", (i,)).one() self.assertEqual(created_tuple, result['v_%s' % i]) i += 1 c.shutdown() @@ -673,7 +673,7 @@ def test_can_insert_nested_tuples(self): 
s.execute("INSERT INTO nested_tuples (k, v_%s) VALUES (%s, %s)", (i, i, created_tuple)) # verify tuple was written and read correctly - result = s.execute("SELECT v_%s FROM nested_tuples WHERE k=%s", (i, i))[0] + result = s.execute("SELECT v_%s FROM nested_tuples WHERE k=%s", (i, i)).one() self.assertEqual(created_tuple, result['v_%s' % i]) c.shutdown() @@ -693,16 +693,16 @@ def test_can_insert_tuples_with_nulls(self): s.execute(insert, [(None, None, None, None)]) result = s.execute("SELECT * FROM tuples_nulls WHERE k=0") - self.assertEqual((None, None, None, None), result[0].t) + self.assertEqual((None, None, None, None), result.one().t) read = s.prepare("SELECT * FROM tuples_nulls WHERE k=0") - self.assertEqual((None, None, None, None), s.execute(read)[0].t) + self.assertEqual((None, None, None, None), s.execute(read).one().t) # also test empty strings where compatible s.execute(insert, [('', None, None, b'')]) result = s.execute("SELECT * FROM tuples_nulls WHERE k=0") - self.assertEqual(('', None, None, b''), result[0].t) - self.assertEqual(('', None, None, b''), s.execute(read)[0].t) + self.assertEqual(('', None, None, b''), result.one().t) + self.assertEqual(('', None, None, b''), s.execute(read).one().t) def test_insert_collection_with_null_fails(self): """ @@ -772,13 +772,13 @@ def test_can_read_composite_type(self): # CompositeType string literals are split on ':' chars s.execute("INSERT INTO composites (a, b) VALUES (0, 'abc:123')") - result = s.execute("SELECT * FROM composites WHERE a = 0")[0] + result = s.execute("SELECT * FROM composites WHERE a = 0").one() self.assertEqual(0, result.a) self.assertEqual(('abc', 123), result.b) # CompositeType values can omit elements at the end s.execute("INSERT INTO composites (a, b) VALUES (0, 'abc')") - result = s.execute("SELECT * FROM composites WHERE a = 0")[0] + result = s.execute("SELECT * FROM composites WHERE a = 0").one() self.assertEqual(0, result.a) self.assertEqual(('abc',), result.b) @@ -805,7 +805,7 @@ def test_special_float_cql_encoding(self): def verify_insert_select(ins_statement, sel_statement): execute_concurrent_with_args(s, ins_statement, ((f, f) for f in items)) for f in items: - row = s.execute(sel_statement, (f,))[0] + row = s.execute(sel_statement, (f,)).one() if math.isnan(f): self.assertTrue(math.isnan(row.f)) self.assertTrue(math.isnan(row.d)) @@ -839,7 +839,7 @@ def test_cython_decimal(self): try: self.session.execute("INSERT INTO {0} (dc) VALUES (-1.08430792318105707)".format(self.function_table_name)) results = self.session.execute("SELECT * FROM {0}".format(self.function_table_name)) - self.assertTrue(str(results[0].dc) == '-1.08430792318105707') + self.assertTrue(str(results.one().dc) == '-1.08430792318105707') finally: self.session.execute("DROP TABLE {0}".format(self.function_table_name)) @@ -882,7 +882,7 @@ def test_smoke_duration_values(self): self.session.execute(prepared, (1, Duration(month_day_value, month_day_value, nanosecond_value))) results = self.session.execute("SELECT * FROM duration_smoke") - v = results[0][1] + v = results.one()[1] self.assertEqual(Duration(month_day_value, month_day_value, nanosecond_value), v, "Error encoding value {0},{0},{1}".format(month_day_value, nanosecond_value)) @@ -1086,7 +1086,7 @@ def _daterange_round_trip(self, to_insert, expected=None): prep_sel = self.session.prepare("SELECT * FROM tab WHERE dr = '%s' " % (to_insert,)) results = self.session.execute(prep_sel) - dr = results[0].dr + dr = results.one().dr # sometimes this is truncated in the assertEqual output on 
failure; if isinstance(expected, str): self.assertEqual(str(dr), expected) @@ -1140,7 +1140,7 @@ def _daterange_round_trip(self, to_insert, expected=None): query = "SELECT * FROM tab WHERE dr = '{0}' ".format(to_insert) results= self.session.execute("SELECT * FROM tab WHERE dr = '{0}' ".format(to_insert)) - dr = results[0].dr + dr = results.one().dr # sometimes this is truncated in the assertEqual output on failure; if isinstance(expected, str): self.assertEqual(str(dr), expected) @@ -1173,7 +1173,7 @@ def test_date_range_collection(self): "{'[2000-01-01T10:15:30.001Z TO 2020]', '[2000-01-01T10:15:30.001Z TO 2020]', '[2010-01-01T10:15:30.001Z TO 2020]'}, " + "{1: '[2000-01-01T10:15:30.001Z TO 2020]', 2: '[2010-01-01T10:15:30.001Z TO 2020]'}, " + "{'[2000-01-01T10:15:30.001Z TO 2020]': 1, '[2010-01-01T10:15:30.001Z TO 2020]': 2})") - results = list(self.session.execute("SELECT * FROM dateRangeIntegrationTest5")) + results = self.session.execute("SELECT * FROM dateRangeIntegrationTest5").all() self.assertEqual(len(results),1) lower_bound_1 = util.DateRangeBound(datetime(2000, 1, 1, 10, 15, 30, 1000), 'MILLISECOND') @@ -1305,16 +1305,16 @@ def test_nested_types_with_protocol_version(self): def read_inserts_at_level(self, proto_ver): session = TestCluster(protocol_version=proto_ver).connect(self.keyspace_name) try: - results = session.execute('select * from t')[0] + results = session.execute('select * from t').one() self.assertEqual("[SortedSet([1, 2]), SortedSet([3, 5])]", str(results.v)) - results = session.execute('select * from u')[0] + results = session.execute('select * from u').one() self.assertEqual("SortedSet([[1, 2], [3, 5]])", str(results.v)) - results = session.execute('select * from v')[0] + results = session.execute('select * from v').one() self.assertEqual("{SortedSet([1, 2]): [1, 2, 3], SortedSet([3, 5]): [4, 5, 6]}", str(results.v)) - results = session.execute('select * from w')[0] + results = session.execute('select * from w').one() self.assertEqual("typ(v0=OrderedMapSerializedKey([(1, [1, 2, 3]), (2, [4, 5, 6])]), v1=[7, 8, 9])", str(results.v)) finally: diff --git a/tests/integration/standard/test_udts.py b/tests/integration/standard/test_udts.py index a50f3f47de..7188bf3eb8 100644 --- a/tests/integration/standard/test_udts.py +++ b/tests/integration/standard/test_udts.py @@ -65,7 +65,7 @@ def test_non_frozen_udts(self): self.session.execute("INSERT INTO {0} (a, b) VALUES (%s, %s)".format(self.function_table_name), (0, User("Nebraska", True))) self.session.execute("UPDATE {0} SET b.has_corn = False where a = 0".format(self.function_table_name)) result = self.session.execute("SELECT * FROM {0}".format(self.function_table_name)) - self.assertFalse(result[0].b.has_corn) + self.assertFalse(result.one().b.has_corn) table_sql = self.cluster.metadata.keyspaces[self.keyspace_name].tables[self.function_table_name].as_cql_query() self.assertNotIn("", table_sql) @@ -85,7 +85,7 @@ def test_can_insert_unprepared_registered_udts(self): s.execute("INSERT INTO mytable (a, b) VALUES (%s, %s)", (0, User(42, 'bob'))) result = s.execute("SELECT b FROM mytable WHERE a=0") - row = result[0] + row = result.one() self.assertEqual(42, row.b.age) self.assertEqual('bob', row.b.name) self.assertTrue(type(row.b) is User) @@ -104,7 +104,7 @@ def test_can_insert_unprepared_registered_udts(self): s.execute("INSERT INTO mytable (a, b) VALUES (%s, %s)", (0, User('Texas', True))) result = s.execute("SELECT b FROM mytable WHERE a=0") - row = result[0] + row = result.one() self.assertEqual('Texas', 
row.b.state) self.assertEqual(True, row.b.is_cool) self.assertTrue(type(row.b) is User) @@ -150,7 +150,7 @@ def test_can_register_udt_before_connecting(self): s.execute("INSERT INTO udt_test_register_before_connecting.mytable (a, b) VALUES (%s, %s)", (0, User1(42, 'bob'))) result = s.execute("SELECT b FROM udt_test_register_before_connecting.mytable WHERE a=0") - row = result[0] + row = result.one() self.assertEqual(42, row.b.age) self.assertEqual('bob', row.b.name) self.assertTrue(type(row.b) is User1) @@ -158,7 +158,7 @@ def test_can_register_udt_before_connecting(self): # use the same UDT name in a different keyspace s.execute("INSERT INTO udt_test_register_before_connecting2.mytable (a, b) VALUES (%s, %s)", (0, User2('Texas', True))) result = s.execute("SELECT b FROM udt_test_register_before_connecting2.mytable WHERE a=0") - row = result[0] + row = result.one() self.assertEqual('Texas', row.b.state) self.assertEqual(True, row.b.is_cool) self.assertTrue(type(row.b) is User2) @@ -185,7 +185,7 @@ def test_can_insert_prepared_unregistered_udts(self): select = s.prepare("SELECT b FROM mytable WHERE a=?") result = s.execute(select, (0,)) - row = result[0] + row = result.one() self.assertEqual(42, row.b.age) self.assertEqual('bob', row.b.name) @@ -204,7 +204,7 @@ def test_can_insert_prepared_unregistered_udts(self): select = s.prepare("SELECT b FROM mytable WHERE a=?") result = s.execute(select, (0,)) - row = result[0] + row = result.one() self.assertEqual('Texas', row.b.state) self.assertEqual(True, row.b.is_cool) @@ -231,7 +231,7 @@ def test_can_insert_prepared_registered_udts(self): select = s.prepare("SELECT b FROM mytable WHERE a=?") result = s.execute(select, (0,)) - row = result[0] + row = result.one() self.assertEqual(42, row.b.age) self.assertEqual('bob', row.b.name) self.assertTrue(type(row.b) is User) @@ -253,7 +253,7 @@ def test_can_insert_prepared_registered_udts(self): select = s.prepare("SELECT b FROM mytable WHERE a=?") result = s.execute(select, (0,)) - row = result[0] + row = result.one() self.assertEqual('Texas', row.b.state) self.assertEqual(True, row.b.is_cool) self.assertTrue(type(row.b) is User) @@ -280,15 +280,15 @@ def test_can_insert_udts_with_nulls(self): s.execute(insert, [User(None, None, None, None)]) results = s.execute("SELECT b FROM mytable WHERE a=0") - self.assertEqual((None, None, None, None), results[0].b) + self.assertEqual((None, None, None, None), results.one().b) select = s.prepare("SELECT b FROM mytable WHERE a=0") - self.assertEqual((None, None, None, None), s.execute(select)[0].b) + self.assertEqual((None, None, None, None), s.execute(select).one().b) # also test empty strings s.execute(insert, [User('', None, None, bytes())]) results = s.execute("SELECT b FROM mytable WHERE a=0") - self.assertEqual(('', None, None, bytes()), results[0].b) + self.assertEqual(('', None, None, bytes()), results.one().b) c.shutdown() @@ -327,7 +327,7 @@ def test_can_insert_udts_with_varying_lengths(self): s.execute("INSERT INTO mytable (k, v) VALUES (0, %s)", (created_udt,)) # verify udt was written and read correctly, increase timeout to avoid the query failure on slow systems - result = s.execute("SELECT v FROM mytable WHERE k=0")[0] + result = s.execute("SELECT v FROM mytable WHERE k=0").one() self.assertEqual(created_udt, result.v) c.shutdown() @@ -365,7 +365,7 @@ def nested_udt_verification_helper(self, session, max_nesting_depth, udts): session.execute("INSERT INTO mytable (k, v_%s) VALUES (0, %s)", [i, udt]) # verify udt was written and read correctly - result = 
session.execute("SELECT v_{0} FROM mytable WHERE k=0".format(i))[0] + result = session.execute("SELECT v_{0} FROM mytable WHERE k=0".format(i)).one() self.assertEqual(udt, result["v_{0}".format(i)]) # write udt via prepared statement @@ -373,7 +373,7 @@ def nested_udt_verification_helper(self, session, max_nesting_depth, udts): session.execute(insert, [udt]) # verify udt was written and read correctly - result = session.execute("SELECT v_{0} FROM mytable WHERE k=1".format(i))[0] + result = session.execute("SELECT v_{0} FROM mytable WHERE k=1".format(i)).one() self.assertEqual(udt, result["v_{0}".format(i)]) def _cluster_default_dict_factory(self): @@ -441,7 +441,7 @@ def test_can_insert_nested_unregistered_udts(self): s.execute(insert, [udt]) # verify udt was written and read correctly - result = s.execute("SELECT v_{0} FROM mytable WHERE k=0".format(i))[0] + result = s.execute("SELECT v_{0} FROM mytable WHERE k=0".format(i)).one() self.assertEqual(udt, result["v_{0}".format(i)]) def test_can_insert_nested_registered_udts_with_different_namedtuples(self): @@ -532,7 +532,7 @@ def test_can_insert_udt_all_datatypes(self): # retrieve and verify data results = s.execute("SELECT * FROM mytable") - row = results[0].b + row = results.one().b for expected, actual in zip(params, row): self.assertEqual(expected, actual) @@ -590,7 +590,7 @@ def test_can_insert_udt_all_collection_datatypes(self): # retrieve and verify data results = s.execute("SELECT * FROM mytable") - row = results[0].b + row = results.one().b for expected, actual in zip(params, row): self.assertEqual(expected, actual) @@ -599,7 +599,7 @@ def test_can_insert_udt_all_collection_datatypes(self): def insert_select_column(self, session, table_name, column_name, value): insert = session.prepare("INSERT INTO %s (k, %s) VALUES (?, ?)" % (table_name, column_name)) session.execute(insert, (0, value)) - result = session.execute("SELECT %s FROM %s WHERE k=%%s" % (column_name, table_name), (0,))[0][0] + result = session.execute("SELECT %s FROM %s WHERE k=%%s" % (column_name, table_name), (0,)).one()[0] self.assertEqual(result, value) def test_can_insert_nested_collections(self): @@ -666,7 +666,7 @@ def test_non_alphanum_identifiers(self): # table with types as map keys to make sure the tuple lookup works s.execute('CREATE TABLE %s (k int PRIMARY KEY, non_alphanum_type_map map, int>, alphanum_type_map map, int>)' % (self.table_name, non_alphanum_name, type_name)) s.execute('INSERT INTO %s (k, non_alphanum_type_map, alphanum_type_map) VALUES (%s, {{"%s": \'nonalphanum\'}: 0}, {{"%s": \'alphanum\'}: 1})' % (self.table_name, 0, non_alphanum_name, non_alphanum_name)) - row = s.execute('SELECT * FROM %s' % (self.table_name,))[0] + row = s.execute('SELECT * FROM %s' % (self.table_name,)).one() k, v = row.non_alphanum_type_map.popitem() self.assertEqual(v, 0) @@ -695,23 +695,23 @@ def test_type_alteration(self): s.cluster.register_user_type('udttests', type_name, dict) - val = s.execute('SELECT v FROM %s' % self.table_name)[0][0] + val = s.execute('SELECT v FROM %s' % self.table_name).one()[0] self.assertEqual(val['v0'], 1) # add field s.execute('ALTER TYPE %s ADD v1 text' % (type_name,)) - val = s.execute('SELECT v FROM %s' % self.table_name)[0][0] + val = s.execute('SELECT v FROM %s' % self.table_name).one()[0] self.assertEqual(val['v0'], 1) self.assertIsNone(val['v1']) s.execute("INSERT INTO %s (k, v) VALUES (0, {v0 : 2, v1 : 'sometext'})" % (self.table_name,)) - val = s.execute('SELECT v FROM %s' % self.table_name)[0][0] + val = s.execute('SELECT v 
FROM %s' % self.table_name).one()[0] self.assertEqual(val['v0'], 2) self.assertEqual(val['v1'], 'sometext') # alter field type s.execute('ALTER TYPE %s ALTER v1 TYPE blob' % (type_name,)) s.execute("INSERT INTO %s (k, v) VALUES (0, {v0 : 3, v1 : 0xdeadbeef})" % (self.table_name,)) - val = s.execute('SELECT v FROM %s' % self.table_name)[0][0] + val = s.execute('SELECT v FROM %s' % self.table_name).one()[0] self.assertEqual(val['v0'], 3) self.assertEqual(val['v1'], b'\xde\xad\xbe\xef') diff --git a/tests/integration/upgrade/test_upgrade.py b/tests/integration/upgrade/test_upgrade.py index 63e1a64b9d..d4b1594383 100644 --- a/tests/integration/upgrade/test_upgrade.py +++ b/tests/integration/upgrade/test_upgrade.py @@ -56,7 +56,7 @@ def test_can_write(self): self.session.execute("INSERT INTO test3rf.test(k, v) VALUES (%s, 0)", (next(c), ), execution_profile="one") time.sleep(0.0001) - total_number_of_inserted = self.session.execute("SELECT COUNT(*) from test3rf.test", execution_profile="all")[0][0] + total_number_of_inserted = self.session.execute("SELECT COUNT(*) from test3rf.test", execution_profile="all").one()[0] self.assertEqual(total_number_of_inserted, next(c)) self.assertEqual(self.logger_handler.get_message_count("error", ""), 0) @@ -115,7 +115,7 @@ def test_can_write(self): self.session.execute("INSERT INTO test3rf.test(k, v) VALUES (%s, 0)", (next(c),), execution_profile="one") time.sleep(0.0001) - total_number_of_inserted = self.session.execute("SELECT COUNT(*) from test3rf.test", execution_profile="all")[0][0] + total_number_of_inserted = self.session.execute("SELECT COUNT(*) from test3rf.test", execution_profile="all").one()[0] self.assertEqual(total_number_of_inserted, next(c)) self.assertEqual(self.logger_handler.get_message_count("error", ""), 0) @@ -279,7 +279,7 @@ def test_can_write_speculative(self): execution_profile='spec_ep_rr') time.sleep(0.0001) - total_number_of_inserted = session.execute("SELECT COUNT(*) from test3rf.test", execution_profile="all")[0][0] + total_number_of_inserted = session.execute("SELECT COUNT(*) from test3rf.test", execution_profile="all").one()[0] self.assertEqual(total_number_of_inserted, next(c)) self.assertEqual(self.logger_handler.get_message_count("error", ""), 0)