From 99fbdbf099c4fe6e8885bfb37e487ed0e4b84950 Mon Sep 17 00:00:00 2001 From: Hannah Stepanek Date: Thu, 17 Aug 2023 12:50:44 -0700 Subject: [PATCH] Add Database/instance metric check --- tests/datastore_bmemcached/test_memcache.py | 95 ++++--- .../test_async_batching.py | 13 +- .../datastore_firestore/test_async_client.py | 14 +- .../test_async_collections.py | 17 +- .../test_async_documents.py | 19 +- tests/datastore_firestore/test_async_query.py | 50 +++- .../test_async_transaction.py | 44 +++- tests/datastore_firestore/test_batching.py | 9 +- tests/datastore_firestore/test_client.py | 4 +- tests/datastore_firestore/test_collections.py | 6 +- tests/datastore_firestore/test_documents.py | 6 +- tests/datastore_firestore/test_query.py | 13 +- tests/datastore_firestore/test_transaction.py | 8 +- tests/datastore_mysql/test_database.py | 235 ++++++++++------- tests/datastore_postgresql/test_database.py | 2 + tests/datastore_psycopg2cffi/test_database.py | 249 ++++++++++-------- tests/datastore_pylibmc/test_memcache.py | 87 +++--- tests/datastore_pymemcache/test_memcache.py | 82 +++--- tests/datastore_pymysql/test_database.py | 197 ++++++++------ tests/datastore_pyodbc/test_pyodbc.py | 1 + tests/datastore_pysolr/test_solr.py | 45 ++-- tests/datastore_solrpy/test_solr.py | 50 ++-- 22 files changed, 734 insertions(+), 512 deletions(-) diff --git a/tests/datastore_bmemcached/test_memcache.py b/tests/datastore_bmemcached/test_memcache.py index 68eee06333..33f949d55d 100644 --- a/tests/datastore_bmemcached/test_memcache.py +++ b/tests/datastore_bmemcached/test_memcache.py @@ -13,83 +13,94 @@ # limitations under the License. 
import os -from testing_support.db_settings import memcached_settings + import bmemcached +from testing_support.db_settings import memcached_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import set_background_task -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.db_settings import memcached_settings - - DB_SETTINGS = memcached_settings()[0] -MEMCACHED_HOST = DB_SETTINGS['host'] -MEMCACHED_PORT = DB_SETTINGS['port'] +MEMCACHED_HOST = DB_SETTINGS["host"] +MEMCACHED_PORT = DB_SETTINGS["port"] MEMCACHED_NAMESPACE = str(os.getpid()) -MEMCACHED_ADDR = '%s:%s' % (MEMCACHED_HOST, MEMCACHED_PORT) +MEMCACHED_ADDR = "%s:%s" % (MEMCACHED_HOST, MEMCACHED_PORT) _test_bt_set_get_delete_scoped_metrics = [ - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] _test_bt_set_get_delete_rollup_metrics = [ - ('Datastore/all', 3), - ('Datastore/allOther', 3), - ('Datastore/Memcached/all', 3), - ('Datastore/Memcached/allOther', 3), - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Memcached/all", 3), + ("Datastore/Memcached/allOther", 3), + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), + ("Datastore/instance/Memcached/%s/%s" % (MEMCACHED_HOST, MEMCACHED_PORT), 1), +] + @validate_transaction_metrics( - 'test_memcache:test_bt_set_get_delete', - scoped_metrics=_test_bt_set_get_delete_scoped_metrics, - 
rollup_metrics=_test_bt_set_get_delete_rollup_metrics, - background_task=True) + "test_memcache:test_bt_set_get_delete", + scoped_metrics=_test_bt_set_get_delete_scoped_metrics, + rollup_metrics=_test_bt_set_get_delete_rollup_metrics, + background_task=True, +) @background_task() def test_bt_set_get_delete(): set_background_task(True) client = bmemcached.Client([MEMCACHED_ADDR]) - key = MEMCACHED_NAMESPACE + 'key' + key = MEMCACHED_NAMESPACE + "key" - client.set(key, 'value') + client.set(key, "value") value = client.get(key) client.delete(key) - assert value == 'value' + assert value == "value" + _test_wt_set_get_delete_scoped_metrics = [ - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] _test_wt_set_get_delete_rollup_metrics = [ - ('Datastore/all', 3), - ('Datastore/allWeb', 3), - ('Datastore/Memcached/all', 3), - ('Datastore/Memcached/allWeb', 3), - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/all", 3), + ("Datastore/allWeb", 3), + ("Datastore/Memcached/all", 3), + ("Datastore/Memcached/allWeb", 3), + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), + ("Datastore/instance/Memcached/%s/%s" % (MEMCACHED_HOST, MEMCACHED_PORT), 1), +] + + @validate_transaction_metrics( - 'test_memcache:test_wt_set_get_delete', - scoped_metrics=_test_wt_set_get_delete_scoped_metrics, - rollup_metrics=_test_wt_set_get_delete_rollup_metrics, - background_task=False) + "test_memcache:test_wt_set_get_delete", + scoped_metrics=_test_wt_set_get_delete_scoped_metrics, + rollup_metrics=_test_wt_set_get_delete_rollup_metrics, + background_task=False, +) @background_task() def
test_wt_set_get_delete(): set_background_task(False) client = bmemcached.Client([MEMCACHED_ADDR]) - key = MEMCACHED_NAMESPACE + 'key' + key = MEMCACHED_NAMESPACE + "key" - client.set(key, 'value') + client.set(key, "value") value = client.get(key) client.delete(key) - assert value == 'value' + assert value == "value" diff --git a/tests/datastore_firestore/test_async_batching.py b/tests/datastore_firestore/test_async_batching.py index 08890c39af..fd4afea8cb 100644 --- a/tests/datastore_firestore/test_async_batching.py +++ b/tests/datastore_firestore/test_async_batching.py @@ -13,16 +13,18 @@ # limitations under the License. import pytest - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from newrelic.api.background_task import background_task from testing_support.validators.validate_database_duration import ( validate_database_duration, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from testing_support.validators.validate_tt_collector_json import ( validate_tt_collector_json, ) +from newrelic.api.background_task import background_task + @pytest.fixture() def exercise_async_write_batch(async_client, async_collection): @@ -33,10 +35,11 @@ async def _exercise_async_write_batch(): async_batch.set(doc, {}) await async_batch.commit() + return _exercise_async_write_batch -def test_firestore_async_write_batch(loop, exercise_async_write_batch): +def test_firestore_async_write_batch(loop, exercise_async_write_batch, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/commit", 1), ] @@ -44,7 +47,9 @@ def test_firestore_async_write_batch(loop, exercise_async_write_batch): _test_rollup_metrics = [ ("Datastore/all", 1), ("Datastore/allOther", 1), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 1), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_write_batch", 
diff --git a/tests/datastore_firestore/test_async_client.py b/tests/datastore_firestore/test_async_client.py index 1a17181d59..703fbc4c28 100644 --- a/tests/datastore_firestore/test_async_client.py +++ b/tests/datastore_firestore/test_async_client.py @@ -13,16 +13,18 @@ # limitations under the License. import pytest - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from newrelic.api.background_task import background_task from testing_support.validators.validate_database_duration import ( validate_database_duration, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from testing_support.validators.validate_tt_collector_json import ( validate_tt_collector_json, ) +from newrelic.api.background_task import background_task + @pytest.fixture() def existing_document(collection): @@ -37,10 +39,11 @@ async def _exercise_async_client(): assert len([_ async for _ in async_client.collections()]) >= 1 doc = [_ async for _ in async_client.get_all([existing_document])][0] assert doc.to_dict()["x"] == 1 + return _exercise_async_client -def test_firestore_async_client(loop, exercise_async_client): +def test_firestore_async_client(loop, exercise_async_client, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/collections", 1), ("Datastore/operation/Firestore/get_all", 1), @@ -49,6 +52,7 @@ def test_firestore_async_client(loop, exercise_async_client): _test_rollup_metrics = [ ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] @validate_database_duration() @@ -60,7 +64,7 @@ def test_firestore_async_client(loop, exercise_async_client): ) @background_task(name="test_firestore_async_client") def _test(): - loop.run_until_complete(exercise_async_client()) + loop.run_until_complete(exercise_async_client()) _test() diff --git 
a/tests/datastore_firestore/test_async_collections.py b/tests/datastore_firestore/test_async_collections.py index a1004a7205..45ac691b97 100644 --- a/tests/datastore_firestore/test_async_collections.py +++ b/tests/datastore_firestore/test_async_collections.py @@ -13,16 +13,18 @@ # limitations under the License. import pytest - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from newrelic.api.background_task import background_task from testing_support.validators.validate_database_duration import ( validate_database_duration, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from testing_support.validators.validate_tt_collector_json import ( validate_tt_collector_json, ) +from newrelic.api.background_task import background_task + @pytest.fixture() def exercise_async_collections(async_collection): @@ -30,17 +32,18 @@ async def _exercise_async_collections(): async_collection.document("DoesNotExist") await async_collection.add({"capital": "Rome", "currency": "Euro", "language": "Italian"}, "Italy") await async_collection.add({"capital": "Mexico City", "currency": "Peso", "language": "Spanish"}, "Mexico") - + documents_get = await async_collection.get() assert len(documents_get) == 2 documents_stream = [_ async for _ in async_collection.stream()] assert len(documents_stream) == 2 documents_list = [_ async for _ in async_collection.list_documents()] assert len(documents_list) == 2 + return _exercise_async_collections -def test_firestore_async_collections(loop, exercise_async_collections, async_collection): +def test_firestore_async_collections(loop, exercise_async_collections, async_collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/stream" % async_collection.id, 1), ("Datastore/statement/Firestore/%s/get" % async_collection.id, 1), @@ -55,7 +58,9 @@ def test_firestore_async_collections(loop, exercise_async_collections, 
async_col ("Datastore/operation/Firestore/list_documents", 1), ("Datastore/all", 5), ("Datastore/allOther", 5), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 5), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_collections", @@ -75,7 +80,7 @@ def test_firestore_async_collections_generators(collection, async_collection, as collection.add({}) collection.add({}) assert len([_ for _ in collection.list_documents()]) == 2 - + assert_trace_for_async_generator(async_collection.stream) assert_trace_for_async_generator(async_collection.list_documents) diff --git a/tests/datastore_firestore/test_async_documents.py b/tests/datastore_firestore/test_async_documents.py index 9c0a30479d..8a4ecd7bba 100644 --- a/tests/datastore_firestore/test_async_documents.py +++ b/tests/datastore_firestore/test_async_documents.py @@ -13,16 +13,18 @@ # limitations under the License. import pytest - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from newrelic.api.background_task import background_task from testing_support.validators.validate_database_duration import ( validate_database_duration, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from testing_support.validators.validate_tt_collector_json import ( validate_tt_collector_json, ) +from newrelic.api.background_task import background_task + @pytest.fixture() def exercise_async_documents(async_collection): @@ -40,10 +42,11 @@ async def _exercise_async_documents(): await usa_doc.update({"president": "Joe Biden"}) await async_collection.document("USA").delete() + return _exercise_async_documents -def test_firestore_async_documents(loop, exercise_async_documents): +def test_firestore_async_documents(loop, exercise_async_documents, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/Italy/set", 1), 
("Datastore/statement/Firestore/Italy/get", 1), @@ -64,7 +67,9 @@ def test_firestore_async_documents(loop, exercise_async_documents): ("Datastore/operation/Firestore/delete", 1), ("Datastore/all", 7), ("Datastore/allOther", 7), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 7), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_documents", @@ -80,7 +85,9 @@ def _test(): @background_task() -def test_firestore_async_documents_generators(collection, async_collection, assert_trace_for_async_generator): +def test_firestore_async_documents_generators( + collection, async_collection, assert_trace_for_async_generator, instance_info +): subcollection_doc = collection.document("SubCollections") subcollection_doc.set({}) subcollection_doc.collection("collection1").add({}) @@ -88,7 +95,7 @@ def test_firestore_async_documents_generators(collection, async_collection, asse assert len([_ for _ in subcollection_doc.collections()]) == 2 async_subcollection = async_collection.document(subcollection_doc.id) - + assert_trace_for_async_generator(async_subcollection.collections) diff --git a/tests/datastore_firestore/test_async_query.py b/tests/datastore_firestore/test_async_query.py index c3e43d0e4c..8c29841e9f 100644 --- a/tests/datastore_firestore/test_async_query.py +++ b/tests/datastore_firestore/test_async_query.py @@ -13,16 +13,18 @@ # limitations under the License. 
import pytest - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from newrelic.api.background_task import background_task from testing_support.validators.validate_database_duration import ( validate_database_duration, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from testing_support.validators.validate_tt_collector_json import ( validate_tt_collector_json, ) +from newrelic.api.background_task import background_task + @pytest.fixture(autouse=True) def sample_data(collection): @@ -36,16 +38,20 @@ def sample_data(collection): # ===== AsyncQuery ===== + @pytest.fixture() def exercise_async_query(async_collection): async def _exercise_async_query(): - async_query = async_collection.select("x").limit(10).order_by("x").where(field_path="x", op_string="<=", value=3) + async_query = ( + async_collection.select("x").limit(10).order_by("x").where(field_path="x", op_string="<=", value=3) + ) assert len(await async_query.get()) == 3 assert len([_ async for _ in async_query.stream()]) == 3 + return _exercise_async_query -def test_firestore_async_query(loop, exercise_async_query, async_collection): +def test_firestore_async_query(loop, exercise_async_query, async_collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/stream" % async_collection.id, 1), ("Datastore/statement/Firestore/%s/get" % async_collection.id, 1), @@ -56,7 +62,9 @@ def test_firestore_async_query(loop, exercise_async_query, async_collection): ("Datastore/operation/Firestore/stream", 1), ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] + # @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_query", @@ -85,18 +93,21 @@ def _test(): _test() + # ===== AsyncAggregationQuery ===== + @pytest.fixture() def 
exercise_async_aggregation_query(async_collection): async def _exercise_async_aggregation_query(): async_aggregation_query = async_collection.select("x").where(field_path="x", op_string="<=", value=3).count() assert (await async_aggregation_query.get())[0][0].value == 3 assert [_ async for _ in async_aggregation_query.stream()][0][0].value == 3 + return _exercise_async_aggregation_query -def test_firestore_async_aggregation_query(loop, exercise_async_aggregation_query, async_collection): +def test_firestore_async_aggregation_query(loop, exercise_async_aggregation_query, async_collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/stream" % async_collection.id, 1), ("Datastore/statement/Firestore/%s/get" % async_collection.id, 1), @@ -107,7 +118,9 @@ def test_firestore_async_aggregation_query(loop, exercise_async_aggregation_quer ("Datastore/operation/Firestore/stream", 1), ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_aggregation_query", @@ -128,7 +141,9 @@ def test_firestore_async_aggregation_query_generators(async_collection, assert_t assert_trace_for_async_generator(async_aggregation_query.stream) -def test_firestore_async_aggregation_query_trace_node_datastore_params(loop, exercise_async_aggregation_query, instance_info): +def test_firestore_async_aggregation_query_trace_node_datastore_params( + loop, exercise_async_aggregation_query, instance_info +): @validate_tt_collector_json(datastore_params=instance_info) @background_task() def _test(): @@ -145,9 +160,9 @@ def patch_partition_queries(monkeypatch, async_client, collection, sample_data): """ Partitioning is not implemented in the Firestore emulator. - Ordinarily this method would return a coroutine that returns an async_generator of Cursor objects. 
- Each Cursor must point at a valid document path. To test this, we can patch the RPC to return 1 Cursor - which is pointed at any document available. The get_partitions will take that and make 2 QueryPartition + Ordinarily this method would return a coroutine that returns an async_generator of Cursor objects. + Each Cursor must point at a valid document path. To test this, we can patch the RPC to return 1 Cursor + which is pointed at any document available. The get_partitions will take that and make 2 QueryPartition objects out of it, which should be enough to ensure we can exercise the generator's tracing. """ from google.cloud.firestore_v1.types.document import Value @@ -159,6 +174,7 @@ def patch_partition_queries(monkeypatch, async_client, collection, sample_data): async def mock_partition_query(*args, **kwargs): async def _mock_partition_query(): yield Cursor(before=False, values=[Value(reference_value=documents[0].path)]) + return _mock_partition_query() monkeypatch.setattr(async_client._firestore_api, "partition_query", mock_partition_query) @@ -178,10 +194,13 @@ async def _exercise_async_collection_group(): while partitions: documents.extend(await partitions.pop().query().get()) assert len(documents) == 6 + return _exercise_async_collection_group -def test_firestore_async_collection_group(loop, exercise_async_collection_group, async_collection, patch_partition_queries): +def test_firestore_async_collection_group( + loop, exercise_async_collection_group, async_collection, patch_partition_queries, instance_info +): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/get" % async_collection.id, 3), ("Datastore/statement/Firestore/%s/stream" % async_collection.id, 1), @@ -194,6 +213,7 @@ def test_firestore_async_collection_group(loop, exercise_async_collection_group, ("Datastore/operation/Firestore/get_partitions", 1), ("Datastore/all", 5), ("Datastore/allOther", 5), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], 
instance_info["port_path_or_id"]), 5), ] @validate_database_duration() @@ -211,12 +231,16 @@ def _test(): @background_task() -def test_firestore_async_collection_group_generators(async_client, async_collection, assert_trace_for_async_generator, patch_partition_queries): +def test_firestore_async_collection_group_generators( + async_client, async_collection, assert_trace_for_async_generator, patch_partition_queries +): async_collection_group = async_client.collection_group(async_collection.id) assert_trace_for_async_generator(async_collection_group.get_partitions, 1) -def test_firestore_async_collection_group_trace_node_datastore_params(loop, exercise_async_collection_group, instance_info, patch_partition_queries): +def test_firestore_async_collection_group_trace_node_datastore_params( + loop, exercise_async_collection_group, instance_info, patch_partition_queries +): @validate_tt_collector_json(datastore_params=instance_info) @background_task() def _test(): diff --git a/tests/datastore_firestore/test_async_transaction.py b/tests/datastore_firestore/test_async_transaction.py index 134c080bdd..2b8646ec5b 100644 --- a/tests/datastore_firestore/test_async_transaction.py +++ b/tests/datastore_firestore/test_async_transaction.py @@ -13,16 +13,18 @@ # limitations under the License. 
import pytest - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from newrelic.api.background_task import background_task from testing_support.validators.validate_database_duration import ( validate_database_duration, ) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from testing_support.validators.validate_tt_collector_json import ( validate_tt_collector_json, ) +from newrelic.api.background_task import background_task + @pytest.fixture(autouse=True) def sample_data(collection): @@ -34,20 +36,26 @@ def sample_data(collection): def exercise_async_transaction_commit(async_client, async_collection): async def _exercise_async_transaction_commit(): from google.cloud.firestore import async_transactional - + @async_transactional async def _exercise(async_transaction): # get a DocumentReference - with pytest.raises(TypeError): # get is currently broken. It attempts to await an async_generator instead of consuming it. + with pytest.raises( + TypeError + ): # get is currently broken. It attempts to await an async_generator instead of consuming it. [_ async for _ in async_transaction.get(async_collection.document("doc1"))] # get a Query - with pytest.raises(TypeError): # get is currently broken. It attempts to await an async_generator instead of consuming it. + with pytest.raises( + TypeError + ): # get is currently broken. It attempts to await an async_generator instead of consuming it. async_query = async_collection.select("x").where(field_path="x", op_string=">", value=2) assert len([_ async for _ in async_transaction.get(async_query)]) == 1 # get_all on a list of DocumentReferences - with pytest.raises(TypeError): # get_all is currently broken. It attempts to await an async_generator instead of consuming it. + with pytest.raises( + TypeError + ): # get_all is currently broken. It attempts to await an async_generator instead of consuming it. 
all_docs = async_transaction.get_all([async_collection.document("doc%d" % x) for x in range(1, 4)]) assert len([_ async for _ in all_docs]) == 3 @@ -57,6 +65,7 @@ async def _exercise(async_transaction): await _exercise(async_client.transaction()) assert len([_ async for _ in async_collection.list_documents()]) == 2 + return _exercise_async_transaction_commit @@ -64,7 +73,7 @@ async def _exercise(async_transaction): def exercise_async_transaction_rollback(async_client, async_collection): async def _exercise_async_transaction_rollback(): from google.cloud.firestore import async_transactional - + @async_transactional async def _exercise(async_transaction): # set and delete methods @@ -75,10 +84,11 @@ async def _exercise(async_transaction): with pytest.raises(RuntimeError): await _exercise(async_client.transaction()) assert len([_ async for _ in async_collection.list_documents()]) == 3 + return _exercise_async_transaction_rollback -def test_firestore_async_transaction_commit(loop, exercise_async_transaction_commit, async_collection): +def test_firestore_async_transaction_commit(loop, exercise_async_transaction_commit, async_collection, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/commit", 1), # ("Datastore/operation/Firestore/get_all", 2), @@ -91,7 +101,9 @@ def test_firestore_async_transaction_commit(loop, exercise_async_transaction_com ("Datastore/operation/Firestore/list_documents", 1), ("Datastore/all", 2), # Should be 5 if not for broken APIs ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_transaction", @@ -106,7 +118,9 @@ def _test(): _test() -def test_firestore_async_transaction_rollback(loop, exercise_async_transaction_rollback, async_collection): +def test_firestore_async_transaction_rollback( + loop, exercise_async_transaction_rollback, async_collection, 
instance_info +): _test_scoped_metrics = [ ("Datastore/operation/Firestore/rollback", 1), ("Datastore/statement/Firestore/%s/list_documents" % async_collection.id, 1), @@ -116,7 +130,9 @@ def test_firestore_async_transaction_rollback(loop, exercise_async_transaction_r ("Datastore/operation/Firestore/list_documents", 1), ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_async_transaction", @@ -131,7 +147,9 @@ def _test(): _test() -def test_firestore_async_transaction_commit_trace_node_datastore_params(loop, exercise_async_transaction_commit, instance_info): +def test_firestore_async_transaction_commit_trace_node_datastore_params( + loop, exercise_async_transaction_commit, instance_info +): @validate_tt_collector_json(datastore_params=instance_info) @background_task() def _test(): @@ -140,7 +158,9 @@ def _test(): _test() -def test_firestore_async_transaction_rollback_trace_node_datastore_params(loop, exercise_async_transaction_rollback, instance_info): +def test_firestore_async_transaction_rollback_trace_node_datastore_params( + loop, exercise_async_transaction_rollback, instance_info +): @validate_tt_collector_json(datastore_params=instance_info) @background_task() def _test(): diff --git a/tests/datastore_firestore/test_batching.py b/tests/datastore_firestore/test_batching.py index 5dcdd7b396..07964338c0 100644 --- a/tests/datastore_firestore/test_batching.py +++ b/tests/datastore_firestore/test_batching.py @@ -13,7 +13,6 @@ # limitations under the License. 
import pytest - from testing_support.validators.validate_database_duration import ( validate_database_duration, ) @@ -38,10 +37,11 @@ def _exercise_write_batch(): batch.set(doc, {}) batch.commit() + return _exercise_write_batch -def test_firestore_write_batch(exercise_write_batch): +def test_firestore_write_batch(exercise_write_batch, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/commit", 1), ] @@ -49,6 +49,7 @@ def test_firestore_write_batch(exercise_write_batch): _test_rollup_metrics = [ ("Datastore/all", 1), ("Datastore/allOther", 1), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 1), ] @validate_database_duration() @@ -88,10 +89,11 @@ def _exercise_bulk_write_batch(): batch.set(doc, {}) batch.commit() + return _exercise_bulk_write_batch -def test_firestore_bulk_write_batch(exercise_bulk_write_batch): +def test_firestore_bulk_write_batch(exercise_bulk_write_batch, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/commit", 1), ] @@ -99,6 +101,7 @@ def test_firestore_bulk_write_batch(exercise_bulk_write_batch): _test_rollup_metrics = [ ("Datastore/all", 1), ("Datastore/allOther", 1), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 1), ] @validate_database_duration() diff --git a/tests/datastore_firestore/test_client.py b/tests/datastore_firestore/test_client.py index 06580356ad..a5fd1b37f8 100644 --- a/tests/datastore_firestore/test_client.py +++ b/tests/datastore_firestore/test_client.py @@ -38,10 +38,11 @@ def _exercise_client(): assert len([_ for _ in client.collections()]) doc = [_ for _ in client.get_all([sample_data])][0] assert doc.to_dict()["x"] == 1 + return _exercise_client -def test_firestore_client(exercise_client): +def test_firestore_client(exercise_client, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/collections", 1), ("Datastore/operation/Firestore/get_all", 1), @@ 
-50,6 +51,7 @@ def test_firestore_client(exercise_client): _test_rollup_metrics = [ ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] @validate_database_duration() diff --git a/tests/datastore_firestore/test_collections.py b/tests/datastore_firestore/test_collections.py index c5c443dcef..2e58bbe950 100644 --- a/tests/datastore_firestore/test_collections.py +++ b/tests/datastore_firestore/test_collections.py @@ -13,7 +13,6 @@ # limitations under the License. import pytest - from testing_support.validators.validate_database_duration import ( validate_database_duration, ) @@ -40,10 +39,11 @@ def _exercise_collections(): assert len(documents_stream) == 2 documents_list = [_ for _ in collection.list_documents()] assert len(documents_list) == 2 + return _exercise_collections -def test_firestore_collections(exercise_collections, collection): +def test_firestore_collections(exercise_collections, collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/stream" % collection.id, 1), ("Datastore/statement/Firestore/%s/get" % collection.id, 1), @@ -58,7 +58,9 @@ def test_firestore_collections(exercise_collections, collection): ("Datastore/operation/Firestore/list_documents", 1), ("Datastore/all", 5), ("Datastore/allOther", 5), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 5), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_collections", diff --git a/tests/datastore_firestore/test_documents.py b/tests/datastore_firestore/test_documents.py index 2006899601..ae6b94edd8 100644 --- a/tests/datastore_firestore/test_documents.py +++ b/tests/datastore_firestore/test_documents.py @@ -13,7 +13,6 @@ # limitations under the License. 
import pytest - from testing_support.validators.validate_database_duration import ( validate_database_duration, ) @@ -43,10 +42,11 @@ def _exercise_documents(): usa_doc.update({"president": "Joe Biden"}) collection.document("USA").delete() + return _exercise_documents -def test_firestore_documents(exercise_documents): +def test_firestore_documents(exercise_documents, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/Italy/set", 1), ("Datastore/statement/Firestore/Italy/get", 1), @@ -67,7 +67,9 @@ def test_firestore_documents(exercise_documents): ("Datastore/operation/Firestore/delete", 1), ("Datastore/all", 7), ("Datastore/allOther", 7), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 7), ] + @validate_database_duration() @validate_transaction_metrics( "test_firestore_documents", diff --git a/tests/datastore_firestore/test_query.py b/tests/datastore_firestore/test_query.py index 5e681f53e5..6f1643c5b9 100644 --- a/tests/datastore_firestore/test_query.py +++ b/tests/datastore_firestore/test_query.py @@ -45,10 +45,11 @@ def _exercise_query(): query = collection.select("x").limit(10).order_by("x").where(field_path="x", op_string="<=", value=3) assert len(query.get()) == 3 assert len([_ for _ in query.stream()]) == 3 + return _exercise_query -def test_firestore_query(exercise_query, collection): +def test_firestore_query(exercise_query, collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/stream" % collection.id, 1), ("Datastore/statement/Firestore/%s/get" % collection.id, 1), @@ -59,6 +60,7 @@ def test_firestore_query(exercise_query, collection): ("Datastore/operation/Firestore/stream", 1), ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] @validate_database_duration() @@ -89,6 +91,7 @@ def _test(): _test() + # ===== AggregationQuery ===== @@ -98,10 
+101,11 @@ def _exercise_aggregation_query(): aggregation_query = collection.select("x").where(field_path="x", op_string="<=", value=3).count() assert aggregation_query.get()[0][0].value == 3 assert [_ for _ in aggregation_query.stream()][0][0].value == 3 + return _exercise_aggregation_query -def test_firestore_aggregation_query(exercise_aggregation_query, collection): +def test_firestore_aggregation_query(exercise_aggregation_query, collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/stream" % collection.id, 1), ("Datastore/statement/Firestore/%s/get" % collection.id, 1), @@ -112,6 +116,7 @@ def test_firestore_aggregation_query(exercise_aggregation_query, collection): ("Datastore/operation/Firestore/stream", 1), ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] @validate_database_duration() @@ -182,10 +187,11 @@ def _exercise_collection_group(): while partitions: documents.extend(partitions.pop().query().get()) assert len(documents) == 6 + return _exercise_collection_group -def test_firestore_collection_group(exercise_collection_group, client, collection): +def test_firestore_collection_group(exercise_collection_group, client, collection, instance_info): _test_scoped_metrics = [ ("Datastore/statement/Firestore/%s/get" % collection.id, 3), ("Datastore/statement/Firestore/%s/stream" % collection.id, 1), @@ -198,6 +204,7 @@ def test_firestore_collection_group(exercise_collection_group, client, collectio ("Datastore/operation/Firestore/get_partitions", 1), ("Datastore/all", 5), ("Datastore/allOther", 5), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 5), ] @validate_database_duration() diff --git a/tests/datastore_firestore/test_transaction.py b/tests/datastore_firestore/test_transaction.py index c322a797ea..59d496a00a 100644 --- 
a/tests/datastore_firestore/test_transaction.py +++ b/tests/datastore_firestore/test_transaction.py @@ -55,6 +55,7 @@ def _exercise(transaction): _exercise(client.transaction()) assert len([_ for _ in collection.list_documents()]) == 2 + return _exercise_transaction_commit @@ -73,10 +74,11 @@ def _exercise(transaction): with pytest.raises(RuntimeError): _exercise(client.transaction()) assert len([_ for _ in collection.list_documents()]) == 3 + return _exercise_transaction_rollback -def test_firestore_transaction_commit(exercise_transaction_commit, collection): +def test_firestore_transaction_commit(exercise_transaction_commit, collection, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/commit", 1), ("Datastore/operation/Firestore/get_all", 2), @@ -89,6 +91,7 @@ def test_firestore_transaction_commit(exercise_transaction_commit, collection): ("Datastore/operation/Firestore/list_documents", 1), ("Datastore/all", 5), ("Datastore/allOther", 5), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 5), ] @validate_database_duration() @@ -105,7 +108,7 @@ def _test(): _test() -def test_firestore_transaction_rollback(exercise_transaction_rollback, collection): +def test_firestore_transaction_rollback(exercise_transaction_rollback, collection, instance_info): _test_scoped_metrics = [ ("Datastore/operation/Firestore/rollback", 1), ("Datastore/statement/Firestore/%s/list_documents" % collection.id, 1), @@ -115,6 +118,7 @@ def test_firestore_transaction_rollback(exercise_transaction_rollback, collectio ("Datastore/operation/Firestore/list_documents", 1), ("Datastore/all", 2), ("Datastore/allOther", 2), + ("Datastore/instance/Firestore/%s/%s" % (instance_info["host"], instance_info["port_path_or_id"]), 2), ] @validate_database_duration() diff --git a/tests/datastore_mysql/test_database.py b/tests/datastore_mysql/test_database.py index 2fc8ca129b..8f86419039 100644 --- a/tests/datastore_mysql/test_database.py 
+++ b/tests/datastore_mysql/test_database.py @@ -13,11 +13,15 @@ # limitations under the License. import mysql.connector - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs - from testing_support.db_settings import mysql_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_database_trace_inputs import ( + validate_database_trace_inputs, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) + from newrelic.api.background_task import background_task DB_SETTINGS = mysql_settings() @@ -27,80 +31,95 @@ mysql_version = tuple(int(x) for x in mysql.connector.__version__.split(".")[:3]) if mysql_version >= (8, 0, 30): - _connector_metric_name = 'Function/mysql.connector.pooling:connect' + _connector_metric_name = "Function/mysql.connector.pooling:connect" else: - _connector_metric_name = 'Function/mysql.connector:connect' + _connector_metric_name = "Function/mysql.connector:connect" _test_execute_via_cursor_scoped_metrics = [ - (_connector_metric_name, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/delete' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/statement/MySQL/%s/call' % DB_PROCEDURE, 1), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] + (_connector_metric_name, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/select" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/insert" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/update" % 
DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/delete" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/statement/MySQL/%s/call" % DB_PROCEDURE, 1), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), +] _test_execute_via_cursor_rollup_metrics = [ - ('Datastore/all', 13), - ('Datastore/allOther', 13), - ('Datastore/MySQL/all', 13), - ('Datastore/MySQL/allOther', 13), - ('Datastore/operation/MySQL/select', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/insert', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/update', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/delete', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/delete' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/%s/call' % DB_PROCEDURE, 1), - ('Datastore/operation/MySQL/call', 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] - -@validate_transaction_metrics('test_database:test_execute_via_cursor', - scoped_metrics=_test_execute_via_cursor_scoped_metrics, - rollup_metrics=_test_execute_via_cursor_rollup_metrics, - background_task=True) + ("Datastore/all", 13), + ("Datastore/allOther", 13), + ("Datastore/MySQL/all", 13), + ("Datastore/MySQL/allOther", 13), + ("Datastore/operation/MySQL/select", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/select" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/insert", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/insert" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/update", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/update" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/delete", 1), + 
("Datastore/statement/MySQL/datastore_mysql_%s/delete" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/%s/call" % DB_PROCEDURE, 1), + ("Datastore/operation/MySQL/call", 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), + ("Datastore/instance/MySQL/%s/%s" % (instance_hostname(DB_SETTINGS["host"]), DB_SETTINGS["port"]), 12), +] + + +@validate_transaction_metrics( + "test_database:test_execute_via_cursor", + scoped_metrics=_test_execute_via_cursor_scoped_metrics, + rollup_metrics=_test_execute_via_cursor_rollup_metrics, + background_task=True, +) @validate_database_trace_inputs(sql_parameters_type=dict) @background_task() def test_execute_via_cursor(table_name): - connection = mysql.connector.connect(db=DB_SETTINGS['name'], - user=DB_SETTINGS['user'], passwd=DB_SETTINGS['password'], - host=DB_SETTINGS['host'], port=DB_SETTINGS['port']) + connection = mysql.connector.connect( + db=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + passwd=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) cursor = connection.cursor() cursor.execute("""drop table if exists `%s`""" % table_name) - cursor.execute("""create table %s """ - """(a integer, b real, c text)""" % table_name) + cursor.execute("""create table %s """ """(a integer, b real, c text)""" % table_name) - cursor.executemany("""insert into `%s` """ % table_name + - """values (%(a)s, %(b)s, %(c)s)""", [dict(a=1, b=1.0, c='1.0'), - dict(a=2, b=2.2, c='2.2'), dict(a=3, b=3.3, c='3.3')]) + cursor.executemany( + """insert into `%s` """ % table_name + """values (%(a)s, %(b)s, %(c)s)""", + [dict(a=1, b=1.0, c="1.0"), dict(a=2, b=2.2, c="2.2"), dict(a=3, b=3.3, c="3.3")], + ) cursor.execute("""select * from %s""" % table_name) - for row in cursor: pass + for row in cursor: + pass - cursor.execute("""update `%s` """ % table_name + - """set a=%(a)s, b=%(b)s, c=%(c)s where 
a=%(old_a)s""", - dict(a=4, b=4.0, c='4.0', old_a=1)) + cursor.execute( + """update `%s` """ % table_name + """set a=%(a)s, b=%(b)s, c=%(c)s where a=%(old_a)s""", + dict(a=4, b=4.0, c="4.0", old_a=1), + ) cursor.execute("""delete from `%s` where a=2""" % table_name) cursor.execute("""drop procedure if exists %s""" % DB_PROCEDURE) - cursor.execute("""CREATE PROCEDURE %s() + cursor.execute( + """CREATE PROCEDURE %s() BEGIN SELECT 'Hello World!'; - END""" % DB_PROCEDURE) + END""" + % DB_PROCEDURE + ) cursor.callproc("%s" % DB_PROCEDURE) @@ -108,76 +127,92 @@ def test_execute_via_cursor(table_name): connection.rollback() connection.commit() + _test_connect_using_alias_scoped_metrics = [ - (_connector_metric_name, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/delete' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/statement/MySQL/%s/call' % DB_PROCEDURE, 1), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] + (_connector_metric_name, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/select" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/insert" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/update" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/delete" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/statement/MySQL/%s/call" % DB_PROCEDURE, 1), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), +] _test_connect_using_alias_rollup_metrics = [ - ('Datastore/all', 13), - ('Datastore/allOther', 13), - ('Datastore/MySQL/all', 13), - ('Datastore/MySQL/allOther', 
13), - ('Datastore/operation/MySQL/select', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/select' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/insert', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/insert' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/update', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/update' % DB_NAMESPACE, 1), - ('Datastore/operation/MySQL/delete', 1), - ('Datastore/statement/MySQL/datastore_mysql_%s/delete' % DB_NAMESPACE, 1), - ('Datastore/statement/MySQL/%s/call' % DB_PROCEDURE, 1), - ('Datastore/operation/MySQL/call', 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] - -@validate_transaction_metrics('test_database:test_connect_using_alias', - scoped_metrics=_test_connect_using_alias_scoped_metrics, - rollup_metrics=_test_connect_using_alias_rollup_metrics, - background_task=True) + ("Datastore/all", 13), + ("Datastore/allOther", 13), + ("Datastore/MySQL/all", 13), + ("Datastore/MySQL/allOther", 13), + ("Datastore/operation/MySQL/select", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/select" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/insert", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/insert" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/update", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/update" % DB_NAMESPACE, 1), + ("Datastore/operation/MySQL/delete", 1), + ("Datastore/statement/MySQL/datastore_mysql_%s/delete" % DB_NAMESPACE, 1), + ("Datastore/statement/MySQL/%s/call" % DB_PROCEDURE, 1), + ("Datastore/operation/MySQL/call", 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), + ("Datastore/instance/MySQL/%s/%s" % (instance_hostname(DB_SETTINGS["host"]), DB_SETTINGS["port"]), 12), +] + + +@validate_transaction_metrics( + 
"test_database:test_connect_using_alias", + scoped_metrics=_test_connect_using_alias_scoped_metrics, + rollup_metrics=_test_connect_using_alias_rollup_metrics, + background_task=True, +) @validate_database_trace_inputs(sql_parameters_type=dict) @background_task() def test_connect_using_alias(table_name): - connection = mysql.connector.connect(db=DB_SETTINGS['name'], - user=DB_SETTINGS['user'], passwd=DB_SETTINGS['password'], - host=DB_SETTINGS['host'], port=DB_SETTINGS['port']) + connection = mysql.connector.connect( + db=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + passwd=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) cursor = connection.cursor() cursor.execute("""drop table if exists `%s`""" % table_name) - cursor.execute("""create table %s """ - """(a integer, b real, c text)""" % table_name) + cursor.execute("""create table %s """ """(a integer, b real, c text)""" % table_name) - cursor.executemany("""insert into `%s` """ % table_name + - """values (%(a)s, %(b)s, %(c)s)""", [dict(a=1, b=1.0, c='1.0'), - dict(a=2, b=2.2, c='2.2'), dict(a=3, b=3.3, c='3.3')]) + cursor.executemany( + """insert into `%s` """ % table_name + """values (%(a)s, %(b)s, %(c)s)""", + [dict(a=1, b=1.0, c="1.0"), dict(a=2, b=2.2, c="2.2"), dict(a=3, b=3.3, c="3.3")], + ) cursor.execute("""select * from %s""" % table_name) - for row in cursor: pass + for row in cursor: + pass - cursor.execute("""update `%s` """ % table_name + - """set a=%(a)s, b=%(b)s, c=%(c)s where a=%(old_a)s""", - dict(a=4, b=4.0, c='4.0', old_a=1)) + cursor.execute( + """update `%s` """ % table_name + """set a=%(a)s, b=%(b)s, c=%(c)s where a=%(old_a)s""", + dict(a=4, b=4.0, c="4.0", old_a=1), + ) cursor.execute("""delete from `%s` where a=2""" % table_name) cursor.execute("""drop procedure if exists %s""" % DB_PROCEDURE) - cursor.execute("""CREATE PROCEDURE %s() + cursor.execute( + """CREATE PROCEDURE %s() BEGIN SELECT 'Hello World!'; - END""" % DB_PROCEDURE) + END""" + % 
DB_PROCEDURE + ) cursor.callproc("%s" % DB_PROCEDURE) diff --git a/tests/datastore_postgresql/test_database.py b/tests/datastore_postgresql/test_database.py index 19070880bc..cf432d1742 100644 --- a/tests/datastore_postgresql/test_database.py +++ b/tests/datastore_postgresql/test_database.py @@ -14,6 +14,7 @@ import postgresql.driver.dbapi20 from testing_support.db_settings import postgresql_settings +from testing_support.util import instance_hostname from testing_support.validators.validate_database_trace_inputs import ( validate_database_trace_inputs, ) @@ -63,6 +64,7 @@ ("Datastore/operation/Postgres/commit", 3), ("Datastore/operation/Postgres/rollback", 1), ("Datastore/operation/Postgres/other", 1), + ("Datastore/instance/Postgres/%s/%s" % (instance_hostname(DB_SETTINGS["host"]), DB_SETTINGS["port"]), 13), ("Function/postgresql.driver.dbapi20:connect", 1), ("Function/postgresql.driver.dbapi20:Connection.__enter__", 1), ("Function/postgresql.driver.dbapi20:Connection.__exit__", 1), diff --git a/tests/datastore_psycopg2cffi/test_database.py b/tests/datastore_psycopg2cffi/test_database.py index 54ff6ad09d..939c5cabcb 100644 --- a/tests/datastore_psycopg2cffi/test_database.py +++ b/tests/datastore_psycopg2cffi/test_database.py @@ -15,166 +15,190 @@ import psycopg2cffi import psycopg2cffi.extensions import psycopg2cffi.extras - -from testing_support.fixtures import validate_stats_engine_explain_plan_output_is_none -from testing_support.validators.validate_transaction_errors import validate_transaction_errors -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.validators.validate_transaction_slow_sql_count import \ - validate_transaction_slow_sql_count -from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs - from testing_support.db_settings import postgresql_settings +from testing_support.fixtures import validate_stats_engine_explain_plan_output_is_none 
+from testing_support.util import instance_hostname +from testing_support.validators.validate_database_trace_inputs import ( + validate_database_trace_inputs, +) +from testing_support.validators.validate_transaction_errors import ( + validate_transaction_errors, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) +from testing_support.validators.validate_transaction_slow_sql_count import ( + validate_transaction_slow_sql_count, +) from newrelic.api.background_task import background_task DB_SETTINGS = postgresql_settings()[0] _test_execute_via_cursor_scoped_metrics = [ - ('Function/psycopg2cffi:connect', 1), - ('Function/psycopg2cffi._impl.connection:Connection.__enter__', 1), - ('Function/psycopg2cffi._impl.connection:Connection.__exit__', 1), - ('Datastore/statement/Postgres/%s/select' % DB_SETTINGS["table_name"], 1), - ('Datastore/statement/Postgres/%s/insert' % DB_SETTINGS["table_name"], 1), - ('Datastore/statement/Postgres/%s/update' % DB_SETTINGS["table_name"], 1), - ('Datastore/statement/Postgres/%s/delete' % DB_SETTINGS["table_name"], 1), - ('Datastore/statement/Postgres/now/call', 1), - ('Datastore/statement/Postgres/pg_sleep/call', 1), - ('Datastore/operation/Postgres/drop', 1), - ('Datastore/operation/Postgres/create', 1), - ('Datastore/operation/Postgres/commit', 3), - ('Datastore/operation/Postgres/rollback', 1)] + ("Function/psycopg2cffi:connect", 1), + ("Function/psycopg2cffi._impl.connection:Connection.__enter__", 1), + ("Function/psycopg2cffi._impl.connection:Connection.__exit__", 1), + ("Datastore/statement/Postgres/%s/select" % DB_SETTINGS["table_name"], 1), + ("Datastore/statement/Postgres/%s/insert" % DB_SETTINGS["table_name"], 1), + ("Datastore/statement/Postgres/%s/update" % DB_SETTINGS["table_name"], 1), + ("Datastore/statement/Postgres/%s/delete" % DB_SETTINGS["table_name"], 1), + ("Datastore/statement/Postgres/now/call", 1), + ("Datastore/statement/Postgres/pg_sleep/call", 1), + 
("Datastore/operation/Postgres/drop", 1), + ("Datastore/operation/Postgres/create", 1), + ("Datastore/operation/Postgres/commit", 3), + ("Datastore/operation/Postgres/rollback", 1), +] _test_execute_via_cursor_rollup_metrics = [ - ('Datastore/all', 13), - ('Datastore/allOther', 13), - ('Datastore/Postgres/all', 13), - ('Datastore/Postgres/allOther', 13), - ('Datastore/operation/Postgres/select', 1), - ('Datastore/statement/Postgres/%s/select' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/insert', 1), - ('Datastore/statement/Postgres/%s/insert' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/update', 1), - ('Datastore/statement/Postgres/%s/update' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/delete', 1), - ('Datastore/statement/Postgres/%s/delete' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/drop', 1), - ('Datastore/operation/Postgres/create', 1), - ('Datastore/statement/Postgres/now/call', 1), - ('Datastore/statement/Postgres/pg_sleep/call', 1), - ('Datastore/operation/Postgres/call', 2), - ('Datastore/operation/Postgres/commit', 3), - ('Datastore/operation/Postgres/rollback', 1)] - - -@validate_transaction_metrics('test_database:test_execute_via_cursor', - scoped_metrics=_test_execute_via_cursor_scoped_metrics, - rollup_metrics=_test_execute_via_cursor_rollup_metrics, - background_task=True) + ("Datastore/all", 13), + ("Datastore/allOther", 13), + ("Datastore/Postgres/all", 13), + ("Datastore/Postgres/allOther", 13), + ("Datastore/operation/Postgres/select", 1), + ("Datastore/statement/Postgres/%s/select" % DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/insert", 1), + ("Datastore/statement/Postgres/%s/insert" % DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/update", 1), + ("Datastore/statement/Postgres/%s/update" % DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/delete", 1), + ("Datastore/statement/Postgres/%s/delete" % 
DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/drop", 1), + ("Datastore/operation/Postgres/create", 1), + ("Datastore/statement/Postgres/now/call", 1), + ("Datastore/statement/Postgres/pg_sleep/call", 1), + ("Datastore/operation/Postgres/call", 2), + ("Datastore/operation/Postgres/commit", 3), + ("Datastore/operation/Postgres/rollback", 1), + ("Datastore/instance/Postgres/%s/%s" % (instance_hostname(DB_SETTINGS["host"]), DB_SETTINGS["port"]), 12), +] + + +@validate_transaction_metrics( + "test_database:test_execute_via_cursor", + scoped_metrics=_test_execute_via_cursor_scoped_metrics, + rollup_metrics=_test_execute_via_cursor_rollup_metrics, + background_task=True, +) @validate_database_trace_inputs(sql_parameters_type=tuple) @background_task() def test_execute_via_cursor(): with psycopg2cffi.connect( - database=DB_SETTINGS['name'], user=DB_SETTINGS['user'], - password=DB_SETTINGS['password'], host=DB_SETTINGS['host'], - port=DB_SETTINGS['port']) as connection: + database=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + password=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) as connection: cursor = connection.cursor() psycopg2cffi.extensions.register_type(psycopg2cffi.extensions.UNICODE) - psycopg2cffi.extensions.register_type( - psycopg2cffi.extensions.UNICODE, - connection) - psycopg2cffi.extensions.register_type( - psycopg2cffi.extensions.UNICODE, - cursor) + psycopg2cffi.extensions.register_type(psycopg2cffi.extensions.UNICODE, connection) + psycopg2cffi.extensions.register_type(psycopg2cffi.extensions.UNICODE, cursor) cursor.execute("""drop table if exists %s""" % DB_SETTINGS["table_name"]) - cursor.execute("""create table %s """ % DB_SETTINGS["table_name"] + - """(a integer, b real, c text)""") + cursor.execute("""create table %s """ % DB_SETTINGS["table_name"] + """(a integer, b real, c text)""") - cursor.executemany("""insert into %s """ % DB_SETTINGS["table_name"] + - """values (%s, %s, %s)""", [(1, 
1.0, '1.0'), - (2, 2.2, '2.2'), (3, 3.3, '3.3')]) + cursor.executemany( + """insert into %s """ % DB_SETTINGS["table_name"] + """values (%s, %s, %s)""", + [(1, 1.0, "1.0"), (2, 2.2, "2.2"), (3, 3.3, "3.3")], + ) cursor.execute("""select * from %s""" % DB_SETTINGS["table_name"]) for row in cursor: pass - cursor.execute("""update %s""" % DB_SETTINGS["table_name"] + """ set a=%s, b=%s, """ - """c=%s where a=%s""", (4, 4.0, '4.0', 1)) + cursor.execute( + """update %s""" % DB_SETTINGS["table_name"] + """ set a=%s, b=%s, """ """c=%s where a=%s""", + (4, 4.0, "4.0", 1), + ) cursor.execute("""delete from %s where a=2""" % DB_SETTINGS["table_name"]) connection.commit() - cursor.callproc('now') - cursor.callproc('pg_sleep', (0,)) + cursor.callproc("now") + cursor.callproc("pg_sleep", (0,)) connection.rollback() connection.commit() _test_rollback_on_exception_scoped_metrics = [ - ('Function/psycopg2cffi:connect', 1), - ('Function/psycopg2cffi._impl.connection:Connection.__enter__', 1), - ('Function/psycopg2cffi._impl.connection:Connection.__exit__', 1), - ('Datastore/operation/Postgres/rollback', 1)] + ("Function/psycopg2cffi:connect", 1), + ("Function/psycopg2cffi._impl.connection:Connection.__enter__", 1), + ("Function/psycopg2cffi._impl.connection:Connection.__exit__", 1), + ("Datastore/operation/Postgres/rollback", 1), +] _test_rollback_on_exception_rollup_metrics = [ - ('Datastore/all', 2), - ('Datastore/allOther', 2), - ('Datastore/Postgres/all', 2), - ('Datastore/Postgres/allOther', 2)] - - -@validate_transaction_metrics('test_database:test_rollback_on_exception', - scoped_metrics=_test_rollback_on_exception_scoped_metrics, - rollup_metrics=_test_rollback_on_exception_rollup_metrics, - background_task=True) + ("Datastore/all", 2), + ("Datastore/allOther", 2), + ("Datastore/Postgres/all", 2), + ("Datastore/Postgres/allOther", 2), +] + + +@validate_transaction_metrics( + "test_database:test_rollback_on_exception", + 
scoped_metrics=_test_rollback_on_exception_scoped_metrics, + rollup_metrics=_test_rollback_on_exception_rollup_metrics, + background_task=True, +) @validate_database_trace_inputs(sql_parameters_type=tuple) @background_task() def test_rollback_on_exception(): try: with psycopg2cffi.connect( - database=DB_SETTINGS['name'], user=DB_SETTINGS['user'], - password=DB_SETTINGS['password'], host=DB_SETTINGS['host'], - port=DB_SETTINGS['port']): - - raise RuntimeError('error') + database=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + password=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ): + + raise RuntimeError("error") except RuntimeError: pass _test_async_mode_scoped_metrics = [ - ('Function/psycopg2cffi:connect', 1), - ('Datastore/statement/Postgres/%s/select' % DB_SETTINGS["table_name"], 1), - ('Datastore/statement/Postgres/%s/insert' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/drop', 1), - ('Datastore/operation/Postgres/create', 1)] + ("Function/psycopg2cffi:connect", 1), + ("Datastore/statement/Postgres/%s/select" % DB_SETTINGS["table_name"], 1), + ("Datastore/statement/Postgres/%s/insert" % DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/drop", 1), + ("Datastore/operation/Postgres/create", 1), +] _test_async_mode_rollup_metrics = [ - ('Datastore/all', 5), - ('Datastore/allOther', 5), - ('Datastore/Postgres/all', 5), - ('Datastore/Postgres/allOther', 5), - ('Datastore/operation/Postgres/select', 1), - ('Datastore/statement/Postgres/%s/select' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/insert', 1), - ('Datastore/statement/Postgres/%s/insert' % DB_SETTINGS["table_name"], 1), - ('Datastore/operation/Postgres/drop', 1), - ('Datastore/operation/Postgres/create', 1)] + ("Datastore/all", 5), + ("Datastore/allOther", 5), + ("Datastore/Postgres/all", 5), + ("Datastore/Postgres/allOther", 5), + ("Datastore/operation/Postgres/select", 1), + 
("Datastore/statement/Postgres/%s/select" % DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/insert", 1), + ("Datastore/statement/Postgres/%s/insert" % DB_SETTINGS["table_name"], 1), + ("Datastore/operation/Postgres/drop", 1), + ("Datastore/operation/Postgres/create", 1), + ("Datastore/instance/Postgres/%s/%s" % (instance_hostname(DB_SETTINGS["host"]), DB_SETTINGS["port"]), 4), +] @validate_stats_engine_explain_plan_output_is_none() @validate_transaction_slow_sql_count(num_slow_sql=4) @validate_database_trace_inputs(sql_parameters_type=tuple) -@validate_transaction_metrics('test_database:test_async_mode', - scoped_metrics=_test_async_mode_scoped_metrics, - rollup_metrics=_test_async_mode_rollup_metrics, - background_task=True) +@validate_transaction_metrics( + "test_database:test_async_mode", + scoped_metrics=_test_async_mode_scoped_metrics, + rollup_metrics=_test_async_mode_rollup_metrics, + background_task=True, +) @validate_transaction_errors(errors=[]) @background_task() def test_async_mode(): @@ -182,16 +206,19 @@ def test_async_mode(): wait = psycopg2cffi.extras.wait_select kwargs = {} - version = tuple(int(_) for _ in psycopg2cffi.__version__.split('.')) + version = tuple(int(_) for _ in psycopg2cffi.__version__.split(".")) if version >= (2, 8): - kwargs['async_'] = 1 + kwargs["async_"] = 1 else: - kwargs['async'] = 1 + kwargs["async"] = 1 async_conn = psycopg2cffi.connect( - database=DB_SETTINGS['name'], user=DB_SETTINGS['user'], - password=DB_SETTINGS['password'], host=DB_SETTINGS['host'], - port=DB_SETTINGS['port'], **kwargs + database=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + password=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + **kwargs ) wait(async_conn) async_cur = async_conn.cursor() @@ -199,12 +226,10 @@ def test_async_mode(): async_cur.execute("""drop table if exists %s""" % DB_SETTINGS["table_name"]) wait(async_cur.connection) - async_cur.execute("""create table %s """ % 
DB_SETTINGS["table_name"] + - """(a integer, b real, c text)""") + async_cur.execute("""create table %s """ % DB_SETTINGS["table_name"] + """(a integer, b real, c text)""") wait(async_cur.connection) - async_cur.execute("""insert into %s """ % DB_SETTINGS["table_name"] + - """values (%s, %s, %s)""", (1, 1.0, '1.0')) + async_cur.execute("""insert into %s """ % DB_SETTINGS["table_name"] + """values (%s, %s, %s)""", (1, 1.0, "1.0")) wait(async_cur.connection) async_cur.execute("""select * from %s""" % DB_SETTINGS["table_name"]) diff --git a/tests/datastore_pylibmc/test_memcache.py b/tests/datastore_pylibmc/test_memcache.py index 769f3b483c..64da33416d 100644 --- a/tests/datastore_pylibmc/test_memcache.py +++ b/tests/datastore_pylibmc/test_memcache.py @@ -12,85 +12,92 @@ # See the License for the specific language governing permissions and # limitations under the License. -import os - import pylibmc - from testing_support.db_settings import memcached_settings -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import set_background_task - DB_SETTINGS = memcached_settings()[0] MEMCACHED_HOST = DB_SETTINGS["host"] MEMCACHED_PORT = DB_SETTINGS["port"] MEMCACHED_NAMESPACE = DB_SETTINGS["namespace"] -MEMCACHED_ADDR = '%s:%s' % (MEMCACHED_HOST, MEMCACHED_PORT) +MEMCACHED_ADDR = "%s:%s" % (MEMCACHED_HOST, MEMCACHED_PORT) _test_bt_set_get_delete_scoped_metrics = [ - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] _test_bt_set_get_delete_rollup_metrics = [ - ('Datastore/all', 3), - ('Datastore/allOther', 3), - 
('Datastore/Memcached/all', 3), - ('Datastore/Memcached/allOther', 3), - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Memcached/all", 3), + ("Datastore/Memcached/allOther", 3), + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] + @validate_transaction_metrics( - 'test_memcache:test_bt_set_get_delete', - scoped_metrics=_test_bt_set_get_delete_scoped_metrics, - rollup_metrics=_test_bt_set_get_delete_rollup_metrics, - background_task=True) + "test_memcache:test_bt_set_get_delete", + scoped_metrics=_test_bt_set_get_delete_scoped_metrics, + rollup_metrics=_test_bt_set_get_delete_rollup_metrics, + background_task=True, +) @background_task() def test_bt_set_get_delete(): set_background_task(True) client = pylibmc.Client([MEMCACHED_ADDR]) - key = MEMCACHED_NAMESPACE + 'key' + key = MEMCACHED_NAMESPACE + "key" - client.set(key, 'value') + client.set(key, "value") value = client.get(key) client.delete(key) - assert value == 'value' + assert value == "value" + _test_wt_set_get_delete_scoped_metrics = [ - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] _test_wt_set_get_delete_rollup_metrics = [ - ('Datastore/all', 3), - ('Datastore/allWeb', 3), - ('Datastore/Memcached/all', 3), - ('Datastore/Memcached/allWeb', 3), - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/all", 3), + ("Datastore/allWeb", 3), + ("Datastore/Memcached/all", 3), + ("Datastore/Memcached/allWeb", 3), + ("Datastore/operation/Memcached/set", 1), + 
("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] + @validate_transaction_metrics( - 'test_memcache:test_wt_set_get_delete', - scoped_metrics=_test_wt_set_get_delete_scoped_metrics, - rollup_metrics=_test_wt_set_get_delete_rollup_metrics, - background_task=False) + "test_memcache:test_wt_set_get_delete", + scoped_metrics=_test_wt_set_get_delete_scoped_metrics, + rollup_metrics=_test_wt_set_get_delete_rollup_metrics, + background_task=False, +) @background_task() def test_wt_set_get_delete(): set_background_task(False) client = pylibmc.Client([MEMCACHED_ADDR]) - key = MEMCACHED_NAMESPACE + 'key' + key = MEMCACHED_NAMESPACE + "key" - client.set(key, 'value') + client.set(key, "value") value = client.get(key) client.delete(key) - assert value == 'value' + assert value == "value" diff --git a/tests/datastore_pymemcache/test_memcache.py b/tests/datastore_pymemcache/test_memcache.py index 9aeea4d54d..3100db5b7f 100644 --- a/tests/datastore_pymemcache/test_memcache.py +++ b/tests/datastore_pymemcache/test_memcache.py @@ -15,9 +15,10 @@ import os import pymemcache.client - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import memcached_settings +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task from newrelic.api.transaction import set_background_task @@ -31,65 +32,74 @@ MEMCACHED_ADDR = (MEMCACHED_HOST, int(MEMCACHED_PORT)) _test_bt_set_get_delete_scoped_metrics = [ - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] _test_bt_set_get_delete_rollup_metrics = [ - ('Datastore/all', 3), - ('Datastore/allOther', 3), - 
('Datastore/Memcached/all', 3), - ('Datastore/Memcached/allOther', 3), - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/all", 3), + ("Datastore/allOther", 3), + ("Datastore/Memcached/all", 3), + ("Datastore/Memcached/allOther", 3), + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] + @validate_transaction_metrics( - 'test_memcache:test_bt_set_get_delete', - scoped_metrics=_test_bt_set_get_delete_scoped_metrics, - rollup_metrics=_test_bt_set_get_delete_rollup_metrics, - background_task=True) + "test_memcache:test_bt_set_get_delete", + scoped_metrics=_test_bt_set_get_delete_scoped_metrics, + rollup_metrics=_test_bt_set_get_delete_rollup_metrics, + background_task=True, +) @background_task() def test_bt_set_get_delete(): set_background_task(True) client = pymemcache.client.Client(MEMCACHED_ADDR) - key = MEMCACHED_NAMESPACE + 'key' + key = MEMCACHED_NAMESPACE + "key" - client.set(key, b'value') + client.set(key, b"value") value = client.get(key) client.delete(key) - assert value == b'value' + assert value == b"value" + _test_wt_set_get_delete_scoped_metrics = [ - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/operation/Memcached/set", 1), + ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] _test_wt_set_get_delete_rollup_metrics = [ - ('Datastore/all', 3), - ('Datastore/allWeb', 3), - ('Datastore/Memcached/all', 3), - ('Datastore/Memcached/allWeb', 3), - ('Datastore/operation/Memcached/set', 1), - ('Datastore/operation/Memcached/get', 1), - ('Datastore/operation/Memcached/delete', 1)] + ("Datastore/all", 3), + ("Datastore/allWeb", 3), + ("Datastore/Memcached/all", 3), + ("Datastore/Memcached/allWeb", 3), + ("Datastore/operation/Memcached/set", 1), 
+ ("Datastore/operation/Memcached/get", 1), + ("Datastore/operation/Memcached/delete", 1), +] + @validate_transaction_metrics( - 'test_memcache:test_wt_set_get_delete', - scoped_metrics=_test_wt_set_get_delete_scoped_metrics, - rollup_metrics=_test_wt_set_get_delete_rollup_metrics, - background_task=False) + "test_memcache:test_wt_set_get_delete", + scoped_metrics=_test_wt_set_get_delete_scoped_metrics, + rollup_metrics=_test_wt_set_get_delete_rollup_metrics, + background_task=False, +) @background_task() def test_wt_set_get_delete(): set_background_task(False) client = pymemcache.client.Client(MEMCACHED_ADDR) - key = MEMCACHED_NAMESPACE + 'key' + key = MEMCACHED_NAMESPACE + "key" - client.set(key, b'value') + client.set(key, b"value") value = client.get(key) client.delete(key) - assert value == b'value' + assert value == b"value" diff --git a/tests/datastore_pymysql/test_database.py b/tests/datastore_pymysql/test_database.py index 5943b12665..ad4db1d9c1 100644 --- a/tests/datastore_pymysql/test_database.py +++ b/tests/datastore_pymysql/test_database.py @@ -13,11 +13,14 @@ # limitations under the License. 
import pymysql - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics -from testing_support.validators.validate_database_trace_inputs import validate_database_trace_inputs - from testing_support.db_settings import mysql_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_database_trace_inputs import ( + validate_database_trace_inputs, +) +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task @@ -25,76 +28,92 @@ TABLE_NAME = "datastore_pymysql_" + DB_SETTINGS["namespace"] PROCEDURE_NAME = "hello_" + DB_SETTINGS["namespace"] +HOST = instance_hostname(DB_SETTINGS["host"]) +PORT = DB_SETTINGS["port"] + def execute_db_calls_with_cursor(cursor): cursor.execute("""drop table if exists %s""" % TABLE_NAME) - cursor.execute("""create table %s """ % TABLE_NAME + - """(a integer, b real, c text)""") + cursor.execute("""create table %s """ % TABLE_NAME + """(a integer, b real, c text)""") - cursor.executemany("""insert into %s """ % TABLE_NAME + - """values (%s, %s, %s)""", [(1, 1.0, '1.0'), - (2, 2.2, '2.2'), (3, 3.3, '3.3')]) + cursor.executemany( + """insert into %s """ % TABLE_NAME + """values (%s, %s, %s)""", + [(1, 1.0, "1.0"), (2, 2.2, "2.2"), (3, 3.3, "3.3")], + ) cursor.execute("""select * from %s""" % TABLE_NAME) - for row in cursor: pass + for row in cursor: + pass - cursor.execute("""update %s""" % TABLE_NAME + """ set a=%s, b=%s, """ - """c=%s where a=%s""", (4, 4.0, '4.0', 1)) + cursor.execute("""update %s""" % TABLE_NAME + """ set a=%s, b=%s, """ """c=%s where a=%s""", (4, 4.0, "4.0", 1)) cursor.execute("""delete from %s where a=2""" % TABLE_NAME) cursor.execute("""drop procedure if exists %s""" % PROCEDURE_NAME) - cursor.execute("""CREATE PROCEDURE %s() + cursor.execute( + """CREATE PROCEDURE %s() BEGIN SELECT 'Hello World!'; - END""" % PROCEDURE_NAME) + END""" 
+ % PROCEDURE_NAME + ) cursor.callproc(PROCEDURE_NAME) _test_execute_via_cursor_scoped_metrics = [ - ('Function/pymysql:Connect', 1), - ('Datastore/statement/MySQL/%s/select' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/insert' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/update' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/delete' % TABLE_NAME, 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/statement/MySQL/%s/call' % PROCEDURE_NAME, 1), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] + ("Function/pymysql:Connect", 1), + ("Datastore/statement/MySQL/%s/select" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/insert" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/update" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/delete" % TABLE_NAME, 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/statement/MySQL/%s/call" % PROCEDURE_NAME, 1), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), +] _test_execute_via_cursor_rollup_metrics = [ - ('Datastore/all', 13), - ('Datastore/allOther', 13), - ('Datastore/MySQL/all', 13), - ('Datastore/MySQL/allOther', 13), - ('Datastore/statement/MySQL/%s/select' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/insert' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/update' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/delete' % TABLE_NAME, 1), - ('Datastore/operation/MySQL/select', 1), - ('Datastore/operation/MySQL/insert', 1), - ('Datastore/operation/MySQL/update', 1), - ('Datastore/operation/MySQL/delete', 1), - ('Datastore/statement/MySQL/%s/call' % PROCEDURE_NAME, 1), - ('Datastore/operation/MySQL/call', 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] - 
-@validate_transaction_metrics('test_database:test_execute_via_cursor', - scoped_metrics=_test_execute_via_cursor_scoped_metrics, - rollup_metrics=_test_execute_via_cursor_rollup_metrics, - background_task=True) + ("Datastore/all", 13), + ("Datastore/allOther", 13), + ("Datastore/MySQL/all", 13), + ("Datastore/MySQL/allOther", 13), + ("Datastore/statement/MySQL/%s/select" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/insert" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/update" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/delete" % TABLE_NAME, 1), + ("Datastore/operation/MySQL/select", 1), + ("Datastore/operation/MySQL/insert", 1), + ("Datastore/operation/MySQL/update", 1), + ("Datastore/operation/MySQL/delete", 1), + ("Datastore/statement/MySQL/%s/call" % PROCEDURE_NAME, 1), + ("Datastore/operation/MySQL/call", 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), + ("Datastore/instance/MySQL/%s/%s" % (HOST, PORT), 12), +] + + +@validate_transaction_metrics( + "test_database:test_execute_via_cursor", + scoped_metrics=_test_execute_via_cursor_scoped_metrics, + rollup_metrics=_test_execute_via_cursor_rollup_metrics, + background_task=True, +) @validate_database_trace_inputs(sql_parameters_type=tuple) @background_task() def test_execute_via_cursor(): - connection = pymysql.connect(db=DB_SETTINGS['name'], - user=DB_SETTINGS['user'], passwd=DB_SETTINGS['password'], - host=DB_SETTINGS['host'], port=DB_SETTINGS['port']) + connection = pymysql.connect( + db=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + passwd=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) with connection.cursor() as cursor: execute_db_calls_with_cursor(cursor) @@ -105,49 +124,57 @@ def test_execute_via_cursor(): _test_execute_via_cursor_context_mangaer_scoped_metrics = [ - ('Function/pymysql:Connect', 1), - 
('Datastore/statement/MySQL/%s/select' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/insert' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/update' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/delete' % TABLE_NAME, 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/statement/MySQL/%s/call' % PROCEDURE_NAME, 1), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] + ("Function/pymysql:Connect", 1), + ("Datastore/statement/MySQL/%s/select" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/insert" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/update" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/delete" % TABLE_NAME, 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/statement/MySQL/%s/call" % PROCEDURE_NAME, 1), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), +] _test_execute_via_cursor_context_mangaer_rollup_metrics = [ - ('Datastore/all', 13), - ('Datastore/allOther', 13), - ('Datastore/MySQL/all', 13), - ('Datastore/MySQL/allOther', 13), - ('Datastore/statement/MySQL/%s/select' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/insert' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/update' % TABLE_NAME, 1), - ('Datastore/statement/MySQL/%s/delete' % TABLE_NAME, 1), - ('Datastore/operation/MySQL/select', 1), - ('Datastore/operation/MySQL/insert', 1), - ('Datastore/operation/MySQL/update', 1), - ('Datastore/operation/MySQL/delete', 1), - ('Datastore/statement/MySQL/%s/call' % PROCEDURE_NAME, 1), - ('Datastore/operation/MySQL/call', 1), - ('Datastore/operation/MySQL/drop', 2), - ('Datastore/operation/MySQL/create', 2), - ('Datastore/operation/MySQL/commit', 2), - ('Datastore/operation/MySQL/rollback', 1)] + ("Datastore/all", 13), + ("Datastore/allOther", 13), + ("Datastore/MySQL/all", 13), + ("Datastore/MySQL/allOther", 13), + 
("Datastore/statement/MySQL/%s/select" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/insert" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/update" % TABLE_NAME, 1), + ("Datastore/statement/MySQL/%s/delete" % TABLE_NAME, 1), + ("Datastore/operation/MySQL/select", 1), + ("Datastore/operation/MySQL/insert", 1), + ("Datastore/operation/MySQL/update", 1), + ("Datastore/operation/MySQL/delete", 1), + ("Datastore/statement/MySQL/%s/call" % PROCEDURE_NAME, 1), + ("Datastore/operation/MySQL/call", 1), + ("Datastore/operation/MySQL/drop", 2), + ("Datastore/operation/MySQL/create", 2), + ("Datastore/operation/MySQL/commit", 2), + ("Datastore/operation/MySQL/rollback", 1), + ("Datastore/instance/MySQL/%s/%s" % (HOST, PORT), 12), +] @validate_transaction_metrics( - 'test_database:test_execute_via_cursor_context_manager', - scoped_metrics=_test_execute_via_cursor_context_mangaer_scoped_metrics, - rollup_metrics=_test_execute_via_cursor_context_mangaer_rollup_metrics, - background_task=True) + "test_database:test_execute_via_cursor_context_manager", + scoped_metrics=_test_execute_via_cursor_context_mangaer_scoped_metrics, + rollup_metrics=_test_execute_via_cursor_context_mangaer_rollup_metrics, + background_task=True, +) @validate_database_trace_inputs(sql_parameters_type=tuple) @background_task() def test_execute_via_cursor_context_manager(): - connection = pymysql.connect(db=DB_SETTINGS['name'], - user=DB_SETTINGS['user'], passwd=DB_SETTINGS['password'], - host=DB_SETTINGS['host'], port=DB_SETTINGS['port']) + connection = pymysql.connect( + db=DB_SETTINGS["name"], + user=DB_SETTINGS["user"], + passwd=DB_SETTINGS["password"], + host=DB_SETTINGS["host"], + port=DB_SETTINGS["port"], + ) cursor = connection.cursor() with cursor: diff --git a/tests/datastore_pyodbc/test_pyodbc.py b/tests/datastore_pyodbc/test_pyodbc.py index 119908e4db..5a810be5f4 100644 --- a/tests/datastore_pyodbc/test_pyodbc.py +++ b/tests/datastore_pyodbc/test_pyodbc.py @@ -13,6 +13,7 @@ # limitations 
under the License. import pytest from testing_support.db_settings import postgresql_settings +from testing_support.util import instance_hostname from testing_support.validators.validate_database_trace_inputs import ( validate_database_trace_inputs, ) diff --git a/tests/datastore_pysolr/test_solr.py b/tests/datastore_pysolr/test_solr.py index a987a29ac9..e17117117e 100644 --- a/tests/datastore_pysolr/test_solr.py +++ b/tests/datastore_pysolr/test_solr.py @@ -13,16 +13,19 @@ # limitations under the License. from pysolr import Solr - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import solr_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task DB_SETTINGS = solr_settings()[0] SOLR_HOST = DB_SETTINGS["host"] SOLR_PORT = DB_SETTINGS["port"] -SOLR_URL = 'http://%s:%s/solr/collection' % (DB_SETTINGS["host"], DB_SETTINGS["port"]) +SOLR_URL = "http://%s:%s/solr/collection" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) + def _exercise_solr(solr): # Construct document names within namespace @@ -31,30 +34,36 @@ def _exercise_solr(solr): solr.add([{"id": x} for x in documents]) - solr.search('id:%s' % documents[0]) + solr.search("id:%s" % documents[0]) solr.delete(id=documents[0]) # Delete all documents. 
- solr.delete(q='id:*_%s' % DB_SETTINGS["namespace"]) + solr.delete(q="id:*_%s" % DB_SETTINGS["namespace"]) + _test_solr_search_scoped_metrics = [ - ('Datastore/operation/Solr/add', 1), - ('Datastore/operation/Solr/delete', 2), - ('Datastore/operation/Solr/search', 1)] + ("Datastore/operation/Solr/add", 1), + ("Datastore/operation/Solr/delete", 2), + ("Datastore/operation/Solr/search", 1), +] _test_solr_search_rollup_metrics = [ - ('Datastore/all', 4), - ('Datastore/allOther', 4), - ('Datastore/Solr/all', 4), - ('Datastore/Solr/allOther', 4), - ('Datastore/operation/Solr/add', 1), - ('Datastore/operation/Solr/search', 1), - ('Datastore/operation/Solr/delete', 2)] - -@validate_transaction_metrics('test_solr:test_solr_search', + ("Datastore/all", 4), + ("Datastore/allOther", 4), + ("Datastore/Solr/all", 4), + ("Datastore/Solr/allOther", 4), + ("Datastore/operation/Solr/add", 1), + ("Datastore/operation/Solr/search", 1), + ("Datastore/operation/Solr/delete", 2), +] + + +@validate_transaction_metrics( + "test_solr:test_solr_search", scoped_metrics=_test_solr_search_scoped_metrics, rollup_metrics=_test_solr_search_rollup_metrics, - background_task=True) + background_task=True, +) @background_task() def test_solr_search(): s = Solr(SOLR_URL) diff --git a/tests/datastore_solrpy/test_solr.py b/tests/datastore_solrpy/test_solr.py index ee1a7e91ef..56dcce62bf 100644 --- a/tests/datastore_solrpy/test_solr.py +++ b/tests/datastore_solrpy/test_solr.py @@ -13,16 +13,19 @@ # limitations under the License. 
from solr import SolrConnection - -from testing_support.validators.validate_transaction_metrics import validate_transaction_metrics from testing_support.db_settings import solr_settings +from testing_support.util import instance_hostname +from testing_support.validators.validate_transaction_metrics import ( + validate_transaction_metrics, +) from newrelic.api.background_task import background_task DB_SETTINGS = solr_settings()[0] SOLR_HOST = DB_SETTINGS["host"] SOLR_PORT = DB_SETTINGS["port"] -SOLR_URL = 'http://%s:%s/solr/collection' % (DB_SETTINGS["host"], DB_SETTINGS["port"]) +SOLR_URL = "http://%s:%s/solr/collection" % (DB_SETTINGS["host"], DB_SETTINGS["port"]) + def _exercise_solr(solr): # Construct document names within namespace @@ -31,30 +34,37 @@ def _exercise_solr(solr): solr.add_many([{"id": x} for x in documents]) solr.commit() - solr.query('id:%s' % documents[0]).results - solr.delete('id:*_%s' % DB_SETTINGS["namespace"]) + solr.query("id:%s" % documents[0]).results + solr.delete("id:*_%s" % DB_SETTINGS["namespace"]) solr.commit() + _test_solr_search_scoped_metrics = [ - ('Datastore/operation/Solr/add_many', 1), - ('Datastore/operation/Solr/delete', 1), - ('Datastore/operation/Solr/commit', 2), - ('Datastore/operation/Solr/query', 1)] + ("Datastore/operation/Solr/add_many", 1), + ("Datastore/operation/Solr/delete", 1), + ("Datastore/operation/Solr/commit", 2), + ("Datastore/operation/Solr/query", 1), +] _test_solr_search_rollup_metrics = [ - ('Datastore/all', 5), - ('Datastore/allOther', 5), - ('Datastore/Solr/all', 5), - ('Datastore/Solr/allOther', 5), - ('Datastore/operation/Solr/add_many', 1), - ('Datastore/operation/Solr/query', 1), - ('Datastore/operation/Solr/commit', 2), - ('Datastore/operation/Solr/delete', 1)] - -@validate_transaction_metrics('test_solr:test_solr_search', + ("Datastore/all", 5), + ("Datastore/allOther", 5), + ("Datastore/Solr/all", 5), + ("Datastore/Solr/allOther", 5), + ("Datastore/instance/Solr/%s/%s" % 
(instance_hostname(SOLR_HOST), SOLR_PORT), 3), + ("Datastore/operation/Solr/add_many", 1), + ("Datastore/operation/Solr/query", 1), + ("Datastore/operation/Solr/commit", 2), + ("Datastore/operation/Solr/delete", 1), +] + + +@validate_transaction_metrics( + "test_solr:test_solr_search", scoped_metrics=_test_solr_search_scoped_metrics, rollup_metrics=_test_solr_search_rollup_metrics, - background_task=True) + background_task=True, +) @background_task() def test_solr_search(): s = SolrConnection(SOLR_URL)