From 4d9f1647b3ee4b6c6f1cadd750cb999b28783da8 Mon Sep 17 00:00:00 2001
From: Mourits de Beer <31511766+ff137@users.noreply.github.com>
Date: Thu, 15 Aug 2024 21:21:06 +0200
Subject: [PATCH] 👷 fix lint workflow and 🎨 apply ruff linting (#3166)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* :arrow_up: Update ruff-pre-commit

Signed-off-by: ff137

* :art: add `"--config" "pyproject.toml"` to linter args

Signed-off-by: ff137

* :art: apply `pre-commit run --all-files`

Signed-off-by: ff137

* :art: manually fix long line

Signed-off-by: ff137

* :construction_worker: fix lint workflow

Signed-off-by: ff137

* :construction: test if lint workflow works, with faulty line

Signed-off-by: ff137

* :rewind: revert test change

Signed-off-by: ff137

* :construction: test if lint workflow works, with missing docstring

Signed-off-by: ff137

* :rewind: revert test change

Signed-off-by: ff137

---------

Signed-off-by: ff137
---
 .github/workflows/format.yml | 3 +
 .pre-commit-config.yaml | 4 +-
 aries_cloudagent/admin/request_context.py | 6 +-
 aries_cloudagent/admin/routes.py | 8 +-
 aries_cloudagent/admin/server.py | 8 +-
 .../admin/tests/test_admin_server.py | 11 +-
 aries_cloudagent/admin/tests/test_auth.py | 2 -
 .../anoncreds/default/legacy_indy/author.py | 4 +-
 .../anoncreds/default/legacy_indy/recover.py | 10 +-
 .../anoncreds/default/legacy_indy/registry.py | 28 ++--
 .../default/legacy_indy/tests/test_recover.py | 2 -
 .../legacy_indy/tests/test_registry.py | 10 +-
 aries_cloudagent/anoncreds/holder.py | 21 +---
 aries_cloudagent/anoncreds/issuer.py | 17 +--
 .../anoncreds/models/anoncreds_revocation.py | 4 +-
 .../anoncreds/models/anoncreds_schema.py | 4 +-
 aries_cloudagent/anoncreds/registry.py | 12 +-
 aries_cloudagent/anoncreds/revocation.py | 48 ++-----
 .../anoncreds/tests/test_holder.py | 53 ++------
 .../anoncreds/tests/test_issuer.py | 16 +--
 .../anoncreds/tests/test_revocation.py | 30 ++---
 .../anoncreds/tests/test_routes.py | 12 +-
 .../anoncreds/tests/test_verifier.py | 24 ++--
 aries_cloudagent/anoncreds/util.py | 4 +-
 aries_cloudagent/anoncreds/verifier.py | 19 +--
 aries_cloudagent/askar/didcomm/v1.py | 6 +-
 aries_cloudagent/askar/didcomm/v2.py | 4 +-
 aries_cloudagent/askar/profile.py | 8 +-
 aries_cloudagent/askar/profile_anon.py | 12 +-
 aries_cloudagent/commands/provision.py | 4 +-
 .../commands/tests/test_provision.py | 4 +-
 .../commands/tests/test_upgrade.py | 24 +---
 aries_cloudagent/commands/upgrade.py | 16 ++-
 aries_cloudagent/config/default_context.py | 4 +-
 aries_cloudagent/config/injection_context.py | 2 +-
 aries_cloudagent/config/ledger.py | 8 +-
 aries_cloudagent/config/logging.py | 4 +-
 aries_cloudagent/config/provider.py | 4 +-
 .../config/tests/test_argparse.py | 8 +-
 aries_cloudagent/config/tests/test_ledger.py | 28 ++---
 .../config/tests/test_settings.py | 4 +-
 aries_cloudagent/config/util.py | 4 +-
 .../connections/models/diddoc/diddoc.py | 4 +-
 .../connections/models/diddoc/service.py | 4 +-
 .../models/diddoc/tests/test_diddoc.py | 15 +--
 .../connections/tests/test_base_manager.py | 12 +-
 aries_cloudagent/core/dispatcher.py | 4 +-
 .../core/in_memory/didcomm/tests/test_1pu.py | 12 +-
 aries_cloudagent/core/in_memory/profile.py | 4 +-
 aries_cloudagent/core/oob_processor.py | 25 ++--
 aries_cloudagent/core/protocol_registry.py | 4 +-
 aries_cloudagent/core/tests/test_conductor.py | 24 +---
 .../core/tests/test_dispatcher.py | 4 +-
 .../core/tests/test_oob_processor.py | 16 +--
.../core/tests/test_protocol_registry.py | 14 +-- aries_cloudagent/did/did_key.py | 4 +- .../did/tests/test_did_key_ed25519.py | 4 +- .../didcomm_v2/tests/test_adapters.py | 4 - aries_cloudagent/holder/routes.py | 4 +- aries_cloudagent/holder/tests/test_routes.py | 36 ++---- aries_cloudagent/indy/credx/holder.py | 4 +- aries_cloudagent/indy/credx/issuer.py | 20 +-- .../indy/credx/tests/test_cred_issuance.py | 12 +- aries_cloudagent/indy/models/pres_preview.py | 4 +- aries_cloudagent/indy/models/proof.py | 4 +- aries_cloudagent/indy/tests/test_verifier.py | 4 +- aries_cloudagent/indy/util.py | 4 +- aries_cloudagent/indy/verifier.py | 8 +- aries_cloudagent/ledger/indy_vdr.py | 28 ++--- .../ledger/merkel_validation/constants.py | 4 +- .../merkel_validation/domain_txn_handler.py | 4 +- .../multiple_ledger/indy_vdr_manager.py | 31 ++--- .../multiple_ledger/ledger_config_schema.py | 4 +- .../tests/test_indy_vdr_manager.py | 12 +- aries_cloudagent/ledger/routes.py | 16 +-- .../ledger/tests/test_indy_vdr.py | 12 +- aries_cloudagent/ledger/tests/test_routes.py | 4 +- aries_cloudagent/messaging/agent_message.py | 4 +- .../credential_definitions/routes.py | 8 +- .../tests/test_routes.py | 12 +- .../messaging/decorators/attach_decorator.py | 4 +- .../decorators/tests/test_attach_decorator.py | 4 +- .../messaging/jsonld/create_verify_data.py | 9 +- .../messaging/jsonld/credential.py | 4 +- aries_cloudagent/messaging/jsonld/routes.py | 4 +- .../messaging/jsonld/tests/test_routes.py | 15 +-- aries_cloudagent/messaging/message_type.py | 4 +- .../messaging/models/base_record.py | 7 +- .../messaging/models/tests/test_base.py | 4 +- .../models/tests/test_base_record.py | 8 +- aries_cloudagent/messaging/request_context.py | 6 +- aries_cloudagent/messaging/responder.py | 4 +- aries_cloudagent/messaging/schemas/routes.py | 12 +- aries_cloudagent/messaging/tests/test_util.py | 4 +- aries_cloudagent/multitenant/admin/routes.py | 4 +- .../multitenant/admin/tests/test_routes.py | 8 +- .../multitenant/tests/test_base.py | 34 ++--- .../multitenant/tests/test_manager.py | 8 +- .../protocols/actionmenu/v1_0/routes.py | 4 +- .../actionmenu/v1_0/tests/test_util.py | 4 +- .../protocols/basicmessage/v1_0/routes.py | 4 +- .../handlers/connection_invitation_handler.py | 4 +- .../handlers/connection_response_handler.py | 2 +- .../handlers/tests/test_request_handler.py | 4 +- .../handlers/tests/test_response_handler.py | 4 +- .../protocols/connections/v1_0/manager.py | 7 +- .../v1_0/messages/connection_invitation.py | 4 +- .../messages/tests/test_connection_request.py | 4 +- .../tests/test_connection_response.py | 3 +- .../connections/v1_0/tests/test_manager.py | 72 ++++------- .../connections/v1_0/tests/test_routes.py | 14 +-- .../v1_0/handlers/keylist_handler.py | 4 +- .../v1_0/handlers/keylist_query_handler.py | 4 +- .../v1_0/handlers/keylist_update_handler.py | 4 +- .../keylist_update_response_handler.py | 4 +- .../v1_0/handlers/mediation_deny_handler.py | 4 +- .../v1_0/handlers/mediation_grant_handler.py | 4 +- .../handlers/mediation_request_handler.py | 4 +- .../tests/test_keylist_query_handler.py | 6 +- .../test_keylist_update_response_handler.py | 8 +- .../coordinate_mediation/v1_0/manager.py | 16 +-- .../v1_0/messages/inner/keylist_key.py | 2 +- .../v1_0/messages/inner/keylist_updated.py | 2 +- .../v1_0/messages/keylist_update.py | 4 +- .../v1_0/messages/mediate_deny.py | 4 +- .../v1_0/messages/mediate_grant.py | 4 +- .../v1_0/route_manager.py | 4 +- .../coordinate_mediation/v1_0/routes.py | 12 +- 
.../v1_0/tests/test_mediation_manager.py | 16 +-- .../v1_0/tests/test_route_manager.py | 4 +- .../v1_0/tests/test_routes.py | 30 ++--- .../did_rotate/v1_0/message_types.py | 4 +- .../v1_0/messages/problem_report.py | 8 +- .../did_rotate/v1_0/models/rotate_record.py | 4 +- .../did_rotate/v1_0/tests/test_manager.py | 20 +-- .../did_rotate/v1_0/tests/test_routes.py | 4 +- .../v1_0/handlers/request_handler.py | 4 +- .../v1_0/handlers/response_handler.py | 4 +- .../protocols/didexchange/v1_0/manager.py | 16 +-- .../didexchange/v1_0/message_types.py | 4 +- .../messages/tests/test_problem_report.py | 4 +- .../didexchange/v1_0/tests/test_manager.py | 76 +++-------- .../handlers/tests/test_disclose_handler.py | 8 +- .../discovery/v1_0/models/discovery_record.py | 4 +- .../discovery/v1_0/tests/test_manager.py | 4 +- .../tests/test_disclosures_handler.py | 12 +- .../discovery/v2_0/tests/test_manager.py | 4 +- .../v1_0/messages/messages_attach.py | 2 +- .../messages/tests/test_cancel_transaction.py | 4 +- .../test_endorsed_transaction_response.py | 8 +- .../test_refused_transaction_response.py | 4 +- .../tests/test_transaction_request.py | 3 +- .../messages/tests/test_transaction_resend.py | 4 +- .../endorse_transaction/v1_0/routes.py | 24 +--- .../v1_0/tests/test_routes.py | 16 +-- .../introduction/v0_1/demo_service.py | 17 +-- .../handlers/invitation_request_handler.py | 4 +- .../messages/tests/test_forward_invitation.py | 10 +- .../messages/tests/test_invitation_request.py | 6 +- .../introduction/v0_1/tests/test_service.py | 4 +- .../v1_0/handlers/credential_issue_handler.py | 4 +- .../v1_0/handlers/credential_offer_handler.py | 8 +- .../handlers/credential_proposal_handler.py | 4 +- .../tests/test_credential_ack_handler.py | 4 +- .../tests/test_credential_offer_handler.py | 3 +- .../issue_credential/v1_0/manager.py | 80 +++++------- .../v1_0/messages/credential_ack.py | 4 +- .../messages/tests/test_credential_request.py | 10 +- .../protocols/issue_credential/v1_0/routes.py | 4 +- .../v1_0/tests/test_manager.py | 12 +- .../v1_0/tests/test_routes.py | 16 +-- .../v2_0/formats/anoncreds/handler.py | 10 +- .../formats/anoncreds/tests/test_handler.py | 31 ++--- .../v2_0/formats/indy/handler.py | 12 +- .../v2_0/formats/indy/tests/test_handler.py | 31 ++--- .../v2_0/formats/ld_proof/handler.py | 12 +- .../formats/ld_proof/tests/test_handler.py | 29 ++--- .../v2_0/formats/vc_di/handler.py | 12 +- .../v2_0/formats/vc_di/models/cred_request.py | 4 +- .../v2_0/formats/vc_di/tests/test_handler.py | 22 +--- .../v2_0/handlers/cred_offer_handler.py | 4 +- .../v2_0/handlers/cred_proposal_handler.py | 4 +- .../handlers/tests/test_cred_ack_handler.py | 4 +- .../handlers/tests/test_cred_offer_handler.py | 3 +- .../issue_credential/v2_0/manager.py | 16 ++- .../issue_credential/v2_0/message_types.py | 4 +- .../v2_0/messages/cred_request.py | 4 +- .../v2_0/messages/tests/test_cred_format.py | 12 +- .../protocols/issue_credential/v2_0/routes.py | 16 +-- .../v2_0/tests/test_manager.py | 35 ++---- .../v2_0/tests/test_routes.py | 20 +-- .../protocols/out_of_band/v1_0/manager.py | 21 +--- .../out_of_band/v1_0/messages/invitation.py | 5 +- .../out_of_band/v1_0/messages/reuse.py | 4 +- .../protocols/out_of_band/v1_0/routes.py | 8 +- .../out_of_band/v1_0/tests/test_manager.py | 22 ++-- .../anoncreds/pres_exch_handler.py | 4 +- .../protocols/present_proof/dif/pres_exch.py | 28 ++--- .../present_proof/dif/pres_exch_handler.py | 42 ++----- .../present_proof/dif/pres_request_schema.py | 4 +- .../present_proof/dif/tests/test_data.py 
| 40 ++---- .../present_proof/dif/tests/test_pres_exch.py | 28 ++--- .../dif/tests/test_pres_exch_handler.py | 44 ++----- .../present_proof/indy/pres_exch_handler.py | 14 +-- .../handlers/presentation_proposal_handler.py | 8 +- .../handlers/presentation_request_handler.py | 22 ++-- .../test_presentation_proposal_handler.py | 3 +- .../test_presentation_request_handler.py | 4 +- .../protocols/present_proof/v1_0/manager.py | 34 +++-- .../v1_0/models/presentation_exchange.py | 4 +- .../protocols/present_proof/v1_0/routes.py | 8 +- .../present_proof/v1_0/tests/test_manager.py | 24 +--- .../present_proof/v1_0/tests/test_routes.py | 36 ++---- .../v2_0/formats/anoncreds/handler.py | 10 +- .../present_proof/v2_0/formats/dif/handler.py | 33 ++--- .../v2_0/formats/dif/tests/test_handler.py | 50 +++----- .../v2_0/formats/indy/handler.py | 14 +-- .../v2_0/handlers/pres_proposal_handler.py | 4 +- .../v2_0/handlers/pres_request_handler.py | 4 +- .../tests/test_pres_request_handler.py | 56 +++------ .../protocols/present_proof/v2_0/manager.py | 20 ++- .../v2_0/messages/pres_request.py | 4 +- .../v2_0/models/tests/test_record.py | 4 +- .../protocols/present_proof/v2_0/routes.py | 49 +++----- .../present_proof/v2_0/tests/test_manager.py | 86 ++++--------- .../v2_0/tests/test_manager_anoncreds.py | 86 ++++--------- .../present_proof/v2_0/tests/test_routes.py | 118 +++++------------- .../v2_0/tests/test_routes_anoncreds.py | 118 +++++------------- .../protocols/problem_report/v1_0/handler.py | 4 +- .../protocols/problem_report/v1_0/message.py | 8 +- .../v1_0/models/rev_notification_record.py | 4 +- .../v2_0/messages/revoke.py | 4 +- .../v2_0/models/rev_notification_record.py | 4 +- .../routing/v1_0/handlers/forward_handler.py | 4 +- .../handlers/tests/test_forward_handler.py | 18 ++- .../routing/v1_0/models/route_record.py | 2 +- .../protocols/trustping/v1_0/messages/ping.py | 4 +- .../v1_0/messages/tests/test_trust_ping.py | 8 +- .../protocols/trustping/v1_0/routes.py | 4 +- aries_cloudagent/resolver/base.py | 4 +- aries_cloudagent/resolver/default/indy.py | 4 +- .../resolver/default/legacy_peer.py | 4 +- aries_cloudagent/resolver/default/peer1.py | 2 +- aries_cloudagent/resolver/default/peer3.py | 4 +- aries_cloudagent/resolver/default/peer4.py | 4 +- .../resolver/default/tests/test_indy.py | 8 +- .../default/tests/test_legacy_peer.py | 4 +- .../resolver/default/tests/test_universal.py | 4 +- aries_cloudagent/resolver/default/web.py | 4 +- aries_cloudagent/resolver/routes.py | 4 +- .../models/issuer_rev_reg_record.py | 16 +-- .../revocation/models/revocation_registry.py | 4 +- .../models/tests/test_revocation_registry.py | 12 +- aries_cloudagent/revocation/recover.py | 4 +- aries_cloudagent/revocation/routes.py | 46 ++----- .../revocation/tests/test_manager.py | 12 +- .../revocation_anoncreds/manager.py | 4 +- .../revocation_anoncreds/recover.py | 4 +- .../revocation_anoncreds/routes.py | 12 +- .../tests/test_manager.py | 4 +- .../revocation_anoncreds/tests/test_routes.py | 12 +- aries_cloudagent/storage/in_memory.py | 4 +- aries_cloudagent/storage/record.py | 4 +- .../tests/test_in_memory_vc_holder.py | 4 +- aries_cloudagent/tails/tests/test_indy.py | 20 +-- aries_cloudagent/transport/inbound/receipt.py | 2 +- .../inbound/tests/test_http_transport.py | 4 +- .../transport/inbound/tests/test_manager.py | 4 +- .../inbound/tests/test_ws_transport.py | 6 +- .../transport/outbound/manager.py | 11 +- .../outbound/tests/test_http_transport.py | 12 +- .../outbound/tests/test_ws_transport.py | 4 +- 
aries_cloudagent/transport/pack_format.py | 12 +- .../transport/queue/tests/test_basic_queue.py | 8 +- .../transport/tests/test_pack_format.py | 14 +-- aries_cloudagent/utils/classloader.py | 4 +- aries_cloudagent/utils/endorsement_setup.py | 8 +- aries_cloudagent/utils/http.py | 3 +- aries_cloudagent/utils/jwe.py | 4 +- aries_cloudagent/utils/profiles.py | 4 +- aries_cloudagent/utils/repeat.py | 4 +- aries_cloudagent/utils/stats.py | 4 +- aries_cloudagent/utils/task_queue.py | 4 +- .../utils/tests/test_classloader.py | 12 +- aries_cloudagent/utils/tests/test_http.py | 4 +- aries_cloudagent/utils/tests/test_jwe.py | 24 ++-- aries_cloudagent/utils/tests/test_repeat.py | 6 +- .../vc/ld_proofs/crypto/key_pair.py | 4 +- .../vc/ld_proofs/crypto/wallet_key_pair.py | 4 +- .../vc/ld_proofs/document_downloader.py | 12 +- .../tests/test_controller_proof_purpose.py | 12 +- .../suites/bbs_bls_signature_proof_2020.py | 8 +- .../vc/ld_proofs/tests/test_check.py | 7 +- .../dif_presentation_submission_v1.py | 22 ++-- .../vc/tests/contexts/schema_org.py | 52 ++------ aries_cloudagent/vc/vc_di/prove.py | 18 +-- .../vc/vc_di/tests/test_manager.py | 1 - aries_cloudagent/vc/vc_di/tests/test_prove.py | 9 +- aries_cloudagent/vc/vc_ld/issue.py | 4 +- aries_cloudagent/vc/vc_ld/manager.py | 4 +- .../vc/vc_ld/tests/test_credential.py | 8 +- .../vc/vc_ld/tests/test_manager.py | 4 +- aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py | 8 +- aries_cloudagent/vc/vc_ld/verify.py | 4 +- aries_cloudagent/wallet/anoncreds_upgrade.py | 8 +- aries_cloudagent/wallet/askar.py | 8 +- aries_cloudagent/wallet/jwt.py | 8 +- aries_cloudagent/wallet/key_pair.py | 12 +- .../wallet/models/wallet_record.py | 4 +- aries_cloudagent/wallet/routes.py | 24 +--- aries_cloudagent/wallet/sd_jwt.py | 8 +- .../wallet/tests/test_anoncreds_upgrade.py | 17 +-- aries_cloudagent/wallet/tests/test_bbs.py | 4 +- aries_cloudagent/wallet/tests/test_crypto.py | 3 +- .../test_default_verification_key_strategy.py | 8 +- .../wallet/tests/test_did_method.py | 8 +- .../wallet/tests/test_in_memory_wallet.py | 17 +-- aries_cloudagent/wallet/tests/test_jwt.py | 8 +- aries_cloudagent/wallet/tests/test_routes.py | 12 +- demo/features/environment.py | 1 - demo/features/steps/0453-issue-credential.py | 24 ++-- demo/features/steps/0454-present-proof.py | 4 +- demo/features/steps/0586-sign-transaction.py | 12 +- demo/features/steps/revocation-api.py | 4 +- .../steps/taa-txn-author-agreement.py | 4 - demo/playground/examples/tests/__init__.py | 4 +- .../tests/test_mediator_ping_agents.py | 14 +-- .../examples/tests/test_ping_agents.py | 8 +- demo/runners/acme.py | 7 +- demo/runners/agent_container.py | 42 ++----- demo/runners/faber.py | 6 +- demo/runners/performance.py | 4 +- demo/runners/support/agent.py | 56 +++------ 333 files changed, 1152 insertions(+), 2891 deletions(-) diff --git a/.github/workflows/format.yml b/.github/workflows/format.yml index 77918ae3f3..74302d3ee4 100644 --- a/.github/workflows/format.yml +++ b/.github/workflows/format.yml @@ -16,3 +16,6 @@ jobs: python-version: "3.12" - name: Ruff Format and Lint Check uses: chartboost/ruff-action@v1 + with: + version: 0.5.7 + args: "format --check" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5f019d9070..9b9edb79c9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -8,12 +8,12 @@ repos: additional_dependencies: ['@commitlint/config-conventional'] - repo: https://github.com/astral-sh/ruff-pre-commit # Ensure this is synced with pyproject.toml - rev: v0.5.0 + rev: v0.5.7 
hooks: # Run the linter - id: ruff stages: [commit] - args: [--fix, --exit-non-zero-on-fix] + args: [--fix, --exit-non-zero-on-fix, "--config", "pyproject.toml"] # Run the formatter - id: ruff-format stages: [commit] diff --git a/aries_cloudagent/admin/request_context.py b/aries_cloudagent/admin/request_context.py index 8d88bc539f..a8d17fe2e4 100644 --- a/aries_cloudagent/admin/request_context.py +++ b/aries_cloudagent/admin/request_context.py @@ -24,7 +24,7 @@ def __init__( context: Optional[InjectionContext] = None, settings: Optional[Mapping[str, object]] = None, root_profile: Optional[Profile] = None, - metadata: Optional[dict] = None + metadata: Optional[dict] = None, ): """Initialize an instance of AdminRequestContext.""" self._context = (context or profile.context).start_scope(settings) @@ -154,8 +154,6 @@ def __repr__(self) -> str: """ skip = ("session",) items = ( - "{}={}".format(k, repr(v)) - for k, v in self.__dict__.items() - if k not in skip + "{}={}".format(k, repr(v)) for k, v in self.__dict__.items() if k not in skip ) return "<{}({})>".format(self.__class__.__name__, ", ".join(items)) diff --git a/aries_cloudagent/admin/routes.py b/aries_cloudagent/admin/routes.py index 62abab842a..583be2e331 100644 --- a/aries_cloudagent/admin/routes.py +++ b/aries_cloudagent/admin/routes.py @@ -52,17 +52,13 @@ class AdminResetSchema(OpenAPISchema): class AdminStatusLivelinessSchema(OpenAPISchema): """Schema for the liveliness endpoint.""" - alive = fields.Boolean( - metadata={"description": "Liveliness status", "example": True} - ) + alive = fields.Boolean(metadata={"description": "Liveliness status", "example": True}) class AdminStatusReadinessSchema(OpenAPISchema): """Schema for the readiness endpoint.""" - ready = fields.Boolean( - metadata={"description": "Readiness status", "example": True} - ) + ready = fields.Boolean(metadata={"description": "Readiness status", "example": True}) class AdminShutdownSchema(OpenAPISchema): diff --git a/aries_cloudagent/admin/server.py b/aries_cloudagent/admin/server.py index 36ad7944e4..58fa2d9eab 100644 --- a/aries_cloudagent/admin/server.py +++ b/aries_cloudagent/admin/server.py @@ -243,9 +243,7 @@ def __init__( """ self.app = None self.admin_api_key = context.settings.get("admin.admin_api_key") - self.admin_insecure_mode = bool( - context.settings.get("admin.admin_insecure_mode") - ) + self.admin_insecure_mode = bool(context.settings.get("admin.admin_insecure_mode")) self.host = host self.port = port self.context = context @@ -602,9 +600,7 @@ async def websocket_handler(self, request): msg_received = receive.result() msg_api_key = msg_received.get("x-api-key") except Exception: - LOGGER.exception( - "Exception in websocket receiving task:" - ) + LOGGER.exception("Exception in websocket receiving task:") if self.admin_api_key and general_utils.const_compare( self.admin_api_key, msg_api_key ): diff --git a/aries_cloudagent/admin/tests/test_admin_server.py b/aries_cloudagent/admin/tests/test_admin_server.py index 24c8ebfe6c..0772610a0d 100644 --- a/aries_cloudagent/admin/tests/test_admin_server.py +++ b/aries_cloudagent/admin/tests/test_admin_server.py @@ -171,17 +171,13 @@ async def test_start_stop(self): server = self.get_admin_server(settings) await server.start() assert server.app._client_max_size == 4 * 1024 * 1024 - with mock.patch.object( - server, "websocket_queues", mock.MagicMock() - ) as mock_wsq: + with mock.patch.object(server, "websocket_queues", mock.MagicMock()) as mock_wsq: mock_wsq.values = mock.MagicMock( 
return_value=[mock.MagicMock(stop=mock.MagicMock())] ) await server.stop() - with mock.patch.object( - web.TCPSite, "start", mock.CoroutineMock() - ) as mock_start: + with mock.patch.object(web.TCPSite, "start", mock.CoroutineMock()) as mock_start: mock_start.side_effect = OSError("Failure to launch") with self.assertRaises(AdminSetupError): await self.get_admin_server(settings).start() @@ -280,8 +276,7 @@ async def test_visit_secure_mode(self): assert response.headers["Access-Control-Allow-Headers"] == "X-API-KEY" assert response.headers["Access-Control-Allow-Methods"] == "GET" assert ( - response.headers["Access-Control-Allow-Origin"] - == "http://localhost:3000" + response.headers["Access-Control-Allow-Origin"] == "http://localhost:3000" ) async with self.client_session.ws_connect( diff --git a/aries_cloudagent/admin/tests/test_auth.py b/aries_cloudagent/admin/tests/test_auth.py index 2d6700a147..f0a1b2a0e1 100644 --- a/aries_cloudagent/admin/tests/test_auth.py +++ b/aries_cloudagent/admin/tests/test_auth.py @@ -11,7 +11,6 @@ class TestAdminAuthentication(IsolatedAsyncioTestCase): def setUp(self) -> None: - self.profile = InMemoryProfile.test_profile( settings={ "admin.admin_api_key": "admin_api_key", @@ -64,7 +63,6 @@ async def test_valid_api_key(self): class TestTenantAuthentication(IsolatedAsyncioTestCase): def setUp(self) -> None: - self.profile = InMemoryProfile.test_profile( settings={ "admin.admin_api_key": "admin_api_key", diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/author.py b/aries_cloudagent/anoncreds/default/legacy_indy/author.py index 82f139784f..4777030e98 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/author.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/author.py @@ -27,9 +27,7 @@ async def get_endorser_info(profile, options: Optional[dict] = None): connection_record = await ConnRecord.retrieve_by_id( session, endorser_connection_id ) - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") except StorageNotFoundError as err: raise web.HTTPNotFound( reason=f"Connection for endorser with id {endorser_connection_id} not found" diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/recover.py b/aries_cloudagent/anoncreds/default/legacy_indy/recover.py index 1933e69763..d8bde0103c 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/recover.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/recover.py @@ -37,9 +37,9 @@ async def _check_tails_hash_for_inconsistency(tails_location: str, tails_hash: s LOGGER.debug("Tails URL: %s", tails_location) tails_data_http_response = await session.get(tails_location) tails_data = await tails_data_http_response.read() - remote_tails_hash = base58.b58encode( - hashlib.sha256(tails_data).digest() - ).decode("utf-8") + remote_tails_hash = base58.b58encode(hashlib.sha256(tails_data).digest()).decode( + "utf-8" + ) if remote_tails_hash != tails_hash: raise RevocRecoveryException( f"Tails hash mismatch {remote_tails_hash} {tails_hash}" @@ -48,7 +48,9 @@ async def _check_tails_hash_for_inconsistency(tails_location: str, tails_hash: s LOGGER.debug(f"Checked tails hash: {tails_hash}") -async def fetch_txns(genesis_txns: str, registry_id: str, issuer_id: str) -> tuple[ +async def fetch_txns( + genesis_txns: str, registry_id: str, issuer_id: str +) -> tuple[ dict, set[int], ]: diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py 
b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py index fa4b062852..5a2bf02acc 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/registry.py @@ -257,9 +257,7 @@ async def register_schema( profile, options ) - write_ledger = ( - True if endorser_did is None and not create_transaction else False - ) + write_ledger = True if endorser_did is None and not create_transaction else False # Get either the transaction or the seq_no or the created schema async with ledger: @@ -428,9 +426,7 @@ async def register_credential_definition( profile, options ) - write_ledger = ( - True if endorser_did is None and not create_transaction else False - ) + write_ledger = True if endorser_did is None and not create_transaction else False async with ledger: try: @@ -611,9 +607,7 @@ async def register_revocation_registry_definition( profile, options ) - write_ledger = ( - True if endorser_did is None and not create_transaction else False - ) + write_ledger = True if endorser_did is None and not create_transaction else False try: async with ledger: @@ -890,9 +884,7 @@ async def register_revocation_list( profile, options ) - write_ledger = ( - True if endorser_did is None and not create_transaction else False - ) + write_ledger = True if endorser_did is None and not create_transaction else False result = await self._revoc_reg_entry_with_fix( profile, @@ -996,9 +988,7 @@ async def update_revocation_list( profile, options ) - write_ledger = ( - True if endorser_did is None and not create_transaction else False - ) + write_ledger = True if endorser_did is None and not create_transaction else False result = await self._revoc_reg_entry_with_fix( profile, @@ -1116,9 +1106,7 @@ def _wallet_accumalator_matches_ledger_list( ) async with ledger: - (rev_reg_delta, _) = await ledger.get_revoc_reg_delta( - rev_list.rev_reg_def_id - ) + (rev_reg_delta, _) = await ledger.get_revoc_reg_delta(rev_list.rev_reg_def_id) async with profile.session() as session: LOGGER.debug(f"revocation_list = {rev_list.revocation_list}") @@ -1163,9 +1151,7 @@ async def _revoked_issuer_cred_rev_record_ids() -> List[int]: session, rev_reg_id=rev_list.rev_reg_def_id ) return [ - int(rec.cred_rev_id) - for rec in cred_rev_records - if rec.state == "revoked" + int(rec.cred_rev_id) for rec in cred_rev_records if rec.state == "revoked" ] def _revocation_list_to_array_of_indexes( diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_recover.py b/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_recover.py index 2973c1824b..d8bd99482d 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_recover.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_recover.py @@ -40,7 +40,6 @@ @pytest.mark.anoncreds class TestLegacyIndyRecover(IsolatedAsyncioTestCase): - @mock.patch.object( indy_vdr, "open_pool", @@ -108,7 +107,6 @@ async def test_fetch_txns(self, *_): ), ) async def test_generate_ledger_rrrecovery_txn(self): - # Has updates result = await generate_ledger_rrrecovery_txn( GENESIS, diff --git a/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py b/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py index 8c549a13db..2fc3bdcfc2 100644 --- a/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py +++ b/aries_cloudagent/anoncreds/default/legacy_indy/tests/test_registry.py @@ -157,9 +157,7 @@ async def test_supported_did_regex(self): assert 
self.registry.supported_identifiers_regex == SUPPORTED_ID_REGEX assert bool(self.registry.supported_identifiers_regex.match(TEST_INDY_DID)) assert bool(self.registry.supported_identifiers_regex.match(TEST_INDY_DID_1)) - assert bool( - self.registry.supported_identifiers_regex.match(TEST_INDY_SCHEMA_ID) - ) + assert bool(self.registry.supported_identifiers_regex.match(TEST_INDY_SCHEMA_ID)) assert bool( self.registry.supported_identifiers_regex.match(TEST_INDY_REV_REG_DEF_ID) ) @@ -790,9 +788,7 @@ async def test_register_revocation_registry_definition_with_create_transaction_a async def test_txn_submit(self): self.profile.context.injector.bind_instance( BaseLedger, - mock.MagicMock( - txn_submit=mock.CoroutineMock(return_value="transaction_id") - ), + mock.MagicMock(txn_submit=mock.CoroutineMock(return_value="transaction_id")), ) async with self.profile.session() as session: ledger = session.inject(BaseLedger) @@ -883,7 +879,6 @@ async def test_register_revocation_list_no_endorsement( async def test_register_revocation_list_with_author_role( self, mock_send_revoc_reg_entry, mock_create_record, _ ): - self.profile.inject_or = mock.MagicMock() self.profile.settings.set_value("endorser.author", True) @@ -1104,7 +1099,6 @@ async def test_register_revocation_list_with_create_transaction_option_and_auto_ mock.CoroutineMock(return_value=MockTxn()), ) async def test_fix_ledger_entry(self, *_): - self.profile.context.injector.bind_instance( BaseLedger, mock.MagicMock(send_revoc_reg_entry=mock.CoroutineMock(return_value={})), diff --git a/aries_cloudagent/anoncreds/holder.py b/aries_cloudagent/anoncreds/holder.py index 2f6e92e50c..86e8432cdc 100644 --- a/aries_cloudagent/anoncreds/holder.py +++ b/aries_cloudagent/anoncreds/holder.py @@ -103,9 +103,7 @@ async def get_master_secret(self) -> str: # isn't accepted by cred.process secret = record.value.decode("ascii") except AnoncredsError as err: - raise AnonCredsHolderError( - "Error loading master secret" - ) from err + raise AnonCredsHolderError("Error loading master secret") from err break else: try: @@ -236,9 +234,7 @@ async def _finish_store_credential( schema_id = cred_recvd.schema_id schema_id_parts = re.match(r"^(\w+):2:([^:]+):([^:]+)$", schema_id) if not schema_id_parts: - raise AnonCredsHolderError( - f"Error parsing credential schema ID: {schema_id}" - ) + raise AnonCredsHolderError(f"Error parsing credential schema ID: {schema_id}") cred_def_id = cred_recvd.cred_def_id cdef_id_parts = re.match(r"^(\w+):3:CL:([^:]+):([^:]+)$", cred_def_id) if not cdef_id_parts: @@ -478,9 +474,7 @@ async def get_credentials_for_presentation_request_by_referent( if row.name in creds: creds[row.name]["presentation_referents"].add(reft) else: - cred_info = _make_cred_info( - row.name, Credential.load(row.raw_value) - ) + cred_info = _make_cred_info(row.name, Credential.load(row.raw_value)) creds[row.name] = { "cred_info": cred_info, "interval": presentation_request.get("non_revoked"), @@ -590,9 +584,7 @@ async def get_mime_type( credential_id, ) except AskarError as err: - raise AnonCredsHolderError( - "Error retrieving credential mime types" - ) from err + raise AnonCredsHolderError("Error retrieving credential mime types") from err values = mime_types_record and mime_types_record.value_json if values: return values.get(attr) if attr else values @@ -676,10 +668,7 @@ def get_rev_state(cred_id: str, detail: dict): present_creds, self_attest, secret, - { - schema_id: schema.to_native() - for schema_id, schema in schemas.items() - }, + {schema_id: schema.to_native() 
for schema_id, schema in schemas.items()}, { cred_def_id: cred_def.to_native() for cred_def_id, cred_def in credential_definitions.items() diff --git a/aries_cloudagent/anoncreds/issuer.py b/aries_cloudagent/anoncreds/issuer.py index 513b3dc573..9aedc6db1a 100644 --- a/aries_cloudagent/anoncreds/issuer.py +++ b/aries_cloudagent/anoncreds/issuer.py @@ -321,10 +321,7 @@ async def create_and_register_credential_definition( raise ValueError("max_cred_num must be an integer") # Don't allow revocable cred def to be created without tails server base url - if ( - not self.profile.settings.get("tails_server_base_url") - and support_revocation - ): + if not self.profile.settings.get("tails_server_base_url") and support_revocation: raise AnonCredsIssuerError( "tails_server_base_url not configured. Can't create revocable credential definition." # noqa: E501 ) @@ -568,9 +565,7 @@ async def create_credential_offer(self, credential_definition_id: str) -> str: CATEGORY_CRED_DEF_KEY_PROOF, credential_definition_id ) except AskarError as err: - raise AnonCredsIssuerError( - "Error retrieving credential definition" - ) from err + raise AnonCredsIssuerError("Error retrieving credential definition") from err if not cred_def or not key_proof: raise AnonCredsIssuerError( "Credential definition not found for credential offer" @@ -611,9 +606,7 @@ async def create_credential( CATEGORY_CRED_DEF_PRIVATE, cred_def_id ) except AskarError as err: - raise AnonCredsIssuerError( - "Error retrieving credential definition" - ) from err + raise AnonCredsIssuerError("Error retrieving credential definition") from err if not cred_def or not cred_def_private: raise AnonCredsIssuerError( @@ -670,9 +663,7 @@ async def create_credential_w3c( CATEGORY_CRED_DEF_PRIVATE, cred_def_id ) except AskarError as err: - raise AnonCredsIssuerError( - "Error retrieving credential definition" - ) from err + raise AnonCredsIssuerError("Error retrieving credential definition") from err if not cred_def or not cred_def_private: raise AnonCredsIssuerError( diff --git a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py index 85cf81db50..9754399853 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_revocation.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_revocation.py @@ -253,9 +253,7 @@ def __init__( @property def rev_reg_def_id(self): """Revocation Registry Definition ID.""" - return ( - self.revocation_registry_definition_state.revocation_registry_definition_id - ) + return self.revocation_registry_definition_state.revocation_registry_definition_id @property def rev_reg_def(self): diff --git a/aries_cloudagent/anoncreds/models/anoncreds_schema.py b/aries_cloudagent/anoncreds/models/anoncreds_schema.py index 1357a79e0a..b2383ff60f 100644 --- a/aries_cloudagent/anoncreds/models/anoncreds_schema.py +++ b/aries_cloudagent/anoncreds/models/anoncreds_schema.py @@ -92,7 +92,7 @@ def __init__( schema_id: str, resolution_metadata: Dict[str, Any], schema_metadata: Dict[str, Any], - **kwargs + **kwargs, ): """Initialize an instance. @@ -205,7 +205,7 @@ def __init__( schema_state: SchemaState, registration_metadata: Optional[dict] = None, schema_metadata: Optional[dict] = None, - **kwargs + **kwargs, ): """Initialize an instance. 
diff --git a/aries_cloudagent/anoncreds/registry.py b/aries_cloudagent/anoncreds/registry.py index 8a920412d2..79aba036cc 100644 --- a/aries_cloudagent/anoncreds/registry.py +++ b/aries_cloudagent/anoncreds/registry.py @@ -45,9 +45,7 @@ def register(self, registry: BaseAnonCredsHandler): async def _resolver_for_identifier(self, identifier: str) -> BaseAnonCredsResolver: resolvers = [ - resolver - for resolver in self.resolvers - if await resolver.supports(identifier) + resolver for resolver in self.resolvers if await resolver.supports(identifier) ] if len(resolvers) == 0: raise AnonCredsResolutionError( @@ -59,9 +57,7 @@ async def _resolver_for_identifier(self, identifier: str) -> BaseAnonCredsResolv ) return resolvers[0] - async def _registrar_for_identifier( - self, identifier: str - ) -> BaseAnonCredsRegistrar: + async def _registrar_for_identifier(self, identifier: str) -> BaseAnonCredsRegistrar: registrars = [ registrar for registrar in self.registrars @@ -111,9 +107,7 @@ async def register_credential_definition( options: Optional[dict] = None, ) -> CredDefResult: """Register a credential definition on the registry.""" - registrar = await self._registrar_for_identifier( - credential_definition.issuer_id - ) + registrar = await self._registrar_for_identifier(credential_definition.issuer_id) return await registrar.register_credential_definition( profile, schema, diff --git a/aries_cloudagent/anoncreds/revocation.py b/aries_cloudagent/anoncreds/revocation.py index 29c4c3c283..790feae27e 100644 --- a/aries_cloudagent/anoncreds/revocation.py +++ b/aries_cloudagent/anoncreds/revocation.py @@ -195,9 +195,7 @@ async def create_and_register_revocation_registry_definition( ), ) except AnoncredsError as err: - raise AnonCredsRevocationError( - "Error creating revocation registry" - ) from err + raise AnonCredsRevocationError("Error creating revocation registry") from err rev_reg_def = RevRegDef.from_native(rev_reg_def) @@ -253,9 +251,7 @@ async def store_revocation_registry_definition( if result.revocation_registry_definition_state.state == STATE_FINISHED: await self.notify( - RevRegDefFinishedEvent.with_payload( - identifier, rev_reg_def, options - ) + RevRegDefFinishedEvent.with_payload(identifier, rev_reg_def, options) ) except AskarError as err: raise AnonCredsRevocationError( @@ -526,9 +522,7 @@ async def finish_revocation_list( ) await txn.commit() # Notify about revoked creds on any list update - await self.notify( - RevListFinishedEvent.with_payload(rev_reg_def_id, revoked) - ) + await self.notify(RevListFinishedEvent.with_payload(rev_reg_def_id, revoked)) async def update_revocation_list( self, @@ -571,9 +565,7 @@ async def update_revocation_list( rev_reg_def = RevRegDef.deserialize(rev_reg_def_entry.value_json) rev_list = RevList.deserialize(rev_list_entry.value_json["rev_list"]) if rev_list.revocation_list != curr.revocation_list: - raise AnonCredsRevocationError( - "Passed revocation list does not match stored" - ) + raise AnonCredsRevocationError("Passed revocation list does not match stored") anoncreds_registry = self.profile.inject(AnonCredsRegistry) result = await anoncreds_registry.update_revocation_list( @@ -604,9 +596,7 @@ async def update_revocation_list( return result - async def get_created_revocation_list( - self, rev_reg_def_id: str - ) -> Optional[RevList]: + async def get_created_revocation_list(self, rev_reg_def_id: str) -> Optional[RevList]: """Return rev list from record in wallet.""" try: async with self.profile.session() as session: @@ -654,9 +644,7 @@ async def 
retrieve_tails(self, rev_reg_def: RevRegDef) -> str: with open(tails_file_path, "wb", buffer_size) as tails_file: with Session() as req_session: try: - resp = req_session.get( - rev_reg_def.value.tails_location, stream=True - ) + resp = req_session.get(rev_reg_def.value.tails_location, stream=True) # Should this directly raise an Error? if resp.status_code != http.HTTPStatus.OK: LOGGER.warning( @@ -990,15 +978,11 @@ async def _create_credential( """ - def _handle_missing_entries( - rev_list: Entry, rev_reg_def: Entry, rev_key: Entry - ): + def _handle_missing_entries(rev_list: Entry, rev_reg_def: Entry, rev_key: Entry): if not rev_list: raise AnonCredsRevocationError("Revocation registry list not found") if not rev_reg_def: - raise AnonCredsRevocationError( - "Revocation registry definition not found" - ) + raise AnonCredsRevocationError("Revocation registry definition not found") if not rev_key: raise AnonCredsRevocationError( "Revocation registry definition private data not found" @@ -1049,9 +1033,7 @@ def _has_required_id_and_tails_path(): # If something goes wrong later, the index will be skipped. # FIXME - double check issuance type in case of upgraded wallet? if rev_reg_index > rev_reg_def.max_cred_num: - raise AnonCredsRevocationRegistryFullError( - "Revocation registry is full" - ) + raise AnonCredsRevocationRegistryFullError("Revocation registry is full") rev_list_value_json["next_index"] = rev_reg_index + 1 async with self.profile.transaction() as txn: await txn.handle.replace( @@ -1162,9 +1144,7 @@ async def _create_credential_helper( rev_reg_def_result = None if revocable: - rev_reg_def_result = await self.get_or_create_active_registry( - cred_def_id - ) + rev_reg_def_result = await self.get_or_create_active_registry(cred_def_id) if ( rev_reg_def_result.revocation_registry_definition_state.state != STATE_FINISHED @@ -1202,9 +1182,7 @@ def _is_full_registry( <= int(cred_rev_id) + 1 ) - if rev_reg_def_result and _is_full_registry( - rev_reg_def_result, cred_rev_id - ): + if rev_reg_def_result and _is_full_registry(rev_reg_def_result, cred_rev_id): await self.handle_full_registry(rev_reg_def_id) return cred_json, cred_rev_id, rev_reg_def_id @@ -1294,9 +1272,7 @@ async def revoke_pending_credentials( # TODO This is a little rough; stored tails location will have public uri # but library needs local tails location rev_reg_def = RevRegDef.deserialize(rev_reg_def_entry.value_json) - rev_reg_def.value.tails_location = self.get_local_tails_path( - rev_reg_def - ) + rev_reg_def.value.tails_location = self.get_local_tails_path(rev_reg_def) cred_def = CredDef.deserialize(cred_def_entry.value_json) rev_reg_def_private = RevocationRegistryDefinitionPrivate.load( rev_reg_def_private_entry.value_json diff --git a/aries_cloudagent/anoncreds/tests/test_holder.py b/aries_cloudagent/anoncreds/tests/test_holder.py index 6c44e7c21b..e622d99495 100644 --- a/aries_cloudagent/anoncreds/tests/test_holder.py +++ b/aries_cloudagent/anoncreds/tests/test_holder.py @@ -56,9 +56,7 @@ class MockCredReceived: def __init__(self, bad_schema=False, bad_cred_def=False): self.schema_id = "Sc886XPwD1gDcHwmmLDeR2:2:degree schema:45.101.94" - self.cred_def_id = ( - "Sc886XPwD1gDcHwmmLDeR2:3:CL:229975:faber.agent.degree_schema" - ) + self.cred_def_id = "Sc886XPwD1gDcHwmmLDeR2:3:CL:229975:faber.agent.degree_schema" if bad_schema: self.schema_id = "bad-schema-id" @@ -79,9 +77,7 @@ def to_dict(self): class MockCredReceivedW3C: def __init__(self, bad_schema=False, bad_cred_def=False): self.schema_id = 
"Sc886XPwD1gDcHwmmLDeR2:2:degree schema:45.101.94" - self.cred_def_id = ( - "Sc886XPwD1gDcHwmmLDeR2:3:CL:229975:faber.agent.degree_schema" - ) + self.cred_def_id = "Sc886XPwD1gDcHwmmLDeR2:3:CL:229975:faber.agent.degree_schema" if bad_schema: self.schema_id = "bad-schema-id" @@ -206,9 +202,7 @@ async def test_get_master_secret_does_not_return_master_secret( with self.assertRaises(AnonCredsHolderError): await self.holder.get_master_secret() - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( CredentialRequest, "create", @@ -270,9 +264,7 @@ async def test_create_credential_request_with_non_anoncreds_profile_throws_x(sel "holder-did", ) - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") async def test_store_credential_fails_to_load_raises_x(self, mock_master_secret): with self.assertRaises(AnonCredsHolderError): await self.holder.store_credential( @@ -290,9 +282,7 @@ async def test_store_credential_fails_to_load_raises_x(self, mock_master_secret) ) assert mock_master_secret.called - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( Credential, "load", @@ -349,9 +339,7 @@ async def test_store_credential(self, mock_load, mock_master_secret): {"cred-req-meta": "cred-req-meta"}, ) - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( W3cCredential, "load", @@ -369,7 +357,6 @@ async def test_store_credential(self, mock_load, mock_master_secret): async def test_store_credential_w3c( self, mock_load, mock_w3cload, mock_master_secret ): - self.profile.context.injector.bind_instance( DocumentLoader, custom_document_loader ) @@ -381,9 +368,7 @@ async def test_store_credential_w3c( ) with mock.patch.object(jsonld, "expand", return_value=MagicMock()): - with mock.patch.object( - JsonLdProcessor, "get_values", return_value=["type1"] - ): + with mock.patch.object(JsonLdProcessor, "get_values", return_value=["type1"]): result = await self.holder.store_credential_w3c( MOCK_CRED_DEF, MOCK_W3C_CRED, @@ -397,9 +382,7 @@ async def test_store_credential_w3c( assert mock_w3cload.called assert self.profile.transaction.called - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object(Credential, "load", return_value=MockCredential()) async def test_store_credential_failed_trx(self, mock_load, mock_master_secret): self.profile.transaction = mock.MagicMock( @@ -542,9 +525,7 @@ async def test_get_mime_type(self, mock_handle): assert mock_handle.fetch.call_count == 2 @mock.patch.object(InMemoryProfileSession, "handle") - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( anoncreds.Presentation, "create", return_value=Presentation.load(MOCK_PRES) ) @@ -592,9 +573,7 @@ async def test_create_presentation( ) @mock.patch.object(InMemoryProfileSession, "handle") 
- @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( anoncreds.Presentation, "create", return_value=Presentation.load(MOCK_PRES) ) @@ -687,9 +666,7 @@ async def test_create_presentation_with_revocation( assert mock_handle.fetch.called @mock.patch.object(InMemoryProfileSession, "handle") - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( anoncreds.Presentation, "create", @@ -714,9 +691,7 @@ async def test_create_presentation_create_error( ) @mock.patch.object(InMemoryProfileSession, "handle") - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( anoncreds.W3cPresentation, "create", @@ -739,9 +714,7 @@ async def test_create_presentation_w3c( mock_handle.fetch.assert_called @mock.patch.object(InMemoryProfileSession, "handle") - @mock.patch.object( - AnonCredsHolder, "get_master_secret", return_value="master-secret" - ) + @mock.patch.object(AnonCredsHolder, "get_master_secret", return_value="master-secret") @mock.patch.object( anoncreds.W3cPresentation, "create", diff --git a/aries_cloudagent/anoncreds/tests/test_issuer.py b/aries_cloudagent/anoncreds/tests/test_issuer.py index 673dfe462e..c48c9a6d6b 100644 --- a/aries_cloudagent/anoncreds/tests/test_issuer.py +++ b/aries_cloudagent/anoncreds/tests/test_issuer.py @@ -172,9 +172,7 @@ async def test_create_and_register_schema(self, mock_session_handle): mock_session_handle.insert = mock.CoroutineMock(return_value=None) self.profile.inject = mock.Mock( return_value=mock.MagicMock( - register_schema=mock.CoroutineMock( - return_value=get_mock_schema_result() - ) + register_schema=mock.CoroutineMock(return_value=get_mock_schema_result()) ) ) result = await self.issuer.create_and_register_schema( @@ -274,9 +272,7 @@ async def test_create_and_register_schema_fail_insert(self, mock_session_handle) ) self.profile.inject = mock.Mock( return_value=mock.MagicMock( - register_schema=mock.CoroutineMock( - return_value=get_mock_schema_result() - ) + register_schema=mock.CoroutineMock(return_value=get_mock_schema_result()) ) ) @@ -475,9 +471,7 @@ async def test_create_and_register_credential_definition_invalid_options_raises_ ) @mock.patch.object(test_module.AnonCredsIssuer, "notify") - async def test_create_and_register_credential_definition_finishes( - self, mock_notify - ): + async def test_create_and_register_credential_definition_finishes(self, mock_notify): self.profile.inject = mock.Mock( return_value=mock.MagicMock( get_schema=mock.CoroutineMock( @@ -636,9 +630,7 @@ async def test_match_created_cred_defs(self, mock_session_handle): assert result == "name4" @mock.patch.object(InMemoryProfileSession, "handle") - async def test_create_credential_offer_cred_def_not_found( - self, mock_session_handle - ): + async def test_create_credential_offer_cred_def_not_found(self, mock_session_handle): # None, Valid # Valid, None # None, None diff --git a/aries_cloudagent/anoncreds/tests/test_revocation.py b/aries_cloudagent/anoncreds/tests/test_revocation.py index 7f98764348..26f9ff8108 100644 --- a/aries_cloudagent/anoncreds/tests/test_revocation.py +++ b/aries_cloudagent/anoncreds/tests/test_revocation.py @@ -235,14 
+235,12 @@ async def test_create_and_register_revocation_registry_definition( return_value=MockEntry(raw_value=cred_def.to_json_buffer()) ) - result = ( - await self.revocation.create_and_register_revocation_registry_definition( - issuer_id="did:indy:sovrin:SGrjRL82Y9ZZbzhUDXokvQ", - cred_def_id="CsQY9MGeD3CQP4EyuVFo5m:3:CL:14951:MYCO_Biomarker", - registry_type="CL_ACCUM", - tag="tag", - max_cred_num=100, - ) + result = await self.revocation.create_and_register_revocation_registry_definition( + issuer_id="did:indy:sovrin:SGrjRL82Y9ZZbzhUDXokvQ", + cred_def_id="CsQY9MGeD3CQP4EyuVFo5m:3:CL:14951:MYCO_Biomarker", + registry_type="CL_ACCUM", + tag="tag", + max_cred_num=100, ) assert result is not None @@ -466,9 +464,7 @@ async def test_set_active_registry(self, mock_handle): rev_reg_def_id="test-rev-reg-def-id", ) - mock_handle.fetch = mock.CoroutineMock( - return_value=MockEntry(tags=inactive_tags) - ) + mock_handle.fetch = mock.CoroutineMock(return_value=MockEntry(tags=inactive_tags)) mock_handle.fetch_all = mock.CoroutineMock( return_value=[MockEntry(tags=inactive_tags), MockEntry(tags=inactive_tags)] ) @@ -1148,9 +1144,7 @@ async def call_test_func(): with self.assertRaises(test_module.AnonCredsRevocationError): await call_test_func() - @mock.patch.object( - AnonCredsIssuer, "cred_def_supports_revocation", return_value=True - ) + @mock.patch.object(AnonCredsIssuer, "cred_def_supports_revocation", return_value=True) async def test_create_credential(self, mock_supports_revocation): self.profile.inject = mock.Mock( return_value=mock.MagicMock( @@ -1315,9 +1309,7 @@ async def test_mark_pending_revocations(self, mock_handle): # rev list entry not found with self.assertRaises(test_module.AnonCredsRevocationError): - await self.revocation.mark_pending_revocations( - "test-rev-reg-id", int("200") - ) + await self.revocation.mark_pending_revocations("test-rev-reg-id", int("200")) # valid await self.revocation.mark_pending_revocations("test-rev-reg-id", int("200")) @@ -1383,9 +1375,7 @@ async def test_clear_pending_revocations_with_non_anoncreds_session(self): self.profile.session(), rev_reg_def_id="test-rev-reg-id" ) - @mock.patch.object( - AnonCredsIssuer, "cred_def_supports_revocation", return_value=True - ) + @mock.patch.object(AnonCredsIssuer, "cred_def_supports_revocation", return_value=True) async def test_create_credential_w3c(self, mock_supports_revocation): self.profile.inject = mock.Mock( return_value=mock.MagicMock( diff --git a/aries_cloudagent/anoncreds/tests/test_routes.py b/aries_cloudagent/anoncreds/tests/test_routes.py index 288f7f7eba..809b3f42e7 100644 --- a/aries_cloudagent/anoncreds/tests/test_routes.py +++ b/aries_cloudagent/anoncreds/tests/test_routes.py @@ -60,9 +60,7 @@ async def asyncSetUp(self) -> None: }, profile_class=AskarAnoncredsProfile, ) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.request_dict = { "context": self.context, } @@ -276,9 +274,7 @@ async def test_rev_reg_def_post(self, mock_match, mock_create): result = await test_module.rev_reg_def_post(self.request) - assert ( - json.loads(result.body)["revocation_registry_definition_id"] == "revRegId" - ) + assert json.loads(result.body)["revocation_registry_definition_id"] == "revRegId" assert mock_match.call_count == 1 assert mock_create.call_count == 1 @@ -296,9 +292,7 @@ async def test_rev_list_post(self, mock_create): return_value={"revRegDefId": "rev_reg_def_id", "options": 
{}} ) result = await test_module.rev_list_post(self.request) - assert ( - json.loads(result.body)["revocation_registry_definition_id"] == "revRegId" - ) + assert json.loads(result.body)["revocation_registry_definition_id"] == "revRegId" assert mock_create.call_count == 1 @mock.patch.object( diff --git a/aries_cloudagent/anoncreds/tests/test_verifier.py b/aries_cloudagent/anoncreds/tests/test_verifier.py index 4501f1351a..df830258b3 100644 --- a/aries_cloudagent/anoncreds/tests/test_verifier.py +++ b/aries_cloudagent/anoncreds/tests/test_verifier.py @@ -149,9 +149,9 @@ async def test_check_timestamps_with_names(self): ) # with rev_reg_id - mock_pres["identifiers"][0][ - "rev_reg_id" - ] = "TUku9MDGa7QALbAJX4oAww:3:TUku9MDGa7QALbAJX4oAww:3:CL:18:tag:CL_ACCUM:0" + mock_pres["identifiers"][0]["rev_reg_id"] = ( + "TUku9MDGa7QALbAJX4oAww:3:TUku9MDGa7QALbAJX4oAww:3:CL:18:tag:CL_ACCUM:0" + ) # Superfluous timestamp with self.assertRaises(ValueError): @@ -272,9 +272,9 @@ async def test_check_timestamps_with_name(self): ) # with rev_reg_id - mock_pres["identifiers"][0][ - "rev_reg_id" - ] = "TUku9MDGa7QALbAJX4oAww:3:TUku9MDGa7QALbAJX4oAww:3:CL:18:tag:CL_ACCUM:0" + mock_pres["identifiers"][0]["rev_reg_id"] = ( + "TUku9MDGa7QALbAJX4oAww:3:TUku9MDGa7QALbAJX4oAww:3:CL:18:tag:CL_ACCUM:0" + ) # Superfluous timestamp with self.assertRaises(ValueError): @@ -393,9 +393,9 @@ async def test_check_timestamps_predicates(self): ) # with rev_reg_id - mock_pres["identifiers"][0][ - "rev_reg_id" - ] = "TUku9MDGa7QALbAJX4oAww:3:TUku9MDGa7QALbAJX4oAww:3:CL:18:tag:CL_ACCUM:0" + mock_pres["identifiers"][0]["rev_reg_id"] = ( + "TUku9MDGa7QALbAJX4oAww:3:TUku9MDGa7QALbAJX4oAww:3:CL:18:tag:CL_ACCUM:0" + ) # Superfluous timestamp with self.assertRaises(ValueError): @@ -475,9 +475,9 @@ async def test_pre_verify(self): del mock_pres_req["requested_attributes"]["consent_attrs"]["name"] with self.assertRaises(ValueError): await self.verifier.pre_verify(mock_pres_req, mock_pres) - mock_pres_req["requested_attributes"]["consent_attrs"][ - "name" - ] = "jti_unique_identifier" + mock_pres_req["requested_attributes"]["consent_attrs"]["name"] = ( + "jti_unique_identifier" + ) # attr not in proof mock_pres["requested_proof"]["self_attested_attrs"] = {} with self.assertRaises(ValueError): diff --git a/aries_cloudagent/anoncreds/util.py b/aries_cloudagent/anoncreds/util.py index e558e33ad7..b6ba048df0 100644 --- a/aries_cloudagent/anoncreds/util.py +++ b/aries_cloudagent/anoncreds/util.py @@ -30,7 +30,9 @@ def indy_client_dir(subpath: str = None, create: bool = False) -> str: ( "Documents" if isdir(join(home, "Documents")) - else getenv("EXTERNAL_STORAGE", "") if system() == "Linux" else "" + else getenv("EXTERNAL_STORAGE", "") + if system() == "Linux" + else "" ), ".indy_client", subpath if subpath else "", diff --git a/aries_cloudagent/anoncreds/verifier.py b/aries_cloudagent/anoncreds/verifier.py index 9018a023a6..961d26cd8a 100644 --- a/aries_cloudagent/anoncreds/verifier.py +++ b/aries_cloudagent/anoncreds/verifier.py @@ -177,9 +177,7 @@ async def check_timestamps( index = revealed_attrs[uuid]["sub_proof_index"] if cred_defs[index].credential_definition.value.revocation: timestamp = pres["identifiers"][index].get("timestamp") - if (timestamp is not None) ^ bool( - non_revoc_intervals.get(uuid) - ): + if (timestamp is not None) ^ bool(non_revoc_intervals.get(uuid)): LOGGER.debug(f">>> uuid: {uuid}") LOGGER.debug( f">>> revealed_attrs[uuid]: {revealed_attrs[uuid]}" @@ -344,9 +342,7 @@ async def pre_verify(self, pres_req: dict, pres: 
dict) -> list: f"'{req_attr['name']}'" ) else: - raise ValueError( - f"Missing requested attribute '{req_attr['name']}'" - ) + raise ValueError(f"Missing requested attribute '{req_attr['name']}'") elif "names" in req_attr: group_spec = revealed_groups[uuid] pres_req_attr_spec = { @@ -417,18 +413,15 @@ async def process_pres_identifiers( if identifier.get("timestamp"): rev_lists.setdefault(identifier["rev_reg_id"], {}) - if ( - identifier["timestamp"] - not in rev_lists[identifier["rev_reg_id"]] - ): + if identifier["timestamp"] not in rev_lists[identifier["rev_reg_id"]]: result = await anoncreds_registry.get_revocation_list( self.profile, identifier["rev_reg_id"], timestamp_to=identifier["timestamp"], ) - rev_lists[identifier["rev_reg_id"]][ - identifier["timestamp"] - ] = result.revocation_list.serialize() + rev_lists[identifier["rev_reg_id"]][identifier["timestamp"]] = ( + result.revocation_list.serialize() + ) return ( schemas, cred_defs, diff --git a/aries_cloudagent/askar/didcomm/v1.py b/aries_cloudagent/askar/didcomm/v1.py index 55400c7ba0..5083ce1f49 100644 --- a/aries_cloudagent/askar/didcomm/v1.py +++ b/aries_cloudagent/askar/didcomm/v1.py @@ -121,9 +121,9 @@ def _extract_payload_key(sender_cek: dict, recip_secret: Key) -> Tuple[bytes, st recip_x = recip_secret.convert_key(KeyAlg.X25519) if sender_cek["nonce"] and sender_cek["sender"]: - sender_vk = crypto_box.crypto_box_seal_open( - recip_x, sender_cek["sender"] - ).decode("utf-8") + sender_vk = crypto_box.crypto_box_seal_open(recip_x, sender_cek["sender"]).decode( + "utf-8" + ) sender_x = Key.from_public_bytes( KeyAlg.ED25519, b58_to_bytes(sender_vk) ).convert_key(KeyAlg.X25519) diff --git a/aries_cloudagent/askar/didcomm/v2.py b/aries_cloudagent/askar/didcomm/v2.py index 7d89a3ec9c..c00e0e8ac0 100644 --- a/aries_cloudagent/askar/didcomm/v2.py +++ b/aries_cloudagent/askar/didcomm/v2.py @@ -200,9 +200,7 @@ def ecdh_1pu_decrypt( enc_alg = wrapper.protected.get("enc") if enc_alg not in ("A128CBC-HS256", "A256CBC-HS512"): - raise DidcommEnvelopeError( - f"Unsupported ECDH-1PU content encryption: {enc_alg}" - ) + raise DidcommEnvelopeError(f"Unsupported ECDH-1PU content encryption: {enc_alg}") recip = wrapper.get_recipient(recip_kid) if not recip: diff --git a/aries_cloudagent/askar/profile.py b/aries_cloudagent/askar/profile.py index d4d7ec0fc0..f5379bb08e 100644 --- a/aries_cloudagent/askar/profile.py +++ b/aries_cloudagent/askar/profile.py @@ -39,7 +39,7 @@ def __init__( opened: AskarOpenStore, context: InjectionContext = None, *, - profile_id: str = None + profile_id: str = None, ): """Create a new AskarProfile instance.""" super().__init__(context=context, name=opened.name, created=opened.created) @@ -105,9 +105,7 @@ def bind_providers(self): injector.bind_provider( BaseStorageSearch, - ClassProvider( - "aries_cloudagent.storage.askar.AskarStorageSearch", ref(self) - ), + ClassProvider("aries_cloudagent.storage.askar.AskarStorageSearch", ref(self)), ) injector.bind_provider( @@ -208,7 +206,7 @@ def __init__( is_txn: bool, *, context: InjectionContext = None, - settings: Mapping[str, Any] = None + settings: Mapping[str, Any] = None, ): """Create a new IndySdkProfileSession instance.""" super().__init__(profile=profile, context=context, settings=settings) diff --git a/aries_cloudagent/askar/profile_anon.py b/aries_cloudagent/askar/profile_anon.py index 3e1ce59e47..8f881c514f 100644 --- a/aries_cloudagent/askar/profile_anon.py +++ b/aries_cloudagent/askar/profile_anon.py @@ -42,7 +42,7 @@ def __init__( opened: AskarOpenStore, 
context: InjectionContext = None, *, - profile_id: str = None + profile_id: str = None, ): """Create a new AskarProfile instance.""" super().__init__(context=context, name=opened.name, created=opened.created) @@ -96,9 +96,7 @@ def bind_providers(self): injector.bind_provider( BaseStorageSearch, - ClassProvider( - "aries_cloudagent.storage.askar.AskarStorageSearch", ref(self) - ), + ClassProvider("aries_cloudagent.storage.askar.AskarStorageSearch", ref(self)), ) injector.bind_provider( VCHolder, @@ -162,9 +160,7 @@ def bind_providers(self): BaseLedger, ClassProvider(IndyVdrLedger, self.ledger_pool, ref(self)) ) - def session( - self, context: InjectionContext = None - ) -> "AskarAnoncredsProfileSession": + def session(self, context: InjectionContext = None) -> "AskarAnoncredsProfileSession": """Start a new interactive session with no transaction support requested.""" return AskarAnoncredsProfileSession(self, False, context=context) @@ -194,7 +190,7 @@ def __init__( is_txn: bool, *, context: InjectionContext = None, - settings: Mapping[str, Any] = None + settings: Mapping[str, Any] = None, ): """Create a new AskarAnoncredsProfileSession instance.""" super().__init__(profile=profile, context=context, settings=settings) diff --git a/aries_cloudagent/commands/provision.py b/aries_cloudagent/commands/provision.py index 3c002dc143..bfc64c2bb8 100644 --- a/aries_cloudagent/commands/provision.py +++ b/aries_cloudagent/commands/provision.py @@ -29,9 +29,7 @@ class ProvisionError(BaseError): def init_argument_parser(parser: ArgumentParser): """Initialize an argument parser with the module's arguments.""" - return arg.load_argument_groups( - parser, *arg.group.get_registered(arg.CAT_PROVISION) - ) + return arg.load_argument_groups(parser, *arg.group.get_registered(arg.CAT_PROVISION)) async def provision(settings: dict): diff --git a/aries_cloudagent/commands/tests/test_provision.py b/aries_cloudagent/commands/tests/test_provision.py index 9afd7cde98..73b9e35630 100644 --- a/aries_cloudagent/commands/tests/test_provision.py +++ b/aries_cloudagent/commands/tests/test_provision.py @@ -54,9 +54,7 @@ async def test_provision_should_store_provided_mediation_invite(self): # given mediation_invite = "test-invite" - with mock.patch.object( - test_module.MediationInviteStore, "store" - ) as invite_store: + with mock.patch.object(test_module.MediationInviteStore, "store") as invite_store: # when await test_module.provision({"mediation.invite": mediation_invite}) diff --git a/aries_cloudagent/commands/tests/test_upgrade.py b/aries_cloudagent/commands/tests/test_upgrade.py index 918b6cec4a..a6506cd497 100644 --- a/aries_cloudagent/commands/tests/test_upgrade.py +++ b/aries_cloudagent/commands/tests/test_upgrade.py @@ -65,9 +65,7 @@ async def test_upgrade_storage_from_version_included(self): ConnRecord, "query", mock.CoroutineMock(return_value=[ConnRecord()]), - ), mock.patch.object( - ConnRecord, "save", mock.CoroutineMock() - ): + ), mock.patch.object(ConnRecord, "save", mock.CoroutineMock()): await test_module.upgrade( settings={ "upgrade.config_path": "./aries_cloudagent/commands/default_version_upgrade_config.yml", @@ -89,9 +87,7 @@ async def test_upgrade_storage_missing_from_version(self): ConnRecord, "query", mock.CoroutineMock(return_value=[ConnRecord()]), - ), mock.patch.object( - ConnRecord, "save", mock.CoroutineMock() - ): + ), mock.patch.object(ConnRecord, "save", mock.CoroutineMock()): await test_module.upgrade(settings={}) async def test_upgrade_from_version(self): @@ -279,9 +275,7 @@ async def 
test_upgrade_missing_from_version(self): ConnRecord, "query", mock.CoroutineMock(return_value=[ConnRecord()]), - ), mock.patch.object( - ConnRecord, "save", mock.CoroutineMock() - ): + ), mock.patch.object(ConnRecord, "save", mock.CoroutineMock()): with self.assertRaises(UpgradeError) as ctx: await test_module.upgrade( settings={ @@ -378,9 +372,7 @@ async def test_execute(self): ConnRecord, "query", mock.CoroutineMock(return_value=[ConnRecord()]), - ), mock.patch.object( - ConnRecord, "save", mock.CoroutineMock() - ), mock.patch.object( + ), mock.patch.object(ConnRecord, "save", mock.CoroutineMock()), mock.patch.object( asyncio, "get_event_loop", mock.MagicMock() ) as mock_get_event_loop, mock.patch.object( # Normally, this would be a CoroutingMock. However, the coroutine @@ -684,9 +676,7 @@ async def test_upgrade_explicit_check(self): ): with self.assertRaises(UpgradeError) as ctx: await test_module.upgrade(profile=self.profile) - assert "Explicit upgrade flag with critical value found" in str( - ctx.exception - ) + assert "Explicit upgrade flag with critical value found" in str(ctx.exception) with mock.patch.object( test_module.yaml, @@ -714,9 +704,7 @@ async def test_upgrade_explicit_check(self): ): with self.assertRaises(UpgradeError) as ctx: await test_module.upgrade(profile=self.profile) - assert "Explicit upgrade flag with critical value found" in str( - ctx.exception - ) + assert "Explicit upgrade flag with critical value found" in str(ctx.exception) with mock.patch.object( test_module, "LOGGER", mock.MagicMock() diff --git a/aries_cloudagent/commands/upgrade.py b/aries_cloudagent/commands/upgrade.py index 855fcb668f..6a7272c50c 100644 --- a/aries_cloudagent/commands/upgrade.py +++ b/aries_cloudagent/commands/upgrade.py @@ -89,15 +89,13 @@ def setup_version_upgrade_config(self, path: str): tagged_config_dict[config_id] = {} if "resave_records" in provided_config: if provided_config.get("resave_records").get("base_record_path"): - recs_list = recs_list + provided_config.get( - "resave_records" - ).get("base_record_path") - if provided_config.get("resave_records").get( - "base_exch_record_path" - ): - recs_list = recs_list + provided_config.get( - "resave_records" - ).get("base_exch_record_path") + recs_list = recs_list + provided_config.get("resave_records").get( + "base_record_path" + ) + if provided_config.get("resave_records").get("base_exch_record_path"): + recs_list = recs_list + provided_config.get("resave_records").get( + "base_exch_record_path" + ) tagged_config_dict[config_id]["resave_records"] = recs_list config_key_set = set(provided_config.keys()) try: diff --git a/aries_cloudagent/config/default_context.py b/aries_cloudagent/config/default_context.py index a67292ec00..aa0b96c867 100644 --- a/aries_cloudagent/config/default_context.py +++ b/aries_cloudagent/config/default_context.py @@ -118,9 +118,7 @@ async def bind_providers(self, context: InjectionContext): # Allow action menu to be provided by driver context.injector.bind_instance(BaseMenuService, DriverMenuService(context)) - context.injector.bind_instance( - BaseIntroductionService, DemoIntroductionService() - ) + context.injector.bind_instance(BaseIntroductionService, DemoIntroductionService()) async def load_plugins(self, context: InjectionContext): """Set up plugin registry and load plugins.""" diff --git a/aries_cloudagent/config/injection_context.py b/aries_cloudagent/config/injection_context.py index 0ba68d63a8..87cbf37513 100644 --- a/aries_cloudagent/config/injection_context.py +++ 
b/aries_cloudagent/config/injection_context.py @@ -24,7 +24,7 @@ def __init__( self, *, settings: Optional[Mapping[str, object]] = None, - enforce_typing: bool = True + enforce_typing: bool = True, ): """Initialize a `ServiceConfig`.""" self._injector = Injector(settings, enforce_typing=enforce_typing) diff --git a/aries_cloudagent/config/ledger.py b/aries_cloudagent/config/ledger.py index 2662471666..0882f79787 100644 --- a/aries_cloudagent/config/ledger.py +++ b/aries_cloudagent/config/ledger.py @@ -47,9 +47,7 @@ async def get_genesis_transactions(settings: Settings) -> str: elif settings.get("ledger.genesis_file"): try: genesis_path = settings["ledger.genesis_file"] - LOGGER.info( - "Reading ledger genesis transactions from: %s", genesis_path - ) + LOGGER.info("Reading ledger genesis transactions from: %s", genesis_path) with open(genesis_path, "r") as genesis_file: txns = genesis_file.read() except IOError as e: @@ -81,9 +79,7 @@ async def load_multiple_genesis_transactions_from_config(settings: Settings): with open(genesis_path, "r") as genesis_file: txns = genesis_file.read() except IOError as e: - raise ConfigError( - "Error reading ledger genesis transactions" - ) from e + raise ConfigError("Error reading ledger genesis transactions") from e is_write_ledger = ( False if config.get("is_write") is None else config.get("is_write") ) diff --git a/aries_cloudagent/config/logging.py b/aries_cloudagent/config/logging.py index 7cb55ec829..9d74b0b0a7 100644 --- a/aries_cloudagent/config/logging.py +++ b/aries_cloudagent/config/logging.py @@ -207,9 +207,7 @@ def _configure_logging(cls, log_config_path, log_level, log_file): # Set custom file handler if log_file: - logging.root.handlers.append( - logging.FileHandler(log_file, encoding="utf-8") - ) + logging.root.handlers.append(logging.FileHandler(log_file, encoding="utf-8")) # Set custom log level if log_level: diff --git a/aries_cloudagent/config/provider.py b/aries_cloudagent/config/provider.py index 17a4e22b70..de6e178070 100644 --- a/aries_cloudagent/config/provider.py +++ b/aries_cloudagent/config/provider.py @@ -43,7 +43,7 @@ def __init__( instance_cls: Union[str, type], *ctor_args, init_method: Optional[str] = None, - **ctor_kwargs + **ctor_kwargs, ): """Initialize the class provider.""" self._ctor_args = ctor_args @@ -113,7 +113,7 @@ def __init__( provider: BaseProvider, methods: Sequence[str], *, - ignore_missing: bool = True + ignore_missing: bool = True, ): """Initialize the statistics provider instance.""" if not provider: diff --git a/aries_cloudagent/config/tests/test_argparse.py b/aries_cloudagent/config/tests/test_argparse.py index 5003975170..3698329143 100644 --- a/aries_cloudagent/config/tests/test_argparse.py +++ b/aries_cloudagent/config/tests/test_argparse.py @@ -13,9 +13,7 @@ async def test_groups(self): parser = argparse.create_argument_parser() groups = ( - g - for g in argparse.group.get_registered() - if g is not argparse.TransportGroup + g for g in argparse.group.get_registered() if g is not argparse.TransportGroup ) argparse.load_argument_groups(parser, *groups) @@ -628,8 +626,6 @@ def test_universal_resolver(self): assert supported_regex assert supported_regex == ["regex"] - result = parser.parse_args( - ["-e", "test", "--universal-resolver-regex", "regex"] - ) + result = parser.parse_args(["-e", "test", "--universal-resolver-regex", "regex"]) with self.assertRaises(argparse.ArgsParseError): group.get_settings(result) diff --git a/aries_cloudagent/config/tests/test_ledger.py 
b/aries_cloudagent/config/tests/test_ledger.py index 4cd0a3e4b2..6af560d148 100644 --- a/aries_cloudagent/config/tests/test_ledger.py +++ b/aries_cloudagent/config/tests/test_ledger.py @@ -16,9 +16,7 @@ class TestLedgerConfig(IsolatedAsyncioTestCase): async def test_fetch_genesis_transactions(self): - with mock.patch.object( - test_module, "fetch", mock.CoroutineMock() - ) as mock_fetch: + with mock.patch.object(test_module, "fetch", mock.CoroutineMock()) as mock_fetch: await test_module.fetch_genesis_transactions("http://1.2.3.4:9000/genesis") async def test_fetch_genesis_transactions_x(self): @@ -77,9 +75,7 @@ async def test_ledger_config_no_taa_accept(self): get_txn_author_agreement=mock.CoroutineMock( return_value={ "taa_required": True, - "taa_record": { - "digest": b"ffffffffffffffffffffffffffffffffffffffff" - }, + "taa_record": {"digest": b"ffffffffffffffffffffffffffffffffffffffff"}, } ), get_latest_txn_author_acceptance=mock.CoroutineMock( @@ -101,9 +97,7 @@ async def _get_session(): test_module, "accept_taa", mock.CoroutineMock() ) as mock_accept_taa: mock_accept_taa.return_value = False - assert not await test_module.ledger_config( - profile, TEST_DID, provision=True - ) + assert not await test_module.ledger_config(profile, TEST_DID, provision=True) async def test_accept_taa(self): settings = { @@ -113,9 +107,7 @@ async def test_accept_taa(self): get_txn_author_agreement=mock.CoroutineMock( return_value={ "taa_required": True, - "taa_record": { - "digest": b"ffffffffffffffffffffffffffffffffffffffff" - }, + "taa_record": {"digest": b"ffffffffffffffffffffffffffffffffffffffff"}, } ), get_latest_txn_author_acceptance=mock.CoroutineMock( @@ -507,9 +499,7 @@ async def test_load_multiple_genesis_transactions_config_error_a(self): ) ) with self.assertRaises(test_module.ConfigError) as cm: - await test_module.load_multiple_genesis_transactions_from_config( - settings - ) + await test_module.load_multiple_genesis_transactions_from_config(settings) assert "No is_write ledger set" in str(cm.exception) async def test_load_multiple_genesis_transactions_multiple_write(self): @@ -625,9 +615,7 @@ async def test_load_multiple_genesis_transactions_from_config_io_x(self): ) as mock_fetch, mock.patch("builtins.open", mock.MagicMock()) as mock_open: mock_open.side_effect = IOError("no read permission") with self.assertRaises(test_module.ConfigError): - await test_module.load_multiple_genesis_transactions_from_config( - settings - ) + await test_module.load_multiple_genesis_transactions_from_config(settings) @mock.patch("sys.stdout") async def test_ledger_accept_taa_not_tty_not_accept_config(self, mock_stdout): @@ -678,9 +666,7 @@ async def test_ledger_accept_taa_tty(self, mock_stdout): ) as mock_use_aio_loop, mock.patch.object( test_module.prompt_toolkit, "prompt", mock.CoroutineMock() ) as mock_prompt: - mock_ledger = mock.MagicMock( - accept_txn_author_agreement=mock.CoroutineMock() - ) + mock_ledger = mock.MagicMock(accept_txn_author_agreement=mock.CoroutineMock()) mock_prompt.return_value = "" assert await test_module.accept_taa( mock_ledger, mock_profile, taa_info, provision=False diff --git a/aries_cloudagent/config/tests/test_settings.py b/aries_cloudagent/config/tests/test_settings.py index c5d2c21756..bad58f38f2 100644 --- a/aries_cloudagent/config/tests/test_settings.py +++ b/aries_cloudagent/config/tests/test_settings.py @@ -20,9 +20,7 @@ def test_settings_init(self): for key in self.test_settings: assert key in self.test_instance assert self.test_instance[key] == self.test_settings[key] - 
assert ( - self.test_instance.get_value(self.test_key) == self.test_settings[key] - ) + assert self.test_instance.get_value(self.test_key) == self.test_settings[key] with self.assertRaises(KeyError): self.test_instance["MISSING"] assert len(self.test_instance) == 1 diff --git a/aries_cloudagent/config/util.py b/aries_cloudagent/config/util.py index 8ac52ff994..931bc8036b 100644 --- a/aries_cloudagent/config/util.py +++ b/aries_cloudagent/config/util.py @@ -45,9 +45,7 @@ def __call__(self, arg: str) -> int: f"Value must be greater than or equal to {self.min_val}" ) if self.max_val is not None and val > self.max_val: - raise ArgumentTypeError( - f"Value must be less than or equal to {self.max_val}" - ) + raise ArgumentTypeError(f"Value must be less than or equal to {self.max_val}") return val def __repr__(self): diff --git a/aries_cloudagent/connections/models/diddoc/diddoc.py b/aries_cloudagent/connections/models/diddoc/diddoc.py index 855bddcea4..170da4ec9c 100644 --- a/aries_cloudagent/connections/models/diddoc/diddoc.py +++ b/aries_cloudagent/connections/models/diddoc/diddoc.py @@ -305,9 +305,7 @@ def deserialize(cls, did_doc: dict) -> "DIDDoc": rv.did, service.get( "id", - canon_ref( - rv.did, "assigned-service-{}".format(len(rv.service)), ";" - ), + canon_ref(rv.did, "assigned-service-{}".format(len(rv.service)), ";"), ), service["type"], rv.add_service_pubkeys(service, "recipientKeys"), diff --git a/aries_cloudagent/connections/models/diddoc/service.py b/aries_cloudagent/connections/models/diddoc/service.py index 330d08782d..919e1e126d 100644 --- a/aries_cloudagent/connections/models/diddoc/service.py +++ b/aries_cloudagent/connections/models/diddoc/service.py @@ -64,7 +64,9 @@ def __init__( self._recip_keys = ( [recip_keys] if isinstance(recip_keys, PublicKey) - else list(recip_keys) if recip_keys else None + else list(recip_keys) + if recip_keys + else None ) self._routing_keys = routing_keys or [] self._endpoint = endpoint diff --git a/aries_cloudagent/connections/models/diddoc/tests/test_diddoc.py b/aries_cloudagent/connections/models/diddoc/tests/test_diddoc.py index 9b8e8d3d6b..f5c192909f 100644 --- a/aries_cloudagent/connections/models/diddoc/tests/test_diddoc.py +++ b/aries_cloudagent/connections/models/diddoc/tests/test_diddoc.py @@ -80,8 +80,7 @@ async def test_basic(self): ) assert set(dd_copy.authnkey) == set(dd.authnkey) assert all( - dd_copy.pubkey[k].to_dict() == dd.pubkey[k].to_dict() - for k in dd_copy.pubkey + dd_copy.pubkey[k].to_dict() == dd.pubkey[k].to_dict() for k in dd_copy.pubkey ) assert set(dd_copy.pubkey) == set(dd.pubkey) assert all( @@ -323,9 +322,7 @@ def test_minimal_explicit(self): assert len(dd.authnkey) == 0 assert {s.priority for s in dd.service.values()} == {0, 1, 2} assert len(dd.service) == 3 - assert all( - len(dd.service[k].to_dict()["recipientKeys"]) == 1 for k in dd.service - ) + assert all(len(dd.service[k].to_dict()["recipientKeys"]) == 1 for k in dd.service) assert ( "routingKeys" not in dd.service["did:sov:LjgpST2rjsoxYegQDRm7EL;indy"].to_dict() @@ -355,9 +352,7 @@ def test_minimal_explicit(self): assert canon_ref(dd.did, "99", "#") in dd.pubkey assert len(dd.authnkey) == 1 - service = Service( - dd.did, "abc", "IndyAgent", [pk], [pk], "http://www.abc.ca/123" - ) + service = Service(dd.did, "abc", "IndyAgent", [pk], [pk], "http://www.abc.ca/123") dd.set(service) assert len(dd.service) == 4 assert canon_ref(dd.did, "abc", ";") in dd.service @@ -454,9 +449,7 @@ def test_canon_did(self): with self.assertRaises(ValueError): 
canon_ref(valid_did, ref="did:sov:not-a-DID", delimiter="#") - urlref = ( - "https://www.clafouti-quasar.ca:8443/supply-management/fruit/index.html" - ) + urlref = "https://www.clafouti-quasar.ca:8443/supply-management/fruit/index.html" assert canon_ref(valid_did, ref=urlref) == urlref # print('\n\n== 13 == Reference canonicalization operates as expected') diff --git a/aries_cloudagent/connections/tests/test_base_manager.py b/aries_cloudagent/connections/tests/test_base_manager.py index 21f5615b69..bcc7f0d4d0 100644 --- a/aries_cloudagent/connections/tests/test_base_manager.py +++ b/aries_cloudagent/connections/tests/test_base_manager.py @@ -111,9 +111,7 @@ async def asyncSetUp(self): self.context = self.profile.context self.multitenant_mgr = mock.MagicMock(MultitenantManager, autospec=True) - self.context.injector.bind_instance( - BaseMultitenantManager, self.multitenant_mgr - ) + self.context.injector.bind_instance(BaseMultitenantManager, self.multitenant_mgr) self.test_mediator_routing_keys = [ "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" @@ -565,9 +563,7 @@ async def test_fetch_connection_targets_conn_invitation_supports_Ed25519Verifica vmethod = builder.verification_method.add( Ed25519VerificationKey2020, public_key_multibase=multibase.encode( - multicodec.wrap( - "ed25519-pub", b58_to_bytes(self.test_target_verkey) - ), + multicodec.wrap("ed25519-pub", b58_to_bytes(self.test_target_verkey)), "base58btc", ), ) @@ -969,9 +965,7 @@ async def test_verification_methods_for_service(self): self.manager.resolve_didcomm_services = mock.CoroutineMock( return_value=(doc, doc.service) ) - recip, routing = await self.manager.verification_methods_for_service( - doc, service - ) + recip, routing = await self.manager.verification_methods_for_service(doc, service) assert recip == [vm] assert routing diff --git a/aries_cloudagent/core/dispatcher.py b/aries_cloudagent/core/dispatcher.py index bb7c0adc45..517869307e 100644 --- a/aries_cloudagent/core/dispatcher.py +++ b/aries_cloudagent/core/dispatcher.py @@ -187,9 +187,7 @@ async def handle_v1_message( except ProblemReportParseError: pass # avoid problem report recursion except MessageParseError as e: - self.logger.error( - f"Message parsing failed: {str(e)}, sending problem report" - ) + self.logger.error(f"Message parsing failed: {str(e)}, sending problem report") error_result = ProblemReport( description={ "en": str(e), diff --git a/aries_cloudagent/core/in_memory/didcomm/tests/test_1pu.py b/aries_cloudagent/core/in_memory/didcomm/tests/test_1pu.py index 7923c9d6df..b6b70d5cc8 100644 --- a/aries_cloudagent/core/in_memory/didcomm/tests/test_1pu.py +++ b/aries_cloudagent/core/in_memory/didcomm/tests/test_1pu.py @@ -33,9 +33,7 @@ def test_1pu_hex_example(): ) print("Bob 1PU key: ", bobKey.hex()) - assert ( - aliceKey == bobKey - ), "Both parties should generate the same key using ECDH-1PU" + assert aliceKey == bobKey, "Both parties should generate the same key using ECDH-1PU" # Example key exchange in https://tools.ietf.org/id/draft-madden-jose-ecdh-1pu-03.html#rfc.appendix.A @@ -48,9 +46,7 @@ def test_1pu_appendix_example(): y = "y77t-RvAHRKTsSGdIYUfweuOvwrvDD-Q3Hv5J0fSKbE" aliceSecretKey = b64_to_bytes(d, urlsafe=True).hex() - alicePublicKey = ( - b64_to_bytes(x, urlsafe=True) + b64_to_bytes(y, urlsafe=True) - ).hex() + alicePublicKey = (b64_to_bytes(x, urlsafe=True) + b64_to_bytes(y, urlsafe=True)).hex() # _______________________________________________________________________________ @@ 
-95,9 +91,7 @@ def test_1pu_appendix_example(): "6caf13723d14850ad4b42cd6dde935bffd2fff00a9ba70de05c203a5e1722ca7" ) - assert ( - aliceKey == bobKey - ), "Both parties should generate the same key using ECDH-1PU" + assert aliceKey == bobKey, "Both parties should generate the same key using ECDH-1PU" assert ( aliceKey == expected_result ), "Generated key should match the appendix A example" diff --git a/aries_cloudagent/core/in_memory/profile.py b/aries_cloudagent/core/in_memory/profile.py index 7473f57e3d..50c2e1fb39 100644 --- a/aries_cloudagent/core/in_memory/profile.py +++ b/aries_cloudagent/core/in_memory/profile.py @@ -32,7 +32,7 @@ def __init__( *, context: InjectionContext = None, name: str = None, - profile_class: Any = None + profile_class: Any = None, ): """Create a new InMemoryProfile instance.""" super().__init__(context=context, name=name, created=True) @@ -127,7 +127,7 @@ def __init__( profile: Profile, *, context: InjectionContext = None, - settings: Mapping[str, Any] = None + settings: Mapping[str, Any] = None, ): """Create a new InMemoryProfileSession instance.""" super().__init__(profile=profile, context=context, settings=settings) diff --git a/aries_cloudagent/core/oob_processor.py b/aries_cloudagent/core/oob_processor.py index 91ade823c6..49a7ff33e6 100644 --- a/aries_cloudagent/core/oob_processor.py +++ b/aries_cloudagent/core/oob_processor.py @@ -34,9 +34,7 @@ class OobMessageProcessor: def __init__( self, - inbound_message_router: Callable[ - [Profile, InboundMessage, Optional[bool]], None - ], + inbound_message_router: Callable[[Profile, InboundMessage, Optional[bool]], None], ) -> None: """Initialize an inbound OOB message processor. @@ -64,10 +62,7 @@ async def clean_finished_oob_record(self, profile: Profile, message: AgentMessag # If the oob record is not multi use and it doesn't contain any # attachments, we can now safely remove the oob record - if ( - not oob_record.multi_use - and not oob_record.invitation.requests_attach - ): + if not oob_record.multi_use and not oob_record.invitation.requests_attach: await oob_record.delete_record(session) except StorageNotFoundError: # It is fine if no oob record is found, Only retrieved for cleanup @@ -261,13 +256,11 @@ async def find_oob_record_for_inbound_message( # Verify the sender key is present in their service in our record # If we don't have the sender verkey stored yet we can allow any key if oob_record.their_service and ( - ( - context.message_receipt.recipient_verkey - and ( - not context.message_receipt.sender_verkey - or context.message_receipt.sender_verkey - not in oob_record.their_service.recipient_keys - ) + context.message_receipt.recipient_verkey + and ( + not context.message_receipt.sender_verkey + or context.message_receipt.sender_verkey + not in oob_record.their_service.recipient_keys ) ): LOGGER.debug( @@ -347,9 +340,7 @@ async def handle_message( if not oob_record.connection_id: oob_record.attach_thread_id = self.get_thread_id(message) if their_service: - LOGGER.debug( - "Storing their service in oob record %s", their_service - ) + LOGGER.debug("Storing their service in oob record %s", their_service) oob_record.their_service = their_service await oob_record.save(session) diff --git a/aries_cloudagent/core/protocol_registry.py b/aries_cloudagent/core/protocol_registry.py index 273233b374..18a1623015 100644 --- a/aries_cloudagent/core/protocol_registry.py +++ b/aries_cloudagent/core/protocol_registry.py @@ -24,9 +24,7 @@ def from_dict(cls, data: dict) -> "VersionDefinition": """Create a version 
definition from a dict.""" return cls( min=MessageVersion(data["major_version"], data["minimum_minor_version"]), - current=MessageVersion( - data["major_version"], data["current_minor_version"] - ), + current=MessageVersion(data["major_version"], data["current_minor_version"]), ) diff --git a/aries_cloudagent/core/tests/test_conductor.py b/aries_cloudagent/core/tests/test_conductor.py index 685e420005..669c472a12 100644 --- a/aries_cloudagent/core/tests/test_conductor.py +++ b/aries_cloudagent/core/tests/test_conductor.py @@ -1261,9 +1261,7 @@ async def test_set_default_mediator(self): with mock.patch.object( test_module, "MediationManager", - return_value=mock.MagicMock( - set_default_mediator_by_id=mock.CoroutineMock() - ), + return_value=mock.MagicMock(set_default_mediator_by_id=mock.CoroutineMock()), ) as mock_mgr, mock.patch.object( MediationRecord, "retrieve_by_id", mock.CoroutineMock() ), mock.patch.object( @@ -1434,9 +1432,7 @@ def __get_mediator_config( ) @mock.patch.object(test_module.ConnectionInvitation, "from_url") async def test_mediator_invitation_0160(self, mock_from_url, _): - conductor = test_module.Conductor( - self.__get_mediator_config("test-invite", True) - ) + conductor = test_module.Conductor(self.__get_mediator_config("test-invite", True)) with mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr: @@ -1550,9 +1546,7 @@ async def test_mediation_invitation_should_use_stored_invitation( # given invite_string = "test-invite" - conductor = test_module.Conductor( - self.__get_mediator_config(invite_string, True) - ) + conductor = test_module.Conductor(self.__get_mediator_config(invite_string, True)) with mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr: @@ -1608,9 +1602,7 @@ async def test_mediation_invitation_should_not_create_connection_for_old_invitat # given invite_string = "test-invite" - conductor = test_module.Conductor( - self.__get_mediator_config(invite_string, True) - ) + conductor = test_module.Conductor(self.__get_mediator_config(invite_string, True)) with mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr: @@ -1622,9 +1614,7 @@ async def test_mediation_invitation_should_not_create_connection_for_old_invitat invite_store_mock = get_invite_store_mock(invite_string, True) patched_invite_store.return_value = invite_store_mock - connection_manager_mock = mock.MagicMock( - receive_invitation=mock.CoroutineMock() - ) + connection_manager_mock = mock.MagicMock(receive_invitation=mock.CoroutineMock()) patched_connection_manager.return_value = connection_manager_mock with mock.patch.object( BaseStorage, @@ -1654,9 +1644,7 @@ async def test_mediation_invitation_should_not_create_connection_for_old_invitat return_value=get_invite_store_mock("test-invite"), ) async def test_mediator_invitation_x(self, _): - conductor = test_module.Conductor( - self.__get_mediator_config("test-invite", True) - ) + conductor = test_module.Conductor(self.__get_mediator_config("test-invite", True)) with mock.patch.object( test_module, "OutboundTransportManager", autospec=True ) as mock_outbound_mgr: diff --git a/aries_cloudagent/core/tests/test_dispatcher.py b/aries_cloudagent/core/tests/test_dispatcher.py index 5d7ef0ac5f..5e8d1143f0 100644 --- a/aries_cloudagent/core/tests/test_dispatcher.py +++ b/aries_cloudagent/core/tests/test_dispatcher.py @@ -258,9 +258,7 @@ async def test_dispatch_versioned_message_handle_greater_succeeds(self): await 
dispatcher.setup() rcv = Receiver() message = { - "@type": DIDCommPrefix.qualify_current( - StubV1_2AgentMessage.Meta.message_type - ) + "@type": DIDCommPrefix.qualify_current(StubV1_2AgentMessage.Meta.message_type) } with mock.patch.object( diff --git a/aries_cloudagent/core/tests/test_oob_processor.py b/aries_cloudagent/core/tests/test_oob_processor.py index c7e42e2b81..4345e5b761 100644 --- a/aries_cloudagent/core/tests/test_oob_processor.py +++ b/aries_cloudagent/core/tests/test_oob_processor.py @@ -59,9 +59,7 @@ async def test_clean_finished_oob_record_no_multi_use_no_request_attach(self): "retrieve_by_tag_filter", mock.CoroutineMock(return_value=mock_oob), ) as mock_retrieve_oob: - await self.oob_processor.clean_finished_oob_record( - self.profile, test_message - ) + await self.oob_processor.clean_finished_oob_record(self.profile, test_message) assert mock_oob.state == OobRecord.STATE_DONE mock_oob.emit_event.assert_called_once() @@ -87,9 +85,7 @@ async def test_clean_finished_oob_record_multi_use(self): "retrieve_by_tag_filter", mock.CoroutineMock(return_value=mock_oob), ) as mock_retrieve_oob: - await self.oob_processor.clean_finished_oob_record( - self.profile, test_message - ) + await self.oob_processor.clean_finished_oob_record(self.profile, test_message) mock_oob.emit_event.assert_called_once() mock_oob.delete_record.assert_not_called() @@ -109,9 +105,7 @@ async def test_clean_finished_oob_record_x(self): ) as mock_retrieve_oob: mock_retrieve_oob.side_effect = (StorageNotFoundError(),) - await self.oob_processor.clean_finished_oob_record( - self.profile, test_message - ) + await self.oob_processor.clean_finished_oob_record(self.profile, test_message) async def test_find_oob_target_for_outbound_message(self): mock_oob = mock.MagicMock( @@ -150,9 +144,7 @@ async def test_find_oob_target_for_outbound_message(self): assert target.recipient_keys == [ "9WCgWKUaAJj3VWxxtzvvMQN3AoFxoBtBDo9ntwJnVVCC" ] - assert target.routing_keys == [ - "6QSduYdf8Bi6t8PfNm5vNomGWDtXhmMmTRzaciudBXYJ" - ] + assert target.routing_keys == ["6QSduYdf8Bi6t8PfNm5vNomGWDtXhmMmTRzaciudBXYJ"] assert target.sender_key == "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRx" payload = json.loads(outbound.payload) diff --git a/aries_cloudagent/core/tests/test_protocol_registry.py b/aries_cloudagent/core/tests/test_protocol_registry.py index 72c78a2c6f..bbc24c2894 100644 --- a/aries_cloudagent/core/tests/test_protocol_registry.py +++ b/aries_cloudagent/core/tests/test_protocol_registry.py @@ -24,9 +24,7 @@ def test_protocols(self): self.registry.register_message_types( {self.test_message_type: self.test_message_cls} ) - self.registry.register_controllers( - {self.test_message_type: self.test_controller} - ) + self.registry.register_controllers({self.test_message_type: self.test_controller}) assert list(self.registry.message_types) == [self.test_message_type] assert list(self.registry.protocols) == [self.test_protocol] @@ -96,8 +94,7 @@ def test_register_msg_types_for_multiple_minor_versions(self): } self.registry.register_message_types(test_typesets, test_version_def) assert ( - "https://didcom.org/out-of-band/1.0/invitation" - in self.registry.message_types + "https://didcom.org/out-of-band/1.0/invitation" in self.registry.message_types ) assert ( "https://didcom.org/out-of-band/1.0/handshake-reuse" @@ -112,8 +109,7 @@ def test_register_msg_types_for_multiple_minor_versions(self): in self.registry.message_types ) assert ( - "https://didcom.org/out-of-band/1.1/invitation" - in self.registry.message_types + 
"https://didcom.org/out-of-band/1.1/invitation" in self.registry.message_types ) assert ( "https://didcom.org/out-of-band/1.1/handshake-reuse" @@ -194,9 +190,7 @@ def __init__(self, protocol): async def check_access(self, context): return False - with mock.patch.object( - ClassLoader, "load_class", mock.MagicMock() - ) as load_class: + with mock.patch.object(ClassLoader, "load_class", mock.MagicMock()) as load_class: load_class.return_value = Mockery published = await self.registry.prepare_disclosed(ctx, protocols) assert not published diff --git a/aries_cloudagent/did/did_key.py b/aries_cloudagent/did/did_key.py index cd98ad50d3..872c17d6e8 100644 --- a/aries_cloudagent/did/did_key.py +++ b/aries_cloudagent/did/did_key.py @@ -277,9 +277,7 @@ def construct_did_key_ed25519(did_key: "DIDKey") -> dict: return did_doc -def construct_did_signature_key_base( - *, id: str, key_id: str, verification_method: dict -): +def construct_did_signature_key_base(*, id: str, key_id: str, verification_method: dict): """Create base did key structure to use for most signature keys. May not be suitable for all did key types diff --git a/aries_cloudagent/did/tests/test_did_key_ed25519.py b/aries_cloudagent/did/tests/test_did_key_ed25519.py index 53c2eb8bf2..c5063719bc 100644 --- a/aries_cloudagent/did/tests/test_did_key_ed25519.py +++ b/aries_cloudagent/did/tests/test_did_key_ed25519.py @@ -9,9 +9,7 @@ TEST_ED25519_FINGERPRINT = "z6MkmjY8GnV5i9YTDtPETC2uUAW6ejw3nk5mXF5yci5ab7th" TEST_ED25519_DID = f"did:key:{TEST_ED25519_FINGERPRINT}" TEST_ED25519_KEY_ID = f"{TEST_ED25519_DID}#{TEST_ED25519_FINGERPRINT}" -TEST_ED25519_PREFIX_BYTES = b"".join( - [b"\xed\x01", b58_to_bytes(TEST_ED25519_BASE58_KEY)] -) +TEST_ED25519_PREFIX_BYTES = b"".join([b"\xed\x01", b58_to_bytes(TEST_ED25519_BASE58_KEY)]) class TestDIDKey(TestCase): diff --git a/aries_cloudagent/didcomm_v2/tests/test_adapters.py b/aries_cloudagent/didcomm_v2/tests/test_adapters.py index f5575caced..4cee64c603 100644 --- a/aries_cloudagent/didcomm_v2/tests/test_adapters.py +++ b/aries_cloudagent/didcomm_v2/tests/test_adapters.py @@ -57,12 +57,10 @@ class TestAdapters(IsolatedAsyncioTestCase): res_adapter = ResolverAdapter(profile=profile, resolver=resolver) async def test_resolver_adapter_resolve_did(self): - doc = await self.res_adapter.resolve(self.test_did) assert doc["did"] == self.test_did async def test_resolver_adapter_is_resolvable(self): - valid = await self.res_adapter.is_resolvable(self.test_did) assert valid @@ -70,7 +68,6 @@ async def test_resolver_adapter_is_resolvable(self): assert not invalid async def test_secrets_adapter_errors(self): - sec_adapter = SecretsAdapter(session=MagicMock()) with self.assertRaises(SecretsAdapterError) as ctx: await sec_adapter.get_secret_by_kid("kid") @@ -104,7 +101,6 @@ async def test_secrets_adapter_empty(self): assert not await sec_adapter.get_secret_by_kid("kid") async def test_secrets_adapter_valid_return(self): - store = MagicMock() askar_profile = AskarProfile(opened=store) session: AskarProfileSession = askar_profile.session() diff --git a/aries_cloudagent/holder/routes.py b/aries_cloudagent/holder/routes.py index 4d85b4323a..37c1d5e1e7 100644 --- a/aries_cloudagent/holder/routes.py +++ b/aries_cloudagent/holder/routes.py @@ -462,9 +462,7 @@ async def w3c_cred_remove(request: web.BaseRequest): vc_record = await holder.retrieve_credential_by_id(credential_id) await holder.delete_credential(vc_record) topic = "acapy::record::w3c_credential::delete" - await session.profile.notify( - topic, {"id": credential_id, 
"state": "deleted"} - ) + await session.profile.notify(topic, {"id": credential_id, "state": "deleted"}) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except StorageError as err: diff --git a/aries_cloudagent/holder/tests/test_routes.py b/aries_cloudagent/holder/tests/test_routes.py index 4a25c67218..2a4834f877 100644 --- a/aries_cloudagent/holder/tests/test_routes.py +++ b/aries_cloudagent/holder/tests/test_routes.py @@ -81,9 +81,7 @@ async def test_credentials_get_with_anoncreds(self, mock_get_credential): }, profile_class=AskarAnoncredsProfile, ) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.request_dict = { "context": self.context, } @@ -150,9 +148,7 @@ async def test_credentials_revoked_with_anoncreds(self, mock_credential_revoked) }, profile_class=AskarAnoncredsProfile, ) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.request_dict = { "context": self.context, } @@ -235,9 +231,7 @@ async def test_attribute_mime_types_get(self): with mock.patch.object(test_module.web, "json_response") as mock_response: await test_module.credentials_attr_mime_types_get(self.request) - mock_response.assert_called_once_with( - {"results": {"a": "application/jpeg"}} - ) + mock_response.assert_called_once_with({"results": {"a": "application/jpeg"}}) @mock.patch.object(AnonCredsHolder, "get_mime_type") async def test_attribute_mime_types_get_with_anoncreds(self, mock_get_mime_type): @@ -249,9 +243,7 @@ async def test_attribute_mime_types_get_with_anoncreds(self, mock_get_mime_type) }, profile_class=AskarAnoncredsProfile, ) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.request_dict = { "context": self.context, } @@ -275,9 +267,7 @@ async def test_attribute_mime_types_get_with_anoncreds(self, mock_get_mime_type) with mock.patch.object(test_module.web, "json_response") as mock_response: await test_module.credentials_attr_mime_types_get(self.request) - mock_response.assert_called_once_with( - {"results": {"a": "application/jpeg"}} - ) + mock_response.assert_called_once_with({"results": {"a": "application/jpeg"}}) assert mock_get_mime_type.called async def test_credentials_remove(self): @@ -304,9 +294,7 @@ async def test_credentials_remove_with_anoncreds(self, mock_delete_credential): }, profile_class=AskarAnoncredsProfile, ) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.request_dict = { "context": self.context, } @@ -324,12 +312,8 @@ async def test_credentials_remove_with_anoncreds(self, mock_delete_credential): mock_delete_credential.side_effect = [ None, - AnonCredsHolderError( - "anoncreds error", error_code=AskarErrorCode.NOT_FOUND - ), - AnonCredsHolderError( - "anoncreds error", error_code=AskarErrorCode.UNEXPECTED - ), + AnonCredsHolderError("anoncreds error", error_code=AskarErrorCode.NOT_FOUND), + AnonCredsHolderError("anoncreds error", error_code=AskarErrorCode.UNEXPECTED), ] with mock.patch.object( @@ -384,9 +368,7 @@ async def test_credentials_list_with_anoncreds(self, mock_get_credentials): }, profile_class=AskarAnoncredsProfile, 
) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.request_dict = { "context": self.context, } diff --git a/aries_cloudagent/indy/credx/holder.py b/aries_cloudagent/indy/credx/holder.py index b730a62b62..b09f01ca86 100644 --- a/aries_cloudagent/indy/credx/holder.py +++ b/aries_cloudagent/indy/credx/holder.py @@ -331,9 +331,7 @@ async def get_credentials_for_presentation_request_by_referent( if row.name in creds: creds[row.name]["presentation_referents"].add(reft) else: - cred_info = _make_cred_info( - row.name, Credential.load(row.raw_value) - ) + cred_info = _make_cred_info(row.name, Credential.load(row.raw_value)) creds[row.name] = { "cred_info": cred_info, "interval": presentation_request.get("non_revoked"), diff --git a/aries_cloudagent/indy/credx/issuer.py b/aries_cloudagent/indy/credx/issuer.py index cd0ff3343b..fc419e72c2 100644 --- a/aries_cloudagent/indy/credx/issuer.py +++ b/aries_cloudagent/indy/credx/issuer.py @@ -197,9 +197,7 @@ async def create_credential_offer(self, credential_definition_id: str) -> str: except AskarError as err: raise IndyIssuerError("Error retrieving credential definition") from err if not cred_def or not key_proof: - raise IndyIssuerError( - "Credential definition not found for credential offer" - ) + raise IndyIssuerError("Credential definition not found for credential offer") try: # The tag holds the full name of the schema, # as opposed to just the sequence number @@ -288,9 +286,7 @@ async def create_credential( if not rev_reg_info: raise IndyIssuerError("Revocation registry metadata not found") if not rev_reg_def: - raise IndyIssuerError( - "Revocation registry definition not found" - ) + raise IndyIssuerError("Revocation registry definition not found") if not rev_key: raise IndyIssuerError( "Revocation registry definition private data not found" @@ -320,9 +316,7 @@ async def create_credential( ) await txn.commit() except AskarError as err: - raise IndyIssuerError( - "Error updating revocation registry index" - ) from err + raise IndyIssuerError("Error updating revocation registry index") from err revoc = CredentialRevocationConfig( rev_reg_def, @@ -388,9 +382,7 @@ async def revoke_credentials( raise IndyIssuerError("Repeated conflict attempting to update registry") try: async with self._profile.session() as session: - cred_def = await session.handle.fetch( - CATEGORY_CRED_DEF, cred_def_id - ) + cred_def = await session.handle.fetch(CATEGORY_CRED_DEF, cred_def_id) rev_reg_def = await session.handle.fetch( CATEGORY_REV_REG_DEF, revoc_reg_id ) @@ -551,9 +543,7 @@ def update(d1, d2): delta.update_with(d2) return delta.to_json() except CredxError as err: - raise IndyIssuerError( - "Error merging revocation registry deltas" - ) from err + raise IndyIssuerError("Error merging revocation registry deltas") from err return await asyncio.get_event_loop().run_in_executor( None, update, fro_delta, to_delta diff --git a/aries_cloudagent/indy/credx/tests/test_cred_issuance.py b/aries_cloudagent/indy/credx/tests/test_cred_issuance.py index 4ef865101f..6ffe5c964c 100644 --- a/aries_cloudagent/indy/credx/tests/test_cred_issuance.py +++ b/aries_cloudagent/indy/credx/tests/test_cred_issuance.py @@ -93,8 +93,7 @@ async def asyncSetUp(self): async def test_issue_store_non_rev(self): assert ( - self.issuer.make_schema_id(TEST_DID, SCHEMA_NAME, SCHEMA_VERSION) - == SCHEMA_ID + self.issuer.make_schema_id(TEST_DID, SCHEMA_NAME, SCHEMA_VERSION) == 
SCHEMA_ID ) (s_id, schema_json) = await self.issuer.create_schema( @@ -175,11 +174,7 @@ async def test_issue_store_non_rev(self): pres_json = await self.holder.create_presentation( PRES_REQ_NON_REV, - { - "requested_attributes": { - CRED_REFT: {"cred_id": cred_id, "revealed": True} - } - }, + {"requested_attributes": {CRED_REFT: {"cred_id": cred_id, "revealed": True}}}, {s_id: schema}, {cd_id: cred_def}, rev_states=None, @@ -194,8 +189,7 @@ async def test_issue_store_non_rev(self): async def test_issue_store_rev(self): assert ( - self.issuer.make_schema_id(TEST_DID, SCHEMA_NAME, SCHEMA_VERSION) - == SCHEMA_ID + self.issuer.make_schema_id(TEST_DID, SCHEMA_NAME, SCHEMA_VERSION) == SCHEMA_ID ) (s_id, schema_json) = await self.issuer.create_schema( diff --git a/aries_cloudagent/indy/models/pres_preview.py b/aries_cloudagent/indy/models/pres_preview.py index aeea3b2e77..44ad053061 100644 --- a/aries_cloudagent/indy/models/pres_preview.py +++ b/aries_cloudagent/indy/models/pres_preview.py @@ -415,9 +415,7 @@ def non_revoc(cred_def_id: str) -> IndyNonRevocationInterval: "name": attr_spec.name, **{"restrictions": [{"cred_def_id": cd_id}] for _ in [""] if cd_id}, **{ - "non_revoked": interval.serialize() - for _ in [""] - if revoc_support + "non_revoked": interval.serialize() for _ in [""] if revoc_support }, } diff --git a/aries_cloudagent/indy/models/proof.py b/aries_cloudagent/indy/models/proof.py index 278d009a8c..e74fcd8738 100644 --- a/aries_cloudagent/indy/models/proof.py +++ b/aries_cloudagent/indy/models/proof.py @@ -121,9 +121,7 @@ class Meta: model_class = IndyGEProofPred unknown = EXCLUDE - attr_name = fields.Str( - metadata={"description": "Attribute name, indy-canonicalized"} - ) + attr_name = fields.Str(metadata={"description": "Attribute name, indy-canonicalized"}) p_type = fields.Str( validate=validate.OneOf([p.fortran for p in Predicate]), metadata={"description": "Predicate type"}, diff --git a/aries_cloudagent/indy/tests/test_verifier.py b/aries_cloudagent/indy/tests/test_verifier.py index a9bfef2734..63b931f1d9 100644 --- a/aries_cloudagent/indy/tests/test_verifier.py +++ b/aries_cloudagent/indy/tests/test_verifier.py @@ -481,9 +481,7 @@ async def test_check_timestamps(self): proof_x, REV_REG_DEFS, ) - assert "Presentation attributes mismatch requested" in str( - context.exception - ) + assert "Presentation attributes mismatch requested" in str(context.exception) # all clear, attribute group ('names') await self.verifier.check_timestamps( diff --git a/aries_cloudagent/indy/util.py b/aries_cloudagent/indy/util.py index d565f9d407..096c39c413 100644 --- a/aries_cloudagent/indy/util.py +++ b/aries_cloudagent/indy/util.py @@ -26,7 +26,9 @@ def indy_client_dir(subpath: str = None, create: bool = False) -> str: ( "Documents" if isdir(join(home, "Documents")) - else getenv("EXTERNAL_STORAGE", "") if system() == "Linux" else "" + else getenv("EXTERNAL_STORAGE", "") + if system() == "Linux" + else "" ), ".indy_client", subpath if subpath else "", diff --git a/aries_cloudagent/indy/verifier.py b/aries_cloudagent/indy/verifier.py index 56b1432f32..2a2bb0f243 100644 --- a/aries_cloudagent/indy/verifier.py +++ b/aries_cloudagent/indy/verifier.py @@ -185,9 +185,7 @@ async def check_timestamps( index = revealed_attrs[uuid]["sub_proof_index"] if cred_defs[index]["value"].get("revocation"): timestamp = pres["identifiers"][index].get("timestamp") - if (timestamp is not None) ^ bool( - non_revoc_intervals.get(uuid) - ): + if (timestamp is not None) ^ bool(non_revoc_intervals.get(uuid)): 
LOGGER.debug(f">>> uuid: {uuid}") LOGGER.debug( f">>> revealed_attrs[uuid]: {revealed_attrs[uuid]}" @@ -352,9 +350,7 @@ async def pre_verify(self, pres_req: dict, pres: dict) -> list: f"'{req_attr['name']}'" ) else: - raise ValueError( - f"Missing requested attribute '{req_attr['name']}'" - ) + raise ValueError(f"Missing requested attribute '{req_attr['name']}'") elif "names" in req_attr: group_spec = revealed_groups[uuid] pres_req_attr_spec = { diff --git a/aries_cloudagent/ledger/indy_vdr.py b/aries_cloudagent/ledger/indy_vdr.py index db2739284c..80845a69a5 100644 --- a/aries_cloudagent/ledger/indy_vdr.py +++ b/aries_cloudagent/ledger/indy_vdr.py @@ -126,14 +126,10 @@ def genesis_txns(self) -> str: path = self.cfg_path.joinpath(self.name, "genesis") self.genesis_txns_cache = _normalize_txns(open(path).read()) except FileNotFoundError: - raise LedgerConfigError( - "Pool config '%s' not found", self.name - ) from None + raise LedgerConfigError("Pool config '%s' not found", self.name) from None return self.genesis_txns_cache - async def create_pool_config( - self, genesis_transactions: str, recreate: bool = False - ): + async def create_pool_config(self, genesis_transactions: str, recreate: bool = False): """Create the pool ledger configuration.""" cfg_pool = self.cfg_path.joinpath(self.name) @@ -757,9 +753,7 @@ async def update_endpoint_for_did( ) try: - attrib_req = ledger.build_attrib_request( - nym, nym, None, attr_json, None - ) + attrib_req = ledger.build_attrib_request(nym, nym, None, attr_json, None) if endorser_did and not write_ledger: attrib_req.set_endorser(endorser_did) @@ -808,9 +802,7 @@ async def register_nym( raise BadLedgerRequestError("Cannot register NYM without a public DID") try: - nym_req = ledger.build_nym_request( - public_info.did, did, verkey, alias, role - ) + nym_req = ledger.build_nym_request(public_info.did, did, verkey, alias, role) except VdrError as err: raise LedgerError("Exception when building nym request") from err @@ -952,9 +944,7 @@ async def fetch_txn_author_agreement(self) -> dict: taa_found = response["data"] taa_required = bool(taa_found and taa_found["text"]) if taa_found: - taa_found["digest"] = self.taa_digest( - taa_found["version"], taa_found["text"] - ) + taa_found["digest"] = self.taa_digest(taa_found["version"], taa_found["text"]) return { "aml_record": aml_found, @@ -968,9 +958,7 @@ def taa_rough_timestamp(self) -> int: Anything more accurate is a privacy concern. """ return int( - datetime.combine( - date.today(), datetime.min.time(), timezone.utc - ).timestamp() + datetime.combine(date.today(), datetime.min.time(), timezone.utc).timestamp() ) async def accept_txn_author_agreement( @@ -1105,9 +1093,7 @@ async def get_revoc_reg_delta( # re-calculate the delta. 
if not accum_to: try: - (_, timestamp) = await self.get_revoc_reg_entry( - revoc_reg_id, int(time()) - ) + (_, timestamp) = await self.get_revoc_reg_entry(revoc_reg_id, int(time())) fetch_req = ledger.build_get_revoc_reg_delta_request( public_info and public_info.did, revoc_reg_id, diff --git a/aries_cloudagent/ledger/merkel_validation/constants.py b/aries_cloudagent/ledger/merkel_validation/constants.py index 73c5885222..56c32ce82b 100644 --- a/aries_cloudagent/ledger/merkel_validation/constants.py +++ b/aries_cloudagent/ledger/merkel_validation/constants.py @@ -52,7 +52,5 @@ TAG = "tag" ACCUM_TO = "accum_to" ACCUM_FROM = "accum_from" -(NODE_TYPE_BLANK, NODE_TYPE_LEAF, NODE_TYPE_EXTENSION, NODE_TYPE_BRANCH) = tuple( - range(4) -) +(NODE_TYPE_BLANK, NODE_TYPE_LEAF, NODE_TYPE_EXTENSION, NODE_TYPE_BRANCH) = tuple(range(4)) BLANK_NODE = b"" diff --git a/aries_cloudagent/ledger/merkel_validation/domain_txn_handler.py b/aries_cloudagent/ledger/merkel_validation/domain_txn_handler.py index 5756bdf418..4e7b4997aa 100644 --- a/aries_cloudagent/ledger/merkel_validation/domain_txn_handler.py +++ b/aries_cloudagent/ledger/merkel_validation/domain_txn_handler.py @@ -117,9 +117,7 @@ def hash_of(text) -> str: def make_state_path_for_attr(did, attr_name, attr_is_hash=False) -> bytes: """Return state_path for ATTR.""" nameHash = ( - hashlib.sha256(attr_name.encode()).hexdigest() - if not attr_is_hash - else attr_name + hashlib.sha256(attr_name.encode()).hexdigest() if not attr_is_hash else attr_name ) return "{DID}:{MARKER}:{ATTR_NAME}".format( DID=did, MARKER=MARKER_ATTR, ATTR_NAME=nameHash diff --git a/aries_cloudagent/ledger/multiple_ledger/indy_vdr_manager.py b/aries_cloudagent/ledger/multiple_ledger/indy_vdr_manager.py index 86a70704f5..1d39ad58f1 100644 --- a/aries_cloudagent/ledger/multiple_ledger/indy_vdr_manager.py +++ b/aries_cloudagent/ledger/multiple_ledger/indy_vdr_manager.py @@ -77,9 +77,9 @@ async def get_nonprod_ledgers(self) -> Mapping: async def get_ledger_inst_by_id(self, ledger_id: str) -> Optional[BaseLedger]: """Return BaseLedger instance.""" - return self.production_ledgers.get( + return self.production_ledgers.get(ledger_id) or self.non_production_ledgers.get( ledger_id - ) or self.non_production_ledgers.get(ledger_id) + ) async def get_ledger_id_by_ledger_pool_name(self, pool_name: str) -> str: """Return ledger_id by ledger pool name.""" @@ -197,9 +197,7 @@ async def lookup_did_in_configured_ledgers( applicable_ledger_inst = result[1] is_self_certified = result[2] if applicable_ledger_id in self.production_ledgers: - insert_key = list(self.production_ledgers).index( - applicable_ledger_id - ) + insert_key = list(self.production_ledgers).index(applicable_ledger_id) if is_self_certified: applicable_prod_ledgers["self_certified"][insert_key] = ( applicable_ledger_id, @@ -220,9 +218,10 @@ async def lookup_did_in_configured_ledgers( applicable_ledger_inst, ) else: - applicable_non_prod_ledgers["non_self_certified"][ - insert_key - ] = (applicable_ledger_id, applicable_ledger_inst) + applicable_non_prod_ledgers["non_self_certified"][insert_key] = ( + applicable_ledger_id, + applicable_ledger_inst, + ) applicable_prod_ledgers["self_certified"] = OrderedDict( sorted(applicable_prod_ledgers.get("self_certified").items()) ) @@ -240,36 +239,28 @@ async def lookup_did_in_configured_ledgers( applicable_prod_ledgers.get("self_certified").values() )[0] if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) + await self.cache.set(cache_key, 
successful_ledger_inst[0], self.cache_ttl) return successful_ledger_inst elif len(applicable_non_prod_ledgers.get("self_certified")) > 0: successful_ledger_inst = list( applicable_non_prod_ledgers.get("self_certified").values() )[0] if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) + await self.cache.set(cache_key, successful_ledger_inst[0], self.cache_ttl) return successful_ledger_inst elif len(applicable_prod_ledgers.get("non_self_certified")) > 0: successful_ledger_inst = list( applicable_prod_ledgers.get("non_self_certified").values() )[0] if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) + await self.cache.set(cache_key, successful_ledger_inst[0], self.cache_ttl) return successful_ledger_inst elif len(applicable_non_prod_ledgers.get("non_self_certified")) > 0: successful_ledger_inst = list( applicable_non_prod_ledgers.get("non_self_certified").values() )[0] if cache_did and self.cache: - await self.cache.set( - cache_key, successful_ledger_inst[0], self.cache_ttl - ) + await self.cache.set(cache_key, successful_ledger_inst[0], self.cache_ttl) return successful_ledger_inst else: raise MultipleLedgerManagerError( diff --git a/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py b/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py index 89901826b7..1f45f9e312 100644 --- a/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py +++ b/aries_cloudagent/ledger/multiple_ledger/ledger_config_schema.py @@ -42,9 +42,7 @@ class Meta: unknown = EXCLUDE id = fields.Str(required=False, metadata={"description": "ledger_id"}) - is_production = fields.Bool( - required=False, metadata={"description": "is_production"} - ) + is_production = fields.Bool(required=False, metadata={"description": "is_production"}) genesis_transactions = fields.Str( required=False, metadata={"description": "genesis_transactions"} ) diff --git a/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_vdr_manager.py b/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_vdr_manager.py index 196bd87390..87d8a66397 100644 --- a/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_vdr_manager.py +++ b/aries_cloudagent/ledger/multiple_ledger/tests/test_indy_vdr_manager.py @@ -107,12 +107,12 @@ def test_get_endorser_info_for_ledger(self): writable_ledgers=writable_ledgers, endorser_map=endorser_info_map, ) - assert ( - "endorser_1" - ), "test_public_did_1" == manager.get_endorser_info_for_ledger("test_prod_1") - assert ( - "endorser_2" - ), "test_public_did_2" == manager.get_endorser_info_for_ledger("test_prod_2") + assert "endorser_1", "test_public_did_1" == manager.get_endorser_info_for_ledger( + "test_prod_1" + ) + assert "endorser_2", "test_public_did_2" == manager.get_endorser_info_for_ledger( + "test_prod_2" + ) async def test_get_write_ledgers(self): ledger_ids = await self.manager.get_write_ledgers() diff --git a/aries_cloudagent/ledger/routes.py b/aries_cloudagent/ledger/routes.py index 8fbf8ee102..4d63eb5af3 100644 --- a/aries_cloudagent/ledger/routes.py +++ b/aries_cloudagent/ledger/routes.py @@ -282,9 +282,7 @@ async def register_ledger_nym(request: web.BaseRequest): did = request.query.get("did") verkey = request.query.get("verkey") if not did or not verkey: - raise web.HTTPBadRequest( - reason="Request query must include both did and verkey" - ) + raise web.HTTPBadRequest(reason="Request query must include both did and verkey") alias = 
request.query.get("alias") role = request.query.get("role") @@ -321,9 +319,7 @@ async def register_ledger_nym(request: web.BaseRequest): raise web.HTTPBadRequest(reason=err.roll_up) from err async with context.profile.session() as session: - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") if not endorser_info: raise web.HTTPForbidden( reason=( @@ -775,9 +771,7 @@ async def set_write_ledger(request: web.BaseRequest): return web.json_response({"write_ledger": set_ledger_id}) -@docs( - tags=["ledger"], summary="Fetch the multiple ledger configuration currently in use" -) +@docs(tags=["ledger"], summary="Fetch the multiple ledger configuration currently in use") @response_schema(LedgerConfigListSchema, 200, description="") @tenant_authentication async def get_ledger_config(request: web.BaseRequest): @@ -799,9 +793,7 @@ async def get_ledger_config(request: web.BaseRequest): ledger_config_list = session.settings.get_value("ledger.ledger_config_list") config_ledger_dict = {"production_ledgers": [], "non_production_ledgers": []} production_ledger_keys = (await multiledger_mgr.get_prod_ledgers()).keys() - non_production_ledger_keys = ( - await multiledger_mgr.get_nonprod_ledgers() - ).keys() + non_production_ledger_keys = (await multiledger_mgr.get_nonprod_ledgers()).keys() config_ledger_ids_set = set() for config in ledger_config_list: ledger_id = config.get("id") diff --git a/aries_cloudagent/ledger/tests/test_indy_vdr.py b/aries_cloudagent/ledger/tests/test_indy_vdr.py index 98ecc12ced..5e0ec2cfae 100644 --- a/aries_cloudagent/ledger/tests/test_indy_vdr.py +++ b/aries_cloudagent/ledger/tests/test_indy_vdr.py @@ -238,9 +238,7 @@ async def test_send_schema( ) async with ledger: - ledger.pool_handle.submit_request.return_value = { - "txnMetadata": {"seqNo": 1} - } + ledger.pool_handle.submit_request.return_value = {"txnMetadata": {"seqNo": 1}} with mock.patch.object( ledger, @@ -506,9 +504,7 @@ async def test_send_credential_definition_no_public_did( ) @pytest.mark.asyncio - async def test_send_credential_definition_no_such_schema( - self, ledger: IndyVdrLedger - ): + async def test_send_credential_definition_no_such_schema(self, ledger: IndyVdrLedger): issuer = mock.MagicMock(IndyIssuer) async with ledger: ledger.pool_handle.submit_request.return_value = {} @@ -1193,9 +1189,7 @@ async def test_send_revoc_reg_entry_anoncreds_do_not_write_to_ledger( return_value=json.dumps({"result": {"txnMetadata": {"seqNo": 1234}}}), ): ledger.pool_handle.submit_request.return_value = {"status": "ok"} - result = await ledger.send_revoc_reg_entry( - reg_id, "CL_ACCUM", reg_entry - ) + result = await ledger.send_revoc_reg_entry(reg_id, "CL_ACCUM", reg_entry) assert result == 1234 @pytest.mark.asyncio diff --git a/aries_cloudagent/ledger/tests/test_routes.py b/aries_cloudagent/ledger/tests/test_routes.py index 69992e7d2a..941c7dd336 100644 --- a/aries_cloudagent/ledger/tests/test_routes.py +++ b/aries_cloudagent/ledger/tests/test_routes.py @@ -294,9 +294,7 @@ async def test_register_nym_bad_request(self): async def test_register_nym_ledger_txn_error(self): self.request.query = {"did": self.test_did, "verkey": self.test_verkey} - self.ledger.register_nym.side_effect = test_module.LedgerTransactionError( - "Error" - ) + self.ledger.register_nym.side_effect = test_module.LedgerTransactionError("Error") with self.assertRaises(test_module.web.HTTPForbidden): await 
test_module.register_ledger_nym(self.request) diff --git a/aries_cloudagent/messaging/agent_message.py b/aries_cloudagent/messaging/agent_message.py index 57d399af39..7dc62f49dd 100644 --- a/aries_cloudagent/messaging/agent_message.py +++ b/aries_cloudagent/messaging/agent_message.py @@ -555,9 +555,7 @@ def check_dump_decorators(self, obj, **kwargs): expect_fields = resolve_meta_property(self, "signed_fields") or () for field_name in expect_fields: if field_name not in self._signatures: - raise BaseModelError( - "Missing signature for field: {}".format(field_name) - ) + raise BaseModelError("Missing signature for field: {}".format(field_name)) return obj diff --git a/aries_cloudagent/messaging/credential_definitions/routes.py b/aries_cloudagent/messaging/credential_definitions/routes.py index 56cb62403e..29daa976f0 100644 --- a/aries_cloudagent/messaging/credential_definitions/routes.py +++ b/aries_cloudagent/messaging/credential_definitions/routes.py @@ -262,9 +262,7 @@ async def credential_definitions_send_credential_definition(request: web.BaseReq raise web.HTTPBadRequest(reason=err.roll_up) from err async with profile.session() as session: - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") if not endorser_info: raise web.HTTPForbidden( reason=( @@ -551,9 +549,7 @@ async def on_cred_def_event(profile: Profile, event: Event): meta_data["context"].get("rev_reg_size", None) if support_revocation else None ) auto_create_rev_reg = meta_data["processing"].get("auto_create_rev_reg", False) - create_pending_rev_reg = meta_data["processing"].get( - "create_pending_rev_reg", False - ) + create_pending_rev_reg = meta_data["processing"].get("create_pending_rev_reg", False) endorser_connection_id = ( meta_data["endorser"].get("connection_id", None) if "endorser" in meta_data diff --git a/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py b/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py index 90043bdfef..666dbec138 100644 --- a/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py +++ b/aries_cloudagent/messaging/credential_definitions/tests/test_routes.py @@ -80,10 +80,8 @@ async def test_send_credential_definition(self): self.request.query = {"create_transaction_for_endorser": "false"} with mock.patch.object(test_module.web, "json_response") as mock_response: - result = ( - await test_module.credential_definitions_send_credential_definition( - self.request - ) + result = await test_module.credential_definitions_send_credential_definition( + self.request ) assert result == mock_response.return_value mock_response.assert_called_once_with( @@ -129,10 +127,8 @@ async def test_send_credential_definition_create_transaction_for_endorser(self): } ) ) - result = ( - await test_module.credential_definitions_send_credential_definition( - self.request - ) + result = await test_module.credential_definitions_send_credential_definition( + self.request ) assert result == mock_response.return_value mock_response.assert_called_once_with( diff --git a/aries_cloudagent/messaging/decorators/attach_decorator.py b/aries_cloudagent/messaging/decorators/attach_decorator.py index 461e38ea1a..4aa1689e3c 100644 --- a/aries_cloudagent/messaging/decorators/attach_decorator.py +++ b/aries_cloudagent/messaging/decorators/attach_decorator.py @@ -664,9 +664,7 @@ def data_base64( mime_type="application/json", lastmod_time=lastmod_time, byte_count=byte_count, 
- data=AttachDecoratorData( - base64_=bytes_to_b64(json.dumps(mapping).encode()) - ), + data=AttachDecoratorData(base64_=bytes_to_b64(json.dumps(mapping).encode())), ) @classmethod diff --git a/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py b/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py index 02d557bec8..bf513e08b1 100644 --- a/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py +++ b/aries_cloudagent/messaging/decorators/tests/test_attach_decorator.py @@ -441,9 +441,7 @@ async def test_indy_sign(self, wallet, seed): byte_count=BYTE_COUNT, ) deco_indy_master = deepcopy(deco_indy) - did_info = [ - await wallet.create_local_did(SOV, ED25519, seed[i]) for i in [0, 1] - ] + did_info = [await wallet.create_local_did(SOV, ED25519, seed[i]) for i in [0, 1]] assert deco_indy.data.signatures == 0 assert deco_indy.data.header_map() is None await deco_indy.data.sign(did_info[0].verkey, wallet) diff --git a/aries_cloudagent/messaging/jsonld/create_verify_data.py b/aries_cloudagent/messaging/jsonld/create_verify_data.py index 26844a627e..a2ed01712b 100644 --- a/aries_cloudagent/messaging/jsonld/create_verify_data.py +++ b/aries_cloudagent/messaging/jsonld/create_verify_data.py @@ -89,17 +89,12 @@ def create_verify_data(data, signature_options, document_loader=None): expanded, data.get("@context"), options={ - **{ - opt: document_loader - for opt in ["documentLoader"] - if document_loader - } + **{opt: document_loader for opt in ["documentLoader"] if document_loader} }, ) dropped = set(data.keys()) - set(for_diff.keys()) raise DroppedAttributeError( - f"{dropped} attributes dropped. " - "Provide definitions in context to correct." + f"{dropped} attributes dropped. " "Provide definitions in context to correct." ) # Check proof for dropped attributes attr = [ diff --git a/aries_cloudagent/messaging/jsonld/credential.py b/aries_cloudagent/messaging/jsonld/credential.py index fdfb41f692..df8c557583 100644 --- a/aries_cloudagent/messaging/jsonld/credential.py +++ b/aries_cloudagent/messaging/jsonld/credential.py @@ -57,9 +57,7 @@ def verify_jws_header(header): """Check header requirements.""" if header != {"alg": "EdDSA", "b64": False, "crit": ["b64"]}: - raise BadJWSHeaderError( - "Invalid JWS header parameters for Ed25519Signature2018." 
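The DroppedAttributeError hunk above (and the did:key test fixtures further down) rely on Python's implicit concatenation of adjacent string literals, so joining the pieces onto one line leaves the resulting messages and identifiers unchanged. A small sketch, with a made-up attribute name:

    # Illustration only; "favouriteColour" is a made-up attribute name.
    dropped = {"favouriteColour"}
    message = f"{dropped} attributes dropped. " "Provide definitions in context to correct."
    assert message.endswith("dropped. Provide definitions in context to correct.")

    # Adjacent literals concatenate the same way in the did:key fixtures:
    did = "did:key:" "z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd"
    assert did == "did:key:z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd"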
- ) + raise BadJWSHeaderError("Invalid JWS header parameters for Ed25519Signature2018.") async def jws_verify(session, verify_data, signature, public_key): diff --git a/aries_cloudagent/messaging/jsonld/routes.py b/aries_cloudagent/messaging/jsonld/routes.py index 72cfd62fcb..abffd1106a 100644 --- a/aries_cloudagent/messaging/jsonld/routes.py +++ b/aries_cloudagent/messaging/jsonld/routes.py @@ -53,9 +53,7 @@ class SignRequestSchema(OpenAPISchema): class SignResponseSchema(OpenAPISchema): """Response schema for a signed jsonld doc.""" - signed_doc = fields.Dict( - required=False, metadata={"description": "Signed document"} - ) + signed_doc = fields.Dict(required=False, metadata={"description": "Signed document"}) error = fields.Str(required=False, metadata={"description": "Error text"}) diff --git a/aries_cloudagent/messaging/jsonld/tests/test_routes.py b/aries_cloudagent/messaging/jsonld/tests/test_routes.py index f48afb3dcb..f33159fa48 100644 --- a/aries_cloudagent/messaging/jsonld/tests/test_routes.py +++ b/aries_cloudagent/messaging/jsonld/tests/test_routes.py @@ -150,9 +150,7 @@ def _mock_verify_request(request_body=request_body): "admin.admin_api_key": "secret-key", } ) - context = AdminRequestContext.test_context( - {DIDResolver: mock_resolver}, profile - ) + context = AdminRequestContext.test_context({DIDResolver: mock_resolver}, profile) outbound_message_router = mock.CoroutineMock() request_dict = { "context": context, @@ -186,9 +184,7 @@ async def test_sign(mock_sign_request, mock_response): assert "error" not in mock_response.call_args[0][0] -@pytest.mark.parametrize( - "error", [DroppedAttributeError, MissingVerificationMethodError] -) +@pytest.mark.parametrize("error", [DroppedAttributeError, MissingVerificationMethodError]) @pytest.mark.asyncio async def test_sign_bad_req_error(mock_sign_request, mock_response, error): test_module.sign_credential = mock.CoroutineMock(side_effect=error()) @@ -327,9 +323,7 @@ async def test_verify_credential(self): "issuer": ("did:key:z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd"), "issuanceDate": "2020-03-10T04:24:12.164Z", "credentialSubject": { - "id": ( - "did:key:" "z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd" - ), + "id": ("did:key:" "z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd"), "degree": { "type": "BachelorDegree", "name": "Bachelor of Science and Arts", @@ -464,8 +458,7 @@ async def test_sign_credential(self): "issuanceDate": "2020-03-10T04:24:12.164Z", "credentialSubject": { "id": ( - "did:key:" - "z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd" + "did:key:" "z6MkjRagNiMu91DduvCvgEsqLZDVzrJzFrwahc4tXLt9DoHd" ), "degree": { "type": "BachelorDegree", diff --git a/aries_cloudagent/messaging/message_type.py b/aries_cloudagent/messaging/message_type.py index 9e387eae6f..06bbf591cd 100644 --- a/aries_cloudagent/messaging/message_type.py +++ b/aries_cloudagent/messaging/message_type.py @@ -102,9 +102,7 @@ def from_message_type( raise ValueError(f"Invalid protocol identifier: {message_type}") elif isinstance(message_type, MessageType): - return cls( - message_type.doc_uri, message_type.protocol, message_type.version - ) + return cls(message_type.doc_uri, message_type.protocol, message_type.version) else: raise TypeError(f"Invalid message type: {message_type}") diff --git a/aries_cloudagent/messaging/models/base_record.py b/aries_cloudagent/messaging/models/base_record.py index 80b22c1898..c7ee98c84c 100644 --- a/aries_cloudagent/messaging/models/base_record.py +++ b/aries_cloudagent/messaging/models/base_record.py @@ -51,8 
+51,7 @@ def match_post_filter( return ( positive and all( - record.get(k) and record.get(k) in alts - for k, alts in post_filter.items() + record.get(k) and record.get(k) in alts for k, alts in post_filter.items() ) ) or ( (not positive) @@ -498,9 +497,7 @@ def log_state( def strip_tag_prefix(cls, tags: dict): """Strip tilde from unencrypted tag names.""" - return ( - {(k[1:] if "~" in k else k): v for (k, v) in tags.items()} if tags else {} - ) + return {(k[1:] if "~" in k else k): v for (k, v) in tags.items()} if tags else {} @classmethod def prefix_tag_filter(cls, tag_filter: dict): diff --git a/aries_cloudagent/messaging/models/tests/test_base.py b/aries_cloudagent/messaging/models/tests/test_base.py index b627227a0d..d19b5b1c6b 100644 --- a/aries_cloudagent/messaging/models/tests/test_base.py +++ b/aries_cloudagent/messaging/models/tests/test_base.py @@ -103,9 +103,7 @@ def test_model_with_unknown(self): model = model.validate() assert model.attr == "succeeds" - model = ModelImplWithUnknown.deserialize( - {"attr": "succeeds", "another": "value"} - ) + model = ModelImplWithUnknown.deserialize({"attr": "succeeds", "another": "value"}) assert model.extra assert model.extra["another"] == "value" assert model.attr == "succeeds" diff --git a/aries_cloudagent/messaging/models/tests/test_base_record.py b/aries_cloudagent/messaging/models/tests/test_base_record.py index eaa60aa61d..06fb281a9d 100644 --- a/aries_cloudagent/messaging/models/tests/test_base_record.py +++ b/aries_cloudagent/messaging/models/tests/test_base_record.py @@ -136,9 +136,7 @@ async def test_retrieve_by_tag_filter_multi_x_delete(self): records.append(ARecordImpl(a="1", b=str(i), code="one")) await records[i].save(session) with self.assertRaises(StorageDuplicateError): - await ARecordImpl.retrieve_by_tag_filter( - session, {"code": "one"}, {"a": "1"} - ) + await ARecordImpl.retrieve_by_tag_filter(session, {"code": "one"}, {"a": "1"}) await records[0].delete_record(session) async def test_save_x(self): @@ -320,9 +318,7 @@ async def test_tag_prefix(self): assert {**expect} == {**actual} tags = {"$or": [{"a": "x"}, {"c": "z"}]} - assert UnencTestImpl.prefix_tag_filter(tags) == { - "$or": [{"~a": "x"}, {"c": "z"}] - } + assert UnencTestImpl.prefix_tag_filter(tags) == {"$or": [{"~a": "x"}, {"c": "z"}]} async def test_query_with_limit(self): session = InMemoryProfile.test_session() diff --git a/aries_cloudagent/messaging/request_context.py b/aries_cloudagent/messaging/request_context.py index b346675aca..6eb0b0468d 100644 --- a/aries_cloudagent/messaging/request_context.py +++ b/aries_cloudagent/messaging/request_context.py @@ -26,7 +26,7 @@ def __init__( profile: Profile, *, context: Optional[InjectionContext] = None, - settings: Optional[Mapping[str, object]] = None + settings: Optional[Mapping[str, object]] = None, ): """Initialize an instance of RequestContext.""" self._connection_ready = False @@ -229,8 +229,6 @@ def __repr__(self) -> str: """ skip = () items = ( - "{}={}".format(k, repr(v)) - for k, v in self.__dict__.items() - if k not in skip + "{}={}".format(k, repr(v)) for k, v in self.__dict__.items() if k not in skip ) return "<{}({})>".format(self.__class__.__name__, ", ".join(items)) diff --git a/aries_cloudagent/messaging/responder.py b/aries_cloudagent/messaging/responder.py index f179009c6b..b71daee7fa 100644 --- a/aries_cloudagent/messaging/responder.py +++ b/aries_cloudagent/messaging/responder.py @@ -154,9 +154,7 @@ async def conn_rec_active_state_check( async def _wait_for_state() -> Tuple[bool, 
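For context on the strip_tag_prefix hunk in base_record.py above: the comprehension that ruff collapsed onto one line strips the tilde marker used for unencrypted tag names, and its behaviour is unchanged. A minimal sketch with illustrative tag names:

    # Illustration only; the tag names are made up.
    tags = {"~created_at": "2024-08-15", "state": "active"}
    stripped = {(k[1:] if "~" in k else k): v for (k, v) in tags.items()} if tags else {}
    assert stripped == {"created_at": "2024-08-15", "state": "active"}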
Optional[str]]: while True: async with profile.session() as session: - conn_record = await ConnRecord.retrieve_by_id( - session, connection_id - ) + conn_record = await ConnRecord.retrieve_by_id(session, connection_id) if conn_record.is_ready: # if ConnRecord.State.get(conn_record.state) in ( # ConnRecord.State.COMPLETED, diff --git a/aries_cloudagent/messaging/schemas/routes.py b/aries_cloudagent/messaging/schemas/routes.py index 6d4e50f9c2..d8128a5398 100644 --- a/aries_cloudagent/messaging/schemas/routes.py +++ b/aries_cloudagent/messaging/schemas/routes.py @@ -91,9 +91,7 @@ class SchemaSendResultSchema(OpenAPISchema): "example": INDY_SCHEMA_ID_EXAMPLE, }, ) - schema = fields.Nested( - SchemaSchema(), metadata={"description": "Schema definition"} - ) + schema = fields.Nested(SchemaSchema(), metadata={"description": "Schema definition"}) class TxnOrSchemaSendResultSchema(OpenAPISchema): @@ -233,9 +231,7 @@ async def schemas_send_schema(request: web.BaseRequest): raise web.HTTPBadRequest(reason=err.roll_up) from err async with profile.session() as session: - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") if not endorser_info: raise web.HTTPForbidden( reason=( @@ -301,9 +297,7 @@ async def schemas_send_schema(request: web.BaseRequest): # If the transaction is for the endorser, but the schema has already been created, # then we send back the schema since the transaction will fail to be created. elif "signed_txn" not in schema_def: - return web.json_response( - {"sent": {"schema_id": schema_id, "schema": schema_def}} - ) + return web.json_response({"sent": {"schema_id": schema_id, "schema": schema_def}}) else: transaction_mgr = TransactionManager(context.profile) try: diff --git a/aries_cloudagent/messaging/tests/test_util.py b/aries_cloudagent/messaging/tests/test_util.py index 834dc26ae7..bf17231b65 100644 --- a/aries_cloudagent/messaging/tests/test_util.py +++ b/aries_cloudagent/messaging/tests/test_util.py @@ -27,9 +27,7 @@ def test_parse(self): "2019-05-17 20:51:19Z": datetime( 2019, 5, 17, 20, 51, 19, 0, tzinfo=timezone.utc ), - "2019-05-17 20:51Z": datetime( - 2019, 5, 17, 20, 51, 0, 0, tzinfo=timezone.utc - ), + "2019-05-17 20:51Z": datetime(2019, 5, 17, 20, 51, 0, 0, tzinfo=timezone.utc), "2019-11-15T22:37:14": datetime( 2019, 11, 15, 22, 37, 14, tzinfo=timezone.utc ), diff --git a/aries_cloudagent/multitenant/admin/routes.py b/aries_cloudagent/multitenant/admin/routes.py index 1d350cb658..36e6e0a88a 100644 --- a/aries_cloudagent/multitenant/admin/routes.py +++ b/aries_cloudagent/multitenant/admin/routes.py @@ -484,9 +484,7 @@ async def wallet_create(request: web.BaseRequest): try: multitenant_mgr = context.profile.inject(BaseMultitenantManager) - wallet_record = await multitenant_mgr.create_wallet( - settings, key_management_mode - ) + wallet_record = await multitenant_mgr.create_wallet(settings, key_management_mode) token = await multitenant_mgr.create_auth_token(wallet_record, wallet_key) diff --git a/aries_cloudagent/multitenant/admin/tests/test_routes.py b/aries_cloudagent/multitenant/admin/tests/test_routes.py index 7576591968..597e0398a7 100644 --- a/aries_cloudagent/multitenant/admin/tests/test_routes.py +++ b/aries_cloudagent/multitenant/admin/tests/test_routes.py @@ -620,9 +620,7 @@ async def test_wallet_get(self): mock_wallet_record_retrieve_by_id.return_value = mock_wallet_record await test_module.wallet_get(self.request) - 
mock_response.assert_called_once_with( - {"settings": {}, "wallet_id": "dummy"} - ) + mock_response.assert_called_once_with({"settings": {}, "wallet_id": "dummy"}) async def test_wallet_get_not_found(self): self.request.match_info = {"wallet_id": "dummy"} @@ -754,9 +752,7 @@ async def test_wallet_remove_managed(self): result = await test_module.wallet_remove(self.request) - self.mock_multitenant_mgr.remove_wallet.assert_called_once_with( - "dummy", None - ) + self.mock_multitenant_mgr.remove_wallet.assert_called_once_with("dummy", None) mock_response.assert_called_once_with({}) assert result == mock_response.return_value diff --git a/aries_cloudagent/multitenant/tests/test_base.py b/aries_cloudagent/multitenant/tests/test_base.py index 7ae46e875f..35b3ca8267 100644 --- a/aries_cloudagent/multitenant/tests/test_base.py +++ b/aries_cloudagent/multitenant/tests/test_base.py @@ -32,7 +32,7 @@ async def get_wallet_profile( wallet_record: WalletRecord, extra_settings: dict = ..., *, - provision=False + provision=False, ): """Do nothing.""" @@ -178,9 +178,7 @@ async def test_get_wallet_by_key(self): assert isinstance(wallet, WalletRecord) async def test_create_wallet_removes_key_only_unmanaged_mode(self): - with mock.patch.object( - self.manager, "get_wallet_profile" - ) as get_wallet_profile: + with mock.patch.object(self.manager, "get_wallet_profile") as get_wallet_profile: get_wallet_profile.return_value = InMemoryProfile.test_profile() unmanaged_wallet_record = await self.manager.create_wallet( @@ -431,9 +429,7 @@ async def test_get_wallet_details_from_token(self): "very_secret_jwt", algorithm="HS256", ) - ret_wallet_id, ret_wallet_key = self.manager.get_wallet_details_from_token( - token - ) + ret_wallet_id, ret_wallet_key = self.manager.get_wallet_details_from_token(token) assert ret_wallet_id == wallet_record.wallet_id assert not ret_wallet_key @@ -446,9 +442,7 @@ async def test_get_wallet_details_from_token(self): "very_secret_jwt", algorithm="HS256", ) - ret_wallet_id, ret_wallet_key = self.manager.get_wallet_details_from_token( - token - ) + ret_wallet_id, ret_wallet_key = self.manager.get_wallet_details_from_token(token) assert ret_wallet_id == wallet_record.wallet_id assert ret_wallet_key == "wallet_key" @@ -463,9 +457,7 @@ async def test_get_wallet_and_profile(self): session = await self.profile.session() await wallet_record.save(session) - with mock.patch.object( - self.manager, "get_wallet_profile" - ) as get_wallet_profile: + with mock.patch.object(self.manager, "get_wallet_profile") as get_wallet_profile: mock_profile = InMemoryProfile.test_profile() get_wallet_profile.return_value = mock_profile @@ -512,9 +504,7 @@ async def test_get_profile_for_token_managed_wallet_no_iat(self): {"wallet_id": wallet_record.wallet_id}, "very_secret_jwt", algorithm="HS256" ) - with mock.patch.object( - self.manager, "get_wallet_profile" - ) as get_wallet_profile: + with mock.patch.object(self.manager, "get_wallet_profile") as get_wallet_profile: mock_profile = InMemoryProfile.test_profile() get_wallet_profile.return_value = mock_profile @@ -549,9 +539,7 @@ async def test_get_profile_for_token_managed_wallet_iat(self): algorithm="HS256", ) - with mock.patch.object( - self.manager, "get_wallet_profile" - ) as get_wallet_profile: + with mock.patch.object(self.manager, "get_wallet_profile") as get_wallet_profile: mock_profile = InMemoryProfile.test_profile() get_wallet_profile.return_value = mock_profile @@ -623,9 +611,7 @@ async def test_get_profile_for_token_unmanaged_wallet(self): 
algorithm="HS256", ) - with mock.patch.object( - self.manager, "get_wallet_profile" - ) as get_wallet_profile: + with mock.patch.object(self.manager, "get_wallet_profile") as get_wallet_profile: mock_profile = InMemoryProfile.test_profile() get_wallet_profile.return_value = mock_profile @@ -685,7 +671,5 @@ async def test_get_profile_for_key(self): ), mock.patch.object( self.manager, "get_wallet_profile", mock.CoroutineMock() ) as mock_get_wallet_profile: - profile = await self.manager.get_profile_for_key( - self.context, "test-verkey" - ) + profile = await self.manager.get_profile_for_key(self.context, "test-verkey") assert profile == mock_get_wallet_profile.return_value diff --git a/aries_cloudagent/multitenant/tests/test_manager.py b/aries_cloudagent/multitenant/tests/test_manager.py index 17bd7f0331..8c129adde9 100644 --- a/aries_cloudagent/multitenant/tests/test_manager.py +++ b/aries_cloudagent/multitenant/tests/test_manager.py @@ -21,9 +21,7 @@ async def test_get_wallet_profile_returns_from_cache(self): wallet_record = WalletRecord(wallet_id="test") self.manager._profiles.put("test", InMemoryProfile.test_profile()) - with mock.patch( - "aries_cloudagent.config.wallet.wallet_config" - ) as wallet_config: + with mock.patch("aries_cloudagent.config.wallet.wallet_config") as wallet_config: profile = await self.manager.get_wallet_profile( self.profile.context, wallet_record ) @@ -37,9 +35,7 @@ async def test_get_wallet_profile_not_in_cache(self): {"admin.webhook_urls": ["http://localhost:8020"]} ) - with mock.patch( - "aries_cloudagent.config.wallet.wallet_config" - ) as wallet_config: + with mock.patch("aries_cloudagent.config.wallet.wallet_config") as wallet_config: profile = await self.manager.get_wallet_profile( self.profile.context, wallet_record ) diff --git a/aries_cloudagent/protocols/actionmenu/v1_0/routes.py b/aries_cloudagent/protocols/actionmenu/v1_0/routes.py index 802fa75ebf..b066fd2550 100644 --- a/aries_cloudagent/protocols/actionmenu/v1_0/routes.py +++ b/aries_cloudagent/protocols/actionmenu/v1_0/routes.py @@ -90,9 +90,7 @@ class ActionMenuFetchResultSchema(OpenAPISchema): result = fields.Nested(MenuSchema, metadata={"description": "Action menu"}) -@docs( - tags=["action-menu"], summary="Close the active menu associated with a connection" -) +@docs(tags=["action-menu"], summary="Close the active menu associated with a connection") @match_info_schema(MenuConnIdMatchInfoSchema()) @response_schema(ActionMenuModulesResultSchema(), 200, description="") @tenant_authentication diff --git a/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_util.py b/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_util.py index 89f54a1b42..68ecfbcc53 100644 --- a/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_util.py +++ b/aries_cloudagent/protocols/actionmenu/v1_0/tests/test_util.py @@ -72,6 +72,4 @@ async def test_save_retrieve_delete_connection_menu(self): mock_event_bus.events.clear() # retrieve no menu - assert ( - await test_module.retrieve_connection_menu(connection_id, context) is None - ) + assert await test_module.retrieve_connection_menu(connection_id, context) is None diff --git a/aries_cloudagent/protocols/basicmessage/v1_0/routes.py b/aries_cloudagent/protocols/basicmessage/v1_0/routes.py index 015318eb5f..772a26b571 100644 --- a/aries_cloudagent/protocols/basicmessage/v1_0/routes.py +++ b/aries_cloudagent/protocols/basicmessage/v1_0/routes.py @@ -21,9 +21,7 @@ class BasicMessageModuleResponseSchema(OpenAPISchema): class SendMessageSchema(OpenAPISchema): """Request 
schema for sending a message.""" - content = fields.Str( - metadata={"description": "Message content", "example": "Hello"} - ) + content = fields.Str(metadata={"description": "Message content", "example": "Hello"}) class BasicConnIdMatchInfoSchema(OpenAPISchema): diff --git a/aries_cloudagent/protocols/connections/v1_0/handlers/connection_invitation_handler.py b/aries_cloudagent/protocols/connections/v1_0/handlers/connection_invitation_handler.py index 1f4c4c3ec6..3c9dde344d 100644 --- a/aries_cloudagent/protocols/connections/v1_0/handlers/connection_invitation_handler.py +++ b/aries_cloudagent/protocols/connections/v1_0/handlers/connection_invitation_handler.py @@ -26,9 +26,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): report = ConnectionProblemReport( description={ "code": ProblemReportReason.INVITATION_NOT_ACCEPTED.value, - "en": ( - "Connection invitations cannot be submitted via agent messaging" - ), + "en": ("Connection invitations cannot be submitted via agent messaging"), } ) report.assign_thread_from(context.message) diff --git a/aries_cloudagent/protocols/connections/v1_0/handlers/connection_response_handler.py b/aries_cloudagent/protocols/connections/v1_0/handlers/connection_response_handler.py index 6fea9398c6..d9556d8f67 100644 --- a/aries_cloudagent/protocols/connections/v1_0/handlers/connection_response_handler.py +++ b/aries_cloudagent/protocols/connections/v1_0/handlers/connection_response_handler.py @@ -42,4 +42,4 @@ async def handle(self, context: RequestContext, responder: BaseResponder): # send trust ping in response if context.settings.get("auto_ping_connection"): - await responder.send(Ping(), connection_id=connection.connection_id), + (await responder.send(Ping(), connection_id=connection.connection_id),) diff --git a/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_request_handler.py b/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_request_handler.py index db8ef1ce5f..f1ede14976 100644 --- a/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_request_handler.py +++ b/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_request_handler.py @@ -160,9 +160,7 @@ async def test_connection_record_without_mediation_metadata( @pytest.mark.asyncio @mock.patch.object(handler, "ConnectionManager") @mock.patch.object(connection_target, "ConnectionTarget") - async def test_problem_report( - self, mock_conn_target, mock_conn_mgr, request_context - ): + async def test_problem_report(self, mock_conn_target, mock_conn_mgr, request_context): mock_conn_mgr.return_value.receive_request = mock.CoroutineMock() mock_conn_mgr.return_value.receive_request.side_effect = ConnectionManagerError( error_code=ProblemReportReason.REQUEST_NOT_ACCEPTED.value diff --git a/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_response_handler.py b/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_response_handler.py index c52111330e..56349e0ef2 100644 --- a/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_response_handler.py +++ b/aries_cloudagent/protocols/connections/v1_0/handlers/tests/test_response_handler.py @@ -97,9 +97,7 @@ async def test_called_auto_ping(self, mock_conn_mgr, request_context): @pytest.mark.asyncio @mock.patch.object(handler, "ConnectionManager") @mock.patch.object(connection_target, "ConnectionTarget") - async def test_problem_report( - self, mock_conn_target, mock_conn_mgr, request_context - ): + async def test_problem_report(self, mock_conn_target, 
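On the connection_response_handler.py hunk above: the original statement ends with a stray trailing comma, which makes the awaited send a one-element tuple expression; ruff preserves that meaning by wrapping the line in parentheses rather than silently dropping the comma. A runnable sketch of the equivalence, with placeholder names:

    import asyncio

    async def _send():
        # Placeholder for responder.send(Ping(), connection_id=...).
        return "sent"

    async def _demo():
        # The trailing comma turns the awaited call into a 1-tuple, exactly as
        # in the formatted hunk; the send itself still happens.
        result = (await _send(),)
        assert result == ("sent",)

    asyncio.run(_demo())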
mock_conn_mgr, request_context): mock_conn_mgr.return_value.accept_response = mock.CoroutineMock() mock_conn_mgr.return_value.accept_response.side_effect = ConnectionManagerError( error_code=ProblemReportReason.RESPONSE_NOT_ACCEPTED.value, diff --git a/aries_cloudagent/protocols/connections/v1_0/manager.py b/aries_cloudagent/protocols/connections/v1_0/manager.py index 5ee610724d..5b7b272e2c 100644 --- a/aries_cloudagent/protocols/connections/v1_0/manager.py +++ b/aries_cloudagent/protocols/connections/v1_0/manager.py @@ -167,9 +167,7 @@ async def create_invitation( # Create and store new invitation key async with self.profile.session() as session: wallet = session.inject(BaseWallet) - invitation_signing_key = await wallet.create_signing_key( - key_type=ED25519 - ) + invitation_signing_key = await wallet.create_signing_key(key_type=ED25519) invitation_key = invitation_signing_key.verkey recipient_keys = [invitation_key] @@ -561,8 +559,7 @@ async def receive_request( if not connection: if not self.profile.settings.get("requests_through_public_did"): raise ConnectionManagerError( - "Unsolicited connection requests to " - "public DID is not enabled" + "Unsolicited connection requests to " "public DID is not enabled" ) connection = ConnRecord() connection.invitation_key = connection_key diff --git a/aries_cloudagent/protocols/connections/v1_0/messages/connection_invitation.py b/aries_cloudagent/protocols/connections/v1_0/messages/connection_invitation.py index 889a3f1cd2..f2bcb36068 100644 --- a/aries_cloudagent/protocols/connections/v1_0/messages/connection_invitation.py +++ b/aries_cloudagent/protocols/connections/v1_0/messages/connection_invitation.py @@ -200,9 +200,7 @@ def validate_fields(self, data, **kwargs): """ if data.get("did"): if data.get("recipient_keys"): - raise ValidationError( - "Fields are incompatible", ("did", "recipientKeys") - ) + raise ValidationError("Fields are incompatible", ("did", "recipientKeys")) if data.get("endpoint"): raise ValidationError( "Fields are incompatible", ("did", "serviceEndpoint") diff --git a/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_request.py b/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_request.py index 9ae14e169c..f6f20a6d83 100644 --- a/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_request.py +++ b/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_request.py @@ -100,9 +100,7 @@ def test_serialize(self, mock_connection_request_schema_dump): self.connection_request ) - assert ( - connection_request_dict is mock_connection_request_schema_dump.return_value - ) + assert connection_request_dict is mock_connection_request_schema_dump.return_value class TestConnectionRequestSchema(IsolatedAsyncioTestCase, TestConfig): diff --git a/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_response.py b/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_response.py index 3522895da8..2a3858044a 100644 --- a/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_response.py +++ b/aries_cloudagent/protocols/connections/v1_0/messages/tests/test_connection_response.py @@ -96,8 +96,7 @@ def test_serialize(self, mock_connection_response_schema_dump): ) assert ( - connection_response_dict - is mock_connection_response_schema_dump.return_value + connection_response_dict is mock_connection_response_schema_dump.return_value ) diff --git 
a/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py index a8589e458b..6d6e57ae93 100644 --- a/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/connections/v1_0/tests/test_manager.py @@ -65,9 +65,7 @@ async def asyncSetUp(self): self.route_manager.routing_info = mock.CoroutineMock( return_value=([], self.test_endpoint) ) - self.route_manager.mediation_record_if_id = mock.CoroutineMock( - return_value=None - ) + self.route_manager.mediation_record_if_id = mock.CoroutineMock(return_value=None) self.resolver = DIDResolver() self.resolver.register_resolver(LegacyPeerDIDResolver()) @@ -91,13 +89,9 @@ async def asyncSetUp(self): self.context = self.profile.context self.multitenant_mgr = mock.MagicMock(MultitenantManager, autospec=True) - self.context.injector.bind_instance( - BaseMultitenantManager, self.multitenant_mgr - ) + self.context.injector.bind_instance(BaseMultitenantManager, self.multitenant_mgr) - self.test_mediator_routing_keys = [ - "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRR" - ] + self.test_mediator_routing_keys = ["3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRR"] self.test_mediator_conn_id = "mediator-conn-id" self.test_mediator_endpoint = "http://mediator.example.com" @@ -112,9 +106,7 @@ async def test_create_invitation_non_multi_use_invitation_fails_on_reuse(self): requestA = ConnectionRequest( connection=ConnectionDetail( did=self.test_target_did, - did_doc=self.make_did_doc( - self.test_target_did, self.test_target_verkey - ), + did_doc=self.make_did_doc(self.test_target_did, self.test_target_verkey), ), label="SameInviteRequestA", ) @@ -161,9 +153,7 @@ async def test_create_invitation_public_no_public_invites(self): self.context.update_settings({"public_invites": False}) with self.assertRaises(ConnectionManagerError): - await self.manager.create_invitation( - public=True, my_endpoint="testendpoint" - ) + await self.manager.create_invitation(public=True, my_endpoint="testendpoint") async def test_create_invitation_public_no_public_did(self): self.context.update_settings({"public_invites": True}) @@ -187,9 +177,7 @@ async def test_create_invitation_multi_use(self): requestA = ConnectionRequest( connection=ConnectionDetail( did=self.test_target_did, - did_doc=self.make_did_doc( - self.test_target_did, self.test_target_verkey - ), + did_doc=self.make_did_doc(self.test_target_did, self.test_target_verkey), ), label="SameInviteRequestA", ) @@ -802,9 +790,7 @@ async def test_receive_request_public_did_no_public_invites(self): ConnRecord, "retrieve_by_id", autospec=True ) as mock_conn_retrieve_by_id, mock.patch.object( ConnRecord, "retrieve_request", autospec=True - ), mock.patch.object( - self.manager, "store_did_document", mock.CoroutineMock() - ): + ), mock.patch.object(self.manager, "store_did_document", mock.CoroutineMock()): with self.assertRaises(ConnectionManagerError): await self.manager.receive_request(mock_request, receipt) @@ -860,9 +846,7 @@ async def test_create_response(self): ConnRecord, "save", autospec=True ) as mock_conn_save, mock.patch.object( ConnectionResponse, "sign_field", autospec=True - ) as mock_sign, mock.patch.object( - conn_rec, "metadata_get", mock.CoroutineMock() - ): + ) as mock_sign, mock.patch.object(conn_rec, "metadata_get", mock.CoroutineMock()): await self.manager.create_response(conn_rec, "http://10.20.30.40:5060/") async def test_create_response_multitenant(self): @@ -879,9 +863,9 @@ async def 
test_create_response_multitenant(self): endpoint=self.test_mediator_endpoint, ) - with mock.patch.object( - ConnRecord, "log_state", autospec=True - ), mock.patch.object(ConnRecord, "save", autospec=True), mock.patch.object( + with mock.patch.object(ConnRecord, "log_state", autospec=True), mock.patch.object( + ConnRecord, "save", autospec=True + ), mock.patch.object( ConnRecord, "metadata_get", mock.CoroutineMock(return_value=False) ), mock.patch.object( ConnRecord, "retrieve_request", autospec=True @@ -951,9 +935,9 @@ async def test_create_response_mediation(self): # Ensure the path with new did creation is hit record.my_did = None - with mock.patch.object( - ConnRecord, "log_state", autospec=True - ), mock.patch.object(ConnRecord, "save", autospec=True), mock.patch.object( + with mock.patch.object(ConnRecord, "log_state", autospec=True), mock.patch.object( + ConnRecord, "save", autospec=True + ), mock.patch.object( record, "metadata_get", mock.CoroutineMock(return_value=False) ), mock.patch.object( ConnectionManager, "create_did_document", autospec=True @@ -965,9 +949,7 @@ async def test_create_response_mediation(self): mock.CoroutineMock(return_value=[mediation_record]), ), mock.patch.object( record, "retrieve_request", autospec=True - ), mock.patch.object( - ConnectionResponse, "sign_field", autospec=True - ): + ), mock.patch.object(ConnectionResponse, "sign_field", autospec=True): did_info = DIDInfo( did=self.test_did, verkey=self.test_verkey, @@ -1021,9 +1003,7 @@ async def test_accept_response_find_by_thread_id(self): mock_response.connection.did = self.test_target_did mock_response.connection.did_doc = mock.MagicMock(spec=DIDDoc) mock_response.connection.did_doc.did = self.test_target_did - mock_response.verify_signed_field = mock.CoroutineMock( - return_value="sig_verkey" - ) + mock_response.verify_signed_field = mock.CoroutineMock(return_value="sig_verkey") receipt = MessageReceipt(recipient_did=self.test_did, recipient_did_public=True) with mock.patch.object( @@ -1032,9 +1012,7 @@ async def test_accept_response_find_by_thread_id(self): ConnRecord, "retrieve_by_request_id", mock.CoroutineMock() ) as mock_conn_retrieve_by_req_id, mock.patch.object( MediationManager, "get_default_mediator", mock.CoroutineMock() - ), mock.patch.object( - self.manager, "store_did_document", mock.CoroutineMock() - ): + ), mock.patch.object(self.manager, "store_did_document", mock.CoroutineMock()): mock_conn_retrieve_by_req_id.return_value = mock.MagicMock( did=self.test_target_did, did_doc=mock.MagicMock(did=self.test_target_did), @@ -1055,9 +1033,7 @@ async def test_accept_response_not_found_by_thread_id_receipt_has_sender_did(sel mock_response.connection.did = self.test_target_did mock_response.connection.did_doc = mock.MagicMock(spec=DIDDoc) mock_response.connection.did_doc.did = self.test_target_did - mock_response.verify_signed_field = mock.CoroutineMock( - return_value="sig_verkey" - ) + mock_response.verify_signed_field = mock.CoroutineMock(return_value="sig_verkey") receipt = MessageReceipt(sender_did=self.test_target_did) @@ -1069,9 +1045,7 @@ async def test_accept_response_not_found_by_thread_id_receipt_has_sender_did(sel ConnRecord, "retrieve_by_did", mock.CoroutineMock() ) as mock_conn_retrieve_by_did, mock.patch.object( MediationManager, "get_default_mediator", mock.CoroutineMock() - ), mock.patch.object( - self.manager, "store_did_document", mock.CoroutineMock() - ): + ), mock.patch.object(self.manager, "store_did_document", mock.CoroutineMock()): mock_conn_retrieve_by_req_id.side_effect 
= StorageNotFoundError() mock_conn_retrieve_by_did.return_value = mock.MagicMock( did=self.test_target_did, @@ -1217,9 +1191,7 @@ async def test_accept_response_auto_send_mediation_request(self): mock_response.connection.did = self.test_target_did mock_response.connection.did_doc = mock.MagicMock(spec=DIDDoc) mock_response.connection.did_doc.did = self.test_target_did - mock_response.verify_signed_field = mock.CoroutineMock( - return_value="sig_verkey" - ) + mock_response.verify_signed_field = mock.CoroutineMock(return_value="sig_verkey") receipt = MessageReceipt(recipient_did=self.test_did, recipient_did_public=True) with mock.patch.object( @@ -1228,9 +1200,7 @@ async def test_accept_response_auto_send_mediation_request(self): ConnRecord, "retrieve_by_request_id", mock.CoroutineMock() ) as mock_conn_retrieve_by_req_id, mock.patch.object( MediationManager, "get_default_mediator", mock.CoroutineMock() - ), mock.patch.object( - self.manager, "store_did_document", mock.CoroutineMock() - ): + ), mock.patch.object(self.manager, "store_did_document", mock.CoroutineMock()): mock_conn_retrieve_by_req_id.return_value = mock.MagicMock( did=self.test_target_did, did_doc=mock.MagicMock(did=self.test_target_did), diff --git a/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py index d56494d5bd..83fdaf10b9 100644 --- a/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/connections/v1_0/tests/test_routes.py @@ -48,9 +48,7 @@ async def test_connections_list(self): STATE_INVITATION = ConnRecord.State.INVITATION STATE_ABANDONED = ConnRecord.State.ABANDONED ROLE_REQUESTER = ConnRecord.Role.REQUESTER - with mock.patch.object( - test_module, "ConnRecord", autospec=True - ) as mock_conn_rec: + with mock.patch.object(test_module, "ConnRecord", autospec=True) as mock_conn_rec: mock_conn_rec.query = mock.CoroutineMock() mock_conn_rec.Role = ConnRecord.Role mock_conn_rec.State = mock.MagicMock( @@ -132,9 +130,7 @@ async def test_connections_list_x(self): STATE_COMPLETED = ConnRecord.State.COMPLETED ROLE_REQUESTER = ConnRecord.Role.REQUESTER - with mock.patch.object( - test_module, "ConnRecord", autospec=True - ) as mock_conn_rec: + with mock.patch.object(test_module, "ConnRecord", autospec=True) as mock_conn_rec: mock_conn_rec.Role = mock.MagicMock(return_value=ROLE_REQUESTER) mock_conn_rec.State = mock.MagicMock( COMPLETED=STATE_COMPLETED, @@ -330,9 +326,7 @@ async def test_connections_retrieve_not_found(self): async def test_connections_retrieve_x(self): self.request.match_info = {"conn_id": "dummy"} mock_conn_rec = mock.MagicMock() - mock_conn_rec.serialize = mock.MagicMock( - side_effect=test_module.BaseModelError() - ) + mock_conn_rec.serialize = mock.MagicMock(side_effect=test_module.BaseModelError()) with mock.patch.object( test_module.ConnRecord, "retrieve_by_id", mock.CoroutineMock() @@ -387,7 +381,7 @@ async def test_connections_create_invitation(self): routing_keys=body["routing_keys"], my_endpoint=body["service_endpoint"], metadata=body["metadata"], - mediation_id="some-id" + mediation_id="some-id", ) mock_response.assert_called_once_with( { diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py index b93527d18f..006ec0acc0 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py +++ 
b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_handler.py @@ -19,9 +19,7 @@ class KeylistHandler(BaseHandler): async def handle(self, context: RequestContext, responder: BaseResponder): """Handle keylist message.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, Keylist) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_query_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_query_handler.py index 7d47296b64..6a18d06854 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_query_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_query_handler.py @@ -16,9 +16,7 @@ class KeylistQueryHandler(BaseHandler): async def handle(self, context: RequestContext, responder: BaseResponder): """Handle keylist-query message.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, KeylistQuery) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_handler.py index 20c63a5e15..d31cd0e9f9 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_handler.py @@ -16,9 +16,7 @@ class KeylistUpdateHandler(BaseHandler): async def handle(self, context: RequestContext, responder: BaseResponder): """Handle keylist-update messages.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, KeylistUpdate) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_response_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_response_handler.py index d2c33704d8..da9f378b72 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_response_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/keylist_update_response_handler.py @@ -16,9 +16,7 @@ class KeylistUpdateResponseHandler(BaseHandler): async def handle(self, context: RequestContext, responder: BaseResponder): """Handle keylist-update-response message.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, KeylistUpdateResponse) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_deny_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_deny_handler.py index 7a27a8fdce..3e2ad0416c 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_deny_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_deny_handler.py @@ -15,9 +15,7 @@ class MediationDenyHandler(BaseHandler): 
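The handler hunks in this area all join the same debug call onto one line; the %-style lazy formatting is kept, so the arguments are only interpolated when DEBUG logging is actually enabled. A stripped-down, self-contained sketch (the handler class here is a stand-in, not the real base class):

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger("demo")

    class DemoHandler:
        def handle(self, context):
            # Interpolation is deferred; at INFO level this call is effectively free.
            logger.debug("%s called with context %s", self.__class__.__name__, context)

    DemoHandler().handle({"connection_ready": True})  # emits nothing at INFO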
async def handle(self, context: RequestContext, responder: BaseResponder): """Handle mediate-deny message.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, MediationDeny) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_grant_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_grant_handler.py index fc597b63c7..d82a406373 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_grant_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_grant_handler.py @@ -16,9 +16,7 @@ class MediationGrantHandler(BaseHandler): async def handle(self, context: RequestContext, responder: BaseResponder): """Handle mediate-grant message.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, MediationGrant) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_request_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_request_handler.py index fedd7da258..d6c2cd26aa 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_request_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/mediation_request_handler.py @@ -14,9 +14,7 @@ class MediationRequestHandler(BaseHandler): async def handle(self, context: RequestContext, responder: BaseResponder): """Handle mediate-request message.""" - self._logger.debug( - "%s called with context %s", self.__class__.__name__, context - ) + self._logger.debug("%s called with context %s", self.__class__.__name__, context) assert isinstance(context.message, MediationRequest) if not context.connection_ready: diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_query_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_query_handler.py index 5b4275b570..08cd0c0f99 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_query_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_query_handler.py @@ -47,8 +47,7 @@ async def test_handler_no_record(self): result, _target = responder.messages[0] assert isinstance(result, CMProblemReport) assert ( - result.description["code"] - == ProblemReportReason.MEDIATION_NOT_GRANTED.value + result.description["code"] == ProblemReportReason.MEDIATION_NOT_GRANTED.value ) async def test_handler_record_not_granted(self): @@ -61,8 +60,7 @@ async def test_handler_record_not_granted(self): result, _target = responder.messages[0] assert isinstance(result, CMProblemReport) assert ( - result.description["code"] - == ProblemReportReason.MEDIATION_NOT_GRANTED.value + result.description["code"] == ProblemReportReason.MEDIATION_NOT_GRANTED.value ) async def test_handler(self): diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_update_response_handler.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_update_response_handler.py index 2b57a90bb6..734ad7177d 100644 --- 
a/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_update_response_handler.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/handlers/tests/test_keylist_update_response_handler.py @@ -43,9 +43,7 @@ async def asyncSetUp(self): self.context.connection_ready = True self.context.connection_record = ConnRecord(connection_id=TEST_CONN_ID) self.mock_event_bus = MockEventBus() - self.context.profile.context.injector.bind_instance( - EventBus, self.mock_event_bus - ) + self.context.profile.context.injector.bind_instance(EventBus, self.mock_event_bus) self.route_manager = MockRouteManager() self.context.profile.context.injector.bind_instance( RouteManager, self.route_manager @@ -79,9 +77,7 @@ async def _result_generator(): yield ConnRecord(connection_id="conn_id_1") yield ConnRecord(connection_id="conn_id_2") - async def _retrieve_by_invitation_key( - generator: AsyncGenerator, *args, **kwargs - ): + async def _retrieve_by_invitation_key(generator: AsyncGenerator, *args, **kwargs): return await generator.__anext__() with mock.patch.object( diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/manager.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/manager.py index 1b2184cf20..fa8ee6ff9a 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/manager.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/manager.py @@ -164,9 +164,7 @@ async def grant_request( """ async with self._profile.session() as session: - mediation_record = await MediationRecord.retrieve_by_id( - session, mediation_id - ) + mediation_record = await MediationRecord.retrieve_by_id(session, mediation_id) if mediation_record.role != MediationRecord.ROLE_SERVER: raise MediationManagerError( f"role({mediation_record.role}) is not {MediationRecord.ROLE_SERVER}" @@ -198,9 +196,7 @@ async def deny_request( """ async with self._profile.session() as session: - mediation_record = await MediationRecord.retrieve_by_id( - session, mediation_id - ) + mediation_record = await MediationRecord.retrieve_by_id(session, mediation_id) if mediation_record.role != MediationRecord.ROLE_SERVER: raise MediationManagerError( f"role({mediation_record.role}) is not {MediationRecord.ROLE_SERVER}" @@ -406,9 +402,7 @@ async def set_default_mediator(self, record: MediationRecord): async with self._profile.session() as session: await self._set_default_mediator_id(record.mediation_id, session) - async def _set_default_mediator_id( - self, mediation_id: str, session: ProfileSession - ): + async def _set_default_mediator_id(self, mediation_id: str, session: ProfileSession): """Set the default mediator ID (internal).""" default_record = await self._get_default_record(session) storage = session.inject(BaseStorage) @@ -609,9 +603,7 @@ async def store_update_results( to_remove.append(record) for record_for_saving in to_save: - await record_for_saving.save( - session, reason="Route successfully added." 
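On the _retrieve_by_invitation_key helper shown above (test_keylist_update_response_handler.py): it draws one pre-canned record per call from an async generator, a convenient way to give a mocked coroutine a sequence of return values. A minimal, self-contained sketch of the pattern under assumed wiring; functools.partial here is illustrative, and the test itself may bind the generator differently:

    import asyncio
    from functools import partial

    async def _results():
        yield "conn_id_1"
        yield "conn_id_2"

    async def _retrieve(generator, *args, **kwargs):
        # Each call hands back the next pre-canned value.
        return await generator.__anext__()

    async def _demo():
        side_effect = partial(_retrieve, _results())
        assert await side_effect("ignored-invitation-key") == "conn_id_1"
        assert await side_effect("ignored-invitation-key") == "conn_id_2"

    asyncio.run(_demo())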
- ) + await record_for_saving.save(session, reason="Route successfully added.") for record_for_removal in to_remove: await record_for_removal.delete_record(session) diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_key.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_key.py index e77bfa37b8..187aa175be 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_key.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_key.py @@ -21,7 +21,7 @@ def __init__( recipient_key: str = None, action: str = None, result: str = None, - **kwargs + **kwargs, ): """Initialize a KeylistKey instance. diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_updated.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_updated.py index 8765a94272..e268d22377 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_updated.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/inner/keylist_updated.py @@ -29,7 +29,7 @@ def __init__( recipient_key: str = None, action: str = None, result: str = None, - **kwargs + **kwargs, ): """Initialize a KeylistUpdated instance. diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/keylist_update.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/keylist_update.py index d736bce35c..2800500a27 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/keylist_update.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/keylist_update.py @@ -8,9 +8,7 @@ from ..message_types import KEYLIST_UPDATE, PROTOCOL_PACKAGE from .inner.keylist_update_rule import KeylistUpdateRule, KeylistUpdateRuleSchema -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.keylist_update_handler.KeylistUpdateHandler" -) +HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.keylist_update_handler.KeylistUpdateHandler" class KeylistUpdate(AgentMessage): diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_deny.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_deny.py index 5a262cd886..bef592f431 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_deny.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_deny.py @@ -3,9 +3,7 @@ from .....messaging.agent_message import AgentMessage, AgentMessageSchema from ..message_types import MEDIATE_DENY, PROTOCOL_PACKAGE -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.mediation_deny_handler.MediationDenyHandler" -) +HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.mediation_deny_handler.MediationDenyHandler" class MediationDeny(AgentMessage): diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_grant.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_grant.py index 67a371c57e..4175fe7e9f 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_grant.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/messages/mediate_grant.py @@ -61,8 +61,6 @@ class Meta: } ) routing_keys = fields.List( - fields.Str( - metadata={"description": "Keys to use for forward message packaging"} - ), + fields.Str(metadata={"description": "Keys to use for forward message packaging"}), required=False, ) diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/route_manager.py 
b/aries_cloudagent/protocols/coordinate_mediation/v1_0/route_manager.py index a7ada5873d..d148be1780 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/route_manager.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/route_manager.py @@ -339,9 +339,7 @@ async def _route_for_key( keylist_update = await mediation_mgr.remove_key(replace_key, keylist_update) responder = profile.inject(BaseResponder) - await responder.send( - keylist_update, connection_id=mediation_record.connection_id - ) + await responder.send(keylist_update, connection_id=mediation_record.connection_id) return keylist_update async def routing_info( diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py index 8d8c97231e..c3d515fecc 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/routes.py @@ -203,9 +203,7 @@ async def retrieve_mediation_request(request: web.BaseRequest): mediation_id = request.match_info["mediation_id"] try: async with context.profile.session() as session: - mediation_record = await MediationRecord.retrieve_by_id( - session, mediation_id - ) + mediation_record = await MediationRecord.retrieve_by_id(session, mediation_id) result = mediation_record.serialize() except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err @@ -226,9 +224,7 @@ async def delete_mediation_request(request: web.BaseRequest): mediation_id = request.match_info["mediation_id"] try: async with context.profile.session() as session: - mediation_record = await MediationRecord.retrieve_by_id( - session, mediation_id - ) + mediation_record = await MediationRecord.retrieve_by_id(session, mediation_id) result = mediation_record.serialize() async with context.profile.session() as session: await mediation_record.delete_record(session) @@ -453,9 +449,7 @@ async def get_default_mediator(request: web.BaseRequest): """Get default mediator.""" context: AdminRequestContext = request["context"] try: - default_mediator = await MediationManager( - context.profile - ).get_default_mediator() + default_mediator = await MediationManager(context.profile).get_default_mediator() results = default_mediator.serialize() if default_mediator else {} except (StorageError, BaseModelError) as err: raise web.HTTPBadRequest(reason=err.roll_up) from err diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py index 7718a10ee0..59b299b47e 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_mediation_manager.py @@ -67,9 +67,7 @@ def manager(profile) -> Iterable[MediationManager]: # pylint: disable=W0621 @pytest.fixture def record() -> Iterable[MediationRecord]: """Fixture for record used in tests.""" - yield MediationRecord( - state=MediationRecord.STATE_GRANTED, connection_id=TEST_CONN_ID - ) + yield MediationRecord(state=MediationRecord.STATE_GRANTED, connection_id=TEST_CONN_ID) class TestMediationManager: # pylint: disable=R0904,W0621 @@ -262,9 +260,7 @@ async def test_clear_default_mediator(self, manager: MediationManager, session): await manager.clear_default_mediator() assert not await manager.get_default_mediator_id() - async def test_clear_default_mediator_no_default_set( - self, manager: MediationManager - 
): + async def test_clear_default_mediator_no_default_set(self, manager: MediationManager): await manager.clear_default_mediator() async def test_prepare_request(self, manager): @@ -276,9 +272,7 @@ async def test_prepare_request(self, manager): async def test_request_granted_base58(self, manager): """test_request_granted.""" record, _ = await manager.prepare_request(TEST_CONN_ID) - grant = MediationGrant( - endpoint=TEST_ENDPOINT, routing_keys=[TEST_BASE58_VERKEY] - ) + grant = MediationGrant(endpoint=TEST_ENDPOINT, routing_keys=[TEST_BASE58_VERKEY]) await manager.request_granted(record, grant) assert record.state == MediationRecord.STATE_GRANTED assert record.endpoint == TEST_ENDPOINT @@ -312,9 +306,7 @@ async def test_prepare_keylist_query(self, manager): async def test_prepare_keylist_query_pagination(self, manager): """test_prepare_keylist_query_pagination.""" - query = await manager.prepare_keylist_query( - paginate_limit=10, paginate_offset=20 - ) + query = await manager.prepare_keylist_query(paginate_limit=10, paginate_offset=20) assert query.paginate.limit == 10 assert query.paginate.offset == 20 diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py index 9dfaf7d611..c065753245 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_route_manager.py @@ -693,9 +693,7 @@ async def test_mediation_routing_info_with_mediator( routing_keys=[TEST_ROUTE_VERKEY_REF], endpoint="http://mediator.example.com", ) - keys, endpoint = await mediation_route_manager.routing_info( - profile, mediation_record - ) + keys, endpoint = await mediation_route_manager.routing_info(profile, mediation_record) assert keys == mediation_record.routing_keys assert endpoint == mediation_record.endpoint diff --git a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py index 2b3ecd3c04..fe25038ab2 100644 --- a/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/coordinate_mediation/v1_0/tests/test_routes.py @@ -47,7 +47,7 @@ def setUp(self): self.mock_record = mock.MagicMock( **serialized, serialize=mock.MagicMock(return_value=serialized), - save=mock.CoroutineMock() + save=mock.CoroutineMock(), ) def test_mediation_sort_key(self): @@ -147,9 +147,7 @@ async def test_retrieve_mediation_request(self): ) as mock_response: mock_mediation_record_retrieve.return_value = self.mock_record await test_module.retrieve_mediation_request(self.request) - mock_response.assert_called_once_with( - self.mock_record.serialize.return_value - ) + mock_response.assert_called_once_with(self.mock_record.serialize.return_value) mock_mediation_record_retrieve.assert_called() async def test_retrieve_mediation_request_x_not_found(self): @@ -159,9 +157,7 @@ async def test_retrieve_mediation_request_x_not_found(self): mock.CoroutineMock(side_effect=test_module.StorageNotFoundError()), ) as mock_mediation_record_retrieve, mock.patch.object( test_module.web, "json_response" - ) as mock_response, self.assertRaises( - test_module.web.HTTPNotFound - ): + ) as mock_response, self.assertRaises(test_module.web.HTTPNotFound): await test_module.retrieve_mediation_request(self.request) async def test_retrieve_mediation_request_x_storage_error(self): @@ -171,9 +167,7 @@ async def 
test_retrieve_mediation_request_x_storage_error(self): mock.CoroutineMock(side_effect=test_module.StorageError()), ) as mock_mediation_record_retrieve, mock.patch.object( test_module.web, "json_response" - ) as mock_response, self.assertRaises( - test_module.web.HTTPBadRequest - ): + ) as mock_response, self.assertRaises(test_module.web.HTTPBadRequest): await test_module.retrieve_mediation_request(self.request) async def test_delete_mediation_request(self): @@ -186,9 +180,7 @@ async def test_delete_mediation_request(self): ) as mock_response: mock_mediation_record_retrieve.return_value = self.mock_record await test_module.delete_mediation_request(self.request) - mock_response.assert_called_once_with( - self.mock_record.serialize.return_value - ) + mock_response.assert_called_once_with(self.mock_record.serialize.return_value) mock_mediation_record_retrieve.assert_called() mock_delete_record.assert_called() @@ -199,9 +191,7 @@ async def test_delete_mediation_request_x_not_found(self): mock.CoroutineMock(side_effect=test_module.StorageNotFoundError()), ) as mock_mediation_record_retrieve, mock.patch.object( test_module.web, "json_response" - ) as mock_response, self.assertRaises( - test_module.web.HTTPNotFound - ): + ) as mock_response, self.assertRaises(test_module.web.HTTPNotFound): await test_module.delete_mediation_request(self.request) async def test_delete_mediation_request_x_storage_error(self): @@ -211,9 +201,7 @@ async def test_delete_mediation_request_x_storage_error(self): mock.CoroutineMock(side_effect=test_module.StorageError()), ) as mock_mediation_record_retrieve, mock.patch.object( test_module.web, "json_response" - ) as mock_response, self.assertRaises( - test_module.web.HTTPBadRequest - ): + ) as mock_response, self.assertRaises(test_module.web.HTTPBadRequest): await test_module.delete_mediation_request(self.request) async def test_request_mediation(self): @@ -475,9 +463,7 @@ async def test_send_keylist_update(self): ) as mock_retrieve_by_id, mock.patch.object( test_module.web, "json_response", - mock.MagicMock( - side_effect=lambda *args, **kwargs: [*args, *kwargs.values()] - ), + mock.MagicMock(side_effect=lambda *args, **kwargs: [*args, *kwargs.values()]), ) as mock_response: results, status = await test_module.send_keylist_update(self.request) assert results["updates"] == body_with_didkey["updates"] diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/message_types.py b/aries_cloudagent/protocols/did_rotate/v1_0/message_types.py index 7fe4175704..fa5f1f5a16 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/message_types.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/message_types.py @@ -2,9 +2,7 @@ from ...didcomm_prefix import DIDCommPrefix -SPEC_URI = ( - "https://github.com/hyperledger/aries-rfcs/tree/main/features/0794-did-rotate" -) +SPEC_URI = "https://github.com/hyperledger/aries-rfcs/tree/main/features/0794-did-rotate" # Message types ROTATE = "did-rotate/1.0/rotate" diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/messages/problem_report.py b/aries_cloudagent/protocols/did_rotate/v1_0/messages/problem_report.py index e5320b67f2..35ce06213b 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/messages/problem_report.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/messages/problem_report.py @@ -10,9 +10,7 @@ ) from ..message_types import PROBLEM_REPORT, PROTOCOL_PACKAGE -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.problem_report_handler.ProblemReportHandler" -) +HANDLER_CLASS = 
f"{PROTOCOL_PACKAGE}.handlers.problem_report_handler.ProblemReportHandler" class ProblemReportReason(Enum): @@ -141,9 +139,7 @@ def validate_fields(self, data, **kwargs): raise ValidationError("Rotate problem report must contain problem_items") if "did" not in data["problem_items"][0]: - raise ValidationError( - "Rotate problem report problem_items must contain did" - ) + raise ValidationError("Rotate problem report problem_items must contain did") @pre_dump def check_thread_deco(self, obj, **kwargs): diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/models/rotate_record.py b/aries_cloudagent/protocols/did_rotate/v1_0/models/rotate_record.py index d4a1cd774d..6010d5fe60 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/models/rotate_record.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/models/rotate_record.py @@ -64,9 +64,7 @@ async def retrieve_by_connection_id( cls, session, connection_id: str ) -> "RotateRecord": """Retrieve a rotate record by connection ID.""" - return await cls.retrieve_by_tag_filter( - session, {"connection_id": connection_id} - ) + return await cls.retrieve_by_tag_filter(session, {"connection_id": connection_id}) @classmethod async def retrieve_by_thread_id(cls, session, thread_id: str) -> "RotateRecord": diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_manager.py index 15c3cf80af..471d712f2f 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_manager.py @@ -34,9 +34,7 @@ async def asyncSetUp(self) -> None: self.route_manager.routing_info = mock.CoroutineMock( return_value=([], self.test_endpoint) ) - self.route_manager.mediation_record_if_id = mock.CoroutineMock( - return_value=None - ) + self.route_manager.mediation_record_if_id = mock.CoroutineMock(return_value=None) self.route_manager.mediation_record_for_connection = mock.CoroutineMock( return_value=None ) @@ -56,15 +54,11 @@ async def test_hangup(self): mock_conn_record = MockConnRecord(test_conn_id, True) mock_conn_record.delete_record = mock.CoroutineMock() - with mock.patch.object( - self.responder, "send", mock.CoroutineMock() - ) as mock_send: + with mock.patch.object(self.responder, "send", mock.CoroutineMock()) as mock_send: msg = await self.manager.hangup(mock_conn_record) mock_conn_record.delete_record.assert_called_once() mock_send.assert_called_once() - assert ( - msg._type == DIDCommPrefix.NEW.value + "/" + test_message_types.HANGUP - ) + assert msg._type == DIDCommPrefix.NEW.value + "/" + test_message_types.HANGUP async def test_receive_hangup(self): mock_conn_record = MockConnRecord(test_conn_id, True) @@ -77,14 +71,10 @@ async def test_rotate_my_did(self): mock_conn_record = MockConnRecord(test_conn_id, True) test_to_did = "did:peer:2:testdid" - with mock.patch.object( - self.responder, "send", mock.CoroutineMock() - ) as mock_send: + with mock.patch.object(self.responder, "send", mock.CoroutineMock()) as mock_send: msg = await self.manager.rotate_my_did(mock_conn_record, test_to_did) mock_send.assert_called_once() - assert ( - msg._type == DIDCommPrefix.NEW.value + "/" + test_message_types.ROTATE - ) + assert msg._type == DIDCommPrefix.NEW.value + "/" + test_message_types.ROTATE async def test_receive_rotate(self): mock_conn_record = MockConnRecord(test_conn_id, True) diff --git a/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py 
index f596005cf3..1cb0f16a69 100644 --- a/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/did_rotate/v1_0/tests/test_routes.py @@ -57,9 +57,7 @@ async def asyncSetUp(self): "DIDRotateManager", autospec=True, return_value=mock.MagicMock( - rotate_my_did=mock.CoroutineMock( - return_value=generate_mock_rotate_message() - ) + rotate_my_did=mock.CoroutineMock(return_value=generate_mock_rotate_message()) ), ) async def test_rotate(self, *_): diff --git a/aries_cloudagent/protocols/didexchange/v1_0/handlers/request_handler.py b/aries_cloudagent/protocols/didexchange/v1_0/handlers/request_handler.py index 594df441a5..eb39643c4c 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/handlers/request_handler.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/handlers/request_handler.py @@ -48,9 +48,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): conn_rec, mediation_id=mediation_id, ) - await responder.send_reply( - response, connection_id=conn_rec.connection_id - ) + await responder.send_reply(response, connection_id=conn_rec.connection_id) conn_rec.state = ConnRecord.State.RESPONSE.rfc23 async with context.session() as session: await conn_rec.save(session, reason="Sent connection response") diff --git a/aries_cloudagent/protocols/didexchange/v1_0/handlers/response_handler.py b/aries_cloudagent/protocols/didexchange/v1_0/handlers/response_handler.py index 26e3e18659..9ecedf58a5 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/handlers/response_handler.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/handlers/response_handler.py @@ -26,9 +26,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): profile = context.profile mgr = DIDXManager(profile) try: - conn_rec = await mgr.accept_response( - context.message, context.message_receipt - ) + conn_rec = await mgr.accept_response(context.message, context.message_receipt) except DIDXManagerError as e: report, targets = await mgr.manager_error_to_problem_report( e, context.message, context.message_receipt diff --git a/aries_cloudagent/protocols/didexchange/v1_0/manager.py b/aries_cloudagent/protocols/didexchange/v1_0/manager.py index 17ec15b226..953e7ec73d 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/manager.py @@ -156,9 +156,7 @@ async def receive_invitation( # Save the invitation for later processing await conn_rec.attach_invitation(session, invitation) if not conn_rec.invitation_key and conn_rec.their_public_did: - targets = await self.resolve_connection_targets( - conn_rec.their_public_did - ) + targets = await self.resolve_connection_targets(conn_rec.their_public_did) conn_rec.invitation_key = targets[0].recipient_keys[0] await self._route_manager.save_mediator_for_connection( @@ -596,9 +594,7 @@ async def _receive_request_pairwise_did( async with self.profile.transaction() as txn: # Attach the connection request so it can be found and responded to - await conn_rec.save( - txn, reason="Received connection request from invitation" - ) + await conn_rec.save(txn, reason="Received connection request from invitation") await conn_rec.attach_request(txn, request) await txn.commit() @@ -886,9 +882,7 @@ async def create_response( ) if send_mediation_request: temp_mediation_mgr = MediationManager(self.profile) - _, request = await temp_mediation_mgr.prepare_request( - conn_rec.connection_id - ) + _, request = await 
temp_mediation_mgr.prepare_request(conn_rec.connection_id) responder = self.profile.inject(BaseResponder) await responder.send(request, connection_id=conn_rec.connection_id) @@ -1206,9 +1200,7 @@ async def verify_rotate( if not signed_diddoc_bytes: raise DIDXManagerError("DID rotate attachment is not signed.") if not await attached.data.verify(wallet, invi_key): - raise DIDXManagerError( - "DID rotate attachment signature failed verification" - ) + raise DIDXManagerError("DID rotate attachment signature failed verification") return signed_diddoc_bytes.decode() diff --git a/aries_cloudagent/protocols/didexchange/v1_0/message_types.py b/aries_cloudagent/protocols/didexchange/v1_0/message_types.py index a1b2e3873b..792e2c8722 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/message_types.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/message_types.py @@ -27,8 +27,6 @@ DIDX_REQUEST: f"{PROTOCOL_PACKAGE}.messages.request.DIDXRequest", DIDX_RESPONSE: f"{PROTOCOL_PACKAGE}.messages.response.DIDXResponse", DIDX_COMPLETE: f"{PROTOCOL_PACKAGE}.messages.complete.DIDXComplete", - PROBLEM_REPORT: ( - f"{PROTOCOL_PACKAGE}.messages.problem_report.DIDXProblemReport" - ), + PROBLEM_REPORT: (f"{PROTOCOL_PACKAGE}.messages.problem_report.DIDXProblemReport"), } ) diff --git a/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py b/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py index 6d4a700ae7..1d8d0eba59 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/messages/tests/test_problem_report.py @@ -36,7 +36,5 @@ def test_missing_code(): def test_unrecognized_code(): with mock.patch.object(test_module, "LOGGER", autospec=True) as mock_logger: - DIDXProblemReport.deserialize( - {"description": {"code": "unknown", "en": "test"}} - ) + DIDXProblemReport.deserialize({"description": {"code": "unknown", "en": "test"}}) mock_logger.warning.assert_called_once() diff --git a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py index 17e1faf3a1..d02235f9f0 100644 --- a/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/didexchange/v1_0/tests/test_manager.py @@ -85,9 +85,7 @@ async def asyncSetUp(self): self.route_manager.routing_info = mock.CoroutineMock( return_value=([], self.test_endpoint) ) - self.route_manager.mediation_record_if_id = mock.CoroutineMock( - return_value=None - ) + self.route_manager.mediation_record_if_id = mock.CoroutineMock(return_value=None) self.route_manager.mediation_record_for_connection = mock.CoroutineMock( return_value=None ) @@ -133,12 +131,8 @@ async def asyncSetUp(self): self.context.injector.bind_instance(DIDResolver, self.resolver) self.multitenant_mgr = mock.MagicMock(MultitenantManager, autospec=True) - self.context.injector.bind_instance( - BaseMultitenantManager, self.multitenant_mgr - ) - self.multitenant_mgr.get_default_mediator = mock.CoroutineMock( - return_value=None - ) + self.context.injector.bind_instance(BaseMultitenantManager, self.multitenant_mgr) + self.multitenant_mgr.get_default_mediator = mock.CoroutineMock(return_value=None) self.manager = DIDXManager(self.profile) assert self.manager.profile @@ -638,9 +632,7 @@ async def test_receive_request_explicit_public_did(self): ) as mock_response, mock.patch.object( self.manager, "verify_diddoc", - mock.CoroutineMock( - 
return_value={"id": "did:sov:" + TestConfig.test_did} - ), + mock.CoroutineMock(return_value={"id": "did:sov:" + TestConfig.test_did}), ), mock.patch.object( self.manager, "create_did_document", mock.CoroutineMock() ) as mock_create_did_doc, mock.patch.object( @@ -1142,9 +1134,7 @@ async def test_receive_request_public_did_no_auto_accept(self): ) as mock_create_did_doc, mock.patch.object( self.manager, "verify_diddoc", - mock.CoroutineMock( - return_value={"id": "did:sov:" + TestConfig.test_did} - ), + mock.CoroutineMock(return_value={"id": "did:sov:" + TestConfig.test_did}), ), mock.patch.object( self.manager, "store_did_document", mock.CoroutineMock() ): @@ -1217,9 +1207,7 @@ async def test_receive_request_implicit_public_did_not_enabled(self): ) as mock_did_posture, mock.patch.object( self.manager, "verify_diddoc", - mock.CoroutineMock( - return_value={"id": "did:sov:" + TestConfig.test_did} - ), + mock.CoroutineMock(return_value={"id": "did:sov:" + TestConfig.test_did}), ), mock.patch.object( self.manager, "store_did_document", mock.CoroutineMock() ): @@ -1284,9 +1272,7 @@ async def test_receive_request_implicit_public_did(self): ) as mock_did_posture, mock.patch.object( self.manager, "verify_diddoc", - mock.CoroutineMock( - return_value={"id": "did:sov:" + TestConfig.test_did} - ), + mock.CoroutineMock(return_value={"id": "did:sov:" + TestConfig.test_did}), ), mock.patch.object( self.manager, "store_did_document", mock.CoroutineMock() ): @@ -1371,9 +1357,7 @@ async def test_receive_request_peer_did(self): ) as mock_response, mock.patch.object( self.manager, "verify_diddoc", - mock.CoroutineMock( - return_value={"id": "did:sov:" + TestConfig.test_did} - ), + mock.CoroutineMock(return_value={"id": "did:sov:" + TestConfig.test_did}), ), mock.patch.object( self.manager, "store_did_document", mock.CoroutineMock() ): @@ -1449,9 +1433,7 @@ async def test_receive_request_peer_did_not_found_x(self): ) async def test_create_response(self): - conn_rec = ConnRecord( - connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23 - ) + conn_rec = ConnRecord(connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23) with mock.patch.object( test_module.ConnRecord, "retrieve_request", mock.CoroutineMock() @@ -1464,9 +1446,7 @@ async def test_create_response(self): ) as mock_response, mock.patch.object( self.manager, "create_did_document", mock.CoroutineMock() ) as mock_create_did_doc: - mock_create_did_doc.return_value = mock.MagicMock( - serialize=mock.MagicMock() - ) + mock_create_did_doc.return_value = mock.MagicMock(serialize=mock.MagicMock()) mock_attach_deco.data_base64 = mock.MagicMock( return_value=mock.MagicMock( data=mock.MagicMock(sign=mock.CoroutineMock()) @@ -1572,9 +1552,7 @@ async def test_create_response_mediation_id_invalid_conn_state(self): assert "Connection not in state" in str(context.exception) async def test_create_response_multitenant(self): - conn_rec = ConnRecord( - connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23 - ) + conn_rec = ConnRecord(connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23) self.manager.profile.context.update_settings( { @@ -1599,9 +1577,7 @@ async def test_create_response_multitenant(self): method=SOV, key_type=ED25519, ) - mock_create_did_doc.return_value = mock.MagicMock( - serialize=mock.MagicMock() - ) + mock_create_did_doc.return_value = mock.MagicMock(serialize=mock.MagicMock()) mock_attach_deco.data_base64 = mock.MagicMock( return_value=mock.MagicMock( data=mock.MagicMock(sign=mock.CoroutineMock()) @@ -1632,9 +1608,7 @@ async 
def test_create_response_conn_rec_my_did(self): InMemoryWallet, "get_local_did", mock.CoroutineMock() ) as mock_get_loc_did: mock_get_loc_did.return_value = self.did_info - mock_create_did_doc.return_value = mock.MagicMock( - serialize=mock.MagicMock() - ) + mock_create_did_doc.return_value = mock.MagicMock(serialize=mock.MagicMock()) mock_attach_deco.data_base64 = mock.MagicMock( return_value=mock.MagicMock( data=mock.MagicMock(sign=mock.CoroutineMock()) @@ -1752,9 +1726,7 @@ async def test_create_response_use_public_did(self): ED25519, ) - conn_rec = ConnRecord( - connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23 - ) + conn_rec = ConnRecord(connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23) with mock.patch.object( test_module.ConnRecord, "retrieve_request", mock.CoroutineMock() @@ -1767,9 +1739,7 @@ async def test_create_response_use_public_did(self): ) as mock_response, mock.patch.object( self.manager, "create_did_document", mock.CoroutineMock() ) as mock_create_did_doc: - mock_create_did_doc.return_value = mock.MagicMock( - serialize=mock.MagicMock() - ) + mock_create_did_doc.return_value = mock.MagicMock(serialize=mock.MagicMock()) mock_attach_deco.data_base64 = mock.MagicMock( return_value=mock.MagicMock( data=mock.MagicMock(sign=mock.CoroutineMock()) @@ -1781,9 +1751,7 @@ async def test_create_response_use_public_did(self): ) async def test_create_response_use_public_did_x_no_public_did(self): - conn_rec = ConnRecord( - connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23 - ) + conn_rec = ConnRecord(connection_id="dummy", state=ConnRecord.State.REQUEST.rfc23) with mock.patch.object( test_module.ConnRecord, "retrieve_request", mock.CoroutineMock() @@ -1796,9 +1764,7 @@ async def test_create_response_use_public_did_x_no_public_did(self): ) as mock_response, mock.patch.object( self.manager, "create_did_document", mock.CoroutineMock() ) as mock_create_did_doc: - mock_create_did_doc.return_value = mock.MagicMock( - serialize=mock.MagicMock() - ) + mock_create_did_doc.return_value = mock.MagicMock(serialize=mock.MagicMock()) mock_attach_deco.data_base64 = mock.MagicMock( return_value=mock.MagicMock( data=mock.MagicMock(sign=mock.CoroutineMock()) @@ -2045,9 +2011,7 @@ async def test_accept_response_find_by_thread_id_no_did_doc_attached(self): mock_response.did = TestConfig.test_target_did mock_response.did_doc_attach = None mock_response.did_rotate_attach.data.verify = mock.AsyncMock(return_value=True) - mock_response.did_rotate_attach.data.signed = ( - TestConfig.test_target_did.encode() - ) + mock_response.did_rotate_attach.data.signed = TestConfig.test_target_did.encode() receipt = MessageReceipt( recipient_did=TestConfig.test_did, @@ -2135,9 +2099,7 @@ async def test_accept_response_find_by_thread_id_did_mismatch(self): verify=mock.CoroutineMock(return_value=True), signed=mock.MagicMock( decode=mock.MagicMock( - return_value=json.dumps( - {"id": "did:sov:" + TestConfig.test_did} - ) + return_value=json.dumps({"id": "did:sov:" + TestConfig.test_did}) ) ), ) diff --git a/aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py b/aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py index 7b506e45a6..59d46f043b 100644 --- a/aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py +++ b/aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_disclose_handler.py @@ -35,9 +35,7 @@ async def test_disclose(self, request_context: RequestContext): disclose_msg = Disclose( protocols=[ { 
- "pid": DIDCommPrefix.qualify_current( - "test_proto/v1.0/test_message" - ), + "pid": DIDCommPrefix.qualify_current("test_proto/v1.0/test_message"), "roles": [], } ] @@ -67,9 +65,7 @@ async def test_disclose_connection_not_ready(self, request_context): disclose_msg = Disclose( protocols=[ { - "pid": DIDCommPrefix.qualify_current( - "test_proto/v1.0/test_message" - ), + "pid": DIDCommPrefix.qualify_current("test_proto/v1.0/test_message"), "roles": [], } ] diff --git a/aries_cloudagent/protocols/discovery/v1_0/models/discovery_record.py b/aries_cloudagent/protocols/discovery/v1_0/models/discovery_record.py index fa06199d6d..087e626f61 100644 --- a/aries_cloudagent/protocols/discovery/v1_0/models/discovery_record.py +++ b/aries_cloudagent/protocols/discovery/v1_0/models/discovery_record.py @@ -44,9 +44,7 @@ def __init__( **kwargs, ): """Initialize a new V10DiscoveryExchangeRecord.""" - super().__init__( - discovery_exchange_id, state or self.STATE_QUERY_SENT, **kwargs - ) + super().__init__(discovery_exchange_id, state or self.STATE_QUERY_SENT, **kwargs) self._id = discovery_exchange_id self.connection_id = connection_id self.thread_id = thread_id diff --git a/aries_cloudagent/protocols/discovery/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/discovery/v1_0/tests/test_manager.py index fbc46a7912..b4fa3071b3 100644 --- a/aries_cloudagent/protocols/discovery/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/discovery/v1_0/tests/test_manager.py @@ -33,9 +33,7 @@ async def asyncSetUp(self): self.disclose = Disclose( protocols=[ { - "pid": DIDCommPrefix.qualify_current( - "test_proto/v1.0/test_message" - ), + "pid": DIDCommPrefix.qualify_current("test_proto/v1.0/test_message"), "roles": [], } ] diff --git a/aries_cloudagent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py b/aries_cloudagent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py index cd592aff3a..82d3cf0318 100644 --- a/aries_cloudagent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py +++ b/aries_cloudagent/protocols/discovery/v2_0/handlers/tests/test_disclosures_handler.py @@ -45,9 +45,7 @@ async def test_disclosures(self, request_context): ] ) test_queries = [ - QueryItem( - feature_type="protocol", match="https://didcomm.org/tictactoe/1.*" - ), + QueryItem(feature_type="protocol", match="https://didcomm.org/tictactoe/1.*"), QueryItem(feature_type="goal-code", match="aries.*"), ] queries = Queries(queries=test_queries) @@ -85,9 +83,7 @@ async def test_disclosures_connection_id_no_thid(self, request_context): ] ) test_queries = [ - QueryItem( - feature_type="protocol", match="https://didcomm.org/tictactoe/1.*" - ), + QueryItem(feature_type="protocol", match="https://didcomm.org/tictactoe/1.*"), QueryItem(feature_type="goal-code", match="aries.*"), ] queries = Queries(queries=test_queries) @@ -129,9 +125,7 @@ async def test_disclosures_no_conn_id_no_thid(self, request_context): ] ) test_queries = [ - QueryItem( - feature_type="protocol", match="https://didcomm.org/tictactoe/1.*" - ), + QueryItem(feature_type="protocol", match="https://didcomm.org/tictactoe/1.*"), QueryItem(feature_type="goal-code", match="aries.*"), ] disclosures.assign_thread_id("test123") diff --git a/aries_cloudagent/protocols/discovery/v2_0/tests/test_manager.py b/aries_cloudagent/protocols/discovery/v2_0/tests/test_manager.py index 8106b2c2bd..9b3e9b4ce0 100644 --- a/aries_cloudagent/protocols/discovery/v2_0/tests/test_manager.py +++ 
b/aries_cloudagent/protocols/discovery/v2_0/tests/test_manager.py @@ -172,9 +172,7 @@ async def test_proactive_disclosure_no_responder(self): self._caplog.set_level(logging.WARNING) mock_receive_query.return_value = Disclosures() await self.manager.proactive_disclose_features("test123") - assert ( - "Unable to send discover-features v2 disclosures" in self._caplog.text - ) + assert "Unable to send discover-features v2 disclosures" in self._caplog.text async def test_check_if_disclosure_received(self): with mock.patch.object( diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/messages_attach.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/messages_attach.py index 366dce27b9..19fab4a653 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/messages_attach.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/messages_attach.py @@ -31,7 +31,7 @@ def __init__( mechanism: str = None, taaDigest: str = None, time: int = None, - **kwargs + **kwargs, ): """Initialize the attached message object. diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_cancel_transaction.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_cancel_transaction.py index 1ae989233f..034bec8356 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_cancel_transaction.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_cancel_transaction.py @@ -60,9 +60,7 @@ def test_serialize(self, mock_cancel_transaction_schema_dump): self.cancel_transaction ) - assert ( - cancel_transaction_dict is mock_cancel_transaction_schema_dump.return_value - ) + assert cancel_transaction_dict is mock_cancel_transaction_schema_dump.return_value class TestCancelTransactionSchema(IsolatedAsyncioTestCase, TestConfig): diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_endorsed_transaction_response.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_endorsed_transaction_response.py index 44ecbd3ada..8cc5765167 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_endorsed_transaction_response.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_endorsed_transaction_response.py @@ -40,8 +40,7 @@ def setUp(self): def test_init(self): """Test initialization.""" assert ( - self.endorsed_transaction_response.transaction_id - == self.test_transaction_id + self.endorsed_transaction_response.transaction_id == self.test_transaction_id ) assert self.endorsed_transaction_response.thread_id == self.test_thread_id assert ( @@ -53,9 +52,8 @@ def test_init(self): def test_type(self): """Test type.""" - assert ( - self.endorsed_transaction_response._type - == DIDCommPrefix.qualify_current(ENDORSED_TRANSACTION_RESPONSE) + assert self.endorsed_transaction_response._type == DIDCommPrefix.qualify_current( + ENDORSED_TRANSACTION_RESPONSE ) @mock.patch( diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_refused_transaction_response.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_refused_transaction_response.py index 72ab421091..cf6869062d 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_refused_transaction_response.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_refused_transaction_response.py @@ -78,9 +78,7 @@ def test_serialize(self, 
mock_refused_transaction_response_schema_dump): """ Test serialization. """ - refused_transaction_response_dict = ( - self.refused_transaction_response.serialize() - ) + refused_transaction_response_dict = self.refused_transaction_response.serialize() mock_refused_transaction_response_schema_dump.assert_called_once_with( self.refused_transaction_response ) diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_request.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_request.py index 938e07f3c6..db643fe065 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_request.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_request.py @@ -104,8 +104,7 @@ def test_serialize(self, mock_transaction_request_schema_dump): ) assert ( - transaction_request_dict - is mock_transaction_request_schema_dump.return_value + transaction_request_dict is mock_transaction_request_schema_dump.return_value ) diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_resend.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_resend.py index a7db7be9b4..5a2a0df253 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_resend.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/messages/tests/test_transaction_resend.py @@ -60,9 +60,7 @@ def test_serialize(self, mock_transaction_resend_schema_dump): self.transaction_resend ) - assert ( - transaction_resend_dict is mock_transaction_resend_schema_dump.return_value - ) + assert transaction_resend_dict is mock_transaction_resend_schema_dump.return_value class TestTransactionResendSchema(IsolatedAsyncioTestCase, TestConfig): diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py index f4ab0f2ebc..a716277120 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/routes.py @@ -114,9 +114,7 @@ class EndorserInfoSchema(OpenAPISchema): endorser_did = fields.Str(required=True, metadata={"description": "Endorser DID"}) - endorser_name = fields.Str( - required=False, metadata={"description": "Endorser Name"} - ) + endorser_name = fields.Str(required=False, metadata={"description": "Endorser Name"}) @docs( @@ -297,9 +295,7 @@ async def endorse_transaction_response(request: web.BaseRequest): endorser_did = request.query.get("endorser_did") try: async with context.profile.session() as session: - transaction = await TransactionRecord.retrieve_by_id( - session, transaction_id - ) + transaction = await TransactionRecord.retrieve_by_id(session, transaction_id) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) @@ -368,9 +364,7 @@ async def refuse_transaction_response(request: web.BaseRequest): transaction_id = request.match_info["tran_id"] try: async with context.profile.session() as session: - transaction = await TransactionRecord.retrieve_by_id( - session, transaction_id - ) + transaction = await TransactionRecord.retrieve_by_id(session, transaction_id) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) @@ -434,9 +428,7 @@ async def cancel_transaction(request: web.BaseRequest): transaction_id = request.match_info["tran_id"] try: async with context.profile.session() 
as session: - transaction = await TransactionRecord.retrieve_by_id( - session, transaction_id - ) + transaction = await TransactionRecord.retrieve_by_id(session, transaction_id) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) @@ -499,9 +491,7 @@ async def transaction_resend(request: web.BaseRequest): transaction_id = request.match_info["tran_id"] try: async with context.profile.session() as session: - transaction = await TransactionRecord.retrieve_by_id( - session, transaction_id - ) + transaction = await TransactionRecord.retrieve_by_id(session, transaction_id) connection_record = await ConnRecord.retrieve_by_id( session, transaction.connection_id ) @@ -669,9 +659,7 @@ async def transaction_write(request: web.BaseRequest): transaction_id = request.match_info["tran_id"] try: async with context.profile.session() as session: - transaction = await TransactionRecord.retrieve_by_id( - session, transaction_id - ) + transaction = await TransactionRecord.retrieve_by_id(session, transaction_id) except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err except BaseModelError as err: diff --git a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py index d924b93216..15be2bf542 100644 --- a/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/endorse_transaction/v1_0/tests/test_routes.py @@ -1553,18 +1553,14 @@ async def test_transaction_write_schema_txn(self): mock_txn_mgr.return_value.complete_transaction = mock.CoroutineMock() mock_txn_mgr.return_value.complete_transaction.return_value = ( - mock.CoroutineMock( - serialize=mock.MagicMock(return_value={"...": "..."}) - ), + mock.CoroutineMock(serialize=mock.MagicMock(return_value={"...": "..."})), mock.CoroutineMock(), ) mock_txn_rec_retrieve.return_value = mock.MagicMock( serialize=mock.MagicMock(), state=TransactionRecord.STATE_TRANSACTION_ENDORSED, - messages_attach=[ - {"data": {"json": json.dumps({"message": "attached"})}} - ], + messages_attach=[{"data": {"json": json.dumps({"message": "attached"})}}], ) await test_module.transaction_write(self.request) mock_response.assert_called_once_with({"...": "..."}) @@ -1599,9 +1595,7 @@ async def test_transaction_write_wrong_state_x(self): mock_txn_rec_retrieve.return_value = mock.MagicMock( serialize=mock.MagicMock(return_value={"...": "..."}), state=TransactionRecord.STATE_TRANSACTION_CREATED, - messages_attach=[ - {"data": {"json": json.dumps({"message": "attached"})}} - ], + messages_attach=[{"data": {"json": json.dumps({"message": "attached"})}}], ) with self.assertRaises(test_module.web.HTTPForbidden): @@ -1623,9 +1617,7 @@ async def test_transaction_write_schema_txn_complete_x(self): mock_txn_rec_retrieve.return_value = mock.MagicMock( serialize=mock.MagicMock(return_value={"...": "..."}), state=TransactionRecord.STATE_TRANSACTION_ENDORSED, - messages_attach=[ - {"data": {"json": json.dumps({"message": "attached"})}} - ], + messages_attach=[{"data": {"json": json.dumps({"message": "attached"})}}], ) with self.assertRaises(test_module.web.HTTPBadRequest): diff --git a/aries_cloudagent/protocols/introduction/v0_1/demo_service.py b/aries_cloudagent/protocols/introduction/v0_1/demo_service.py index 3f54dd6f8c..9050fd2305 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/demo_service.py +++ b/aries_cloudagent/protocols/introduction/v0_1/demo_service.py @@ -42,18 +42,13 @@ async 
def start_introduction( message: The message to use when requesting the invitation """ try: - init_connection = await ConnRecord.retrieve_by_id( - session, init_connection_id - ) + init_connection = await ConnRecord.retrieve_by_id(session, init_connection_id) except StorageNotFoundError: raise IntroductionError( f"Initiator connection {init_connection_id} not found" ) - if ( - ConnRecord.State.get(init_connection.state) - is not ConnRecord.State.COMPLETED - ): + if ConnRecord.State.get(init_connection.state) is not ConnRecord.State.COMPLETED: raise IntroductionError( f"Initiator connection {init_connection_id} not active" ) @@ -63,17 +58,13 @@ async def start_introduction( session, target_connection_id ) except StorageNotFoundError: - raise IntroductionError( - "Target connection {target_connection_id} not found" - ) + raise IntroductionError("Target connection {target_connection_id} not found") if ( ConnRecord.State.get(target_connection.state) is not ConnRecord.State.COMPLETED ): - raise IntroductionError( - "Target connection {target_connection_id} not active" - ) + raise IntroductionError("Target connection {target_connection_id} not active") msg = IntroInvitationRequest( responder=init_connection.their_label, diff --git a/aries_cloudagent/protocols/introduction/v0_1/handlers/invitation_request_handler.py b/aries_cloudagent/protocols/introduction/v0_1/handlers/invitation_request_handler.py index c1d284c1f3..0b137fc2e2 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/handlers/invitation_request_handler.py +++ b/aries_cloudagent/protocols/introduction/v0_1/handlers/invitation_request_handler.py @@ -33,9 +33,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): profile = context.profile connection_mgr = ConnectionManager(profile) _connection, invite = await connection_mgr.create_invitation() - response = IntroInvitation( - invitation=invite, message=context.message.message - ) + response = IntroInvitation(invitation=invite, message=context.message.message) response.assign_thread_from(context.message) response.assign_trace_from(context.message) await responder.send_reply(response) diff --git a/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_forward_invitation.py b/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_forward_invitation.py index 2cb11623e6..c84703c7f5 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_forward_invitation.py +++ b/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_forward_invitation.py @@ -36,13 +36,10 @@ def test_init(self): def test_type(self): """Test type.""" - assert self.invitation._type == DIDCommPrefix.qualify_current( - FORWARD_INVITATION - ) + assert self.invitation._type == DIDCommPrefix.qualify_current(FORWARD_INVITATION) @mock.patch( - f"{PROTOCOL_PACKAGE}.messages." - "forward_invitation.ForwardInvitationSchema.load" + f"{PROTOCOL_PACKAGE}.messages." "forward_invitation.ForwardInvitationSchema.load" ) def test_deserialize(self, mock_invitation_schema_load): """ @@ -56,8 +53,7 @@ def test_deserialize(self, mock_invitation_schema_load): assert invitation is mock_invitation_schema_load.return_value @mock.patch( - f"{PROTOCOL_PACKAGE}.messages." - "forward_invitation.ForwardInvitationSchema.dump" + f"{PROTOCOL_PACKAGE}.messages." 
"forward_invitation.ForwardInvitationSchema.dump" ) def test_serialize(self, mock_invitation_schema_dump): """ diff --git a/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_invitation_request.py b/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_invitation_request.py index b0224c6c58..63e8b3368f 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_invitation_request.py +++ b/aries_cloudagent/protocols/introduction/v0_1/messages/tests/test_invitation_request.py @@ -30,8 +30,7 @@ def test_type(self): assert self.request._type == DIDCommPrefix.qualify_current(INVITATION_REQUEST) @mock.patch( - f"{PROTOCOL_PACKAGE}.messages." - "invitation_request.InvitationRequestSchema.load" + f"{PROTOCOL_PACKAGE}.messages." "invitation_request.InvitationRequestSchema.load" ) def test_deserialize(self, mock_invitation_schema_load): """ @@ -45,8 +44,7 @@ def test_deserialize(self, mock_invitation_schema_load): assert request is mock_invitation_schema_load.return_value @mock.patch( - f"{PROTOCOL_PACKAGE}.messages." - "invitation_request.InvitationRequestSchema.dump" + f"{PROTOCOL_PACKAGE}.messages." "invitation_request.InvitationRequestSchema.dump" ) def test_serialize(self, mock_invitation_schema_dump): """ diff --git a/aries_cloudagent/protocols/introduction/v0_1/tests/test_service.py b/aries_cloudagent/protocols/introduction/v0_1/tests/test_service.py index 1d791b551f..d607913ebb 100644 --- a/aries_cloudagent/protocols/introduction/v0_1/tests/test_service.py +++ b/aries_cloudagent/protocols/introduction/v0_1/tests/test_service.py @@ -35,9 +35,7 @@ def setUp(self): _id="#inline", _type="did-communication", did=TEST_DID, - recipient_keys=[ - DIDKey.from_public_key_b58(TEST_VERKEY, ED25519).did - ], + recipient_keys=[DIDKey.from_public_key_b58(TEST_VERKEY, ED25519).did], routing_keys=[ DIDKey.from_public_key_b58(TEST_ROUTE_VERKEY, ED25519).did ], diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_issue_handler.py b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_issue_handler.py index 208076b60e..8018c80e2d 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_issue_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_issue_handler.py @@ -68,9 +68,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): # Automatically move to next state if flag is set if cred_ex_record and context.settings.get("debug.auto_store_credential"): try: - cred_ex_record = await credential_manager.store_credential( - cred_ex_record - ) + cred_ex_record = await credential_manager.store_credential(cred_ex_record) except ( BaseModelError, CredentialManagerError, diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_offer_handler.py b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_offer_handler.py index a389e5176b..e7a736c0ea 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_offer_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_offer_handler.py @@ -53,9 +53,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) connection_id = ( - context.connection_record.connection_id - if context.connection_record - else None + context.connection_record.connection_id if context.connection_record else None ) credential_manager = CredentialManager(profile) @@ -77,9 +75,7 @@ async def handle(self, context: 
RequestContext, responder: BaseResponder): holder_did = default_did_from_verkey(oob_record.our_recipient_key) # If auto respond is turned on, automatically reply with credential request - if cred_ex_record and context.settings.get( - "debug.auto_respond_credential_offer" - ): + if cred_ex_record and context.settings.get("debug.auto_respond_credential_offer"): credential_request_message = None try: ( diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_proposal_handler.py b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_proposal_handler.py index 5b4ff0b1ce..354c82012c 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_proposal_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/credential_proposal_handler.py @@ -40,9 +40,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): if context.connection_record and not context.connection_ready: raise HandlerException("Connection used for credential proposal not ready") elif not context.connection_record: - raise HandlerException( - "Connectionless not supported for credential proposal" - ) + raise HandlerException("Connectionless not supported for credential proposal") credential_manager = CredentialManager(profile) cred_ex_record = await credential_manager.receive_proposal( diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_ack_handler.py b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_ack_handler.py index aed4264e6b..861fbcda8f 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_ack_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_ack_handler.py @@ -60,9 +60,7 @@ async def test_called_not_ready(self): responder = MockResponder() with self.assertRaises(test_module.HandlerException) as err: await handler.handle(request_context, responder) - assert ( - err.exception.message == "Connection used for credential ack not ready" - ) + assert err.exception.message == "Connection used for credential ack not ready" async def test_called_no_connection_no_oob(self): request_context = RequestContext.test_context() diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_offer_handler.py b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_offer_handler.py index 65f7ca1541..c5323518d0 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_offer_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/handlers/tests/test_credential_offer_handler.py @@ -137,8 +137,7 @@ async def test_called_not_ready(self): with self.assertRaises(test_module.HandlerException) as err: await handler.handle(request_context, responder) assert ( - err.exception.message - == "Connection used for credential offer not ready" + err.exception.message == "Connection used for credential offer not ready" ) assert not responder.messages diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/manager.py b/aries_cloudagent/protocols/issue_credential/v1_0/manager.py index 3714efa0ea..e0d52fd81c 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/manager.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/manager.py @@ -169,9 +169,7 @@ async def create_proposal( cred_def_id=cred_def_id, issuer_did=issuer_did, ) - credential_proposal_message.assign_trace_decorator( - 
self._profile.settings, trace - ) + credential_proposal_message.assign_trace_decorator(self._profile.settings, trace) if auto_remove is None: auto_remove = not self._profile.settings.get("preserve_exchange_records") @@ -350,15 +348,13 @@ async def receive_offer( # Get credential exchange record (holder sent proposal first) # or create it (issuer sent offer first) try: - cred_ex_record = ( - await ( - V10CredentialExchange.retrieve_by_connection_and_thread( - txn, - connection_id, - message._thread_id, - role=V10CredentialExchange.ROLE_HOLDER, - for_update=True, - ) + cred_ex_record = await ( + V10CredentialExchange.retrieve_by_connection_and_thread( + txn, + connection_id, + message._thread_id, + role=V10CredentialExchange.ROLE_HOLDER, + for_update=True, ) ) except StorageNotFoundError: # issuer sent this offer free of any proposal @@ -538,15 +534,13 @@ async def receive_request( async with self._profile.transaction() as txn: try: - cred_ex_record = ( - await ( - V10CredentialExchange.retrieve_by_connection_and_thread( - txn, - connection_id, - message._thread_id, - role=V10CredentialExchange.ROLE_ISSUER, - for_update=True, - ) + cred_ex_record = await ( + V10CredentialExchange.retrieve_by_connection_and_thread( + txn, + connection_id, + message._thread_id, + role=V10CredentialExchange.ROLE_ISSUER, + for_update=True, ) ) except StorageNotFoundError as ex: @@ -650,9 +644,7 @@ async def issue_credential( if revocable: revoc = IndyRevocation(self._profile) - registry_info = await revoc.get_or_create_active_registry( - cred_def_id - ) + registry_info = await revoc.get_or_create_active_registry(cred_def_id) if not registry_info: continue del revoc @@ -750,15 +742,13 @@ async def receive_credential( async with self._profile.transaction() as txn: try: - cred_ex_record = ( - await ( - V10CredentialExchange.retrieve_by_connection_and_thread( - txn, - connection_id, - message._thread_id, - role=V10CredentialExchange.ROLE_HOLDER, - for_update=True, - ) + cred_ex_record = await ( + V10CredentialExchange.retrieve_by_connection_and_thread( + txn, + connection_id, + message._thread_id, + role=V10CredentialExchange.ROLE_HOLDER, + for_update=True, ) ) except StorageNotFoundError: @@ -955,15 +945,13 @@ async def receive_credential_ack( """ async with self._profile.transaction() as txn: try: - cred_ex_record = ( - await ( - V10CredentialExchange.retrieve_by_connection_and_thread( - txn, - connection_id, - message._thread_id, - role=V10CredentialExchange.ROLE_ISSUER, - for_update=True, - ) + cred_ex_record = await ( + V10CredentialExchange.retrieve_by_connection_and_thread( + txn, + connection_id, + message._thread_id, + role=V10CredentialExchange.ROLE_ISSUER, + for_update=True, ) ) except StorageNotFoundError: @@ -996,11 +984,9 @@ async def receive_problem_report( """ async with self._profile.transaction() as txn: try: - cred_ex_record = ( - await ( - V10CredentialExchange.retrieve_by_connection_and_thread( - txn, connection_id, message._thread_id, for_update=True - ) + cred_ex_record = await ( + V10CredentialExchange.retrieve_by_connection_and_thread( + txn, connection_id, message._thread_id, for_update=True ) ) except StorageNotFoundError: diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/messages/credential_ack.py b/aries_cloudagent/protocols/issue_credential/v1_0/messages/credential_ack.py index fd7b8939df..5457e72079 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/messages/credential_ack.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/messages/credential_ack.py @@ 
-6,9 +6,7 @@ from ..message_types import CREDENTIAL_ACK, PROTOCOL_PACKAGE -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.credential_ack_handler.CredentialAckHandler" -) +HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.credential_ack_handler.CredentialAckHandler" class CredentialAck(V10Ack): diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_request.py b/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_request.py index b3ae1e2203..836876ec0d 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_request.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/messages/tests/test_credential_request.py @@ -74,8 +74,7 @@ def test_type(self): ) @mock.patch( - f"{PROTOCOL_PACKAGE}.messages." - "credential_request.CredentialRequestSchema.load" + f"{PROTOCOL_PACKAGE}.messages." "credential_request.CredentialRequestSchema.load" ) def test_deserialize(self, mock_credential_request_schema_load): """ @@ -89,8 +88,7 @@ def test_deserialize(self, mock_credential_request_schema_load): assert credential_request is mock_credential_request_schema_load.return_value @mock.patch( - f"{PROTOCOL_PACKAGE}.messages." - "credential_request.CredentialRequestSchema.dump" + f"{PROTOCOL_PACKAGE}.messages." "credential_request.CredentialRequestSchema.dump" ) def test_serialize(self, mock_credential_request_schema_dump): """ @@ -109,9 +107,7 @@ def test_serialize(self, mock_credential_request_schema_dump): credential_request_dict = credential_request.serialize() mock_credential_request_schema_dump.assert_called_once_with(credential_request) - assert ( - credential_request_dict is mock_credential_request_schema_dump.return_value - ) + assert credential_request_dict is mock_credential_request_schema_dump.return_value class TestCredentialRequestSchema(TestCase): diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/routes.py b/aries_cloudagent/protocols/issue_credential/v1_0/routes.py index 67cad773a7..d77682b55f 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/routes.py @@ -1465,9 +1465,7 @@ async def register(app: web.Application): web.post( "/issue-credential/send-proposal", credential_exchange_send_proposal ), - web.post( - "/issue-credential/send-offer", credential_exchange_send_free_offer - ), + web.post("/issue-credential/send-offer", credential_exchange_send_free_offer), web.post( "/issue-credential/records/{cred_ex_id}/send-offer", credential_exchange_send_bound_offer, diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_manager.py index 5802c5fcc5..f769d9b1f5 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_manager.py @@ -61,9 +61,7 @@ async def asyncSetUp(self): Ledger = mock.MagicMock() self.ledger = Ledger() self.ledger.get_schema = mock.CoroutineMock(return_value=SCHEMA) - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=CRED_DEF - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=CRED_DEF) self.ledger.get_revoc_reg_def = mock.CoroutineMock(return_value=REV_REG_DEF) self.ledger.__aenter__ = mock.CoroutineMock(return_value=self.ledger) self.ledger.credential_definition_id2schema_id = mock.CoroutineMock( @@ -1411,9 +1409,7 @@ async def test_store_credential_no_preview(self): 
await stored_exchange.save(self.session) cred_def = mock.MagicMock() - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=cred_def - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=cred_def) cred_id = "cred-id" holder = mock.MagicMock() @@ -1481,9 +1477,7 @@ async def test_store_credential_holder_store_indy_error(self): await stored_exchange.save(self.session) cred_def = mock.MagicMock() - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=cred_def - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=cred_def) cred_id = "cred-id" holder = mock.MagicMock() diff --git a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py index 01c06e76a5..d6ac8a0661 100644 --- a/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/issue_credential/v1_0/tests/test_routes.py @@ -86,9 +86,7 @@ async def test_credential_exchange_retrieve(self): with mock.patch.object(test_module.web, "json_response") as mock_response: await test_module.credential_exchange_retrieve(self.request) - mock_response.assert_called_once_with( - mock_cred_ex.serialize.return_value - ) + mock_response.assert_called_once_with(mock_cred_ex.serialize.return_value) async def test_credential_exchange_retrieve_not_found(self): self.request.match_info = {"cred_ex_id": "dummy"} @@ -129,9 +127,7 @@ async def test_credential_exchange_create(self): test_module, "CredentialManager", autospec=True ) as mock_credential_manager, mock.patch.object( test_module.CredentialPreview, "deserialize", autospec=True - ), mock.patch.object( - test_module.web, "json_response" - ) as mock_response: + ), mock.patch.object(test_module.web, "json_response") as mock_response: mock_credential_manager.return_value.create_offer = mock.CoroutineMock() mock_credential_manager.return_value.create_offer.return_value = ( @@ -162,9 +158,7 @@ async def test_credential_exchange_create_x(self): test_module, "CredentialManager", autospec=True ) as mock_credential_manager, mock.patch.object( test_module.CredentialPreview, "deserialize", autospec=True - ), mock.patch.object( - test_module.web, "json_response" - ) as mock_response: + ), mock.patch.object(test_module.web, "json_response") as mock_response: mock_credential_manager.return_value.create_offer = mock.CoroutineMock() mock_credential_manager.return_value.create_offer.return_value = ( @@ -200,9 +194,7 @@ async def test_credential_exchange_send(self): test_module, "CredentialManager", autospec=True ) as mock_credential_manager, mock.patch.object( test_module.CredentialPreview, "deserialize", autospec=True - ), mock.patch.object( - test_module.web, "json_response" - ) as mock_response: + ), mock.patch.object(test_module.web, "json_response") as mock_response: mock_credential_manager.return_value.create_offer = mock.CoroutineMock() mock_credential_manager.return_value.create_offer.return_value = ( diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/handler.py index 18a1f99cd7..ba9ac7af83 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/handler.py @@ -90,10 +90,8 @@ async def get_detail_record(self, cred_ex_id: str) -> V20CredExRecordIndy: """Retrieve credential exchange 
detail record by cred_ex_id.""" async with self.profile.session() as session: - records = ( - await AnonCredsCredFormatHandler.format.detail.query_by_cred_ex_id( - session, cred_ex_id - ) + records = await AnonCredsCredFormatHandler.format.detail.query_by_cred_ex_id( + session, cred_ex_id ) if len(records) > 1: @@ -328,9 +326,7 @@ async def issue_credential( cred_request = cred_ex_record.cred_request.attachment( AnonCredsCredFormatHandler.format ) - cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict( - decode=False - ) + cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict(decode=False) issuer = AnonCredsIssuer(self.profile) cred_def_id = cred_offer["cred_def_id"] diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/tests/test_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/tests/test_handler.py index fd104cbfd7..c75b0cf8ce 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/tests/test_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/anoncreds/tests/test_handler.py @@ -205,9 +205,7 @@ async def asyncSetUp(self): Ledger = mock.MagicMock() self.ledger = Ledger() self.ledger.get_schema = mock.CoroutineMock(return_value=SCHEMA) - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=CRED_DEF - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=CRED_DEF) self.ledger.get_revoc_reg_def = mock.CoroutineMock(return_value=REV_REG_DEF) self.ledger.__aenter__ = mock.CoroutineMock(return_value=self.ledger) self.ledger.credential_definition_id2schema_id = mock.CoroutineMock( @@ -285,9 +283,7 @@ async def test_get_indy_detail_record(self): await details_indy[0].save(self.session) await details_indy[1].save(self.session) # exercise logger warning on get() - with mock.patch.object( - INDY_LOGGER, "warning", mock.MagicMock() - ) as mock_warning: + with mock.patch.object(INDY_LOGGER, "warning", mock.MagicMock()) as mock_warning: assert await self.handler.get_detail_record(cred_ex_id) in details_indy mock_warning.assert_called_once() @@ -579,9 +575,7 @@ async def test_create_request(self): ) cred_def = {"cred": "def"} - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=cred_def - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=cred_def) cred_req_meta = {} self.holder.create_credential_request = mock.CoroutineMock( @@ -621,27 +615,23 @@ async def test_create_request(self): "get_ledger_for_identifier", mock.CoroutineMock(return_value=(None, self.ledger)), ): - await self.handler.create_request( - cred_ex_record, {"holder_did": holder_did} - ) + await self.handler.create_request(cred_ex_record, {"holder_did": holder_did}) async def test_create_request_bad_state(self): cred_ex_record = V20CredExRecord(state=V20CredExRecord.STATE_OFFER_SENT) with self.assertRaises(V20CredFormatError) as context: await self.handler.create_request(cred_ex_record) - assert ( - "Indy issue credential format cannot start from credential request" - in str(context.exception) + assert "Indy issue credential format cannot start from credential request" in str( + context.exception ) cred_ex_record.state = None with self.assertRaises(V20CredFormatError) as context: await self.handler.create_request(cred_ex_record) - assert ( - "Indy issue credential format cannot start from credential request" - in str(context.exception) + assert "Indy issue credential format cannot start from credential request" in 
str( + context.exception ) async def test_create_request_not_unique_x(self): @@ -673,9 +663,8 @@ async def test_receive_request_no_offer(self): with self.assertRaises(V20CredFormatError) as context: await self.handler.receive_request(cred_ex_record, cred_request_message) - assert ( - "Indy issue credential format cannot start from credential request" - in str(context.exception) + assert "Indy issue credential format cannot start from credential request" in str( + context.exception ) @pytest.mark.skip(reason="Anoncreds-break") diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py index 3b1e01f398..ad3cba25f0 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/handler.py @@ -440,9 +440,7 @@ async def issue_credential( """Issue indy credential.""" # Temporary shim while the new anoncreds library integration is in progress if self.anoncreds_handler: - return await self.anoncreds_handler.issue_credential( - cred_ex_record, retries - ) + return await self.anoncreds_handler.issue_credential(cred_ex_record, retries) await self._check_uniqueness(cred_ex_record.cred_ex_id) @@ -450,9 +448,7 @@ async def issue_credential( cred_request = cred_ex_record.cred_request.attachment( IndyCredFormatHandler.format ) - cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict( - decode=False - ) + cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict(decode=False) schema_id = cred_offer["schema_id"] cred_def_id = cred_offer["cred_def_id"] @@ -522,9 +518,7 @@ async def store_credential( """Store indy credential.""" # Temporary shim while the new anoncreds library integration is in progress if self.anoncreds_handler: - return await self.anoncreds_handler.store_credential( - cred_ex_record, cred_id - ) + return await self.anoncreds_handler.store_credential(cred_ex_record, cred_id) cred = cred_ex_record.cred_issue.attachment(IndyCredFormatHandler.format) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py index 77abf110c3..92c470e3a3 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/indy/tests/test_handler.py @@ -204,9 +204,7 @@ async def asyncSetUp(self): Ledger = mock.MagicMock() self.ledger = Ledger() self.ledger.get_schema = mock.CoroutineMock(return_value=SCHEMA) - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=CRED_DEF - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=CRED_DEF) self.ledger.get_revoc_reg_def = mock.CoroutineMock(return_value=REV_REG_DEF) self.ledger.__aenter__ = mock.CoroutineMock(return_value=self.ledger) self.ledger.credential_definition_id2schema_id = mock.CoroutineMock( @@ -284,9 +282,7 @@ async def test_get_indy_detail_record(self): await details_indy[0].save(self.session) await details_indy[1].save(self.session) # exercise logger warning on get() - with mock.patch.object( - INDY_LOGGER, "warning", mock.MagicMock() - ) as mock_warning: + with mock.patch.object(INDY_LOGGER, "warning", mock.MagicMock()) as mock_warning: assert await self.handler.get_detail_record(cred_ex_id) in details_indy mock_warning.assert_called_once() @@ -573,9 +569,7 @@ async def 
test_create_request(self): ) cred_def = {"cred": "def"} - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=cred_def - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=cred_def) cred_req_meta = {} self.holder.create_credential_request = mock.CoroutineMock( @@ -615,27 +609,23 @@ async def test_create_request(self): "get_ledger_for_identifier", mock.CoroutineMock(return_value=(None, self.ledger)), ): - await self.handler.create_request( - cred_ex_record, {"holder_did": holder_did} - ) + await self.handler.create_request(cred_ex_record, {"holder_did": holder_did}) async def test_create_request_bad_state(self): cred_ex_record = V20CredExRecord(state=V20CredExRecord.STATE_OFFER_SENT) with self.assertRaises(V20CredFormatError) as context: await self.handler.create_request(cred_ex_record) - assert ( - "Indy issue credential format cannot start from credential request" - in str(context.exception) + assert "Indy issue credential format cannot start from credential request" in str( + context.exception ) cred_ex_record.state = None with self.assertRaises(V20CredFormatError) as context: await self.handler.create_request(cred_ex_record) - assert ( - "Indy issue credential format cannot start from credential request" - in str(context.exception) + assert "Indy issue credential format cannot start from credential request" in str( + context.exception ) async def test_create_request_not_unique_x(self): @@ -667,9 +657,8 @@ async def test_receive_request_no_offer(self): with self.assertRaises(V20CredFormatError) as context: await self.handler.receive_request(cred_ex_record, cred_request_message) - assert ( - "Indy issue credential format cannot start from credential request" - in str(context.exception) + assert "Indy issue credential format cannot start from credential request" in str( + context.exception ) async def test_issue_credential_revocable(self): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py index dca321efae..48f180fdc7 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/handler.py @@ -123,9 +123,7 @@ def get_format_data(self, message_type: str, data: dict) -> CredFormatAttachment attach_id=LDProofCredFormatHandler.format.api, format_=self.get_format_identifier(message_type), ), - AttachDecorator.data_base64( - data, ident=LDProofCredFormatHandler.format.api - ), + AttachDecorator.data_base64(data, ident=LDProofCredFormatHandler.format.api), ) async def create_proposal( @@ -282,9 +280,7 @@ async def issue_credential( ) -> CredFormatAttachment: """Issue linked data proof credential.""" if not cred_ex_record.cred_request: - raise V20CredFormatError( - "Cannot issue credential without credential request" - ) + raise V20CredFormatError("Cannot issue credential without credential request") detail_dict = cred_ex_record.cred_request.attachment( LDProofCredFormatHandler.format @@ -423,6 +419,4 @@ async def store_credential( await vc_holder.store_credential(vc_record) # Store detail record, emit event - await detail_record.save( - session, reason="store credential v2.0", event=True - ) + await detail_record.save(session, reason="store credential v2.0", event=True) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py 
b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py index 3472f1f3f5..e52e8b5350 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/ld_proof/tests/test_handler.py @@ -345,9 +345,7 @@ async def test_create_offer_x_wrong_attributes(self): test_module, "get_properties_without_context", return_value=missing_properties, - ), self.assertRaises( - LinkedDataProofException - ) as context: + ), self.assertRaises(LinkedDataProofException) as context: await self.handler.create_offer(self.cred_proposal) assert ( @@ -454,9 +452,7 @@ async def test_receive_request_with_offer_no_id(self): ], ) ], - requests_attach=[ - AttachDecorator.data_base64(LD_PROOF_VC_DETAIL, ident="0") - ], + requests_attach=[AttachDecorator.data_base64(LD_PROOF_VC_DETAIL, ident="0")], ) await self.handler.receive_request(cred_ex_record, cred_request) @@ -576,9 +572,7 @@ async def test_issue_credential(self): ], ) ], - requests_attach=[ - AttachDecorator.data_base64(LD_PROOF_VC_DETAIL, ident="0") - ], + requests_attach=[AttachDecorator.data_base64(LD_PROOF_VC_DETAIL, ident="0")], ) cred_ex_record = V20CredExRecord( @@ -643,9 +637,7 @@ async def test_receive_credential(self): ], ) ], - requests_attach=[ - AttachDecorator.data_base64(LD_PROOF_VC_DETAIL, ident="0") - ], + requests_attach=[AttachDecorator.data_base64(LD_PROOF_VC_DETAIL, ident="0")], ) cred_ex_record = V20CredExRecord( cred_ex_id="cred-ex-id", @@ -771,9 +763,8 @@ async def test_receive_credential_x_credential_status_ne_both_set(self): with self.assertRaises(V20CredFormatError) as context: await self.handler.receive_credential(cred_ex_record, cred_issue) - assert ( - "Received credential status type does not match credential request" - in str(context.exception) + assert "Received credential status type does not match credential request" in str( + context.exception ) async def test_receive_credential_x_proof_options_ne(self): @@ -797,9 +788,7 @@ async def test_receive_credential_x_proof_options_ne(self): ], ) ], - credentials_attach=[ - AttachDecorator.data_base64(LD_PROOF_VC, ident="0") - ], + credentials_attach=[AttachDecorator.data_base64(LD_PROOF_VC, ident="0")], ) cred_request = V20CredRequest( formats=[ @@ -900,8 +889,6 @@ async def test_store_credential_x_not_verified(self): ) as mock_verify_credential, mock.patch.object( self.manager, "_get_proof_purpose", - ) as mock_get_proof_purpose, self.assertRaises( - V20CredFormatError - ) as context: + ) as mock_get_proof_purpose, self.assertRaises(V20CredFormatError) as context: await self.handler.store_credential(cred_ex_record, cred_id) assert "Received invalid credential: " in str(context.exception) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py index b418bfdf9e..833aba09bf 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/handler.py @@ -329,9 +329,7 @@ async def create_request( ) nonce = cred_offer["binding_method"]["anoncreds_link_secret"]["nonce"] - cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"][ - "cred_def_id" - ] + cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"]["cred_def_id"] ledger = self.profile.inject(BaseLedger) # TODO use the ledger registry in the anoncreds module, @@ -436,13 +434,9 @@ async def 
issue_credential( cred_request = cred_ex_record.cred_request.attachment( VCDICredFormatHandler.format ) - cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict( - decode=False - ) + cred_values = cred_ex_record.cred_offer.credential_preview.attr_dict(decode=False) - cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"][ - "cred_def_id" - ] + cred_def_id = cred_offer["binding_method"]["anoncreds_link_secret"]["cred_def_id"] ledger = self.profile.inject(BaseLedger) # TODO use the ledger registry in the anoncreds module, diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py index 7845b7676b..a922ec1e22 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/models/cred_request.py @@ -101,9 +101,7 @@ class Meta: model_class = DidcommSignedAttachmentRequest unknown = EXCLUDE - attachment_id = fields.Str( - required=True, metadata={"description": "", "example": ""} - ) + attachment_id = fields.Str(required=True, metadata={"description": "", "example": ""}) class BindingProof(BaseModel): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/tests/test_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/tests/test_handler.py index 57f130007b..d2dec27a41 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/tests/test_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/formats/vc_di/tests/test_handler.py @@ -259,9 +259,7 @@ async def asyncSetUp(self): Ledger = mock.MagicMock() self.ledger = Ledger() self.ledger.get_schema = mock.CoroutineMock(return_value=SCHEMA) - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=CRED_DEF - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=CRED_DEF) self.ledger.get_revoc_reg_def = mock.CoroutineMock(return_value=REV_REG_DEF) self.ledger.__aenter__ = mock.CoroutineMock(return_value=self.ledger) self.ledger.credential_definition_id2schema_id = mock.CoroutineMock( @@ -341,9 +339,7 @@ async def test_get_vcdi_detail_record(self): await details_vcdi[0].save(self.session) await details_vcdi[1].save(self.session) # exercise logger warning on get() - with mock.patch.object( - VCDI_LOGGER, "warning", mock.MagicMock() - ) as mock_warning: + with mock.patch.object(VCDI_LOGGER, "warning", mock.MagicMock()) as mock_warning: assert await self.handler.get_detail_record(cred_ex_id) in details_vcdi mock_warning.assert_called_once() @@ -482,7 +478,6 @@ async def test_receive_offer(self): await self.handler.receive_offer(cred_ex_record, cred_offer_message) async def test_create_request(self): - # Define your mock credential definition mock_credential_definition_result = GetCredDefResult( credential_definition=CredDef( @@ -529,7 +524,6 @@ async def test_create_request(self): with mock.patch.object( AnonCredsHolder, "create_credential_request", mock.CoroutineMock() ) as mock_create: - mock_create.return_value = ( json.dumps(VCDI_CRED_REQ["binding_proof"]["anoncreds_link_secret"]), json.dumps(cred_req_meta), @@ -538,9 +532,7 @@ async def test_create_request(self): cred_ex_record, {"holder_did": holder_did} ) - legacy_offer = await self.handler._prepare_legacy_offer( - VCDI_OFFER, SCHEMA_ID - ) + legacy_offer = await self.handler._prepare_legacy_offer(VCDI_OFFER, SCHEMA_ID) 
mock_create.assert_called_once_with( legacy_offer, mock_credential_definition_result.credential_definition, @@ -553,9 +545,7 @@ async def test_create_request(self): assert attachment.data.base64 cred_ex_record._id = "dummy-id2" - await self.handler.create_request( - cred_ex_record, {"holder_did": holder_did} - ) + await self.handler.create_request(cred_ex_record, {"holder_did": holder_did}) self.context.injector.clear_binding(BaseCache) cred_ex_record._id = "dummy-id3" @@ -641,9 +631,7 @@ async def test_issue_credential_revocable(self): (cred_format, attachment) = await self.handler.issue_credential( cred_ex_record, retries=1 ) - legacy_offer = await self.handler._prepare_legacy_offer( - VCDI_OFFER, SCHEMA_ID - ) + legacy_offer = await self.handler._prepare_legacy_offer(VCDI_OFFER, SCHEMA_ID) legacy_request = await self.handler._prepare_legacy_request( VCDI_CRED_REQ, CRED_DEF_ID ) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_offer_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_offer_handler.py index 3d1432e5bc..e280675e43 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_offer_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_offer_handler.py @@ -54,9 +54,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) connection_id = ( - context.connection_record.connection_id - if context.connection_record - else None + context.connection_record.connection_id if context.connection_record else None ) profile = context.profile diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_proposal_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_proposal_handler.py index d6762223e6..7bdf3d77a4 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_proposal_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/cred_proposal_handler.py @@ -40,9 +40,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): if context.connection_record and not context.connection_ready: raise HandlerException("Connection used for credential proposal not ready") elif not context.connection_record: - raise HandlerException( - "Connectionless not supported for credential proposal" - ) + raise HandlerException("Connectionless not supported for credential proposal") profile = context.profile cred_manager = V20CredManager(profile) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_ack_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_ack_handler.py index 146588ad08..d9b6e4cae0 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_ack_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_ack_handler.py @@ -59,9 +59,7 @@ async def test_called_not_ready(self): responder = MockResponder() with self.assertRaises(test_module.HandlerException) as err: await handler.handle(request_context, responder) - assert ( - err.exception.message == "Connection used for credential ack not ready" - ) + assert err.exception.message == "Connection used for credential ack not ready" assert not responder.messages diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_offer_handler.py b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_offer_handler.py index 3df7749b6b..aa21834a5a 100644 --- 
a/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_offer_handler.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/handlers/tests/test_cred_offer_handler.py @@ -174,8 +174,7 @@ async def test_called_not_ready(self): with self.assertRaises(test_module.HandlerException) as err: await handler_inst.handle(request_context, responder) assert ( - err.exception.message - == "Connection used for credential offer not ready" + err.exception.message == "Connection used for credential offer not ready" ) assert not responder.messages diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/manager.py b/aries_cloudagent/protocols/issue_credential/v2_0/manager.py index 0bf492b7b9..e67ef37831 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/manager.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/manager.py @@ -177,9 +177,11 @@ async def receive_proposal( # Format specific receive_proposal handlers for format in cred_proposal_message.formats: - await V20CredFormat.Format.get(format.format).handler( - self.profile - ).receive_proposal(cred_ex_record, cred_proposal_message) + await ( + V20CredFormat.Format.get(format.format) + .handler(self.profile) + .receive_proposal(cred_ex_record, cred_proposal_message) + ) cred_ex_record.cred_proposal = cred_proposal_message cred_ex_record.state = V20CredExRecord.STATE_PROPOSAL_RECEIVED @@ -434,9 +436,7 @@ async def receive_request( if (f := V20CredFormat.Format.get(format.format)) ] handlers_without_offer = [ - handler - for handler in handlers - if handler.can_receive_request_without_offer() + handler for handler in handlers if handler.can_receive_request_without_offer() ] async with self._profile.session() as session: @@ -531,9 +531,7 @@ async def issue_credential( ) if len(issue_formats) == 0: - raise V20CredManagerError( - "Unable to issue credential. No supported formats" - ) + raise V20CredManagerError("Unable to issue credential. 
No supported formats") cred_issue_message = V20CredIssue( replacement_id=replacement_id, diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py index aab8fd6ee7..de7d225fcc 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/message_types.py @@ -22,9 +22,7 @@ MESSAGE_TYPES = DIDCommPrefix.qualify_all( { - CRED_20_PROPOSAL: ( - f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal" - ), + CRED_20_PROPOSAL: (f"{PROTOCOL_PACKAGE}.messages.cred_proposal.V20CredProposal"), CRED_20_OFFER: f"{PROTOCOL_PACKAGE}.messages.cred_offer.V20CredOffer", CRED_20_REQUEST: f"{PROTOCOL_PACKAGE}.messages.cred_request.V20CredRequest", CRED_20_ISSUE: f"{PROTOCOL_PACKAGE}.messages.cred_issue.V20CredIssue", diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_request.py b/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_request.py index 87a575d160..f403d46f5c 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_request.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/messages/cred_request.py @@ -12,9 +12,7 @@ from ..message_types import CRED_20_REQUEST, PROTOCOL_PACKAGE from .cred_format import V20CredFormat, V20CredFormatSchema -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.cred_request_handler.V20CredRequestHandler" -) +HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.cred_request_handler.V20CredRequestHandler" class V20CredRequest(AgentMessage): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_format.py b/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_format.py index 4aade3ba15..b067118aee 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_format.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/messages/tests/test_cred_format.py @@ -44,9 +44,7 @@ def test_get_attachment_data(self): formats=[ V20CredFormat(attach_id="indy", format_=V20CredFormat.Format.INDY) ], - attachments=[ - AttachDecorator.data_base64(TEST_INDY_FILTER, ident="indy") - ], + attachments=[AttachDecorator.data_base64(TEST_INDY_FILTER, ident="indy")], ) == TEST_INDY_FILTER ) @@ -56,9 +54,7 @@ def test_get_attachment_data(self): formats=[ V20CredFormat(attach_id="indy", format_=V20CredFormat.Format.INDY) ], - attachments=[ - AttachDecorator.data_base64(TEST_INDY_FILTER, ident="xxx") - ], + attachments=[AttachDecorator.data_base64(TEST_INDY_FILTER, ident="xxx")], ) is None ) @@ -68,9 +64,7 @@ def test_get_attachment_data(self): formats=[ V20CredFormat(attach_id="indy", format_=V20CredFormat.Format.INDY) ], - attachments=[ - AttachDecorator.data_base64(TEST_INDY_FILTER, ident="indy") - ], + attachments=[AttachDecorator.data_base64(TEST_INDY_FILTER, ident="indy")], ) is None ) diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py index 8803356ce9..9c5bdc85e0 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/routes.py @@ -679,9 +679,7 @@ async def credential_exchange_create(request: web.BaseRequest): try: # Not all formats use credential preview - cred_preview = ( - V20CredPreview.deserialize(preview_spec) if preview_spec else None - ) + cred_preview = V20CredPreview.deserialize(preview_spec) if preview_spec else None cred_proposal = 
V20CredProposal( comment=comment, credential_preview=cred_preview, @@ -764,9 +762,7 @@ async def credential_exchange_send(request: web.BaseRequest): cred_ex_record = None try: # Not all formats use credential preview - cred_preview = ( - V20CredPreview.deserialize(preview_spec) if preview_spec else None - ) + cred_preview = V20CredPreview.deserialize(preview_spec) if preview_spec else None async with profile.session() as session: conn_record = await ConnRecord.retrieve_by_id(session, connection_id) if not conn_record.is_ready: @@ -873,9 +869,7 @@ async def credential_exchange_send_proposal(request: web.BaseRequest): conn_record = None cred_ex_record = None try: - cred_preview = ( - V20CredPreview.deserialize(preview_spec) if preview_spec else None - ) + cred_preview = V20CredPreview.deserialize(preview_spec) if preview_spec else None async with profile.session() as session: conn_record = await ConnRecord.retrieve_by_id(session, connection_id) if not conn_record.is_ready: @@ -1538,9 +1532,7 @@ async def credential_exchange_issue(request: web.BaseRequest): outbound_handler, ) - await outbound_handler( - cred_issue_message, connection_id=cred_ex_record.connection_id - ) + await outbound_handler(cred_issue_message, connection_id=cred_ex_record.connection_id) trace_event( context.settings, diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_manager.py b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_manager.py index f8cde14b03..1508fac3c9 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_manager.py @@ -90,9 +90,7 @@ async def asyncSetUp(self): Ledger = mock.MagicMock() self.ledger = Ledger() self.ledger.get_schema = mock.CoroutineMock(return_value=SCHEMA) - self.ledger.get_credential_definition = mock.CoroutineMock( - return_value=CRED_DEF - ) + self.ledger.get_credential_definition = mock.CoroutineMock(return_value=CRED_DEF) self.ledger.get_revoc_reg_def = mock.CoroutineMock(return_value=REV_REG_DEF) self.ledger.__aenter__ = mock.CoroutineMock(return_value=self.ledger) self.ledger.credential_definition_id2schema_id = mock.CoroutineMock( @@ -168,9 +166,7 @@ async def test_create_proposal(self): V20CredFormat.Format.INDY.api ], ), - AttachDecorator.data_base64( - {}, ident=V20CredFormat.Format.INDY.api - ), + AttachDecorator.data_base64({}, ident=V20CredFormat.Format.INDY.api), ) ) cx_rec = await self.manager.create_proposal( @@ -230,8 +226,7 @@ async def test_create_proposal_no_preview(self): cred_proposal = cx_rec.cred_proposal assert ( - cred_proposal.attachment(V20CredFormat.Format.LD_PROOF) - == LD_PROOF_VC_DETAIL + cred_proposal.attachment(V20CredFormat.Format.LD_PROOF) == LD_PROOF_VC_DETAIL ) assert cx_rec.connection_id == connection_id assert cx_rec.thread_id == cred_proposal._thread_id @@ -283,8 +278,7 @@ async def test_receive_proposal(self): "cred_def_id": CRED_DEF_ID } assert ( - ret_cred_proposal.credential_preview.attributes - == cred_preview.attributes + ret_cred_proposal.credential_preview.attributes == cred_preview.attributes ) assert cx_rec.connection_id == connection_id assert cx_rec.role == V20CredExRecord.ROLE_ISSUER @@ -426,9 +420,7 @@ async def test_create_bound_offer(self): assert ret_cx_rec == cx_rec mock_save.assert_called_once() - mock_handler.return_value.create_offer.assert_called_once_with( - cred_proposal - ) + mock_handler.return_value.create_offer.assert_called_once_with(cred_proposal) assert cx_rec.thread_id == 
ret_offer._thread_id assert cx_rec.cred_offer.replacement_id == ret_offer.replacement_id @@ -837,9 +829,7 @@ async def test_receive_request_no_connection_cred_request(self): mock_retrieve.return_value = stored_cx_rec mock_handler.return_value.receive_request = mock.CoroutineMock() - cx_rec = await self.manager.receive_request( - cred_request, mock_conn, mock_oob - ) + cx_rec = await self.manager.receive_request(cred_request, mock_conn, mock_oob) mock_retrieve.assert_called_once_with( self.session, @@ -998,9 +988,7 @@ async def test_issue_credential_indy(self): ) mock_save.assert_called_once() - mock_handler.return_value.issue_credential.assert_called_once_with( - ret_cx_rec - ) + mock_handler.return_value.issue_credential.assert_called_once_with(ret_cx_rec) assert ret_cx_rec.cred_issue.attachment() == INDY_CRED assert ret_cred_issue.attachment() == INDY_CRED @@ -1107,9 +1095,7 @@ async def test_issue_credential_anoncreds(self): ) mock_save.assert_called_once() - mock_handler.return_value.issue_credential.assert_called_once_with( - ret_cx_rec - ) + mock_handler.return_value.issue_credential.assert_called_once_with(ret_cx_rec) assert ret_cx_rec.cred_issue.attachment() == INDY_CRED assert ret_cred_issue.attachment() == INDY_CRED @@ -1274,9 +1260,8 @@ async def test_receive_cred_x_extra_formats(self): cred_issue, connection_id, ) - assert ( - "Received issue credential format(s) not present in credential" - in str(context.exception) + assert "Received issue credential format(s) not present in credential" in str( + context.exception ) async def test_receive_cred_x_no_formats(self): diff --git a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py index e25e088635..f221db4250 100644 --- a/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/issue_credential/v2_0/tests/test_routes.py @@ -130,9 +130,7 @@ async def test_credential_exchange_list_x(self): ) as mock_cx_rec: mock_cx_rec.connection_id = "conn-123" mock_cx_rec.thread_id = "conn-123" - mock_cx_rec.query = mock.CoroutineMock( - side_effect=test_module.StorageError() - ) + mock_cx_rec.query = mock.CoroutineMock(side_effect=test_module.StorageError()) with self.assertRaises(test_module.web.HTTPBadRequest): await test_module.credential_exchange_list(self.request) @@ -1382,9 +1380,7 @@ async def test_credential_exchange_issue_no_conn_record(self): test_module, "V20CredExRecord", autospec=True ) as mock_cx_rec_cls: mock_cx_rec.state = mock_cx_rec_cls.STATE_REQUEST_RECEIVED - mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock( - return_value=mock_cx_rec - ) + mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock(return_value=mock_cx_rec) # Emulate storage not found (bad connection id) mock_conn_rec.retrieve_by_id = mock.CoroutineMock( @@ -1446,9 +1442,7 @@ async def test_credential_exchange_issue_rev_reg_full_indy(self): test_module, "V20CredExRecord", autospec=True ) as mock_cx_rec_cls: mock_cx_rec.state = mock_cx_rec_cls.STATE_REQUEST_RECEIVED - mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock( - return_value=mock_cx_rec - ) + mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock(return_value=mock_cx_rec) mock_conn_rec.retrieve_by_id = mock.CoroutineMock() mock_conn_rec.retrieve_by_id.return_value.is_ready = True @@ -1478,9 +1472,7 @@ async def test_credential_exchange_issue_rev_reg_full_anoncreds(self): test_module, "V20CredExRecord", autospec=True ) as mock_cx_rec_cls: mock_cx_rec.state = 
mock_cx_rec_cls.STATE_REQUEST_RECEIVED - mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock( - return_value=mock_cx_rec - ) + mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock(return_value=mock_cx_rec) mock_conn_rec.retrieve_by_id = mock.CoroutineMock() mock_conn_rec.retrieve_by_id.return_value.is_ready = True @@ -1509,9 +1501,7 @@ async def test_credential_exchange_issue_deser_x(self): ) as mock_cred_mgr, mock.patch.object( test_module, "V20CredExRecord", autospec=True ) as mock_cx_rec_cls: - mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock( - return_value=mock_cx_rec - ) + mock_cx_rec_cls.retrieve_by_id = mock.CoroutineMock(return_value=mock_cx_rec) mock_cred_mgr.return_value = mock.MagicMock( issue_credential=mock.CoroutineMock( return_value=( diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/manager.py b/aries_cloudagent/protocols/out_of_band/v1_0/manager.py index 236b5ef1e3..08b982c4a2 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/manager.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/manager.py @@ -136,10 +136,7 @@ def __init__( "create_unique_did can only be used with use_did_method" ) - if ( - use_did_method - and use_did_method not in DIDXManager.SUPPORTED_USE_DID_METHODS - ): + if use_did_method and use_did_method not in DIDXManager.SUPPORTED_USE_DID_METHODS: raise OutOfBandManagerError(f"Unsupported use_did_method: {use_did_method}") self.profile = profile @@ -195,9 +192,7 @@ def __init__( self.mediation_id = mediation_id self.metadata = metadata - async def create_attachment( - self, attachment: Mapping, pthid: str - ) -> AttachDecorator: + async def create_attachment(self, attachment: Mapping, pthid: str) -> AttachDecorator: """Create attachment for OOB invitation.""" a_type = attachment.get("type") a_id = attachment.get("id") @@ -898,9 +893,7 @@ async def _service_decorator_from_service( ) return None - async def _wait_for_reuse_response( - self, oob_id: str, timeout: int = 15 - ) -> OobRecord: + async def _wait_for_reuse_response(self, oob_id: str, timeout: int = 15) -> OobRecord: """Wait for reuse response. Wait for reuse response message state. 
Either by receiving a reuse accepted or @@ -975,9 +968,7 @@ async def _wait_for_state() -> ConnRecord: # This rules out the scenario where the record was in the desired state # Before starting the event listener async with self.profile.session() as session: - conn_record = await ConnRecord.retrieve_by_id( - session, connection_id - ) + conn_record = await ConnRecord.retrieve_by_id(session, connection_id) if conn_record.is_ready: return conn_record @@ -1356,9 +1347,7 @@ async def receive_reuse_accepted_message( await oob_record.delete_record(session) conn_record.invitation_msg_id = invi_msg_id - await conn_record.save( - session, reason="Assigning new invitation_msg_id" - ) + await conn_record.save(session, reason="Assigning new invitation_msg_id") # Emit webhook await self.profile.notify( REUSE_ACCEPTED_WEBHOOK_TOPIC, diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/messages/invitation.py b/aries_cloudagent/protocols/out_of_band/v1_0/messages/invitation.py index 252d26dfba..6f18750463 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/messages/invitation.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/messages/invitation.py @@ -64,10 +64,7 @@ def get(cls, label: Union[str, "HSProto"]) -> Optional["HSProto"]: if isinstance(label, str): for hsp in HSProto: - if ( - DIDCommPrefix.unqualify(label) == hsp.name - or label.lower() in hsp.aka - ): + if DIDCommPrefix.unqualify(label) == hsp.name or label.lower() in hsp.aka: return hsp elif isinstance(label, HSProto): diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/messages/reuse.py b/aries_cloudagent/protocols/out_of_band/v1_0/messages/reuse.py index 1c2f6f45dd..5f2e44bfd8 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/messages/reuse.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/messages/reuse.py @@ -7,9 +7,7 @@ from .....messaging.agent_message import AgentMessage, AgentMessageSchema from ..message_types import DEFAULT_VERSION, MESSAGE_REUSE, PROTOCOL_PACKAGE -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.reuse_handler.HandshakeReuseMessageHandler" -) +HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.reuse_handler.HandshakeReuseMessageHandler" class HandshakeReuse(AgentMessage): diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/routes.py b/aries_cloudagent/protocols/out_of_band/v1_0/routes.py index 7fcd42c1e4..f600e8b808 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/routes.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/routes.py @@ -48,9 +48,7 @@ class InvitationCreateQueryStringSchema(OpenAPISchema): ) create_unique_did = fields.Boolean( required=False, - metadata={ - "description": "Create unique DID for this invitation (default false)" - }, + metadata={"description": "Create unique DID for this invitation (default false)"}, ) @@ -268,9 +266,7 @@ async def invitation_create(request: web.BaseRequest): public=use_public_did, use_did=use_did, use_did_method=use_did_method, - hs_protos=[ - h for h in [HSProto.get(hsp) for hsp in handshake_protocols] if h - ], + hs_protos=[h for h in [HSProto.get(hsp) for hsp in handshake_protocols] if h], multi_use=multi_use, create_unique_did=create_unique_did, attachments=attachments, diff --git a/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py index bc310c13ec..c2bfe14bcd 100644 --- a/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/out_of_band/v1_0/tests/test_manager.py @@ -314,9 +314,7 @@ def setUp(self): 
self.responder = MockResponder() self.responder.send = mock.CoroutineMock() - self.test_mediator_routing_keys = [ - "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRR" - ] + self.test_mediator_routing_keys = ["3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRR"] self.test_mediator_conn_id = "mediator-conn-id" self.test_mediator_endpoint = "http://mediator.example.com" @@ -590,9 +588,7 @@ async def test_create_invitation_attachment_v2_0_cred_offer(self): key_type=ED25519, ) mock_retrieve_cxid_v1.side_effect = test_module.StorageNotFoundError() - mock_retrieve_cxid_v2.return_value = mock.MagicMock( - cred_offer=V20CredOffer() - ) + mock_retrieve_cxid_v2.return_value = mock.MagicMock(cred_offer=V20CredOffer()) invi_rec = await self.manager.create_invitation( my_endpoint=TestConfig.test_endpoint, public=False, @@ -798,9 +794,9 @@ async def test_create_invitation_peer_did(self): service_accept=["didcomm/aip1", "didcomm/aip2;env=rfc19"], ) - assert invi_rec._invitation.ser[ - "@type" - ] == DIDCommPrefix.qualify_current(self.TEST_INVI_MESSAGE_TYPE) + assert invi_rec._invitation.ser["@type"] == DIDCommPrefix.qualify_current( + self.TEST_INVI_MESSAGE_TYPE + ) assert not invi_rec._invitation.ser.get("requests~attach") assert invi_rec.invitation.label == "That guy" assert ( @@ -827,9 +823,7 @@ async def test_create_invitation_metadata_assigned(self): ) service = invi_rec._invitation.ser["services"][0] invitation_key = DIDKey.from_did(service["recipientKeys"][0]).public_key_b58 - record = await ConnRecord.retrieve_by_invitation_key( - session, invitation_key - ) + record = await ConnRecord.retrieve_by_invitation_key(session, invitation_key) assert await record.metadata_get_all(session) == {"hello": "world"} async def test_create_invitation_x_public_metadata(self): @@ -1446,9 +1440,7 @@ async def test_existing_conn_record_public_did(self): oob_mgr_find_existing_conn.assert_called_once() assert result.state == OobRecord.STATE_ACCEPTED - oob_record_save.assert_called_once_with( - ANY, reason="Storing reuse msg data" - ) + oob_record_save.assert_called_once_with(ANY, reason="Storing reuse msg data") async def test_receive_invitation_handshake_reuse(self): self.profile.context.update_settings({"public_invites": True}) diff --git a/aries_cloudagent/protocols/present_proof/anoncreds/pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/anoncreds/pres_exch_handler.py index 3b3457e37e..0052a78873 100644 --- a/aries_cloudagent/protocols/present_proof/anoncreds/pres_exch_handler.py +++ b/aries_cloudagent/protocols/present_proof/anoncreds/pres_exch_handler.py @@ -194,9 +194,7 @@ async def _get_revocation_states( revocation_states[rev_reg_id] = {} rev_reg_def = revocation_registries[rev_reg_id] revocation = AnonCredsRevocation(self._profile) - tails_local_path = await revocation.get_or_fetch_local_tails_path( - rev_reg_def - ) + tails_local_path = await revocation.get_or_fetch_local_tails_path(rev_reg_def) try: revocation_states[rev_reg_id][timestamp] = json.loads( await self.holder.create_revocation_state( diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_exch.py b/aries_cloudagent/protocols/present_proof/dif/pres_exch.py index 72d7eef25e..ad62e169d3 100644 --- a/aries_cloudagent/protocols/present_proof/dif/pres_exch.py +++ b/aries_cloudagent/protocols/present_proof/dif/pres_exch.py @@ -108,9 +108,7 @@ class Meta: model_class = SubmissionRequirements unknown = EXCLUDE - _name = fields.Str( - required=False, data_key="name", metadata={"description": "Name"} - ) + _name = fields.Str(required=False, 
data_key="name", metadata={"description": "Name"}) purpose = fields.Str(required=False, metadata={"description": "Purpose"}) rule = fields.Str( required=False, @@ -131,9 +129,7 @@ class Meta: data_key="max", metadata={"description": "Max Value", "example": 1234, "strict": True}, ) - _from = fields.Str( - required=False, data_key="from", metadata={"description": "From"} - ) + _from = fields.Str(required=False, data_key="from", metadata={"description": "From"}) # Self References from_nested = fields.List( fields.Nested(lambda: SubmissionRequirementsSchema()), required=False @@ -334,9 +330,7 @@ class Meta: model_class = Filter unknown = EXCLUDE - _type = fields.Str( - required=False, data_key="type", metadata={"description": "Type"} - ) + _type = fields.Str(required=False, data_key="type", metadata={"description": "Type"}) fmt = fields.Str( required=False, data_key="format", metadata={"description": "Format"} ) @@ -518,9 +512,7 @@ def extract_info(self, data, **kwargs): data["status_active"] = data["statuses"]["active"]["directive"] if "suspended" in data.get("statuses"): if "directive" in data.get("statuses").get("suspended"): - data["status_suspended"] = data["statuses"]["suspended"][ - "directive" - ] + data["status_suspended"] = data["statuses"]["suspended"]["directive"] if "revoked" in data.get("statuses"): if "directive" in data.get("statuses").get("revoked"): data["status_revoked"] = data["statuses"]["revoked"]["directive"] @@ -596,9 +588,7 @@ class Meta: metadata = fields.Dict( required=False, metadata={"description": "Metadata dictionary"} ) - constraint = fields.Nested( - ConstraintsSchema, required=False, data_key="constraints" - ) + constraint = fields.Nested(ConstraintsSchema, required=False, data_key="constraints") schemas = fields.Nested( SchemasInputDescriptorFilterSchema, required=False, @@ -672,9 +662,7 @@ class Meta: required=False, metadata={"description": "Min Value", "example": 1234, "strict": True}, ) - input_descriptors = fields.List( - fields.Nested(InputDescriptorsSchema), required=False - ) + input_descriptors = fields.List(fields.Nested(InputDescriptorsSchema), required=False) # Self References nested_req = fields.List( fields.Nested(lambda: RequirementSchema(exclude=("_nested_req",))), @@ -750,9 +738,7 @@ class Meta: submission_requirements = fields.List( fields.Nested(SubmissionRequirementsSchema), required=False ) - input_descriptors = fields.List( - fields.Nested(InputDescriptorsSchema), required=False - ) + input_descriptors = fields.List(fields.Nested(InputDescriptorsSchema), required=False) class InputDescriptorMapping(BaseModel): diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py index 4cf39a4379..2c67bdffff 100644 --- a/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py +++ b/aries_cloudagent/protocols/present_proof/dif/pres_exch_handler.py @@ -119,10 +119,8 @@ async def _get_issue_suite( """Get signature suite for signing presentation.""" did_info = await self._did_info_for_did(issuer_id) verkey_id_strategy = self.profile.context.inject(BaseVerificationKeyStrategy) - verification_method = ( - await verkey_id_strategy.get_verification_method_id_for_did( - issuer_id, self.profile, proof_purpose="assertionMethod" - ) + verification_method = await verkey_id_strategy.get_verification_method_id_for_did( + issuer_id, self.profile, proof_purpose="assertionMethod" ) if verification_method is None: @@ -476,9 +474,7 @@ def create_vcrecord(self, cred_dict: 
dict) -> VCRecord: if subjects: if isinstance(subjects, dict): subjects = [subjects] - subject_ids = [ - subject.get("id") for subject in subjects if ("id" in subject) - ] + subject_ids = [subject.get("id") for subject in subjects if ("id" in subject)] else: cred_dict["credentialSubject"] = {} @@ -548,9 +544,7 @@ def reveal_doc(self, credential_dict: dict, constraints: Constraints): else: return self.reveal_doc_frame - def new_credential_builder( - self, new_credential: dict, unflatten_dict: dict - ) -> dict: + def new_credential_builder(self, new_credential: dict, unflatten_dict: dict) -> dict: """Update and return the new_credential. Args: @@ -648,8 +642,8 @@ def validate_patch(self, to_check: any, _filter: Filter) -> bool: if isinstance(to_check, str): if _filter.fmt == "date" or _filter.fmt == "date-time": try: - to_compare_date = ( - self.string_to_timezone_aware_datetime(to_check) + to_compare_date = self.string_to_timezone_aware_datetime( + to_check ) if isinstance(to_compare_date, datetime): return True @@ -1162,9 +1156,7 @@ def is_numeric(self, val: any): return float(val) except ValueError: pass - raise DIFPresExchError( - "Invalid type provided for comparison/numeric operation." - ) + raise DIFPresExchError("Invalid type provided for comparison/numeric operation.") async def merge_nested_results( self, nested_result: Sequence[dict], exclude: dict @@ -1287,17 +1279,13 @@ async def create_vp( applicable_creds=applicable_creds ) if not issuer_id: - vp = await create_presentation( - credentials=applicable_creds_list - ) + vp = await create_presentation(credentials=applicable_creds_list) vp = self.__add_dif_fields_to_vp(vp, submission_property) result_vp.append(vp) continue else: applicable_creds_list = filtered_creds_list - vp = await create_presentation( - credentials=applicable_creds_list - ) + vp = await create_presentation(credentials=applicable_creds_list) else: issuer_id = self.pres_signing_did vp = await create_presentation(credentials=applicable_creds_list) @@ -1400,9 +1388,7 @@ async def verify_received_pres( input_descriptors = pd.input_descriptors if isinstance(pres, Sequence): for pr in pres: - descriptor_map_list = pr["presentation_submission"].get( - "descriptor_map" - ) + descriptor_map_list = pr["presentation_submission"].get("descriptor_map") await self.__verify_desc_map_list( descriptor_map_list, pr, input_descriptors ) @@ -1412,16 +1398,12 @@ async def verify_received_pres( descriptor_map_list, pres, input_descriptors ) - async def __verify_desc_map_list( - self, descriptor_map_list, pres, input_descriptors - ): + async def __verify_desc_map_list(self, descriptor_map_list, pres, input_descriptors): inp_desc_id_constraint_map = {} inp_desc_id_schema_one_of_filter = set() inp_desc_id_schemas_map = {} for input_descriptor in input_descriptors: - inp_desc_id_constraint_map[input_descriptor.id] = ( - input_descriptor.constraint - ) + inp_desc_id_constraint_map[input_descriptor.id] = input_descriptor.constraint inp_desc_id_schemas_map[input_descriptor.id] = input_descriptor.schemas if input_descriptor.schemas.oneof_filter: inp_desc_id_schema_one_of_filter.add(input_descriptor.id) diff --git a/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py b/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py index 2f0bec6cf0..6033708012 100644 --- a/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py +++ b/aries_cloudagent/protocols/present_proof/dif/pres_request_schema.py @@ -42,9 +42,7 @@ class Meta: model_class = DIFProofRequest 
options = fields.Nested(DIFOptionsSchema(), required=False) - presentation_definition = fields.Nested( - PresentationDefinitionSchema(), required=True - ) + presentation_definition = fields.Nested(PresentationDefinitionSchema(), required=True) class DIFPresSpecSchema(OpenAPISchema): diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py index eaa4661cab..e9920c3299 100644 --- a/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_data.py @@ -59,18 +59,14 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): "http://hl7.org/fhir/Patient.address": [ { "@id": "urn:bnid:_:c14n1", - "http://hl7.org/fhir/Address.city": [ - {"@value": "Рума"} - ], + "http://hl7.org/fhir/Address.city": [{"@value": "Рума"}], "http://hl7.org/fhir/Address.country": [ {"@value": "test"} ], }, { "@id": "urn:bnid:_:c14n1", - "http://hl7.org/fhir/Address.city": [ - {"@value": "Рума"} - ], + "http://hl7.org/fhir/Address.city": [{"@value": "Рума"}], }, ], } @@ -1899,9 +1895,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): "performer": [ {"reference": "PractitionerRole/ca6632d5-a447-6306-e053-5a18000a3953"} ], - "specimen": [ - {"reference": "Specimen/ca666dfb-5a85-614a-e053-5a18000af20b"} - ], + "specimen": [{"reference": "Specimen/ca666dfb-5a85-614a-e053-5a18000af20b"}], "result": [ {"reference": "Observation/ca708651-e8eb-3513-e053-5a18000ae79b"}, {"reference": "Observation/ca708651-e8ec-3513-e053-5a18000ae79b"}, @@ -2020,9 +2014,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", "method": { "coding": [ @@ -2089,9 +2081,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", "method": { "coding": [ @@ -2158,9 +2148,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", "method": { "coding": [ @@ -2227,9 +2215,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", "method": { "coding": [ @@ -2296,9 +2282,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", "method": { "coding": [ @@ -2365,9 +2349,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", 
"method": { "coding": [ @@ -2434,9 +2416,7 @@ def create_vcrecord(cred_dict: dict, expanded_types: list): }, ] }, - "subject": { - "reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7" - }, + "subject": {"reference": "Patient/ca66572a-0a1b-0d53-e053-5a18000ad0b7"}, "effectiveDateTime": "2021-08-26T07:09:00EUROPE/BELGRADE", "method": { "coding": [ diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py index 520f065bc3..0c8ac25e0d 100644 --- a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch.py @@ -397,9 +397,7 @@ def test_schemas_input_desc_filter(self): test_schemas_filter ) ser_schema_filter = deser_schema_filter.serialize() - deser_schema_filter = SchemasInputDescriptorFilter.deserialize( - ser_schema_filter - ) + deser_schema_filter = SchemasInputDescriptorFilter.deserialize(ser_schema_filter) assert deser_schema_filter.oneof_filter assert deser_schema_filter.uri_groups[0][0].uri == test_schema_list[0][0].get( "uri" @@ -424,28 +422,16 @@ def test_schemas_input_desc_filter(self): test_schemas_filter ) ser_schema_filter = deser_schema_filter.serialize() - deser_schema_filter = SchemasInputDescriptorFilter.deserialize( - ser_schema_filter - ) + deser_schema_filter = SchemasInputDescriptorFilter.deserialize(ser_schema_filter) assert deser_schema_filter.oneof_filter - assert deser_schema_filter.uri_groups[0][0].uri == test_schema_list[0].get( - "uri" - ) - assert deser_schema_filter.uri_groups[1][0].uri == test_schema_list[1].get( - "uri" - ) + assert deser_schema_filter.uri_groups[0][0].uri == test_schema_list[0].get("uri") + assert deser_schema_filter.uri_groups[1][0].uri == test_schema_list[1].get("uri") assert isinstance(deser_schema_filter, SchemasInputDescriptorFilter) deser_schema_filter = SchemasInputDescriptorFilter.deserialize(test_schema_list) ser_schema_filter = deser_schema_filter.serialize() - deser_schema_filter = SchemasInputDescriptorFilter.deserialize( - ser_schema_filter - ) + deser_schema_filter = SchemasInputDescriptorFilter.deserialize(ser_schema_filter) assert not deser_schema_filter.oneof_filter - assert deser_schema_filter.uri_groups[0][0].uri == test_schema_list[0].get( - "uri" - ) - assert deser_schema_filter.uri_groups[0][1].uri == test_schema_list[1].get( - "uri" - ) + assert deser_schema_filter.uri_groups[0][0].uri == test_schema_list[0].get("uri") + assert deser_schema_filter.uri_groups[0][1].uri == test_schema_list[1].get("uri") assert isinstance(deser_schema_filter, SchemasInputDescriptorFilter) diff --git a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py index 75b9dac6e4..d2eb5c2cb5 100644 --- a/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py +++ b/aries_cloudagent/protocols/present_proof/dif/tests/test_pres_exch_handler.py @@ -98,9 +98,7 @@ async def test_load_cred_json_a(self, setup_tuple, profile): if isinstance(tmp_vp, Sequence): cred_count_list = [] for tmp_vp_single in tmp_vp: - cred_count_list.append( - len(tmp_vp_single.get("verifiableCredential")) - ) + cred_count_list.append(len(tmp_vp_single.get("verifiableCredential"))) assert min(cred_count_list) == tmp_pd[1] else: @@ -125,9 +123,7 @@ async def test_load_cred_json_b(self, setup_tuple, profile): if isinstance(tmp_vp, Sequence): cred_count_list = [] for tmp_vp_single in 
tmp_vp: - cred_count_list.append( - len(tmp_vp_single.get("verifiableCredential")) - ) + cred_count_list.append(len(tmp_vp_single.get("verifiableCredential"))) assert min(cred_count_list) == tmp_pd[1] else: @@ -1841,9 +1837,7 @@ def test_invalid_number_filter(self, profile): def test_invalid_string_filter(self, profile): dif_pres_exch_handler = DIFPresExchHandler(profile) - assert not dif_pres_exch_handler.process_string_val( - val="test", _filter=Filter() - ) + assert not dif_pres_exch_handler.process_string_val(val="test", _filter=Filter()) @pytest.mark.ursa_bbs_signatures def test_cred_schema_match_b(self, profile, setup_tuple): @@ -2036,9 +2030,7 @@ async def test_get_sign_key_credential_subject_id(self, profile): ( issuer_id, filtered_creds, - ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id( - VC_RECORDS - ) + ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id(VC_RECORDS) assert issuer_id == "did:sov:LjgpST2rjsoxYegQDRm7EL" assert len(filtered_creds) == 2 @@ -2123,9 +2115,7 @@ async def test_get_sign_key_credential_subject_id_bbsbls(self, profile): "https://example.org/examples#UniversityDegreeCredential", ], issuer_id="https://example.edu/issuers/565049", - subject_ids=[ - "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" - ], + subject_ids=["did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"], proof_types=["BbsBlsSignature2020"], schema_ids=["https://example.org/examples/degree.json"], cred_value={"...": "..."}, @@ -2169,12 +2159,8 @@ async def test_get_sign_key_credential_subject_id_bbsbls(self, profile): ( issuer_id, filtered_creds, - ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id( - VC_RECORDS - ) - assert ( - issuer_id == "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" - ) + ) = await dif_pres_exch_handler.get_sign_key_credential_subject_id(VC_RECORDS) + assert issuer_id == "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" assert len(filtered_creds) == 2 @pytest.mark.ursa_bbs_signatures @@ -2193,9 +2179,7 @@ async def test_create_vp_no_issuer(self, profile, setup_tuple): "https://example.org/examples#UniversityDegreeCredential", ], issuer_id="https://example.edu/issuers/565049", - subject_ids=[ - "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" - ], + subject_ids=["did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"], proof_types=["BbsBlsSignature2020"], schema_ids=["https://example.org/examples/degree.json"], cred_value={"...": "..."}, @@ -2476,9 +2460,7 @@ async def test_filter_with_only_string_type(self, setup_tuple, profile): ] } """ - tmp_pd = PresentationDefinition.deserialize( - test_pd_filter_with_only_string_type - ) + tmp_pd = PresentationDefinition.deserialize(test_pd_filter_with_only_string_type) tmp_vp = await dif_pres_exch_handler.create_vp( credentials=cred_list, pd=tmp_pd, @@ -2818,9 +2800,7 @@ async def test_derive_nested_cred_missing_credsubjectid_b(self, profile): .startswith("urn:") ) assert ( - tmp_vp.get("verifiableCredential")[0] - .get("credentialSubject") - .get("college") + tmp_vp.get("verifiableCredential")[0].get("credentialSubject").get("college") == "Contoso University" ) @@ -3621,9 +3601,7 @@ async def test_filter_by_field_keyerror(self, profile): "path": ["$.credentialSubject.Patient[0].address[0].city"], } ) - assert not await dif_pres_exch_handler.filter_by_field( - field, vc_record_cred - ) + assert not await dif_pres_exch_handler.filter_by_field(field, vc_record_cred) @pytest.mark.asyncio async def test_filter_by_field_xsd_parser(self, 
profile): diff --git a/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py b/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py index 9e748fe9a5..9c510afc9a 100644 --- a/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py +++ b/aries_cloudagent/protocols/present_proof/indy/pres_exch_handler.py @@ -251,17 +251,15 @@ async def process_pres_identifiers( ) if identifier["cred_def_id"] not in cred_defs: - cred_defs[identifier["cred_def_id"]] = ( - await ledger.get_credential_definition( - identifier["cred_def_id"] - ) - ) + cred_defs[ + identifier["cred_def_id"] + ] = await ledger.get_credential_definition(identifier["cred_def_id"]) if identifier.get("rev_reg_id"): if identifier["rev_reg_id"] not in rev_reg_defs: - rev_reg_defs[identifier["rev_reg_id"]] = ( - await ledger.get_revoc_reg_def(identifier["rev_reg_id"]) - ) + rev_reg_defs[ + identifier["rev_reg_id"] + ] = await ledger.get_revoc_reg_def(identifier["rev_reg_id"]) if identifier.get("timestamp"): rev_reg_entries.setdefault(identifier["rev_reg_id"], {}) diff --git a/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_proposal_handler.py b/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_proposal_handler.py index d513123258..6716fbd4fd 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_proposal_handler.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_proposal_handler.py @@ -27,9 +27,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): """ r_time = get_timer() profile = context.profile - self._logger.debug( - "PresentationProposalHandler called with context %s", context - ) + self._logger.debug("PresentationProposalHandler called with context %s", context) assert isinstance(context.message, PresentationProposal) self._logger.info( "Received presentation proposal message: %s", @@ -42,9 +40,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) # If connection is present it must be ready for use elif not context.connection_ready: - raise HandlerException( - "Connection used for presentation proposal not ready" - ) + raise HandlerException("Connection used for presentation proposal not ready") presentation_manager = PresentationManager(profile) presentation_exchange_record = await presentation_manager.receive_proposal( diff --git a/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py b/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py index 80f862d296..9651f986bc 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/handlers/presentation_request_handler.py @@ -56,9 +56,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) connection_id = ( - context.connection_record.connection_id - if context.connection_record - else None + context.connection_record.connection_id if context.connection_record else None ) presentation_manager = PresentationManager(profile) @@ -69,15 +67,15 @@ async def handle(self, context: RequestContext, responder: BaseResponder): # or create it (verifier sent request first) try: async with profile.session() as session: - (presentation_exchange_record) = ( - await V10PresentationExchange.retrieve_by_tag_filter( - session, - {"thread_id": context.message._thread_id}, - { - "role": V10PresentationExchange.ROLE_PROVER, - "connection_id": 
connection_id, - }, - ) + ( + presentation_exchange_record + ) = await V10PresentationExchange.retrieve_by_tag_filter( + session, + {"thread_id": context.message._thread_id}, + { + "role": V10PresentationExchange.ROLE_PROVER, + "connection_id": connection_id, + }, ) # holder initiated via proposal presentation_exchange_record.presentation_request = indy_proof_request presentation_exchange_record.presentation_request_dict = ( diff --git a/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_proposal_handler.py b/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_proposal_handler.py index c3df16088a..a2318dde40 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_proposal_handler.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_proposal_handler.py @@ -120,8 +120,7 @@ async def test_called_not_ready(self): with self.assertRaises(test_module.HandlerException) as err: await handler.handle(request_context, responder) assert ( - err.exception.message - == "Connection used for presentation proposal not ready" + err.exception.message == "Connection used for presentation proposal not ready" ) assert not responder.messages diff --git a/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py b/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py index 68875205b3..7afbcc0bc5 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/handlers/tests/test_presentation_request_handler.py @@ -546,9 +546,7 @@ async def test_called_auto_present_pred_single_match(self): ) mock_holder = mock.MagicMock( get_credentials_for_presentation_request_by_referent=( - mock.CoroutineMock( - return_value=[{"cred_info": {"referent": "dummy-0"}}] - ) + mock.CoroutineMock(return_value=[{"cred_info": {"referent": "dummy-0"}}]) ) ) request_context.injector.bind_instance(OobMessageProcessor, mock_oob_processor) diff --git a/aries_cloudagent/protocols/present_proof/v1_0/manager.py b/aries_cloudagent/protocols/present_proof/v1_0/manager.py index 00f72b3b98..8ddf88e12f 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/manager.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/manager.py @@ -298,9 +298,7 @@ async def create_presentation( ) presentation_exchange_record.presentation = indy_proof async with self._profile.session() as session: - await presentation_exchange_record.save( - session, reason="create presentation" - ) + await presentation_exchange_record.save(session, reason="create presentation") return presentation_exchange_record, presentation_message @@ -363,9 +361,9 @@ async def receive_presentation( name = proof_req["requested_attributes"][reft]["name"] value = attr_spec["raw"] if not presentation_preview.has_attr_spec( - cred_def_id=presentation["identifiers"][ - attr_spec["sub_proof_index"] - ]["cred_def_id"], + cred_def_id=presentation["identifiers"][attr_spec["sub_proof_index"]][ + "cred_def_id" + ], name=name, value=value, ): @@ -437,9 +435,7 @@ async def verify_presentation( presentation_exchange_record.state = V10PresentationExchange.STATE_VERIFIED async with self._profile.session() as session: - await presentation_exchange_record.save( - session, reason="verify presentation" - ) + await presentation_exchange_record.save(session, reason="verify presentation") await 
self.send_presentation_ack(presentation_exchange_record, responder) return presentation_exchange_record @@ -517,16 +513,16 @@ async def receive_presentation_ack( connection_id = connection_record.connection_id if connection_record else None async with self._profile.session() as session: - (presentation_exchange_record) = ( - await V10PresentationExchange.retrieve_by_tag_filter( - session, - {"thread_id": message._thread_id}, - { - # connection_id can be null in connectionless - "connection_id": connection_id, - "role": V10PresentationExchange.ROLE_PROVER, - }, - ) + ( + presentation_exchange_record + ) = await V10PresentationExchange.retrieve_by_tag_filter( + session, + {"thread_id": message._thread_id}, + { + # connection_id can be null in connectionless + "connection_id": connection_id, + "role": V10PresentationExchange.ROLE_PROVER, + }, ) presentation_exchange_record.verified = message._verification_result presentation_exchange_record.state = ( diff --git a/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py b/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py index 9a3dac03f9..f6b6bf1060 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/models/presentation_exchange.py @@ -125,9 +125,7 @@ def presentation_proposal_dict(self, value): def presentation_request(self) -> IndyProofRequest: """Accessor; get deserialized view.""" return ( - None - if self._presentation_request is None - else self._presentation_request.de + None if self._presentation_request is None else self._presentation_request.de ) @presentation_request.setter diff --git a/aries_cloudagent/protocols/present_proof/v1_0/routes.py b/aries_cloudagent/protocols/present_proof/v1_0/routes.py index 1af0f985eb..97459a687e 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/routes.py @@ -187,9 +187,7 @@ class V10PresentationCreateRequestRequestSchema(AdminAPIMessageTracingSchema): ) -class V10PresentationSendRequestRequestSchema( - V10PresentationCreateRequestRequestSchema -): +class V10PresentationSendRequestRequestSchema(V10PresentationCreateRequestRequestSchema): """Request schema for sending a proof request on a connection.""" connection_id = fields.Str( @@ -964,9 +962,7 @@ async def presentation_exchange_verify_presentation(request: web.BaseRequest): except StorageNotFoundError as err: raise web.HTTPNotFound(reason=err.roll_up) from err - if pres_ex_record.state != ( - V10PresentationExchange.STATE_PRESENTATION_RECEIVED - ): + if pres_ex_record.state != (V10PresentationExchange.STATE_PRESENTATION_RECEIVED): raise web.HTTPBadRequest( reason=( f"Presentation exchange {presentation_exchange_id} " diff --git a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py index 5ff7ee0eeb..75f909e0e4 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_manager.py @@ -309,9 +309,7 @@ async def asyncSetUp(self): Verifier = mock.MagicMock(IndyVerifier, autospec=True) self.verifier = Verifier() - self.verifier.verify_presentation = mock.CoroutineMock( - return_value=("true", []) - ) + self.verifier.verify_presentation = mock.CoroutineMock(return_value=("true", [])) injector.bind_instance(IndyVerifier, self.verifier) self.manager = PresentationManager(self.profile) 
@@ -354,9 +352,7 @@ async def test_create_exchange_for_proposal(self): with mock.patch.object( V10PresentationExchange, "save", autospec=True - ) as save_ex, mock.patch.object( - PresentationProposal, "serialize", autospec=True - ): + ) as save_ex, mock.patch.object(PresentationProposal, "serialize", autospec=True): exchange = await self.manager.create_exchange_for_proposal( CONN_ID, proposal, @@ -375,9 +371,7 @@ async def test_receive_proposal(self): connection_record = mock.MagicMock(connection_id=CONN_ID) proposal = PresentationProposal() - with mock.patch.object( - V10PresentationExchange, "save", autospec=True - ) as save_ex: + with mock.patch.object(V10PresentationExchange, "save", autospec=True) as save_ex: exchange = await self.manager.receive_proposal(proposal, connection_record) save_ex.assert_called_once() @@ -418,9 +412,7 @@ async def test_create_exchange_for_request(self): ] ) - with mock.patch.object( - V10PresentationExchange, "save", autospec=True - ) as save_ex: + with mock.patch.object(V10PresentationExchange, "save", autospec=True) as save_ex: exchange = await self.manager.create_exchange_for_request( CONN_ID, pres_req, @@ -437,9 +429,7 @@ async def test_create_exchange_for_request(self): async def test_receive_request(self): exchange_in = V10PresentationExchange() - with mock.patch.object( - V10PresentationExchange, "save", autospec=True - ) as save_ex: + with mock.patch.object(V10PresentationExchange, "save", autospec=True) as save_ex: exchange_out = await self.manager.receive_request(exchange_in) save_ex.assert_called_once() @@ -1228,9 +1218,7 @@ async def test_verify_presentation(self): presentation=INDY_PROOF, ) - with mock.patch.object( - V10PresentationExchange, "save", autospec=True - ) as save_ex: + with mock.patch.object(V10PresentationExchange, "save", autospec=True) as save_ex: exchange_out = await self.manager.verify_presentation(exchange_in) save_ex.assert_called_once() diff --git a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py index 9b5889b973..0f20eae8e4 100644 --- a/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/present_proof/v1_0/tests/test_routes.py @@ -256,9 +256,7 @@ async def test_presentation_exchange_retrieve(self): with mock.patch.object(test_module.web, "json_response") as mock_response: await test_module.presentation_exchange_retrieve(self.request) - mock_response.assert_called_once_with( - mock_pres_ex.serialize.return_value - ) + mock_response.assert_called_once_with(mock_pres_ex.serialize.return_value) async def test_presentation_exchange_retrieve_not_found(self): self.request.match_info = {"pres_ex_id": "dummy"} @@ -401,9 +399,7 @@ async def test_presentation_exchange_send_proposal_x(self): mock_presentation_manager.return_value.create_exchange_for_proposal = ( mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ -496,9 +492,7 @@ async def test_presentation_exchange_create_request_x(self): mock_presentation_manager.return_value.create_exchange_for_request = ( mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ -644,9 +638,7 @@ async def 
test_presentation_exchange_send_free_request_x(self): mock_presentation_manager.return_value.create_exchange_for_request = ( mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ -1232,9 +1224,7 @@ async def test_presentation_exchange_verify_presentation(self): mock_presentation_manager.return_value = mock_mgr with mock.patch.object(test_module.web, "json_response") as mock_response: - await test_module.presentation_exchange_verify_presentation( - self.request - ) + await test_module.presentation_exchange_verify_presentation(self.request) mock_response.assert_called_once_with({"thread_id": "sample-thread-id"}) async def test_presentation_exchange_verify_presentation_px_rec_not_found(self): @@ -1248,9 +1238,7 @@ async def test_presentation_exchange_verify_presentation_px_rec_not_found(self): ) as mock_retrieve: mock_retrieve.side_effect = StorageNotFoundError("no such record") with self.assertRaises(test_module.web.HTTPNotFound) as context: - await test_module.presentation_exchange_verify_presentation( - self.request - ) + await test_module.presentation_exchange_verify_presentation(self.request) assert "no such record" in str(context.exception) async def test_presentation_exchange_verify_presentation_bad_state(self): @@ -1273,9 +1261,7 @@ async def test_presentation_exchange_verify_presentation_bad_state(self): ) ) with self.assertRaises(test_module.web.HTTPBadRequest): - await test_module.presentation_exchange_verify_presentation( - self.request - ) + await test_module.presentation_exchange_verify_presentation(self.request) async def test_presentation_exchange_verify_presentation_x(self): self.request.match_info = {"pres_ex_id": "dummy"} @@ -1336,13 +1322,9 @@ async def test_presentation_exchange_verify_presentation_x(self): mock_presentation_manager.return_value = mock_mgr with self.assertRaises(test_module.web.HTTPBadRequest): # ledger error - await test_module.presentation_exchange_verify_presentation( - self.request - ) + await test_module.presentation_exchange_verify_presentation(self.request) with self.assertRaises(test_module.web.HTTPBadRequest): # storage error - await test_module.presentation_exchange_verify_presentation( - self.request - ) + await test_module.presentation_exchange_verify_presentation(self.request) async def test_presentation_exchange_problem_report(self): self.request.json = mock.CoroutineMock() diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/anoncreds/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/anoncreds/handler.py index fd23962d36..a3b5862a5c 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/formats/anoncreds/handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/anoncreds/handler.py @@ -140,12 +140,10 @@ async def create_pres( indy_proof_request = proof_request.attachment( AnonCredsPresExchangeHandler.format ) - requested_credentials = ( - await indy_proof_req_preview2indy_requested_creds( - indy_proof_request, - preview=None, - holder=AnonCredsHolder(self._profile), - ) + requested_credentials = await indy_proof_req_preview2indy_requested_creds( + indy_proof_request, + preview=None, + holder=AnonCredsHolder(self._profile), ) except ValueError as err: LOGGER.warning(f"{err}") diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py 
b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py index 5bc0d714e4..569d303705 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/handler.py @@ -224,16 +224,13 @@ async def create_pres( elif ( len(proof_types) == 1 and ( - BbsBlsSignature2020.signature_type - not in proof_types + BbsBlsSignature2020.signature_type not in proof_types ) and ( - Ed25519Signature2018.signature_type - not in proof_types + Ed25519Signature2018.signature_type not in proof_types ) and ( - Ed25519Signature2020.signature_type - not in proof_types + Ed25519Signature2020.signature_type not in proof_types ) ): raise V20PresFormatHandlerError( @@ -246,16 +243,13 @@ async def create_pres( elif ( len(proof_types) >= 2 and ( - BbsBlsSignature2020.signature_type - not in proof_types + BbsBlsSignature2020.signature_type not in proof_types ) and ( - Ed25519Signature2018.signature_type - not in proof_types + Ed25519Signature2018.signature_type not in proof_types ) and ( - Ed25519Signature2020.signature_type - not in proof_types + Ed25519Signature2020.signature_type not in proof_types ) ): raise V20PresFormatHandlerError( @@ -269,20 +263,15 @@ async def create_pres( proof_format == Ed25519Signature2018.signature_type ): - proof_type = [ - Ed25519Signature2018.signature_type - ] + proof_type = [Ed25519Signature2018.signature_type] dif_handler_proof_type = ( Ed25519Signature2018.signature_type ) break elif ( - proof_format - == BbsBlsSignature2020.signature_type + proof_format == BbsBlsSignature2020.signature_type ): - proof_type = [ - BbsBlsSignature2020.signature_type - ] + proof_type = [BbsBlsSignature2020.signature_type] dif_handler_proof_type = ( BbsBlsSignature2020.signature_type ) @@ -462,9 +451,7 @@ async def verify_pres(self, pres_ex_record: V20PresExRecord) -> V20PresExRecord: """ dif_proof = pres_ex_record.pres.attachment(DIFPresFormatHandler.format) - pres_request = pres_ex_record.pres_request.attachment( - DIFPresFormatHandler.format - ) + pres_request = pres_ex_record.pres_request.attachment(DIFPresFormatHandler.format) pres_ver_result = None if isinstance(dif_proof, Sequence): if len(dif_proof) == 0: diff --git a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py index 61e7f5c250..8cb8b891b1 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/dif/tests/test_handler.py @@ -421,9 +421,7 @@ async def test_create_bound_request_a(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_json(dif_proposal_dict, ident="dif") - ], + proposals_attach=[AttachDecorator.data_json(dif_proposal_dict, ident="dif")], ) record = V20PresExRecord( pres_ex_id="pxid", @@ -477,9 +475,7 @@ async def test_create_bound_request_b(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_json(dif_proposal_dict, ident="dif") - ], + proposals_attach=[AttachDecorator.data_json(dif_proposal_dict, ident="dif")], ) record = V20PresExRecord( pres_ex_id="pxid", @@ -533,9 +529,7 @@ async def test_create_bound_request_c(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_json(dif_proposal_dict, ident="dif") - ], + proposals_attach=[AttachDecorator.data_json(dif_proposal_dict, ident="dif")], ) record = V20PresExRecord( pres_ex_id="pxid", @@ -1503,8 +1497,8 @@ async def test_retrieve_uri_list_from_schema_filter(self): 
SchemaInputDescriptor(uri="test321", required=True), ] ] - test_one_of_uri_groups = ( - await self.handler.retrieve_uri_list_from_schema_filter(test_schema_filter) + test_one_of_uri_groups = await self.handler.retrieve_uri_list_from_schema_filter( + test_schema_filter ) assert test_one_of_uri_groups == [["test123", "test321"]] @@ -1766,9 +1760,7 @@ async def test_verify_received_limit_disclosure_a(self): ], ) pres_request = deepcopy(DIF_PRES_REQUEST_B) - pres_request["presentation_definition"]["input_descriptors"][0][ - "constraints" - ] = { + pres_request["presentation_definition"]["input_descriptors"][0]["constraints"] = { "limit_disclosure": "required", "fields": [ { @@ -1816,9 +1808,7 @@ async def test_verify_received_limit_disclosure_a(self): auto_present=True, error_msg="error", ) - with mock.patch.object( - jsonld, "expand", mock.MagicMock() - ) as mock_jsonld_expand: + with mock.patch.object(jsonld, "expand", mock.MagicMock()) as mock_jsonld_expand: mock_jsonld_expand.return_value = EXPANDED_CRED_FHIR_TYPE_2 await self.handler.receive_pres(message=dif_pres, pres_ex_record=record) @@ -1853,9 +1843,7 @@ async def test_verify_received_limit_disclosure_b(self): ], ) pres_request = deepcopy(DIF_PRES_REQUEST_B) - pres_request["presentation_definition"]["input_descriptors"][0][ - "constraints" - ] = { + pres_request["presentation_definition"]["input_descriptors"][0]["constraints"] = { "limit_disclosure": "required", "fields": [ { @@ -1895,17 +1883,15 @@ async def test_verify_received_limit_disclosure_b(self): auto_present=True, error_msg="error", ) - with mock.patch.object( - jsonld, "expand", mock.MagicMock() - ) as mock_jsonld_expand: + with mock.patch.object(jsonld, "expand", mock.MagicMock()) as mock_jsonld_expand: mock_jsonld_expand.return_value = EXPANDED_CRED_FHIR_TYPE_1 await self.handler.receive_pres(message=dif_pres, pres_ex_record=record) async def test_verify_received_pres_invalid_jsonpath(self): dif_proof = deepcopy(DIF_PRES) - dif_proof["presentation_submission"]["descriptor_map"][0][ - "path" - ] = "$.verifiableCredential[1]" + dif_proof["presentation_submission"]["descriptor_map"][0]["path"] = ( + "$.verifiableCredential[1]" + ) dif_pres = V20Pres( formats=[ V20PresFormat( @@ -2206,9 +2192,7 @@ async def test_verify_received_pres_fail_schema_filter(self): ], ) pres_request = deepcopy(DIF_PRES_REQUEST_B) - pres_request["presentation_definition"]["input_descriptors"][0][ - "constraints" - ] = { + pres_request["presentation_definition"]["input_descriptors"][0]["constraints"] = { "limit_disclosure": "required", "fields": [ { @@ -2354,17 +2338,13 @@ def test_get_type_manager_options(self): profile = InMemoryProfile.test_profile() handler = DIFPresFormatHandler(profile) dif_proof = {"proof": {"type": "DataIntegrityProof"}} - pres_request = { - "options": {"challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7"} - } + pres_request = {"options": {"challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7"}} manager, options = handler._get_type_manager_options(dif_proof, pres_request) assert isinstance(manager, VcDiManager) assert options == pres_request dif_proof = {"proof": {"type": "LDPProof"}} - pres_request = { - "options": {"challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7"} - } + pres_request = {"options": {"challenge": "3fa85f64-5717-4562-b3fc-2c963f66afa7"}} manager, options = handler._get_type_manager_options(dif_proof, pres_request) assert isinstance(manager, VcLdpManager) assert options.challenge == "3fa85f64-5717-4562-b3fc-2c963f66afa7" diff --git 
a/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py b/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py index 0a2007d856..abddabecf0 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/formats/indy/handler.py @@ -159,9 +159,7 @@ async def create_pres( """Create a presentation.""" # Temporary shim while the new anoncreds library integration is in progress if self.anoncreds_handler: - return await self.anoncreds_handler.create_pres( - pres_ex_record, request_data - ) + return await self.anoncreds_handler.create_pres(pres_ex_record, request_data) requested_credentials = {} if not request_data: @@ -170,12 +168,10 @@ async def create_pres( indy_proof_request = proof_request.attachment( IndyPresExchangeHandler.format ) - requested_credentials = ( - await indy_proof_req_preview2indy_requested_creds( - indy_proof_request, - preview=None, - holder=self._profile.inject(IndyHolder), - ) + requested_credentials = await indy_proof_req_preview2indy_requested_creds( + indy_proof_request, + preview=None, + holder=self._profile.inject(IndyHolder), ) except ValueError as err: LOGGER.warning(f"{err}") diff --git a/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_proposal_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_proposal_handler.py index 23b16c0ebf..a9bd71cc7a 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_proposal_handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_proposal_handler.py @@ -40,9 +40,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) # If connection is present it must be ready for use elif not context.connection_ready: - raise HandlerException( - "Connection used for presentation proposal not ready" - ) + raise HandlerException("Connection used for presentation proposal not ready") profile = context.profile pres_manager = V20PresManager(profile) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py index 60b1fcff9d..dfd4b9ac34 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/handlers/pres_request_handler.py @@ -56,9 +56,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): ) connection_id = ( - context.connection_record.connection_id - if context.connection_record - else None + context.connection_record.connection_id if context.connection_record else None ) profile = context.profile diff --git a/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py b/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py index 790e9d7175..671f469823 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/handlers/tests/test_pres_request_handler.py @@ -188,9 +188,7 @@ async def test_called(self): request_context.connection_record.connection_id = "dummy" request_context.message_receipt = MessageReceipt() request_context.message = V20PresRequest() - request_context.message.attachment = mock.MagicMock( - return_value=mock.MagicMock() - ) + request_context.message.attachment = mock.MagicMock(return_value=mock.MagicMock()) mock_oob_processor = mock.MagicMock( 
find_oob_record_for_inbound_message=mock.CoroutineMock( @@ -206,9 +204,7 @@ async def test_called(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) px_rec_instance = test_module.V20PresExRecord( pres_proposal=pres_proposal.serialize(), @@ -247,9 +243,7 @@ async def test_called_not_found(self): request_context.connection_record.connection_id = "dummy" request_context.message_receipt = MessageReceipt() request_context.message = V20PresRequest() - request_context.message.attachment = mock.MagicMock( - return_value=mock.MagicMock() - ) + request_context.message.attachment = mock.MagicMock(return_value=mock.MagicMock()) mock_oob_processor = mock.MagicMock( find_oob_record_for_inbound_message=mock.CoroutineMock( @@ -265,9 +259,7 @@ async def test_called_not_found(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) px_rec_instance = test_module.V20PresExRecord( pres_proposal=pres_proposal.serialize(), @@ -316,9 +308,7 @@ async def test_called_auto_present_x_indy(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_px_rec = mock.MagicMock( pres_proposal=pres_proposal.serialize(), @@ -381,9 +371,7 @@ async def test_called_auto_present_x_anoncreds(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_px_rec = mock.MagicMock( pres_proposal=pres_proposal.serialize(), @@ -453,9 +441,7 @@ async def test_called_auto_present_indy(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_px_rec = test_module.V20PresExRecord( pres_proposal=pres_proposal.serialize(), @@ -527,9 +513,7 @@ async def test_called_auto_present_anoncreds(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_px_rec = test_module.V20PresExRecord( pres_proposal=pres_proposal.serialize(), @@ -799,9 +783,7 @@ async def test_called_auto_present_pred_no_match_indy(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_px_rec = mock.MagicMock( pres_proposal=pres_proposal.serialize(), @@ -867,9 +849,7 @@ async def test_called_auto_present_pred_no_match_anoncreds(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_px_rec = mock.MagicMock( pres_proposal=pres_proposal.serialize(), @@ -948,9 +928,7 @@ async def test_called_auto_present_pred_single_match_indy(self): mock_holder = mock.MagicMock( 
get_credentials_for_presentation_request_by_referent=( - mock.CoroutineMock( - return_value=[{"cred_info": {"referent": "dummy-0"}}] - ) + mock.CoroutineMock(return_value=[{"cred_info": {"referent": "dummy-0"}}]) ) ) request_context.injector.bind_instance(IndyHolder, mock_holder) @@ -1016,9 +994,7 @@ async def test_called_auto_present_pred_single_match_anoncreds(self): mock_holder = mock.MagicMock( get_credentials_for_presentation_request_by_referent=( - mock.CoroutineMock( - return_value=[{"cred_info": {"referent": "dummy-0"}}] - ) + mock.CoroutineMock(return_value=[{"cred_info": {"referent": "dummy-0"}}]) ) ) request_context.injector.bind_instance(AnonCredsHolder, mock_holder) @@ -1220,9 +1196,7 @@ async def test_called_auto_present_multi_cred_match_reft_indy(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_oob_processor = mock.MagicMock( @@ -1334,9 +1308,7 @@ async def test_called_auto_present_multi_cred_match_reft_anoncreds(self): format_=V20PresFormat.Format.INDY.aries, ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) mock_oob_processor = mock.MagicMock( diff --git a/aries_cloudagent/protocols/present_proof/v2_0/manager.py b/aries_cloudagent/protocols/present_proof/v2_0/manager.py index 0568698331..040314e232 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/manager.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/manager.py @@ -71,9 +71,7 @@ async def create_exchange_for_proposal( ) async with self._profile.session() as session: - await pres_ex_record.save( - session, reason="create v2.0 presentation proposal" - ) + await pres_ex_record.save(session, reason="create v2.0 presentation proposal") return pres_ex_record @@ -98,9 +96,7 @@ async def receive_pres_proposal( ) async with self._profile.session() as session: - await pres_ex_record.save( - session, reason="receive v2.0 presentation request" - ) + await pres_ex_record.save(session, reason="receive v2.0 presentation request") return pres_ex_record @@ -211,9 +207,7 @@ async def receive_pres_request(self, pres_ex_record: V20PresExRecord): """ pres_ex_record.state = V20PresExRecord.STATE_REQUEST_RECEIVED async with self._profile.session() as session: - await pres_ex_record.save( - session, reason="receive v2.0 presentation request" - ) + await pres_ex_record.save(session, reason="receive v2.0 presentation request") return pres_ex_record @@ -299,9 +293,7 @@ async def create_pres( # Assign thid (and optionally pthid) to message pres_message.assign_thread_from(pres_ex_record.pres_request) - pres_message.assign_trace_decorator( - self._profile.settings, pres_ex_record.trace - ) + pres_message.assign_trace_decorator(self._profile.settings, pres_ex_record.trace) # save presentation exchange state pres_ex_record.state = V20PresExRecord.STATE_PRESENTATION_SENT @@ -334,7 +326,9 @@ async def receive_pres( connection_id = ( None if oob_record - else connection_record.connection_id if connection_record else None + else connection_record.connection_id + if connection_record + else None ) async with self._profile.session() as session: diff --git a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py index 51e2d2dfb4..dd9e0cbf08 100644 --- 
a/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/messages/pres_request.py @@ -12,9 +12,7 @@ from ..message_types import PRES_20_REQUEST, PROTOCOL_PACKAGE from .pres_format import V20PresFormat, V20PresFormatSchema -HANDLER_CLASS = ( - f"{PROTOCOL_PACKAGE}.handlers.pres_request_handler.V20PresRequestHandler" -) +HANDLER_CLASS = f"{PROTOCOL_PACKAGE}.handlers.pres_request_handler.V20PresRequestHandler" class V20PresRequest(AgentMessage): diff --git a/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py b/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py index cf72950d84..c4bf6d8e67 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/models/tests/test_record.py @@ -80,9 +80,7 @@ async def test_record(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(INDY_PROOF_REQ, ident="indy")], ) record = V20PresExRecord( pres_ex_id="pxid", diff --git a/aries_cloudagent/protocols/present_proof/v2_0/routes.py b/aries_cloudagent/protocols/present_proof/v2_0/routes.py index a1e5b9f257..2edb0e4c08 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/routes.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/routes.py @@ -159,9 +159,7 @@ class V20PresProposalRequestSchema(AdminAPIMessageTracingSchema): allow_none=True, metadata={"description": "Human-readable comment"}, ) - presentation_proposal = fields.Nested( - V20PresProposalByFormatSchema(), required=True - ) + presentation_proposal = fields.Nested(V20PresProposalByFormatSchema(), required=True) auto_present = fields.Boolean( required=False, dump_default=False, @@ -409,9 +407,7 @@ def _formats_attach(by_format: Mapping, msg_type: str, spec: str) -> Mapping: attach = [] for fmt_api, item_by_fmt in by_format.items(): if fmt_api == V20PresFormat.Format.INDY.api: - attach.append( - AttachDecorator.data_base64(mapping=item_by_fmt, ident=fmt_api) - ) + attach.append(AttachDecorator.data_base64(mapping=item_by_fmt, ident=fmt_api)) elif fmt_api == V20PresFormat.Format.DIF.api: attach.append(AttachDecorator.data_json(mapping=item_by_fmt, ident=fmt_api)) return { @@ -605,9 +601,7 @@ async def present_proof_credentials_list(request: web.BaseRequest): input_descriptors_list = dif_pres_request.get( "presentation_definition", {} ).get("input_descriptors") - claim_fmt = dif_pres_request.get("presentation_definition", {}).get( - "format" - ) + claim_fmt = dif_pres_request.get("presentation_definition", {}).get("format") if claim_fmt and len(claim_fmt.keys()) > 0: claim_fmt = ClaimFormat.deserialize(claim_fmt) input_descriptors = [] @@ -659,16 +653,13 @@ async def present_proof_credentials_list(request: web.BaseRequest): elif ( len(proof_types) == 1 and ( - BbsBlsSignature2020.signature_type - not in proof_types + BbsBlsSignature2020.signature_type not in proof_types ) and ( - Ed25519Signature2018.signature_type - not in proof_types + Ed25519Signature2018.signature_type not in proof_types ) and ( - Ed25519Signature2020.signature_type - not in proof_types + Ed25519Signature2020.signature_type not in proof_types ) ): raise web.HTTPBadRequest( @@ -682,16 +673,13 @@ async def present_proof_credentials_list(request: web.BaseRequest): elif ( len(proof_types) >= 2 and ( - BbsBlsSignature2020.signature_type - not in proof_types + BbsBlsSignature2020.signature_type not in 
proof_types ) and ( - Ed25519Signature2018.signature_type - not in proof_types + Ed25519Signature2018.signature_type not in proof_types ) and ( - Ed25519Signature2020.signature_type - not in proof_types + Ed25519Signature2020.signature_type not in proof_types ) ): raise web.HTTPBadRequest( @@ -707,25 +695,18 @@ async def present_proof_credentials_list(request: web.BaseRequest): proof_format == Ed25519Signature2018.signature_type ): - proof_type = [ - Ed25519Signature2018.signature_type - ] + proof_type = [Ed25519Signature2018.signature_type] break elif ( proof_format == Ed25519Signature2020.signature_type ): - proof_type = [ - Ed25519Signature2020.signature_type - ] + proof_type = [Ed25519Signature2020.signature_type] break elif ( - proof_format - == BbsBlsSignature2020.signature_type + proof_format == BbsBlsSignature2020.signature_type ): - proof_type = [ - BbsBlsSignature2020.signature_type - ] + proof_type = [BbsBlsSignature2020.signature_type] break elif claim_fmt.di_vc: if "proof_type" in claim_fmt.di_vc: @@ -1395,9 +1376,7 @@ async def present_proof_remove(request: web.BaseRequest): try: async with context.profile.session() as session: try: - pres_ex_record = await V20PresExRecord.retrieve_by_id( - session, pres_ex_id - ) + pres_ex_record = await V20PresExRecord.retrieve_by_id(session, pres_ex_id) await pres_ex_record.delete_record(session) except (BaseModelError, ValidationError): storage = session.inject(BaseStorage) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py index 874ba4c148..9edef6a3d1 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager.py @@ -475,9 +475,7 @@ async def asyncSetUp(self): Verifier = mock.MagicMock(IndyVerifier, autospec=True) self.verifier = Verifier() - self.verifier.verify_presentation = mock.CoroutineMock( - return_value=("true", []) - ) + self.verifier.verify_presentation = mock.CoroutineMock(return_value=("true", [])) injector.bind_instance(IndyVerifier, self.verifier) self.manager = V20PresManager(self.profile) @@ -1357,9 +1355,7 @@ async def test_receive_pres(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) pres.assign_thread_id("thread-id") @@ -1409,9 +1405,9 @@ async def test_receive_pres_receive_pred_value_mismatch_punt_to_indy(self): ], ) indy_proof_req = deepcopy(INDY_PROOF_REQ_NAME) - indy_proof_req["requested_predicates"]["0_highscore_GE_uuid"]["restrictions"][ - 0 - ]["attr::player::value"] = "impostor" + indy_proof_req["requested_predicates"]["0_highscore_GE_uuid"]["restrictions"][0][ + "attr::player::value" + ] = "impostor" pres_request = V20PresRequest( formats=[ V20PresFormat( @@ -1432,9 +1428,7 @@ async def test_receive_pres_receive_pred_value_mismatch_punt_to_indy(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) pres.assign_thread_id("thread-id") @@ -1516,9 +1510,7 @@ async def test_receive_pres_indy_no_predicate_restrictions(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - 
AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) pres.assign_thread_id("thread-id") @@ -1629,9 +1621,7 @@ async def test_receive_pres_indy_no_attr_restrictions(self): async def test_receive_pres_bait_and_switch_attr_name(self): connection_record = mock.MagicMock(connection_id=CONN_ID) indy_proof_req = deepcopy(INDY_PROOF_REQ_NAME) - indy_proof_req["requested_attributes"]["0_screencapture_uuid"]["restrictions"][ - 0 - ][ + indy_proof_req["requested_attributes"]["0_screencapture_uuid"]["restrictions"][0][ "attr::screenCapture::value" ] = "c2NyZWVuIGNhcHR1cmUgc2hvd2luZyBzY29yZSBpbiB0aGUgbWlsbGlvbnM=" pres_proposal = V20PresProposal( @@ -1667,9 +1657,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( pres_proposal=pres_proposal.serialize(), @@ -1684,9 +1672,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): retrieve_ex.return_value = px_rec_dummy with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record, None) - assert "does not satisfy proof request restrictions" in str( - context.exception - ) + assert "does not satisfy proof request restrictions" in str(context.exception) indy_proof_req["requested_attributes"]["shenanigans"] = indy_proof_req[ "requested_attributes" @@ -1700,9 +1686,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1722,9 +1706,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -1757,9 +1739,7 @@ async def test_receive_pres_bait_and_switch_attr_names(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1815,9 +1795,7 @@ async def test_receive_pres_bait_and_switch_attr_names(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1894,9 +1872,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -1930,9 +1906,7 @@ async def test_receive_pres_bait_and_switch_pred(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1952,9 
+1926,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -1988,9 +1960,7 @@ async def test_receive_pres_bait_and_switch_pred(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -2010,9 +1980,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -2046,9 +2014,7 @@ async def test_receive_pres_bait_and_switch_pred(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -2068,9 +2034,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -2112,9 +2076,7 @@ async def test_verify_pres(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_in = V20PresExRecord( pres_request=pres_request, @@ -2209,9 +2171,7 @@ async def test_verify_pres_indy_and_dif(self): mock.CoroutineMock( return_value=PresentationVerificationResult(verified=True) ), - ), mock.patch.object( - V20PresExRecord, "save", autospec=True - ) as save_ex: + ), mock.patch.object(V20PresExRecord, "save", autospec=True) as save_ex: px_rec_out = await self.manager.verify_pres(px_rec_in) save_ex.assert_called_once() assert px_rec_out.state == (V20PresExRecord.STATE_DONE) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager_anoncreds.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager_anoncreds.py index 3490cb1f9a..eb2aae61be 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager_anoncreds.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_manager_anoncreds.py @@ -480,9 +480,7 @@ async def asyncSetUp(self): Verifier = mock.MagicMock(AnonCredsVerifier, autospec=True) self.verifier = Verifier() - self.verifier.verify_presentation = mock.CoroutineMock( - return_value=("true", []) - ) + self.verifier.verify_presentation = mock.CoroutineMock(return_value=("true", [])) injector.bind_instance(AnonCredsVerifier, self.verifier) self.manager = V20PresManager(self.profile) @@ -1367,9 +1365,7 @@ async def test_receive_pres(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) pres.assign_thread_id("thread-id") @@ 
-1419,9 +1415,9 @@ async def test_receive_pres_receive_pred_value_mismatch_punt_to_indy(self): ], ) indy_proof_req = deepcopy(INDY_PROOF_REQ_NAME) - indy_proof_req["requested_predicates"]["0_highscore_GE_uuid"]["restrictions"][ - 0 - ]["attr::player::value"] = "impostor" + indy_proof_req["requested_predicates"]["0_highscore_GE_uuid"]["restrictions"][0][ + "attr::player::value" + ] = "impostor" pres_request = V20PresRequest( formats=[ V20PresFormat( @@ -1442,9 +1438,7 @@ async def test_receive_pres_receive_pred_value_mismatch_punt_to_indy(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) pres.assign_thread_id("thread-id") @@ -1526,9 +1520,7 @@ async def test_receive_pres_indy_no_predicate_restrictions(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) pres.assign_thread_id("thread-id") @@ -1639,9 +1631,7 @@ async def test_receive_pres_indy_no_attr_restrictions(self): async def test_receive_pres_bait_and_switch_attr_name(self): connection_record = mock.MagicMock(connection_id=CONN_ID) indy_proof_req = deepcopy(INDY_PROOF_REQ_NAME) - indy_proof_req["requested_attributes"]["0_screencapture_uuid"]["restrictions"][ - 0 - ][ + indy_proof_req["requested_attributes"]["0_screencapture_uuid"]["restrictions"][0][ "attr::screenCapture::value" ] = "c2NyZWVuIGNhcHR1cmUgc2hvd2luZyBzY29yZSBpbiB0aGUgbWlsbGlvbnM=" pres_proposal = V20PresProposal( @@ -1677,9 +1667,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( pres_proposal=pres_proposal.serialize(), @@ -1694,9 +1682,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): retrieve_ex.return_value = px_rec_dummy with self.assertRaises(V20PresFormatHandlerError) as context: await self.manager.receive_pres(pres_x, connection_record, None) - assert "does not satisfy proof request restrictions" in str( - context.exception - ) + assert "does not satisfy proof request restrictions" in str(context.exception) indy_proof_req["requested_attributes"]["shenanigans"] = indy_proof_req[ "requested_attributes" @@ -1710,9 +1696,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1732,9 +1716,7 @@ async def test_receive_pres_bait_and_switch_attr_name(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -1767,9 +1749,7 @@ async def test_receive_pres_bait_and_switch_attr_names(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, 
ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1825,9 +1805,7 @@ async def test_receive_pres_bait_and_switch_attr_names(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1904,9 +1882,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -1940,9 +1916,7 @@ async def test_receive_pres_bait_and_switch_pred(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -1962,9 +1936,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -1998,9 +1970,7 @@ async def test_receive_pres_bait_and_switch_pred(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -2020,9 +1990,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -2056,9 +2024,7 @@ async def test_receive_pres_bait_and_switch_pred(self): ], ) ], - proposals_attach=[ - AttachDecorator.data_base64(indy_proof_req, ident="indy") - ], + proposals_attach=[AttachDecorator.data_base64(indy_proof_req, ident="indy")], ) pres_request = V20PresRequest( formats=[ @@ -2078,9 +2044,7 @@ async def test_receive_pres_bait_and_switch_pred(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_dummy = V20PresExRecord( @@ -2123,9 +2087,7 @@ async def test_verify_pres(self): format_=ATTACHMENT_FORMAT[PRES_20][V20PresFormat.Format.INDY.api], ) ], - presentations_attach=[ - AttachDecorator.data_base64(INDY_PROOF, ident="indy") - ], + presentations_attach=[AttachDecorator.data_base64(INDY_PROOF, ident="indy")], ) px_rec_in = V20PresExRecord( pres_request=pres_request, @@ -2221,9 +2183,7 @@ async def test_verify_pres_indy_and_dif(self): mock.CoroutineMock( return_value=PresentationVerificationResult(verified=True) ), - ), mock.patch.object( - V20PresExRecord, "save", autospec=True - ) as save_ex: + ), mock.patch.object(V20PresExRecord, "save", autospec=True) as save_ex: px_rec_out = await self.manager.verify_pres(px_rec_in) save_ex.assert_called_once() assert px_rec_out.state == (V20PresExRecord.STATE_DONE) diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py 
b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py index 328b2bf878..2b41547116 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes.py @@ -1031,9 +1031,9 @@ async def test_present_proof_credentials_list_schema_uri(self): } self.request.query = {"extra_query": {}} test_pd = deepcopy(DIF_PROOF_REQ) - test_pd["presentation_definition"]["input_descriptors"][0]["schema"][0][ - "uri" - ] = "https://example.org/test.json" + test_pd["presentation_definition"]["input_descriptors"][0]["schema"][0]["uri"] = ( + "https://example.org/test.json" + ) test_pd["presentation_definition"]["input_descriptors"][0]["schema"].pop(1) record = V20PresExRecord( state="request-received", @@ -1220,9 +1220,7 @@ async def test_present_proof_send_proposal(self): self.request.json = mock.CoroutineMock( return_value={ "connection_id": "dummy-conn-id", - "presentation_proposal": { - V20PresFormat.Format.INDY.api: INDY_PROOF_REQ - }, + "presentation_proposal": {V20PresFormat.Format.INDY.api: INDY_PROOF_REQ}, } ) @@ -1239,21 +1237,17 @@ async def test_present_proof_send_proposal(self): return_value=mock.MagicMock(is_ready=True) ) mock_px_rec_inst = mock.MagicMock() - mock_pres_mgr.return_value.create_exchange_for_proposal = ( - mock.CoroutineMock(return_value=mock_px_rec_inst) + mock_pres_mgr.return_value.create_exchange_for_proposal = mock.CoroutineMock( + return_value=mock_px_rec_inst ) await test_module.present_proof_send_proposal(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_proposal_no_conn_record(self): self.request.json = mock.CoroutineMock() - with mock.patch.object( - test_module, "ConnRecord", autospec=True - ) as mock_conn_rec: + with mock.patch.object(test_module, "ConnRecord", autospec=True) as mock_conn_rec: mock_conn_rec.retrieve_by_id = mock.CoroutineMock( side_effect=StorageNotFoundError() ) @@ -1284,14 +1278,10 @@ async def test_present_proof_send_proposal_x(self): ) as mock_conn_rec, mock.patch.object( test_module, "V20PresManager", autospec=True ) as mock_pres_mgr: - mock_pres_mgr.return_value.create_exchange_for_proposal = ( - mock.CoroutineMock( - return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), - save_error_state=mock.CoroutineMock(), - ) + mock_pres_mgr.return_value.create_exchange_for_proposal = mock.CoroutineMock( + return_value=mock.MagicMock( + serialize=mock.MagicMock(side_effect=test_module.StorageError()), + save_error_state=mock.CoroutineMock(), ) ) @@ -1327,9 +1317,7 @@ async def test_present_proof_create_request(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_create_request(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_create_request_x(self): self.request.json = mock.CoroutineMock( @@ -1350,9 +1338,7 @@ async def test_present_proof_create_request_x(self): mock_pres_mgr_inst = mock.MagicMock( create_exchange_for_request=mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ 
-1395,9 +1381,7 @@ async def test_present_proof_send_free_request(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_free_request(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_free_request_not_found(self): self.request.json = mock.CoroutineMock(return_value={"connection_id": "dummy"}) @@ -1456,9 +1440,7 @@ async def test_present_proof_send_free_request_x(self): mock_pres_mgr_inst = mock.MagicMock( create_exchange_for_request=mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ -1498,9 +1480,7 @@ async def test_present_proof_send_bound_request(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1521,9 +1501,7 @@ async def test_present_proof_send_bound_request(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_bound_request(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_bound_request_not_found(self): self.request.json = mock.CoroutineMock(return_value={"trace": False}) @@ -1551,9 +1529,7 @@ async def test_present_proof_send_bound_request_not_found(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1591,9 +1567,7 @@ async def test_present_proof_send_bound_request_not_ready(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1646,9 +1620,7 @@ async def test_present_proof_send_bound_request_bad_state(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_DONE, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1685,9 +1657,7 @@ async def test_present_proof_send_bound_request_x(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), save_error_state=mock.CoroutineMock(), ) 
mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( @@ -1743,9 +1713,7 @@ async def test_present_proof_send_presentation(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1764,9 +1732,7 @@ async def test_present_proof_send_presentation(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_presentation(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_presentation_dif(self): proof_req = deepcopy(DIF_PROOF_REQ) @@ -1798,9 +1764,7 @@ async def test_present_proof_send_presentation_dif(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1819,9 +1783,7 @@ async def test_present_proof_send_presentation_dif(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_presentation(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_presentation_dif_error(self): self.request.json = mock.CoroutineMock(return_value={"dif": DIF_PROOF_REQ}) @@ -1944,9 +1906,7 @@ async def test_present_proof_send_presentation_not_found(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1989,9 +1949,7 @@ async def test_present_proof_send_presentation_not_ready(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2026,9 +1984,7 @@ async def test_present_proof_send_presentation_bad_state(self): mock_px_rec_inst = mock.MagicMock( connection_id=None, state=test_module.V20PresExRecord.STATE_DONE, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2070,9 +2026,7 @@ async def test_present_proof_send_presentation_x(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": 
"sample-thread-id"}), save_error_state=mock.CoroutineMock(), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( @@ -2114,9 +2068,7 @@ async def test_present_proof_verify_presentation(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PRESENTATION_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2157,9 +2109,7 @@ async def test_present_proof_verify_presentation_bad_state(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_DONE, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2183,9 +2133,7 @@ async def test_present_proof_verify_presentation_x(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PRESENTATION_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), save_error_state=mock.CoroutineMock(), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( diff --git a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py index e79f0e0c74..c48509760a 100644 --- a/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py +++ b/aries_cloudagent/protocols/present_proof/v2_0/tests/test_routes_anoncreds.py @@ -1036,9 +1036,9 @@ async def test_present_proof_credentials_list_schema_uri(self): } self.request.query = {"extra_query": {}} test_pd = deepcopy(DIF_PROOF_REQ) - test_pd["presentation_definition"]["input_descriptors"][0]["schema"][0][ - "uri" - ] = "https://example.org/test.json" + test_pd["presentation_definition"]["input_descriptors"][0]["schema"][0]["uri"] = ( + "https://example.org/test.json" + ) test_pd["presentation_definition"]["input_descriptors"][0]["schema"].pop(1) record = V20PresExRecord( state="request-received", @@ -1225,9 +1225,7 @@ async def test_present_proof_send_proposal(self): self.request.json = mock.CoroutineMock( return_value={ "connection_id": "dummy-conn-id", - "presentation_proposal": { - V20PresFormat.Format.INDY.api: INDY_PROOF_REQ - }, + "presentation_proposal": {V20PresFormat.Format.INDY.api: INDY_PROOF_REQ}, } ) @@ -1244,21 +1242,17 @@ async def test_present_proof_send_proposal(self): return_value=mock.MagicMock(is_ready=True) ) mock_px_rec_inst = mock.MagicMock() - mock_pres_mgr.return_value.create_exchange_for_proposal = ( - mock.CoroutineMock(return_value=mock_px_rec_inst) + mock_pres_mgr.return_value.create_exchange_for_proposal = mock.CoroutineMock( + return_value=mock_px_rec_inst ) await test_module.present_proof_send_proposal(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_proposal_no_conn_record(self): self.request.json = mock.CoroutineMock() - with mock.patch.object( - test_module, "ConnRecord", autospec=True - ) as mock_conn_rec: + with mock.patch.object(test_module, "ConnRecord", autospec=True) as 
mock_conn_rec: mock_conn_rec.retrieve_by_id = mock.CoroutineMock( side_effect=StorageNotFoundError() ) @@ -1289,14 +1283,10 @@ async def test_present_proof_send_proposal_x(self): ) as mock_conn_rec, mock.patch.object( test_module, "V20PresManager", autospec=True ) as mock_pres_mgr: - mock_pres_mgr.return_value.create_exchange_for_proposal = ( - mock.CoroutineMock( - return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), - save_error_state=mock.CoroutineMock(), - ) + mock_pres_mgr.return_value.create_exchange_for_proposal = mock.CoroutineMock( + return_value=mock.MagicMock( + serialize=mock.MagicMock(side_effect=test_module.StorageError()), + save_error_state=mock.CoroutineMock(), ) ) @@ -1332,9 +1322,7 @@ async def test_present_proof_create_request(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_create_request(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_create_request_x(self): self.request.json = mock.CoroutineMock( @@ -1355,9 +1343,7 @@ async def test_present_proof_create_request_x(self): mock_pres_mgr_inst = mock.MagicMock( create_exchange_for_request=mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ -1400,9 +1386,7 @@ async def test_present_proof_send_free_request(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_free_request(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_free_request_not_found(self): self.request.json = mock.CoroutineMock(return_value={"connection_id": "dummy"}) @@ -1461,9 +1445,7 @@ async def test_present_proof_send_free_request_x(self): mock_pres_mgr_inst = mock.MagicMock( create_exchange_for_request=mock.CoroutineMock( return_value=mock.MagicMock( - serialize=mock.MagicMock( - side_effect=test_module.StorageError() - ), + serialize=mock.MagicMock(side_effect=test_module.StorageError()), save_error_state=mock.CoroutineMock(), ) ) @@ -1503,9 +1485,7 @@ async def test_present_proof_send_bound_request(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1526,9 +1506,7 @@ async def test_present_proof_send_bound_request(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_bound_request(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_bound_request_not_found(self): self.request.json = mock.CoroutineMock(return_value={"trace": False}) @@ -1556,9 +1534,7 @@ async def test_present_proof_send_bound_request_not_found(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", 
state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1596,9 +1572,7 @@ async def test_present_proof_send_bound_request_not_ready(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1651,9 +1625,7 @@ async def test_present_proof_send_bound_request_bad_state(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_DONE, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1690,9 +1662,7 @@ async def test_present_proof_send_bound_request_x(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PROPOSAL_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), save_error_state=mock.CoroutineMock(), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( @@ -1748,9 +1718,7 @@ async def test_present_proof_send_presentation(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1769,9 +1737,7 @@ async def test_present_proof_send_presentation(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_presentation(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_presentation_dif(self): proof_req = deepcopy(DIF_PROOF_REQ) @@ -1803,9 +1769,7 @@ async def test_present_proof_send_presentation_dif(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1824,9 +1788,7 @@ async def test_present_proof_send_presentation_dif(self): mock_pres_mgr_cls.return_value = mock_pres_mgr_inst await test_module.present_proof_send_presentation(self.request) - mock_response.assert_called_once_with( - mock_px_rec_inst.serialize.return_value - ) + mock_response.assert_called_once_with(mock_px_rec_inst.serialize.return_value) async def test_present_proof_send_presentation_dif_error(self): self.request.json = mock.CoroutineMock(return_value={"dif": DIF_PROOF_REQ}) @@ -1949,9 +1911,7 @@ async def test_present_proof_send_presentation_not_found(self): 
mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -1994,9 +1954,7 @@ async def test_present_proof_send_presentation_not_ready(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2031,9 +1989,7 @@ async def test_present_proof_send_presentation_bad_state(self): mock_px_rec_inst = mock.MagicMock( connection_id=None, state=test_module.V20PresExRecord.STATE_DONE, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2075,9 +2031,7 @@ async def test_present_proof_send_presentation_x(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_REQUEST_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), save_error_state=mock.CoroutineMock(), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( @@ -2119,9 +2073,7 @@ async def test_present_proof_verify_presentation(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PRESENTATION_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2162,9 +2114,7 @@ async def test_present_proof_verify_presentation_bad_state(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_DONE, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( return_value=mock_px_rec_inst @@ -2188,9 +2138,7 @@ async def test_present_proof_verify_presentation_x(self): mock_px_rec_inst = mock.MagicMock( connection_id="dummy", state=test_module.V20PresExRecord.STATE_PRESENTATION_RECEIVED, - serialize=mock.MagicMock( - return_value={"thread_id": "sample-thread-id"} - ), + serialize=mock.MagicMock(return_value={"thread_id": "sample-thread-id"}), save_error_state=mock.CoroutineMock(), ) mock_px_rec_cls.retrieve_by_id = mock.CoroutineMock( diff --git a/aries_cloudagent/protocols/problem_report/v1_0/handler.py b/aries_cloudagent/protocols/problem_report/v1_0/handler.py index 99e1c1d179..33a5c793e4 100644 --- a/aries_cloudagent/protocols/problem_report/v1_0/handler.py +++ b/aries_cloudagent/protocols/problem_report/v1_0/handler.py @@ -29,6 +29,4 @@ async def handle(self, context: RequestContext, responder: BaseResponder): context.message, ) - await context.profile.notify( - "acapy::problem_report", context.message.serialize() - ) + await 
context.profile.notify("acapy::problem_report", context.message.serialize()) diff --git a/aries_cloudagent/protocols/problem_report/v1_0/message.py b/aries_cloudagent/protocols/problem_report/v1_0/message.py index 56847eadef..66dc17950d 100644 --- a/aries_cloudagent/protocols/problem_report/v1_0/message.py +++ b/aries_cloudagent/protocols/problem_report/v1_0/message.py @@ -71,12 +71,8 @@ class Meta: unknown = EXCLUDE description = fields.Dict( - keys=fields.Str( - metadata={"description": "Locale or 'code'", "example": "en-US"} - ), - values=fields.Str( - metadata={"description": "Problem description or error code"} - ), + keys=fields.Str(metadata={"description": "Locale or 'code'", "example": "en-US"}), + values=fields.Str(metadata={"description": "Problem description or error code"}), required=False, metadata={"description": "Human-readable localized problem descriptions"}, ) diff --git a/aries_cloudagent/protocols/revocation_notification/v1_0/models/rev_notification_record.py b/aries_cloudagent/protocols/revocation_notification/v1_0/models/rev_notification_record.py index f92752e93c..e2a5b66c4d 100644 --- a/aries_cloudagent/protocols/revocation_notification/v1_0/models/rev_notification_record.py +++ b/aries_cloudagent/protocols/revocation_notification/v1_0/models/rev_notification_record.py @@ -93,9 +93,7 @@ async def query_by_ids( "More than one RevNotificationRecord was found for the given IDs" ) if not result: - raise StorageNotFoundError( - "No RevNotificationRecord found for the given IDs" - ) + raise StorageNotFoundError("No RevNotificationRecord found for the given IDs") return result[0] @classmethod diff --git a/aries_cloudagent/protocols/revocation_notification/v2_0/messages/revoke.py b/aries_cloudagent/protocols/revocation_notification/v2_0/messages/revoke.py index 531303e5fd..44aa3cef2c 100644 --- a/aries_cloudagent/protocols/revocation_notification/v2_0/messages/revoke.py +++ b/aries_cloudagent/protocols/revocation_notification/v2_0/messages/revoke.py @@ -66,9 +66,7 @@ class Meta: PleaseAckDecoratorSchema, required=False, data_key="~please_ack", - metadata={ - "description": "Whether or not the holder should acknowledge receipt" - }, + metadata={"description": "Whether or not the holder should acknowledge receipt"}, ) comment = fields.Str( required=False, diff --git a/aries_cloudagent/protocols/revocation_notification/v2_0/models/rev_notification_record.py b/aries_cloudagent/protocols/revocation_notification/v2_0/models/rev_notification_record.py index abbd3e04aa..2587863c91 100644 --- a/aries_cloudagent/protocols/revocation_notification/v2_0/models/rev_notification_record.py +++ b/aries_cloudagent/protocols/revocation_notification/v2_0/models/rev_notification_record.py @@ -93,9 +93,7 @@ async def query_by_ids( "More than one RevNotificationRecord was found for the given IDs" ) if not result: - raise StorageNotFoundError( - "No RevNotificationRecord found for the given IDs" - ) + raise StorageNotFoundError("No RevNotificationRecord found for the given IDs") return result[0] @classmethod diff --git a/aries_cloudagent/protocols/routing/v1_0/handlers/forward_handler.py b/aries_cloudagent/protocols/routing/v1_0/handlers/forward_handler.py index 47654fc5b0..cc12fad775 100644 --- a/aries_cloudagent/protocols/routing/v1_0/handlers/forward_handler.py +++ b/aries_cloudagent/protocols/routing/v1_0/handlers/forward_handler.py @@ -47,9 +47,7 @@ async def handle(self, context: RequestContext, responder: BaseResponder): connection_verkey = connection_targets[0].recipient_keys[0] # Note: 
not currently vetting the state of the connection here - self._logger.info( - f"Forwarding message to connection: {recipient.connection_id}" - ) + self._logger.info(f"Forwarding message to connection: {recipient.connection_id}") send_status = await responder.send( packed, diff --git a/aries_cloudagent/protocols/routing/v1_0/handlers/tests/test_forward_handler.py b/aries_cloudagent/protocols/routing/v1_0/handlers/tests/test_forward_handler.py index 0e4947738c..6daade22c9 100644 --- a/aries_cloudagent/protocols/routing/v1_0/handlers/tests/test_forward_handler.py +++ b/aries_cloudagent/protocols/routing/v1_0/handlers/tests/test_forward_handler.py @@ -39,14 +39,12 @@ async def test_handle(self): mock_mgr.return_value.get_recipient = mock.CoroutineMock( return_value=RouteRecord(connection_id="dummy") ) - mock_connection_mgr.return_value.get_connection_targets = ( - mock.CoroutineMock( - return_value=[ - ConnectionTarget( - recipient_keys=["recip_key"], - ) - ] - ) + mock_connection_mgr.return_value.get_connection_targets = mock.CoroutineMock( + return_value=[ + ConnectionTarget( + recipient_keys=["recip_key"], + ) + ] ) await handler.handle(self.context, responder) @@ -77,9 +75,7 @@ async def test_handle_cannot_resolve_recipient(self): handler = test_module.ForwardHandler() responder = MockResponder() - with mock.patch.object( - test_module, "RoutingManager", autospec=True - ) as mock_mgr: + with mock.patch.object(test_module, "RoutingManager", autospec=True) as mock_mgr: mock_mgr.return_value.get_recipient = mock.CoroutineMock( side_effect=test_module.RoutingManagerError() ) diff --git a/aries_cloudagent/protocols/routing/v1_0/models/route_record.py b/aries_cloudagent/protocols/routing/v1_0/models/route_record.py index 2f7c3d3778..2adea0d1b9 100644 --- a/aries_cloudagent/protocols/routing/v1_0/models/route_record.py +++ b/aries_cloudagent/protocols/routing/v1_0/models/route_record.py @@ -28,7 +28,7 @@ def __init__( connection_id: str = None, wallet_id: str = None, recipient_key: str = None, - **kwargs + **kwargs, ): """Initialize route record. diff --git a/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py b/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py index 9313646ba2..518b684e52 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py +++ b/aries_cloudagent/protocols/trustping/v1_0/messages/ping.py @@ -18,9 +18,7 @@ class Meta: message_type = PING schema_class = "PingSchema" - def __init__( - self, *, response_requested: bool = True, comment: str = None, **kwargs - ): + def __init__(self, *, response_requested: bool = True, comment: str = None, **kwargs): """Initialize a Ping message instance. Args: diff --git a/aries_cloudagent/protocols/trustping/v1_0/messages/tests/test_trust_ping.py b/aries_cloudagent/protocols/trustping/v1_0/messages/tests/test_trust_ping.py index 92e184bf1d..8e200f3edb 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/messages/tests/test_trust_ping.py +++ b/aries_cloudagent/protocols/trustping/v1_0/messages/tests/test_trust_ping.py @@ -26,9 +26,7 @@ def test_type(self): """Test type.""" assert self.test_ping._type == DIDCommPrefix.qualify_current(PING) - @mock.patch( - "aries_cloudagent.protocols.trustping.v1_0.messages.ping.PingSchema.load" - ) + @mock.patch("aries_cloudagent.protocols.trustping.v1_0.messages.ping.PingSchema.load") def test_deserialize(self, mock_ping_schema_load): """ Test deserialization. 
@@ -40,9 +38,7 @@ def test_deserialize(self, mock_ping_schema_load): assert msg is mock_ping_schema_load.return_value - @mock.patch( - "aries_cloudagent.protocols.trustping.v1_0.messages.ping.PingSchema.dump" - ) + @mock.patch("aries_cloudagent.protocols.trustping.v1_0.messages.ping.PingSchema.dump") def test_serialize(self, mock_ping_schema_load): """ Test serialization. diff --git a/aries_cloudagent/protocols/trustping/v1_0/routes.py b/aries_cloudagent/protocols/trustping/v1_0/routes.py index b1e850515f..fac9455a3a 100644 --- a/aries_cloudagent/protocols/trustping/v1_0/routes.py +++ b/aries_cloudagent/protocols/trustping/v1_0/routes.py @@ -77,9 +77,7 @@ async def connections_send_ping(request: web.BaseRequest): async def register(app: web.Application): """Register routes.""" - app.add_routes( - [web.post("/connections/{conn_id}/send-ping", connections_send_ping)] - ) + app.add_routes([web.post("/connections/{conn_id}/send-ping", connections_send_ping)]) def post_process_routes(app: web.Application): diff --git a/aries_cloudagent/resolver/base.py b/aries_cloudagent/resolver/base.py index b279968b8a..e7b600e105 100644 --- a/aries_cloudagent/resolver/base.py +++ b/aries_cloudagent/resolver/base.py @@ -128,9 +128,7 @@ async def supports(self, profile: Profile, did: str) -> bool: DeprecationWarning, ) - supported_did_regex = re.compile( - "^did:(?:{}):.*$".format("|".join(methods)) - ) + supported_did_regex = re.compile("^did:(?:{}):.*$".format("|".join(methods))) return bool(supported_did_regex.match(did)) diff --git a/aries_cloudagent/resolver/default/indy.py b/aries_cloudagent/resolver/default/indy.py index ce18f75c67..bbd2fb27bb 100644 --- a/aries_cloudagent/resolver/default/indy.py +++ b/aries_cloudagent/resolver/default/indy.py @@ -142,9 +142,7 @@ def add_services( ) builder.context.append(self.CONTEXT_DIDCOMM_V2) else: - LOGGER.warning( - "No endpoint for DID although endpoint attrib was resolvable" - ) + LOGGER.warning("No endpoint for DID although endpoint attrib was resolvable") if other_endpoints: for type_, endpoint in other_endpoints.items(): diff --git a/aries_cloudagent/resolver/default/legacy_peer.py b/aries_cloudagent/resolver/default/legacy_peer.py index b421d18ce7..a3fbad9848 100644 --- a/aries_cloudagent/resolver/default/legacy_peer.py +++ b/aries_cloudagent/resolver/default/legacy_peer.py @@ -221,9 +221,7 @@ def _make_qualified(value: dict) -> dict: return value @staticmethod - def remove_verification_method( - vms: List[dict], public_key_base58: str - ) -> List[dict]: + def remove_verification_method(vms: List[dict], public_key_base58: str) -> List[dict]: """Remove the verification method with the given key.""" return [vm for vm in vms if vm["publicKeyBase58"] != public_key_base58] diff --git a/aries_cloudagent/resolver/default/peer1.py b/aries_cloudagent/resolver/default/peer1.py index 8f1c928759..840581ea87 100644 --- a/aries_cloudagent/resolver/default/peer1.py +++ b/aries_cloudagent/resolver/default/peer1.py @@ -18,7 +18,7 @@ # TODO Copy pasted from did-peer-4, reuse when available def _operate_on_embedded( - visitor: Callable[[dict], dict] + visitor: Callable[[dict], dict], ) -> Callable[[Union[dict, str]], Union[dict, str]]: """Return an adapter function that turns a vm visitor into a vm | ref visitor. 
diff --git a/aries_cloudagent/resolver/default/peer3.py b/aries_cloudagent/resolver/default/peer3.py index 79a17f9c0d..63c81b95f4 100644 --- a/aries_cloudagent/resolver/default/peer3.py +++ b/aries_cloudagent/resolver/default/peer3.py @@ -97,9 +97,7 @@ async def remove_record_for_deleted_conn(self, profile: Profile, event: Event): ), ] if dids: - LOGGER.debug( - "Removing peer 2 to 3 mapping for deleted connection: %s", dids - ) + LOGGER.debug("Removing peer 2 to 3 mapping for deleted connection: %s", dids) async with profile.session() as session: storage = session.inject(BaseStorage) for did in dids: diff --git a/aries_cloudagent/resolver/default/peer4.py b/aries_cloudagent/resolver/default/peer4.py index 153dde999c..523074ae13 100644 --- a/aries_cloudagent/resolver/default/peer4.py +++ b/aries_cloudagent/resolver/default/peer4.py @@ -53,9 +53,7 @@ async def _resolve( async with profile.session() as session: storage = session.inject(BaseStorage) try: - record = await storage.get_record( - self.RECORD_TYPE, short_did_peer_4 - ) + record = await storage.get_record(self.RECORD_TYPE, short_did_peer_4) except StorageNotFoundError: record = StorageRecord(self.RECORD_TYPE, did, {}, short_did_peer_4) await storage.add_record(record) diff --git a/aries_cloudagent/resolver/default/tests/test_indy.py b/aries_cloudagent/resolver/default/tests/test_indy.py index d2344a2047..60c6ada8c8 100644 --- a/aries_cloudagent/resolver/default/tests/test_indy.py +++ b/aries_cloudagent/resolver/default/tests/test_indy.py @@ -68,9 +68,7 @@ async def test_resolve(self, profile: Profile, resolver: IndyDIDResolver): assert await resolver.resolve(profile, TEST_DID0) @pytest.mark.asyncio - async def test_resolve_with_accept( - self, profile: Profile, resolver: IndyDIDResolver - ): + async def test_resolve_with_accept(self, profile: Profile, resolver: IndyDIDResolver): """Test resolve method.""" assert await resolver.resolve( profile, TEST_DID0, ["didcomm/aip1", "didcomm/aip2;env=rfc19"] @@ -93,9 +91,7 @@ async def test_resolve_multitenant( assert await resolver.resolve(profile, TEST_DID0) @pytest.mark.asyncio - async def test_resolve_x_no_ledger( - self, profile: Profile, resolver: IndyDIDResolver - ): + async def test_resolve_x_no_ledger(self, profile: Profile, resolver: IndyDIDResolver): """Test resolve method with no ledger.""" profile.context.injector.bind_instance( IndyLedgerRequestsExecutor, diff --git a/aries_cloudagent/resolver/default/tests/test_legacy_peer.py b/aries_cloudagent/resolver/default/tests/test_legacy_peer.py index 2acace6876..e26478a43e 100644 --- a/aries_cloudagent/resolver/default/tests/test_legacy_peer.py +++ b/aries_cloudagent/resolver/default/tests/test_legacy_peer.py @@ -107,9 +107,7 @@ async def test_resolve_x_not_found( test_module, "BaseConnectionManager" ) as mock_mgr, mock.patch.object( test_module, "LegacyDocCorrections" - ) as mock_corrections, pytest.raises( - test_module.DIDNotFound - ): + ) as mock_corrections, pytest.raises(test_module.DIDNotFound): doc = object mock_corrections.apply = mock.MagicMock(return_value=doc) mock_mgr.return_value = mock.MagicMock( diff --git a/aries_cloudagent/resolver/default/tests/test_universal.py b/aries_cloudagent/resolver/default/tests/test_universal.py index 05f3b89801..5e45b9a044 100644 --- a/aries_cloudagent/resolver/default/tests/test_universal.py +++ b/aries_cloudagent/resolver/default/tests/test_universal.py @@ -103,9 +103,7 @@ async def test_resolve_not_found(profile, resolver, mock_client_session): @pytest.mark.asyncio async def 
test_resolve_unexpected_status(profile, resolver, mock_client_session): - mock_client_session.response = MockResponse( - 500, "Server failed to complete request" - ) + mock_client_session.response = MockResponse(500, "Server failed to complete request") with pytest.raises(ResolverError): await resolver.resolve(profile, "did:sov:WRfXPg8dantKVubE3HX8pw") diff --git a/aries_cloudagent/resolver/default/web.py b/aries_cloudagent/resolver/default/web.py index 15aad96e6a..9fdf1b09de 100644 --- a/aries_cloudagent/resolver/default/web.py +++ b/aries_cloudagent/resolver/default/web.py @@ -73,9 +73,7 @@ async def _resolve( did_doc = DIDDocument.from_json(await response.text()) return did_doc.serialize() except Exception as err: - raise ResolverError( - "Response was incorrectly formatted" - ) from err + raise ResolverError("Response was incorrectly formatted") from err if response.status == 404: raise DIDNotFound(f"No document found for {did}") raise ResolverError( diff --git a/aries_cloudagent/resolver/routes.py b/aries_cloudagent/resolver/routes.py index a027577556..3016aafbb7 100644 --- a/aries_cloudagent/resolver/routes.py +++ b/aries_cloudagent/resolver/routes.py @@ -16,9 +16,7 @@ class ResolutionResultSchema(OpenAPISchema): """Result schema for did document query.""" did_document = fields.Dict(required=True, metadata={"description": "DID Document"}) - metadata = fields.Dict( - required=True, metadata={"description": "Resolution metadata"} - ) + metadata = fields.Dict(required=True, metadata={"description": "Resolution metadata"}) class W3cDID(validate.Regexp): diff --git a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py index dfd38a95a0..c7e28774ec 100644 --- a/aries_cloudagent/revocation/models/issuer_rev_reg_record.py +++ b/aries_cloudagent/revocation/models/issuer_rev_reg_record.py @@ -405,10 +405,8 @@ async def fix_ledger_entry( # await self.save(session) accum_count += 1 async with profile.session() as session: - issuer_rev_reg_record = ( - await IssuerRevRegRecord.retrieve_by_revoc_reg_id( - session, self.revoc_reg_id - ) + issuer_rev_reg_record = await IssuerRevRegRecord.retrieve_by_revoc_reg_id( + session, self.revoc_reg_id ) cred_def_id = issuer_rev_reg_record.cred_def_id _cred_def = await session.handle.fetch(CATEGORY_CRED_DEF, cred_def_id) @@ -417,10 +415,8 @@ async def fix_ledger_entry( ) credx_module = importlib.import_module("indy_credx") cred_defn = credx_module.CredentialDefinition.load(_cred_def.value_json) - rev_reg_defn_private = ( - credx_module.RevocationRegistryDefinitionPrivate.load( - _rev_reg_def_private.value_json - ) + rev_reg_defn_private = credx_module.RevocationRegistryDefinitionPrivate.load( + _rev_reg_def_private.value_json ) calculated_txn = await generate_ledger_rrrecovery_txn( genesis_transactions, @@ -501,9 +497,7 @@ async def clear_pending( """ if self.pending_pub: if cred_rev_ids: - self.pending_pub = [ - r for r in self.pending_pub if r not in cred_rev_ids - ] + self.pending_pub = [r for r in self.pending_pub if r not in cred_rev_ids] else: self.pending_pub.clear() await self.save(session, reason="Cleared pending revocations") diff --git a/aries_cloudagent/revocation/models/revocation_registry.py b/aries_cloudagent/revocation/models/revocation_registry.py index 56d223d9e5..24665faece 100644 --- a/aries_cloudagent/revocation/models/revocation_registry.py +++ b/aries_cloudagent/revocation/models/revocation_registry.py @@ -195,9 +195,7 @@ async def retrieve_tails(self): except OSError 
as err: LOGGER.warning(f"Could not delete invalid tails file: {err}") - raise RevocationError( - "The hash of the downloaded tails file does not match." - ) + raise RevocationError("The hash of the downloaded tails file does not match.") self.tails_local_path = str(tails_file_path) return self.tails_local_path diff --git a/aries_cloudagent/revocation/models/tests/test_revocation_registry.py b/aries_cloudagent/revocation/models/tests/test_revocation_registry.py index d28d6ac6df..c6b2af5970 100644 --- a/aries_cloudagent/revocation/models/tests/test_revocation_registry.py +++ b/aries_cloudagent/revocation/models/tests/test_revocation_registry.py @@ -104,9 +104,7 @@ async def test_retrieve_tails(self): more_magic = mock.MagicMock() with mock.patch.object(test_module, "Session", autospec=True) as mock_session: - mock_session.return_value.__enter__ = mock.MagicMock( - return_value=more_magic - ) + mock_session.return_value.__enter__ = mock.MagicMock(return_value=more_magic) more_magic.get = mock.MagicMock( side_effect=test_module.RequestException("Not this time") ) @@ -119,9 +117,7 @@ async def test_retrieve_tails(self): more_magic = mock.MagicMock() with mock.patch.object(test_module, "Session", autospec=True) as mock_session: - mock_session.return_value.__enter__ = mock.MagicMock( - return_value=more_magic - ) + mock_session.return_value.__enter__ = mock.MagicMock(return_value=more_magic) more_magic.get = mock.MagicMock( return_value=mock.MagicMock( iter_content=mock.MagicMock(side_effect=[(b"abcd1234",), (b"",)]) @@ -144,9 +140,7 @@ async def test_retrieve_tails(self): ) as mock_b58enc, mock.patch.object( Path, "is_file", autospec=True ) as mock_is_file: - mock_session.return_value.__enter__ = mock.MagicMock( - return_value=more_magic - ) + mock_session.return_value.__enter__ = mock.MagicMock(return_value=more_magic) more_magic.get = mock.MagicMock( return_value=mock.MagicMock( iter_content=mock.MagicMock(side_effect=[(b"abcd1234",), (b"",)]) diff --git a/aries_cloudagent/revocation/recover.py b/aries_cloudagent/revocation/recover.py index eca105a1b4..7643d53119 100644 --- a/aries_cloudagent/revocation/recover.py +++ b/aries_cloudagent/revocation/recover.py @@ -53,9 +53,7 @@ async def fetch_txns(genesis_txns, registry_id): async with aiohttp.ClientSession() as session: data = await session.get(defn.tails_location) tails_data = await data.read() - tails_hash = base58.b58encode(hashlib.sha256(tails_data).digest()).decode( - "utf-8" - ) + tails_hash = base58.b58encode(hashlib.sha256(tails_data).digest()).decode("utf-8") if tails_hash != defn.tails_hash: raise RevocRecoveryException( f"Tails hash mismatch {tails_hash} {defn.tails_hash}" diff --git a/aries_cloudagent/revocation/routes.py b/aries_cloudagent/revocation/routes.py index 7b6b890d56..0885cfc108 100644 --- a/aries_cloudagent/revocation/routes.py +++ b/aries_cloudagent/revocation/routes.py @@ -118,9 +118,7 @@ class TxnOrRevRegResultSchema(OpenAPISchema): txn = fields.Nested( TransactionRecordSchema(), required=False, - metadata={ - "description": "Revocation registry definition transaction to endorse" - }, + metadata={"description": "Revocation registry definition transaction to endorse"}, ) @@ -213,13 +211,9 @@ def validate_fields(self, data, **kwargs): notify_version = data.get("notify_version", "v1_0") if notify and not connection_id: - raise ValidationError( - "Request must specify connection_id if notify is true" - ) + raise ValidationError("Request must specify connection_id if notify is true") if notify and not notify_version: - 
raise ValidationError( - "Request must specify notify_version if notify is true" - ) + raise ValidationError("Request must specify notify_version if notify is true") publish = fields.Boolean( required=False, @@ -838,9 +832,7 @@ async def rev_regs_created(request: web.BaseRequest): is_anoncreds_profile_raise_web_exception(context.profile) search_tags = list(vars(RevRegsCreatedQueryStringSchema)["_declared_fields"]) - tag_filter = { - tag: request.query[tag] for tag in search_tags if tag in request.query - } + tag_filter = {tag: request.query[tag] for tag in search_tags if tag in request.query} async with context.profile.session() as session: found = await IssuerRevRegRecord.query( session, @@ -849,11 +841,7 @@ async def rev_regs_created(request: web.BaseRequest): ) return web.json_response( - { - "rev_reg_ids": [ - record.revoc_reg_id for record in found if record.revoc_reg_id - ] - } + {"rev_reg_ids": [record.revoc_reg_id for record in found if record.revoc_reg_id]} ) @@ -1293,9 +1281,7 @@ async def send_rev_reg_def(request: web.BaseRequest): raise web.HTTPBadRequest(reason=err.roll_up) from err async with profile.session() as session: - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") if not endorser_info: raise web.HTTPForbidden( reason=( @@ -1413,9 +1399,7 @@ async def send_rev_reg_entry(request: web.BaseRequest): except BaseModelError as err: raise web.HTTPBadRequest(reason=err.roll_up) from err - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") if not endorser_info: raise web.HTTPForbidden( reason=( @@ -1580,9 +1564,7 @@ async def on_revocation_registry_init_event(profile: Profile, event: Event): # TODO error handling - for now just let exceptions get raised endorser_connection_id = meta_data["endorser"]["connection_id"] async with profile.session() as session: - connection = await ConnRecord.retrieve_by_id( - session, endorser_connection_id - ) + connection = await ConnRecord.retrieve_by_id(session, endorser_connection_id) endorser_info = await connection.metadata_get(session, "endorser_info") endorser_did = endorser_info["endorser_did"] write_ledger = False @@ -1654,9 +1636,7 @@ async def generate(rr_record: IssuerRevRegRecord) -> dict: registry_record = await IssuerRevRegRecord.retrieve_by_id(session, record_id) await shield(generate(registry_record)) - create_pending_rev_reg = meta_data["processing"].get( - "create_pending_rev_reg", False - ) + create_pending_rev_reg = meta_data["processing"].get("create_pending_rev_reg", False) if write_ledger and create_pending_rev_reg: revoc = IndyRevocation(profile) await revoc.init_issuer_registry( @@ -1744,17 +1724,13 @@ async def on_revocation_registry_endorsed_event(profile: Profile, event: Event): await registry_record.upload_tails_file(profile) # Post the initial revocation entry - await notify_revocation_entry_event( - profile, registry_record.record_id, meta_data - ) + await notify_revocation_entry_event(profile, registry_record.record_id, meta_data) # create a "pending" registry if one is requested # (this is done automatically when creating a credential definition, so that when a # revocation registry fills up, we can continue to issue credentials without a # delay) - create_pending_rev_reg = meta_data["processing"].get( - "create_pending_rev_reg", False - ) + create_pending_rev_reg = 
meta_data["processing"].get("create_pending_rev_reg", False) if create_pending_rev_reg: endorser_connection_id = ( meta_data["endorser"].get("connection_id", None) diff --git a/aries_cloudagent/revocation/tests/test_manager.py b/aries_cloudagent/revocation/tests/test_manager.py index d811b31a27..074253975c 100644 --- a/aries_cloudagent/revocation/tests/test_manager.py +++ b/aries_cloudagent/revocation/tests/test_manager.py @@ -549,9 +549,7 @@ async def test_publish_pending_revocations_1_rev_reg_all(self): ) self.profile.context.injector.bind_instance(IndyIssuer, issuer) - _, result = await self.manager.publish_pending_revocations( - {REV_REG_ID: None} - ) + _, result = await self.manager.publish_pending_revocations({REV_REG_ID: None}) assert result == {REV_REG_ID: ["1", "2"]} mock_issuer_rev_reg_records[0].clear_pending.assert_called_once() mock_issuer_rev_reg_records[1].clear_pending.assert_not_called() @@ -611,9 +609,7 @@ async def test_publish_pending_revocations_1_rev_reg_some(self): ) self.profile.context.injector.bind_instance(IndyIssuer, issuer) - _, result = await self.manager.publish_pending_revocations( - {REV_REG_ID: "2"} - ) + _, result = await self.manager.publish_pending_revocations({REV_REG_ID: "2"}) assert result == {REV_REG_ID: ["2"]} mock_issuer_rev_reg_records[0].clear_pending.assert_called_once() mock_issuer_rev_reg_records[1].clear_pending.assert_not_called() @@ -801,9 +797,7 @@ async def test_set_revoked_state_v2(self): check_exchange_record = await V20CredExRecord.retrieve_by_id( session, exchange_record.cred_ex_id ) - assert ( - check_exchange_record.state == V20CredExRecord.STATE_CREDENTIAL_REVOKED - ) + assert check_exchange_record.state == V20CredExRecord.STATE_CREDENTIAL_REVOKED check_crev_record = await IssuerCredRevRecord.retrieve_by_id( session, crev_record.record_id diff --git a/aries_cloudagent/revocation_anoncreds/manager.py b/aries_cloudagent/revocation_anoncreds/manager.py index bc8fce9d49..1b81136731 100644 --- a/aries_cloudagent/revocation_anoncreds/manager.py +++ b/aries_cloudagent/revocation_anoncreds/manager.py @@ -261,9 +261,7 @@ async def publish_pending_revocations( else: limit_crids = None - result = await revoc.revoke_pending_credentials( - rrid, limit_crids=limit_crids - ) + result = await revoc.revoke_pending_credentials(rrid, limit_crids=limit_crids) if result.curr and result.revoked: await self.set_cred_revoked_state(rrid, result.revoked) await revoc.update_revocation_list( diff --git a/aries_cloudagent/revocation_anoncreds/recover.py b/aries_cloudagent/revocation_anoncreds/recover.py index 2d9eca8755..b77a4474db 100644 --- a/aries_cloudagent/revocation_anoncreds/recover.py +++ b/aries_cloudagent/revocation_anoncreds/recover.py @@ -55,9 +55,7 @@ async def fetch_txns(genesis_txns, registry_id): async with aiohttp.ClientSession() as session: data = await session.get(defn.tails_location) tails_data = await data.read() - tails_hash = base58.b58encode(hashlib.sha256(tails_data).digest()).decode( - "utf-8" - ) + tails_hash = base58.b58encode(hashlib.sha256(tails_data).digest()).decode("utf-8") if tails_hash != defn.tails_hash: raise RevocRecoveryException( f"Tails hash mismatch {tails_hash} {defn.tails_hash}" diff --git a/aries_cloudagent/revocation_anoncreds/routes.py b/aries_cloudagent/revocation_anoncreds/routes.py index 327627db96..928ef0ebd8 100644 --- a/aries_cloudagent/revocation_anoncreds/routes.py +++ b/aries_cloudagent/revocation_anoncreds/routes.py @@ -395,13 +395,9 @@ def validate_fields(self, data, **kwargs): notify_version = 
data.get("notify_version", "v1_0") if notify and not connection_id: - raise ValidationError( - "Request must specify connection_id if notify is true" - ) + raise ValidationError("Request must specify connection_id if notify is true") if notify and not notify_version: - raise ValidationError( - "Request must specify notify_version if notify is true" - ) + raise ValidationError("Request must specify notify_version if notify is true") publish = fields.Boolean( required=False, @@ -571,9 +567,7 @@ async def get_rev_regs(request: web.BaseRequest): is_not_anoncreds_profile_raise_web_exception(profile) search_tags = list(vars(RevRegsCreatedQueryStringSchema)["_declared_fields"]) - tag_filter = { - tag: request.query[tag] for tag in search_tags if tag in request.query - } + tag_filter = {tag: request.query[tag] for tag in search_tags if tag in request.query} cred_def_id = tag_filter.get("cred_def_id") state = tag_filter.get("state") try: diff --git a/aries_cloudagent/revocation_anoncreds/tests/test_manager.py b/aries_cloudagent/revocation_anoncreds/tests/test_manager.py index 12c4278c4b..827c5aaa86 100644 --- a/aries_cloudagent/revocation_anoncreds/tests/test_manager.py +++ b/aries_cloudagent/revocation_anoncreds/tests/test_manager.py @@ -497,9 +497,7 @@ async def test_set_revoked_state_v2(self): check_exchange_record = await V20CredExRecord.retrieve_by_id( session, exchange_record.cred_ex_id ) - assert ( - check_exchange_record.state == V20CredExRecord.STATE_CREDENTIAL_REVOKED - ) + assert check_exchange_record.state == V20CredExRecord.STATE_CREDENTIAL_REVOKED check_crev_record = await IssuerCredRevRecord.retrieve_by_id( session, crev_record.record_id diff --git a/aries_cloudagent/revocation_anoncreds/tests/test_routes.py b/aries_cloudagent/revocation_anoncreds/tests/test_routes.py index 5988b35464..a332654fa2 100644 --- a/aries_cloudagent/revocation_anoncreds/tests/test_routes.py +++ b/aries_cloudagent/revocation_anoncreds/tests/test_routes.py @@ -54,9 +54,7 @@ async def test_validate_cred_rev_rec_qs_and_revoke_req(self): with self.assertRaises(test_module.ValidationError): req.validate_fields({"rev_reg_id": test_module.INDY_REV_REG_ID_EXAMPLE}) with self.assertRaises(test_module.ValidationError): - req.validate_fields( - {"cred_rev_id": test_module.INDY_CRED_REV_ID_EXAMPLE} - ) + req.validate_fields({"cred_rev_id": test_module.INDY_CRED_REV_ID_EXAMPLE}) with self.assertRaises(test_module.ValidationError): req.validate_fields( { @@ -153,9 +151,7 @@ async def test_publish_revocations(self): await test_module.publish_revocations(self.request) - mock_response.assert_called_once_with( - {"rrid2crid": pub_pending.return_value} - ) + mock_response.assert_called_once_with({"rrid2crid": pub_pending.return_value}) async def test_publish_revocations_x(self): self.request.json = mock.CoroutineMock() @@ -408,9 +404,7 @@ async def test_get_tails_file(self): ) result = await test_module.get_tails_file(self.request) - mock_file_response.assert_called_once_with( - path="tails_location", status=200 - ) + mock_file_response.assert_called_once_with(path="tails_location", status=200) assert result is mock_file_response.return_value async def test_get_tails_file_not_found(self): diff --git a/aries_cloudagent/storage/in_memory.py b/aries_cloudagent/storage/in_memory.py index 68a166cea1..f8d1f8f844 100644 --- a/aries_cloudagent/storage/in_memory.py +++ b/aries_cloudagent/storage/in_memory.py @@ -309,9 +309,7 @@ async def fetch(self, max_count: int = None) -> Sequence[StorageRecord]: except StopIteration: break record = 
self._cache[id] - if record.type == check_type and tag_query_match( - record.tags, self.tag_query - ): + if record.type == check_type and tag_query_match(record.tags, self.tag_query): ret.append(record) i -= 1 diff --git a/aries_cloudagent/storage/record.py b/aries_cloudagent/storage/record.py index 4290f96952..25dd0d1ce9 100644 --- a/aries_cloudagent/storage/record.py +++ b/aries_cloudagent/storage/record.py @@ -11,9 +11,7 @@ class StorageRecord(namedtuple("StorageRecord", "type value tags id")): __slots__ = () - def __new__( - cls, type, value, tags: Optional[dict] = None, id: Optional[str] = None - ): + def __new__(cls, type, value, tags: Optional[dict] = None, id: Optional[str] = None): """Initialize some defaults on record.""" if not id: id = uuid4().hex diff --git a/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py b/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py index ca465b642c..c74178a776 100644 --- a/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py +++ b/aries_cloudagent/storage/vc_holder/tests/test_in_memory_vc_holder.py @@ -286,9 +286,7 @@ async def test_sorting_vcrecord(self, holder: VCHolder): assert rows == expected @pytest.mark.asyncio - async def test_tag_query_valid_and_operator( - self, holder: VCHolder, record: VCRecord - ): + async def test_tag_query_valid_and_operator(self, holder: VCHolder, record: VCRecord): test_uri_list = [ "https://www.w3.org/2018/credentials#VerifiableCredential", "https://example.org/examples#UniversityDegreeCredential2", diff --git a/aries_cloudagent/tails/tests/test_indy.py b/aries_cloudagent/tails/tests/test_indy.py index 8e9eddb208..040fb7d40e 100644 --- a/aries_cloudagent/tails/tests/test_indy.py +++ b/aries_cloudagent/tails/tests/test_indy.py @@ -30,9 +30,7 @@ async def test_upload(self): ) indy_tails = test_module.IndyTailsServer() - with mock.patch.object( - test_module, "put_file", mock.CoroutineMock() - ) as mock_put: + with mock.patch.object(test_module, "put_file", mock.CoroutineMock()) as mock_put: mock_put.return_value = "tails-hash" (ok, text) = await indy_tails.upload_tails_file( context, @@ -40,9 +38,7 @@ async def test_upload(self): "/tmp/dummy/path", ) assert ok - assert ( - text == context.settings["tails_server_upload_url"] + "/" + REV_REG_ID - ) + assert text == context.settings["tails_server_upload_url"] + "/" + REV_REG_ID async def test_upload_indy_vdr(self): profile = InMemoryProfile.test_profile() @@ -61,9 +57,7 @@ async def test_upload_indy_vdr(self): profile.context.injector.bind_instance(BaseLedger, mock.MagicMock()) indy_tails = test_module.IndyTailsServer() - with mock.patch.object( - test_module, "put_file", mock.CoroutineMock() - ) as mock_put: + with mock.patch.object(test_module, "put_file", mock.CoroutineMock()) as mock_put: mock_put.return_value = "tails-hash" (ok, text) = await indy_tails.upload_tails_file( profile.context, @@ -71,9 +65,7 @@ async def test_upload_indy_vdr(self): "/tmp/dummy/path", ) assert ok - assert ( - text == profile.settings["tails_server_upload_url"] + "/" + REV_REG_ID - ) + assert text == profile.settings["tails_server_upload_url"] + "/" + REV_REG_ID async def test_upload_x(self): context = InjectionContext( @@ -84,9 +76,7 @@ async def test_upload_x(self): ) indy_tails = test_module.IndyTailsServer() - with mock.patch.object( - test_module, "put_file", mock.CoroutineMock() - ) as mock_put: + with mock.patch.object(test_module, "put_file", mock.CoroutineMock()) as mock_put: mock_put.side_effect = test_module.PutError("Server 
down for maintenance") (ok, text) = await indy_tails.upload_tails_file( diff --git a/aries_cloudagent/transport/inbound/receipt.py b/aries_cloudagent/transport/inbound/receipt.py index 83f2b149ad..9031772577 100644 --- a/aries_cloudagent/transport/inbound/receipt.py +++ b/aries_cloudagent/transport/inbound/receipt.py @@ -28,7 +28,7 @@ def __init__( sender_verkey: str = None, thread_id: str = None, parent_thread_id: str = None, - didcomm_version: DIDCommVersion = None + didcomm_version: DIDCommVersion = None, ): """Initialize the message delivery instance.""" self._connection_id = connection_id diff --git a/aries_cloudagent/transport/inbound/tests/test_http_transport.py b/aries_cloudagent/transport/inbound/tests/test_http_transport.py index 2d6baaac6c..702b4faad2 100644 --- a/aries_cloudagent/transport/inbound/tests/test_http_transport.py +++ b/aries_cloudagent/transport/inbound/tests/test_http_transport.py @@ -72,9 +72,7 @@ def get_application(self): return self.transport.make_application() async def test_start_x(self): - with mock.patch.object( - test_module.web, "TCPSite", mock.MagicMock() - ) as mock_site: + with mock.patch.object(test_module.web, "TCPSite", mock.MagicMock()) as mock_site: mock_site.return_value = mock.MagicMock( start=mock.CoroutineMock(side_effect=OSError()) ) diff --git a/aries_cloudagent/transport/inbound/tests/test_manager.py b/aries_cloudagent/transport/inbound/tests/test_manager.py index 7c9d8c846e..4f0f3a0b27 100644 --- a/aries_cloudagent/transport/inbound/tests/test_manager.py +++ b/aries_cloudagent/transport/inbound/tests/test_manager.py @@ -162,9 +162,7 @@ async def test_close_x(self): mgr.closed_session(mock_session) async def test_process_undelivered(self): - self.profile.context.update_settings( - {"transport.enable_undelivered_queue": True} - ) + self.profile.context.update_settings({"transport.enable_undelivered_queue": True}) test_verkey = "test-verkey" test_wire_format = mock.MagicMock() mgr = InboundTransportManager(self.profile, None) diff --git a/aries_cloudagent/transport/inbound/tests/test_ws_transport.py b/aries_cloudagent/transport/inbound/tests/test_ws_transport.py index 52d71e8f90..90a5f0c688 100644 --- a/aries_cloudagent/transport/inbound/tests/test_ws_transport.py +++ b/aries_cloudagent/transport/inbound/tests/test_ws_transport.py @@ -37,7 +37,7 @@ def create_session( client_info, wire_format, can_respond: bool = False, - **kwargs + **kwargs, ): if not self.session: session = InboundSession( @@ -68,9 +68,7 @@ def receive_message( self.result_event.set() async def test_start_x(self): - with mock.patch.object( - test_module.web, "TCPSite", mock.MagicMock() - ) as mock_site: + with mock.patch.object(test_module.web, "TCPSite", mock.MagicMock()) as mock_site: mock_site.return_value = mock.MagicMock( start=mock.CoroutineMock(side_effect=OSError()) ) diff --git a/aries_cloudagent/transport/outbound/manager.py b/aries_cloudagent/transport/outbound/manager.py index e83f54e352..55be0bd8d4 100644 --- a/aries_cloudagent/transport/outbound/manager.py +++ b/aries_cloudagent/transport/outbound/manager.py @@ -202,9 +202,7 @@ def get_running_transport_for_endpoint(self, endpoint: str): # Grab the scheme from the uri scheme = urlparse(endpoint).scheme if scheme == "": - raise OutboundDeliveryError( - f"The uri '{endpoint}' does not specify a scheme" - ) + raise OutboundDeliveryError(f"The uri '{endpoint}' does not specify a scheme") # Look up transport that is registered to handle this scheme transport_id = self.get_running_transport_for_scheme(scheme) @@ 
-326,9 +324,7 @@ def _process_done(self, task: asyncio.Task): """Handle completion of the drain process.""" exc_info = task_exc_info(task) if exc_info: - LOGGER.exception( - "Exception in outbound queue processing:", exc_info=exc_info - ) + LOGGER.exception("Exception in outbound queue processing:", exc_info=exc_info) if self._process_task and self._process_task.done(): self._process_task = None @@ -378,8 +374,7 @@ async def _process_loop(self): trace_event( self.root_profile.settings, queued.message if queued.message else queued.payload, - outcome="OutboundTransportManager.DELIVER.END." - + queued.endpoint, + outcome="OutboundTransportManager.DELIVER.END." + queued.endpoint, perf_counter=p_time, ) diff --git a/aries_cloudagent/transport/outbound/tests/test_http_transport.py b/aries_cloudagent/transport/outbound/tests/test_http_transport.py index 707d74c05d..12f4038c8a 100644 --- a/aries_cloudagent/transport/outbound/tests/test_http_transport.py +++ b/aries_cloudagent/transport/outbound/tests/test_http_transport.py @@ -76,9 +76,7 @@ async def send_message(transport, payload, endpoint): transport = HttpTransport() - await asyncio.wait_for( - send_message(transport, b"{}", endpoint=server_addr), 5.0 - ) + await asyncio.wait_for(send_message(transport, b"{}", endpoint=server_addr), 5.0) assert self.message_results == [{}] assert self.headers.get("content-type") == "application/ssi-agent-wire" @@ -92,9 +90,7 @@ async def send_message(transport, payload, endpoint): transport = HttpTransport() self.profile.settings["emit_new_didcomm_mime_type"] = True - await asyncio.wait_for( - send_message(transport, b"{}", endpoint=server_addr), 5.0 - ) + await asyncio.wait_for(send_message(transport, b"{}", endpoint=server_addr), 5.0) assert self.message_results == [{}] assert self.headers.get("content-type") == "application/didcomm-envelope-enc" @@ -107,9 +103,7 @@ async def send_message(transport, payload, endpoint): transport = HttpTransport() transport.collector = Collector() - await asyncio.wait_for( - send_message(transport, b"{}", endpoint=server_addr), 5.0 - ) + await asyncio.wait_for(send_message(transport, b"{}", endpoint=server_addr), 5.0) results = transport.collector.extract() assert results["count"] == { diff --git a/aries_cloudagent/transport/outbound/tests/test_ws_transport.py b/aries_cloudagent/transport/outbound/tests/test_ws_transport.py index 48697cf399..c1e1084d07 100644 --- a/aries_cloudagent/transport/outbound/tests/test_ws_transport.py +++ b/aries_cloudagent/transport/outbound/tests/test_ws_transport.py @@ -48,7 +48,5 @@ async def send_message(transport, payload, endpoint: str): assert self.message_results == [{}] self.message_results.clear() - await asyncio.wait_for( - send_message(transport, b"{}", endpoint=server_addr), 5.0 - ) + await asyncio.wait_for(send_message(transport, b"{}", endpoint=server_addr), 5.0) assert self.message_results == [{}] diff --git a/aries_cloudagent/transport/pack_format.py b/aries_cloudagent/transport/pack_format.py index 80d4351779..80729827d2 100644 --- a/aries_cloudagent/transport/pack_format.py +++ b/aries_cloudagent/transport/pack_format.py @@ -76,9 +76,7 @@ class PackWireFormat(BaseWireFormat): def __init__(self): """Initialize the pack wire format instance.""" self.v1pack_format = V1PackWireFormat() - self.v2pack_format = ( - V2PackWireFormat() if V2PackWireFormat is not None else None - ) + self.v2pack_format = V2PackWireFormat() if V2PackWireFormat is not None else None def get_for_packed_msg(self, packed_msg: Union[str, bytes]) -> 
BaseWireFormat: """Retrieve appropriate DIDComm instance for a given packed message.""" @@ -323,9 +321,7 @@ async def pack( raise WireFormatEncodeError("No wallet instance") try: - message = await wallet.pack_message( - message_json, recipient_keys, sender_key - ) + message = await wallet.pack_message(message_json, recipient_keys, sender_key) except WalletError as e: raise WireFormatEncodeError("Message pack failed") from e @@ -363,8 +359,6 @@ def get_recipient_keys(self, message_body: Union[str, bytes]) -> List[str]: recipient_keys = [recipient["header"]["kid"] for recipient in recipients] except Exception as e: - raise RecipientKeysError( - "Error trying to extract recipient keys from JWE", e - ) + raise RecipientKeysError("Error trying to extract recipient keys from JWE", e) return recipient_keys diff --git a/aries_cloudagent/transport/queue/tests/test_basic_queue.py b/aries_cloudagent/transport/queue/tests/test_basic_queue.py index b417bb1d0a..4ce073e4ab 100644 --- a/aries_cloudagent/transport/queue/tests/test_basic_queue.py +++ b/aries_cloudagent/transport/queue/tests/test_basic_queue.py @@ -43,9 +43,7 @@ async def test_dequeue_x(self): test_module.asyncio, "wait", mock.CoroutineMock() ) as mock_wait, mock.patch.object( queue, "stop_event" - ) as mock_stop_event, mock.patch.object( - queue, "queue" - ): + ) as mock_stop_event, mock.patch.object(queue, "queue"): mock_stop_event.is_set.return_value = False mock_wait.return_value = ( mock.MagicMock(), @@ -76,9 +74,7 @@ async def test_dequeue_none(self): test_module.asyncio, "wait", mock.CoroutineMock() ) as mock_wait, mock.patch.object( queue, "stop_event" - ) as mock_stop_event, mock.patch.object( - queue, "queue" - ): + ) as mock_stop_event, mock.patch.object(queue, "queue"): mock_stop_event.is_set.return_value = False mock_wait.return_value = ( mock.MagicMock(), diff --git a/aries_cloudagent/transport/tests/test_pack_format.py b/aries_cloudagent/transport/tests/test_pack_format.py index 6a91350786..7ebf823ca6 100644 --- a/aries_cloudagent/transport/tests/test_pack_format.py +++ b/aries_cloudagent/transport/tests/test_pack_format.py @@ -110,9 +110,7 @@ async def test_pack_x(self): ) ) session = InMemoryProfile.test_session(bind={BaseWallet: mock_wallet}) - with mock.patch.object( - test_module, "Forward", mock.MagicMock() - ) as mock_forward: + with mock.patch.object(test_module, "Forward", mock.MagicMock()) as mock_forward: mock_forward.return_value = mock.MagicMock(to_json=mock.MagicMock()) with self.assertRaises(WireFormatEncodeError): await serializer.pack(session, None, ["key"], ["key"], ["key"]) @@ -161,9 +159,7 @@ async def test_encode_decode(self): with self.assertRaises(test_module.RecipientKeysError): serializer.get_recipient_keys(message_json) - message_dict, delivery = await serializer.parse_message( - self.session, packed_json - ) + message_dict, delivery = await serializer.parse_message(self.session, packed_json) assert message_dict == self.test_message assert message_dict["@type"] == self.test_message_type assert delivery.thread_id == self.test_thread_id @@ -195,9 +191,7 @@ async def test_forward(self): assert isinstance(packed, dict) and "protected" in packed - message_dict, delivery = await serializer.parse_message( - self.session, packed_json - ) + message_dict, delivery = await serializer.parse_message(self.session, packed_json) assert message_dict["@type"] == DIDCommPrefix.qualify_current(FORWARD) assert delivery.recipient_verkey == router_did.verkey assert delivery.sender_verkey is None @@ -254,7 +248,6 @@ def 
setUp(self): self.wallet = self.session.inject(BaseWallet) async def test_errors(self): - self.session.context.injector.bind_instance( DIDCommMessaging, TestDIDCommMessaging() ) @@ -282,7 +275,6 @@ async def test_errors(self): ) async def test_fallback(self): - serializer = V2PackWireFormat() test_dm = TestDIDCommMessaging() diff --git a/aries_cloudagent/utils/classloader.py b/aries_cloudagent/utils/classloader.py index 771c186040..912b5c061f 100644 --- a/aries_cloudagent/utils/classloader.py +++ b/aries_cloudagent/utils/classloader.py @@ -68,9 +68,7 @@ def load_module(cls, mod_path: str, package: str = None) -> ModuleType: try: return import_module(mod_path, package) except ModuleNotFoundError as e: - raise ModuleLoadError( - f"Unable to import module {full_path}: {str(e)}" - ) from e + raise ModuleLoadError(f"Unable to import module {full_path}: {str(e)}") from e @classmethod def load_class( diff --git a/aries_cloudagent/utils/endorsement_setup.py b/aries_cloudagent/utils/endorsement_setup.py index 69e7facedb..ca02d9e4d1 100644 --- a/aries_cloudagent/utils/endorsement_setup.py +++ b/aries_cloudagent/utils/endorsement_setup.py @@ -43,9 +43,7 @@ async def attempt_auto_author_with_endorser_setup(profile: Profile): endorser_did = profile.settings.get_value("endorser.endorser_public_did") if not endorser_did: - LOGGER.info( - "No endorser DID, can connect, but can't setup connection metadata." - ) + LOGGER.info("No endorser DID, can connect, but can't setup connection metadata.") return try: @@ -99,6 +97,4 @@ async def attempt_auto_author_with_endorser_setup(profile: Profile): ) except Exception: - LOGGER.info( - "Error accepting endorser invitation/configuring endorser connection" - ) + LOGGER.info("Error accepting endorser invitation/configuring endorser connection") diff --git a/aries_cloudagent/utils/http.py b/aries_cloudagent/utils/http.py index ca40799db3..dc37f4cb69 100644 --- a/aries_cloudagent/utils/http.py +++ b/aries_cloudagent/utils/http.py @@ -184,8 +184,7 @@ async def put_file( ) if ( # redirect codes - response.status in (301, 302, 303, 307, 308) - and not attempt.final + response.status in (301, 302, 303, 307, 308) and not attempt.final ): # NOTE: a redirect counts as another upload attempt to_url = response.headers.get("Location") diff --git a/aries_cloudagent/utils/jwe.py b/aries_cloudagent/utils/jwe.py index 442681a6e6..075d9012e1 100644 --- a/aries_cloudagent/utils/jwe.py +++ b/aries_cloudagent/utils/jwe.py @@ -259,9 +259,7 @@ def protected_bytes(self) -> bytes: This value is used in the additional authenticated data when encrypting. 
""" return ( - self.protected_b64.encode("utf-8") - if self.protected_b64 is not None - else None + self.protected_b64.encode("utf-8") if self.protected_b64 is not None else None ) def set_payload(self, ciphertext: bytes, iv: bytes, tag: bytes, aad: bytes = None): diff --git a/aries_cloudagent/utils/profiles.py b/aries_cloudagent/utils/profiles.py index d5433f3afd..1e78314de7 100644 --- a/aries_cloudagent/utils/profiles.py +++ b/aries_cloudagent/utils/profiles.py @@ -15,9 +15,7 @@ def is_anoncreds_profile_raise_web_exception(profile: Profile) -> None: """Raise a web exception when the supplied profile is anoncreds.""" if isinstance(profile, AskarAnoncredsProfile): - raise web.HTTPForbidden( - reason="Interface not supported for an anoncreds profile" - ) + raise web.HTTPForbidden(reason="Interface not supported for an anoncreds profile") def is_not_anoncreds_profile_raise_web_exception(profile: Profile) -> None: diff --git a/aries_cloudagent/utils/repeat.py b/aries_cloudagent/utils/repeat.py index cc23e9d650..b13d73fb6e 100644 --- a/aries_cloudagent/utils/repeat.py +++ b/aries_cloudagent/utils/repeat.py @@ -46,9 +46,7 @@ def next_interval(self) -> float: def timeout(self, interval: float = None): """Create a context manager for timing out an attempt.""" - return async_timeout.timeout( - self.next_interval if interval is None else interval - ) + return async_timeout.timeout(self.next_interval if interval is None else interval) def __repr__(self) -> str: """Format as a string for debugging.""" diff --git a/aries_cloudagent/utils/stats.py b/aries_cloudagent/utils/stats.py index fe81bc975f..45a18f2019 100644 --- a/aries_cloudagent/utils/stats.py +++ b/aries_cloudagent/utils/stats.py @@ -41,9 +41,7 @@ def extract(self, names: Sequence[str] = None) -> dict: else: names = set(names).intersection(all_names) counts = {name: val for (name, val) in counts.items() if name in names} - maxes = { - name: val for (name, val) in self.max_time.items() if name in names - } + maxes = {name: val for (name, val) in self.max_time.items() if name in names} mins = {name: val for (name, val) in self.min_time.items() if name in names} totals = { name: val for (name, val) in self.total_time.items() if name in names diff --git a/aries_cloudagent/utils/task_queue.py b/aries_cloudagent/utils/task_queue.py index e67abb78b7..1f6f225a96 100644 --- a/aries_cloudagent/utils/task_queue.py +++ b/aries_cloudagent/utils/task_queue.py @@ -337,9 +337,7 @@ def completed_task( if exc_info: self.total_failed += 1 if not task_complete and not self._trace_fn: - LOGGER.exception( - "Error running task %s", ident or "", exc_info=exc_info - ) + LOGGER.exception("Error running task %s", ident or "", exc_info=exc_info) else: self.total_done += 1 if task_complete or self._trace_fn: diff --git a/aries_cloudagent/utils/tests/test_classloader.py b/aries_cloudagent/utils/tests/test_classloader.py index 50583856d2..41ae1fbbce 100644 --- a/aries_cloudagent/utils/tests/test_classloader.py +++ b/aries_cloudagent/utils/tests/test_classloader.py @@ -31,9 +31,7 @@ def test_import_relative(self): ) with mock.patch.object(test_module.sys, "modules", {}): assert ( - ClassLoader.load_module( - "..transport", "aries_cloudagent.config" - ).__name__ + ClassLoader.load_module("..transport", "aries_cloudagent.config").__name__ == "aries_cloudagent.transport" ) @@ -74,13 +72,9 @@ def test_load_subclass(self): def test_load_subclass_missing(self): with self.assertRaises(ClassNotFoundError): - assert ClassLoader.load_subclass_of( - TestCase, 
"aries_cloudagent.config.base" - ) + assert ClassLoader.load_subclass_of(TestCase, "aries_cloudagent.config.base") with self.assertRaises(ClassNotFoundError): - assert ClassLoader.load_subclass_of( - TestCase, "aries_cloudagent.not-a-module" - ) + assert ClassLoader.load_subclass_of(TestCase, "aries_cloudagent.not-a-module") def test_scan_packages(self): pkgs = ClassLoader.scan_subpackages("aries_cloudagent") diff --git a/aries_cloudagent/utils/tests/test_http.py b/aries_cloudagent/utils/tests/test_http.py index 6e2ff84adb..2fa4ad0225 100644 --- a/aries_cloudagent/utils/tests/test_http.py +++ b/aries_cloudagent/utils/tests/test_http.py @@ -64,9 +64,7 @@ async def redirect_route(self, request): async def test_fetch_stream(self): server_addr = f"http://localhost:{self.server.port}" - stream = await fetch_stream( - f"{server_addr}/succeed", session=self.client.session - ) + stream = await fetch_stream(f"{server_addr}/succeed", session=self.client.session) result = await stream.read() assert result == b"[true]" assert self.succeed_calls == 1 diff --git a/aries_cloudagent/utils/tests/test_jwe.py b/aries_cloudagent/utils/tests/test_jwe.py index 45dc8195fa..cd406976ca 100644 --- a/aries_cloudagent/utils/tests/test_jwe.py +++ b/aries_cloudagent/utils/tests/test_jwe.py @@ -39,9 +39,9 @@ def test_envelope_load_single_recipient(self): assert loaded.iv == IV assert loaded.tag == TAG assert loaded.aad == AAD - assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url( - AAD - ).encode("utf-8") + assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url(AAD).encode( + "utf-8" + ) assert loaded.ciphertext == CIPHERTEXT recips = list(loaded.recipients) @@ -74,9 +74,9 @@ def test_envelope_load_multiple_recipients(self): assert loaded.iv == IV assert loaded.tag == TAG assert loaded.aad == AAD - assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url( - AAD - ).encode("utf-8") + assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url(AAD).encode( + "utf-8" + ) assert loaded.ciphertext == CIPHERTEXT recips = list(loaded.recipients) @@ -112,9 +112,9 @@ def test_envelope_serialize_single_recipient(self): assert loaded.iv == IV assert loaded.tag == TAG assert loaded.aad == AAD - assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url( - AAD - ).encode("utf-8") + assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url(AAD).encode( + "utf-8" + ) assert loaded.ciphertext == CIPHERTEXT recips = list(loaded.recipients) @@ -145,9 +145,9 @@ def test_envelope_serialize_multiple_recipients(self): assert loaded.iv == IV assert loaded.tag == TAG assert loaded.aad == AAD - assert loaded.combined_aad == loaded.protected_bytes + b"." + b64url( - AAD - ).encode("utf-8") + assert loaded.combined_aad == loaded.protected_bytes + b"." 
+ b64url(AAD).encode( + "utf-8" + ) assert loaded.ciphertext == CIPHERTEXT recips = list(loaded.recipients) diff --git a/aries_cloudagent/utils/tests/test_repeat.py b/aries_cloudagent/utils/tests/test_repeat.py index f92e8f3c1a..7f221270d5 100644 --- a/aries_cloudagent/utils/tests/test_repeat.py +++ b/aries_cloudagent/utils/tests/test_repeat.py @@ -33,7 +33,7 @@ async def sleep(timeout): assert seen == len(expect) def test_repr(self): - assert repr( - test_module.RepeatSequence(5, interval=5.0, backoff=0.25) - ).startswith(" bytes: """Sign message(s) using key pair.""" @abstractmethod - async def verify( - self, message: Union[List[bytes], bytes], signature: bytes - ) -> bool: + async def verify(self, message: Union[List[bytes], bytes], signature: bytes) -> bool: """Verify message(s) against signature using key pair.""" @abstractproperty diff --git a/aries_cloudagent/vc/ld_proofs/crypto/wallet_key_pair.py b/aries_cloudagent/vc/ld_proofs/crypto/wallet_key_pair.py index 8d99eaed90..189a1086d4 100644 --- a/aries_cloudagent/vc/ld_proofs/crypto/wallet_key_pair.py +++ b/aries_cloudagent/vc/ld_proofs/crypto/wallet_key_pair.py @@ -41,9 +41,7 @@ async def sign(self, message: Union[List[bytes], bytes]) -> bytes: from_verkey=self.public_key_base58, ) - async def verify( - self, message: Union[List[bytes], bytes], signature: bytes - ) -> bool: + async def verify(self, message: Union[List[bytes], bytes], signature: bytes) -> bool: """Verify message against signature using wallet.""" if not self.public_key_base58: raise LinkedDataProofException( diff --git a/aries_cloudagent/vc/ld_proofs/document_downloader.py b/aries_cloudagent/vc/ld_proofs/document_downloader.py index a2deb37b5b..21149ccefe 100644 --- a/aries_cloudagent/vc/ld_proofs/document_downloader.py +++ b/aries_cloudagent/vc/ld_proofs/document_downloader.py @@ -45,8 +45,9 @@ class StaticCacheJsonLdDownloader: "https://w3id.org/security/v2": "security-v2-context.jsonld", "https://w3id.org/security/suites/ed25519-2020/v1": "ed25519-2020-context.jsonld", "https://w3id.org/security/bbs/v1": "bbs-v1-context.jsonld", - "https://identity.foundation/presentation-exchange/submission/v1": - "dif-presentation-exchange-submission-v1.jsonld", + "https://identity.foundation/presentation-exchange/submission/v1": ( + "dif-presentation-exchange-submission-v1.jsonld" + ), } def __init__( @@ -104,8 +105,7 @@ def download(self, url: str, options: Optional[Dict], **kwargs): if ( not all([pieces.scheme, pieces.netloc]) or pieces.scheme not in ["http", "https"] - or set(pieces.netloc) - > set(string.ascii_letters + string.digits + "-.:") + or set(pieces.netloc) > set(string.ascii_letters + string.digits + "-.:") ): raise JsonLdError( 'URL could not be dereferenced; only "http" and "https" ' @@ -176,9 +176,7 @@ def parse(self, doc: Dict, link_header: Optional[str]): if ( linked_alternate and linked_alternate.get("type") == "application/ld+json" - and not re.match( - r"^application\/(\w*\+)?json$", doc["content_type"] - ) + and not re.match(r"^application\/(\w*\+)?json$", doc["content_type"]) ): doc["contentType"] = "application/ld+json" doc["documentUrl"] = jsonld.prepend_base( diff --git a/aries_cloudagent/vc/ld_proofs/purposes/tests/test_controller_proof_purpose.py b/aries_cloudagent/vc/ld_proofs/purposes/tests/test_controller_proof_purpose.py index 4babb4d91e..85ae8a279b 100644 --- a/aries_cloudagent/vc/ld_proofs/purposes/tests/test_controller_proof_purpose.py +++ b/aries_cloudagent/vc/ld_proofs/purposes/tests/test_controller_proof_purpose.py @@ -34,9 +34,7 @@ async 
def test_validate(self): proof = document.pop("proof") suite = mock.MagicMock() verification_method = { - "id": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["proof"][ - "verificationMethod" - ], + "id": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["proof"]["verificationMethod"], "controller": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["issuer"], } @@ -57,9 +55,7 @@ async def test_validate_controller_invalid_type(self): proof = document.pop("proof") suite = mock.MagicMock() verification_method = { - "id": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["proof"][ - "verificationMethod" - ], + "id": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["proof"]["verificationMethod"], "controller": 10, } @@ -81,9 +77,7 @@ async def test_validate_x_not_authorized(self): proof = document.pop("proof") suite = mock.MagicMock() verification_method = { - "id": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["proof"][ - "verificationMethod" - ], + "id": TEST_VC_DOCUMENT_SIGNED_DID_KEY_ED25519["proof"]["verificationMethod"], "controller": "did:example:489398593", } diff --git a/aries_cloudagent/vc/ld_proofs/suites/bbs_bls_signature_proof_2020.py b/aries_cloudagent/vc/ld_proofs/suites/bbs_bls_signature_proof_2020.py index 51ec067fb9..2133168397 100644 --- a/aries_cloudagent/vc/ld_proofs/suites/bbs_bls_signature_proof_2020.py +++ b/aries_cloudagent/vc/ld_proofs/suites/bbs_bls_signature_proof_2020.py @@ -104,9 +104,7 @@ async def derive_proof( # document statements into actual node identifiers # e.g _:c14n0 => urn:bnid:_:c14n0 transformed_input_document_statements = ( - self._transform_blank_node_ids_into_placeholder_node_ids( - document_statements - ) + self._transform_blank_node_ids_into_placeholder_node_ids(document_statements) ) # Transform the resulting RDF statements back into JSON-LD @@ -213,9 +211,7 @@ async def derive_proof( derived_proof["proofPurpose"] = proof["proofPurpose"] derived_proof["created"] = proof["created"] - return DeriveProofResult( - document={**reveal_document_result}, proof=derived_proof - ) + return DeriveProofResult(document={**reveal_document_result}, proof=derived_proof) async def verify_proof( self, diff --git a/aries_cloudagent/vc/ld_proofs/tests/test_check.py b/aries_cloudagent/vc/ld_proofs/tests/test_check.py index ca6ee1668b..2505d6b4da 100644 --- a/aries_cloudagent/vc/ld_proofs/tests/test_check.py +++ b/aries_cloudagent/vc/ld_proofs/tests/test_check.py @@ -148,8 +148,7 @@ class TestCheck(TestCase): def test_get_properties_without_context_valid(self): assert ( - get_properties_without_context(VALID_INPUT_DOC, custom_document_loader) - == [] + get_properties_without_context(VALID_INPUT_DOC, custom_document_loader) == [] ) def test_get_properties_without_context_invalid(self): @@ -168,9 +167,7 @@ def test_get_properties_without_context_invalid(self): def test_get_properties_without_context_vaccination_valid(self): assert ( - get_properties_without_context( - VALID_VACCINATION_DOC, custom_document_loader - ) + get_properties_without_context(VALID_VACCINATION_DOC, custom_document_loader) == [] ) diff --git a/aries_cloudagent/vc/tests/contexts/dif_presentation_submission_v1.py b/aries_cloudagent/vc/tests/contexts/dif_presentation_submission_v1.py index d8a4da17a4..292d95c295 100644 --- a/aries_cloudagent/vc/tests/contexts/dif_presentation_submission_v1.py +++ b/aries_cloudagent/vc/tests/contexts/dif_presentation_submission_v1.py @@ -1,15 +1,15 @@ DIF_PRESENTATION_SUBMISSION_V1 = { - "@context": { - "@version": 1.1, - "PresentationSubmission": { - "@id": 
"https://identity.foundation/presentation-exchange/#presentation-submission", - "@context": { + "@context": { "@version": 1.1, - "presentation_submission": { - "@id": "https://identity.foundation/presentation-exchange/#presentation-submission", - "@type": "@json" - } - } + "PresentationSubmission": { + "@id": "https://identity.foundation/presentation-exchange/#presentation-submission", + "@context": { + "@version": 1.1, + "presentation_submission": { + "@id": "https://identity.foundation/presentation-exchange/#presentation-submission", + "@type": "@json", + }, + }, + }, } - } } diff --git a/aries_cloudagent/vc/tests/contexts/schema_org.py b/aries_cloudagent/vc/tests/contexts/schema_org.py index e952d0462c..8e05a6d0d5 100644 --- a/aries_cloudagent/vc/tests/contexts/schema_org.py +++ b/aries_cloudagent/vc/tests/contexts/schema_org.py @@ -310,9 +310,7 @@ "DigitalAudioTapeFormat": {"@id": "schema:DigitalAudioTapeFormat"}, "DigitalDocument": {"@id": "schema:DigitalDocument"}, "DigitalDocumentPermission": {"@id": "schema:DigitalDocumentPermission"}, - "DigitalDocumentPermissionType": { - "@id": "schema:DigitalDocumentPermissionType" - }, + "DigitalDocumentPermissionType": {"@id": "schema:DigitalDocumentPermissionType"}, "DigitalFormat": {"@id": "schema:DigitalFormat"}, "DisabilitySupport": {"@id": "schema:DisabilitySupport"}, "DisagreeAction": {"@id": "schema:DisagreeAction"}, @@ -361,9 +359,7 @@ "EUEnergyEfficiencyCategoryE": {"@id": "schema:EUEnergyEfficiencyCategoryE"}, "EUEnergyEfficiencyCategoryF": {"@id": "schema:EUEnergyEfficiencyCategoryF"}, "EUEnergyEfficiencyCategoryG": {"@id": "schema:EUEnergyEfficiencyCategoryG"}, - "EUEnergyEfficiencyEnumeration": { - "@id": "schema:EUEnergyEfficiencyEnumeration" - }, + "EUEnergyEfficiencyEnumeration": {"@id": "schema:EUEnergyEfficiencyEnumeration"}, "Ear": {"@id": "schema:Ear"}, "EatAction": {"@id": "schema:EatAction"}, "EditedOrCroppedContent": {"@id": "schema:EditedOrCroppedContent"}, @@ -713,9 +709,7 @@ }, "MerchantReturnNotPermitted": {"@id": "schema:MerchantReturnNotPermitted"}, "MerchantReturnPolicy": {"@id": "schema:MerchantReturnPolicy"}, - "MerchantReturnUnlimitedWindow": { - "@id": "schema:MerchantReturnUnlimitedWindow" - }, + "MerchantReturnUnlimitedWindow": {"@id": "schema:MerchantReturnUnlimitedWindow"}, "MerchantReturnUnspecified": {"@id": "schema:MerchantReturnUnspecified"}, "Message": {"@id": "schema:Message"}, "MiddleSchool": {"@id": "schema:MiddleSchool"}, @@ -901,9 +895,7 @@ "Pathology": {"@id": "schema:Pathology"}, "PathologyTest": {"@id": "schema:PathologyTest"}, "Patient": {"@id": "schema:Patient"}, - "PatientExperienceHealthAspect": { - "@id": "schema:PatientExperienceHealthAspect" - }, + "PatientExperienceHealthAspect": {"@id": "schema:PatientExperienceHealthAspect"}, "PawnShop": {"@id": "schema:PawnShop"}, "PayAction": {"@id": "schema:PayAction"}, "PaymentAutomaticallyApplied": {"@id": "schema:PaymentAutomaticallyApplied"}, @@ -967,9 +959,7 @@ "PresentationDigitalDocument": {"@id": "schema:PresentationDigitalDocument"}, "PreventionHealthAspect": {"@id": "schema:PreventionHealthAspect"}, "PreventionIndication": {"@id": "schema:PreventionIndication"}, - "PriceComponentTypeEnumeration": { - "@id": "schema:PriceComponentTypeEnumeration" - }, + "PriceComponentTypeEnumeration": {"@id": "schema:PriceComponentTypeEnumeration"}, "PriceSpecification": {"@id": "schema:PriceSpecification"}, "PriceTypeEnumeration": {"@id": "schema:PriceTypeEnumeration"}, "PrimaryCare": {"@id": "schema:PrimaryCare"}, @@ -1009,9 +999,7 @@ "QAPage": 
{"@id": "schema:QAPage"}, "QualitativeValue": {"@id": "schema:QualitativeValue"}, "QuantitativeValue": {"@id": "schema:QuantitativeValue"}, - "QuantitativeValueDistribution": { - "@id": "schema:QuantitativeValueDistribution" - }, + "QuantitativeValueDistribution": {"@id": "schema:QuantitativeValueDistribution"}, "Quantity": {"@id": "schema:Quantity"}, "Question": {"@id": "schema:Question"}, "Quiz": {"@id": "schema:Quiz"}, @@ -1324,9 +1312,7 @@ "WearableMeasurementHips": {"@id": "schema:WearableMeasurementHips"}, "WearableMeasurementInseam": {"@id": "schema:WearableMeasurementInseam"}, "WearableMeasurementLength": {"@id": "schema:WearableMeasurementLength"}, - "WearableMeasurementOutsideLeg": { - "@id": "schema:WearableMeasurementOutsideLeg" - }, + "WearableMeasurementOutsideLeg": {"@id": "schema:WearableMeasurementOutsideLeg"}, "WearableMeasurementSleeve": {"@id": "schema:WearableMeasurementSleeve"}, "WearableMeasurementTypeEnumeration": { "@id": "schema:WearableMeasurementTypeEnumeration" @@ -1354,14 +1340,10 @@ "WearableSizeSystemAU": {"@id": "schema:WearableSizeSystemAU"}, "WearableSizeSystemBR": {"@id": "schema:WearableSizeSystemBR"}, "WearableSizeSystemCN": {"@id": "schema:WearableSizeSystemCN"}, - "WearableSizeSystemContinental": { - "@id": "schema:WearableSizeSystemContinental" - }, + "WearableSizeSystemContinental": {"@id": "schema:WearableSizeSystemContinental"}, "WearableSizeSystemDE": {"@id": "schema:WearableSizeSystemDE"}, "WearableSizeSystemEN13402": {"@id": "schema:WearableSizeSystemEN13402"}, - "WearableSizeSystemEnumeration": { - "@id": "schema:WearableSizeSystemEnumeration" - }, + "WearableSizeSystemEnumeration": {"@id": "schema:WearableSizeSystemEnumeration"}, "WearableSizeSystemEurope": {"@id": "schema:WearableSizeSystemEurope"}, "WearableSizeSystemFR": {"@id": "schema:WearableSizeSystemFR"}, "WearableSizeSystemGS1": {"@id": "schema:WearableSizeSystemGS1"}, @@ -1473,9 +1455,7 @@ "antagonist": {"@id": "schema:antagonist"}, "appearance": {"@id": "schema:appearance"}, "applicableLocation": {"@id": "schema:applicableLocation"}, - "applicantLocationRequirements": { - "@id": "schema:applicantLocationRequirements" - }, + "applicantLocationRequirements": {"@id": "schema:applicantLocationRequirements"}, "application": {"@id": "schema:application"}, "applicationCategory": {"@id": "schema:applicationCategory"}, "applicationContact": {"@id": "schema:applicationContact"}, @@ -2044,9 +2024,7 @@ "includedComposition": {"@id": "schema:includedComposition"}, "includedDataCatalog": {"@id": "schema:includedDataCatalog"}, "includedInDataCatalog": {"@id": "schema:includedInDataCatalog"}, - "includedInHealthInsurancePlan": { - "@id": "schema:includedInHealthInsurancePlan" - }, + "includedInHealthInsurancePlan": {"@id": "schema:includedInHealthInsurancePlan"}, "includedRiskFactor": {"@id": "schema:includedRiskFactor"}, "includesAttraction": {"@id": "schema:includesAttraction"}, "includesHealthPlanFormulary": {"@id": "schema:includesHealthPlanFormulary"}, @@ -2231,9 +2209,7 @@ "model": {"@id": "schema:model"}, "modelDate": {"@id": "schema:modelDate", "@type": "Date"}, "modifiedTime": {"@id": "schema:modifiedTime"}, - "monthlyMinimumRepaymentAmount": { - "@id": "schema:monthlyMinimumRepaymentAmount" - }, + "monthlyMinimumRepaymentAmount": {"@id": "schema:monthlyMinimumRepaymentAmount"}, "monthsOfExperience": {"@id": "schema:monthsOfExperience"}, "mpn": {"@id": "schema:mpn"}, "multipleValues": {"@id": "schema:multipleValues"}, @@ -2298,9 +2274,7 @@ "occupancy": {"@id": "schema:occupancy"}, 
"occupationLocation": {"@id": "schema:occupationLocation"}, "occupationalCategory": {"@id": "schema:occupationalCategory"}, - "occupationalCredentialAwarded": { - "@id": "schema:occupationalCredentialAwarded" - }, + "occupationalCredentialAwarded": {"@id": "schema:occupationalCredentialAwarded"}, "offerCount": {"@id": "schema:offerCount"}, "offeredBy": {"@id": "schema:offeredBy"}, "offers": {"@id": "schema:offers"}, diff --git a/aries_cloudagent/vc/vc_di/prove.py b/aries_cloudagent/vc/vc_di/prove.py index 3378eb4782..7ba6d20866 100644 --- a/aries_cloudagent/vc/vc_di/prove.py +++ b/aries_cloudagent/vc/vc_di/prove.py @@ -139,12 +139,8 @@ async def create_rev_states( for w3c_cred_cred in w3c_creds_metadata: rev_reg_def = rev_reg_defs.get(w3c_cred_cred["rev_reg_id"]) rev_reg_def["id"] = w3c_cred_cred["rev_reg_id"] - rev_reg_def_from_registry = RevocationRegistry.from_definition( - rev_reg_def, True - ) - local_tails_path = ( - await rev_reg_def_from_registry.get_or_fetch_local_tails_path() - ) + rev_reg_def_from_registry = RevocationRegistry.from_definition(rev_reg_def, True) + local_tails_path = await rev_reg_def_from_registry.get_or_fetch_local_tails_path() revocation_status_list = RevocationStatusList.load( rev_reg_entries.get(w3c_cred_cred["rev_reg_id"])[ w3c_cred_cred.get("timestamp") @@ -242,9 +238,7 @@ async def prepare_data_for_presentation( profile, w3c_cred.rev_reg_id, None ) w3c_creds_metadata[entry_idx]["rev_reg_id"] = w3c_cred.rev_reg_id - w3c_creds_metadata[entry_idx][ - "timestamp" - ] = result.revocation_list.timestamp + w3c_creds_metadata[entry_idx]["timestamp"] = result.revocation_list.timestamp non_revoked_interval = { "from": result.revocation_list.timestamp, @@ -281,9 +275,9 @@ async def prepare_data_for_presentation( f"{predicate_referent_base}_{predicate_referent_index}" ) predicate_referent_index = predicate_referent_index + 1 - anoncreds_proofrequest["requested_predicates"][ - predicate_referent - ] = pred_request + anoncreds_proofrequest["requested_predicates"][predicate_referent] = ( + pred_request + ) w3c_creds_metadata[entry_idx]["proof_preds"].append( predicate_referent ) diff --git a/aries_cloudagent/vc/vc_di/tests/test_manager.py b/aries_cloudagent/vc/vc_di/tests/test_manager.py index 4a5cbf8adc..7d23016cc0 100644 --- a/aries_cloudagent/vc/vc_di/tests/test_manager.py +++ b/aries_cloudagent/vc/vc_di/tests/test_manager.py @@ -173,7 +173,6 @@ def manager(profile: Profile): @pytest.mark.asyncio async def test_assert_no_callenge_error(manager: VcDiManager): with pytest.raises(VcDiManagerError) as context: - await manager.verify_presentation({}, {"options": {}}) diff --git a/aries_cloudagent/vc/vc_di/tests/test_prove.py b/aries_cloudagent/vc/vc_di/tests/test_prove.py index 29df71df5a..a749cc9d93 100644 --- a/aries_cloudagent/vc/vc_di/tests/test_prove.py +++ b/aries_cloudagent/vc/vc_di/tests/test_prove.py @@ -213,9 +213,7 @@ async def test_create_signed_anoncreds_presentation(profile: Profile): "@context": [ "https://www.w3.org/2018/credentials/v1", "https://w3id.org/security/data-integrity/v2", - { - "@vocab": "https://www.w3.org/ns/credentials/issuer-dependent#" - }, + {"@vocab": "https://www.w3.org/ns/credentials/issuer-dependent#"}, ], "type": ["VerifiableCredential"], "issuer": "7yDP6qARVAp1Rims8Fj43k", @@ -304,13 +302,10 @@ async def test_create_rev_states(): RevocationStatusList, "load", return_value=mock.MagicMock() ): with pytest.raises(AnonCredsHolderError): - await create_rev_states( - w3c_creds_metadata, rev_reg_defs, rev_reg_entries - ) + await 
create_rev_states(w3c_creds_metadata, rev_reg_defs, rev_reg_entries) with mock.patch.object( CredentialRevocationState, "create", return_value=mock.MagicMock() ) as mock_create: - result = await create_rev_states( w3c_creds_metadata, rev_reg_defs, rev_reg_entries ) diff --git a/aries_cloudagent/vc/vc_ld/issue.py b/aries_cloudagent/vc/vc_ld/issue.py index 3cbfe61d39..9fba814261 100644 --- a/aries_cloudagent/vc/vc_ld/issue.py +++ b/aries_cloudagent/vc/vc_ld/issue.py @@ -42,9 +42,7 @@ async def issue( # Validate credential errors = CredentialSchema().validate(credential) if len(errors) > 0: - raise LinkedDataProofException( - f"Credential contains invalid structure: {errors}" - ) + raise LinkedDataProofException(f"Credential contains invalid structure: {errors}") # Set default proof purpose if not set if not purpose: diff --git a/aries_cloudagent/vc/vc_ld/manager.py b/aries_cloudagent/vc/vc_ld/manager.py index de3f578158..25e8827619 100644 --- a/aries_cloudagent/vc/vc_ld/manager.py +++ b/aries_cloudagent/vc/vc_ld/manager.py @@ -57,9 +57,7 @@ Ed25519Signature2018: ED25519, Ed25519Signature2020: ED25519, } -PROOF_KEY_TYPE_MAPPING = cast( - Dict[ProofTypes, KeyType], SIGNATURE_SUITE_KEY_TYPE_MAPPING -) +PROOF_KEY_TYPE_MAPPING = cast(Dict[ProofTypes, KeyType], SIGNATURE_SUITE_KEY_TYPE_MAPPING) # We only want to add bbs suites to supported if the module is installed diff --git a/aries_cloudagent/vc/vc_ld/tests/test_credential.py b/aries_cloudagent/vc/vc_ld/tests/test_credential.py index 9afe04a2e1..b3b5851f98 100644 --- a/aries_cloudagent/vc/vc_ld/tests/test_credential.py +++ b/aries_cloudagent/vc/vc_ld/tests/test_credential.py @@ -292,9 +292,7 @@ ], "id": "http://example.gov/credentials/3732", "type": ["VerifiableCredential", "UniversityDegreeCredential"], - "issuer": { - "id": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" - }, + "issuer": {"id": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"}, "issuanceDate": "2020-03-10T04:24:12.164Z", "credentialSubject": { "id": "did:example:456", @@ -325,9 +323,7 @@ ], "id": "http://example.gov/credentials/3732", "type": ["VerifiableCredential", "UniversityDegreeCredential"], - "issuer": { - "id": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" - }, + "issuer": {"id": "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL"}, "issuanceDate": "2020-03-10T04:24:12.164Z", "credentialSubject": { "id": "did:example:456", diff --git a/aries_cloudagent/vc/vc_ld/tests/test_manager.py b/aries_cloudagent/vc/vc_ld/tests/test_manager.py index 34a76c527c..fbe095ac44 100644 --- a/aries_cloudagent/vc/vc_ld/tests/test_manager.py +++ b/aries_cloudagent/vc/vc_ld/tests/test_manager.py @@ -112,9 +112,7 @@ async def test_assert_can_issue_with_id_and_proof_type(manager: VcLdpManager): await manager.assert_can_issue_with_id_and_proof_type( "not_did", Ed25519Signature2018.signature_type ) - assert "Unable to issue credential with issuer id: not_did" in str( - context.value - ) + assert "Unable to issue credential with issuer id: not_did" in str(context.value) with mock.patch.object( manager, diff --git a/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py b/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py index 9fec151e35..1190d7277b 100644 --- a/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py +++ b/aries_cloudagent/vc/vc_ld/tests/test_vc_ld.py @@ -221,9 +221,7 @@ async def test_create_presentation(self): assert "Not all credentials have a valid structure" in str(context.exception) async def test_create_presentation_x_invalid_credential_structures(self): 
- unsigned_presentation = await create_presentation( - credentials=[CREDENTIAL_ISSUED] - ) + unsigned_presentation = await create_presentation(credentials=[CREDENTIAL_ISSUED]) suite = Ed25519Signature2018( verification_method=self.ed25519_verification_method, @@ -254,9 +252,7 @@ async def test_create_presentation_x_invalid_credential_structures(self): @pytest.mark.ursa_bbs_signatures async def test_sign_presentation_bbsbls(self): - unsigned_presentation = await create_presentation( - credentials=[CREDENTIAL_ISSUED] - ) + unsigned_presentation = await create_presentation(credentials=[CREDENTIAL_ISSUED]) suite = BbsBlsSignature2020( verification_method=self.bls12381g2_verification_method, diff --git a/aries_cloudagent/vc/vc_ld/verify.py b/aries_cloudagent/vc/vc_ld/verify.py index 95893d841c..4cbd0a8775 100644 --- a/aries_cloudagent/vc/vc_ld/verify.py +++ b/aries_cloudagent/vc/vc_ld/verify.py @@ -76,9 +76,7 @@ async def verify_credential( purpose=purpose, ) except Exception as e: - return DocumentVerificationResult( - verified=False, document=credential, errors=[e] - ) + return DocumentVerificationResult(verified=False, document=credential, errors=[e]) async def _verify_presentation( diff --git a/aries_cloudagent/wallet/anoncreds_upgrade.py b/aries_cloudagent/wallet/anoncreds_upgrade.py index 4e9f16e8bc..12f2cb2dd3 100644 --- a/aries_cloudagent/wallet/anoncreds_upgrade.py +++ b/aries_cloudagent/wallet/anoncreds_upgrade.py @@ -295,9 +295,7 @@ async def get_rev_list_upgrade_object( return RevListUpgradeObj( rev_list, - json.loads(rev_reg_def_upgrade_obj.askar_issuer_rev_reg_def.value)[ - "pending_pub" - ], + json.loads(rev_reg_def_upgrade_obj.askar_issuer_rev_reg_def.value)["pending_pub"], rev_reg_def_upgrade_obj.rev_reg_def_id, askar_cred_rev_records, ) @@ -602,9 +600,7 @@ async def upgrade_wallet_to_anoncreds_if_requested( async with profile.session() as session: storage = session.inject(BaseStorage) try: - upgrading_record = await storage.find_record( - RECORD_TYPE_ACAPY_UPGRADING, {} - ) + upgrading_record = await storage.find_record(RECORD_TYPE_ACAPY_UPGRADING, {}) if upgrading_record.value == UPGRADING_RECORD_FINISHED: IsAnoncredsSingleton().set_wallet(profile.name) return diff --git a/aries_cloudagent/wallet/askar.py b/aries_cloudagent/wallet/askar.py index b385c97737..5182aebb66 100644 --- a/aries_cloudagent/wallet/askar.py +++ b/aries_cloudagent/wallet/askar.py @@ -256,9 +256,7 @@ async def create_local_did( WalletError: If there is another backend error """ - did_validation = DIDParametersValidation( - self._session.context.inject(DIDMethods) - ) + did_validation = DIDParametersValidation(self._session.context.inject(DIDMethods)) did_validation.validate_key_type(method, key_type) if not metadata: @@ -420,9 +418,7 @@ async def get_local_did_for_verkey(self, verkey: str) -> DIDInfo: """ try: - dids = await self._session.handle.fetch_all( - CATEGORY_DID, {"verkey": verkey} - ) + dids = await self._session.handle.fetch_all(CATEGORY_DID, {"verkey": verkey}) except AskarError as err: raise WalletError("Error when fetching local DID for verkey") from err if dids: diff --git a/aries_cloudagent/wallet/jwt.py b/aries_cloudagent/wallet/jwt.py index d60c1ba5ad..ee4bd574e3 100644 --- a/aries_cloudagent/wallet/jwt.py +++ b/aries_cloudagent/wallet/jwt.py @@ -158,18 +158,14 @@ async def jwt_verify(profile: Profile, jwt: str) -> JWTVerifyResult: encoded_headers, encoded_payload, encoded_signature = jwt.split(".", 3) headers = b64_to_dict(encoded_headers) if "alg" not in headers or headers["alg"] != 
"EdDSA" or "kid" not in headers: - raise BadJWSHeaderError( - "Invalid JWS header parameters for Ed25519Signature2018." - ) + raise BadJWSHeaderError("Invalid JWS header parameters for Ed25519Signature2018.") payload = b64_to_dict(encoded_payload) verification_method = headers["kid"] decoded_signature = b64_to_bytes(encoded_signature, urlsafe=True) async with profile.session() as session: - verkey = await resolve_public_key_by_kid_for_verify( - profile, verification_method - ) + verkey = await resolve_public_key_by_kid_for_verify(profile, verification_method) wallet = session.inject(BaseWallet) valid = await wallet.verify_message( f"{encoded_headers}.{encoded_payload}".encode(), diff --git a/aries_cloudagent/wallet/key_pair.py b/aries_cloudagent/wallet/key_pair.py index b87e2f56b3..963cd256f1 100644 --- a/aries_cloudagent/wallet/key_pair.py +++ b/aries_cloudagent/wallet/key_pair.py @@ -75,9 +75,7 @@ async def get_key_pair(self, verkey: str) -> dict: """ - record = await self._store.find_record( - KEY_PAIR_STORAGE_TYPE, {"verkey": verkey} - ) + record = await self._store.find_record(KEY_PAIR_STORAGE_TYPE, {"verkey": verkey}) data = json.loads(record.value) return data @@ -97,9 +95,7 @@ async def delete_key_pair(self, verkey: str): StorageNotFoundError: If the record is not found """ - record = await self._store.find_record( - KEY_PAIR_STORAGE_TYPE, {"verkey": verkey} - ) + record = await self._store.find_record(KEY_PAIR_STORAGE_TYPE, {"verkey": verkey}) await self._store.delete_record(record) async def update_key_pair_metadata(self, verkey: str, metadata: dict): @@ -109,9 +105,7 @@ async def update_key_pair_metadata(self, verkey: str, metadata: dict): StorageNotFoundError: If the record is not found. """ - record = await self._store.find_record( - KEY_PAIR_STORAGE_TYPE, {"verkey": verkey} - ) + record = await self._store.find_record(KEY_PAIR_STORAGE_TYPE, {"verkey": verkey}) data = json.loads(record.value) data["metadata"] = metadata diff --git a/aries_cloudagent/wallet/models/wallet_record.py b/aries_cloudagent/wallet/models/wallet_record.py index a1f16c3beb..86546c225f 100644 --- a/aries_cloudagent/wallet/models/wallet_record.py +++ b/aries_cloudagent/wallet/models/wallet_record.py @@ -137,9 +137,7 @@ class Meta: ) key_management_mode = fields.Str( required=True, - validate=validate.OneOf( - [WalletRecord.MODE_MANAGED, WalletRecord.MODE_UNMANAGED] - ), + validate=validate.OneOf([WalletRecord.MODE_MANAGED, WalletRecord.MODE_UNMANAGED]), metadata={"description": "Mode regarding management of wallet key"}, ) settings = fields.Dict( diff --git a/aries_cloudagent/wallet/routes.py b/aries_cloudagent/wallet/routes.py index da17593ab9..406c4a6b49 100644 --- a/aries_cloudagent/wallet/routes.py +++ b/aries_cloudagent/wallet/routes.py @@ -533,9 +533,7 @@ async def wallet_did_list(request: web.BaseRequest): and (not filter_key_type or info.key_type == filter_key_type) ] - results.sort( - key=lambda info: (DIDPosture.get(info["posture"]).ordinal, info["did"]) - ) + results.sort(key=lambda info: (DIDPosture.get(info["posture"]).ordinal, info["did"])) return web.json_response({"results": results}) @@ -644,9 +642,7 @@ async def wallet_create_did(request: web.BaseRequest): ################################################### info = ( - await base_conn_mgr.create_did_peer_2( - my_endpoints, mediation_records - ) + await base_conn_mgr.create_did_peer_2(my_endpoints, mediation_records) if is_did_peer_2 else await base_conn_mgr.create_did_peer_4( my_endpoints, mediation_records @@ -935,9 +931,7 @@ async def 
promote_wallet_public_did( return info, attrib_def -@docs( - tags=["wallet"], summary="Update endpoint in wallet and on ledger if posted to it" -) +@docs(tags=["wallet"], summary="Update endpoint in wallet and on ledger if posted to it") @request_schema(DIDEndpointWithTypeSchema) @querystring_schema(CreateAttribTxnForEndorserOptionSchema()) @querystring_schema(AttribConnIdMatchInfoSchema()) @@ -956,9 +950,7 @@ async def wallet_set_did_endpoint(request: web.BaseRequest): body = await request.json() did = body["did"] endpoint = body.get("endpoint") - endpoint_type = EndpointType.get( - body.get("endpoint_type", EndpointType.ENDPOINT.w3c) - ) + endpoint_type = EndpointType.get(body.get("endpoint_type", EndpointType.ENDPOINT.w3c)) create_transaction_for_endorser = json.loads( request.query.get("create_transaction_for_endorser", "false") @@ -991,9 +983,7 @@ async def wallet_set_did_endpoint(request: web.BaseRequest): raise web.HTTPBadRequest(reason=err.roll_up) from err async with context.session() as session: - endorser_info = await connection_record.metadata_get( - session, "endorser_info" - ) + endorser_info = await connection_record.metadata_get(session, "endorser_info") if not endorser_info: raise web.HTTPForbidden( reason=( @@ -1085,9 +1075,7 @@ async def wallet_jwt_sign(request: web.BaseRequest): payload = body.get("payload", {}) try: - jws = await jwt_sign( - context.profile, headers, payload, did, verification_method - ) + jws = await jwt_sign(context.profile, headers, payload, did, verification_method) except ValueError as err: raise web.HTTPBadRequest(reason="Bad did or verification method") from err except WalletNotFoundError as err: diff --git a/aries_cloudagent/wallet/sd_jwt.py b/aries_cloudagent/wallet/sd_jwt.py index f5381c5cc3..9f65cf330d 100644 --- a/aries_cloudagent/wallet/sd_jwt.py +++ b/aries_cloudagent/wallet/sd_jwt.py @@ -129,9 +129,7 @@ def create_sd_list(payload, non_sd_list) -> List: """Create a list of claims which will be selectively disclosable.""" flattened_payload = create_json_paths(payload) separated_non_sd_list = separate_list_splices(non_sd_list) - sd_list = [ - claim for claim in flattened_payload if claim not in separated_non_sd_list - ] + sd_list = [claim for claim in flattened_payload if claim not in separated_non_sd_list] return sort_sd_list(sd_list) @@ -163,8 +161,8 @@ async def sd_jwt_sign( match.context.value.remove(match.value) match.context.value.append(SDObj(match.value)) else: - match.context.value[SDObj(str(match.path))] = ( - match.context.value.pop(str(match.path)) + match.context.value[SDObj(str(match.path))] = match.context.value.pop( + str(match.path) ) return await SDJWTIssuerACAPy( diff --git a/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py b/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py index 00c52bc623..5ad8a34c88 100644 --- a/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py +++ b/aries_cloudagent/wallet/tests/test_anoncreds_upgrade.py @@ -150,8 +150,7 @@ async def test_retry_converting_records(self, mock_handle): ) assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS assert ( - upgrading_record.value - == anoncreds_upgrade.UPGRADING_RECORD_FINISHED + upgrading_record.value == anoncreds_upgrade.UPGRADING_RECORD_FINISHED ) assert "test-profile" in singletons.IsAnoncredsSingleton().wallets @@ -171,9 +170,7 @@ async def test_upgrade_wallet_to_anoncreds(self, mock_handle): anoncreds_upgrade.UPGRADING_RECORD_IN_PROGRESS, ) ) - await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested( - self.profile 
- ) + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested(self.profile) storage_type_record = await storage.find_record( RECORD_TYPE_ACAPY_STORAGE_TYPE, tag_query={} ) @@ -200,9 +197,7 @@ async def test_upgrade_wallet_to_anoncreds(self, mock_handle): await storage.update_record( upgrading_record, anoncreds_upgrade.UPGRADING_RECORD_IN_PROGRESS, {} ) - await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested( - self.profile - ) + await anoncreds_upgrade.upgrade_wallet_to_anoncreds_if_requested(self.profile) assert mock_retry_converting_records.called async def test_set_storage_type_to_anoncreds_no_existing_record(self): @@ -224,7 +219,6 @@ async def test_set_storage_type_to_anoncreds_has_existing_record(self): assert storage_type_record.value == STORAGE_TYPE_VALUE_ANONCREDS async def test_update_if_subwallet_and_set_storage_type_with_subwallet(self): - await anoncreds_upgrade.finish_upgrade_by_updating_profile_or_shutting_down( self.profile, True ) @@ -235,7 +229,6 @@ async def test_update_if_subwallet_and_set_storage_type_with_subwallet(self): )._instance.flush.called async def test_update_if_subwallet_and_set_storage_type_with_base_wallet(self): - await anoncreds_upgrade.finish_upgrade_by_updating_profile_or_shutting_down( self.profile, False ) @@ -340,9 +333,7 @@ async def test_failed_upgrade(self, mock_handle): InMemoryProfileSession, "commit", # Don't wait for sleep in retry to speed up test - ) as mock_commit, mock.patch.object( - asyncio, "sleep" - ): + ) as mock_commit, mock.patch.object(asyncio, "sleep"): """ Only tests schemas and cred_defs failing to upgrade because the other objects are hard to mock. These tests should be enough to cover them as the logic is the same. diff --git a/aries_cloudagent/wallet/tests/test_bbs.py b/aries_cloudagent/wallet/tests/test_bbs.py index 6498bae993..3061b8d4bc 100644 --- a/aries_cloudagent/wallet/tests/test_bbs.py +++ b/aries_cloudagent/wallet/tests/test_bbs.py @@ -39,9 +39,7 @@ def test_sign(self): assert signed - assert verify_signed_messages_bls12381g2( - SIGN_MESSAGES, signed, PUBLIC_KEY_BYTES - ) + assert verify_signed_messages_bls12381g2(SIGN_MESSAGES, signed, PUBLIC_KEY_BYTES) def test_sign_x_invalid_secret_key_bytes(self): with self.assertRaises(BbsException) as context: diff --git a/aries_cloudagent/wallet/tests/test_crypto.py b/aries_cloudagent/wallet/tests/test_crypto.py index bac2d10cd2..01dd3fefc7 100644 --- a/aries_cloudagent/wallet/tests/test_crypto.py +++ b/aries_cloudagent/wallet/tests/test_crypto.py @@ -126,8 +126,7 @@ def test_extract_pack_recipients_x(self): with pytest.raises(ValueError) as excinfo: test_module.extract_pack_recipients( - [JweRecipient(encrypted_key=b"MTIzNDU", header={"kid": "4mZ5TYv4oN"})] - * 2 + [JweRecipient(encrypted_key=b"MTIzNDU", header={"kid": "4mZ5TYv4oN"})] * 2 ) assert "Duplicate recipient key" in str(excinfo.value) diff --git a/aries_cloudagent/wallet/tests/test_default_verification_key_strategy.py b/aries_cloudagent/wallet/tests/test_default_verification_key_strategy.py index 21feeac5ea..530956e044 100644 --- a/aries_cloudagent/wallet/tests/test_default_verification_key_strategy.py +++ b/aries_cloudagent/wallet/tests/test_default_verification_key_strategy.py @@ -19,18 +19,14 @@ async def asyncSetUp(self) -> None: async def test_with_did_sov(self): strategy = DefaultVerificationKeyStrategy() assert ( - await strategy.get_verification_method_id_for_did( - TEST_DID_SOV, self.profile - ) + await strategy.get_verification_method_id_for_did(TEST_DID_SOV, self.profile) == 
TEST_DID_SOV + "#key-1" ) async def test_with_did_key(self): strategy = DefaultVerificationKeyStrategy() assert ( - await strategy.get_verification_method_id_for_did( - TEST_DID_KEY, self.profile - ) + await strategy.get_verification_method_id_for_did(TEST_DID_KEY, self.profile) == DIDKey.from_did(TEST_DID_KEY).key_id ) diff --git a/aries_cloudagent/wallet/tests/test_did_method.py b/aries_cloudagent/wallet/tests/test_did_method.py index 8368855475..6c84c1312b 100644 --- a/aries_cloudagent/wallet/tests/test_did_method.py +++ b/aries_cloudagent/wallet/tests/test_did_method.py @@ -49,9 +49,7 @@ def test_from_multicodec_prefix(self): assert key_types.from_multicodec_prefix(X25519_PREFIX_BYTES) == X25519 assert key_types.from_multicodec_prefix(BLS12381G1_PREFIX_BYTES) == BLS12381G1 assert key_types.from_multicodec_prefix(BLS12381G2_PREFIX_BYTES) == BLS12381G2 - assert ( - key_types.from_multicodec_prefix(BLS12381G1G2_PREFIX_BYTES) == BLS12381G1G2 - ) + assert key_types.from_multicodec_prefix(BLS12381G1G2_PREFIX_BYTES) == BLS12381G1G2 assert key_types.from_multicodec_prefix(b"\xef\x01") is None def test_from_prefixed_bytes(self): @@ -87,9 +85,7 @@ def test_from_prefixed_bytes(self): == BLS12381G1G2 ) assert ( - key_types.from_prefixed_bytes( - b"".join([b"\xef\x01", b"other-random-bytes"]) - ) + key_types.from_prefixed_bytes(b"".join([b"\xef\x01", b"other-random-bytes"])) is None ) diff --git a/aries_cloudagent/wallet/tests/test_in_memory_wallet.py b/aries_cloudagent/wallet/tests/test_in_memory_wallet.py index 96e555bca8..a9a66a52d9 100644 --- a/aries_cloudagent/wallet/tests/test_in_memory_wallet.py +++ b/aries_cloudagent/wallet/tests/test_in_memory_wallet.py @@ -77,9 +77,7 @@ async def test_create_signing_key_bls12381g2_seeded(self, wallet: InMemoryWallet await wallet.create_signing_key(BLS12381G2, "invalid-seed", None) @pytest.mark.asyncio - async def test_create_signing_key_unsupported_key_type( - self, wallet: InMemoryWallet - ): + async def test_create_signing_key_unsupported_key_type(self, wallet: InMemoryWallet): with pytest.raises(WalletError): await wallet.create_signing_key(X25519) @@ -248,9 +246,7 @@ async def test_local_verkey(self, wallet: InMemoryWallet): @pytest.mark.ursa_bbs_signatures async def test_local_verkey_bls12381g2(self, wallet: InMemoryWallet): await wallet.create_local_did(KEY, BLS12381G2, self.test_seed) - bls_info_get = await wallet.get_local_did_for_verkey( - self.test_bls12381g2_verkey - ) + bls_info_get = await wallet.get_local_did_for_verkey(self.test_bls12381g2_verkey) assert bls_info_get.did == self.test_key_bls12381g2_did assert bls_info_get.verkey == self.test_bls12381g2_verkey @@ -397,9 +393,7 @@ async def test_sign_verify(self, wallet: InMemoryWallet): message_bin = self.test_message.encode("ascii") signature = await wallet.sign_message(message_bin, info.verkey) assert signature == self.test_signature - verify = await wallet.verify_message( - message_bin, signature, info.verkey, ED25519 - ) + verify = await wallet.verify_message(message_bin, signature, info.verkey, ED25519) assert verify bad_sig = b"x" + signature[1:] @@ -449,9 +443,7 @@ async def test_sign_verify_bbs(self, wallet: InMemoryWallet): assert verify bad_msg = b"x" + message_bin[1:] - verify = await wallet.verify_message( - bad_msg, signature, info.verkey, BLS12381G2 - ) + verify = await wallet.verify_message(bad_msg, signature, info.verkey, BLS12381G2) assert not verify with pytest.raises(WalletError): @@ -552,7 +544,6 @@ async def test_set_did_endpoint_x_not_sov(self, wallet: InMemoryWallet): 
@pytest.mark.asyncio async def test_assign_and_get_by_kid(self, wallet: InMemoryWallet): - key = await wallet.create_key(ED25519) await wallet.assign_kid_to_key(key.verkey, "test_kid") diff --git a/aries_cloudagent/wallet/tests/test_jwt.py b/aries_cloudagent/wallet/tests/test_jwt.py index b3a2b41f32..79470666fd 100644 --- a/aries_cloudagent/wallet/tests/test_jwt.py +++ b/aries_cloudagent/wallet/tests/test_jwt.py @@ -35,9 +35,7 @@ async def test_sign_with_verification_method_and_verify( verification_method = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" headers = {} payload = {} - signed: str = await jwt_sign( - profile, headers, payload, did, verification_method - ) + signed: str = await jwt_sign(profile, headers, payload, did, verification_method) assert signed @@ -80,9 +78,7 @@ async def test_verify_x_invalid_signed(self, profile, in_memory_wallet): await jwt_verify(profile, signed) @pytest.mark.asyncio - async def test_resolve_public_key_by_kid_for_verify( - self, profile, in_memory_wallet - ): + async def test_resolve_public_key_by_kid_for_verify(self, profile, in_memory_wallet): await in_memory_wallet.create_local_did(KEY, ED25519, self.seed) kid = "did:key:z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL#z6Mkgg342Ycpuk263R9d8Aq6MUaxPn1DDeHyGo38EefXmgDL" key_material = await resolve_public_key_by_kid_for_verify(profile, kid) diff --git a/aries_cloudagent/wallet/tests/test_routes.py b/aries_cloudagent/wallet/tests/test_routes.py index ba8fc4db4d..7fb75df617 100644 --- a/aries_cloudagent/wallet/tests/test_routes.py +++ b/aries_cloudagent/wallet/tests/test_routes.py @@ -32,9 +32,7 @@ def setUp(self): self.profile = InMemoryProfile.test_profile( settings={"admin.admin_api_key": "secret-key"} ) - self.context = AdminRequestContext.test_context( - self.session_inject, self.profile - ) + self.context = AdminRequestContext.test_context(self.session_inject, self.profile) self.context.injector.bind_instance(KeyTypes, KeyTypes()) self.request_dict = { "context": self.context, @@ -58,9 +56,7 @@ def setUp(self): self.did_methods.register(WEB) self.context.injector.bind_instance(DIDMethods, self.did_methods) - self.test_mediator_routing_keys = [ - "3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRR" - ] + self.test_mediator_routing_keys = ["3Dn1SJNPaCXcvvJvSbsFWP2xaCjMom3can8CQNhWrTRR"] self.test_mediator_endpoint = "http://mediator.example.com" async def test_missing_wallet(self): @@ -704,9 +700,7 @@ async def test_set_public_did_update_endpoint_use_default_update_in_wallet(self) mock_route_manager = mock.MagicMock() mock_route_manager.route_verkey = mock.CoroutineMock() - mock_route_manager.mediation_record_if_id = mock.CoroutineMock( - return_value=None - ) + mock_route_manager.mediation_record_if_id = mock.CoroutineMock(return_value=None) mock_route_manager.routing_info = mock.CoroutineMock(return_value=(None, None)) mock_route_manager.__aenter__ = mock.CoroutineMock( return_value=mock_route_manager diff --git a/demo/features/environment.py b/demo/features/environment.py index abfe5be201..9ca2e341c3 100644 --- a/demo/features/environment.py +++ b/demo/features/environment.py @@ -1,4 +1,3 @@ -from runners.agent_container import AgentContainer from bdd_support.agent_backchannel_client import ( aries_container_terminate, ) diff --git a/demo/features/steps/0453-issue-credential.py b/demo/features/steps/0453-issue-credential.py index 98a92ad924..927de8c087 100644 --- a/demo/features/steps/0453-issue-credential.py +++ 
b/demo/features/steps/0453-issue-credential.py @@ -218,9 +218,7 @@ def step_impl(context, holder): # check wallet status wallet_revoked_creds = agent_container_GET( agent["agent"], - "/revocation/registry/" - + cred_exchange["indy"]["rev_reg_id"] - + "/issued/details", + "/revocation/registry/" + cred_exchange["indy"]["rev_reg_id"] + "/issued/details", ) print("wallet_revoked_creds:", wallet_revoked_creds) matched = False @@ -309,9 +307,7 @@ def step_impl(context, holder): # check wallet status wallet_revoked_creds = agent_container_GET( agent["agent"], - "/revocation/registry/" - + cred_exchange["indy"]["rev_reg_id"] - + "/issued/details", + "/revocation/registry/" + cred_exchange["indy"]["rev_reg_id"] + "/issued/details", ) matched = False for rec in wallet_revoked_creds: @@ -350,7 +346,9 @@ def step_impl(context, holder): assert False -@given('"{issuer}" is ready to issue a json-ld credential for {schema_name} with {key_type}') +@given( + '"{issuer}" is ready to issue a json-ld credential for {schema_name} with {key_type}' +) def step_impl(context, issuer, schema_name, key_type): # create a "did:key" to use as issuer agent = context.active_agents[issuer] @@ -383,7 +381,9 @@ def step_impl(context, holder, key_type): pass -@when('"{issuer}" offers "{holder}" a json-ld credential with data {credential_data} and {sig_type}') +@when( + '"{issuer}" offers "{holder}" a json-ld credential with data {credential_data} and {sig_type}' +) def step_impl(context, issuer, holder, credential_data, sig_type): # initiate a cred exchange with a json-ld credential agent = context.active_agents[issuer] @@ -652,7 +652,9 @@ def step_impl(context, holder, schema_name, credential_data, issuer, key_type, s Given "''' + issuer + """" is ready to issue a json-ld credential for """ - + schema_name + " with " + key_type + + schema_name + + " with " + + key_type + ''' And "''' + holder @@ -664,7 +666,9 @@ def step_impl(context, holder, schema_name, credential_data, issuer, key_type, s + '''" offers "''' + holder + """" a json-ld credential with data """ - + credential_data + " and " + sig_type + + credential_data + + " and " + + sig_type + ''' Then "''' + holder diff --git a/demo/features/steps/0454-present-proof.py b/demo/features/steps/0454-present-proof.py index 677612e007..6440ee3d1a 100644 --- a/demo/features/steps/0454-present-proof.py +++ b/demo/features/steps/0454-present-proof.py @@ -116,9 +116,7 @@ def step_impl(context, verifier, request_for_proof, prover, sig_type): { "uri": "https://www.w3.org/2018/credentials#VerifiableCredential" }, - { - "uri": "https://w3id.org/citizenship#PermanentResident" - }, + {"uri": "https://w3id.org/citizenship#PermanentResident"}, ], "constraints": { "limit_disclosure": "required", diff --git a/demo/features/steps/0586-sign-transaction.py b/demo/features/steps/0586-sign-transaction.py index 406db972a0..bf5a8a0c3c 100644 --- a/demo/features/steps/0586-sign-transaction.py +++ b/demo/features/steps/0586-sign-transaction.py @@ -141,9 +141,7 @@ def step_impl(context, agent_name, schema_name): ) if agent["agent"].endorser_role and agent["agent"].endorser_role == "author": - assert ( - created_txn["registration_metadata"]["txn"]["state"] == "request_sent" - ) + assert created_txn["registration_metadata"]["txn"]["state"] == "request_sent" assert created_txn["schema_state"]["state"] == "wait" assert created_txn["job_id"] is not None @@ -434,9 +432,7 @@ def step_impl(context, agent_name): context.rev_reg_id = rev_reg_id -@when( - '"{agent_name}" has activated the tails file, and 
uploaded it to the tails server' -) +@when('"{agent_name}" has activated the tails file, and uploaded it to the tails server') @then( '"{agent_name}" has has activated the tails file, and uploaded it to the tails server' ) @@ -778,9 +774,7 @@ def step_impl(context, holder_name, issuer_name): assert revoc_status_bool is True -@given( - 'Without endorser, "{agent_name}" authors a schema transaction with {schema_name}' -) +@given('Without endorser, "{agent_name}" authors a schema transaction with {schema_name}') def step_impl(context, agent_name, schema_name): agent = context.active_agents[agent_name] diff --git a/demo/features/steps/revocation-api.py b/demo/features/steps/revocation-api.py index b1af0a2811..ddd48c6751 100644 --- a/demo/features/steps/revocation-api.py +++ b/demo/features/steps/revocation-api.py @@ -56,9 +56,7 @@ def step_impl(context, issuer, count=None): f"\nlists revocation registries ({count} creds) = = = = = = = = = = = = = =" ) else: - print( - "\nlists revocation registries = = = = = = = = = = = = = = = = = = = = = =" - ) + print("\nlists revocation registries = = = = = = = = = = = = = = = = = = = = = =") print("\ncreated_response: ", len(created_response["rev_reg_ids"])) print("full_response: ", len(full_response["rev_reg_ids"])) print("decommissioned_response:", len(decommissioned_response["rev_reg_ids"])) diff --git a/demo/features/steps/taa-txn-author-agreement.py b/demo/features/steps/taa-txn-author-agreement.py index 6dc43cc22f..ee15b250e1 100644 --- a/demo/features/steps/taa-txn-author-agreement.py +++ b/demo/features/steps/taa-txn-author-agreement.py @@ -1,13 +1,9 @@ from behave import given, when, then -import json -from time import sleep -import time from bdd_support.agent_backchannel_client import ( agent_container_GET, agent_container_POST, agent_container_PUT, - async_sleep, ) diff --git a/demo/playground/examples/tests/__init__.py b/demo/playground/examples/tests/__init__.py index c35b313157..6e1d83c693 100644 --- a/demo/playground/examples/tests/__init__.py +++ b/demo/playground/examples/tests/__init__.py @@ -209,9 +209,7 @@ def get(self, path: str, return_json: bool = True, fail_with: str = None, **kwar return wrapped_get(self.url, path, **kwargs) - def post( - self, path: str, return_json: bool = True, fail_with: str = None, **kwargs - ): + def post(self, path: str, return_json: bool = True, fail_with: str = None, **kwargs): """Do post to agent endpoint.""" wrapped_post = post if fail_with: diff --git a/demo/playground/examples/tests/test_mediator_ping_agents.py b/demo/playground/examples/tests/test_mediator_ping_agents.py index 9f37e58e89..12eb0155af 100644 --- a/demo/playground/examples/tests/test_mediator_ping_agents.py +++ b/demo/playground/examples/tests/test_mediator_ping_agents.py @@ -99,7 +99,7 @@ def initialize_mediation(agent: Agent, invitation): @pytest.fixture(scope="session") def faber_mediator(faber, mediation_invite): - logger.info(f"faber_mediator...") + logger.info("faber_mediator...") result = initialize_mediation(faber, mediation_invite) logger.info(f"...faber_mediator = {result}") yield result @@ -107,7 +107,7 @@ def faber_mediator(faber, mediation_invite): @pytest.fixture(scope="session") def alice_mediator(alice, mediation_invite): - logger.info(f"alice_mediator...") + logger.info("alice_mediator...") result = initialize_mediation(alice, mediation_invite) logger.info(f"...alice_mediator = {result}") yield result @@ -115,7 +115,7 @@ def alice_mediator(alice, mediation_invite): @pytest.fixture(scope="session") def 
multi_one_mediator(multi_one, mediation_invite): - logger.info(f"multi_one_mediator...") + logger.info("multi_one_mediator...") result = initialize_mediation(multi_one, mediation_invite) logger.info(f"...multi_one_mediator = {result}") yield result @@ -145,9 +145,7 @@ def test_mediated_single_tenants( mediation_invite_json = jsonlib.loads(mediation_invite) logger.info(f"invitation service endpoint = {invite['serviceEndpoint']}") - logger.info( - f"mediator service endpoint = {mediation_invite_json['serviceEndpoint']}" - ) + logger.info(f"mediator service endpoint = {mediation_invite_json['serviceEndpoint']}") assert invite["serviceEndpoint"] == mediation_invite_json["serviceEndpoint"] resp = alice.receive_invite(invite, alias="faber", auto_accept="true") @@ -215,9 +213,7 @@ def test_mediated_multi_tenants( mediation_invite_json = jsonlib.loads(mediation_invite) logger.info(f"invitation service endpoint = {invite['serviceEndpoint']}") - logger.info( - f"mediator service endpoint = {mediation_invite_json['serviceEndpoint']}" - ) + logger.info(f"mediator service endpoint = {mediation_invite_json['serviceEndpoint']}") assert invite["serviceEndpoint"] == mediation_invite_json["serviceEndpoint"] resp = alice.receive_invite(invite, alias="multi_one", auto_accept="true") diff --git a/demo/playground/examples/tests/test_ping_agents.py b/demo/playground/examples/tests/test_ping_agents.py index b7c05b7461..86e7b65188 100644 --- a/demo/playground/examples/tests/test_ping_agents.py +++ b/demo/playground/examples/tests/test_ping_agents.py @@ -40,7 +40,7 @@ def alice_faber_connection(faber, alice): logger.info("faber create invitation to alice") invite = faber.create_invitation(auto_accept="true")["invitation"] logger.info(f"invitation = {invite}") - logger.info(f"alice receive invitation") + logger.info("alice receive invitation") resp = alice.receive_invite(invite, auto_accept="true") result = resp["connection_id"] logger.info(f"alice/faber connection_id = {result}") @@ -53,7 +53,7 @@ def faber_alice_connection(faber, alice): logger.info("alice create invitation to faber") invite = alice.create_invitation(auto_accept="true")["invitation"] logger.info(f"invitation = {invite}") - logger.info(f"faber receive invitation") + logger.info("faber receive invitation") resp = faber.receive_invite(invite, auto_accept="true") result = resp["connection_id"] logger.info(f"faber/alice connection_id = {result}") @@ -66,7 +66,7 @@ def alice_multi_one_connection(multi_one, alice): logger.info("multi_one create invitation to alice") invite = multi_one.create_invitation(auto_accept="true")["invitation"] logger.info(f"invitation = {invite}") - logger.info(f"alice receive invitation") + logger.info("alice receive invitation") resp = alice.receive_invite(invite, auto_accept="true") result = resp["connection_id"] logger.info(f"alice/multi_one connection_id = {result}") @@ -79,7 +79,7 @@ def multi_one_alice_connection(multi_one, alice): logger.info("alice create invitation to multi_one") invite = alice.create_invitation(auto_accept="true")["invitation"] logger.info(f"invitation = {invite}") - logger.info(f"faber receive invitation") + logger.info("faber receive invitation") resp = multi_one.receive_invite(invite, auto_accept="true") result = resp["connection_id"] logger.info(f"multi_one/alice connection_id = {result}") diff --git a/demo/runners/acme.py b/demo/runners/acme.py index 898ff72846..b2115bfc20 100644 --- a/demo/runners/acme.py +++ b/demo/runners/acme.py @@ -1,9 +1,7 @@ import asyncio -import json import logging 
import os import sys -from aiohttp import ClientError sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) # noqa @@ -16,7 +14,6 @@ check_requires, log_msg, log_status, - log_timer, prompt, prompt_loop, ) @@ -64,9 +61,7 @@ async def handle_oob_invitation(self, message): pass async def handle_connections(self, message): - print( - self.ident, "handle_connections", message["state"], message["rfc23_state"] - ) + print(self.ident, "handle_connections", message["state"], message["rfc23_state"]) conn_id = message["connection_id"] if (not self.connection_id) and message["rfc23_state"] == "invitation-sent": print(self.ident, "set connection id", conn_id) diff --git a/demo/runners/agent_container.py b/demo/runners/agent_container.py index 8ef4feb034..c2ad39fe5f 100644 --- a/demo/runners/agent_container.py +++ b/demo/runners/agent_container.py @@ -227,9 +227,7 @@ async def handle_issue_credential(self, message): cred_attrs = self.cred_attrs[message["credential_definition_id"]] cred_preview = { "@type": CRED_PREVIEW_TYPE, - "attributes": [ - {"name": n, "value": v} for (n, v) in cred_attrs.items() - ], + "attributes": [{"name": n, "value": v} for (n, v) in cred_attrs.items()], } try: cred_ex_rec = await self.admin_POST( @@ -447,9 +445,7 @@ async def handle_present_proof_v2_0(self, message): pres_request_indy = ( message["by_format"].get("pres_request", {}).get("indy") ) - pres_request_dif = ( - message["by_format"].get("pres_request", {}).get("dif") - ) + pres_request_dif = message["by_format"].get("pres_request", {}).get("dif") request = {} if not pres_request_dif and not pres_request_indy: @@ -632,9 +628,7 @@ async def generate_invitation( self._connection_ready = asyncio.Future() with log_timer("Generate invitation duration:"): # Generate an invitation - log_status( - "#7 Create a connection to alice and print out the invite details" - ) + log_status("#7 Create a connection to alice and print out the invite details") invi_rec = await self.get_invite( use_did_exchange, auto_accept=auto_accept, @@ -861,9 +855,7 @@ async def initialize( raise Exception("Endorser agent returns None :-(") # set the endorser invite so the agent can auto-connect - self.agent.endorser_invite = ( - self.endorser_agent.endorser_multi_invitation_url - ) + self.agent.endorser_invite = self.endorser_agent.endorser_multi_invitation_url self.agent.endorser_did = self.endorser_agent.endorser_public_did else: self.endorser_agent = None @@ -899,25 +891,17 @@ async def initialize( if self.mediation: # we need to pre-connect the agent to its mediator self.agent.log("Connect wallet to mediator ...") - if not await connect_wallet_to_mediator( - self.agent, self.mediator_agent - ): + if not await connect_wallet_to_mediator(self.agent, self.mediator_agent): raise Exception("Mediation setup FAILED :-(") if self.endorser_agent: self.agent.log("Connect wallet to endorser ...") - if not await connect_wallet_to_endorser( - self.agent, self.endorser_agent - ): + if not await connect_wallet_to_endorser(self.agent, self.endorser_agent): raise Exception("Endorser setup FAILED :-(") if self.taa_accept: await self.agent.taa_accept() # if we are an author, create our public DID here ... 
- if ( - self.endorser_role - and self.endorser_role == "author" - and self.endorser_agent - ): + if self.endorser_role and self.endorser_role == "author" and self.endorser_agent: if self.public_did and self.cred_type != CRED_FORMAT_JSON_LD: new_did = await self.agent.admin_POST("/wallet/did/create") self.agent.did = new_did["result"]["did"] @@ -1063,9 +1047,7 @@ async def request_proof(self, proof_request, explicit_revoc_required: bool = Fal if self.cred_type in [CRED_FORMAT_INDY, CRED_FORMAT_VC_DI]: indy_proof_request = { "name": ( - proof_request["name"] - if "name" in proof_request - else "Proof of stuff" + proof_request["name"] if "name" in proof_request else "Proof of stuff" ), "version": ( proof_request["version"] if "version" in proof_request else "1.0" @@ -1355,9 +1337,7 @@ def arg_parser(ident: str = None, port: int = 8020): metavar=(""), help="API level (10 or 20 (default))", ) - parser.add_argument( - "--timing", action="store_true", help="Enable timing information" - ) + parser.add_argument("--timing", action="store_true", help="Enable timing information") parser.add_argument( "--multitenant", action="store_true", help="Enable multitenancy options" ) @@ -1555,9 +1535,7 @@ async def create_agent_with_args(args, ident: str = None, extra_args: list = Non reuse_connections = "reuse_connections" in args and args.reuse_connections # if reuse_connections and aip != 20: # raise Exception("Can only specify `--reuse-connections` with AIP 2.0") - multi_use_invitations = ( - "multi_use_invitations" in args and args.multi_use_invitations - ) + multi_use_invitations = "multi_use_invitations" in args and args.multi_use_invitations if multi_use_invitations and aip != 20: raise Exception("Can only specify `--multi-use-invitations` with AIP 2.0") public_did_connections = ( diff --git a/demo/runners/faber.py b/demo/runners/faber.py index 45b74f280d..19d7ba0b05 100644 --- a/demo/runners/faber.py +++ b/demo/runners/faber.py @@ -377,11 +377,7 @@ def generate_proof_request_web_request( }, }, {"path": ["$.credentialSubject.name"]}, - { - "path": [ - "$.credentialSubject.degree" - ] - }, + {"path": ["$.credentialSubject.degree"]}, { "path": [ "$.credentialSubject.birthdate_dateint" diff --git a/demo/runners/performance.py b/demo/runners/performance.py index 194284b910..629bec9e96 100644 --- a/demo/runners/performance.py +++ b/demo/runners/performance.py @@ -532,9 +532,7 @@ async def check_received_pings(agent, issue_count, pb): recv_timer = faber.log_timer( f"Completed {issue_count} credential exchanges in" ) - batch_timer = faber.log_timer( - f"Started {batch_size} credential exchanges in" - ) + batch_timer = faber.log_timer(f"Started {batch_size} credential exchanges in") recv_timer.start() batch_timer.start() diff --git a/demo/runners/support/agent.py b/demo/runners/support/agent.py index 49d075f928..19587d668e 100644 --- a/demo/runners/support/agent.py +++ b/demo/runners/support/agent.py @@ -373,9 +373,7 @@ async def register_schema_and_creddef_indy( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = ( - tag if tag else (self.ident + "." + schema_name).replace(" ", "_") - ) + cred_def_tag = tag if tag else (self.ident + "." 
+ schema_name).replace(" ", "_") credential_definition_body = { "schema_id": schema_id, "support_revocation": support_revocation, @@ -405,9 +403,7 @@ async def register_schema_and_creddef_indy( credential_definition_response = await self.admin_GET( "/credential-definitions/created" ) - if 0 == len( - credential_definition_response["credential_definition_ids"] - ): + if 0 == len(credential_definition_response["credential_definition_ids"]): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -454,9 +450,7 @@ async def register_schema_and_creddef_anoncreds( log_msg("Schema ID:", schema_id) # Create a cred def for the schema - cred_def_tag = ( - tag if tag else (self.ident + "." + schema_name).replace(" ", "_") - ) + cred_def_tag = tag if tag else (self.ident + "." + schema_name).replace(" ", "_") max_cred_num = revocation_registry_size if revocation_registry_size else 0 credential_definition_body = { "credential_definition": { @@ -492,9 +486,7 @@ async def register_schema_and_creddef_anoncreds( credential_definition_response = await self.admin_GET( "/anoncreds/credential-definitions" ) - if 0 == len( - credential_definition_response["credential_definition_ids"] - ): + if 0 == len(credential_definition_response["credential_definition_ids"]): await asyncio.sleep(1.0) attempts = attempts - 1 credential_definition_id = credential_definition_response[ @@ -710,9 +702,7 @@ async def register_did( nym_info = data else: log_msg("using ledger: " + ledger_url + "/register") - resp = await self.client_session.post( - ledger_url + "/register", json=data - ) + resp = await self.client_session.post(ledger_url + "/register", json=data) if resp.status != 200: raise Exception( f"Error registering DID {data}, response code {resp.status}" @@ -925,9 +915,7 @@ def _process(self, args, env, loop): def get_process_args(self): return list( - flatten( - ([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args()) - ) + flatten(([PYTHON, "-m", "aries_cloudagent", "start"], self.get_agent_args())) ) async def start_process(self, python_path: str = None, wait: bool = True): @@ -1143,9 +1131,7 @@ async def admin_GET( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] response = await self.admin_request( "GET", path, None, text, params, headers=headers ) @@ -1197,9 +1183,7 @@ async def admin_POST( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] response = await self.admin_request( "POST", path, data, text, params, headers=headers ) @@ -1222,9 +1206,7 @@ async def admin_PATCH( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] return await self.admin_request( "PATCH", path, data, text, params, headers=headers ) @@ -1239,9 +1221,7 @@ async def admin_PUT( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] return await self.admin_request( "PUT", path, data, text, params, headers=headers ) @@ -1261,9 +1241,7 @@ async def 
admin_DELETE( if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] response = await self.admin_request( "DELETE", path, data, text, params, headers=headers ) @@ -1282,9 +1260,7 @@ async def admin_GET_FILE(self, path, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "GET", self.admin_url + path, params=params, headers=headers @@ -1300,9 +1276,7 @@ async def admin_PUT_FILE(self, files, url, params=None, headers=None) -> bytes: if self.multitenant: if not headers: headers = {} - headers["Authorization"] = ( - "Bearer " + self.managed_wallet_params["token"] - ) + headers["Authorization"] = "Bearer " + self.managed_wallet_params["token"] params = {k: v for (k, v) in (params or {}).items() if v is not None} resp = await self.client_session.request( "PUT", url, params=params, data=files, headers=headers @@ -1713,9 +1687,7 @@ async def handle_connections(self, message): # setup endorser meta-data on our connection log_msg("Setup endorser agent meta-data ...") await self.admin_POST( - "/transactions/" - + self.endorser_connection_id - + "/set-endorser-role", + "/transactions/" + self.endorser_connection_id + "/set-endorser-role", params={"transaction_my_job": "TRANSACTION_ENDORSER"}, )
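
The hunks above in the wallet tests, demo steps, and demo runners all follow one pattern: statements that were previously wrapped at black's default 88-column limit are joined back onto a single line, consistent with running ruff's formatter under a longer line-length setting, while the import and f-string removals correspond to pyflakes autofixes (F401 unused imports, F541 f-strings without placeholders). As a rough sketch only, assuming a raised line-length and a pyflakes/pycodestyle rule selection (the actual values are whatever the repository's pyproject.toml defines, not reproduced here), a configuration of this shape would produce the single-line style and the unused-import cleanups seen in these hunks:

    # Illustrative sketch only; not the project's actual settings.
    [tool.ruff]
    line-length = 100          # assumed value; anything above 88 lets most of these calls fit on one line

    [tool.ruff.lint]
    select = ["E", "F"]        # pycodestyle and pyflakes, e.g. F401 (unused import) and F541 (empty f-string)

With a configuration along these lines, `ruff check --fix` removes the dead imports and redundant f-prefixes shown in the demo hunks, and `ruff format` re-joins the call sites that no longer exceed the configured width.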