diff --git a/.github/workflows/check-python.yaml b/.github/workflows/check-python.yaml index ea7a5d9f..e8464b76 100644 --- a/.github/workflows/check-python.yaml +++ b/.github/workflows/check-python.yaml @@ -45,12 +45,13 @@ jobs: - name: Check types with mypy run: poetry run mypy - - name: Check docs are up to date - run: | - poetry run poe docs - git diff --quiet --exit-code \ - ':!docs/html/_sources/apidocs/algokit_utils/algokit_utils.md.txt' \ - ':!docs/html/apidocs/algokit_utils/algokit_utils.html' \ - ':!docs/html/searchindex.js' \ - ':!docs/markdown/apidocs/algokit_utils/algokit_utils.md' \ - docs/ + # TODO: uncomment after bulk of feature parity with ts is addressed + # - name: Check docs are up to date + # run: | + # poetry run poe docs + # git diff --quiet --exit-code \ + # ':!docs/html/_sources/apidocs/algokit_utils/algokit_utils.md.txt' \ + # ':!docs/html/apidocs/algokit_utils/algokit_utils.html' \ + # ':!docs/html/searchindex.js' \ + # ':!docs/markdown/apidocs/algokit_utils/algokit_utils.md' \ + # docs/ diff --git a/legacy_v2_tests/__init__.py b/legacy_v2_tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/app_client_test.json b/legacy_v2_tests/app_client_test.json similarity index 100% rename from tests/app_client_test.json rename to legacy_v2_tests/app_client_test.json diff --git a/tests/app_client_test.py b/legacy_v2_tests/app_client_test.py similarity index 100% rename from tests/app_client_test.py rename to legacy_v2_tests/app_client_test.py diff --git a/tests/app_multi_underscore_template_var.py b/legacy_v2_tests/app_multi_underscore_template_var.py similarity index 100% rename from tests/app_multi_underscore_template_var.py rename to legacy_v2_tests/app_multi_underscore_template_var.py diff --git a/tests/app_resolve.json b/legacy_v2_tests/app_resolve.json similarity index 100% rename from tests/app_resolve.json rename to legacy_v2_tests/app_resolve.json diff --git a/tests/app_v1.json b/legacy_v2_tests/app_v1.json 
similarity index 100% rename from tests/app_v1.json rename to legacy_v2_tests/app_v1.json diff --git a/tests/app_v2.json b/legacy_v2_tests/app_v2.json similarity index 100% rename from tests/app_v2.json rename to legacy_v2_tests/app_v2.json diff --git a/tests/app_v3.json b/legacy_v2_tests/app_v3.json similarity index 100% rename from tests/app_v3.json rename to legacy_v2_tests/app_v3.json diff --git a/legacy_v2_tests/conftest.py b/legacy_v2_tests/conftest.py new file mode 100644 index 00000000..e3997a2c --- /dev/null +++ b/legacy_v2_tests/conftest.py @@ -0,0 +1,211 @@ +import inspect +import math +import random +import subprocess +from pathlib import Path +from typing import TYPE_CHECKING +from uuid import uuid4 + +import algosdk.transaction +import pytest +from algokit_utils import ( + DELETABLE_TEMPLATE_NAME, + UPDATABLE_TEMPLATE_NAME, + Account, + ApplicationClient, + ApplicationSpecification, + EnsureBalanceParameters, + ensure_funded, + get_account, + get_algod_client, + get_indexer_client, + get_kmd_client_from_algod_client, + replace_template_variables, +) +from dotenv import load_dotenv + +from legacy_v2_tests import app_client_test + +if TYPE_CHECKING: + from algosdk.kmd import KMDClient + from algosdk.v2client.algod import AlgodClient + from algosdk.v2client.indexer import IndexerClient + + +@pytest.fixture(autouse=True, scope="session") +def _environment_fixture() -> None: + env_path = Path(__file__).parent / ".." 
/ "example.env" + load_dotenv(env_path) + + +def check_output_stability(logs: str, *, test_name: str | None = None) -> None: + """Test that the contract output hasn't changed for an Application, using git diff""" + caller_frame = inspect.stack()[1] + caller_path = Path(caller_frame.filename).resolve() + caller_dir = caller_path.parent + test_name = test_name or caller_frame.function + caller_stem = Path(caller_frame.filename).stem + output_dir = caller_dir / f"{caller_stem}.approvals" + output_dir.mkdir(exist_ok=True) + output_file = output_dir / f"{test_name}.approved.txt" + output_file_str = str(output_file) + output_file_did_exist = output_file.exists() + output_file.write_text(logs, encoding="utf-8") + + git_diff = subprocess.run( + [ + "git", + "diff", + "--exit-code", + "--no-ext-diff", + "--no-color", + output_file_str, + ], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + text=True, + check=False, + ) + # first fail if there are any changes to already committed files, you must manually add them in that case + assert git_diff.returncode == 0, git_diff.stdout + + # if first time running, fail in case of accidental change to output directory + if not output_file_did_exist: + pytest.fail( + f"New output folder created at {output_file_str} from test {test_name} - " + "if this was intentional, please commit the files to the git repo" + ) + + +def read_spec( + file_name: str, + *, + updatable: bool | None = None, + deletable: bool | None = None, + template_values: dict | None = None, +) -> ApplicationSpecification: + path = Path(__file__).parent / file_name + spec = ApplicationSpecification.from_json(Path(path).read_text(encoding="utf-8")) + + template_variables = template_values or {} + if updatable is not None: + template_variables["UPDATABLE"] = int(updatable) + + if deletable is not None: + template_variables["DELETABLE"] = int(deletable) + + spec.approval_program = ( + replace_template_variables(spec.approval_program, template_variables) + 
.replace(f"// {UPDATABLE_TEMPLATE_NAME}", "// updatable") + .replace(f"// {DELETABLE_TEMPLATE_NAME}", "// deletable") + ) + return spec + + +def get_specs( + updatable: bool | None = None, + deletable: bool | None = None, +) -> tuple[ApplicationSpecification, ApplicationSpecification, ApplicationSpecification]: + return ( + read_spec("app_v1.json", updatable=updatable, deletable=deletable), + read_spec("app_v2.json", updatable=updatable, deletable=deletable), + read_spec("app_v3.json", updatable=updatable, deletable=deletable), + ) + + +def get_unique_name() -> str: + name = str(uuid4()).replace("-", "") + assert name.isalnum() + return name + + +def is_opted_in(client_fixture: ApplicationClient) -> bool: + _, sender = client_fixture.resolve_signer_sender() + account_info = client_fixture.algod_client.account_info(sender) + assert isinstance(account_info, dict) + apps_local_state = account_info["apps-local-state"] + return any(x for x in apps_local_state if x["id"] == client_fixture.app_id) + + +@pytest.fixture(scope="session") +def algod_client() -> "AlgodClient": + return get_algod_client() + + +@pytest.fixture(scope="session") +def kmd_client(algod_client: "AlgodClient") -> "KMDClient": + return get_kmd_client_from_algod_client(algod_client) + + +@pytest.fixture(scope="session") +def indexer_client() -> "IndexerClient": + return get_indexer_client() + + +@pytest.fixture() +def creator(algod_client: "AlgodClient") -> Account: + creator_name = get_unique_name() + return get_account(algod_client, creator_name) + + +@pytest.fixture(scope="session") +def funded_account(algod_client: "AlgodClient") -> Account: + creator_name = get_unique_name() + return get_account(algod_client, creator_name) + + +@pytest.fixture(scope="session") +def app_spec() -> ApplicationSpecification: + app_spec = app_client_test.app.build() + path = Path(__file__).parent / "app_client_test.json" + path.write_text(app_spec.to_json()) + return read_spec("app_client_test.json", deletable=True, 
updatable=True, template_values={"VERSION": 1}) + + +def generate_test_asset(algod_client: "AlgodClient", sender: Account, total: int | None) -> int: + if total is None: + total = math.floor(random.random() * 100) + 20 + + decimals = 0 + asset_name = f"ASA ${math.floor(random.random() * 100) + 1}_${math.floor(random.random() * 100) + 1}_${total}" + + params = algod_client.suggested_params() + + txn = algosdk.transaction.AssetConfigTxn( + sender=sender.address, + sp=params, + total=total * 10**decimals, + decimals=decimals, + default_frozen=False, + unit_name="", + asset_name=asset_name, + manager=sender.address, + reserve=sender.address, + freeze=sender.address, + clawback=sender.address, + url="https://path/to/my/asset/details", + metadata_hash=None, + note=None, + lease=None, + rekey_to=None, + ) # type: ignore[no-untyped-call] + + signed_transaction = txn.sign(sender.private_key) # type: ignore[no-untyped-call] + algod_client.send_transaction(signed_transaction) + ptx = algod_client.pending_transaction_info(txn.get_txid()) # type: ignore[no-untyped-call] + + if isinstance(ptx, dict) and "asset-index" in ptx and isinstance(ptx["asset-index"], int): + return ptx["asset-index"] + else: + raise ValueError("Unexpected response from pending_transaction_info") + + +def assure_funds(algod_client: "AlgodClient", account: Account) -> None: + ensure_funded( + algod_client, + EnsureBalanceParameters( + account_to_fund=account, + min_spending_balance_micro_algos=300000, + min_funding_increment_micro_algos=1, + ), + ) diff --git a/tests/test_account.py b/legacy_v2_tests/test_account.py similarity index 88% rename from tests/test_account.py rename to legacy_v2_tests/test_account.py index 1536bd68..bb0ee272 100644 --- a/tests/test_account.py +++ b/legacy_v2_tests/test_account.py @@ -2,7 +2,7 @@ from algokit_utils import get_account -from tests.conftest import get_unique_name +from legacy_v2_tests.conftest import get_unique_name if TYPE_CHECKING: from algosdk.v2client.algod 
import AlgodClient diff --git a/tests/test_app.py b/legacy_v2_tests/test_app.py similarity index 100% rename from tests/test_app.py rename to legacy_v2_tests/test_app.py diff --git a/tests/test_app_client.py b/legacy_v2_tests/test_app_client.py similarity index 100% rename from tests/test_app_client.py rename to legacy_v2_tests/test_app_client.py diff --git a/tests/test_app_client_call.approvals/test_readonly_call_with_error.approved.txt b/legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error.approved.txt similarity index 100% rename from tests/test_app_client_call.approvals/test_readonly_call_with_error.approved.txt rename to legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error.approved.txt diff --git a/tests/test_app_client_call.approvals/test_readonly_call_with_error_debug_mode_disabled.approved.txt b/legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_debug_mode_disabled.approved.txt similarity index 100% rename from tests/test_app_client_call.approvals/test_readonly_call_with_error_debug_mode_disabled.approved.txt rename to legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_debug_mode_disabled.approved.txt diff --git a/tests/test_app_client_call.approvals/test_readonly_call_with_error_with_imported_source_map.approved.txt b/legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_imported_source_map.approved.txt similarity index 100% rename from tests/test_app_client_call.approvals/test_readonly_call_with_error_with_imported_source_map.approved.txt rename to legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_imported_source_map.approved.txt diff --git a/tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_missing_source_map.approved.txt b/legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_missing_source_map.approved.txt similarity index 
100% rename from tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_missing_source_map.approved.txt rename to legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_missing_source_map.approved.txt diff --git a/tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_source_map.approved.txt b/legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_source_map.approved.txt similarity index 100% rename from tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_source_map.approved.txt rename to legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_source_map.approved.txt diff --git a/tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_template_values.approved.txt b/legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_template_values.approved.txt similarity index 100% rename from tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_template_values.approved.txt rename to legacy_v2_tests/test_app_client_call.approvals/test_readonly_call_with_error_with_new_client_provided_template_values.approved.txt diff --git a/tests/test_app_client_call.py b/legacy_v2_tests/test_app_client_call.py similarity index 98% rename from tests/test_app_client_call.py rename to legacy_v2_tests/test_app_client_call.py index 6d72f037..67acd4d5 100644 --- a/tests/test_app_client_call.py +++ b/legacy_v2_tests/test_app_client_call.py @@ -19,7 +19,7 @@ ) from algosdk.transaction import ApplicationCallTxn, PaymentTxn -from tests.conftest import check_output_stability, get_unique_name +from legacy_v2_tests.conftest import check_output_stability, get_unique_name if TYPE_CHECKING: from algosdk.abi import Method @@ -40,7 +40,7 @@ def 
client_fixture(algod_client: "AlgodClient", app_spec: ApplicationSpecificati # If you need to run a test without debug mode, you can reference this mock within the test and disable it explicitly. @pytest.fixture(autouse=True) def mock_config() -> Generator[Mock, None, None]: - with patch("algokit_utils.application_client.config", new_callable=Mock) as mock_config: + with patch("algokit_utils._legacy_v2.application_client.config", new_callable=Mock) as mock_config: mock_config.debug = True mock_config.project_root = None yield mock_config diff --git a/tests/test_app_client_clear_state.py b/legacy_v2_tests/test_app_client_clear_state.py similarity index 97% rename from tests/test_app_client_clear_state.py rename to legacy_v2_tests/test_app_client_clear_state.py index c8de6eba..f26a7094 100644 --- a/tests/test_app_client_clear_state.py +++ b/legacy_v2_tests/test_app_client_clear_state.py @@ -8,7 +8,7 @@ ApplicationSpecification, ) -from tests.conftest import is_opted_in +from legacy_v2_tests.conftest import is_opted_in if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_close_out.approvals/test_abi_close_out_args_fails.approved.txt b/legacy_v2_tests/test_app_client_close_out.approvals/test_abi_close_out_args_fails.approved.txt similarity index 100% rename from tests/test_app_client_close_out.approvals/test_abi_close_out_args_fails.approved.txt rename to legacy_v2_tests/test_app_client_close_out.approvals/test_abi_close_out_args_fails.approved.txt diff --git a/tests/test_app_client_close_out.py b/legacy_v2_tests/test_app_client_close_out.py similarity index 96% rename from tests/test_app_client_close_out.py rename to legacy_v2_tests/test_app_client_close_out.py index b5ba1cd3..5ee5e9c6 100644 --- a/tests/test_app_client_close_out.py +++ b/legacy_v2_tests/test_app_client_close_out.py @@ -8,7 +8,7 @@ LogicError, ) -from tests.conftest import check_output_stability, is_opted_in +from legacy_v2_tests.conftest import 
check_output_stability, is_opted_in if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_create.approvals/test_create_auto_find_ambiguous.approved.txt b/legacy_v2_tests/test_app_client_create.approvals/test_create_auto_find_ambiguous.approved.txt similarity index 100% rename from tests/test_app_client_create.approvals/test_create_auto_find_ambiguous.approved.txt rename to legacy_v2_tests/test_app_client_create.approvals/test_create_auto_find_ambiguous.approved.txt diff --git a/tests/test_app_client_create.py b/legacy_v2_tests/test_app_client_create.py similarity index 99% rename from tests/test_app_client_create.py rename to legacy_v2_tests/test_app_client_create.py index be29a5e4..1da7bbf7 100644 --- a/tests/test_app_client_create.py +++ b/legacy_v2_tests/test_app_client_create.py @@ -13,7 +13,7 @@ from algosdk.atomic_transaction_composer import AccountTransactionSigner, AtomicTransactionComposer, TransactionSigner from algosdk.transaction import ApplicationCallTxn, GenericSignedTransaction, OnComplete, Transaction -from tests.conftest import check_output_stability, get_unique_name +from legacy_v2_tests.conftest import check_output_stability, get_unique_name if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_delete.approvals/test_abi_delete_args_fails.approved.txt b/legacy_v2_tests/test_app_client_delete.approvals/test_abi_delete_args_fails.approved.txt similarity index 100% rename from tests/test_app_client_delete.approvals/test_abi_delete_args_fails.approved.txt rename to legacy_v2_tests/test_app_client_delete.approvals/test_abi_delete_args_fails.approved.txt diff --git a/tests/test_app_client_delete.py b/legacy_v2_tests/test_app_client_delete.py similarity index 95% rename from tests/test_app_client_delete.py rename to legacy_v2_tests/test_app_client_delete.py index 6fc3ec5a..353bbfab 100644 --- a/tests/test_app_client_delete.py +++ 
b/legacy_v2_tests/test_app_client_delete.py @@ -8,7 +8,7 @@ LogicError, ) -from tests.conftest import check_output_stability +from legacy_v2_tests.conftest import check_output_stability if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_deploy.py b/legacy_v2_tests/test_app_client_deploy.py similarity index 96% rename from tests/test_app_client_deploy.py rename to legacy_v2_tests/test_app_client_deploy.py index d1c8eba5..4eed49b6 100644 --- a/tests/test_app_client_deploy.py +++ b/legacy_v2_tests/test_app_client_deploy.py @@ -10,7 +10,7 @@ transfer, ) -from tests.conftest import get_unique_name, read_spec +from legacy_v2_tests.conftest import get_unique_name, read_spec if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_opt_in.approvals/test_abi_update_args_fails.approved.txt b/legacy_v2_tests/test_app_client_opt_in.approvals/test_abi_update_args_fails.approved.txt similarity index 100% rename from tests/test_app_client_opt_in.approvals/test_abi_update_args_fails.approved.txt rename to legacy_v2_tests/test_app_client_opt_in.approvals/test_abi_update_args_fails.approved.txt diff --git a/tests/test_app_client_opt_in.py b/legacy_v2_tests/test_app_client_opt_in.py similarity index 96% rename from tests/test_app_client_opt_in.py rename to legacy_v2_tests/test_app_client_opt_in.py index 9244a826..816e96f0 100644 --- a/tests/test_app_client_opt_in.py +++ b/legacy_v2_tests/test_app_client_opt_in.py @@ -8,7 +8,7 @@ LogicError, ) -from tests.conftest import check_output_stability, is_opted_in +from legacy_v2_tests.conftest import check_output_stability, is_opted_in if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_prepare.py b/legacy_v2_tests/test_app_client_prepare.py similarity index 100% rename from tests/test_app_client_prepare.py rename to legacy_v2_tests/test_app_client_prepare.py diff --git a/tests/test_app_client_resolve.py 
b/legacy_v2_tests/test_app_client_resolve.py similarity index 97% rename from tests/test_app_client_resolve.py rename to legacy_v2_tests/test_app_client_resolve.py index 2482149a..6c6023f3 100644 --- a/tests/test_app_client_resolve.py +++ b/legacy_v2_tests/test_app_client_resolve.py @@ -6,7 +6,7 @@ DefaultArgumentDict, ) -from tests.conftest import read_spec +from legacy_v2_tests.conftest import read_spec if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_app_client_signer_sender.py b/legacy_v2_tests/test_app_client_signer_sender.py similarity index 100% rename from tests/test_app_client_signer_sender.py rename to legacy_v2_tests/test_app_client_signer_sender.py diff --git a/tests/test_app_client_template_values.py b/legacy_v2_tests/test_app_client_template_values.py similarity index 97% rename from tests/test_app_client_template_values.py rename to legacy_v2_tests/test_app_client_template_values.py index 0bf5ab70..5b27f320 100644 --- a/tests/test_app_client_template_values.py +++ b/legacy_v2_tests/test_app_client_template_values.py @@ -3,7 +3,7 @@ import algokit_utils import pytest -from tests.conftest import get_unique_name, read_spec +from legacy_v2_tests.conftest import get_unique_name, read_spec if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient @@ -153,7 +153,7 @@ def test_deploy_with_multi_underscore_template_value( indexer_client: "IndexerClient", funded_account: algokit_utils.Account, ) -> None: - from tests.app_multi_underscore_template_var import app + from legacy_v2_tests.app_multi_underscore_template_var import app some_value = 123 app_spec = app.build(algod_client) diff --git a/tests/test_app_client_update.approvals/test_abi_update_args_fails.approved.txt b/legacy_v2_tests/test_app_client_update.approvals/test_abi_update_args_fails.approved.txt similarity index 100% rename from tests/test_app_client_update.approvals/test_abi_update_args_fails.approved.txt rename to 
legacy_v2_tests/test_app_client_update.approvals/test_abi_update_args_fails.approved.txt diff --git a/tests/test_app_client_update.py b/legacy_v2_tests/test_app_client_update.py similarity index 96% rename from tests/test_app_client_update.py rename to legacy_v2_tests/test_app_client_update.py index 24dcf366..60cd10d9 100644 --- a/tests/test_app_client_update.py +++ b/legacy_v2_tests/test_app_client_update.py @@ -8,7 +8,7 @@ LogicError, ) -from tests.conftest import check_output_stability +from legacy_v2_tests.conftest import check_output_stability if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/tests/test_asset.py b/legacy_v2_tests/test_asset.py similarity index 98% rename from tests/test_asset.py rename to legacy_v2_tests/test_asset.py index d5612d26..3d75fa86 100644 --- a/tests/test_asset.py +++ b/legacy_v2_tests/test_asset.py @@ -16,7 +16,7 @@ from algosdk.kmd import KMDClient from algosdk.v2client.algod import AlgodClient -from tests.conftest import assure_funds, generate_test_asset, get_unique_name +from legacy_v2_tests.conftest import assure_funds, generate_test_asset, get_unique_name @pytest.fixture() diff --git a/tests/test_debug_utils.approvals/test_build_teal_sourcemaps.approved.txt b/legacy_v2_tests/test_debug_utils.approvals/test_build_teal_sourcemaps.approved.txt similarity index 100% rename from tests/test_debug_utils.approvals/test_build_teal_sourcemaps.approved.txt rename to legacy_v2_tests/test_debug_utils.approvals/test_build_teal_sourcemaps.approved.txt diff --git a/tests/test_debug_utils.approvals/test_build_teal_sourcemaps_without_sources.approved.txt b/legacy_v2_tests/test_debug_utils.approvals/test_build_teal_sourcemaps_without_sources.approved.txt similarity index 100% rename from tests/test_debug_utils.approvals/test_build_teal_sourcemaps_without_sources.approved.txt rename to legacy_v2_tests/test_debug_utils.approvals/test_build_teal_sourcemaps_without_sources.approved.txt diff --git 
a/tests/test_debug_utils.py b/legacy_v2_tests/test_debug_utils.py similarity index 95% rename from tests/test_debug_utils.py rename to legacy_v2_tests/test_debug_utils.py index 459bd126..9b6d8ca8 100644 --- a/tests/test_debug_utils.py +++ b/legacy_v2_tests/test_debug_utils.py @@ -9,8 +9,8 @@ persist_sourcemaps, simulate_and_persist_response, ) +from algokit_utils._legacy_v2.application_client import ApplicationClient from algokit_utils.account import get_account -from algokit_utils.application_client import ApplicationClient from algokit_utils.application_specification import ApplicationSpecification from algokit_utils.common import Program from algokit_utils.models import Account @@ -21,7 +21,7 @@ ) from algosdk.transaction import PaymentTxn -from tests.conftest import check_output_stability, get_unique_name +from legacy_v2_tests.conftest import check_output_stability, get_unique_name if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient @@ -123,7 +123,7 @@ def test_simulate_and_persist_response_via_app_call( client_fixture: ApplicationClient, mocker: Mock, ) -> None: - mock_config = mocker.patch("algokit_utils.application_client.config") + mock_config = mocker.patch("algokit_utils._legacy_v2.application_client.config") mock_config.debug = True mock_config.trace_all = True mock_config.trace_buffer_size_mb = 256 @@ -145,7 +145,7 @@ def test_simulate_and_persist_response_via_app_call( def test_simulate_and_persist_response( tmp_path_factory: pytest.TempPathFactory, client_fixture: ApplicationClient, mocker: Mock, funded_account: Account ) -> None: - mock_config = mocker.patch("algokit_utils.application_client.config") + mock_config = mocker.patch("algokit_utils._legacy_v2.application_client.config") mock_config.debug = True mock_config.trace_all = True cwd = tmp_path_factory.mktemp("cwd") diff --git a/tests/test_deploy.approvals/test_comment_stripping.approved.txt b/legacy_v2_tests/test_deploy.approvals/test_comment_stripping.approved.txt similarity index 
100% rename from tests/test_deploy.approvals/test_comment_stripping.approved.txt rename to legacy_v2_tests/test_deploy.approvals/test_comment_stripping.approved.txt diff --git a/tests/test_deploy.approvals/test_template_substitution.approved.txt b/legacy_v2_tests/test_deploy.approvals/test_template_substitution.approved.txt similarity index 100% rename from tests/test_deploy.approvals/test_template_substitution.approved.txt rename to legacy_v2_tests/test_deploy.approvals/test_template_substitution.approved.txt diff --git a/tests/test_deploy.py b/legacy_v2_tests/test_deploy.py similarity index 93% rename from tests/test_deploy.py rename to legacy_v2_tests/test_deploy.py index 6a806f5d..51708f52 100644 --- a/tests/test_deploy.py +++ b/legacy_v2_tests/test_deploy.py @@ -1,9 +1,9 @@ from algokit_utils import ( replace_template_variables, ) -from algokit_utils.deploy import strip_comments +from algokit_utils._legacy_v2.deploy import strip_comments -from tests.conftest import check_output_stability +from legacy_v2_tests.conftest import check_output_stability def test_template_substitution() -> None: diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_and_on_update_equals_replace_app_succeeds.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_and_on_update_equals_replace_app_succeeds.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_and_on_update_equals_replace_app_succeeds.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_and_on_update_equals_replace_app_succeeds.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_cannot_determine_if_updatable.approved.txt 
b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_cannot_determine_if_updatable.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_cannot_determine_if_updatable.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_cannot_determine_if_updatable.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_fails.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_fails.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_fails.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_immutable_app_fails.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_and_on_schema_break_equals_replace_app_fails.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_and_on_schema_break_equals_replace_app_fails.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_and_on_schema_break_equals_replace_app_fails.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_and_on_schema_break_equals_replace_app_fails.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_cannot_determine_if_deletable.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_cannot_determine_if_deletable.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_cannot_determine_if_deletable.approved.txt 
rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_cannot_determine_if_deletable.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_fails.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_fails.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_fails.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_fails.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_on_update_equals_replace_app_fails_and_doesnt_create_2nd_app.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_on_update_equals_replace_app_fails_and_doesnt_create_2nd_app.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_on_update_equals_replace_app_fails_and_doesnt_create_2nd_app.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_permanent_app_on_update_equals_replace_app_fails_and_doesnt_create_2nd_app.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_updatable_app_succeeds.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_updatable_app_succeeds.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_updatable_app_succeeds.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_existing_updatable_app_succeeds.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_app_with_no_existing_app_succeeds.approved.txt 
b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_no_existing_app_succeeds.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_app_with_no_existing_app_succeeds.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_app_with_no_existing_app_succeeds.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_templated_app_with_changing_parameters_succeeds.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_templated_app_with_changing_parameters_succeeds.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_templated_app_with_changing_parameters_succeeds.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_templated_app_with_changing_parameters_succeeds.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.Yes].approved.txt rename to 
legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.No-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.Fail-Updatable.Yes-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.No].approved.txt rename to 
legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt rename to 
legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change[OnSchemaBreak.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change_append.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change_append.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change_append.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_schema_breaking_change_append.approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.No-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.No].approved.txt similarity index 100% 
rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.Fail-Updatable.Yes-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.No-Deletable.Yes].approved.txt diff --git 
a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.ReplaceApp-Updatable.Yes-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.Yes].approved.txt similarity index 100% rename 
from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.No-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.No].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.No].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.No].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.No].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.Yes].approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.Yes].approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.Yes].approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update[OnUpdate.UpdateApp-Updatable.Yes-Deletable.Yes].approved.txt diff --git a/tests/test_deploy_scenarios.approvals/test_deploy_with_update_append.approved.txt b/legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update_append.approved.txt similarity index 100% rename from tests/test_deploy_scenarios.approvals/test_deploy_with_update_append.approved.txt rename to legacy_v2_tests/test_deploy_scenarios.approvals/test_deploy_with_update_append.approved.txt diff --git a/tests/test_deploy_scenarios.py b/legacy_v2_tests/test_deploy_scenarios.py similarity index 98% rename from tests/test_deploy_scenarios.py rename to 
legacy_v2_tests/test_deploy_scenarios.py index d2740876..309fe4a3 100644 --- a/tests/test_deploy_scenarios.py +++ b/legacy_v2_tests/test_deploy_scenarios.py @@ -20,7 +20,7 @@ get_localnet_default_account, ) -from tests.conftest import check_output_stability, get_specs, get_unique_name, read_spec +from legacy_v2_tests.conftest import check_output_stability, get_specs, get_unique_name, read_spec logger = logging.getLogger(__name__) @@ -29,7 +29,7 @@ # If you need to run a test without debug mode, you can reference this mock within the test and disable it explicitly. @pytest.fixture(autouse=True) def mock_config(tmp_path_factory: pytest.TempPathFactory) -> Generator[Mock, None, None]: - with patch("algokit_utils.application_client.config", new_callable=Mock) as mock_config: + with patch("algokit_utils._legacy_v2.application_client.config", new_callable=Mock) as mock_config: mock_config.debug = True cwd = tmp_path_factory.mktemp("cwd") mock_config.project_root = cwd diff --git a/tests/test_dispenser_api_client.py b/legacy_v2_tests/test_dispenser_api_client.py similarity index 100% rename from tests/test_dispenser_api_client.py rename to legacy_v2_tests/test_dispenser_api_client.py diff --git a/tests/test_network_clients.py b/legacy_v2_tests/test_network_clients.py similarity index 100% rename from tests/test_network_clients.py rename to legacy_v2_tests/test_network_clients.py diff --git a/tests/test_transfer.approvals/test_transfer_algo_max_fee_fails.approved.txt b/legacy_v2_tests/test_transfer.approvals/test_transfer_algo_max_fee_fails.approved.txt similarity index 100% rename from tests/test_transfer.approvals/test_transfer_algo_max_fee_fails.approved.txt rename to legacy_v2_tests/test_transfer.approvals/test_transfer_algo_max_fee_fails.approved.txt diff --git a/tests/test_transfer.approvals/test_transfer_asset_max_fee_fails.approved.txt b/legacy_v2_tests/test_transfer.approvals/test_transfer_asset_max_fee_fails.approved.txt similarity index 100% rename from 
tests/test_transfer.approvals/test_transfer_asset_max_fee_fails.approved.txt rename to legacy_v2_tests/test_transfer.approvals/test_transfer_asset_max_fee_fails.approved.txt diff --git a/tests/test_transfer.py b/legacy_v2_tests/test_transfer.py similarity index 98% rename from tests/test_transfer.py rename to legacy_v2_tests/test_transfer.py index 7e13cdfb..8253a5eb 100644 --- a/tests/test_transfer.py +++ b/legacy_v2_tests/test_transfer.py @@ -24,8 +24,8 @@ from algosdk.util import algos_to_microalgos from pytest_httpx import HTTPXMock -from tests.conftest import assure_funds, check_output_stability, generate_test_asset, get_unique_name -from tests.test_network_clients import DEFAULT_TOKEN +from legacy_v2_tests.conftest import assure_funds, check_output_stability, generate_test_asset, get_unique_name +from legacy_v2_tests.test_network_clients import DEFAULT_TOKEN if TYPE_CHECKING: from algosdk.kmd import KMDClient diff --git a/poetry.lock b/poetry.lock index eb3dedd7..3544afa0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
[[package]] name = "alabaster" @@ -2072,6 +2072,26 @@ files = [ {file = "semver-2.13.0.tar.gz", hash = "sha256:fa0fe2722ee1c3f57eac478820c3a5ae2f624af8264cbdf9000c980ff7f75e3f"}, ] +[[package]] +name = "setuptools" +version = "75.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] + [[package]] name = "six" version = "1.16.0" @@ -2636,4 
+2656,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.10" -content-hash = "59127574db0011d8eb6e5a2d55be3048e9cb4a68e34c9c3e5f4a836d488b7318" +content-hash = "66e85df44cca4d3edccb50f730dfb4e9dccf93582e78fa0074dc9b47baa925e2" diff --git a/pyproject.toml b/pyproject.toml index f82ba2d8..4e3a99a9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -34,6 +34,7 @@ pytest-httpx = "^0.21.3" pytest-xdist = "^3.4.0" sphinx-markdown-builder = "^0.6.6" linkify-it-py = "^2.0.3" +setuptools = "^75.2.0" [build-system] requires = ["poetry-core"] diff --git a/src/algokit_utils/__init__.py b/src/algokit_utils/__init__.py index 02a5e341..77959758 100644 --- a/src/algokit_utils/__init__.py +++ b/src/algokit_utils/__init__.py @@ -1,7 +1,7 @@ from algokit_utils._debugging import PersistSourceMapInput, persist_sourcemaps, simulate_and_persist_response -from algokit_utils._ensure_funded import EnsureBalanceParameters, EnsureFundedResponse, ensure_funded -from algokit_utils._transfer import TransferAssetParameters, TransferParameters, transfer, transfer_asset -from algokit_utils.account import ( +from algokit_utils._legacy_v2._ensure_funded import EnsureBalanceParameters, EnsureFundedResponse, ensure_funded +from algokit_utils._legacy_v2._transfer import TransferAssetParameters, TransferParameters, transfer, transfer_asset +from algokit_utils._legacy_v2.account import ( create_kmd_wallet_account, get_account, get_account_from_mnemonic, @@ -10,14 +10,14 @@ get_localnet_default_account, get_or_create_kmd_wallet_account, ) -from algokit_utils.application_client import ( +from algokit_utils._legacy_v2.application_client import ( ApplicationClient, execute_atc_with_logic_error, get_next_version, get_sender_from_signer, num_extra_program_pages, ) -from algokit_utils.application_specification import ( +from algokit_utils._legacy_v2.application_specification import ( ApplicationSpecification, AppSpecStateDict, CallConfig, @@ -27,9 +27,9 @@ MethodHints, 
OnCompleteActionName, ) -from algokit_utils.asset import opt_in, opt_out -from algokit_utils.common import Program -from algokit_utils.deploy import ( +from algokit_utils._legacy_v2.asset import opt_in, opt_out +from algokit_utils._legacy_v2.common import Program +from algokit_utils._legacy_v2.deploy import ( DELETABLE_TEMPLATE_NAME, NOTE_PREFIX, UPDATABLE_TEMPLATE_NAME, @@ -56,32 +56,24 @@ get_creator_apps, replace_template_variables, ) -from algokit_utils.dispenser_api import ( - DISPENSER_ACCESS_TOKEN_KEY, - DISPENSER_REQUEST_TIMEOUT, - DispenserFundResponse, - DispenserLimitResponse, - TestNetDispenserApiClient, -) -from algokit_utils.logic_error import LogicError -from algokit_utils.models import ( +from algokit_utils._legacy_v2.logic_error import LogicError +from algokit_utils._legacy_v2.models import ( ABIArgsDict, ABIMethod, ABITransactionResponse, Account, - CommonCallParameters, # noqa: F401 - CommonCallParametersDict, # noqa: F401 + CommonCallParameters, + CommonCallParametersDict, CreateCallParameters, CreateCallParametersDict, CreateTransactionParameters, OnCompleteCallParameters, OnCompleteCallParametersDict, - RawTransactionParameters, # noqa: F401 TransactionParameters, TransactionParametersDict, TransactionResponse, ) -from algokit_utils.network_clients import ( +from algokit_utils._legacy_v2.network_clients import ( AlgoClientConfig, get_algod_client, get_algonode_config, @@ -92,8 +84,16 @@ is_mainnet, is_testnet, ) +from algokit_utils.clients.dispenser_api_client import ( + DISPENSER_ACCESS_TOKEN_KEY, + DISPENSER_REQUEST_TIMEOUT, + DispenserFundResponse, + DispenserLimitResponse, + TestNetDispenserApiClient, +) __all__ = [ + # ==== LEGACY V2 EXPORTS BEGIN ==== "create_kmd_wallet_account", "get_account_from_mnemonic", "get_or_create_kmd_wallet_account", @@ -120,6 +120,8 @@ "CreateCallParameters", "CreateCallParametersDict", "CreateTransactionParameters", + "CommonCallParameters", + "CommonCallParametersDict", "DeployCallArgs", 
"DeployCreateCallArgs", "DeployCallArgsDict", @@ -179,4 +181,5 @@ "persist_sourcemaps", "PersistSourceMapInput", "simulate_and_persist_response", + # ==== LEGACY V2 EXPORTS END ==== ] diff --git a/src/algokit_utils/_debugging.py b/src/algokit_utils/_debugging.py index 5563a08e..e8c0ef52 100644 --- a/src/algokit_utils/_debugging.py +++ b/src/algokit_utils/_debugging.py @@ -14,7 +14,7 @@ from algosdk.encoding import checksum from algosdk.v2client.models import SimulateRequest, SimulateRequestTransactionGroup, SimulateTraceConfig -from algokit_utils.common import Program +from algokit_utils._legacy_v2.common import Program if typing.TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/src/algokit_utils/_legacy_v2/__init__.py b/src/algokit_utils/_legacy_v2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/_ensure_funded.py b/src/algokit_utils/_legacy_v2/_ensure_funded.py similarity index 95% rename from src/algokit_utils/_ensure_funded.py rename to src/algokit_utils/_legacy_v2/_ensure_funded.py index b80734e4..23c87860 100644 --- a/src/algokit_utils/_ensure_funded.py +++ b/src/algokit_utils/_legacy_v2/_ensure_funded.py @@ -5,14 +5,14 @@ from algosdk.transaction import SuggestedParams from algosdk.v2client.algod import AlgodClient -from algokit_utils._transfer import TransferParameters, transfer -from algokit_utils.account import get_dispenser_account -from algokit_utils.dispenser_api import ( +from algokit_utils._legacy_v2._transfer import TransferParameters, transfer +from algokit_utils._legacy_v2.account import get_dispenser_account +from algokit_utils._legacy_v2.models import Account +from algokit_utils._legacy_v2.network_clients import is_testnet +from algokit_utils.clients.dispenser_api_client import ( DispenserAssetName, TestNetDispenserApiClient, ) -from algokit_utils.models import Account -from algokit_utils.network_clients import is_testnet @dataclass(kw_only=True) diff --git 
a/src/algokit_utils/_transfer.py b/src/algokit_utils/_legacy_v2/_transfer.py similarity index 99% rename from src/algokit_utils/_transfer.py rename to src/algokit_utils/_legacy_v2/_transfer.py index 0103b172..baca5b2b 100644 --- a/src/algokit_utils/_transfer.py +++ b/src/algokit_utils/_legacy_v2/_transfer.py @@ -7,7 +7,7 @@ from algosdk.atomic_transaction_composer import AccountTransactionSigner from algosdk.transaction import AssetTransferTxn, PaymentTxn, SuggestedParams -from algokit_utils.models import Account +from algokit_utils._legacy_v2.models import Account if TYPE_CHECKING: from algosdk.v2client.algod import AlgodClient diff --git a/src/algokit_utils/_legacy_v2/account.py b/src/algokit_utils/_legacy_v2/account.py new file mode 100644 index 00000000..819a448f --- /dev/null +++ b/src/algokit_utils/_legacy_v2/account.py @@ -0,0 +1,183 @@ +import logging +import os +from typing import TYPE_CHECKING, Any + +from algosdk.account import address_from_private_key +from algosdk.mnemonic import from_private_key, to_private_key +from algosdk.util import algos_to_microalgos + +from algokit_utils._legacy_v2._transfer import TransferParameters, transfer +from algokit_utils._legacy_v2.models import Account +from algokit_utils._legacy_v2.network_clients import get_kmd_client_from_algod_client, is_localnet + +if TYPE_CHECKING: + from collections.abc import Callable + + from algosdk.kmd import KMDClient + from algosdk.v2client.algod import AlgodClient + +__all__ = [ + "create_kmd_wallet_account", + "get_account", + "get_account_from_mnemonic", + "get_dispenser_account", + "get_kmd_wallet_account", + "get_localnet_default_account", + "get_or_create_kmd_wallet_account", +] + +logger = logging.getLogger(__name__) +_DEFAULT_ACCOUNT_MINIMUM_BALANCE = 1_000_000_000 + + +def get_account_from_mnemonic(mnemonic: str) -> Account: + """Convert a mnemonic (25 word passphrase) into an Account""" + private_key = to_private_key(mnemonic) # type: ignore[no-untyped-call] + address = 
address_from_private_key(private_key) # type: ignore[no-untyped-call] + return Account(private_key=private_key, address=address) + + +def create_kmd_wallet_account(kmd_client: "KMDClient", name: str) -> Account: + """Creates a wallet with specified name""" + wallet_id = kmd_client.create_wallet(name, "")["id"] + wallet_handle = kmd_client.init_wallet_handle(wallet_id, "") + kmd_client.generate_key(wallet_handle) + + key_ids: list[str] = kmd_client.list_keys(wallet_handle) + account_key = key_ids[0] + + private_account_key = kmd_client.export_key(wallet_handle, "", account_key) + return get_account_from_mnemonic(from_private_key(private_account_key)) # type: ignore[no-untyped-call] + + +def get_or_create_kmd_wallet_account( + client: "AlgodClient", name: str, fund_with_algos: float = 1000, kmd_client: "KMDClient | None" = None +) -> Account: + """Returns a wallet with specified name, or creates one if not found""" + kmd_client = kmd_client or get_kmd_client_from_algod_client(client) + account = get_kmd_wallet_account(client, kmd_client, name) + + if account: + account_info = client.account_info(account.address) + assert isinstance(account_info, dict) + if account_info["amount"] > 0: + return account + logger.debug(f"Found existing account in LocalNet with name '{name}', but no funds in the account.") + else: + account = create_kmd_wallet_account(kmd_client, name) + + logger.debug( + f"Couldn't find existing account in LocalNet with name '{name}'. " + f"So created account {account.address} with keys stored in KMD." 
+ ) + + logger.debug(f"Funding account {account.address} with {fund_with_algos} ALGOs") + + if fund_with_algos: + transfer( + client, + TransferParameters( + from_account=get_dispenser_account(client), + to_address=account.address, + micro_algos=algos_to_microalgos(fund_with_algos), # type: ignore[no-untyped-call] + ), + ) + + return account + + +def _is_default_account(account: dict[str, Any]) -> bool: + return bool(account["status"] != "Offline" and account["amount"] > _DEFAULT_ACCOUNT_MINIMUM_BALANCE) + + +def get_localnet_default_account(client: "AlgodClient") -> Account: + """Returns the default Account in a LocalNet instance""" + if not is_localnet(client): + raise Exception("Can't get a default account from non LocalNet network") + + account = get_kmd_wallet_account( + client, get_kmd_client_from_algod_client(client), "unencrypted-default-wallet", _is_default_account + ) + assert account + return account + + +def get_dispenser_account(client: "AlgodClient") -> Account: + """Returns an Account based on DISPENSER_MNENOMIC environment variable or the default account on LocalNet""" + if is_localnet(client): + return get_localnet_default_account(client) + return get_account(client, "DISPENSER") + + +def get_kmd_wallet_account( + client: "AlgodClient", + kmd_client: "KMDClient", + name: str, + predicate: "Callable[[dict[str, Any]], bool] | None" = None, +) -> Account | None: + """Returns wallet matching specified name and predicate or None if not found""" + wallets: list[dict] = kmd_client.list_wallets() + + wallet = next((w for w in wallets if w["name"] == name), None) + if wallet is None: + return None + + wallet_id = wallet["id"] + wallet_handle = kmd_client.init_wallet_handle(wallet_id, "") + key_ids: list[str] = kmd_client.list_keys(wallet_handle) + matched_account_key = None + if predicate: + for key in key_ids: + account = client.account_info(key) + assert isinstance(account, dict) + if predicate(account): + matched_account_key = key + else: + 
matched_account_key = next(key_ids.__iter__(), None) + + if not matched_account_key: + return None + + private_account_key = kmd_client.export_key(wallet_handle, "", matched_account_key) + return get_account_from_mnemonic(from_private_key(private_account_key)) # type: ignore[no-untyped-call] + + +def get_account( + client: "AlgodClient", name: str, fund_with_algos: float = 1000, kmd_client: "KMDClient | None" = None +) -> Account: + """Returns an Algorand account with private key loaded by convention based on the given name identifier. + + # Convention + + **Non-LocalNet:** will load `os.environ[f"{name}_MNEMONIC"]` as a mnemonic secret + Be careful how the mnemonic is handled, never commit it into source control and ideally load it via a + secret storage service rather than the file system. + + **LocalNet:** will load the account from a KMD wallet called {name} and if that wallet doesn't exist it will + create it and fund the account for you + + This allows you to write code that will work seamlessly in production and local development (LocalNet) without + manual config locally (including when you reset the LocalNet). + + # Example + If you have a mnemonic secret loaded into `os.environ["ACCOUNT_MNEMONIC"]` then you can call the following to get + that private key loaded into an account object: + ```python + account = get_account('ACCOUNT', algod) + ``` + + If that code runs against LocalNet then a wallet called 'ACCOUNT' will automatically be created with an account + that is automatically funded with 1000 (default) ALGOs from the default LocalNet dispenser. 
+ """ + + mnemonic_key = f"{name.upper()}_MNEMONIC" + mnemonic = os.getenv(mnemonic_key) + if mnemonic: + return get_account_from_mnemonic(mnemonic) + + if is_localnet(client): + account = get_or_create_kmd_wallet_account(client, name, fund_with_algos, kmd_client) + os.environ[mnemonic_key] = from_private_key(account.private_key) # type: ignore[no-untyped-call] + return account + + raise Exception(f"Missing environment variable '{mnemonic_key}' when looking for account '{name}'") diff --git a/src/algokit_utils/_legacy_v2/application_client.py b/src/algokit_utils/_legacy_v2/application_client.py new file mode 100644 index 00000000..32851fa4 --- /dev/null +++ b/src/algokit_utils/_legacy_v2/application_client.py @@ -0,0 +1,1449 @@ +import base64 +import copy +import json +import logging +import re +import typing +from math import ceil +from pathlib import Path +from typing import Any, Literal, cast, overload + +import algosdk +from algosdk import transaction +from algosdk.abi import ABIType, Method, Returns +from algosdk.account import address_from_private_key +from algosdk.atomic_transaction_composer import ( + ABI_RETURN_HASH, + ABIResult, + AccountTransactionSigner, + AtomicTransactionComposer, + AtomicTransactionResponse, + LogicSigTransactionSigner, + MultisigTransactionSigner, + SimulateAtomicTransactionResponse, + TransactionSigner, + TransactionWithSigner, +) +from algosdk.constants import APP_PAGE_MAX_SIZE +from algosdk.logic import get_application_address +from algosdk.source_map import SourceMap + +import algokit_utils._legacy_v2.application_specification as au_spec +import algokit_utils._legacy_v2.deploy as au_deploy +from algokit_utils._debugging import ( + PersistSourceMapInput, + persist_sourcemaps, + simulate_and_persist_response, + simulate_response, +) +from algokit_utils._legacy_v2.common import Program +from algokit_utils._legacy_v2.logic_error import LogicError, parse_logic_error +from algokit_utils._legacy_v2.models import ( + ABIArgsDict, + 
ABIArgType, + ABIMethod, + ABITransactionResponse, + Account, + CreateCallParameters, + CreateCallParametersDict, + OnCompleteCallParameters, + OnCompleteCallParametersDict, + SimulationTrace, + TransactionParameters, + TransactionParametersDict, + TransactionResponse, +) +from algokit_utils.config import config + +if typing.TYPE_CHECKING: + from algosdk.v2client.algod import AlgodClient + from algosdk.v2client.indexer import IndexerClient + + +logger = logging.getLogger(__name__) + + +"""A dictionary `dict[str, Any]` representing ABI argument names and values""" + +__all__ = [ + "ApplicationClient", + "execute_atc_with_logic_error", + "get_next_version", + "get_sender_from_signer", + "num_extra_program_pages", +] + +"""Alias for {py:class}`pyteal.ABIReturnSubroutine`, {py:class}`algosdk.abi.method.Method` or a {py:class}`str` +representing an ABI method name or signature""" + + +def num_extra_program_pages(approval: bytes, clear: bytes) -> int: + """Calculate minimum number of extra_pages required for provided approval and clear programs""" + + return ceil(((len(approval) + len(clear)) - APP_PAGE_MAX_SIZE) / APP_PAGE_MAX_SIZE) + + +class ApplicationClient: + """A class that wraps an ARC-0032 app spec and provides high productivity methods to deploy and call the app""" + + @overload + def __init__( + self, + algod_client: "AlgodClient", + app_spec: au_spec.ApplicationSpecification | Path, + *, + app_id: int = 0, + signer: TransactionSigner | Account | None = None, + sender: str | None = None, + suggested_params: transaction.SuggestedParams | None = None, + template_values: au_deploy.TemplateValueMapping | None = None, + ): ... 
+ + @overload + def __init__( + self, + algod_client: "AlgodClient", + app_spec: au_spec.ApplicationSpecification | Path, + *, + creator: str | Account, + indexer_client: "IndexerClient | None" = None, + existing_deployments: au_deploy.AppLookup | None = None, + signer: TransactionSigner | Account | None = None, + sender: str | None = None, + suggested_params: transaction.SuggestedParams | None = None, + template_values: au_deploy.TemplateValueMapping | None = None, + app_name: str | None = None, + ): ... + + def __init__( # noqa: PLR0913 + self, + algod_client: "AlgodClient", + app_spec: au_spec.ApplicationSpecification | Path, + *, + app_id: int = 0, + creator: str | Account | None = None, + indexer_client: "IndexerClient | None" = None, + existing_deployments: au_deploy.AppLookup | None = None, + signer: TransactionSigner | Account | None = None, + sender: str | None = None, + suggested_params: transaction.SuggestedParams | None = None, + template_values: au_deploy.TemplateValueMapping | None = None, + app_name: str | None = None, + ): + """ApplicationClient can be created with an app_id to interact with an existing application, alternatively + it can be created with a creator and indexer_client specified to find existing applications by name and creator. 
+ + :param AlgodClient algod_client: AlgoSDK algod client + :param ApplicationSpecification | Path app_spec: An Application Specification or the path to one + :param int app_id: The app_id of an existing application, to instead find the application by creator and name + use the creator and indexer_client parameters + :param str | Account creator: The address or Account of the app creator to resolve the app_id + :param IndexerClient indexer_client: AlgoSDK indexer client, only required if deploying or finding app_id by + creator and app name + :param AppLookup existing_deployments: + :param TransactionSigner | Account signer: Account or signer to use to sign transactions, if not specified and + creator was passed as an Account will use that. + :param str sender: Address to use as the sender for all transactions, will use the address associated with the + signer if not specified. + :param TemplateValueMapping template_values: Values to use for TMPL_* template variables, dictionary keys should + *NOT* include the TMPL_ prefix + :param str | None app_name: Name of application to use when deploying, defaults to name defined on the + Application Specification + """ + self.algod_client = algod_client + self.app_spec = ( + au_spec.ApplicationSpecification.from_json(app_spec.read_text()) if isinstance(app_spec, Path) else app_spec + ) + self._app_name = app_name + self._approval_program: Program | None = None + self._approval_source_map: SourceMap | None = None + self._clear_program: Program | None = None + + self.template_values: au_deploy.TemplateValueMapping = template_values or {} + self.existing_deployments = existing_deployments + self._indexer_client = indexer_client + if creator is not None: + if not self.existing_deployments and not self._indexer_client: + raise Exception( + "If using the creator parameter either existing_deployments or indexer_client must also be provided" + ) + self._creator: str | None = creator.address if isinstance(creator, Account) else 
creator + if self.existing_deployments and self.existing_deployments.creator != self._creator: + raise Exception( + "Attempt to create application client with invalid existing_deployments against" + f"a different creator ({self.existing_deployments.creator} instead of " + f"expected creator {self._creator}" + ) + self.app_id = 0 + else: + self.app_id = app_id + self._creator = None + + self.signer: TransactionSigner | None + if signer: + self.signer = ( + signer if isinstance(signer, TransactionSigner) else AccountTransactionSigner(signer.private_key) + ) + elif isinstance(creator, Account): + self.signer = AccountTransactionSigner(creator.private_key) + else: + self.signer = None + + self.sender = sender + self.suggested_params = suggested_params + + @property + def app_name(self) -> str: + return self._app_name or self.app_spec.contract.name + + @app_name.setter + def app_name(self, value: str) -> None: + self._app_name = value + + @property + def app_address(self) -> str: + return get_application_address(self.app_id) + + @property + def approval(self) -> Program | None: + return self._approval_program + + @property + def approval_source_map(self) -> SourceMap | None: + if self._approval_source_map: + return self._approval_source_map + if self._approval_program: + return self._approval_program.source_map + return None + + @approval_source_map.setter + def approval_source_map(self, value: SourceMap) -> None: + self._approval_source_map = value + + @property + def clear(self) -> Program | None: + return self._clear_program + + def prepare( + self, + signer: TransactionSigner | Account | None = None, + sender: str | None = None, + app_id: int | None = None, + template_values: au_deploy.TemplateValueDict | None = None, + ) -> "ApplicationClient": + """Creates a copy of this ApplicationClient, using the new signer, sender and app_id values if provided. 
+ Will also substitute provided template_values into the associated app_spec in the copy""" + new_client: ApplicationClient = copy.copy(self) + new_client._prepare( # noqa: SLF001 + new_client, signer=signer, sender=sender, app_id=app_id, template_values=template_values + ) + return new_client + + def _prepare( # noqa: PLR0913 + self, + target: "ApplicationClient", + *, + signer: TransactionSigner | Account | None = None, + sender: str | None = None, + app_id: int | None = None, + template_values: au_deploy.TemplateValueDict | None = None, + ) -> None: + target.app_id = self.app_id if app_id is None else app_id + target.signer, target.sender = target.get_signer_sender( + AccountTransactionSigner(signer.private_key) if isinstance(signer, Account) else signer, sender + ) + target.template_values = {**self.template_values, **(template_values or {})} + + def deploy( # noqa: PLR0913 + self, + version: str | None = None, + *, + signer: TransactionSigner | None = None, + sender: str | None = None, + allow_update: bool | None = None, + allow_delete: bool | None = None, + on_update: au_deploy.OnUpdate = au_deploy.OnUpdate.Fail, + on_schema_break: au_deploy.OnSchemaBreak = au_deploy.OnSchemaBreak.Fail, + template_values: au_deploy.TemplateValueMapping | None = None, + create_args: au_deploy.ABICreateCallArgs + | au_deploy.ABICreateCallArgsDict + | au_deploy.DeployCreateCallArgs + | None = None, + update_args: au_deploy.ABICallArgs | au_deploy.ABICallArgsDict | au_deploy.DeployCallArgs | None = None, + delete_args: au_deploy.ABICallArgs | au_deploy.ABICallArgsDict | au_deploy.DeployCallArgs | None = None, + ) -> au_deploy.DeployResponse: + """Deploy an application and update client to reference it. + + Idempotently deploy (create, update/delete if changed) an app against the given name via the given creator + account, including deploy-time template placeholder substitutions. 
+ To understand the architecture decisions behind this functionality please see + + + ```{note} + If there is a breaking state schema change to an existing app (and `on_schema_break` is set to + 'ReplaceApp' the existing app will be deleted and re-created. + ``` + + ```{note} + If there is an update (different TEAL code) to an existing app (and `on_update` is set to 'ReplaceApp') + the existing app will be deleted and re-created. + ``` + + :param str version: version to use when creating or updating app, if None version will be auto incremented + :param algosdk.atomic_transaction_composer.TransactionSigner signer: signer to use when deploying app + , if None uses self.signer + :param str sender: sender address to use when deploying app, if None uses self.sender + :param bool allow_delete: Used to set the `TMPL_DELETABLE` template variable to conditionally control if an app + can be deleted + :param bool allow_update: Used to set the `TMPL_UPDATABLE` template variable to conditionally control if an app + can be updated + :param OnUpdate on_update: Determines what action to take if an application update is required + :param OnSchemaBreak on_schema_break: Determines what action to take if an application schema requirements + has increased beyond the current allocation + :param dict[str, int|str|bytes] template_values: Values to use for `TMPL_*` template variables, dictionary keys + should *NOT* include the TMPL_ prefix + :param ABICreateCallArgs create_args: Arguments used when creating an application + :param ABICallArgs | ABICallArgsDict update_args: Arguments used when updating an application + :param ABICallArgs | ABICallArgsDict delete_args: Arguments used when deleting an application + :return DeployResponse: details action taken and relevant transactions + :raises DeploymentError: If the deployment failed + """ + # check inputs + if self.app_id: + raise au_deploy.DeploymentFailedError( + f"Attempt to deploy app which already has an app index of {self.app_id}" + 
) + try: + resolved_signer, resolved_sender = self.resolve_signer_sender(signer, sender) + except ValueError as ex: + raise au_deploy.DeploymentFailedError(f"{ex}, unable to deploy app") from None + if not self._creator: + raise au_deploy.DeploymentFailedError("No creator provided, unable to deploy app") + if self._creator != resolved_sender: + raise au_deploy.DeploymentFailedError( + f"Attempt to deploy contract with a sender address {resolved_sender} that differs " + f"from the given creator address for this application client: {self._creator}" + ) + + # make a copy and prepare variables + template_values = {**self.template_values, **(template_values or {})} + au_deploy.add_deploy_template_variables(template_values, allow_update=allow_update, allow_delete=allow_delete) + + existing_app_metadata_or_reference = self._load_app_reference() + + self._approval_program, self._clear_program = substitute_template_and_compile( + self.algod_client, self.app_spec, template_values + ) + + if config.debug and config.project_root: + persist_sourcemaps( + sources=[ + PersistSourceMapInput( + compiled_teal=self._approval_program, app_name=self.app_name, file_name="approval.teal" + ), + PersistSourceMapInput( + compiled_teal=self._clear_program, app_name=self.app_name, file_name="clear.teal" + ), + ], + project_root=config.project_root, + client=self.algod_client, + with_sources=True, + ) + + deployer = au_deploy.Deployer( + app_client=self, + creator=self._creator, + signer=resolved_signer, + sender=resolved_sender, + new_app_metadata=self._get_app_deploy_metadata(version, allow_update, allow_delete), + existing_app_metadata_or_reference=existing_app_metadata_or_reference, + on_update=on_update, + on_schema_break=on_schema_break, + create_args=create_args, + update_args=update_args, + delete_args=delete_args, + ) + + return deployer.deploy() + + def compose_create( + self, + atc: AtomicTransactionComposer, + /, + call_abi_method: ABIMethod | bool | None = None, + 
transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = None, + **abi_kwargs: ABIArgType, + ) -> None: + """Adds a signed transaction with application id == 0 and the schema and source of client's app_spec to atc""" + approval_program, clear_program = self._check_is_compiled() + transaction_parameters = _convert_transaction_parameters(transaction_parameters) + + extra_pages = transaction_parameters.extra_pages or num_extra_program_pages( + approval_program.raw_binary, clear_program.raw_binary + ) + + self.add_method_call( + atc, + app_id=0, + abi_method=call_abi_method, + abi_args=abi_kwargs, + on_complete=transaction_parameters.on_complete or transaction.OnComplete.NoOpOC, + call_config=au_spec.CallConfig.CREATE, + parameters=transaction_parameters, + approval_program=approval_program.raw_binary, + clear_program=clear_program.raw_binary, + global_schema=self.app_spec.global_state_schema, + local_schema=self.app_spec.local_state_schema, + extra_pages=extra_pages, + ) + + @overload + def create( + self, + call_abi_method: Literal[False], + transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = ..., + ) -> TransactionResponse: ... + + @overload + def create( + self, + call_abi_method: ABIMethod | Literal[True], + transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = ..., + **abi_kwargs: ABIArgType, + ) -> ABITransactionResponse: ... + + @overload + def create( + self, + call_abi_method: ABIMethod | bool | None = ..., + transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = ..., + **abi_kwargs: ABIArgType, + ) -> TransactionResponse | ABITransactionResponse: ... 
    def create(
        self,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse:
        """Submits a signed transaction with application id == 0 and the schema and source of client's app_spec

        :param call_abi_method: ABI method to call on create; True/None to auto-resolve from the app spec,
            False to force a bare (non-ABI) application call
        :param transaction_parameters: common transaction parameters plus create-specific ones
            (on_complete, extra_pages)
        :param abi_kwargs: named ABI arguments for the resolved method
        :return: the transaction response; an ABI response when an ABI method was called
        """

        atc = AtomicTransactionComposer()

        self.compose_create(
            atc,
            call_abi_method,
            transaction_parameters,
            **abi_kwargs,
        )
        create_result = self._execute_atc_tr(atc)
        # Bind this client to the newly allocated application so subsequent calls target it.
        self.app_id = au_deploy.get_app_id_from_tx_id(self.algod_client, create_result.tx_id)
        return create_result

    def compose_update(
        self,
        atc: AtomicTransactionComposer,
        /,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> None:
        """Adds a signed transaction with on_complete=UpdateApplication to atc

        The approval/clear programs are (re)compiled from the app spec if needed, since an
        update transaction must carry the new program bytes.
        """
        approval_program, clear_program = self._check_is_compiled()

        self.add_method_call(
            atc=atc,
            abi_method=call_abi_method,
            abi_args=abi_kwargs,
            parameters=transaction_parameters,
            on_complete=transaction.OnComplete.UpdateApplicationOC,
            approval_program=approval_program.raw_binary,
            clear_program=clear_program.raw_binary,
        )

    # Overloads narrow the return type: an ABI call yields ABITransactionResponse,
    # an explicit bare call (False) yields plain TransactionResponse.
    @overload
    def update(
        self,
        call_abi_method: ABIMethod | Literal[True],
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> ABITransactionResponse: ...

    @overload
    def update(
        self,
        call_abi_method: Literal[False],
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
    ) -> TransactionResponse: ...
    @overload
    def update(
        self,
        call_abi_method: ABIMethod | bool | None = ...,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse: ...

    def update(
        self,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse:
        """Submits a signed transaction with on_complete=UpdateApplication

        :param call_abi_method: ABI method to call; True/None to auto-resolve, False for a bare call
        :param transaction_parameters: common transaction parameters (signer, sender, note, ...)
        :param abi_kwargs: named ABI arguments for the resolved method
        :return: the transaction response; an ABI response when an ABI method was called
        """

        atc = AtomicTransactionComposer()
        self.compose_update(
            atc,
            call_abi_method,
            transaction_parameters=transaction_parameters,
            **abi_kwargs,
        )
        return self._execute_atc_tr(atc)

    def compose_delete(
        self,
        atc: AtomicTransactionComposer,
        /,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> None:
        """Adds a signed transaction with on_complete=DeleteApplication to atc

        No program bytes are needed for deletion, so unlike compose_update this does not compile.
        """

        self.add_method_call(
            atc,
            call_abi_method,
            abi_args=abi_kwargs,
            parameters=transaction_parameters,
            on_complete=transaction.OnComplete.DeleteApplicationOC,
        )

    # Overloads narrow the return type, mirroring update()/call().
    @overload
    def delete(
        self,
        call_abi_method: ABIMethod | Literal[True],
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> ABITransactionResponse: ...

    @overload
    def delete(
        self,
        call_abi_method: Literal[False],
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
    ) -> TransactionResponse: ...

    @overload
    def delete(
        self,
        call_abi_method: ABIMethod | bool | None = ...,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse: ...
    def delete(
        self,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse:
        """Submits a signed transaction with on_complete=DeleteApplication

        :param call_abi_method: ABI method to call; True/None to auto-resolve, False for a bare call
        :param transaction_parameters: common transaction parameters (signer, sender, note, ...)
        :param abi_kwargs: named ABI arguments for the resolved method
        :return: the transaction response; an ABI response when an ABI method was called
        """

        atc = AtomicTransactionComposer()
        self.compose_delete(
            atc,
            call_abi_method,
            transaction_parameters=transaction_parameters,
            **abi_kwargs,
        )
        return self._execute_atc_tr(atc)

    def compose_call(
        self,
        atc: AtomicTransactionComposer,
        /,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> None:
        """Adds a signed transaction with specified parameters to atc

        on_complete is taken from transaction_parameters, defaulting to NoOp.
        """
        # Normalise dict-style parameters into the dataclass form before reading fields off it.
        _parameters = _convert_transaction_parameters(transaction_parameters)
        self.add_method_call(
            atc,
            abi_method=call_abi_method,
            abi_args=abi_kwargs,
            parameters=_parameters,
            on_complete=_parameters.on_complete or transaction.OnComplete.NoOpOC,
        )

    # Overloads narrow the return type, mirroring update()/delete().
    @overload
    def call(
        self,
        call_abi_method: ABIMethod | Literal[True],
        transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> ABITransactionResponse: ...

    @overload
    def call(
        self,
        call_abi_method: Literal[False],
        transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = ...,
    ) -> TransactionResponse: ...

    @overload
    def call(
        self,
        call_abi_method: ABIMethod | bool | None = ...,
        transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse: ...
    def call(
        self,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse:
        """Submits a signed transaction with specified parameters

        Read-only ABI methods (per the app spec's hints) are simulated rather than submitted,
        so they cost no fees and settle immediately.

        :param call_abi_method: ABI method to call; True/None to auto-resolve, False for a bare call
        :param transaction_parameters: common transaction parameters plus on_complete
        :param abi_kwargs: named ABI arguments for the resolved method
        :return: the transaction response; an ABI response when an ABI method was called
        """
        atc = AtomicTransactionComposer()
        _parameters = _convert_transaction_parameters(transaction_parameters)
        self.compose_call(
            atc,
            call_abi_method=call_abi_method,
            transaction_parameters=_parameters,
            **abi_kwargs,
        )

        # Re-resolve the method here (compose_call resolved it internally too) so we can
        # inspect its hints for the read-only fast path.
        method = self._resolve_method(
            call_abi_method, abi_kwargs, _parameters.on_complete or transaction.OnComplete.NoOpOC
        )
        if method:
            hints = self._method_hints(method)
            if hints and hints.read_only:
                # When trace-all debugging is enabled, persist the simulation for tooling
                # before returning the simulated result.
                if config.debug and config.project_root and config.trace_all:
                    simulate_and_persist_response(
                        atc, config.project_root, self.algod_client, config.trace_buffer_size_mb
                    )

                return self._simulate_readonly_call(method, atc)

        return self._execute_atc_tr(atc)

    def compose_opt_in(
        self,
        atc: AtomicTransactionComposer,
        /,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> None:
        """Adds a signed transaction with on_complete=OptIn to atc"""
        self.add_method_call(
            atc,
            abi_method=call_abi_method,
            abi_args=abi_kwargs,
            parameters=transaction_parameters,
            on_complete=transaction.OnComplete.OptInOC,
        )

    # NOTE(review): these opt_in overloads give call_abi_method a default (`= ...`) and
    # transaction_parameters `= None` where sibling methods (update/delete/call) use no
    # method default and `= ...` — presumably unintentional drift; confirm and align.
    @overload
    def opt_in(
        self,
        call_abi_method: ABIMethod | Literal[True] = ...,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> ABITransactionResponse: ...

    @overload
    def opt_in(
        self,
        call_abi_method: Literal[False] = ...,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
    ) -> TransactionResponse: ...
    @overload
    def opt_in(
        self,
        call_abi_method: ABIMethod | bool | None = ...,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse: ...

    def opt_in(
        self,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse:
        """Submits a signed transaction with on_complete=OptIn

        :param call_abi_method: ABI method to call; True/None to auto-resolve, False for a bare call
        :param transaction_parameters: common transaction parameters (signer, sender, note, ...)
        :param abi_kwargs: named ABI arguments for the resolved method
        :return: the transaction response; an ABI response when an ABI method was called
        """
        atc = AtomicTransactionComposer()
        self.compose_opt_in(
            atc,
            call_abi_method=call_abi_method,
            transaction_parameters=transaction_parameters,
            **abi_kwargs,
        )
        return self._execute_atc_tr(atc)

    def compose_close_out(
        self,
        atc: AtomicTransactionComposer,
        /,
        call_abi_method: ABIMethod | bool | None = None,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = None,
        **abi_kwargs: ABIArgType,
    ) -> None:
        """Adds a signed transaction with on_complete=CloseOut to atc"""
        self.add_method_call(
            atc,
            abi_method=call_abi_method,
            abi_args=abi_kwargs,
            parameters=transaction_parameters,
            on_complete=transaction.OnComplete.CloseOutOC,
        )

    # Overloads narrow the return type, mirroring update()/delete()/call().
    @overload
    def close_out(
        self,
        call_abi_method: ABIMethod | Literal[True],
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> ABITransactionResponse: ...

    @overload
    def close_out(
        self,
        call_abi_method: Literal[False],
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
    ) -> TransactionResponse: ...

    @overload
    def close_out(
        self,
        call_abi_method: ABIMethod | bool | None = ...,
        transaction_parameters: TransactionParameters | TransactionParametersDict | None = ...,
        **abi_kwargs: ABIArgType,
    ) -> TransactionResponse | ABITransactionResponse: ...
+ + def close_out( + self, + call_abi_method: ABIMethod | bool | None = None, + transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, + **abi_kwargs: ABIArgType, + ) -> TransactionResponse | ABITransactionResponse: + """Submits a signed transaction with on_complete=CloseOut""" + atc = AtomicTransactionComposer() + self.compose_close_out( + atc, + call_abi_method=call_abi_method, + transaction_parameters=transaction_parameters, + **abi_kwargs, + ) + return self._execute_atc_tr(atc) + + def compose_clear_state( + self, + atc: AtomicTransactionComposer, + /, + transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, + app_args: list[bytes] | None = None, + ) -> None: + """Adds a signed transaction with on_complete=ClearState to atc""" + return self.add_method_call( + atc, + parameters=transaction_parameters, + on_complete=transaction.OnComplete.ClearStateOC, + app_args=app_args, + ) + + def clear_state( + self, + transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, + app_args: list[bytes] | None = None, + ) -> TransactionResponse: + """Submits a signed transaction with on_complete=ClearState""" + atc = AtomicTransactionComposer() + self.compose_clear_state( + atc, + transaction_parameters=transaction_parameters, + app_args=app_args, + ) + return self._execute_atc_tr(atc) + + def get_global_state(self, *, raw: bool = False) -> dict[bytes | str, bytes | str | int]: + """Gets the global state info associated with app_id""" + global_state = self.algod_client.application_info(self.app_id) + assert isinstance(global_state, dict) + return cast( + dict[bytes | str, bytes | str | int], + _decode_state(global_state.get("params", {}).get("global-state", {}), raw=raw), + ) + + def get_local_state(self, account: str | None = None, *, raw: bool = False) -> dict[bytes | str, bytes | str | int]: + """Gets the local state info for associated app_id and account/sender""" + + if 
account is None: + _, account = self.resolve_signer_sender(self.signer, self.sender) + + acct_state = self.algod_client.account_application_info(account, self.app_id) + assert isinstance(acct_state, dict) + return cast( + dict[bytes | str, bytes | str | int], + _decode_state(acct_state.get("app-local-state", {}).get("key-value", {}), raw=raw), + ) + + def resolve(self, to_resolve: au_spec.DefaultArgumentDict) -> int | str | bytes: + """Resolves the default value for an ABI method, based on app_spec""" + + def _data_check(value: object) -> int | str | bytes: + if isinstance(value, int | str | bytes): + return value + raise ValueError(f"Unexpected type for constant data: {value}") + + match to_resolve: + case {"source": "constant", "data": data}: + return _data_check(data) + case {"source": "global-state", "data": str() as key}: + global_state = self.get_global_state(raw=True) + return global_state[key.encode()] + case {"source": "local-state", "data": str() as key}: + _, sender = self.resolve_signer_sender(self.signer, self.sender) + acct_state = self.get_local_state(sender, raw=True) + return acct_state[key.encode()] + case {"source": "abi-method", "data": dict() as method_dict}: + method = Method.undictify(method_dict) + response = self.call(method) + assert isinstance(response, ABITransactionResponse) + return _data_check(response.return_value) + + case {"source": source}: + raise ValueError(f"Unrecognized default argument source: {source}") + case _: + raise TypeError("Unable to interpret default argument specification") + + def _get_app_deploy_metadata( + self, version: str | None, allow_update: bool | None, allow_delete: bool | None + ) -> au_deploy.AppDeployMetaData: + updatable = ( + allow_update + if allow_update is not None + else au_deploy.get_deploy_control( + self.app_spec, au_deploy.UPDATABLE_TEMPLATE_NAME, transaction.OnComplete.UpdateApplicationOC + ) + ) + deletable = ( + allow_delete + if allow_delete is not None + else 
au_deploy.get_deploy_control( + self.app_spec, au_deploy.DELETABLE_TEMPLATE_NAME, transaction.OnComplete.DeleteApplicationOC + ) + ) + + app = self._load_app_reference() + + if version is None: + if app.app_id == 0: + version = "v1.0" + else: + assert isinstance(app, au_deploy.AppDeployMetaData) + version = get_next_version(app.version) + return au_deploy.AppDeployMetaData(self.app_name, version, updatable=updatable, deletable=deletable) + + def _check_is_compiled(self) -> tuple[Program, Program]: + if self._approval_program is None or self._clear_program is None: + self._approval_program, self._clear_program = substitute_template_and_compile( + self.algod_client, self.app_spec, self.template_values + ) + + if config.debug and config.project_root: + persist_sourcemaps( + sources=[ + PersistSourceMapInput( + compiled_teal=self._approval_program, app_name=self.app_name, file_name="approval.teal" + ), + PersistSourceMapInput( + compiled_teal=self._clear_program, app_name=self.app_name, file_name="clear.teal" + ), + ], + project_root=config.project_root, + client=self.algod_client, + with_sources=True, + ) + + return self._approval_program, self._clear_program + + def _simulate_readonly_call( + self, method: Method, atc: AtomicTransactionComposer + ) -> ABITransactionResponse | TransactionResponse: + response = simulate_response(atc, self.algod_client) + traces = None + if config.debug: + traces = _create_simulate_traces(response) + if response.failure_message: + raise _try_convert_to_logic_error( + response.failure_message, + self.app_spec.approval_program, + self._get_approval_source_map, + traces, + ) or Exception(f"Simulate failed for readonly method {method.get_signature()}: {response.failure_message}") + + return TransactionResponse.from_atr(response) + + def _load_reference_and_check_app_id(self) -> None: + self._load_app_reference() + self._check_app_id() + + def _load_app_reference(self) -> au_deploy.AppReference | au_deploy.AppMetaData: + if not 
self.existing_deployments and self._creator: + assert self._indexer_client + self.existing_deployments = au_deploy.get_creator_apps(self._indexer_client, self._creator) + + if self.existing_deployments: + app = self.existing_deployments.apps.get(self.app_name) + if app: + if self.app_id == 0: + self.app_id = app.app_id + return app + + return au_deploy.AppReference(self.app_id, self.app_address) + + def _check_app_id(self) -> None: + if self.app_id == 0: + raise Exception( + "ApplicationClient is not associated with an app instance, to resolve either:\n" + "1.) provide an app_id on construction OR\n" + "2.) provide a creator address so an app can be searched for OR\n" + "3.) create an app first using create or deploy methods" + ) + + def _resolve_method( + self, + abi_method: ABIMethod | bool | None, + args: ABIArgsDict | None, + on_complete: transaction.OnComplete, + call_config: au_spec.CallConfig = au_spec.CallConfig.CALL, + ) -> Method | None: + matches: list[Method | None] = [] + match abi_method: + case str() | Method(): # abi method specified + return self._resolve_abi_method(abi_method) + case bool() | None: # find abi method + has_bare_config = ( + call_config in au_deploy.get_call_config(self.app_spec.bare_call_config, on_complete) + or on_complete == transaction.OnComplete.ClearStateOC + ) + abi_methods = self._find_abi_methods(args, on_complete, call_config) + if abi_method is not False: + matches += abi_methods + if has_bare_config and abi_method is not True: + matches += [None] + case _: + return abi_method.method_spec() + + if len(matches) == 1: # exact match + return matches[0] + elif len(matches) > 1: # ambiguous match + signatures = ", ".join((m.get_signature() if isinstance(m, Method) else "bare") for m in matches) + raise Exception( + f"Could not find an exact method to use for {on_complete.name} with call_config of {call_config.name}, " + f"specify the exact method using abi_method and args parameters, considered: {signatures}" + ) + else: # no 
match + raise Exception( + f"Could not find any methods to use for {on_complete.name} with call_config of {call_config.name}" + ) + + def _get_approval_source_map(self) -> SourceMap | None: + if self.approval_source_map: + return self.approval_source_map + + try: + approval, _ = self._check_is_compiled() + except au_deploy.DeploymentFailedError: + return None + return approval.source_map + + def export_source_map(self) -> str | None: + """Export approval source map to JSON, can be later re-imported with `import_source_map`""" + source_map = self._get_approval_source_map() + if source_map: + return json.dumps( + { + "version": source_map.version, + "sources": source_map.sources, + "mappings": source_map.mappings, + } + ) + return None + + def import_source_map(self, source_map_json: str) -> None: + """Import approval source from JSON exported by `export_source_map`""" + source_map = json.loads(source_map_json) + self._approval_source_map = SourceMap(source_map) + + def add_method_call( # noqa: PLR0913 + self, + atc: AtomicTransactionComposer, + abi_method: ABIMethod | bool | None = None, + *, + abi_args: ABIArgsDict | None = None, + app_id: int | None = None, + parameters: TransactionParameters | TransactionParametersDict | None = None, + on_complete: transaction.OnComplete = transaction.OnComplete.NoOpOC, + local_schema: transaction.StateSchema | None = None, + global_schema: transaction.StateSchema | None = None, + approval_program: bytes | None = None, + clear_program: bytes | None = None, + extra_pages: int | None = None, + app_args: list[bytes] | None = None, + call_config: au_spec.CallConfig = au_spec.CallConfig.CALL, + ) -> None: + """Adds a transaction to the AtomicTransactionComposer passed""" + if app_id is None: + self._load_reference_and_check_app_id() + app_id = self.app_id + parameters = _convert_transaction_parameters(parameters) + method = self._resolve_method(abi_method, abi_args, on_complete, call_config) + sp = parameters.suggested_params or 
self.suggested_params or self.algod_client.suggested_params() + signer, sender = self.resolve_signer_sender(parameters.signer, parameters.sender) + if parameters.boxes is not None: + # TODO: algosdk actually does this, but it's type hints say otherwise... + encoded_boxes = [(id_, algosdk.encoding.encode_as_bytes(name)) for id_, name in parameters.boxes] + else: + encoded_boxes = None + + encoded_lease = parameters.lease.encode("utf-8") if isinstance(parameters.lease, str) else parameters.lease + + if not method: # not an abi method, treat as a regular call + if abi_args: + raise Exception(f"ABI arguments specified on a bare call: {', '.join(abi_args)}") + atc.add_transaction( + TransactionWithSigner( + txn=transaction.ApplicationCallTxn( # type: ignore[no-untyped-call] + sender=sender, + sp=sp, + index=app_id, + on_complete=on_complete, + approval_program=approval_program, + clear_program=clear_program, + global_schema=global_schema, + local_schema=local_schema, + extra_pages=extra_pages, + accounts=parameters.accounts, + foreign_apps=parameters.foreign_apps, + foreign_assets=parameters.foreign_assets, + boxes=encoded_boxes, + note=parameters.note, + lease=encoded_lease, + rekey_to=parameters.rekey_to, + app_args=app_args, + ), + signer=signer, + ) + ) + return + # resolve ABI method args + args = self._get_abi_method_args(abi_args, method) + atc.add_method_call( + app_id, + method, + sender, + sp, + signer, + method_args=args, + on_complete=on_complete, + local_schema=local_schema, + global_schema=global_schema, + approval_program=approval_program, + clear_program=clear_program, + extra_pages=extra_pages or 0, + accounts=parameters.accounts, + foreign_apps=parameters.foreign_apps, + foreign_assets=parameters.foreign_assets, + boxes=encoded_boxes, + note=parameters.note.encode("utf-8") if isinstance(parameters.note, str) else parameters.note, + lease=encoded_lease, + rekey_to=parameters.rekey_to, + ) + + def _get_abi_method_args(self, abi_args: ABIArgsDict | None, 
method: Method) -> list: + args: list = [] + hints = self._method_hints(method) + # copy args so we don't mutate original + abi_args = dict(abi_args or {}) + for method_arg in method.args: + name = method_arg.name + if name in abi_args: + argument = abi_args.pop(name) + if isinstance(argument, dict): + if hints.structs is None or name not in hints.structs: + raise Exception(f"Argument missing struct hint: {name}. Check argument name and type") + + elements = hints.structs[name]["elements"] + + argument_tuple = tuple(argument[field_name] for field_name, field_type in elements) + args.append(argument_tuple) + else: + args.append(argument) + + elif hints.default_arguments is not None and name in hints.default_arguments: + default_arg = hints.default_arguments[name] + if default_arg is not None: + args.append(self.resolve(default_arg)) + else: + raise Exception(f"Unspecified argument: {name}") + if abi_args: + raise Exception(f"Unused arguments specified: {', '.join(abi_args)}") + return args + + def _method_matches( + self, + method: Method, + args: ABIArgsDict | None, + on_complete: transaction.OnComplete, + call_config: au_spec.CallConfig, + ) -> bool: + hints = self._method_hints(method) + if call_config not in au_deploy.get_call_config(hints.call_config, on_complete): + return False + method_args = {m.name for m in method.args} + provided_args = set(args or {}) | set(hints.default_arguments) + + # TODO: also match on types? 
+ return method_args == provided_args + + def _find_abi_methods( + self, args: ABIArgsDict | None, on_complete: transaction.OnComplete, call_config: au_spec.CallConfig + ) -> list[Method]: + return [ + method + for method in self.app_spec.contract.methods + if self._method_matches(method, args, on_complete, call_config) + ] + + def _resolve_abi_method(self, method: ABIMethod) -> Method: + if isinstance(method, str): + try: + return next(iter(m for m in self.app_spec.contract.methods if m.get_signature() == method)) + except StopIteration: + pass + return self.app_spec.contract.get_method_by_name(method) + elif hasattr(method, "method_spec"): + return method.method_spec() + else: + return method + + def _method_hints(self, method: Method) -> au_spec.MethodHints: + sig = method.get_signature() + if sig not in self.app_spec.hints: + return au_spec.MethodHints() + return self.app_spec.hints[sig] + + def _execute_atc_tr(self, atc: AtomicTransactionComposer) -> TransactionResponse: + result = self.execute_atc(atc) + return TransactionResponse.from_atr(result) + + def execute_atc(self, atc: AtomicTransactionComposer) -> AtomicTransactionResponse: + return execute_atc_with_logic_error( + atc, + self.algod_client, + approval_program=self.app_spec.approval_program, + approval_source_map=self._get_approval_source_map, + ) + + def get_signer_sender( + self, signer: TransactionSigner | None = None, sender: str | None = None + ) -> tuple[TransactionSigner | None, str | None]: + """Return signer and sender, using default values on client if not specified + + Will use provided values if given, otherwise will fall back to values defined on client. 
+ If no sender is specified then will attempt to obtain sender from signer""" + resolved_signer = signer or self.signer + resolved_sender = sender or get_sender_from_signer(signer) or self.sender or get_sender_from_signer(self.signer) + return resolved_signer, resolved_sender + + def resolve_signer_sender( + self, signer: TransactionSigner | None = None, sender: str | None = None + ) -> tuple[TransactionSigner, str]: + """Return signer and sender, using default values on client if not specified + + Will use provided values if given, otherwise will fall back to values defined on client. + If no sender is specified then will attempt to obtain sender from signer + + :raises ValueError: Raised if a signer or sender is not provided. See `get_signer_sender` + for variant with no exception""" + resolved_signer, resolved_sender = self.get_signer_sender(signer, sender) + if not resolved_signer: + raise ValueError("No signer provided") + if not resolved_sender: + raise ValueError("No sender provided") + return resolved_signer, resolved_sender + + # TODO: remove private implementation, kept in the 1.0.2 release to not impact existing beaker 1.0 installs + _resolve_signer_sender = resolve_signer_sender + + +def substitute_template_and_compile( + algod_client: "AlgodClient", + app_spec: au_spec.ApplicationSpecification, + template_values: au_deploy.TemplateValueMapping, +) -> tuple[Program, Program]: + """Substitutes the provided template_values into app_spec and compiles""" + template_values = dict(template_values or {}) + clear = au_deploy.replace_template_variables(app_spec.clear_program, template_values) + + au_deploy.check_template_variables(app_spec.approval_program, template_values) + approval = au_deploy.replace_template_variables(app_spec.approval_program, template_values) + + approval_app, clear_app = Program(approval, algod_client), Program(clear, algod_client) + + return approval_app, clear_app + + +def get_next_version(current_version: str) -> str: + """Calculates 
the next version from `current_version` + + Next version is calculated by finding a semver like + version string and incrementing the lower. This function is used by {py:meth}`ApplicationClient.deploy` when + a version is not specified, and is intended mostly for convenience during local development. + + :params str current_version: An existing version string with a semver like version contained within it, + some valid inputs and incremented outputs: + `1` -> `2` + `1.0` -> `1.1` + `v1.1` -> `v1.2` + `v1.1-beta1` -> `v1.2-beta1` + `v1.2.3.4567` -> `v1.2.3.4568` + `v1.2.3.4567-alpha` -> `v1.2.3.4568-alpha` + :raises DeploymentFailedError: If `current_version` cannot be parsed""" + pattern = re.compile(r"(?P\w*)(?P(?:\d+\.)*\d+)(?P\w*)") + match = pattern.match(current_version) + if match: + version = match.group("version") + new_version = _increment_version(version) + + def replacement(m: re.Match) -> str: + return f"{m.group('prefix')}{new_version}{m.group('suffix')}" + + return re.sub(pattern, replacement, current_version) + raise au_deploy.DeploymentFailedError( + f"Could not auto increment {current_version}, please specify the next version using the version parameter" + ) + + +def _try_convert_to_logic_error( + source_ex: Exception | str, + approval_program: str, + approval_source_map: SourceMap | typing.Callable[[], SourceMap | None] | None = None, + simulate_traces: list[SimulationTrace] | None = None, +) -> Exception | None: + source_ex_str = str(source_ex) + logic_error_data = parse_logic_error(source_ex_str) + if logic_error_data: + return LogicError( + logic_error_str=source_ex_str, + logic_error=source_ex if isinstance(source_ex, Exception) else None, + program=approval_program, + source_map=approval_source_map() if callable(approval_source_map) else approval_source_map, + **logic_error_data, + traces=simulate_traces, + ) + + return None + + +def execute_atc_with_logic_error( + atc: AtomicTransactionComposer, + algod_client: "AlgodClient", + 
approval_program: str, + wait_rounds: int = 4, + approval_source_map: SourceMap | typing.Callable[[], SourceMap | None] | None = None, +) -> AtomicTransactionResponse: + """Calls {py:meth}`AtomicTransactionComposer.execute` on provided `atc`, but will parse any errors + and raise a {py:class}`LogicError` if possible + + ```{note} + `approval_program` and `approval_source_map` are required to be able to parse any errors into a + {py:class}`LogicError` + ``` + """ + try: + if config.debug and config.project_root and config.trace_all: + simulate_and_persist_response(atc, config.project_root, algod_client, config.trace_buffer_size_mb) + + return atc.execute(algod_client, wait_rounds=wait_rounds) + except Exception as ex: + if config.debug: + simulate = None + if config.project_root and not config.trace_all: + # if trace_all is enabled, we already have the traces executed above + # hence we only need to simulate if trace_all is disabled and + # project_root is set + simulate = simulate_and_persist_response( + atc, config.project_root, algod_client, config.trace_buffer_size_mb + ) + else: + simulate = simulate_response(atc, algod_client) + traces = _create_simulate_traces(simulate) + else: + traces = None + logger.info("An error occurred while executing the transaction.") + logger.info("To see more details, enable debug mode by setting config.debug = True ") + + logic_error = _try_convert_to_logic_error(ex, approval_program, approval_source_map, traces) + if logic_error: + raise logic_error from ex + raise ex + + +def _create_simulate_traces(simulate: SimulateAtomicTransactionResponse) -> list[SimulationTrace]: + traces = [] + if hasattr(simulate, "simulate_response") and hasattr(simulate, "failed_at") and simulate.failed_at: + for txn_group in simulate.simulate_response["txn-groups"]: + app_budget_added = txn_group.get("app-budget-added", None) + app_budget_consumed = txn_group.get("app-budget-consumed", None) + failure_message = txn_group.get("failure-message", None) + 
txn_result = txn_group.get("txn-results", [{}])[0] + exec_trace = txn_result.get("exec-trace", {}) + traces.append( + SimulationTrace( + app_budget_added=app_budget_added, + app_budget_consumed=app_budget_consumed, + failure_message=failure_message, + exec_trace=exec_trace, + ) + ) + return traces + + +def _convert_transaction_parameters( + args: TransactionParameters | TransactionParametersDict | None, +) -> CreateCallParameters: + _args = args.__dict__ if isinstance(args, TransactionParameters) else (args or {}) + return CreateCallParameters(**_args) + + +def get_sender_from_signer(signer: TransactionSigner | None) -> str | None: + """Returns the associated address of a signer, return None if no address found""" + + if isinstance(signer, AccountTransactionSigner): + sender = address_from_private_key(signer.private_key) # type: ignore[no-untyped-call] + assert isinstance(sender, str) + return sender + elif isinstance(signer, MultisigTransactionSigner): + sender = signer.msig.address() # type: ignore[no-untyped-call] + assert isinstance(sender, str) + return sender + elif isinstance(signer, LogicSigTransactionSigner): + return signer.lsig.address() + return None + + +# TEMPORARY, use SDK one when available +def _parse_result( + methods: dict[int, Method], + txns: list[dict[str, Any]], + txids: list[str], +) -> list[ABIResult]: + method_results = [] + for i, tx_info in enumerate(txns): + raw_value = b"" + return_value = None + decode_error = None + + if i not in methods: + continue + + # Parse log for ABI method return value + try: + if methods[i].returns.type == Returns.VOID: + method_results.append( + ABIResult( + tx_id=txids[i], + raw_value=raw_value, + return_value=return_value, + decode_error=decode_error, + tx_info=tx_info, + method=methods[i], + ) + ) + continue + + logs = tx_info.get("logs", []) + + # Look for the last returned value in the log + if not logs: + raise Exception("No logs") + + result = logs[-1] + # Check that the first four bytes is the hash 
of "return" + result_bytes = base64.b64decode(result) + if len(result_bytes) < len(ABI_RETURN_HASH) or result_bytes[: len(ABI_RETURN_HASH)] != ABI_RETURN_HASH: + raise Exception("no logs") + + raw_value = result_bytes[4:] + abi_return_type = methods[i].returns.type + if isinstance(abi_return_type, ABIType): + return_value = abi_return_type.decode(raw_value) + else: + return_value = raw_value + + except Exception as e: + decode_error = e + + method_results.append( + ABIResult( + tx_id=txids[i], + raw_value=raw_value, + return_value=return_value, + decode_error=decode_error, + tx_info=tx_info, + method=methods[i], + ) + ) + + return method_results + + +def _increment_version(version: str) -> str: + split = list(map(int, version.split("."))) + split[-1] = split[-1] + 1 + return ".".join(str(x) for x in split) + + +def _str_or_hex(v: bytes) -> str: + decoded: str + try: + decoded = v.decode("utf-8") + except UnicodeDecodeError: + decoded = v.hex() + + return decoded + + +def _decode_state(state: list[dict[str, Any]], *, raw: bool = False) -> dict[str | bytes, bytes | str | int | None]: + decoded_state: dict[str | bytes, bytes | str | int | None] = {} + + for state_value in state: + raw_key = base64.b64decode(state_value["key"]) + + key: str | bytes = raw_key if raw else _str_or_hex(raw_key) + val: str | bytes | int | None + + action = state_value["value"]["action"] if "action" in state_value["value"] else state_value["value"]["type"] + + match action: + case 1: + raw_val = base64.b64decode(state_value["value"]["bytes"]) + val = raw_val if raw else _str_or_hex(raw_val) + case 2: + val = state_value["value"]["uint"] + case 3: + val = None + case _: + raise NotImplementedError + + decoded_state[key] = val + return decoded_state diff --git a/src/algokit_utils/_legacy_v2/application_specification.py b/src/algokit_utils/_legacy_v2/application_specification.py new file mode 100644 index 00000000..392fce8d --- /dev/null +++ 
b/src/algokit_utils/_legacy_v2/application_specification.py @@ -0,0 +1,206 @@ +import base64 +import dataclasses +import json +from enum import IntFlag +from pathlib import Path +from typing import Any, Literal, TypeAlias, TypedDict + +from algosdk.abi import Contract +from algosdk.abi.method import MethodDict +from algosdk.transaction import StateSchema + +__all__ = [ + "CallConfig", + "DefaultArgumentDict", + "DefaultArgumentType", + "MethodConfigDict", + "OnCompleteActionName", + "MethodHints", + "ApplicationSpecification", + "AppSpecStateDict", +] + + +AppSpecStateDict: TypeAlias = dict[str, dict[str, dict]] +"""Type defining Application Specification state entries""" + + +class CallConfig(IntFlag): + """Describes the type of calls a method can be used for based on {py:class}`algosdk.transaction.OnComplete` type""" + + NEVER = 0 + """Never handle the specified on completion type""" + CALL = 1 + """Only handle the specified on completion type for application calls""" + CREATE = 2 + """Only handle the specified on completion type for application create calls""" + ALL = 3 + """Handle the specified on completion type for both create and normal application calls""" + + +class StructArgDict(TypedDict): + name: str + elements: list[list[str]] + + +OnCompleteActionName: TypeAlias = Literal[ + "no_op", "opt_in", "close_out", "clear_state", "update_application", "delete_application" +] +"""String literals representing on completion transaction types""" +MethodConfigDict: TypeAlias = dict[OnCompleteActionName, CallConfig] +"""Dictionary of `dict[OnCompletionActionName, CallConfig]` representing allowed actions for each on completion type""" +DefaultArgumentType: TypeAlias = Literal["abi-method", "local-state", "global-state", "constant"] +"""Literal values describing the types of default argument sources""" + + +class DefaultArgumentDict(TypedDict): + """ + DefaultArgument is a container for any arguments that may + be resolved prior to calling some target method + """ + 
+ source: DefaultArgumentType + data: int | str | bytes | MethodDict + + +StateDict = TypedDict( # need to use function-form of TypedDict here since "global" is a reserved keyword + "StateDict", {"global": AppSpecStateDict, "local": AppSpecStateDict} +) + + +@dataclasses.dataclass(kw_only=True) +class MethodHints: + """MethodHints provides hints to the caller about how to call the method""" + + #: hint to indicate this method can be called through Dryrun + read_only: bool = False + #: hint to provide names for tuple argument indices + #: method_name=>param_name=>{name:str, elements:[str,str]} + structs: dict[str, StructArgDict] = dataclasses.field(default_factory=dict) + #: defaults + default_arguments: dict[str, DefaultArgumentDict] = dataclasses.field(default_factory=dict) + call_config: MethodConfigDict = dataclasses.field(default_factory=dict) + + def empty(self) -> bool: + return not self.dictify() + + def dictify(self) -> dict[str, Any]: + d: dict[str, Any] = {} + if self.read_only: + d["read_only"] = True + if self.default_arguments: + d["default_arguments"] = self.default_arguments + if self.structs: + d["structs"] = self.structs + if any(v for v in self.call_config.values() if v != CallConfig.NEVER): + d["call_config"] = _encode_method_config(self.call_config) + return d + + @staticmethod + def undictify(data: dict[str, Any]) -> "MethodHints": + return MethodHints( + read_only=data.get("read_only", False), + default_arguments=data.get("default_arguments", {}), + structs=data.get("structs", {}), + call_config=_decode_method_config(data.get("call_config", {})), + ) + + +def _encode_method_config(mc: MethodConfigDict) -> dict[str, str | None]: + return {k: mc[k].name for k in sorted(mc) if mc[k] != CallConfig.NEVER} + + +def _decode_method_config(data: dict[OnCompleteActionName, Any]) -> MethodConfigDict: + return {k: CallConfig[v] for k, v in data.items()} + + +def _encode_source(teal_text: str) -> str: + return 
base64.b64encode(teal_text.encode()).decode("utf-8") + + +def _decode_source(b64_text: str) -> str: + return base64.b64decode(b64_text).decode("utf-8") + + +def _encode_state_schema(schema: StateSchema) -> dict[str, int]: + return { + "num_byte_slices": schema.num_byte_slices, + "num_uints": schema.num_uints, + } + + +def _decode_state_schema(data: dict[str, int]) -> StateSchema: + return StateSchema( # type: ignore[no-untyped-call] + num_byte_slices=data.get("num_byte_slices", 0), + num_uints=data.get("num_uints", 0), + ) + + +@dataclasses.dataclass(kw_only=True) +class ApplicationSpecification: + """ARC-0032 application specification + + See """ + + approval_program: str + clear_program: str + contract: Contract + hints: dict[str, MethodHints] + schema: StateDict + global_state_schema: StateSchema + local_state_schema: StateSchema + bare_call_config: MethodConfigDict + + def dictify(self) -> dict: + return { + "hints": {k: v.dictify() for k, v in self.hints.items() if not v.empty()}, + "source": { + "approval": _encode_source(self.approval_program), + "clear": _encode_source(self.clear_program), + }, + "state": { + "global": _encode_state_schema(self.global_state_schema), + "local": _encode_state_schema(self.local_state_schema), + }, + "schema": self.schema, + "contract": self.contract.dictify(), + "bare_call_config": _encode_method_config(self.bare_call_config), + } + + def to_json(self) -> str: + return json.dumps(self.dictify(), indent=4) + + @staticmethod + def from_json(application_spec: str) -> "ApplicationSpecification": + json_spec = json.loads(application_spec) + return ApplicationSpecification( + approval_program=_decode_source(json_spec["source"]["approval"]), + clear_program=_decode_source(json_spec["source"]["clear"]), + schema=json_spec["schema"], + global_state_schema=_decode_state_schema(json_spec["state"]["global"]), + local_state_schema=_decode_state_schema(json_spec["state"]["local"]), + contract=Contract.undictify(json_spec["contract"]), + 
hints={k: MethodHints.undictify(v) for k, v in json_spec["hints"].items()}, + bare_call_config=_decode_method_config(json_spec.get("bare_call_config", {})), + ) + + def export(self, directory: Path | str | None = None) -> None: + """write out the artifacts generated by the application to disk + + Args: + directory(optional): path to the directory where the artifacts should be written + """ + if directory is None: + output_dir = Path.cwd() + else: + output_dir = Path(directory) + output_dir.mkdir(exist_ok=True, parents=True) + + (output_dir / "approval.teal").write_text(self.approval_program) + (output_dir / "clear.teal").write_text(self.clear_program) + (output_dir / "contract.json").write_text(json.dumps(self.contract.dictify(), indent=4)) + (output_dir / "application.json").write_text(self.to_json()) + + +def _state_schema(schema: dict[str, int]) -> StateSchema: + return StateSchema(schema.get("num-uint", 0), schema.get("num-byte-slice", 0)) # type: ignore[no-untyped-call] diff --git a/src/algokit_utils/_legacy_v2/asset.py b/src/algokit_utils/_legacy_v2/asset.py new file mode 100644 index 00000000..2ef4860f --- /dev/null +++ b/src/algokit_utils/_legacy_v2/asset.py @@ -0,0 +1,168 @@ +import logging +from typing import TYPE_CHECKING + +from algosdk.atomic_transaction_composer import AtomicTransactionComposer, TransactionWithSigner +from algosdk.constants import TX_GROUP_LIMIT +from algosdk.transaction import AssetTransferTxn + +if TYPE_CHECKING: + from algosdk.v2client.algod import AlgodClient + +from enum import Enum, auto + +from algokit_utils._legacy_v2.models import Account + +__all__ = ["opt_in", "opt_out"] +logger = logging.getLogger(__name__) + + +class ValidationType(Enum): + OPTIN = auto() + OPTOUT = auto() + + +def _ensure_account_is_valid(algod_client: "AlgodClient", account: Account) -> None: + try: + algod_client.account_info(account.address) + except Exception as err: + error_message = f"Account address{account.address} does not exist" + 
logger.debug(error_message) + raise err + + +def _ensure_asset_balance_conditions( + algod_client: "AlgodClient", account: Account, asset_ids: list, validation_type: ValidationType +) -> None: + invalid_asset_ids = [] + account_info = algod_client.account_info(account.address) + account_assets = account_info.get("assets", []) # type: ignore # noqa: PGH003 + for asset_id in asset_ids: + asset_exists_in_account_info = any(asset["asset-id"] == asset_id for asset in account_assets) + if validation_type == ValidationType.OPTIN: + if asset_exists_in_account_info: + logger.debug(f"Asset {asset_id} is already opted in for account {account.address}") + invalid_asset_ids.append(asset_id) + + elif validation_type == ValidationType.OPTOUT: + if not account_assets or not asset_exists_in_account_info: + logger.debug(f"Account {account.address} does not have asset {asset_id}") + invalid_asset_ids.append(asset_id) + else: + asset_balance = next((asset["amount"] for asset in account_assets if asset["asset-id"] == asset_id), 0) + if asset_balance != 0: + logger.debug(f"Asset {asset_id} balance is not zero") + invalid_asset_ids.append(asset_id) + + if len(invalid_asset_ids) > 0: + action = "opted out" if validation_type == ValidationType.OPTOUT else "opted in" + condition_message = ( + "their amount is zero and that the account has" + if validation_type == ValidationType.OPTOUT + else "they are valid and that the account has not" + ) + + error_message = ( + f"Assets {invalid_asset_ids} cannot be {action}. Ensure that " + f"{condition_message} previously opted into them." + ) + raise ValueError(error_message) + + +def opt_in(algod_client: "AlgodClient", account: Account, asset_ids: list[int]) -> dict[int, str]: + """ + Opt-in to a list of assets on the Algorand blockchain. Before an account can receive a specific asset, + it must `opt-in` to receive it. 
An opt-in transaction places an asset holding of 0 into the account and increases + its minimum balance by [100,000 microAlgos](https://developer.algorand.org/docs/get-details/asa/#assets-overview). + + Args: + algod_client (AlgodClient): An instance of the AlgodClient class from the algosdk library. + account (Account): An instance of the Account class representing the account that wants to opt-in to the assets. + asset_ids (list[int]): A list of integers representing the asset IDs to opt-in to. + Returns: + dict[int, str]: A dictionary where the keys are the asset IDs and the values + are the transaction IDs for opting-in to each asset. + """ + _ensure_account_is_valid(algod_client, account) + _ensure_asset_balance_conditions(algod_client, account, asset_ids, ValidationType.OPTIN) + suggested_params = algod_client.suggested_params() + result = {} + for i in range(0, len(asset_ids), TX_GROUP_LIMIT): + atc = AtomicTransactionComposer() + chunk = asset_ids[i : i + TX_GROUP_LIMIT] + for asset_id in chunk: + asset = algod_client.asset_info(asset_id) + xfer_txn = AssetTransferTxn( + sp=suggested_params, + sender=account.address, + receiver=account.address, + close_assets_to=None, + revocation_target=None, + amt=0, + note=f"opt in asset id ${asset_id}", + index=asset["index"], # type: ignore # noqa: PGH003 + rekey_to=None, + ) + + transaction_with_signer = TransactionWithSigner( + txn=xfer_txn, + signer=account.signer, + ) + atc.add_transaction(transaction_with_signer) + atc.execute(algod_client, 4) + + for index, asset_id in enumerate(chunk): + result[asset_id] = atc.tx_ids[index] + + return result + + +def opt_out(algod_client: "AlgodClient", account: Account, asset_ids: list[int]) -> dict[int, str]: + """ + Opt out from a list of Algorand Standard Assets (ASAs) by transferring them back to their creators. 
+ The account also recovers the Minimum Balance Requirement for the asset (100,000 microAlgos) + The `optOut` function manages the opt-out process, permitting the account to discontinue holding a group of assets. + + It's essential to note that an account can only opt_out of an asset if its balance of that asset is zero. + + Args: + algod_client (AlgodClient): An instance of the AlgodClient class from the `algosdk` library. + account (Account): An instance of the Account class that holds the private key and address for an account. + asset_ids (list[int]): A list of integers representing the asset IDs of the ASAs to opt out from. + Returns: + dict[int, str]: A dictionary where the keys are the asset IDs and the values are the transaction IDs of + the executed transactions. + + """ + _ensure_account_is_valid(algod_client, account) + _ensure_asset_balance_conditions(algod_client, account, asset_ids, ValidationType.OPTOUT) + suggested_params = algod_client.suggested_params() + result = {} + for i in range(0, len(asset_ids), TX_GROUP_LIMIT): + atc = AtomicTransactionComposer() + chunk = asset_ids[i : i + TX_GROUP_LIMIT] + for asset_id in chunk: + asset = algod_client.asset_info(asset_id) + asset_creator = asset["params"]["creator"] # type: ignore # noqa: PGH003 + xfer_txn = AssetTransferTxn( + sp=suggested_params, + sender=account.address, + receiver=account.address, + close_assets_to=asset_creator, + revocation_target=None, + amt=0, + note=f"opt out asset id ${asset_id}", + index=asset["index"], # type: ignore # noqa: PGH003 + rekey_to=None, + ) + + transaction_with_signer = TransactionWithSigner( + txn=xfer_txn, + signer=account.signer, + ) + atc.add_transaction(transaction_with_signer) + atc.execute(algod_client, 4) + + for index, asset_id in enumerate(chunk): + result[asset_id] = atc.tx_ids[index] + + return result diff --git a/src/algokit_utils/_legacy_v2/common.py b/src/algokit_utils/_legacy_v2/common.py new file mode 100644 index 00000000..cd412f82 --- /dev/null 
+++ b/src/algokit_utils/_legacy_v2/common.py @@ -0,0 +1,28 @@ +""" +This module contains common classes and methods that are reused in more than one file. +""" + +import base64 +import typing + +from algosdk.source_map import SourceMap + +from algokit_utils._legacy_v2.deploy import strip_comments + +if typing.TYPE_CHECKING: + from algosdk.v2client.algod import AlgodClient + + +class Program: + """A compiled TEAL program""" + + def __init__(self, program: str, client: "AlgodClient"): + """ + Fully compile the program source to binary and generate a + source map for matching pc to line number + """ + self.teal = program + result: dict = client.compile(strip_comments(self.teal), source_map=True) + self.raw_binary = base64.b64decode(result["result"]) + self.binary_hash: str = result["hash"] + self.source_map = SourceMap(result["sourcemap"]) diff --git a/src/algokit_utils/_legacy_v2/deploy.py b/src/algokit_utils/_legacy_v2/deploy.py new file mode 100644 index 00000000..561ce413 --- /dev/null +++ b/src/algokit_utils/_legacy_v2/deploy.py @@ -0,0 +1,897 @@ +import base64 +import dataclasses +import json +import logging +import re +from collections.abc import Iterable, Mapping, Sequence +from enum import Enum +from typing import TYPE_CHECKING, TypeAlias, TypedDict + +from algosdk import transaction +from algosdk.atomic_transaction_composer import AtomicTransactionComposer, TransactionSigner +from algosdk.logic import get_application_address +from algosdk.transaction import StateSchema + +from algokit_utils._legacy_v2.application_specification import ( + ApplicationSpecification, + CallConfig, + MethodConfigDict, + OnCompleteActionName, +) +from algokit_utils._legacy_v2.models import ( + ABIArgsDict, + ABIMethod, + Account, + CreateCallParameters, + TransactionResponse, +) + +if TYPE_CHECKING: + from algosdk.v2client.algod import AlgodClient + from algosdk.v2client.indexer import IndexerClient + + from algokit_utils._legacy_v2.application_client import ApplicationClient + + 
+__all__ = [ + "UPDATABLE_TEMPLATE_NAME", + "DELETABLE_TEMPLATE_NAME", + "NOTE_PREFIX", + "ABICallArgs", + "ABICreateCallArgs", + "ABICallArgsDict", + "ABICreateCallArgsDict", + "DeploymentFailedError", + "AppReference", + "AppDeployMetaData", + "AppMetaData", + "AppLookup", + "DeployCallArgs", + "DeployCreateCallArgs", + "DeployCallArgsDict", + "DeployCreateCallArgsDict", + "Deployer", + "DeployResponse", + "OnUpdate", + "OnSchemaBreak", + "OperationPerformed", + "TemplateValueDict", + "TemplateValueMapping", + "get_app_id_from_tx_id", + "get_creator_apps", + "replace_template_variables", +] + +logger = logging.getLogger(__name__) + +DEFAULT_INDEXER_MAX_API_RESOURCES_PER_ACCOUNT = 1000 +_UPDATABLE = "UPDATABLE" +_DELETABLE = "DELETABLE" +UPDATABLE_TEMPLATE_NAME = f"TMPL_{_UPDATABLE}" +"""Template variable name used to control if a smart contract is updatable or not at deployment""" +DELETABLE_TEMPLATE_NAME = f"TMPL_{_DELETABLE}" +"""Template variable name used to control if a smart contract is deletable or not at deployment""" +_TOKEN_PATTERN = re.compile(r"TMPL_[A-Z_]+") +TemplateValue: TypeAlias = int | str | bytes +TemplateValueDict: TypeAlias = dict[str, TemplateValue] +"""Dictionary of `dict[str, int | str | bytes]` representing template variable names and values""" +TemplateValueMapping: TypeAlias = Mapping[str, TemplateValue] +"""Mapping of `str` to `int | str | bytes` representing template variable names and values""" + +NOTE_PREFIX = "ALGOKIT_DEPLOYER:j" +"""ARC-0002 compliant note prefix for algokit_utils deployed applications""" +# This prefix is also used to filter for parsable transaction notes in get_creator_apps. +# However, as the note is base64 encoded first we need to consider it's base64 representation. +# When base64 encoding bytes, 3 bytes are stored in every 4 characters. 
+# So then we don't need to worry about the padding/changing characters of the prefix if it was followed by +# additional characters, assert the NOTE_PREFIX length is a multiple of 3. +assert len(NOTE_PREFIX) % 3 == 0 + + +class DeploymentFailedError(Exception): + pass + + +@dataclasses.dataclass +class AppReference: + """Information about an Algorand app""" + + app_id: int + app_address: str + + +@dataclasses.dataclass +class AppDeployMetaData: + """Metadata about an application stored in a transaction note during creation. + + The note is serialized as JSON and prefixed with {py:data}`NOTE_PREFIX` and stored in the transaction note field + as part of {py:meth}`ApplicationClient.deploy` + """ + + name: str + version: str + deletable: bool | None + updatable: bool | None + + @staticmethod + def from_json(value: str) -> "AppDeployMetaData": + json_value: dict = json.loads(value) + json_value.setdefault("deletable", None) + json_value.setdefault("updatable", None) + return AppDeployMetaData(**json_value) + + @classmethod + def from_b64(cls: type["AppDeployMetaData"], b64: str) -> "AppDeployMetaData": + return cls.decode(base64.b64decode(b64)) + + @classmethod + def decode(cls: type["AppDeployMetaData"], value: bytes) -> "AppDeployMetaData": + note = value.decode("utf-8") + assert note.startswith(NOTE_PREFIX) + return cls.from_json(note[len(NOTE_PREFIX) :]) + + def encode(self) -> bytes: + json_str = json.dumps(self.__dict__) + return f"{NOTE_PREFIX}{json_str}".encode() + + +@dataclasses.dataclass +class AppMetaData(AppReference, AppDeployMetaData): + """Metadata about a deployed app""" + + created_round: int + updated_round: int + created_metadata: AppDeployMetaData + deleted: bool + + +@dataclasses.dataclass +class AppLookup: + """Cache of {py:class}`AppMetaData` for a specific `creator` + + Can be used as an argument to {py:class}`ApplicationClient` to reduce the number of calls when deploying multiple + apps or discovering multiple app_ids + """ + + creator: str + 
apps: dict[str, AppMetaData] = dataclasses.field(default_factory=dict) + + +def _sort_by_round(txn: dict) -> tuple[int, int]: + confirmed = txn["confirmed-round"] + offset = txn["intra-round-offset"] + return confirmed, offset + + +def _parse_note(metadata_b64: str | None) -> AppDeployMetaData | None: + if not metadata_b64: + return None + # noinspection PyBroadException + try: + return AppDeployMetaData.from_b64(metadata_b64) + except Exception: + return None + + +def get_creator_apps(indexer: "IndexerClient", creator_account: Account | str) -> AppLookup: + """Returns a mapping of Application names to {py:class}`AppMetaData` for all Applications created by specified + creator that have a transaction note containing {py:class}`AppDeployMetaData` + """ + apps: dict[str, AppMetaData] = {} + + creator_address = creator_account if isinstance(creator_account, str) else creator_account.address + token = None + # TODO: paginated indexer call instead of N + 1 calls + while True: + response = indexer.lookup_account_application_by_creator( + creator_address, limit=DEFAULT_INDEXER_MAX_API_RESOURCES_PER_ACCOUNT, next_page=token + ) # type: ignore[no-untyped-call] + if "message" in response: # an error occurred + raise Exception(f"Error querying applications for {creator_address}: {response}") + for app in response["applications"]: + app_id = app["id"] + app_created_at_round = app["created-at-round"] + app_deleted = app.get("deleted", False) + search_transactions_response = indexer.search_transactions( + min_round=app_created_at_round, + txn_type="appl", + application_id=app_id, + address=creator_address, + address_role="sender", + note_prefix=NOTE_PREFIX.encode("utf-8"), + ) # type: ignore[no-untyped-call] + transactions: list[dict] = search_transactions_response["transactions"] + if not transactions: + continue + + created_transaction = next( + t + for t in transactions + if t["application-transaction"]["application-id"] == 0 and t["sender"] == creator_address + ) + + 
transactions.sort(key=_sort_by_round, reverse=True) + latest_transaction = transactions[0] + app_updated_at_round = latest_transaction["confirmed-round"] + + create_metadata = _parse_note(created_transaction.get("note")) + update_metadata = _parse_note(latest_transaction.get("note")) + + if create_metadata and create_metadata.name: + apps[create_metadata.name] = AppMetaData( + app_id=app_id, + app_address=get_application_address(app_id), + created_metadata=create_metadata, + created_round=app_created_at_round, + **(update_metadata or create_metadata).__dict__, + updated_round=app_updated_at_round, + deleted=app_deleted, + ) + + token = response.get("next-token") + if not token: + break + + return AppLookup(creator_address, apps) + + +def _state_schema(schema: dict[str, int]) -> StateSchema: + return StateSchema(schema.get("num-uint", 0), schema.get("num-byte-slice", 0)) # type: ignore[no-untyped-call] + + +def _describe_schema_breaks(prefix: str, from_schema: StateSchema, to_schema: StateSchema) -> Iterable[str]: + if to_schema.num_uints > from_schema.num_uints: + yield f"{prefix} uints increased from {from_schema.num_uints} to {to_schema.num_uints}" + if to_schema.num_byte_slices > from_schema.num_byte_slices: + yield f"{prefix} byte slices increased from {from_schema.num_byte_slices} to {to_schema.num_byte_slices}" + + +@dataclasses.dataclass(kw_only=True) +class AppChanges: + app_updated: bool + schema_breaking_change: bool + schema_change_description: str | None + + +def check_for_app_changes( # noqa: PLR0913 + algod_client: "AlgodClient", + *, + new_approval: bytes, + new_clear: bytes, + new_global_schema: StateSchema, + new_local_schema: StateSchema, + app_id: int, +) -> AppChanges: + application_info = algod_client.application_info(app_id) + assert isinstance(application_info, dict) + application_create_params = application_info["params"] + + current_approval = base64.b64decode(application_create_params["approval-program"]) + current_clear = 
base64.b64decode(application_create_params["clear-state-program"]) + current_global_schema = _state_schema(application_create_params["global-state-schema"]) + current_local_schema = _state_schema(application_create_params["local-state-schema"]) + + app_updated = current_approval != new_approval or current_clear != new_clear + + schema_changes: list[str] = [] + schema_changes.extend(_describe_schema_breaks("Global", current_global_schema, new_global_schema)) + schema_changes.extend(_describe_schema_breaks("Local", current_local_schema, new_local_schema)) + + return AppChanges( + app_updated=app_updated, + schema_breaking_change=bool(schema_changes), + schema_change_description=", ".join(schema_changes), + ) + + +def _is_valid_token_character(char: str) -> bool: + return char.isalnum() or char == "_" + + +def _replace_template_variable(program_lines: list[str], template_variable: str, value: str) -> tuple[list[str], int]: + result: list[str] = [] + match_count = 0 + token = f"TMPL_{template_variable}" + token_idx_offset = len(value) - len(token) + for line in program_lines: + comment_idx = _find_unquoted_string(line, "//") + if comment_idx is None: + comment_idx = len(line) + code = line[:comment_idx] + comment = line[comment_idx:] + trailing_idx = 0 + while True: + token_idx = _find_template_token(code, token, trailing_idx) + if token_idx is None: + break + + trailing_idx = token_idx + len(token) + prefix = code[:token_idx] + suffix = code[trailing_idx:] + code = f"{prefix}{value}{suffix}" + match_count += 1 + trailing_idx += token_idx_offset + result.append(code + comment) + return result, match_count + + +def add_deploy_template_variables( + template_values: TemplateValueDict, allow_update: bool | None, allow_delete: bool | None +) -> None: + if allow_update is not None: + template_values[_UPDATABLE] = int(allow_update) + if allow_delete is not None: + template_values[_DELETABLE] = int(allow_delete) + + +def _find_unquoted_string(line: str, token: str, start: int 
= 0, end: int = -1) -> int | None: + """Find the first string within a line of TEAL. Only matches outside of quotes and base64 are returned. + Returns None if not found""" + + if end < 0: + end = len(line) + idx = start + in_quotes = in_base64 = False + while idx < end: + current_char = line[idx] + match current_char: + # enter base64 + case " " | "(" if not in_quotes and _last_token_base64(line, idx): + in_base64 = True + # exit base64 + case " " | ")" if not in_quotes and in_base64: + in_base64 = False + # escaped char + case "\\" if in_quotes: + # skip next character + idx += 1 + # quote boundary + case '"': + in_quotes = not in_quotes + # can test for match + case _ if not in_quotes and not in_base64 and line.startswith(token, idx): + # only match if not in quotes and string matches + return idx + idx += 1 + return None + + +def _last_token_base64(line: str, idx: int) -> bool: + try: + *_, last = line[:idx].split() + except ValueError: + return False + return last in ("base64", "b64") + + +def _find_template_token(line: str, token: str, start: int = 0, end: int = -1) -> int | None: + """Find the first template token within a line of TEAL. Only matches outside of quotes are returned. + Only full token matches are returned, i.e. 
TMPL_STR will not match against TMPL_STRING + Returns None if not found""" + if end < 0: + end = len(line) + + idx = start + while idx < end: + token_idx = _find_unquoted_string(line, token, idx, end) + if token_idx is None: + break + trailing_idx = token_idx + len(token) + if (token_idx == 0 or not _is_valid_token_character(line[token_idx - 1])) and ( # word boundary at start + trailing_idx >= len(line) or not _is_valid_token_character(line[trailing_idx]) # word boundary at end + ): + return token_idx + idx = trailing_idx + return None + + +def _strip_comment(line: str) -> str: + comment_idx = _find_unquoted_string(line, "//") + if comment_idx is None: + return line + return line[:comment_idx].rstrip() + + +def strip_comments(program: str) -> str: + return "\n".join(_strip_comment(line) for line in program.splitlines()) + + +def _has_token(program_without_comments: str, token: str) -> bool: + for line in program_without_comments.splitlines(): + token_idx = _find_template_token(line, token) + if token_idx is not None: + return True + return False + + +def _find_tokens(stripped_approval_program: str) -> list[str]: + return _TOKEN_PATTERN.findall(stripped_approval_program) + + +def check_template_variables(approval_program: str, template_values: TemplateValueDict) -> None: + approval_program = strip_comments(approval_program) + if _has_token(approval_program, UPDATABLE_TEMPLATE_NAME) and _UPDATABLE not in template_values: + raise DeploymentFailedError( + "allow_update must be specified if deploy time configuration of update is being used" + ) + if _has_token(approval_program, DELETABLE_TEMPLATE_NAME) and _DELETABLE not in template_values: + raise DeploymentFailedError( + "allow_delete must be specified if deploy time configuration of delete is being used" + ) + all_tokens = _find_tokens(approval_program) + missing_values = [token for token in all_tokens if token[len("TMPL_") :] not in template_values] + if missing_values: + raise DeploymentFailedError(f"The following 
template values were not provided: {', '.join(missing_values)}") + + for template_variable_name in template_values: + tmpl_variable = f"TMPL_{template_variable_name}" + if not _has_token(approval_program, tmpl_variable): + if template_variable_name == _UPDATABLE: + raise DeploymentFailedError( + "allow_update must only be specified if deploy time configuration of update is being used" + ) + if template_variable_name == _DELETABLE: + raise DeploymentFailedError( + "allow_delete must only be specified if deploy time configuration of delete is being used" + ) + logger.warning(f"{tmpl_variable} not found in approval program, but variable was provided") + + +def replace_template_variables(program: str, template_values: TemplateValueMapping) -> str: + """Replaces `TMPL_*` variables in `program` with `template_values` + + ```{note} + `template_values` keys should *NOT* be prefixed with `TMPL_` + ``` + """ + program_lines = program.splitlines() + for template_variable_name, template_value in template_values.items(): + match template_value: + case int(): + value = str(template_value) + case str(): + value = "0x" + template_value.encode("utf-8").hex() + case bytes(): + value = "0x" + template_value.hex() + case _: + raise DeploymentFailedError( + f"Unexpected template value type {template_variable_name}: {template_value.__class__}" + ) + + program_lines, matches = _replace_template_variable(program_lines, template_variable_name, value) + + return "\n".join(program_lines) + + +def has_template_vars(app_spec: ApplicationSpecification) -> bool: + return "TMPL_" in strip_comments(app_spec.approval_program) or "TMPL_" in strip_comments(app_spec.clear_program) + + +def get_deploy_control( + app_spec: ApplicationSpecification, template_var: str, on_complete: transaction.OnComplete +) -> bool | None: + if template_var not in strip_comments(app_spec.approval_program): + return None + return get_call_config(app_spec.bare_call_config, on_complete) != CallConfig.NEVER or any( + h for h 
in app_spec.hints.values() if get_call_config(h.call_config, on_complete) != CallConfig.NEVER + ) + + +def get_call_config(method_config: MethodConfigDict, on_complete: transaction.OnComplete) -> CallConfig: + def get(key: OnCompleteActionName) -> CallConfig: + return method_config.get(key, CallConfig.NEVER) + + match on_complete: + case transaction.OnComplete.NoOpOC: + return get("no_op") + case transaction.OnComplete.UpdateApplicationOC: + return get("update_application") + case transaction.OnComplete.DeleteApplicationOC: + return get("delete_application") + case transaction.OnComplete.OptInOC: + return get("opt_in") + case transaction.OnComplete.CloseOutOC: + return get("close_out") + case transaction.OnComplete.ClearStateOC: + return get("clear_state") + + +class OnUpdate(Enum): + """Action to take if an Application has been updated""" + + Fail = 0 + """Fail the deployment""" + UpdateApp = 1 + """Update the Application with the new approval and clear programs""" + ReplaceApp = 2 + """Create a new Application and delete the old Application in a single transaction""" + AppendApp = 3 + """Create a new application""" + + +class OnSchemaBreak(Enum): + """Action to take if an Application's schema has breaking changes""" + + Fail = 0 + """Fail the deployment""" + ReplaceApp = 2 + """Create a new Application and delete the old Application in a single transaction""" + AppendApp = 3 + """Create a new Application""" + + +class OperationPerformed(Enum): + """Describes the actions taken during deployment""" + + Nothing = 0 + """An existing Application was found""" + Create = 1 + """No existing Application was found, created a new Application""" + Update = 2 + """An existing Application was found, but was out of date, updated to latest version""" + Replace = 3 + """An existing Application was found, but was out of date, created a new Application and deleted the original""" + + +@dataclasses.dataclass(kw_only=True) +class DeployResponse: + """Describes the action taken during 
deployment, related transactions and the {py:class}`AppMetaData`""" + + app: AppMetaData + create_response: TransactionResponse | None = None + delete_response: TransactionResponse | None = None + update_response: TransactionResponse | None = None + action_taken: OperationPerformed = OperationPerformed.Nothing + + +@dataclasses.dataclass(kw_only=True) +class DeployCallArgs: + """Parameters used to update or delete an application when calling + {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + suggested_params: transaction.SuggestedParams | None = None + lease: bytes | str | None = None + accounts: list[str] | None = None + foreign_apps: list[int] | None = None + foreign_assets: list[int] | None = None + boxes: Sequence[tuple[int, bytes | bytearray | str | int]] | None = None + rekey_to: str | None = None + + +@dataclasses.dataclass(kw_only=True) +class ABICall: + method: ABIMethod | bool | None = None + args: ABIArgsDict = dataclasses.field(default_factory=dict) + + +@dataclasses.dataclass(kw_only=True) +class DeployCreateCallArgs(DeployCallArgs): + """Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + extra_pages: int | None = None + on_complete: transaction.OnComplete | None = None + + +@dataclasses.dataclass(kw_only=True) +class ABICallArgs(DeployCallArgs, ABICall): + """ABI Parameters used to update or delete an application when calling + {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + +@dataclasses.dataclass(kw_only=True) +class ABICreateCallArgs(DeployCreateCallArgs, ABICall): + """ABI Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + +class DeployCallArgsDict(TypedDict, total=False): + """Parameters used to update or delete an application when calling + {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + suggested_params: transaction.SuggestedParams + lease: bytes | str + accounts: list[str] + foreign_apps: 
list[int] + foreign_assets: list[int] + boxes: Sequence[tuple[int, bytes | bytearray | str | int]] + rekey_to: str + + +class ABICallArgsDict(DeployCallArgsDict, TypedDict, total=False): + """ABI Parameters used to update or delete an application when calling + {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + method: ABIMethod | bool + args: ABIArgsDict + + +class DeployCreateCallArgsDict(DeployCallArgsDict, TypedDict, total=False): + """Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + extra_pages: int | None + on_complete: transaction.OnComplete + + +class ABICreateCallArgsDict(DeployCreateCallArgsDict, TypedDict, total=False): + """ABI Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" + + method: ABIMethod | bool + args: ABIArgsDict + + +@dataclasses.dataclass(kw_only=True) +class Deployer: + app_client: "ApplicationClient" + creator: str + signer: TransactionSigner + sender: str + existing_app_metadata_or_reference: AppReference | AppMetaData + new_app_metadata: AppDeployMetaData + on_update: OnUpdate + on_schema_break: OnSchemaBreak + create_args: ABICreateCallArgs | ABICreateCallArgsDict | DeployCreateCallArgs | None + update_args: ABICallArgs | ABICallArgsDict | DeployCallArgs | None + delete_args: ABICallArgs | ABICallArgsDict | DeployCallArgs | None + + def deploy(self) -> DeployResponse: + """Ensures app associated with app client's creator is present and up to date""" + assert self.app_client.approval + assert self.app_client.clear + + if self.existing_app_metadata_or_reference.app_id == 0: + logger.info(f"{self.new_app_metadata.name} not found in {self.creator} account, deploying app.") + return self._create_app() + + assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) + logger.debug( + f"{self.existing_app_metadata_or_reference.name} found in {self.creator} account, " + f"with app id 
{self.existing_app_metadata_or_reference.app_id}, " + f"version={self.existing_app_metadata_or_reference.version}." + ) + + app_changes = check_for_app_changes( + self.app_client.algod_client, + new_approval=self.app_client.approval.raw_binary, + new_clear=self.app_client.clear.raw_binary, + new_global_schema=self.app_client.app_spec.global_state_schema, + new_local_schema=self.app_client.app_spec.local_state_schema, + app_id=self.existing_app_metadata_or_reference.app_id, + ) + + if app_changes.schema_breaking_change: + logger.warning(f"Detected a breaking app schema change: {app_changes.schema_change_description}") + return self._deploy_breaking_change() + + if app_changes.app_updated: + logger.info(f"Detected a TEAL update in app id {self.existing_app_metadata_or_reference.app_id}") + return self._deploy_update() + + logger.info("No detected changes in app, nothing to do.") + return DeployResponse(app=self.existing_app_metadata_or_reference) + + def _deploy_breaking_change(self) -> DeployResponse: + assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) + if self.on_schema_break == OnSchemaBreak.Fail: + raise DeploymentFailedError( + "Schema break detected and on_schema_break=OnSchemaBreak.Fail, stopping deployment. 
" + "If you want to try deleting and recreating the app then " + "re-run with on_schema_break=OnSchemaBreak.ReplaceApp" + ) + if self.on_schema_break == OnSchemaBreak.AppendApp: + logger.info("Schema break detected and on_schema_break=AppendApp, will attempt to create new app") + return self._create_app() + + if self.existing_app_metadata_or_reference.deletable: + logger.info( + "App is deletable and on_schema_break=ReplaceApp, will attempt to create new app and delete old app" + ) + elif self.existing_app_metadata_or_reference.deletable is False: + logger.warning( + "App is not deletable but on_schema_break=ReplaceApp, " + "will attempt to delete app, delete will most likely fail" + ) + else: + logger.warning( + "Cannot determine if App is deletable but on_schema_break=ReplaceApp, will attempt to delete app" + ) + return self._create_and_delete_app() + + def _deploy_update(self) -> DeployResponse: + assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) + if self.on_update == OnUpdate.Fail: + raise DeploymentFailedError( + "Update detected and on_update=Fail, stopping deployment. 
" + "If you want to try updating the app then re-run with on_update=UpdateApp" + ) + if self.on_update == OnUpdate.AppendApp: + logger.info("Update detected and on_update=AppendApp, will attempt to create new app") + return self._create_app() + elif self.existing_app_metadata_or_reference.updatable and self.on_update == OnUpdate.UpdateApp: + logger.info("App is updatable and on_update=UpdateApp, will update app") + return self._update_app() + elif self.existing_app_metadata_or_reference.updatable and self.on_update == OnUpdate.ReplaceApp: + logger.warning( + "App is updatable but on_update=ReplaceApp, will attempt to create new app and delete old app" + ) + return self._create_and_delete_app() + elif self.on_update == OnUpdate.ReplaceApp: + if self.existing_app_metadata_or_reference.updatable is False: + logger.warning( + "App is not updatable and on_update=ReplaceApp, " + "will attempt to create new app and delete old app" + ) + else: + logger.warning( + "Cannot determine if App is updatable and on_update=ReplaceApp, " + "will attempt to create new app and delete old app" + ) + return self._create_and_delete_app() + else: + if self.existing_app_metadata_or_reference.updatable is False: + logger.warning( + "App is not updatable but on_update=UpdateApp, " + "will attempt to update app, update will most likely fail" + ) + else: + logger.warning( + "Cannot determine if App is updatable and on_update=UpdateApp, will attempt to update app" + ) + return self._update_app() + + def _create_app(self) -> DeployResponse: + assert self.app_client.existing_deployments + + method, abi_args, parameters = _convert_deploy_args( + self.create_args, self.new_app_metadata, self.signer, self.sender + ) + create_response = self.app_client.create( + method, + parameters, + **abi_args, + ) + logger.info( + f"{self.new_app_metadata.name} ({self.new_app_metadata.version}) deployed successfully, " + f"with app id {self.app_client.app_id}." 
+ ) + assert create_response.confirmed_round is not None + app_metadata = _create_metadata(self.new_app_metadata, self.app_client.app_id, create_response.confirmed_round) + self.app_client.existing_deployments.apps[self.new_app_metadata.name] = app_metadata + return DeployResponse(app=app_metadata, create_response=create_response, action_taken=OperationPerformed.Create) + + def _create_and_delete_app(self) -> DeployResponse: + assert self.app_client.existing_deployments + assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) + + logger.info( + f"Replacing {self.existing_app_metadata_or_reference.name} " + f"({self.existing_app_metadata_or_reference.version}) with " + f"{self.new_app_metadata.name} ({self.new_app_metadata.version}) in {self.creator} account." + ) + atc = AtomicTransactionComposer() + create_method, create_abi_args, create_parameters = _convert_deploy_args( + self.create_args, self.new_app_metadata, self.signer, self.sender + ) + self.app_client.compose_create( + atc, + create_method, + create_parameters, + **create_abi_args, + ) + create_txn_index = len(atc.txn_list) - 1 + delete_method, delete_abi_args, delete_parameters = _convert_deploy_args( + self.delete_args, self.new_app_metadata, self.signer, self.sender + ) + self.app_client.compose_delete( + atc, + delete_method, + delete_parameters, + **delete_abi_args, + ) + delete_txn_index = len(atc.txn_list) - 1 + create_delete_response = self.app_client.execute_atc(atc) + create_response = TransactionResponse.from_atr(create_delete_response, create_txn_index) + delete_response = TransactionResponse.from_atr(create_delete_response, delete_txn_index) + self.app_client.app_id = get_app_id_from_tx_id(self.app_client.algod_client, create_response.tx_id) + logger.info( + f"{self.new_app_metadata.name} ({self.new_app_metadata.version}) deployed successfully, " + f"with app id {self.app_client.app_id}." 
+ ) + logger.info( + f"{self.existing_app_metadata_or_reference.name} " + f"({self.existing_app_metadata_or_reference.version}) with app id " + f"{self.existing_app_metadata_or_reference.app_id}, deleted successfully." + ) + + app_metadata = _create_metadata( + self.new_app_metadata, self.app_client.app_id, create_delete_response.confirmed_round + ) + self.app_client.existing_deployments.apps[self.new_app_metadata.name] = app_metadata + + return DeployResponse( + app=app_metadata, + create_response=create_response, + delete_response=delete_response, + action_taken=OperationPerformed.Replace, + ) + + def _update_app(self) -> DeployResponse: + assert self.app_client.existing_deployments + assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) + + logger.info( + f"Updating {self.existing_app_metadata_or_reference.name} to {self.new_app_metadata.version} in " + f"{self.creator} account, with app id {self.existing_app_metadata_or_reference.app_id}" + ) + method, abi_args, parameters = _convert_deploy_args( + self.update_args, self.new_app_metadata, self.signer, self.sender + ) + update_response = self.app_client.update( + method, + parameters, + **abi_args, + ) + app_metadata = _create_metadata( + self.new_app_metadata, + self.app_client.app_id, + self.existing_app_metadata_or_reference.created_round, + updated_round=update_response.confirmed_round, + original_metadata=self.existing_app_metadata_or_reference.created_metadata, + ) + self.app_client.existing_deployments.apps[self.new_app_metadata.name] = app_metadata + return DeployResponse(app=app_metadata, update_response=update_response, action_taken=OperationPerformed.Update) + + +def _create_metadata( + app_spec_note: AppDeployMetaData, + app_id: int, + created_round: int, + updated_round: int | None = None, + original_metadata: AppDeployMetaData | None = None, +) -> AppMetaData: + return AppMetaData( + app_id=app_id, + app_address=get_application_address(app_id), + created_metadata=original_metadata 
or app_spec_note, + created_round=created_round, + updated_round=updated_round or created_round, + name=app_spec_note.name, + version=app_spec_note.version, + deletable=app_spec_note.deletable, + updatable=app_spec_note.updatable, + deleted=False, + ) + + +def _convert_deploy_args( + _args: DeployCallArgs | DeployCallArgsDict | None, + note: AppDeployMetaData, + signer: TransactionSigner | None, + sender: str | None, +) -> tuple[ABIMethod | bool | None, ABIArgsDict, CreateCallParameters]: + args = _args.__dict__ if isinstance(_args, DeployCallArgs) else dict(_args or {}) + + # return most derived type, unused parameters are ignored + parameters = CreateCallParameters( + note=note.encode(), + signer=signer, + sender=sender, + suggested_params=args.get("suggested_params"), + lease=args.get("lease"), + accounts=args.get("accounts"), + foreign_assets=args.get("foreign_assets"), + foreign_apps=args.get("foreign_apps"), + boxes=args.get("boxes"), + rekey_to=args.get("rekey_to"), + extra_pages=args.get("extra_pages"), + on_complete=args.get("on_complete"), + ) + + return args.get("method"), args.get("args") or {}, parameters + + +def get_app_id_from_tx_id(algod_client: "AlgodClient", tx_id: str) -> int: + """Finds the app_id for provided transaction id""" + result = algod_client.pending_transaction_info(tx_id) + assert isinstance(result, dict) + app_id = result["application-index"] + assert isinstance(app_id, int) + return app_id diff --git a/src/algokit_utils/_legacy_v2/logic_error.py b/src/algokit_utils/_legacy_v2/logic_error.py new file mode 100644 index 00000000..a365a3c1 --- /dev/null +++ b/src/algokit_utils/_legacy_v2/logic_error.py @@ -0,0 +1,85 @@ +import re +from copy import copy +from typing import TYPE_CHECKING, TypedDict + +from algokit_utils._legacy_v2.models import SimulationTrace + +if TYPE_CHECKING: + from algosdk.source_map import SourceMap as AlgoSourceMap + +__all__ = [ + "LogicError", + "parse_logic_error", +] + +LOGIC_ERROR = ( + ".*transaction 
(?P<transaction_id>[A-Z0-9]+): logic eval error: (?P<message>.*). Details: .*pc=(?P<pc>[0-9]+).*"
+)
+
+
+class LogicErrorData(TypedDict):
+    transaction_id: str
+    message: str
+    pc: int
+
+
+def parse_logic_error(
+    error_str: str,
+) -> LogicErrorData | None:
+    match = re.match(LOGIC_ERROR, error_str)
+    if match is None:
+        return None
+
+    return {
+        "transaction_id": match.group("transaction_id"),
+        "message": match.group("message"),
+        "pc": int(match.group("pc")),
+    }
+
+
+class LogicError(Exception):
+    def __init__(  # noqa: PLR0913
+        self,
+        *,
+        logic_error_str: str,
+        program: str,
+        source_map: "AlgoSourceMap | None",
+        transaction_id: str,
+        message: str,
+        pc: int,
+        logic_error: Exception | None = None,
+        traces: list[SimulationTrace] | None = None,
+    ):
+        self.logic_error = logic_error
+        self.logic_error_str = logic_error_str
+        self.program = program
+        self.source_map = source_map
+        self.lines = program.split("\n")
+        self.transaction_id = transaction_id
+        self.message = message
+        self.pc = pc
+        self.traces = traces
+
+        self.line_no = self.source_map.get_line_for_pc(self.pc) if self.source_map else None
+
+    def __str__(self) -> str:
+        return (
+            f"Txn {self.transaction_id} had error '{self.message}' at PC {self.pc}"
+            + (":" if self.line_no is None else f" and Source Line {self.line_no}:")
+            + f"\n{self.trace()}"
+        )
+
+    def trace(self, lines: int = 5) -> str:
+        if self.line_no is None:
+            return """
+Could not determine TEAL source line for the error as no approval source map was provided, to receive a trace of the
+error please provide an approval SourceMap. Either by:
+ 1.) Providing template_values when creating the ApplicationClient, so a SourceMap can be obtained automatically OR
+ 2.) Set approval_source_map from a previously compiled approval program OR
+ 3.) 
Import a previously exported source map using import_source_map""" + + program_lines = copy(self.lines) + program_lines[self.line_no] += "\t\t<-- Error" + lines_before = max(0, self.line_no - lines) + lines_after = min(len(program_lines), self.line_no + lines) + return "\n\t" + "\n\t".join(program_lines[lines_before:lines_after]) diff --git a/src/algokit_utils/models.py b/src/algokit_utils/_legacy_v2/models.py similarity index 97% rename from src/algokit_utils/models.py rename to src/algokit_utils/_legacy_v2/models.py index e1030088..cc5d34d2 100644 --- a/src/algokit_utils/models.py +++ b/src/algokit_utils/_legacy_v2/models.py @@ -202,14 +202,14 @@ class TransactionParametersDict(TypedDict, total=False): """Address to rekey to""" -class OnCompleteCallParametersDict(TypedDict, TransactionParametersDict, total=False): +class OnCompleteCallParametersDict(TransactionParametersDict, total=False): """Additional parameters that can be included in a transaction when using the ApplicationClient.call/compose_call methods""" on_complete: transaction.OnComplete -class CreateCallParametersDict(TypedDict, OnCompleteCallParametersDict, total=False): +class CreateCallParametersDict(OnCompleteCallParametersDict, total=False): """Additional parameters that can be included in a transaction when using the ApplicationClient.create/compose_create methods""" diff --git a/src/algokit_utils/_legacy_v2/network_clients.py b/src/algokit_utils/_legacy_v2/network_clients.py new file mode 100644 index 00000000..2de270da --- /dev/null +++ b/src/algokit_utils/_legacy_v2/network_clients.py @@ -0,0 +1,130 @@ +import dataclasses +import os +from typing import Literal +from urllib import parse + +from algosdk.kmd import KMDClient +from algosdk.v2client.algod import AlgodClient +from algosdk.v2client.indexer import IndexerClient + +__all__ = [ + "AlgoClientConfig", + "get_algod_client", + "get_algonode_config", + "get_default_localnet_config", + "get_indexer_client", + 
"get_kmd_client_from_algod_client", + "is_localnet", + "is_mainnet", + "is_testnet", + "AlgoClientConfigs", + "get_kmd_client", +] + + +@dataclasses.dataclass +class AlgoClientConfig: + """Connection details for connecting to an {py:class}`algosdk.v2client.algod.AlgodClient` or + {py:class}`algosdk.v2client.indexer.IndexerClient`""" + + server: str + """URL for the service e.g. `http://localhost:4001` or `https://testnet-api.algonode.cloud`""" + token: str + """API Token to authenticate with the service""" + + +@dataclasses.dataclass +class AlgoClientConfigs: + algod_config: AlgoClientConfig + indexer_config: AlgoClientConfig + kmd_config: AlgoClientConfig | None + + +def get_default_localnet_config(config: Literal["algod", "indexer", "kmd"]) -> AlgoClientConfig: + """Returns the client configuration to point to the default LocalNet""" + port = {"algod": 4001, "indexer": 8980, "kmd": 4002}[config] + return AlgoClientConfig(server=f"http://localhost:{port}", token="a" * 64) + + +def get_algonode_config( + network: Literal["testnet", "mainnet"], config: Literal["algod", "indexer"], token: str +) -> AlgoClientConfig: + client = "api" if config == "algod" else "idx" + return AlgoClientConfig( + server=f"https://{network}-{client}.algonode.cloud", + token=token, + ) + + +def get_algod_client(config: AlgoClientConfig | None = None) -> AlgodClient: + """Returns an {py:class}`algosdk.v2client.algod.AlgodClient` from `config` or environment + + If no configuration provided will use environment variables `ALGOD_SERVER`, `ALGOD_PORT` and `ALGOD_TOKEN`""" + config = config or _get_config_from_environment("ALGOD") + headers = {"X-Algo-API-Token": config.token} + return AlgodClient(config.token, config.server, headers) + + +def get_kmd_client(config: AlgoClientConfig | None = None) -> KMDClient: + """Returns an {py:class}`algosdk.kmd.KMDClient` from `config` or environment + + If no configuration provided will use environment variables `KMD_SERVER`, `KMD_PORT` and `KMD_TOKEN`""" 
+ config = config or _get_config_from_environment("KMD") + return KMDClient(config.token, config.server) # type: ignore[no-untyped-call] + + +def get_indexer_client(config: AlgoClientConfig | None = None) -> IndexerClient: + """Returns an {py:class}`algosdk.v2client.indexer.IndexerClient` from `config` or environment. + + If no configuration provided will use environment variables `INDEXER_SERVER`, `INDEXER_PORT` and `INDEXER_TOKEN`""" + config = config or _get_config_from_environment("INDEXER") + headers = {"X-Indexer-API-Token": config.token} + return IndexerClient(config.token, config.server, headers) # type: ignore[no-untyped-call] + + +def is_localnet(client: AlgodClient) -> bool: + """Returns True if client genesis is `devnet-v1` or `sandnet-v1`""" + params = client.suggested_params() + return params.gen in ["devnet-v1", "sandnet-v1", "dockernet-v1"] + + +def is_mainnet(client: AlgodClient) -> bool: + """Returns True if client genesis is `mainnet-v1`""" + params = client.suggested_params() + return params.gen in ["mainnet-v1.0", "mainnet-v1", "mainnet"] + + +def is_testnet(client: AlgodClient) -> bool: + """Returns True if client genesis is `testnet-v1`""" + params = client.suggested_params() + return params.gen in ["testnet-v1.0", "testnet-v1", "testnet"] + + +def get_kmd_client_from_algod_client(client: AlgodClient) -> KMDClient: + """Returns an {py:class}`algosdk.kmd.KMDClient` from supplied `client` + + Will use the same address as provided `client` but on port specified by `KMD_PORT` environment variable, + or 4002 by default""" + # We can only use Kmd on the LocalNet otherwise it's not exposed so this makes some assumptions + # (e.g. 
same token and server as algod and port 4002 by default) + port = os.getenv("KMD_PORT", "4002") + server = _replace_kmd_port(client.algod_address, port) + return KMDClient(client.algod_token, server) # type: ignore[no-untyped-call] + + +def _replace_kmd_port(address: str, port: str) -> str: + parsed_algod = parse.urlparse(address) + kmd_host = parsed_algod.netloc.split(":", maxsplit=1)[0] + f":{port}" + kmd_parsed = parsed_algod._replace(netloc=kmd_host) + return parse.urlunparse(kmd_parsed) + + +def _get_config_from_environment(environment_prefix: str) -> AlgoClientConfig: + server = os.getenv(f"{environment_prefix}_SERVER") + if server is None: + raise Exception(f"Server environment variable not set: {environment_prefix}_SERVER") + port = os.getenv(f"{environment_prefix}_PORT") + if port: + parsed = parse.urlparse(server) + server = parsed._replace(netloc=f"{parsed.hostname}:{port}").geturl() + return AlgoClientConfig(server, os.getenv(f"{environment_prefix}_TOKEN", "")) diff --git a/src/algokit_utils/account.py b/src/algokit_utils/account.py index a0eb7d53..cb51b335 100644 --- a/src/algokit_utils/account.py +++ b/src/algokit_utils/account.py @@ -1,183 +1 @@ -import logging -import os -from typing import TYPE_CHECKING, Any - -from algosdk.account import address_from_private_key -from algosdk.mnemonic import from_private_key, to_private_key -from algosdk.util import algos_to_microalgos - -from algokit_utils._transfer import TransferParameters, transfer -from algokit_utils.models import Account -from algokit_utils.network_clients import get_kmd_client_from_algod_client, is_localnet - -if TYPE_CHECKING: - from collections.abc import Callable - - from algosdk.kmd import KMDClient - from algosdk.v2client.algod import AlgodClient - -__all__ = [ - "create_kmd_wallet_account", - "get_account", - "get_account_from_mnemonic", - "get_dispenser_account", - "get_kmd_wallet_account", - "get_localnet_default_account", - "get_or_create_kmd_wallet_account", -] - -logger = 
logging.getLogger(__name__) -_DEFAULT_ACCOUNT_MINIMUM_BALANCE = 1_000_000_000 - - -def get_account_from_mnemonic(mnemonic: str) -> Account: - """Convert a mnemonic (25 word passphrase) into an Account""" - private_key = to_private_key(mnemonic) # type: ignore[no-untyped-call] - address = address_from_private_key(private_key) # type: ignore[no-untyped-call] - return Account(private_key=private_key, address=address) - - -def create_kmd_wallet_account(kmd_client: "KMDClient", name: str) -> Account: - """Creates a wallet with specified name""" - wallet_id = kmd_client.create_wallet(name, "")["id"] - wallet_handle = kmd_client.init_wallet_handle(wallet_id, "") - kmd_client.generate_key(wallet_handle) - - key_ids: list[str] = kmd_client.list_keys(wallet_handle) - account_key = key_ids[0] - - private_account_key = kmd_client.export_key(wallet_handle, "", account_key) - return get_account_from_mnemonic(from_private_key(private_account_key)) # type: ignore[no-untyped-call] - - -def get_or_create_kmd_wallet_account( - client: "AlgodClient", name: str, fund_with_algos: float = 1000, kmd_client: "KMDClient | None" = None -) -> Account: - """Returns a wallet with specified name, or creates one if not found""" - kmd_client = kmd_client or get_kmd_client_from_algod_client(client) - account = get_kmd_wallet_account(client, kmd_client, name) - - if account: - account_info = client.account_info(account.address) - assert isinstance(account_info, dict) - if account_info["amount"] > 0: - return account - logger.debug(f"Found existing account in LocalNet with name '{name}', but no funds in the account.") - else: - account = create_kmd_wallet_account(kmd_client, name) - - logger.debug( - f"Couldn't find existing account in LocalNet with name '{name}'. " - f"So created account {account.address} with keys stored in KMD." 
- ) - - logger.debug(f"Funding account {account.address} with {fund_with_algos} ALGOs") - - if fund_with_algos: - transfer( - client, - TransferParameters( - from_account=get_dispenser_account(client), - to_address=account.address, - micro_algos=algos_to_microalgos(fund_with_algos), # type: ignore[no-untyped-call] - ), - ) - - return account - - -def _is_default_account(account: dict[str, Any]) -> bool: - return bool(account["status"] != "Offline" and account["amount"] > _DEFAULT_ACCOUNT_MINIMUM_BALANCE) - - -def get_localnet_default_account(client: "AlgodClient") -> Account: - """Returns the default Account in a LocalNet instance""" - if not is_localnet(client): - raise Exception("Can't get a default account from non LocalNet network") - - account = get_kmd_wallet_account( - client, get_kmd_client_from_algod_client(client), "unencrypted-default-wallet", _is_default_account - ) - assert account - return account - - -def get_dispenser_account(client: "AlgodClient") -> Account: - """Returns an Account based on DISPENSER_MNENOMIC environment variable or the default account on LocalNet""" - if is_localnet(client): - return get_localnet_default_account(client) - return get_account(client, "DISPENSER") - - -def get_kmd_wallet_account( - client: "AlgodClient", - kmd_client: "KMDClient", - name: str, - predicate: "Callable[[dict[str, Any]], bool] | None" = None, -) -> Account | None: - """Returns wallet matching specified name and predicate or None if not found""" - wallets: list[dict] = kmd_client.list_wallets() - - wallet = next((w for w in wallets if w["name"] == name), None) - if wallet is None: - return None - - wallet_id = wallet["id"] - wallet_handle = kmd_client.init_wallet_handle(wallet_id, "") - key_ids: list[str] = kmd_client.list_keys(wallet_handle) - matched_account_key = None - if predicate: - for key in key_ids: - account = client.account_info(key) - assert isinstance(account, dict) - if predicate(account): - matched_account_key = key - else: - 
matched_account_key = next(key_ids.__iter__(), None) - - if not matched_account_key: - return None - - private_account_key = kmd_client.export_key(wallet_handle, "", matched_account_key) - return get_account_from_mnemonic(from_private_key(private_account_key)) # type: ignore[no-untyped-call] - - -def get_account( - client: "AlgodClient", name: str, fund_with_algos: float = 1000, kmd_client: "KMDClient | None" = None -) -> Account: - """Returns an Algorand account with private key loaded by convention based on the given name identifier. - - # Convention - - **Non-LocalNet:** will load `os.environ[f"{name}_MNEMONIC"]` as a mnemonic secret - Be careful how the mnemonic is handled, never commit it into source control and ideally load it via a - secret storage service rather than the file system. - - **LocalNet:** will load the account from a KMD wallet called {name} and if that wallet doesn't exist it will - create it and fund the account for you - - This allows you to write code that will work seamlessly in production and local development (LocalNet) without - manual config locally (including when you reset the LocalNet). - - # Example - If you have a mnemonic secret loaded into `os.environ["ACCOUNT_MNEMONIC"]` then you can call the following to get - that private key loaded into an account object: - ```python - account = get_account('ACCOUNT', algod) - ``` - - If that code runs against LocalNet then a wallet called 'ACCOUNT' will automatically be created with an account - that is automatically funded with 1000 (default) ALGOs from the default LocalNet dispenser. 
- """ - - mnemonic_key = f"{name.upper()}_MNEMONIC" - mnemonic = os.getenv(mnemonic_key) - if mnemonic: - return get_account_from_mnemonic(mnemonic) - - if is_localnet(client): - account = get_or_create_kmd_wallet_account(client, name, fund_with_algos, kmd_client) - os.environ[mnemonic_key] = from_private_key(account.private_key) # type: ignore[no-untyped-call] - return account - - raise Exception(f"Missing environment variable '{mnemonic_key}' when looking for account '{name}'") +from algokit_utils._legacy_v2.account import * # noqa: F403 diff --git a/src/algokit_utils/accounts/__init__.py b/src/algokit_utils/accounts/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/beta/account_manager.py b/src/algokit_utils/accounts/account_manager.py similarity index 63% rename from src/algokit_utils/beta/account_manager.py rename to src/algokit_utils/accounts/account_manager.py index 7eddff75..a2527c6c 100644 --- a/src/algokit_utils/beta/account_manager.py +++ b/src/algokit_utils/accounts/account_manager.py @@ -2,12 +2,12 @@ from dataclasses import dataclass from typing import Any -from algokit_utils.account import get_dispenser_account, get_kmd_wallet_account, get_localnet_default_account from algosdk.account import generate_account from algosdk.atomic_transaction_composer import AccountTransactionSigner, TransactionSigner from typing_extensions import Self -from .client_manager import ClientManager +from algokit_utils.account import get_dispenser_account, get_kmd_wallet_account, get_localnet_default_account +from algokit_utils.clients.client_manager import ClientManager @dataclass @@ -86,40 +86,11 @@ def get_asset_information(self, sender: str, asset_id: int) -> dict[str, Any]: assert isinstance(info, dict) return info - # TODO - # def from_mnemonic(self, mnemonic_secret: str, sender: Optional[str] = None) -> AddrAndSigner: - # """ - # Tracks and returns an Algorand account with secret key loaded (i.e. 
that can sign transactions) by taking the mnemonic secret. - - # Example: - # account = account.from_mnemonic("mnemonic secret ...") - # rekeyed_account = account.from_mnemonic("mnemonic secret ...", "SENDERADDRESS...") - - # :param mnemonic_secret: The mnemonic secret representing the private key of an account; **Note: Be careful how the mnemonic is handled**, - # never commit it into source control and ideally load it from the environment (ideally via a secret storage service) rather than the file system. - # :param sender: The optional sender address to use this signer for (aka a rekeyed account) - # :return: The account - # """ - # account = mnemonic_account(mnemonic_secret) - # return self.signer_account(rekeyed_account(account, sender) if sender else account) - def from_kmd( self, name: str, predicate: Callable[[dict[str, Any]], bool] | None = None, ) -> AddressAndSigner: - """ - Tracks and returns an Algorand account with private key loaded from the given KMD wallet (identified by name). - - Example (Get default funded account in a LocalNet): - default_dispenser_account = account.from_kmd('unencrypted-default-wallet', - lambda a: a['status'] != 'Offline' and a['amount'] > 1_000_000_000 - ) - - :param name: The name of the wallet to retrieve an account from - :param predicate: An optional filter to use to find the account (otherwise it will return a random account from the wallet) - :return: The account - """ account = get_kmd_wallet_account( name=name, predicate=predicate, client=self._client_manager.algod, kmd_client=self._client_manager.kmd ) @@ -129,29 +100,6 @@ def from_kmd( self.set_signer(account.address, account.signer) return AddressAndSigner(address=account.address, signer=account.signer) - # TODO - # def multisig( - # self, multisig_params: algosdk.MultisigMetadata, signing_accounts: Union[algosdk.Account, SigningAccount] - # ) -> TransactionSignerAccount: - # """ - # Tracks and returns an account that supports partial or full multisig signing. 
- - # Example: - # account = account.multisig( - # { - # "version": 1, - # "threshold": 1, - # "addrs": ["ADDRESS1...", "ADDRESS2..."] - # }, - # account.from_environment('ACCOUNT1') - # ) - - # :param multisig_params: The parameters that define the multisig account - # :param signing_accounts: The signers that are currently present - # :return: A multisig account wrapper - # """ - # return self.signer_account(multisig_account(multisig_params, signing_accounts)) - def random(self) -> AddressAndSigner: """ Tracks and returns a new, random Algorand account with secret key loaded. @@ -187,14 +135,6 @@ def dispenser(self) -> AddressAndSigner: return AddressAndSigner(address=acct.address, signer=acct.signer) def localnet_dispenser(self) -> AddressAndSigner: - """ - Returns an Algorand account with private key loaded for the default LocalNet dispenser account (that can be used to fund other accounts). - - Example: - account = account.localnet_dispenser() - - :return: The account - """ acct = get_localnet_default_account(self._client_manager.algod) self.set_signer(acct.address, acct.signer) return AddressAndSigner(address=acct.address, signer=acct.signer) diff --git a/src/algokit_utils/accounts/models.py b/src/algokit_utils/accounts/models.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/application_client.py b/src/algokit_utils/application_client.py index 008ac32f..2859c5d0 100644 --- a/src/algokit_utils/application_client.py +++ b/src/algokit_utils/application_client.py @@ -1,1449 +1 @@ -import base64 -import copy -import json -import logging -import re -import typing -from math import ceil -from pathlib import Path -from typing import Any, Literal, cast, overload - -import algosdk -from algosdk import transaction -from algosdk.abi import ABIType, Method, Returns -from algosdk.account import address_from_private_key -from algosdk.atomic_transaction_composer import ( - ABI_RETURN_HASH, - ABIResult, - AccountTransactionSigner, - 
AtomicTransactionComposer, - AtomicTransactionResponse, - LogicSigTransactionSigner, - MultisigTransactionSigner, - SimulateAtomicTransactionResponse, - TransactionSigner, - TransactionWithSigner, -) -from algosdk.constants import APP_PAGE_MAX_SIZE -from algosdk.logic import get_application_address -from algosdk.source_map import SourceMap - -import algokit_utils.application_specification as au_spec -import algokit_utils.deploy as au_deploy -from algokit_utils._debugging import ( - PersistSourceMapInput, - persist_sourcemaps, - simulate_and_persist_response, - simulate_response, -) -from algokit_utils.common import Program -from algokit_utils.config import config -from algokit_utils.logic_error import LogicError, parse_logic_error -from algokit_utils.models import ( - ABIArgsDict, - ABIArgType, - ABIMethod, - ABITransactionResponse, - Account, - CreateCallParameters, - CreateCallParametersDict, - OnCompleteCallParameters, - OnCompleteCallParametersDict, - SimulationTrace, - TransactionParameters, - TransactionParametersDict, - TransactionResponse, -) - -if typing.TYPE_CHECKING: - from algosdk.v2client.algod import AlgodClient - from algosdk.v2client.indexer import IndexerClient - - -logger = logging.getLogger(__name__) - - -"""A dictionary `dict[str, Any]` representing ABI argument names and values""" - -__all__ = [ - "ApplicationClient", - "execute_atc_with_logic_error", - "get_next_version", - "get_sender_from_signer", - "num_extra_program_pages", -] - -"""Alias for {py:class}`pyteal.ABIReturnSubroutine`, {py:class}`algosdk.abi.method.Method` or a {py:class}`str` -representing an ABI method name or signature""" - - -def num_extra_program_pages(approval: bytes, clear: bytes) -> int: - """Calculate minimum number of extra_pages required for provided approval and clear programs""" - - return ceil(((len(approval) + len(clear)) - APP_PAGE_MAX_SIZE) / APP_PAGE_MAX_SIZE) - - -class ApplicationClient: - """A class that wraps an ARC-0032 app spec and provides high 
productivity methods to deploy and call the app""" - - @overload - def __init__( - self, - algod_client: "AlgodClient", - app_spec: au_spec.ApplicationSpecification | Path, - *, - app_id: int = 0, - signer: TransactionSigner | Account | None = None, - sender: str | None = None, - suggested_params: transaction.SuggestedParams | None = None, - template_values: au_deploy.TemplateValueMapping | None = None, - ): ... - - @overload - def __init__( - self, - algod_client: "AlgodClient", - app_spec: au_spec.ApplicationSpecification | Path, - *, - creator: str | Account, - indexer_client: "IndexerClient | None" = None, - existing_deployments: au_deploy.AppLookup | None = None, - signer: TransactionSigner | Account | None = None, - sender: str | None = None, - suggested_params: transaction.SuggestedParams | None = None, - template_values: au_deploy.TemplateValueMapping | None = None, - app_name: str | None = None, - ): ... - - def __init__( # noqa: PLR0913 - self, - algod_client: "AlgodClient", - app_spec: au_spec.ApplicationSpecification | Path, - *, - app_id: int = 0, - creator: str | Account | None = None, - indexer_client: "IndexerClient | None" = None, - existing_deployments: au_deploy.AppLookup | None = None, - signer: TransactionSigner | Account | None = None, - sender: str | None = None, - suggested_params: transaction.SuggestedParams | None = None, - template_values: au_deploy.TemplateValueMapping | None = None, - app_name: str | None = None, - ): - """ApplicationClient can be created with an app_id to interact with an existing application, alternatively - it can be created with a creator and indexer_client specified to find existing applications by name and creator. 
- - :param AlgodClient algod_client: AlgoSDK algod client - :param ApplicationSpecification | Path app_spec: An Application Specification or the path to one - :param int app_id: The app_id of an existing application, to instead find the application by creator and name - use the creator and indexer_client parameters - :param str | Account creator: The address or Account of the app creator to resolve the app_id - :param IndexerClient indexer_client: AlgoSDK indexer client, only required if deploying or finding app_id by - creator and app name - :param AppLookup existing_deployments: - :param TransactionSigner | Account signer: Account or signer to use to sign transactions, if not specified and - creator was passed as an Account will use that. - :param str sender: Address to use as the sender for all transactions, will use the address associated with the - signer if not specified. - :param TemplateValueMapping template_values: Values to use for TMPL_* template variables, dictionary keys should - *NOT* include the TMPL_ prefix - :param str | None app_name: Name of application to use when deploying, defaults to name defined on the - Application Specification - """ - self.algod_client = algod_client - self.app_spec = ( - au_spec.ApplicationSpecification.from_json(app_spec.read_text()) if isinstance(app_spec, Path) else app_spec - ) - self._app_name = app_name - self._approval_program: Program | None = None - self._approval_source_map: SourceMap | None = None - self._clear_program: Program | None = None - - self.template_values: au_deploy.TemplateValueMapping = template_values or {} - self.existing_deployments = existing_deployments - self._indexer_client = indexer_client - if creator is not None: - if not self.existing_deployments and not self._indexer_client: - raise Exception( - "If using the creator parameter either existing_deployments or indexer_client must also be provided" - ) - self._creator: str | None = creator.address if isinstance(creator, Account) else 
creator - if self.existing_deployments and self.existing_deployments.creator != self._creator: - raise Exception( - "Attempt to create application client with invalid existing_deployments against" - f"a different creator ({self.existing_deployments.creator} instead of " - f"expected creator {self._creator}" - ) - self.app_id = 0 - else: - self.app_id = app_id - self._creator = None - - self.signer: TransactionSigner | None - if signer: - self.signer = ( - signer if isinstance(signer, TransactionSigner) else AccountTransactionSigner(signer.private_key) - ) - elif isinstance(creator, Account): - self.signer = AccountTransactionSigner(creator.private_key) - else: - self.signer = None - - self.sender = sender - self.suggested_params = suggested_params - - @property - def app_name(self) -> str: - return self._app_name or self.app_spec.contract.name - - @app_name.setter - def app_name(self, value: str) -> None: - self._app_name = value - - @property - def app_address(self) -> str: - return get_application_address(self.app_id) - - @property - def approval(self) -> Program | None: - return self._approval_program - - @property - def approval_source_map(self) -> SourceMap | None: - if self._approval_source_map: - return self._approval_source_map - if self._approval_program: - return self._approval_program.source_map - return None - - @approval_source_map.setter - def approval_source_map(self, value: SourceMap) -> None: - self._approval_source_map = value - - @property - def clear(self) -> Program | None: - return self._clear_program - - def prepare( - self, - signer: TransactionSigner | Account | None = None, - sender: str | None = None, - app_id: int | None = None, - template_values: au_deploy.TemplateValueDict | None = None, - ) -> "ApplicationClient": - """Creates a copy of this ApplicationClient, using the new signer, sender and app_id values if provided. 
- Will also substitute provided template_values into the associated app_spec in the copy""" - new_client: ApplicationClient = copy.copy(self) - new_client._prepare( # noqa: SLF001 - new_client, signer=signer, sender=sender, app_id=app_id, template_values=template_values - ) - return new_client - - def _prepare( # noqa: PLR0913 - self, - target: "ApplicationClient", - *, - signer: TransactionSigner | Account | None = None, - sender: str | None = None, - app_id: int | None = None, - template_values: au_deploy.TemplateValueDict | None = None, - ) -> None: - target.app_id = self.app_id if app_id is None else app_id - target.signer, target.sender = target.get_signer_sender( - AccountTransactionSigner(signer.private_key) if isinstance(signer, Account) else signer, sender - ) - target.template_values = {**self.template_values, **(template_values or {})} - - def deploy( # noqa: PLR0913 - self, - version: str | None = None, - *, - signer: TransactionSigner | None = None, - sender: str | None = None, - allow_update: bool | None = None, - allow_delete: bool | None = None, - on_update: au_deploy.OnUpdate = au_deploy.OnUpdate.Fail, - on_schema_break: au_deploy.OnSchemaBreak = au_deploy.OnSchemaBreak.Fail, - template_values: au_deploy.TemplateValueMapping | None = None, - create_args: au_deploy.ABICreateCallArgs - | au_deploy.ABICreateCallArgsDict - | au_deploy.DeployCreateCallArgs - | None = None, - update_args: au_deploy.ABICallArgs | au_deploy.ABICallArgsDict | au_deploy.DeployCallArgs | None = None, - delete_args: au_deploy.ABICallArgs | au_deploy.ABICallArgsDict | au_deploy.DeployCallArgs | None = None, - ) -> au_deploy.DeployResponse: - """Deploy an application and update client to reference it. - - Idempotently deploy (create, update/delete if changed) an app against the given name via the given creator - account, including deploy-time template placeholder substitutions. 
- To understand the architecture decisions behind this functionality please see - - - ```{note} - If there is a breaking state schema change to an existing app (and `on_schema_break` is set to - 'ReplaceApp' the existing app will be deleted and re-created. - ``` - - ```{note} - If there is an update (different TEAL code) to an existing app (and `on_update` is set to 'ReplaceApp') - the existing app will be deleted and re-created. - ``` - - :param str version: version to use when creating or updating app, if None version will be auto incremented - :param algosdk.atomic_transaction_composer.TransactionSigner signer: signer to use when deploying app - , if None uses self.signer - :param str sender: sender address to use when deploying app, if None uses self.sender - :param bool allow_delete: Used to set the `TMPL_DELETABLE` template variable to conditionally control if an app - can be deleted - :param bool allow_update: Used to set the `TMPL_UPDATABLE` template variable to conditionally control if an app - can be updated - :param OnUpdate on_update: Determines what action to take if an application update is required - :param OnSchemaBreak on_schema_break: Determines what action to take if an application schema requirements - has increased beyond the current allocation - :param dict[str, int|str|bytes] template_values: Values to use for `TMPL_*` template variables, dictionary keys - should *NOT* include the TMPL_ prefix - :param ABICreateCallArgs create_args: Arguments used when creating an application - :param ABICallArgs | ABICallArgsDict update_args: Arguments used when updating an application - :param ABICallArgs | ABICallArgsDict delete_args: Arguments used when deleting an application - :return DeployResponse: details action taken and relevant transactions - :raises DeploymentError: If the deployment failed - """ - # check inputs - if self.app_id: - raise au_deploy.DeploymentFailedError( - f"Attempt to deploy app which already has an app index of {self.app_id}" - 
) - try: - resolved_signer, resolved_sender = self.resolve_signer_sender(signer, sender) - except ValueError as ex: - raise au_deploy.DeploymentFailedError(f"{ex}, unable to deploy app") from None - if not self._creator: - raise au_deploy.DeploymentFailedError("No creator provided, unable to deploy app") - if self._creator != resolved_sender: - raise au_deploy.DeploymentFailedError( - f"Attempt to deploy contract with a sender address {resolved_sender} that differs " - f"from the given creator address for this application client: {self._creator}" - ) - - # make a copy and prepare variables - template_values = {**self.template_values, **(template_values or {})} - au_deploy.add_deploy_template_variables(template_values, allow_update=allow_update, allow_delete=allow_delete) - - existing_app_metadata_or_reference = self._load_app_reference() - - self._approval_program, self._clear_program = substitute_template_and_compile( - self.algod_client, self.app_spec, template_values - ) - - if config.debug and config.project_root: - persist_sourcemaps( - sources=[ - PersistSourceMapInput( - compiled_teal=self._approval_program, app_name=self.app_name, file_name="approval.teal" - ), - PersistSourceMapInput( - compiled_teal=self._clear_program, app_name=self.app_name, file_name="clear.teal" - ), - ], - project_root=config.project_root, - client=self.algod_client, - with_sources=True, - ) - - deployer = au_deploy.Deployer( - app_client=self, - creator=self._creator, - signer=resolved_signer, - sender=resolved_sender, - new_app_metadata=self._get_app_deploy_metadata(version, allow_update, allow_delete), - existing_app_metadata_or_reference=existing_app_metadata_or_reference, - on_update=on_update, - on_schema_break=on_schema_break, - create_args=create_args, - update_args=update_args, - delete_args=delete_args, - ) - - return deployer.deploy() - - def compose_create( - self, - atc: AtomicTransactionComposer, - /, - call_abi_method: ABIMethod | bool | None = None, - 
transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> None: - """Adds a signed transaction with application id == 0 and the schema and source of client's app_spec to atc""" - approval_program, clear_program = self._check_is_compiled() - transaction_parameters = _convert_transaction_parameters(transaction_parameters) - - extra_pages = transaction_parameters.extra_pages or num_extra_program_pages( - approval_program.raw_binary, clear_program.raw_binary - ) - - self.add_method_call( - atc, - app_id=0, - abi_method=call_abi_method, - abi_args=abi_kwargs, - on_complete=transaction_parameters.on_complete or transaction.OnComplete.NoOpOC, - call_config=au_spec.CallConfig.CREATE, - parameters=transaction_parameters, - approval_program=approval_program.raw_binary, - clear_program=clear_program.raw_binary, - global_schema=self.app_spec.global_state_schema, - local_schema=self.app_spec.local_state_schema, - extra_pages=extra_pages, - ) - - @overload - def create( - self, - call_abi_method: Literal[False], - transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = ..., - ) -> TransactionResponse: ... - - @overload - def create( - self, - call_abi_method: ABIMethod | Literal[True], - transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> ABITransactionResponse: ... - - @overload - def create( - self, - call_abi_method: ABIMethod | bool | None = ..., - transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: ... 
- - def create( - self, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: CreateCallParameters | CreateCallParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: - """Submits a signed transaction with application id == 0 and the schema and source of client's app_spec""" - - atc = AtomicTransactionComposer() - - self.compose_create( - atc, - call_abi_method, - transaction_parameters, - **abi_kwargs, - ) - create_result = self._execute_atc_tr(atc) - self.app_id = au_deploy.get_app_id_from_tx_id(self.algod_client, create_result.tx_id) - return create_result - - def compose_update( - self, - atc: AtomicTransactionComposer, - /, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> None: - """Adds a signed transaction with on_complete=UpdateApplication to atc""" - approval_program, clear_program = self._check_is_compiled() - - self.add_method_call( - atc=atc, - abi_method=call_abi_method, - abi_args=abi_kwargs, - parameters=transaction_parameters, - on_complete=transaction.OnComplete.UpdateApplicationOC, - approval_program=approval_program.raw_binary, - clear_program=clear_program.raw_binary, - ) - - @overload - def update( - self, - call_abi_method: ABIMethod | Literal[True], - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> ABITransactionResponse: ... - - @overload - def update( - self, - call_abi_method: Literal[False], - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - ) -> TransactionResponse: ... 
- - @overload - def update( - self, - call_abi_method: ABIMethod | bool | None = ..., - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: ... - - def update( - self, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: - """Submits a signed transaction with on_complete=UpdateApplication""" - - atc = AtomicTransactionComposer() - self.compose_update( - atc, - call_abi_method, - transaction_parameters=transaction_parameters, - **abi_kwargs, - ) - return self._execute_atc_tr(atc) - - def compose_delete( - self, - atc: AtomicTransactionComposer, - /, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> None: - """Adds a signed transaction with on_complete=DeleteApplication to atc""" - - self.add_method_call( - atc, - call_abi_method, - abi_args=abi_kwargs, - parameters=transaction_parameters, - on_complete=transaction.OnComplete.DeleteApplicationOC, - ) - - @overload - def delete( - self, - call_abi_method: ABIMethod | Literal[True], - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> ABITransactionResponse: ... - - @overload - def delete( - self, - call_abi_method: Literal[False], - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - ) -> TransactionResponse: ... - - @overload - def delete( - self, - call_abi_method: ABIMethod | bool | None = ..., - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: ... 
- - def delete( - self, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: - """Submits a signed transaction with on_complete=DeleteApplication""" - - atc = AtomicTransactionComposer() - self.compose_delete( - atc, - call_abi_method, - transaction_parameters=transaction_parameters, - **abi_kwargs, - ) - return self._execute_atc_tr(atc) - - def compose_call( - self, - atc: AtomicTransactionComposer, - /, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> None: - """Adds a signed transaction with specified parameters to atc""" - _parameters = _convert_transaction_parameters(transaction_parameters) - self.add_method_call( - atc, - abi_method=call_abi_method, - abi_args=abi_kwargs, - parameters=_parameters, - on_complete=_parameters.on_complete or transaction.OnComplete.NoOpOC, - ) - - @overload - def call( - self, - call_abi_method: ABIMethod | Literal[True], - transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> ABITransactionResponse: ... - - @overload - def call( - self, - call_abi_method: Literal[False], - transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = ..., - ) -> TransactionResponse: ... - - @overload - def call( - self, - call_abi_method: ABIMethod | bool | None = ..., - transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: ... 
- - def call( - self, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: OnCompleteCallParameters | OnCompleteCallParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: - """Submits a signed transaction with specified parameters""" - atc = AtomicTransactionComposer() - _parameters = _convert_transaction_parameters(transaction_parameters) - self.compose_call( - atc, - call_abi_method=call_abi_method, - transaction_parameters=_parameters, - **abi_kwargs, - ) - - method = self._resolve_method( - call_abi_method, abi_kwargs, _parameters.on_complete or transaction.OnComplete.NoOpOC - ) - if method: - hints = self._method_hints(method) - if hints and hints.read_only: - if config.debug and config.project_root and config.trace_all: - simulate_and_persist_response( - atc, config.project_root, self.algod_client, config.trace_buffer_size_mb - ) - - return self._simulate_readonly_call(method, atc) - - return self._execute_atc_tr(atc) - - def compose_opt_in( - self, - atc: AtomicTransactionComposer, - /, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> None: - """Adds a signed transaction with on_complete=OptIn to atc""" - self.add_method_call( - atc, - abi_method=call_abi_method, - abi_args=abi_kwargs, - parameters=transaction_parameters, - on_complete=transaction.OnComplete.OptInOC, - ) - - @overload - def opt_in( - self, - call_abi_method: ABIMethod | Literal[True] = ..., - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> ABITransactionResponse: ... - - @overload - def opt_in( - self, - call_abi_method: Literal[False] = ..., - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - ) -> TransactionResponse: ... 
- - @overload - def opt_in( - self, - call_abi_method: ABIMethod | bool | None = ..., - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: ... - - def opt_in( - self, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: - """Submits a signed transaction with on_complete=OptIn""" - atc = AtomicTransactionComposer() - self.compose_opt_in( - atc, - call_abi_method=call_abi_method, - transaction_parameters=transaction_parameters, - **abi_kwargs, - ) - return self._execute_atc_tr(atc) - - def compose_close_out( - self, - atc: AtomicTransactionComposer, - /, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> None: - """Adds a signed transaction with on_complete=CloseOut to ac""" - self.add_method_call( - atc, - abi_method=call_abi_method, - abi_args=abi_kwargs, - parameters=transaction_parameters, - on_complete=transaction.OnComplete.CloseOutOC, - ) - - @overload - def close_out( - self, - call_abi_method: ABIMethod | Literal[True], - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> ABITransactionResponse: ... - - @overload - def close_out( - self, - call_abi_method: Literal[False], - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - ) -> TransactionResponse: ... - - @overload - def close_out( - self, - call_abi_method: ABIMethod | bool | None = ..., - transaction_parameters: TransactionParameters | TransactionParametersDict | None = ..., - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: ... 
- - def close_out( - self, - call_abi_method: ABIMethod | bool | None = None, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - **abi_kwargs: ABIArgType, - ) -> TransactionResponse | ABITransactionResponse: - """Submits a signed transaction with on_complete=CloseOut""" - atc = AtomicTransactionComposer() - self.compose_close_out( - atc, - call_abi_method=call_abi_method, - transaction_parameters=transaction_parameters, - **abi_kwargs, - ) - return self._execute_atc_tr(atc) - - def compose_clear_state( - self, - atc: AtomicTransactionComposer, - /, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - app_args: list[bytes] | None = None, - ) -> None: - """Adds a signed transaction with on_complete=ClearState to atc""" - return self.add_method_call( - atc, - parameters=transaction_parameters, - on_complete=transaction.OnComplete.ClearStateOC, - app_args=app_args, - ) - - def clear_state( - self, - transaction_parameters: TransactionParameters | TransactionParametersDict | None = None, - app_args: list[bytes] | None = None, - ) -> TransactionResponse: - """Submits a signed transaction with on_complete=ClearState""" - atc = AtomicTransactionComposer() - self.compose_clear_state( - atc, - transaction_parameters=transaction_parameters, - app_args=app_args, - ) - return self._execute_atc_tr(atc) - - def get_global_state(self, *, raw: bool = False) -> dict[bytes | str, bytes | str | int]: - """Gets the global state info associated with app_id""" - global_state = self.algod_client.application_info(self.app_id) - assert isinstance(global_state, dict) - return cast( - dict[bytes | str, bytes | str | int], - _decode_state(global_state.get("params", {}).get("global-state", {}), raw=raw), - ) - - def get_local_state(self, account: str | None = None, *, raw: bool = False) -> dict[bytes | str, bytes | str | int]: - """Gets the local state info for associated app_id and account/sender""" - - if 
account is None: - _, account = self.resolve_signer_sender(self.signer, self.sender) - - acct_state = self.algod_client.account_application_info(account, self.app_id) - assert isinstance(acct_state, dict) - return cast( - dict[bytes | str, bytes | str | int], - _decode_state(acct_state.get("app-local-state", {}).get("key-value", {}), raw=raw), - ) - - def resolve(self, to_resolve: au_spec.DefaultArgumentDict) -> int | str | bytes: - """Resolves the default value for an ABI method, based on app_spec""" - - def _data_check(value: object) -> int | str | bytes: - if isinstance(value, int | str | bytes): - return value - raise ValueError(f"Unexpected type for constant data: {value}") - - match to_resolve: - case {"source": "constant", "data": data}: - return _data_check(data) - case {"source": "global-state", "data": str() as key}: - global_state = self.get_global_state(raw=True) - return global_state[key.encode()] - case {"source": "local-state", "data": str() as key}: - _, sender = self.resolve_signer_sender(self.signer, self.sender) - acct_state = self.get_local_state(sender, raw=True) - return acct_state[key.encode()] - case {"source": "abi-method", "data": dict() as method_dict}: - method = Method.undictify(method_dict) - response = self.call(method) - assert isinstance(response, ABITransactionResponse) - return _data_check(response.return_value) - - case {"source": source}: - raise ValueError(f"Unrecognized default argument source: {source}") - case _: - raise TypeError("Unable to interpret default argument specification") - - def _get_app_deploy_metadata( - self, version: str | None, allow_update: bool | None, allow_delete: bool | None - ) -> au_deploy.AppDeployMetaData: - updatable = ( - allow_update - if allow_update is not None - else au_deploy.get_deploy_control( - self.app_spec, au_deploy.UPDATABLE_TEMPLATE_NAME, transaction.OnComplete.UpdateApplicationOC - ) - ) - deletable = ( - allow_delete - if allow_delete is not None - else 
au_deploy.get_deploy_control( - self.app_spec, au_deploy.DELETABLE_TEMPLATE_NAME, transaction.OnComplete.DeleteApplicationOC - ) - ) - - app = self._load_app_reference() - - if version is None: - if app.app_id == 0: - version = "v1.0" - else: - assert isinstance(app, au_deploy.AppDeployMetaData) - version = get_next_version(app.version) - return au_deploy.AppDeployMetaData(self.app_name, version, updatable=updatable, deletable=deletable) - - def _check_is_compiled(self) -> tuple[Program, Program]: - if self._approval_program is None or self._clear_program is None: - self._approval_program, self._clear_program = substitute_template_and_compile( - self.algod_client, self.app_spec, self.template_values - ) - - if config.debug and config.project_root: - persist_sourcemaps( - sources=[ - PersistSourceMapInput( - compiled_teal=self._approval_program, app_name=self.app_name, file_name="approval.teal" - ), - PersistSourceMapInput( - compiled_teal=self._clear_program, app_name=self.app_name, file_name="clear.teal" - ), - ], - project_root=config.project_root, - client=self.algod_client, - with_sources=True, - ) - - return self._approval_program, self._clear_program - - def _simulate_readonly_call( - self, method: Method, atc: AtomicTransactionComposer - ) -> ABITransactionResponse | TransactionResponse: - response = simulate_response(atc, self.algod_client) - traces = None - if config.debug: - traces = _create_simulate_traces(response) - if response.failure_message: - raise _try_convert_to_logic_error( - response.failure_message, - self.app_spec.approval_program, - self._get_approval_source_map, - traces, - ) or Exception(f"Simulate failed for readonly method {method.get_signature()}: {response.failure_message}") - - return TransactionResponse.from_atr(response) - - def _load_reference_and_check_app_id(self) -> None: - self._load_app_reference() - self._check_app_id() - - def _load_app_reference(self) -> au_deploy.AppReference | au_deploy.AppMetaData: - if not 
self.existing_deployments and self._creator: - assert self._indexer_client - self.existing_deployments = au_deploy.get_creator_apps(self._indexer_client, self._creator) - - if self.existing_deployments: - app = self.existing_deployments.apps.get(self.app_name) - if app: - if self.app_id == 0: - self.app_id = app.app_id - return app - - return au_deploy.AppReference(self.app_id, self.app_address) - - def _check_app_id(self) -> None: - if self.app_id == 0: - raise Exception( - "ApplicationClient is not associated with an app instance, to resolve either:\n" - "1.) provide an app_id on construction OR\n" - "2.) provide a creator address so an app can be searched for OR\n" - "3.) create an app first using create or deploy methods" - ) - - def _resolve_method( - self, - abi_method: ABIMethod | bool | None, - args: ABIArgsDict | None, - on_complete: transaction.OnComplete, - call_config: au_spec.CallConfig = au_spec.CallConfig.CALL, - ) -> Method | None: - matches: list[Method | None] = [] - match abi_method: - case str() | Method(): # abi method specified - return self._resolve_abi_method(abi_method) - case bool() | None: # find abi method - has_bare_config = ( - call_config in au_deploy.get_call_config(self.app_spec.bare_call_config, on_complete) - or on_complete == transaction.OnComplete.ClearStateOC - ) - abi_methods = self._find_abi_methods(args, on_complete, call_config) - if abi_method is not False: - matches += abi_methods - if has_bare_config and abi_method is not True: - matches += [None] - case _: - return abi_method.method_spec() - - if len(matches) == 1: # exact match - return matches[0] - elif len(matches) > 1: # ambiguous match - signatures = ", ".join((m.get_signature() if isinstance(m, Method) else "bare") for m in matches) - raise Exception( - f"Could not find an exact method to use for {on_complete.name} with call_config of {call_config.name}, " - f"specify the exact method using abi_method and args parameters, considered: {signatures}" - ) - else: # no 
match - raise Exception( - f"Could not find any methods to use for {on_complete.name} with call_config of {call_config.name}" - ) - - def _get_approval_source_map(self) -> SourceMap | None: - if self.approval_source_map: - return self.approval_source_map - - try: - approval, _ = self._check_is_compiled() - except au_deploy.DeploymentFailedError: - return None - return approval.source_map - - def export_source_map(self) -> str | None: - """Export approval source map to JSON, can be later re-imported with `import_source_map`""" - source_map = self._get_approval_source_map() - if source_map: - return json.dumps( - { - "version": source_map.version, - "sources": source_map.sources, - "mappings": source_map.mappings, - } - ) - return None - - def import_source_map(self, source_map_json: str) -> None: - """Import approval source from JSON exported by `export_source_map`""" - source_map = json.loads(source_map_json) - self._approval_source_map = SourceMap(source_map) - - def add_method_call( # noqa: PLR0913 - self, - atc: AtomicTransactionComposer, - abi_method: ABIMethod | bool | None = None, - *, - abi_args: ABIArgsDict | None = None, - app_id: int | None = None, - parameters: TransactionParameters | TransactionParametersDict | None = None, - on_complete: transaction.OnComplete = transaction.OnComplete.NoOpOC, - local_schema: transaction.StateSchema | None = None, - global_schema: transaction.StateSchema | None = None, - approval_program: bytes | None = None, - clear_program: bytes | None = None, - extra_pages: int | None = None, - app_args: list[bytes] | None = None, - call_config: au_spec.CallConfig = au_spec.CallConfig.CALL, - ) -> None: - """Adds a transaction to the AtomicTransactionComposer passed""" - if app_id is None: - self._load_reference_and_check_app_id() - app_id = self.app_id - parameters = _convert_transaction_parameters(parameters) - method = self._resolve_method(abi_method, abi_args, on_complete, call_config) - sp = parameters.suggested_params or 
self.suggested_params or self.algod_client.suggested_params() - signer, sender = self.resolve_signer_sender(parameters.signer, parameters.sender) - if parameters.boxes is not None: - # TODO: algosdk actually does this, but it's type hints say otherwise... - encoded_boxes = [(id_, algosdk.encoding.encode_as_bytes(name)) for id_, name in parameters.boxes] - else: - encoded_boxes = None - - encoded_lease = parameters.lease.encode("utf-8") if isinstance(parameters.lease, str) else parameters.lease - - if not method: # not an abi method, treat as a regular call - if abi_args: - raise Exception(f"ABI arguments specified on a bare call: {', '.join(abi_args)}") - atc.add_transaction( - TransactionWithSigner( - txn=transaction.ApplicationCallTxn( # type: ignore[no-untyped-call] - sender=sender, - sp=sp, - index=app_id, - on_complete=on_complete, - approval_program=approval_program, - clear_program=clear_program, - global_schema=global_schema, - local_schema=local_schema, - extra_pages=extra_pages, - accounts=parameters.accounts, - foreign_apps=parameters.foreign_apps, - foreign_assets=parameters.foreign_assets, - boxes=encoded_boxes, - note=parameters.note, - lease=encoded_lease, - rekey_to=parameters.rekey_to, - app_args=app_args, - ), - signer=signer, - ) - ) - return - # resolve ABI method args - args = self._get_abi_method_args(abi_args, method) - atc.add_method_call( - app_id, - method, - sender, - sp, - signer, - method_args=args, - on_complete=on_complete, - local_schema=local_schema, - global_schema=global_schema, - approval_program=approval_program, - clear_program=clear_program, - extra_pages=extra_pages or 0, - accounts=parameters.accounts, - foreign_apps=parameters.foreign_apps, - foreign_assets=parameters.foreign_assets, - boxes=encoded_boxes, - note=parameters.note.encode("utf-8") if isinstance(parameters.note, str) else parameters.note, - lease=encoded_lease, - rekey_to=parameters.rekey_to, - ) - - def _get_abi_method_args(self, abi_args: ABIArgsDict | None, 
method: Method) -> list: - args: list = [] - hints = self._method_hints(method) - # copy args so we don't mutate original - abi_args = dict(abi_args or {}) - for method_arg in method.args: - name = method_arg.name - if name in abi_args: - argument = abi_args.pop(name) - if isinstance(argument, dict): - if hints.structs is None or name not in hints.structs: - raise Exception(f"Argument missing struct hint: {name}. Check argument name and type") - - elements = hints.structs[name]["elements"] - - argument_tuple = tuple(argument[field_name] for field_name, field_type in elements) - args.append(argument_tuple) - else: - args.append(argument) - - elif hints.default_arguments is not None and name in hints.default_arguments: - default_arg = hints.default_arguments[name] - if default_arg is not None: - args.append(self.resolve(default_arg)) - else: - raise Exception(f"Unspecified argument: {name}") - if abi_args: - raise Exception(f"Unused arguments specified: {', '.join(abi_args)}") - return args - - def _method_matches( - self, - method: Method, - args: ABIArgsDict | None, - on_complete: transaction.OnComplete, - call_config: au_spec.CallConfig, - ) -> bool: - hints = self._method_hints(method) - if call_config not in au_deploy.get_call_config(hints.call_config, on_complete): - return False - method_args = {m.name for m in method.args} - provided_args = set(args or {}) | set(hints.default_arguments) - - # TODO: also match on types? 
- return method_args == provided_args - - def _find_abi_methods( - self, args: ABIArgsDict | None, on_complete: transaction.OnComplete, call_config: au_spec.CallConfig - ) -> list[Method]: - return [ - method - for method in self.app_spec.contract.methods - if self._method_matches(method, args, on_complete, call_config) - ] - - def _resolve_abi_method(self, method: ABIMethod) -> Method: - if isinstance(method, str): - try: - return next(iter(m for m in self.app_spec.contract.methods if m.get_signature() == method)) - except StopIteration: - pass - return self.app_spec.contract.get_method_by_name(method) - elif hasattr(method, "method_spec"): - return method.method_spec() - else: - return method - - def _method_hints(self, method: Method) -> au_spec.MethodHints: - sig = method.get_signature() - if sig not in self.app_spec.hints: - return au_spec.MethodHints() - return self.app_spec.hints[sig] - - def _execute_atc_tr(self, atc: AtomicTransactionComposer) -> TransactionResponse: - result = self.execute_atc(atc) - return TransactionResponse.from_atr(result) - - def execute_atc(self, atc: AtomicTransactionComposer) -> AtomicTransactionResponse: - return execute_atc_with_logic_error( - atc, - self.algod_client, - approval_program=self.app_spec.approval_program, - approval_source_map=self._get_approval_source_map, - ) - - def get_signer_sender( - self, signer: TransactionSigner | None = None, sender: str | None = None - ) -> tuple[TransactionSigner | None, str | None]: - """Return signer and sender, using default values on client if not specified - - Will use provided values if given, otherwise will fall back to values defined on client. 
- If no sender is specified then will attempt to obtain sender from signer""" - resolved_signer = signer or self.signer - resolved_sender = sender or get_sender_from_signer(signer) or self.sender or get_sender_from_signer(self.signer) - return resolved_signer, resolved_sender - - def resolve_signer_sender( - self, signer: TransactionSigner | None = None, sender: str | None = None - ) -> tuple[TransactionSigner, str]: - """Return signer and sender, using default values on client if not specified - - Will use provided values if given, otherwise will fall back to values defined on client. - If no sender is specified then will attempt to obtain sender from signer - - :raises ValueError: Raised if a signer or sender is not provided. See `get_signer_sender` - for variant with no exception""" - resolved_signer, resolved_sender = self.get_signer_sender(signer, sender) - if not resolved_signer: - raise ValueError("No signer provided") - if not resolved_sender: - raise ValueError("No sender provided") - return resolved_signer, resolved_sender - - # TODO: remove private implementation, kept in the 1.0.2 release to not impact existing beaker 1.0 installs - _resolve_signer_sender = resolve_signer_sender - - -def substitute_template_and_compile( - algod_client: "AlgodClient", - app_spec: au_spec.ApplicationSpecification, - template_values: au_deploy.TemplateValueMapping, -) -> tuple[Program, Program]: - """Substitutes the provided template_values into app_spec and compiles""" - template_values = dict(template_values or {}) - clear = au_deploy.replace_template_variables(app_spec.clear_program, template_values) - - au_deploy.check_template_variables(app_spec.approval_program, template_values) - approval = au_deploy.replace_template_variables(app_spec.approval_program, template_values) - - approval_app, clear_app = Program(approval, algod_client), Program(clear, algod_client) - - return approval_app, clear_app - - -def get_next_version(current_version: str) -> str: - """Calculates 
the next version from `current_version` - - Next version is calculated by finding a semver like - version string and incrementing the lower. This function is used by {py:meth}`ApplicationClient.deploy` when - a version is not specified, and is intended mostly for convenience during local development. - - :params str current_version: An existing version string with a semver like version contained within it, - some valid inputs and incremented outputs: - `1` -> `2` - `1.0` -> `1.1` - `v1.1` -> `v1.2` - `v1.1-beta1` -> `v1.2-beta1` - `v1.2.3.4567` -> `v1.2.3.4568` - `v1.2.3.4567-alpha` -> `v1.2.3.4568-alpha` - :raises DeploymentFailedError: If `current_version` cannot be parsed""" - pattern = re.compile(r"(?P\w*)(?P(?:\d+\.)*\d+)(?P\w*)") - match = pattern.match(current_version) - if match: - version = match.group("version") - new_version = _increment_version(version) - - def replacement(m: re.Match) -> str: - return f"{m.group('prefix')}{new_version}{m.group('suffix')}" - - return re.sub(pattern, replacement, current_version) - raise au_deploy.DeploymentFailedError( - f"Could not auto increment {current_version}, please specify the next version using the version parameter" - ) - - -def _try_convert_to_logic_error( - source_ex: Exception | str, - approval_program: str, - approval_source_map: SourceMap | typing.Callable[[], SourceMap | None] | None = None, - simulate_traces: list[SimulationTrace] | None = None, -) -> Exception | None: - source_ex_str = str(source_ex) - logic_error_data = parse_logic_error(source_ex_str) - if logic_error_data: - return LogicError( - logic_error_str=source_ex_str, - logic_error=source_ex if isinstance(source_ex, Exception) else None, - program=approval_program, - source_map=approval_source_map() if callable(approval_source_map) else approval_source_map, - **logic_error_data, - traces=simulate_traces, - ) - - return None - - -def execute_atc_with_logic_error( - atc: AtomicTransactionComposer, - algod_client: "AlgodClient", - 
approval_program: str, - wait_rounds: int = 4, - approval_source_map: SourceMap | typing.Callable[[], SourceMap | None] | None = None, -) -> AtomicTransactionResponse: - """Calls {py:meth}`AtomicTransactionComposer.execute` on provided `atc`, but will parse any errors - and raise a {py:class}`LogicError` if possible - - ```{note} - `approval_program` and `approval_source_map` are required to be able to parse any errors into a - {py:class}`LogicError` - ``` - """ - try: - if config.debug and config.project_root and config.trace_all: - simulate_and_persist_response(atc, config.project_root, algod_client, config.trace_buffer_size_mb) - - return atc.execute(algod_client, wait_rounds=wait_rounds) - except Exception as ex: - if config.debug: - simulate = None - if config.project_root and not config.trace_all: - # if trace_all is enabled, we already have the traces executed above - # hence we only need to simulate if trace_all is disabled and - # project_root is set - simulate = simulate_and_persist_response( - atc, config.project_root, algod_client, config.trace_buffer_size_mb - ) - else: - simulate = simulate_response(atc, algod_client) - traces = _create_simulate_traces(simulate) - else: - traces = None - logger.info("An error occurred while executing the transaction.") - logger.info("To see more details, enable debug mode by setting config.debug = True ") - - logic_error = _try_convert_to_logic_error(ex, approval_program, approval_source_map, traces) - if logic_error: - raise logic_error from ex - raise ex - - -def _create_simulate_traces(simulate: SimulateAtomicTransactionResponse) -> list[SimulationTrace]: - traces = [] - if hasattr(simulate, "simulate_response") and hasattr(simulate, "failed_at") and simulate.failed_at: - for txn_group in simulate.simulate_response["txn-groups"]: - app_budget_added = txn_group.get("app-budget-added", None) - app_budget_consumed = txn_group.get("app-budget-consumed", None) - failure_message = txn_group.get("failure-message", None) - 
txn_result = txn_group.get("txn-results", [{}])[0] - exec_trace = txn_result.get("exec-trace", {}) - traces.append( - SimulationTrace( - app_budget_added=app_budget_added, - app_budget_consumed=app_budget_consumed, - failure_message=failure_message, - exec_trace=exec_trace, - ) - ) - return traces - - -def _convert_transaction_parameters( - args: TransactionParameters | TransactionParametersDict | None, -) -> CreateCallParameters: - _args = args.__dict__ if isinstance(args, TransactionParameters) else (args or {}) - return CreateCallParameters(**_args) - - -def get_sender_from_signer(signer: TransactionSigner | None) -> str | None: - """Returns the associated address of a signer, return None if no address found""" - - if isinstance(signer, AccountTransactionSigner): - sender = address_from_private_key(signer.private_key) # type: ignore[no-untyped-call] - assert isinstance(sender, str) - return sender - elif isinstance(signer, MultisigTransactionSigner): - sender = signer.msig.address() # type: ignore[no-untyped-call] - assert isinstance(sender, str) - return sender - elif isinstance(signer, LogicSigTransactionSigner): - return signer.lsig.address() - return None - - -# TEMPORARY, use SDK one when available -def _parse_result( - methods: dict[int, Method], - txns: list[dict[str, Any]], - txids: list[str], -) -> list[ABIResult]: - method_results = [] - for i, tx_info in enumerate(txns): - raw_value = b"" - return_value = None - decode_error = None - - if i not in methods: - continue - - # Parse log for ABI method return value - try: - if methods[i].returns.type == Returns.VOID: - method_results.append( - ABIResult( - tx_id=txids[i], - raw_value=raw_value, - return_value=return_value, - decode_error=decode_error, - tx_info=tx_info, - method=methods[i], - ) - ) - continue - - logs = tx_info.get("logs", []) - - # Look for the last returned value in the log - if not logs: - raise Exception("No logs") - - result = logs[-1] - # Check that the first four bytes is the hash 
of "return" - result_bytes = base64.b64decode(result) - if len(result_bytes) < len(ABI_RETURN_HASH) or result_bytes[: len(ABI_RETURN_HASH)] != ABI_RETURN_HASH: - raise Exception("no logs") - - raw_value = result_bytes[4:] - abi_return_type = methods[i].returns.type - if isinstance(abi_return_type, ABIType): - return_value = abi_return_type.decode(raw_value) - else: - return_value = raw_value - - except Exception as e: - decode_error = e - - method_results.append( - ABIResult( - tx_id=txids[i], - raw_value=raw_value, - return_value=return_value, - decode_error=decode_error, - tx_info=tx_info, - method=methods[i], - ) - ) - - return method_results - - -def _increment_version(version: str) -> str: - split = list(map(int, version.split("."))) - split[-1] = split[-1] + 1 - return ".".join(str(x) for x in split) - - -def _str_or_hex(v: bytes) -> str: - decoded: str - try: - decoded = v.decode("utf-8") - except UnicodeDecodeError: - decoded = v.hex() - - return decoded - - -def _decode_state(state: list[dict[str, Any]], *, raw: bool = False) -> dict[str | bytes, bytes | str | int | None]: - decoded_state: dict[str | bytes, bytes | str | int | None] = {} - - for state_value in state: - raw_key = base64.b64decode(state_value["key"]) - - key: str | bytes = raw_key if raw else _str_or_hex(raw_key) - val: str | bytes | int | None - - action = state_value["value"]["action"] if "action" in state_value["value"] else state_value["value"]["type"] - - match action: - case 1: - raw_val = base64.b64decode(state_value["value"]["bytes"]) - val = raw_val if raw else _str_or_hex(raw_val) - case 2: - val = state_value["value"]["uint"] - case 3: - val = None - case _: - raise NotImplementedError - - decoded_state[key] = val - return decoded_state +from algokit_utils._legacy_v2.application_client import * # noqa: F403 diff --git a/src/algokit_utils/application_specification.py b/src/algokit_utils/application_specification.py index 392fce8d..56c286ee 100644 --- 
a/src/algokit_utils/application_specification.py +++ b/src/algokit_utils/application_specification.py @@ -1,206 +1 @@ -import base64 -import dataclasses -import json -from enum import IntFlag -from pathlib import Path -from typing import Any, Literal, TypeAlias, TypedDict - -from algosdk.abi import Contract -from algosdk.abi.method import MethodDict -from algosdk.transaction import StateSchema - -__all__ = [ - "CallConfig", - "DefaultArgumentDict", - "DefaultArgumentType", - "MethodConfigDict", - "OnCompleteActionName", - "MethodHints", - "ApplicationSpecification", - "AppSpecStateDict", -] - - -AppSpecStateDict: TypeAlias = dict[str, dict[str, dict]] -"""Type defining Application Specification state entries""" - - -class CallConfig(IntFlag): - """Describes the type of calls a method can be used for based on {py:class}`algosdk.transaction.OnComplete` type""" - - NEVER = 0 - """Never handle the specified on completion type""" - CALL = 1 - """Only handle the specified on completion type for application calls""" - CREATE = 2 - """Only handle the specified on completion type for application create calls""" - ALL = 3 - """Handle the specified on completion type for both create and normal application calls""" - - -class StructArgDict(TypedDict): - name: str - elements: list[list[str]] - - -OnCompleteActionName: TypeAlias = Literal[ - "no_op", "opt_in", "close_out", "clear_state", "update_application", "delete_application" -] -"""String literals representing on completion transaction types""" -MethodConfigDict: TypeAlias = dict[OnCompleteActionName, CallConfig] -"""Dictionary of `dict[OnCompletionActionName, CallConfig]` representing allowed actions for each on completion type""" -DefaultArgumentType: TypeAlias = Literal["abi-method", "local-state", "global-state", "constant"] -"""Literal values describing the types of default argument sources""" - - -class DefaultArgumentDict(TypedDict): - """ - DefaultArgument is a container for any arguments that may - be resolved 
prior to calling some target method - """ - - source: DefaultArgumentType - data: int | str | bytes | MethodDict - - -StateDict = TypedDict( # need to use function-form of TypedDict here since "global" is a reserved keyword - "StateDict", {"global": AppSpecStateDict, "local": AppSpecStateDict} -) - - -@dataclasses.dataclass(kw_only=True) -class MethodHints: - """MethodHints provides hints to the caller about how to call the method""" - - #: hint to indicate this method can be called through Dryrun - read_only: bool = False - #: hint to provide names for tuple argument indices - #: method_name=>param_name=>{name:str, elements:[str,str]} - structs: dict[str, StructArgDict] = dataclasses.field(default_factory=dict) - #: defaults - default_arguments: dict[str, DefaultArgumentDict] = dataclasses.field(default_factory=dict) - call_config: MethodConfigDict = dataclasses.field(default_factory=dict) - - def empty(self) -> bool: - return not self.dictify() - - def dictify(self) -> dict[str, Any]: - d: dict[str, Any] = {} - if self.read_only: - d["read_only"] = True - if self.default_arguments: - d["default_arguments"] = self.default_arguments - if self.structs: - d["structs"] = self.structs - if any(v for v in self.call_config.values() if v != CallConfig.NEVER): - d["call_config"] = _encode_method_config(self.call_config) - return d - - @staticmethod - def undictify(data: dict[str, Any]) -> "MethodHints": - return MethodHints( - read_only=data.get("read_only", False), - default_arguments=data.get("default_arguments", {}), - structs=data.get("structs", {}), - call_config=_decode_method_config(data.get("call_config", {})), - ) - - -def _encode_method_config(mc: MethodConfigDict) -> dict[str, str | None]: - return {k: mc[k].name for k in sorted(mc) if mc[k] != CallConfig.NEVER} - - -def _decode_method_config(data: dict[OnCompleteActionName, Any]) -> MethodConfigDict: - return {k: CallConfig[v] for k, v in data.items()} - - -def _encode_source(teal_text: str) -> str: - return 
base64.b64encode(teal_text.encode()).decode("utf-8") - - -def _decode_source(b64_text: str) -> str: - return base64.b64decode(b64_text).decode("utf-8") - - -def _encode_state_schema(schema: StateSchema) -> dict[str, int]: - return { - "num_byte_slices": schema.num_byte_slices, - "num_uints": schema.num_uints, - } - - -def _decode_state_schema(data: dict[str, int]) -> StateSchema: - return StateSchema( # type: ignore[no-untyped-call] - num_byte_slices=data.get("num_byte_slices", 0), - num_uints=data.get("num_uints", 0), - ) - - -@dataclasses.dataclass(kw_only=True) -class ApplicationSpecification: - """ARC-0032 application specification - - See """ - - approval_program: str - clear_program: str - contract: Contract - hints: dict[str, MethodHints] - schema: StateDict - global_state_schema: StateSchema - local_state_schema: StateSchema - bare_call_config: MethodConfigDict - - def dictify(self) -> dict: - return { - "hints": {k: v.dictify() for k, v in self.hints.items() if not v.empty()}, - "source": { - "approval": _encode_source(self.approval_program), - "clear": _encode_source(self.clear_program), - }, - "state": { - "global": _encode_state_schema(self.global_state_schema), - "local": _encode_state_schema(self.local_state_schema), - }, - "schema": self.schema, - "contract": self.contract.dictify(), - "bare_call_config": _encode_method_config(self.bare_call_config), - } - - def to_json(self) -> str: - return json.dumps(self.dictify(), indent=4) - - @staticmethod - def from_json(application_spec: str) -> "ApplicationSpecification": - json_spec = json.loads(application_spec) - return ApplicationSpecification( - approval_program=_decode_source(json_spec["source"]["approval"]), - clear_program=_decode_source(json_spec["source"]["clear"]), - schema=json_spec["schema"], - global_state_schema=_decode_state_schema(json_spec["state"]["global"]), - local_state_schema=_decode_state_schema(json_spec["state"]["local"]), - contract=Contract.undictify(json_spec["contract"]), - 
hints={k: MethodHints.undictify(v) for k, v in json_spec["hints"].items()}, - bare_call_config=_decode_method_config(json_spec.get("bare_call_config", {})), - ) - - def export(self, directory: Path | str | None = None) -> None: - """write out the artifacts generated by the application to disk - - Args: - directory(optional): path to the directory where the artifacts should be written - """ - if directory is None: - output_dir = Path.cwd() - else: - output_dir = Path(directory) - output_dir.mkdir(exist_ok=True, parents=True) - - (output_dir / "approval.teal").write_text(self.approval_program) - (output_dir / "clear.teal").write_text(self.clear_program) - (output_dir / "contract.json").write_text(json.dumps(self.contract.dictify(), indent=4)) - (output_dir / "application.json").write_text(self.to_json()) - - -def _state_schema(schema: dict[str, int]) -> StateSchema: - return StateSchema(schema.get("num-uint", 0), schema.get("num-byte-slice", 0)) # type: ignore[no-untyped-call] +from algokit_utils._legacy_v2.application_specification import * # noqa: F403 diff --git a/src/algokit_utils/applications/__init__.py b/src/algokit_utils/applications/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/applications/models.py b/src/algokit_utils/applications/models.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/asset.py b/src/algokit_utils/asset.py index 085ea8c5..4d9c8522 100644 --- a/src/algokit_utils/asset.py +++ b/src/algokit_utils/asset.py @@ -1,168 +1 @@ -import logging -from typing import TYPE_CHECKING - -from algosdk.atomic_transaction_composer import AtomicTransactionComposer, TransactionWithSigner -from algosdk.constants import TX_GROUP_LIMIT -from algosdk.transaction import AssetTransferTxn - -if TYPE_CHECKING: - from algosdk.v2client.algod import AlgodClient - -from enum import Enum, auto - -from algokit_utils.models import Account - -__all__ = ["opt_in", "opt_out"] -logger = 
logging.getLogger(__name__) - - -class ValidationType(Enum): - OPTIN = auto() - OPTOUT = auto() - - -def _ensure_account_is_valid(algod_client: "AlgodClient", account: Account) -> None: - try: - algod_client.account_info(account.address) - except Exception as err: - error_message = f"Account address{account.address} does not exist" - logger.debug(error_message) - raise err - - -def _ensure_asset_balance_conditions( - algod_client: "AlgodClient", account: Account, asset_ids: list, validation_type: ValidationType -) -> None: - invalid_asset_ids = [] - account_info = algod_client.account_info(account.address) - account_assets = account_info.get("assets", []) # type: ignore # noqa: PGH003 - for asset_id in asset_ids: - asset_exists_in_account_info = any(asset["asset-id"] == asset_id for asset in account_assets) - if validation_type == ValidationType.OPTIN: - if asset_exists_in_account_info: - logger.debug(f"Asset {asset_id} is already opted in for account {account.address}") - invalid_asset_ids.append(asset_id) - - elif validation_type == ValidationType.OPTOUT: - if not account_assets or not asset_exists_in_account_info: - logger.debug(f"Account {account.address} does not have asset {asset_id}") - invalid_asset_ids.append(asset_id) - else: - asset_balance = next((asset["amount"] for asset in account_assets if asset["asset-id"] == asset_id), 0) - if asset_balance != 0: - logger.debug(f"Asset {asset_id} balance is not zero") - invalid_asset_ids.append(asset_id) - - if len(invalid_asset_ids) > 0: - action = "opted out" if validation_type == ValidationType.OPTOUT else "opted in" - condition_message = ( - "their amount is zero and that the account has" - if validation_type == ValidationType.OPTOUT - else "they are valid and that the account has not" - ) - - error_message = ( - f"Assets {invalid_asset_ids} cannot be {action}. Ensure that " - f"{condition_message} previously opted into them." 
- ) - raise ValueError(error_message) - - -def opt_in(algod_client: "AlgodClient", account: Account, asset_ids: list[int]) -> dict[int, str]: - """ - Opt-in to a list of assets on the Algorand blockchain. Before an account can receive a specific asset, - it must `opt-in` to receive it. An opt-in transaction places an asset holding of 0 into the account and increases - its minimum balance by [100,000 microAlgos](https://developer.algorand.org/docs/get-details/asa/#assets-overview). - - Args: - algod_client (AlgodClient): An instance of the AlgodClient class from the algosdk library. - account (Account): An instance of the Account class representing the account that wants to opt-in to the assets. - asset_ids (list[int]): A list of integers representing the asset IDs to opt-in to. - Returns: - dict[int, str]: A dictionary where the keys are the asset IDs and the values - are the transaction IDs for opting-in to each asset. - """ - _ensure_account_is_valid(algod_client, account) - _ensure_asset_balance_conditions(algod_client, account, asset_ids, ValidationType.OPTIN) - suggested_params = algod_client.suggested_params() - result = {} - for i in range(0, len(asset_ids), TX_GROUP_LIMIT): - atc = AtomicTransactionComposer() - chunk = asset_ids[i : i + TX_GROUP_LIMIT] - for asset_id in chunk: - asset = algod_client.asset_info(asset_id) - xfer_txn = AssetTransferTxn( - sp=suggested_params, - sender=account.address, - receiver=account.address, - close_assets_to=None, - revocation_target=None, - amt=0, - note=f"opt in asset id ${asset_id}", - index=asset["index"], # type: ignore # noqa: PGH003 - rekey_to=None, - ) - - transaction_with_signer = TransactionWithSigner( - txn=xfer_txn, - signer=account.signer, - ) - atc.add_transaction(transaction_with_signer) - atc.execute(algod_client, 4) - - for index, asset_id in enumerate(chunk): - result[asset_id] = atc.tx_ids[index] - - return result - - -def opt_out(algod_client: "AlgodClient", account: Account, asset_ids: list[int]) -> 
dict[int, str]: - """ - Opt out from a list of Algorand Standard Assets (ASAs) by transferring them back to their creators. - The account also recovers the Minimum Balance Requirement for the asset (100,000 microAlgos) - The `optOut` function manages the opt-out process, permitting the account to discontinue holding a group of assets. - - It's essential to note that an account can only opt_out of an asset if its balance of that asset is zero. - - Args: - algod_client (AlgodClient): An instance of the AlgodClient class from the `algosdk` library. - account (Account): An instance of the Account class that holds the private key and address for an account. - asset_ids (list[int]): A list of integers representing the asset IDs of the ASAs to opt out from. - Returns: - dict[int, str]: A dictionary where the keys are the asset IDs and the values are the transaction IDs of - the executed transactions. - - """ - _ensure_account_is_valid(algod_client, account) - _ensure_asset_balance_conditions(algod_client, account, asset_ids, ValidationType.OPTOUT) - suggested_params = algod_client.suggested_params() - result = {} - for i in range(0, len(asset_ids), TX_GROUP_LIMIT): - atc = AtomicTransactionComposer() - chunk = asset_ids[i : i + TX_GROUP_LIMIT] - for asset_id in chunk: - asset = algod_client.asset_info(asset_id) - asset_creator = asset["params"]["creator"] # type: ignore # noqa: PGH003 - xfer_txn = AssetTransferTxn( - sp=suggested_params, - sender=account.address, - receiver=account.address, - close_assets_to=asset_creator, - revocation_target=None, - amt=0, - note=f"opt out asset id ${asset_id}", - index=asset["index"], # type: ignore # noqa: PGH003 - rekey_to=None, - ) - - transaction_with_signer = TransactionWithSigner( - txn=xfer_txn, - signer=account.signer, - ) - atc.add_transaction(transaction_with_signer) - atc.execute(algod_client, 4) - - for index, asset_id in enumerate(chunk): - result[asset_id] = atc.tx_ids[index] - - return result +from 
algokit_utils._legacy_v2.asset import * # noqa: F403 diff --git a/src/algokit_utils/assets/__init__.py b/src/algokit_utils/assets/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/assets/models.py b/src/algokit_utils/assets/models.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/clients/__init__.py b/src/algokit_utils/clients/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/beta/algorand_client.py b/src/algokit_utils/clients/algorand_client.py similarity index 96% rename from src/algokit_utils/beta/algorand_client.py rename to src/algokit_utils/clients/algorand_client.py index e80dadaf..7e02e20a 100644 --- a/src/algokit_utils/beta/algorand_client.py +++ b/src/algokit_utils/clients/algorand_client.py @@ -4,10 +4,21 @@ from dataclasses import dataclass from typing import Any -from algokit_utils.beta.account_manager import AccountManager -from algokit_utils.beta.client_manager import AlgoSdkClients, ClientManager -from algokit_utils.beta.composer import ( - AlgokitComposer, +from algosdk.atomic_transaction_composer import AtomicTransactionResponse, TransactionSigner +from algosdk.transaction import SuggestedParams, Transaction, wait_for_confirmation +from typing_extensions import Self + +from algokit_utils.accounts.account_manager import AccountManager +from algokit_utils.clients.client_manager import AlgoSdkClients, ClientManager +from algokit_utils.network_clients import ( + AlgoClientConfigs, + get_algod_client, + get_algonode_config, + get_default_localnet_config, + get_indexer_client, + get_kmd_client, +) +from algokit_utils.transactions.transaction_composer import ( AppCallParams, AssetConfigParams, AssetCreateParams, @@ -18,18 +29,8 @@ MethodCallParams, OnlineKeyRegParams, PayParams, + TransactionComposer, ) -from algokit_utils.network_clients import ( - AlgoClientConfigs, - get_algod_client, - get_algonode_config, - get_default_localnet_config, - 
get_indexer_client, - get_kmd_client, -) -from algosdk.atomic_transaction_composer import AtomicTransactionResponse, TransactionSigner -from algosdk.transaction import SuggestedParams, Transaction, wait_for_confirmation -from typing_extensions import Self __all__ = [ "AlgorandClient", @@ -176,9 +177,9 @@ def account(self) -> AccountManager: """Get or create accounts that can sign transactions.""" return self._account_manager - def new_group(self) -> AlgokitComposer: - """Start a new `AlgokitComposer` transaction group""" - return AlgokitComposer( + def new_group(self) -> TransactionComposer: + """Start a new `TransactionComposer` transaction group""" + return TransactionComposer( algod=self.client.algod, get_signer=lambda addr: self.account.get_signer(addr), get_suggested_params=self.get_suggested_params, diff --git a/src/algokit_utils/beta/client_manager.py b/src/algokit_utils/clients/client_manager.py similarity index 73% rename from src/algokit_utils/beta/client_manager.py rename to src/algokit_utils/clients/client_manager.py index 1069eacf..16108520 100644 --- a/src/algokit_utils/beta/client_manager.py +++ b/src/algokit_utils/clients/client_manager.py @@ -1,21 +1,18 @@ import algosdk -from algokit_utils.dispenser_api import TestNetDispenserApiClient -from algokit_utils.network_clients import AlgoClientConfigs, get_algod_client, get_indexer_client, get_kmd_client from algosdk.kmd import KMDClient from algosdk.v2client.algod import AlgodClient from algosdk.v2client.indexer import IndexerClient +from algokit_utils.clients.dispenser_api_client import TestNetDispenserApiClient +from algokit_utils.network_clients import ( + AlgoClientConfigs, + get_algod_client, + get_indexer_client, + get_kmd_client, +) -class AlgoSdkClients: - """ - Clients from algosdk that interact with the official Algorand APIs. 
- - Attributes: - algod (AlgodClient): Algod client, see https://developer.algorand.org/docs/rest-apis/algod/ - indexer (Optional[IndexerClient]): Optional indexer client, see https://developer.algorand.org/docs/rest-apis/indexer/ - kmd (Optional[KMDClient]): Optional KMD client, see https://developer.algorand.org/docs/rest-apis/kmd/ - """ +class AlgoSdkClients: def __init__( self, algod: algosdk.v2client.algod.AlgodClient, @@ -28,13 +25,6 @@ def __init__( class ClientManager: - """ - Exposes access to various API clients. - - Args: - clients_or_config (Union[AlgoConfig, AlgoSdkClients]): algosdk clients or config for interacting with the official Algorand APIs. - """ - def __init__(self, clients_or_configs: AlgoClientConfigs | AlgoSdkClients): if isinstance(clients_or_configs, AlgoSdkClients): _clients = clients_or_configs diff --git a/src/algokit_utils/clients/dispenser_api_client.py b/src/algokit_utils/clients/dispenser_api_client.py new file mode 100644 index 00000000..66593e80 --- /dev/null +++ b/src/algokit_utils/clients/dispenser_api_client.py @@ -0,0 +1,178 @@ +import contextlib +import enum +import logging +import os +from dataclasses import dataclass + +import httpx + +logger = logging.getLogger(__name__) + + +class DispenserApiConfig: + BASE_URL = "https://api.dispenser.algorandfoundation.tools" + + +class DispenserAssetName(enum.IntEnum): + ALGO = 0 + + +@dataclass +class DispenserAsset: + asset_id: int + decimals: int + description: str + + +@dataclass +class DispenserFundResponse: + tx_id: str + amount: int + + +@dataclass +class DispenserLimitResponse: + amount: int + + +DISPENSER_ASSETS = { + DispenserAssetName.ALGO: DispenserAsset( + asset_id=0, + decimals=6, + description="Algo", + ), +} +DISPENSER_REQUEST_TIMEOUT = 15 +DISPENSER_ACCESS_TOKEN_KEY = "ALGOKIT_DISPENSER_ACCESS_TOKEN" + + +class TestNetDispenserApiClient: + """ + Client for interacting with the [AlgoKit TestNet Dispenser 
API](https://github.com/algorandfoundation/algokit/blob/main/docs/testnet_api.md). + To get started create a new access token via `algokit dispenser login --ci` + and pass it to the client constructor as `auth_token`. + Alternatively set the access token as environment variable `ALGOKIT_DISPENSER_ACCESS_TOKEN`, + and it will be auto loaded. If both are set, the constructor argument takes precedence. + + Default request timeout is 15 seconds. Modify by passing `request_timeout` to the constructor. + """ + + auth_token: str + request_timeout = DISPENSER_REQUEST_TIMEOUT + + def __init__(self, auth_token: str | None = None, request_timeout: int = DISPENSER_REQUEST_TIMEOUT): + auth_token_from_env = os.getenv(DISPENSER_ACCESS_TOKEN_KEY) + + if auth_token: + self.auth_token = auth_token + elif auth_token_from_env: + self.auth_token = auth_token_from_env + else: + raise Exception( + f"Can't init AlgoKit TestNet Dispenser API client " + f"because neither environment variable {DISPENSER_ACCESS_TOKEN_KEY} or " + "the auth_token were provided." 
+ ) + + self.request_timeout = request_timeout + + def _process_dispenser_request( + self, *, auth_token: str, url_suffix: str, data: dict | None = None, method: str = "POST" + ) -> httpx.Response: + """ + Generalized method to process http requests to dispenser API + """ + + headers = {"Authorization": f"Bearer {(auth_token)}"} + + # Set request arguments + request_args = { + "url": f"{DispenserApiConfig.BASE_URL}/{url_suffix}", + "headers": headers, + "timeout": self.request_timeout, + } + + if method.upper() != "GET" and data is not None: + request_args["json"] = data + + try: + response: httpx.Response = getattr(httpx, method.lower())(**request_args) + response.raise_for_status() + return response + + except httpx.HTTPStatusError as err: + error_message = f"Error processing dispenser API request: {err.response.status_code}" + error_response = None + with contextlib.suppress(Exception): + error_response = err.response.json() + + if error_response and error_response.get("code"): + error_message = error_response.get("code") + + elif err.response.status_code == httpx.codes.BAD_REQUEST: + error_message = err.response.json()["message"] + + raise Exception(error_message) from err + + except Exception as err: + error_message = "Error processing dispenser API request" + logger.debug(f"{error_message}: {err}", exc_info=True) + raise err + + def fund(self, address: str, amount: int, asset_id: int) -> DispenserFundResponse: + """ + Fund an account with Algos from the dispenser API + """ + + try: + response = self._process_dispenser_request( + auth_token=self.auth_token, + url_suffix=f"fund/{asset_id}", + data={"receiver": address, "amount": amount, "assetID": asset_id}, + method="POST", + ) + + content = response.json() + return DispenserFundResponse(tx_id=content["txID"], amount=content["amount"]) + + except Exception as err: + logger.exception(f"Error funding account {address}: {err}") + raise err + + def refund(self, refund_txn_id: str) -> None: + """ + Register a 
refund for a transaction with the dispenser API + """ + + try: + self._process_dispenser_request( + auth_token=self.auth_token, + url_suffix="refund", + data={"refundTransactionID": refund_txn_id}, + method="POST", + ) + + except Exception as err: + logger.exception(f"Error issuing refund for txn_id {refund_txn_id}: {err}") + raise err + + def get_limit( + self, + address: str, + ) -> DispenserLimitResponse: + """ + Get current limit for an account with Algos from the dispenser API + """ + + try: + response = self._process_dispenser_request( + auth_token=self.auth_token, + url_suffix=f"fund/{DISPENSER_ASSETS[DispenserAssetName.ALGO].asset_id}/limit", + method="GET", + ) + content = response.json() + + return DispenserLimitResponse(amount=content["amount"]) + except Exception as err: + logger.exception(f"Error setting limit for account {address}: {err}") + raise err diff --git a/src/algokit_utils/clients/models.py b/src/algokit_utils/clients/models.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/common.py b/src/algokit_utils/common.py index 8071c98f..45c54a87 100644 --- a/src/algokit_utils/common.py +++ b/src/algokit_utils/common.py @@ -1,28 +1 @@ -""" -This module contains common classes and methods that are reused in more than one file. 
-""" - -import base64 -import typing - -from algosdk.source_map import SourceMap - -from algokit_utils import deploy - -if typing.TYPE_CHECKING: - from algosdk.v2client.algod import AlgodClient - - -class Program: - """A compiled TEAL program""" - - def __init__(self, program: str, client: "AlgodClient"): - """ - Fully compile the program source to binary and generate a - source map for matching pc to line number - """ - self.teal = program - result: dict = client.compile(deploy.strip_comments(self.teal), source_map=True) - self.raw_binary = base64.b64decode(result["result"]) - self.binary_hash: str = result["hash"] - self.source_map = SourceMap(result["sourcemap"]) +from algokit_utils._legacy_v2.common import * # noqa: F403 diff --git a/src/algokit_utils/deploy.py b/src/algokit_utils/deploy.py index bb01c4f2..7543c6c1 100644 --- a/src/algokit_utils/deploy.py +++ b/src/algokit_utils/deploy.py @@ -1,897 +1 @@ -import base64 -import dataclasses -import json -import logging -import re -from collections.abc import Iterable, Mapping, Sequence -from enum import Enum -from typing import TYPE_CHECKING, TypeAlias, TypedDict - -from algosdk import transaction -from algosdk.atomic_transaction_composer import AtomicTransactionComposer, TransactionSigner -from algosdk.logic import get_application_address -from algosdk.transaction import StateSchema - -from algokit_utils.application_specification import ( - ApplicationSpecification, - CallConfig, - MethodConfigDict, - OnCompleteActionName, -) -from algokit_utils.models import ( - ABIArgsDict, - ABIMethod, - Account, - CreateCallParameters, - TransactionResponse, -) - -if TYPE_CHECKING: - from algosdk.v2client.algod import AlgodClient - from algosdk.v2client.indexer import IndexerClient - - from algokit_utils.application_client import ApplicationClient - - -__all__ = [ - "UPDATABLE_TEMPLATE_NAME", - "DELETABLE_TEMPLATE_NAME", - "NOTE_PREFIX", - "ABICallArgs", - "ABICreateCallArgs", - "ABICallArgsDict", - "ABICreateCallArgsDict", 
- "DeploymentFailedError", - "AppReference", - "AppDeployMetaData", - "AppMetaData", - "AppLookup", - "DeployCallArgs", - "DeployCreateCallArgs", - "DeployCallArgsDict", - "DeployCreateCallArgsDict", - "Deployer", - "DeployResponse", - "OnUpdate", - "OnSchemaBreak", - "OperationPerformed", - "TemplateValueDict", - "TemplateValueMapping", - "get_app_id_from_tx_id", - "get_creator_apps", - "replace_template_variables", -] - -logger = logging.getLogger(__name__) - -DEFAULT_INDEXER_MAX_API_RESOURCES_PER_ACCOUNT = 1000 -_UPDATABLE = "UPDATABLE" -_DELETABLE = "DELETABLE" -UPDATABLE_TEMPLATE_NAME = f"TMPL_{_UPDATABLE}" -"""Template variable name used to control if a smart contract is updatable or not at deployment""" -DELETABLE_TEMPLATE_NAME = f"TMPL_{_DELETABLE}" -"""Template variable name used to control if a smart contract is deletable or not at deployment""" -_TOKEN_PATTERN = re.compile(r"TMPL_[A-Z_]+") -TemplateValue: TypeAlias = int | str | bytes -TemplateValueDict: TypeAlias = dict[str, TemplateValue] -"""Dictionary of `dict[str, int | str | bytes]` representing template variable names and values""" -TemplateValueMapping: TypeAlias = Mapping[str, TemplateValue] -"""Mapping of `str` to `int | str | bytes` representing template variable names and values""" - -NOTE_PREFIX = "ALGOKIT_DEPLOYER:j" -"""ARC-0002 compliant note prefix for algokit_utils deployed applications""" -# This prefix is also used to filter for parsable transaction notes in get_creator_apps. -# However, as the note is base64 encoded first we need to consider it's base64 representation. -# When base64 encoding bytes, 3 bytes are stored in every 4 characters. -# So then we don't need to worry about the padding/changing characters of the prefix if it was followed by -# additional characters, assert the NOTE_PREFIX length is a multiple of 3. 
-assert len(NOTE_PREFIX) % 3 == 0 - - -class DeploymentFailedError(Exception): - pass - - -@dataclasses.dataclass -class AppReference: - """Information about an Algorand app""" - - app_id: int - app_address: str - - -@dataclasses.dataclass -class AppDeployMetaData: - """Metadata about an application stored in a transaction note during creation. - - The note is serialized as JSON and prefixed with {py:data}`NOTE_PREFIX` and stored in the transaction note field - as part of {py:meth}`ApplicationClient.deploy` - """ - - name: str - version: str - deletable: bool | None - updatable: bool | None - - @staticmethod - def from_json(value: str) -> "AppDeployMetaData": - json_value: dict = json.loads(value) - json_value.setdefault("deletable", None) - json_value.setdefault("updatable", None) - return AppDeployMetaData(**json_value) - - @classmethod - def from_b64(cls: type["AppDeployMetaData"], b64: str) -> "AppDeployMetaData": - return cls.decode(base64.b64decode(b64)) - - @classmethod - def decode(cls: type["AppDeployMetaData"], value: bytes) -> "AppDeployMetaData": - note = value.decode("utf-8") - assert note.startswith(NOTE_PREFIX) - return cls.from_json(note[len(NOTE_PREFIX) :]) - - def encode(self) -> bytes: - json_str = json.dumps(self.__dict__) - return f"{NOTE_PREFIX}{json_str}".encode() - - -@dataclasses.dataclass -class AppMetaData(AppReference, AppDeployMetaData): - """Metadata about a deployed app""" - - created_round: int - updated_round: int - created_metadata: AppDeployMetaData - deleted: bool - - -@dataclasses.dataclass -class AppLookup: - """Cache of {py:class}`AppMetaData` for a specific `creator` - - Can be used as an argument to {py:class}`ApplicationClient` to reduce the number of calls when deploying multiple - apps or discovering multiple app_ids - """ - - creator: str - apps: dict[str, AppMetaData] = dataclasses.field(default_factory=dict) - - -def _sort_by_round(txn: dict) -> tuple[int, int]: - confirmed = txn["confirmed-round"] - offset = 
txn["intra-round-offset"] - return confirmed, offset - - -def _parse_note(metadata_b64: str | None) -> AppDeployMetaData | None: - if not metadata_b64: - return None - # noinspection PyBroadException - try: - return AppDeployMetaData.from_b64(metadata_b64) - except Exception: - return None - - -def get_creator_apps(indexer: "IndexerClient", creator_account: Account | str) -> AppLookup: - """Returns a mapping of Application names to {py:class}`AppMetaData` for all Applications created by specified - creator that have a transaction note containing {py:class}`AppDeployMetaData` - """ - apps: dict[str, AppMetaData] = {} - - creator_address = creator_account if isinstance(creator_account, str) else creator_account.address - token = None - # TODO: paginated indexer call instead of N + 1 calls - while True: - response = indexer.lookup_account_application_by_creator( - creator_address, limit=DEFAULT_INDEXER_MAX_API_RESOURCES_PER_ACCOUNT, next_page=token - ) # type: ignore[no-untyped-call] - if "message" in response: # an error occurred - raise Exception(f"Error querying applications for {creator_address}: {response}") - for app in response["applications"]: - app_id = app["id"] - app_created_at_round = app["created-at-round"] - app_deleted = app.get("deleted", False) - search_transactions_response = indexer.search_transactions( - min_round=app_created_at_round, - txn_type="appl", - application_id=app_id, - address=creator_address, - address_role="sender", - note_prefix=NOTE_PREFIX.encode("utf-8"), - ) # type: ignore[no-untyped-call] - transactions: list[dict] = search_transactions_response["transactions"] - if not transactions: - continue - - created_transaction = next( - t - for t in transactions - if t["application-transaction"]["application-id"] == 0 and t["sender"] == creator_address - ) - - transactions.sort(key=_sort_by_round, reverse=True) - latest_transaction = transactions[0] - app_updated_at_round = latest_transaction["confirmed-round"] - - create_metadata = 
_parse_note(created_transaction.get("note")) - update_metadata = _parse_note(latest_transaction.get("note")) - - if create_metadata and create_metadata.name: - apps[create_metadata.name] = AppMetaData( - app_id=app_id, - app_address=get_application_address(app_id), - created_metadata=create_metadata, - created_round=app_created_at_round, - **(update_metadata or create_metadata).__dict__, - updated_round=app_updated_at_round, - deleted=app_deleted, - ) - - token = response.get("next-token") - if not token: - break - - return AppLookup(creator_address, apps) - - -def _state_schema(schema: dict[str, int]) -> StateSchema: - return StateSchema(schema.get("num-uint", 0), schema.get("num-byte-slice", 0)) # type: ignore[no-untyped-call] - - -def _describe_schema_breaks(prefix: str, from_schema: StateSchema, to_schema: StateSchema) -> Iterable[str]: - if to_schema.num_uints > from_schema.num_uints: - yield f"{prefix} uints increased from {from_schema.num_uints} to {to_schema.num_uints}" - if to_schema.num_byte_slices > from_schema.num_byte_slices: - yield f"{prefix} byte slices increased from {from_schema.num_byte_slices} to {to_schema.num_byte_slices}" - - -@dataclasses.dataclass(kw_only=True) -class AppChanges: - app_updated: bool - schema_breaking_change: bool - schema_change_description: str | None - - -def check_for_app_changes( # noqa: PLR0913 - algod_client: "AlgodClient", - *, - new_approval: bytes, - new_clear: bytes, - new_global_schema: StateSchema, - new_local_schema: StateSchema, - app_id: int, -) -> AppChanges: - application_info = algod_client.application_info(app_id) - assert isinstance(application_info, dict) - application_create_params = application_info["params"] - - current_approval = base64.b64decode(application_create_params["approval-program"]) - current_clear = base64.b64decode(application_create_params["clear-state-program"]) - current_global_schema = _state_schema(application_create_params["global-state-schema"]) - current_local_schema = 
_state_schema(application_create_params["local-state-schema"]) - - app_updated = current_approval != new_approval or current_clear != new_clear - - schema_changes: list[str] = [] - schema_changes.extend(_describe_schema_breaks("Global", current_global_schema, new_global_schema)) - schema_changes.extend(_describe_schema_breaks("Local", current_local_schema, new_local_schema)) - - return AppChanges( - app_updated=app_updated, - schema_breaking_change=bool(schema_changes), - schema_change_description=", ".join(schema_changes), - ) - - -def _is_valid_token_character(char: str) -> bool: - return char.isalnum() or char == "_" - - -def _replace_template_variable(program_lines: list[str], template_variable: str, value: str) -> tuple[list[str], int]: - result: list[str] = [] - match_count = 0 - token = f"TMPL_{template_variable}" - token_idx_offset = len(value) - len(token) - for line in program_lines: - comment_idx = _find_unquoted_string(line, "//") - if comment_idx is None: - comment_idx = len(line) - code = line[:comment_idx] - comment = line[comment_idx:] - trailing_idx = 0 - while True: - token_idx = _find_template_token(code, token, trailing_idx) - if token_idx is None: - break - - trailing_idx = token_idx + len(token) - prefix = code[:token_idx] - suffix = code[trailing_idx:] - code = f"{prefix}{value}{suffix}" - match_count += 1 - trailing_idx += token_idx_offset - result.append(code + comment) - return result, match_count - - -def add_deploy_template_variables( - template_values: TemplateValueDict, allow_update: bool | None, allow_delete: bool | None -) -> None: - if allow_update is not None: - template_values[_UPDATABLE] = int(allow_update) - if allow_delete is not None: - template_values[_DELETABLE] = int(allow_delete) - - -def _find_unquoted_string(line: str, token: str, start: int = 0, end: int = -1) -> int | None: - """Find the first string within a line of TEAL. Only matches outside of quotes and base64 are returned. 
- Returns None if not found""" - - if end < 0: - end = len(line) - idx = start - in_quotes = in_base64 = False - while idx < end: - current_char = line[idx] - match current_char: - # enter base64 - case " " | "(" if not in_quotes and _last_token_base64(line, idx): - in_base64 = True - # exit base64 - case " " | ")" if not in_quotes and in_base64: - in_base64 = False - # escaped char - case "\\" if in_quotes: - # skip next character - idx += 1 - # quote boundary - case '"': - in_quotes = not in_quotes - # can test for match - case _ if not in_quotes and not in_base64 and line.startswith(token, idx): - # only match if not in quotes and string matches - return idx - idx += 1 - return None - - -def _last_token_base64(line: str, idx: int) -> bool: - try: - *_, last = line[:idx].split() - except ValueError: - return False - return last in ("base64", "b64") - - -def _find_template_token(line: str, token: str, start: int = 0, end: int = -1) -> int | None: - """Find the first template token within a line of TEAL. Only matches outside of quotes are returned. - Only full token matches are returned, i.e. 
TMPL_STR will not match against TMPL_STRING - Returns None if not found""" - if end < 0: - end = len(line) - - idx = start - while idx < end: - token_idx = _find_unquoted_string(line, token, idx, end) - if token_idx is None: - break - trailing_idx = token_idx + len(token) - if (token_idx == 0 or not _is_valid_token_character(line[token_idx - 1])) and ( # word boundary at start - trailing_idx >= len(line) or not _is_valid_token_character(line[trailing_idx]) # word boundary at end - ): - return token_idx - idx = trailing_idx - return None - - -def _strip_comment(line: str) -> str: - comment_idx = _find_unquoted_string(line, "//") - if comment_idx is None: - return line - return line[:comment_idx].rstrip() - - -def strip_comments(program: str) -> str: - return "\n".join(_strip_comment(line) for line in program.splitlines()) - - -def _has_token(program_without_comments: str, token: str) -> bool: - for line in program_without_comments.splitlines(): - token_idx = _find_template_token(line, token) - if token_idx is not None: - return True - return False - - -def _find_tokens(stripped_approval_program: str) -> list[str]: - return _TOKEN_PATTERN.findall(stripped_approval_program) - - -def check_template_variables(approval_program: str, template_values: TemplateValueDict) -> None: - approval_program = strip_comments(approval_program) - if _has_token(approval_program, UPDATABLE_TEMPLATE_NAME) and _UPDATABLE not in template_values: - raise DeploymentFailedError( - "allow_update must be specified if deploy time configuration of update is being used" - ) - if _has_token(approval_program, DELETABLE_TEMPLATE_NAME) and _DELETABLE not in template_values: - raise DeploymentFailedError( - "allow_delete must be specified if deploy time configuration of delete is being used" - ) - all_tokens = _find_tokens(approval_program) - missing_values = [token for token in all_tokens if token[len("TMPL_") :] not in template_values] - if missing_values: - raise DeploymentFailedError(f"The following 
template values were not provided: {', '.join(missing_values)}") - - for template_variable_name in template_values: - tmpl_variable = f"TMPL_{template_variable_name}" - if not _has_token(approval_program, tmpl_variable): - if template_variable_name == _UPDATABLE: - raise DeploymentFailedError( - "allow_update must only be specified if deploy time configuration of update is being used" - ) - if template_variable_name == _DELETABLE: - raise DeploymentFailedError( - "allow_delete must only be specified if deploy time configuration of delete is being used" - ) - logger.warning(f"{tmpl_variable} not found in approval program, but variable was provided") - - -def replace_template_variables(program: str, template_values: TemplateValueMapping) -> str: - """Replaces `TMPL_*` variables in `program` with `template_values` - - ```{note} - `template_values` keys should *NOT* be prefixed with `TMPL_` - ``` - """ - program_lines = program.splitlines() - for template_variable_name, template_value in template_values.items(): - match template_value: - case int(): - value = str(template_value) - case str(): - value = "0x" + template_value.encode("utf-8").hex() - case bytes(): - value = "0x" + template_value.hex() - case _: - raise DeploymentFailedError( - f"Unexpected template value type {template_variable_name}: {template_value.__class__}" - ) - - program_lines, matches = _replace_template_variable(program_lines, template_variable_name, value) - - return "\n".join(program_lines) - - -def has_template_vars(app_spec: ApplicationSpecification) -> bool: - return "TMPL_" in strip_comments(app_spec.approval_program) or "TMPL_" in strip_comments(app_spec.clear_program) - - -def get_deploy_control( - app_spec: ApplicationSpecification, template_var: str, on_complete: transaction.OnComplete -) -> bool | None: - if template_var not in strip_comments(app_spec.approval_program): - return None - return get_call_config(app_spec.bare_call_config, on_complete) != CallConfig.NEVER or any( - h for h 
in app_spec.hints.values() if get_call_config(h.call_config, on_complete) != CallConfig.NEVER - ) - - -def get_call_config(method_config: MethodConfigDict, on_complete: transaction.OnComplete) -> CallConfig: - def get(key: OnCompleteActionName) -> CallConfig: - return method_config.get(key, CallConfig.NEVER) - - match on_complete: - case transaction.OnComplete.NoOpOC: - return get("no_op") - case transaction.OnComplete.UpdateApplicationOC: - return get("update_application") - case transaction.OnComplete.DeleteApplicationOC: - return get("delete_application") - case transaction.OnComplete.OptInOC: - return get("opt_in") - case transaction.OnComplete.CloseOutOC: - return get("close_out") - case transaction.OnComplete.ClearStateOC: - return get("clear_state") - - -class OnUpdate(Enum): - """Action to take if an Application has been updated""" - - Fail = 0 - """Fail the deployment""" - UpdateApp = 1 - """Update the Application with the new approval and clear programs""" - ReplaceApp = 2 - """Create a new Application and delete the old Application in a single transaction""" - AppendApp = 3 - """Create a new application""" - - -class OnSchemaBreak(Enum): - """Action to take if an Application's schema has breaking changes""" - - Fail = 0 - """Fail the deployment""" - ReplaceApp = 2 - """Create a new Application and delete the old Application in a single transaction""" - AppendApp = 3 - """Create a new Application""" - - -class OperationPerformed(Enum): - """Describes the actions taken during deployment""" - - Nothing = 0 - """An existing Application was found""" - Create = 1 - """No existing Application was found, created a new Application""" - Update = 2 - """An existing Application was found, but was out of date, updated to latest version""" - Replace = 3 - """An existing Application was found, but was out of date, created a new Application and deleted the original""" - - -@dataclasses.dataclass(kw_only=True) -class DeployResponse: - """Describes the action taken during 
deployment, related transactions and the {py:class}`AppMetaData`""" - - app: AppMetaData - create_response: TransactionResponse | None = None - delete_response: TransactionResponse | None = None - update_response: TransactionResponse | None = None - action_taken: OperationPerformed = OperationPerformed.Nothing - - -@dataclasses.dataclass(kw_only=True) -class DeployCallArgs: - """Parameters used to update or delete an application when calling - {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - suggested_params: transaction.SuggestedParams | None = None - lease: bytes | str | None = None - accounts: list[str] | None = None - foreign_apps: list[int] | None = None - foreign_assets: list[int] | None = None - boxes: Sequence[tuple[int, bytes | bytearray | str | int]] | None = None - rekey_to: str | None = None - - -@dataclasses.dataclass(kw_only=True) -class ABICall: - method: ABIMethod | bool | None = None - args: ABIArgsDict = dataclasses.field(default_factory=dict) - - -@dataclasses.dataclass(kw_only=True) -class DeployCreateCallArgs(DeployCallArgs): - """Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - extra_pages: int | None = None - on_complete: transaction.OnComplete | None = None - - -@dataclasses.dataclass(kw_only=True) -class ABICallArgs(DeployCallArgs, ABICall): - """ABI Parameters used to update or delete an application when calling - {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - -@dataclasses.dataclass(kw_only=True) -class ABICreateCallArgs(DeployCreateCallArgs, ABICall): - """ABI Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - -class DeployCallArgsDict(TypedDict, total=False): - """Parameters used to update or delete an application when calling - {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - suggested_params: transaction.SuggestedParams - lease: bytes | str - accounts: list[str] - foreign_apps: 
list[int] - foreign_assets: list[int] - boxes: Sequence[tuple[int, bytes | bytearray | str | int]] - rekey_to: str - - -class ABICallArgsDict(DeployCallArgsDict, TypedDict, total=False): - """ABI Parameters used to update or delete an application when calling - {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - method: ABIMethod | bool - args: ABIArgsDict - - -class DeployCreateCallArgsDict(DeployCallArgsDict, TypedDict, total=False): - """Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - extra_pages: int | None - on_complete: transaction.OnComplete - - -class ABICreateCallArgsDict(DeployCreateCallArgsDict, TypedDict, total=False): - """ABI Parameters used to create an application when calling {py:meth}`~algokit_utils.ApplicationClient.deploy`""" - - method: ABIMethod | bool - args: ABIArgsDict - - -@dataclasses.dataclass(kw_only=True) -class Deployer: - app_client: "ApplicationClient" - creator: str - signer: TransactionSigner - sender: str - existing_app_metadata_or_reference: AppReference | AppMetaData - new_app_metadata: AppDeployMetaData - on_update: OnUpdate - on_schema_break: OnSchemaBreak - create_args: ABICreateCallArgs | ABICreateCallArgsDict | DeployCreateCallArgs | None - update_args: ABICallArgs | ABICallArgsDict | DeployCallArgs | None - delete_args: ABICallArgs | ABICallArgsDict | DeployCallArgs | None - - def deploy(self) -> DeployResponse: - """Ensures app associated with app client's creator is present and up to date""" - assert self.app_client.approval - assert self.app_client.clear - - if self.existing_app_metadata_or_reference.app_id == 0: - logger.info(f"{self.new_app_metadata.name} not found in {self.creator} account, deploying app.") - return self._create_app() - - assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) - logger.debug( - f"{self.existing_app_metadata_or_reference.name} found in {self.creator} account, " - f"with app id 
{self.existing_app_metadata_or_reference.app_id}, " - f"version={self.existing_app_metadata_or_reference.version}." - ) - - app_changes = check_for_app_changes( - self.app_client.algod_client, - new_approval=self.app_client.approval.raw_binary, - new_clear=self.app_client.clear.raw_binary, - new_global_schema=self.app_client.app_spec.global_state_schema, - new_local_schema=self.app_client.app_spec.local_state_schema, - app_id=self.existing_app_metadata_or_reference.app_id, - ) - - if app_changes.schema_breaking_change: - logger.warning(f"Detected a breaking app schema change: {app_changes.schema_change_description}") - return self._deploy_breaking_change() - - if app_changes.app_updated: - logger.info(f"Detected a TEAL update in app id {self.existing_app_metadata_or_reference.app_id}") - return self._deploy_update() - - logger.info("No detected changes in app, nothing to do.") - return DeployResponse(app=self.existing_app_metadata_or_reference) - - def _deploy_breaking_change(self) -> DeployResponse: - assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) - if self.on_schema_break == OnSchemaBreak.Fail: - raise DeploymentFailedError( - "Schema break detected and on_schema_break=OnSchemaBreak.Fail, stopping deployment. 
" - "If you want to try deleting and recreating the app then " - "re-run with on_schema_break=OnSchemaBreak.ReplaceApp" - ) - if self.on_schema_break == OnSchemaBreak.AppendApp: - logger.info("Schema break detected and on_schema_break=AppendApp, will attempt to create new app") - return self._create_app() - - if self.existing_app_metadata_or_reference.deletable: - logger.info( - "App is deletable and on_schema_break=ReplaceApp, will attempt to create new app and delete old app" - ) - elif self.existing_app_metadata_or_reference.deletable is False: - logger.warning( - "App is not deletable but on_schema_break=ReplaceApp, " - "will attempt to delete app, delete will most likely fail" - ) - else: - logger.warning( - "Cannot determine if App is deletable but on_schema_break=ReplaceApp, will attempt to delete app" - ) - return self._create_and_delete_app() - - def _deploy_update(self) -> DeployResponse: - assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) - if self.on_update == OnUpdate.Fail: - raise DeploymentFailedError( - "Update detected and on_update=Fail, stopping deployment. 
" - "If you want to try updating the app then re-run with on_update=UpdateApp" - ) - if self.on_update == OnUpdate.AppendApp: - logger.info("Update detected and on_update=AppendApp, will attempt to create new app") - return self._create_app() - elif self.existing_app_metadata_or_reference.updatable and self.on_update == OnUpdate.UpdateApp: - logger.info("App is updatable and on_update=UpdateApp, will update app") - return self._update_app() - elif self.existing_app_metadata_or_reference.updatable and self.on_update == OnUpdate.ReplaceApp: - logger.warning( - "App is updatable but on_update=ReplaceApp, will attempt to create new app and delete old app" - ) - return self._create_and_delete_app() - elif self.on_update == OnUpdate.ReplaceApp: - if self.existing_app_metadata_or_reference.updatable is False: - logger.warning( - "App is not updatable and on_update=ReplaceApp, " - "will attempt to create new app and delete old app" - ) - else: - logger.warning( - "Cannot determine if App is updatable and on_update=ReplaceApp, " - "will attempt to create new app and delete old app" - ) - return self._create_and_delete_app() - else: - if self.existing_app_metadata_or_reference.updatable is False: - logger.warning( - "App is not updatable but on_update=UpdateApp, " - "will attempt to update app, update will most likely fail" - ) - else: - logger.warning( - "Cannot determine if App is updatable and on_update=UpdateApp, will attempt to update app" - ) - return self._update_app() - - def _create_app(self) -> DeployResponse: - assert self.app_client.existing_deployments - - method, abi_args, parameters = _convert_deploy_args( - self.create_args, self.new_app_metadata, self.signer, self.sender - ) - create_response = self.app_client.create( - method, - parameters, - **abi_args, - ) - logger.info( - f"{self.new_app_metadata.name} ({self.new_app_metadata.version}) deployed successfully, " - f"with app id {self.app_client.app_id}." 
- ) - assert create_response.confirmed_round is not None - app_metadata = _create_metadata(self.new_app_metadata, self.app_client.app_id, create_response.confirmed_round) - self.app_client.existing_deployments.apps[self.new_app_metadata.name] = app_metadata - return DeployResponse(app=app_metadata, create_response=create_response, action_taken=OperationPerformed.Create) - - def _create_and_delete_app(self) -> DeployResponse: - assert self.app_client.existing_deployments - assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) - - logger.info( - f"Replacing {self.existing_app_metadata_or_reference.name} " - f"({self.existing_app_metadata_or_reference.version}) with " - f"{self.new_app_metadata.name} ({self.new_app_metadata.version}) in {self.creator} account." - ) - atc = AtomicTransactionComposer() - create_method, create_abi_args, create_parameters = _convert_deploy_args( - self.create_args, self.new_app_metadata, self.signer, self.sender - ) - self.app_client.compose_create( - atc, - create_method, - create_parameters, - **create_abi_args, - ) - create_txn_index = len(atc.txn_list) - 1 - delete_method, delete_abi_args, delete_parameters = _convert_deploy_args( - self.delete_args, self.new_app_metadata, self.signer, self.sender - ) - self.app_client.compose_delete( - atc, - delete_method, - delete_parameters, - **delete_abi_args, - ) - delete_txn_index = len(atc.txn_list) - 1 - create_delete_response = self.app_client.execute_atc(atc) - create_response = TransactionResponse.from_atr(create_delete_response, create_txn_index) - delete_response = TransactionResponse.from_atr(create_delete_response, delete_txn_index) - self.app_client.app_id = get_app_id_from_tx_id(self.app_client.algod_client, create_response.tx_id) - logger.info( - f"{self.new_app_metadata.name} ({self.new_app_metadata.version}) deployed successfully, " - f"with app id {self.app_client.app_id}." 
- ) - logger.info( - f"{self.existing_app_metadata_or_reference.name} " - f"({self.existing_app_metadata_or_reference.version}) with app id " - f"{self.existing_app_metadata_or_reference.app_id}, deleted successfully." - ) - - app_metadata = _create_metadata( - self.new_app_metadata, self.app_client.app_id, create_delete_response.confirmed_round - ) - self.app_client.existing_deployments.apps[self.new_app_metadata.name] = app_metadata - - return DeployResponse( - app=app_metadata, - create_response=create_response, - delete_response=delete_response, - action_taken=OperationPerformed.Replace, - ) - - def _update_app(self) -> DeployResponse: - assert self.app_client.existing_deployments - assert isinstance(self.existing_app_metadata_or_reference, AppMetaData) - - logger.info( - f"Updating {self.existing_app_metadata_or_reference.name} to {self.new_app_metadata.version} in " - f"{self.creator} account, with app id {self.existing_app_metadata_or_reference.app_id}" - ) - method, abi_args, parameters = _convert_deploy_args( - self.update_args, self.new_app_metadata, self.signer, self.sender - ) - update_response = self.app_client.update( - method, - parameters, - **abi_args, - ) - app_metadata = _create_metadata( - self.new_app_metadata, - self.app_client.app_id, - self.existing_app_metadata_or_reference.created_round, - updated_round=update_response.confirmed_round, - original_metadata=self.existing_app_metadata_or_reference.created_metadata, - ) - self.app_client.existing_deployments.apps[self.new_app_metadata.name] = app_metadata - return DeployResponse(app=app_metadata, update_response=update_response, action_taken=OperationPerformed.Update) - - -def _create_metadata( - app_spec_note: AppDeployMetaData, - app_id: int, - created_round: int, - updated_round: int | None = None, - original_metadata: AppDeployMetaData | None = None, -) -> AppMetaData: - return AppMetaData( - app_id=app_id, - app_address=get_application_address(app_id), - created_metadata=original_metadata 
or app_spec_note, - created_round=created_round, - updated_round=updated_round or created_round, - name=app_spec_note.name, - version=app_spec_note.version, - deletable=app_spec_note.deletable, - updatable=app_spec_note.updatable, - deleted=False, - ) - - -def _convert_deploy_args( - _args: DeployCallArgs | DeployCallArgsDict | None, - note: AppDeployMetaData, - signer: TransactionSigner | None, - sender: str | None, -) -> tuple[ABIMethod | bool | None, ABIArgsDict, CreateCallParameters]: - args = _args.__dict__ if isinstance(_args, DeployCallArgs) else dict(_args or {}) - - # return most derived type, unused parameters are ignored - parameters = CreateCallParameters( - note=note.encode(), - signer=signer, - sender=sender, - suggested_params=args.get("suggested_params"), - lease=args.get("lease"), - accounts=args.get("accounts"), - foreign_assets=args.get("foreign_assets"), - foreign_apps=args.get("foreign_apps"), - boxes=args.get("boxes"), - rekey_to=args.get("rekey_to"), - extra_pages=args.get("extra_pages"), - on_complete=args.get("on_complete"), - ) - - return args.get("method"), args.get("args") or {}, parameters - - -def get_app_id_from_tx_id(algod_client: "AlgodClient", tx_id: str) -> int: - """Finds the app_id for provided transaction id""" - result = algod_client.pending_transaction_info(tx_id) - assert isinstance(result, dict) - app_id = result["application-index"] - assert isinstance(app_id, int) - return app_id +from algokit_utils._legacy_v2.deploy import * # noqa: F403 diff --git a/src/algokit_utils/dispenser_api.py b/src/algokit_utils/dispenser_api.py index 66593e80..1dc9e175 100644 --- a/src/algokit_utils/dispenser_api.py +++ b/src/algokit_utils/dispenser_api.py @@ -1,178 +1 @@ -import contextlib -import enum -import logging -import os -from dataclasses import dataclass - -import httpx - -logger = logging.getLogger(__name__) - - -class DispenserApiConfig: - BASE_URL = "https://api.dispenser.algorandfoundation.tools" - - -class 
DispenserAssetName(enum.IntEnum): - ALGO = 0 - - -@dataclass -class DispenserAsset: - asset_id: int - decimals: int - description: str - - -@dataclass -class DispenserFundResponse: - tx_id: str - amount: int - - -@dataclass -class DispenserLimitResponse: - amount: int - - -DISPENSER_ASSETS = { - DispenserAssetName.ALGO: DispenserAsset( - asset_id=0, - decimals=6, - description="Algo", - ), -} -DISPENSER_REQUEST_TIMEOUT = 15 -DISPENSER_ACCESS_TOKEN_KEY = "ALGOKIT_DISPENSER_ACCESS_TOKEN" - - -class TestNetDispenserApiClient: - """ - Client for interacting with the [AlgoKit TestNet Dispenser API](https://github.com/algorandfoundation/algokit/blob/main/docs/testnet_api.md). - To get started create a new access token via `algokit dispenser login --ci` - and pass it to the client constructor as `auth_token`. - Alternatively set the access token as environment variable `ALGOKIT_DISPENSER_ACCESS_TOKEN`, - and it will be auto loaded. If both are set, the constructor argument takes precedence. - - Default request timeout is 15 seconds. Modify by passing `request_timeout` to the constructor. - """ - - auth_token: str - request_timeout = DISPENSER_REQUEST_TIMEOUT - - def __init__(self, auth_token: str | None = None, request_timeout: int = DISPENSER_REQUEST_TIMEOUT): - auth_token_from_env = os.getenv(DISPENSER_ACCESS_TOKEN_KEY) - - if auth_token: - self.auth_token = auth_token - elif auth_token_from_env: - self.auth_token = auth_token_from_env - else: - raise Exception( - f"Can't init AlgoKit TestNet Dispenser API client " - f"because neither environment variable {DISPENSER_ACCESS_TOKEN_KEY} or " - "the auth_token were provided." 
- ) - - self.request_timeout = request_timeout - - def _process_dispenser_request( - self, *, auth_token: str, url_suffix: str, data: dict | None = None, method: str = "POST" - ) -> httpx.Response: - """ - Generalized method to process http requests to dispenser API - """ - - headers = {"Authorization": f"Bearer {(auth_token)}"} - - # Set request arguments - request_args = { - "url": f"{DispenserApiConfig.BASE_URL}/{url_suffix}", - "headers": headers, - "timeout": self.request_timeout, - } - - if method.upper() != "GET" and data is not None: - request_args["json"] = data - - try: - response: httpx.Response = getattr(httpx, method.lower())(**request_args) - response.raise_for_status() - return response - - except httpx.HTTPStatusError as err: - error_message = f"Error processing dispenser API request: {err.response.status_code}" - error_response = None - with contextlib.suppress(Exception): - error_response = err.response.json() - - if error_response and error_response.get("code"): - error_message = error_response.get("code") - - elif err.response.status_code == httpx.codes.BAD_REQUEST: - error_message = err.response.json()["message"] - - raise Exception(error_message) from err - - except Exception as err: - error_message = "Error processing dispenser API request" - logger.debug(f"{error_message}: {err}", exc_info=True) - raise err - - def fund(self, address: str, amount: int, asset_id: int) -> DispenserFundResponse: - """ - Fund an account with Algos from the dispenser API - """ - - try: - response = self._process_dispenser_request( - auth_token=self.auth_token, - url_suffix=f"fund/{asset_id}", - data={"receiver": address, "amount": amount, "assetID": asset_id}, - method="POST", - ) - - content = response.json() - return DispenserFundResponse(tx_id=content["txID"], amount=content["amount"]) - - except Exception as err: - logger.exception(f"Error funding account {address}: {err}") - raise err - - def refund(self, refund_txn_id: str) -> None: - """ - Register a 
refund for a transaction with the dispenser API - """ - - try: - self._process_dispenser_request( - auth_token=self.auth_token, - url_suffix="refund", - data={"refundTransactionID": refund_txn_id}, - method="POST", - ) - - except Exception as err: - logger.exception(f"Error issuing refund for txn_id {refund_txn_id}: {err}") - raise err - - def get_limit( - self, - address: str, - ) -> DispenserLimitResponse: - """ - Get current limit for an account with Algos from the dispenser API - """ - - try: - response = self._process_dispenser_request( - auth_token=self.auth_token, - url_suffix=f"fund/{DISPENSER_ASSETS[DispenserAssetName.ALGO].asset_id}/limit", - method="GET", - ) - content = response.json() - - return DispenserLimitResponse(amount=content["amount"]) - except Exception as err: - logger.exception(f"Error setting limit for account {address}: {err}") - raise err +from algokit_utils.clients.dispenser_api_client import * # noqa: F403 diff --git a/src/algokit_utils/errors/__init__.py b/src/algokit_utils/errors/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/logic_error.py b/src/algokit_utils/logic_error.py index 56d22f9f..2b750b56 100644 --- a/src/algokit_utils/logic_error.py +++ b/src/algokit_utils/logic_error.py @@ -1,85 +1 @@ -import re -from copy import copy -from typing import TYPE_CHECKING, TypedDict - -from algokit_utils.models import SimulationTrace - -if TYPE_CHECKING: - from algosdk.source_map import SourceMap as AlgoSourceMap - -__all__ = [ - "LogicError", - "parse_logic_error", -] - -LOGIC_ERROR = ( - ".*transaction (?P[A-Z0-9]+): logic eval error: (?P.*). 
Details: .*pc=(?P[0-9]+).*" -) - - -class LogicErrorData(TypedDict): - transaction_id: str - message: str - pc: int - - -def parse_logic_error( - error_str: str, -) -> LogicErrorData | None: - match = re.match(LOGIC_ERROR, error_str) - if match is None: - return None - - return { - "transaction_id": match.group("transaction_id"), - "message": match.group("message"), - "pc": int(match.group("pc")), - } - - -class LogicError(Exception): - def __init__( # noqa: PLR0913 - self, - *, - logic_error_str: str, - program: str, - source_map: "AlgoSourceMap | None", - transaction_id: str, - message: str, - pc: int, - logic_error: Exception | None = None, - traces: list[SimulationTrace] | None = None, - ): - self.logic_error = logic_error - self.logic_error_str = logic_error_str - self.program = program - self.source_map = source_map - self.lines = program.split("\n") - self.transaction_id = transaction_id - self.message = message - self.pc = pc - self.traces = traces - - self.line_no = self.source_map.get_line_for_pc(self.pc) if self.source_map else None - - def __str__(self) -> str: - return ( - f"Txn {self.transaction_id} had error '{self.message}' at PC {self.pc}" - + (":" if self.line_no is None else f" and Source Line {self.line_no}:") - + f"\n{self.trace()}" - ) - - def trace(self, lines: int = 5) -> str: - if self.line_no is None: - return """ -Could not determine TEAL source line for the error as no approval source map was provided, to receive a trace of the -error please provide an approval SourceMap. Either by: - 1.) Providing template_values when creating the ApplicationClient, so a SourceMap can be obtained automatically OR - 2.) Set approval_source_map from a previously compiled approval program OR - 3.) 
Import a previously exported source map using import_source_map""" - - program_lines = copy(self.lines) - program_lines[self.line_no] += "\t\t<-- Error" - lines_before = max(0, self.line_no - lines) - lines_after = min(len(program_lines), self.line_no + lines) - return "\n\t" + "\n\t".join(program_lines[lines_before:lines_after]) +from algokit_utils._legacy_v2.logic_error import * # noqa: F403 diff --git a/src/algokit_utils/models/__init__.py b/src/algokit_utils/models/__init__.py new file mode 100644 index 00000000..bcffc093 --- /dev/null +++ b/src/algokit_utils/models/__init__.py @@ -0,0 +1 @@ +from algokit_utils._legacy_v2.models import * # noqa: F403 diff --git a/src/algokit_utils/models/common.py b/src/algokit_utils/models/common.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/network_clients.py b/src/algokit_utils/network_clients.py index 2de270da..a9dc5de2 100644 --- a/src/algokit_utils/network_clients.py +++ b/src/algokit_utils/network_clients.py @@ -1,130 +1 @@ -import dataclasses -import os -from typing import Literal -from urllib import parse - -from algosdk.kmd import KMDClient -from algosdk.v2client.algod import AlgodClient -from algosdk.v2client.indexer import IndexerClient - -__all__ = [ - "AlgoClientConfig", - "get_algod_client", - "get_algonode_config", - "get_default_localnet_config", - "get_indexer_client", - "get_kmd_client_from_algod_client", - "is_localnet", - "is_mainnet", - "is_testnet", - "AlgoClientConfigs", - "get_kmd_client", -] - - -@dataclasses.dataclass -class AlgoClientConfig: - """Connection details for connecting to an {py:class}`algosdk.v2client.algod.AlgodClient` or - {py:class}`algosdk.v2client.indexer.IndexerClient`""" - - server: str - """URL for the service e.g. 
`http://localhost:4001` or `https://testnet-api.algonode.cloud`""" - token: str - """API Token to authenticate with the service""" - - -@dataclasses.dataclass -class AlgoClientConfigs: - algod_config: AlgoClientConfig - indexer_config: AlgoClientConfig - kmd_config: AlgoClientConfig | None - - -def get_default_localnet_config(config: Literal["algod", "indexer", "kmd"]) -> AlgoClientConfig: - """Returns the client configuration to point to the default LocalNet""" - port = {"algod": 4001, "indexer": 8980, "kmd": 4002}[config] - return AlgoClientConfig(server=f"http://localhost:{port}", token="a" * 64) - - -def get_algonode_config( - network: Literal["testnet", "mainnet"], config: Literal["algod", "indexer"], token: str -) -> AlgoClientConfig: - client = "api" if config == "algod" else "idx" - return AlgoClientConfig( - server=f"https://{network}-{client}.algonode.cloud", - token=token, - ) - - -def get_algod_client(config: AlgoClientConfig | None = None) -> AlgodClient: - """Returns an {py:class}`algosdk.v2client.algod.AlgodClient` from `config` or environment - - If no configuration provided will use environment variables `ALGOD_SERVER`, `ALGOD_PORT` and `ALGOD_TOKEN`""" - config = config or _get_config_from_environment("ALGOD") - headers = {"X-Algo-API-Token": config.token} - return AlgodClient(config.token, config.server, headers) - - -def get_kmd_client(config: AlgoClientConfig | None = None) -> KMDClient: - """Returns an {py:class}`algosdk.kmd.KMDClient` from `config` or environment - - If no configuration provided will use environment variables `KMD_SERVER`, `KMD_PORT` and `KMD_TOKEN`""" - config = config or _get_config_from_environment("KMD") - return KMDClient(config.token, config.server) # type: ignore[no-untyped-call] - - -def get_indexer_client(config: AlgoClientConfig | None = None) -> IndexerClient: - """Returns an {py:class}`algosdk.v2client.indexer.IndexerClient` from `config` or environment. 
- - If no configuration provided will use environment variables `INDEXER_SERVER`, `INDEXER_PORT` and `INDEXER_TOKEN`""" - config = config or _get_config_from_environment("INDEXER") - headers = {"X-Indexer-API-Token": config.token} - return IndexerClient(config.token, config.server, headers) # type: ignore[no-untyped-call] - - -def is_localnet(client: AlgodClient) -> bool: - """Returns True if client genesis is `devnet-v1` or `sandnet-v1`""" - params = client.suggested_params() - return params.gen in ["devnet-v1", "sandnet-v1", "dockernet-v1"] - - -def is_mainnet(client: AlgodClient) -> bool: - """Returns True if client genesis is `mainnet-v1`""" - params = client.suggested_params() - return params.gen in ["mainnet-v1.0", "mainnet-v1", "mainnet"] - - -def is_testnet(client: AlgodClient) -> bool: - """Returns True if client genesis is `testnet-v1`""" - params = client.suggested_params() - return params.gen in ["testnet-v1.0", "testnet-v1", "testnet"] - - -def get_kmd_client_from_algod_client(client: AlgodClient) -> KMDClient: - """Returns an {py:class}`algosdk.kmd.KMDClient` from supplied `client` - - Will use the same address as provided `client` but on port specified by `KMD_PORT` environment variable, - or 4002 by default""" - # We can only use Kmd on the LocalNet otherwise it's not exposed so this makes some assumptions - # (e.g. 
same token and server as algod and port 4002 by default) - port = os.getenv("KMD_PORT", "4002") - server = _replace_kmd_port(client.algod_address, port) - return KMDClient(client.algod_token, server) # type: ignore[no-untyped-call] - - -def _replace_kmd_port(address: str, port: str) -> str: - parsed_algod = parse.urlparse(address) - kmd_host = parsed_algod.netloc.split(":", maxsplit=1)[0] + f":{port}" - kmd_parsed = parsed_algod._replace(netloc=kmd_host) - return parse.urlunparse(kmd_parsed) - - -def _get_config_from_environment(environment_prefix: str) -> AlgoClientConfig: - server = os.getenv(f"{environment_prefix}_SERVER") - if server is None: - raise Exception(f"Server environment variable not set: {environment_prefix}_SERVER") - port = os.getenv(f"{environment_prefix}_PORT") - if port: - parsed = parse.urlparse(server) - server = parsed._replace(netloc=f"{parsed.hostname}:{port}").geturl() - return AlgoClientConfig(server, os.getenv(f"{environment_prefix}_TOKEN", "")) +from algokit_utils._legacy_v2.network_clients import * # noqa: F403 diff --git a/src/algokit_utils/transactions/__init__.py b/src/algokit_utils/transactions/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/transactions/models.py b/src/algokit_utils/transactions/models.py new file mode 100644 index 00000000..e69de29b diff --git a/src/algokit_utils/beta/composer.py b/src/algokit_utils/transactions/transaction_composer.py similarity index 77% rename from src/algokit_utils/beta/composer.py rename to src/algokit_utils/transactions/transaction_composer.py index a8aaa4b8..254b2a30 100644 --- a/src/algokit_utils/beta/composer.py +++ b/src/algokit_utils/transactions/transaction_composer.py @@ -22,21 +22,6 @@ class SenderParam: @dataclass(frozen=True) class CommonTxnParams: - """ - Common transaction parameters. - - :param signer: The function used to sign transactions. - :param rekey_to: Change the signing key of the sender to the given address. 
- :param note: Note to attach to the transaction. - :param lease: Prevent multiple transactions with the same lease being included within the validity window. - :param static_fee: The transaction fee. In most cases you want to use `extra_fee` unless setting the fee to 0 to be covered by another transaction. - :param extra_fee: The fee to pay IN ADDITION to the suggested fee. Useful for covering inner transaction fees. - :param max_fee: Throw an error if the fee for the transaction is more than this amount. - :param validity_window: How many rounds the transaction should be valid for. - :param first_valid_round: Set the first round this transaction is valid. If left undefined, the value from algod will be used. Only set this when you intentionally want this to be some time in the future. - :param last_valid_round: The last round this transaction is valid. It is recommended to use validity_window instead. - """ - signer: TransactionSigner | None = None rekey_to: str | None = None note: bytes | None = None @@ -75,22 +60,6 @@ class _RequiredAssetCreateParams(SenderParam): @dataclass(frozen=True) class AssetCreateParams(CommonTxnParams, _RequiredAssetCreateParams): - """ - Asset creation parameters. - - :param total: The total amount of the smallest divisible unit to create. - :param decimals: The amount of decimal places the asset should have. - :param default_frozen: Whether the asset is frozen by default in the creator address. - :param manager: The address that can change the manager, reserve, clawback, and freeze addresses. There will permanently be no manager if undefined or an empty string. - :param reserve: The address that holds the uncirculated supply. - :param freeze: The address that can freeze the asset in any account. Freezing will be permanently disabled if undefined or an empty string. - :param clawback: The address that can clawback the asset from any account. Clawback will be permanently disabled if undefined or an empty string. 
- :param unit_name: The short ticker name for the asset. - :param asset_name: The full name of the asset. - :param url: The metadata URL for the asset. - :param metadata_hash: Hash of the metadata contained in the metadata URL. - """ - decimals: int | None = None default_frozen: bool | None = None manager: str | None = None @@ -110,16 +79,6 @@ class _RequiredAssetConfigParams(SenderParam): @dataclass(frozen=True) class AssetConfigParams(CommonTxnParams, _RequiredAssetConfigParams): - """ - Asset configuration parameters. - - :param asset_id: ID of the asset. - :param manager: The address that can change the manager, reserve, clawback, and freeze addresses. There will permanently be no manager if undefined or an empty string. - :param reserve: The address that holds the uncirculated supply. - :param freeze: The address that can freeze the asset in any account. Freezing will be permanently disabled if undefined or an empty string. - :param clawback: The address that can clawback the asset from any account. Clawback will be permanently disabled if undefined or an empty string. - """ - manager: str | None = None reserve: str | None = None freeze: str | None = None @@ -169,17 +128,6 @@ class _RequiredOnlineKeyRegParams(SenderParam): @dataclass(frozen=True) class OnlineKeyRegParams(CommonTxnParams, _RequiredOnlineKeyRegParams): - """ - Online key registration parameters. - - :param vote_key: The root participation public key. - :param selection_key: The VRF public key. - :param vote_first: The first round that the participation key is valid. Not to be confused with the `first_valid` round of the keyreg transaction. - :param vote_last: The last round that the participation key is valid. Not to be confused with the `last_valid` round of the keyreg transaction. - :param vote_key_dilution: This is the dilution for the 2-level participation key. It determines the interval (number of rounds) for generating new ephemeral keys. 
- :param state_proof_key: The 64 byte state proof public key commitment. - """ - state_proof_key: bytes | None = None @@ -192,16 +140,6 @@ class _RequiredAssetTransferParams(SenderParam): @dataclass(frozen=True) class AssetTransferParams(CommonTxnParams, _RequiredAssetTransferParams): - """ - Asset transfer parameters. - - :param asset_id: ID of the asset. - :param amount: Amount of the asset to transfer (smallest divisible unit). - :param receiver: The account to send the asset to. - :param clawback_target: The account to take the asset from. - :param close_asset_to: The account to close the asset to. - """ - clawback_target: str | None = None close_asset_to: str | None = None @@ -284,20 +222,7 @@ class MethodCallParams(CommonTxnParams, _RequiredMethodCallParams): ] -class AlgokitComposer: - """ - A class for composing and managing Algorand transactions using the Algosdk library. - - Attributes: - txn_method_map (dict[str, algosdk.abi.Method]): A dictionary that maps transaction IDs to their corresponding ABI methods. - txns (List[Union[TransactionWithSigner, TxnParams, AtomicTransactionComposer]]): A list of transactions that have not yet been composed. - atc (AtomicTransactionComposer): An instance of AtomicTransactionComposer used to compose transactions. - algod (AlgodClient): The AlgodClient instance used by the composer for suggested params. - get_suggested_params (Callable[[], algosdk.future.transaction.SuggestedParams]): A function that returns suggested parameters for transactions. - get_signer (Callable[[str], TransactionSigner]): A function that takes an address as input and returns a TransactionSigner for that address. - default_validity_window (int): The default validity window for transactions. 
- """ - +class TransactionComposer: def __init__( self, algod: AlgodClient, @@ -305,15 +230,6 @@ def __init__( get_suggested_params: Callable[[], algosdk.transaction.SuggestedParams] | None = None, default_validity_window: int | None = None, ): - """ - Initialize an instance of the AlgokitComposer class. - - Args: - algod (AlgodClient): An instance of AlgodClient used to get suggested params and send transactions. - get_signer (Callable[[str], TransactionSigner]): A function that takes an address as input and returns a TransactionSigner for that address. - get_suggested_params (Optional[Callable[[], algosdk.future.transaction.SuggestedParams]], optional): A function that returns suggested parameters for transactions. If not provided, it defaults to using algod.suggested_params(). Defaults to None. - default_validity_window (Optional[int], optional): The default validity window for transactions. If not provided, it defaults to 10. Defaults to None. - """ self.txn_method_map: dict[str, algosdk.abi.Method] = {} self.txns: list[TransactionWithSigner | TxnParams | AtomicTransactionComposer] = [] self.atc: AtomicTransactionComposer = AtomicTransactionComposer() @@ -323,47 +239,47 @@ def __init__( self.get_signer: Callable[[str], TransactionSigner] = get_signer self.default_validity_window: int = default_validity_window or 10 - def add_payment(self, params: PayParams) -> "AlgokitComposer": + def add_payment(self, params: PayParams) -> "TransactionComposer": self.txns.append(params) return self - def add_asset_create(self, params: AssetCreateParams) -> "AlgokitComposer": + def add_asset_create(self, params: AssetCreateParams) -> "TransactionComposer": self.txns.append(params) return self - def add_asset_config(self, params: AssetConfigParams) -> "AlgokitComposer": + def add_asset_config(self, params: AssetConfigParams) -> "TransactionComposer": self.txns.append(params) return self - def add_asset_freeze(self, params: AssetFreezeParams) -> "AlgokitComposer": + def 
add_asset_freeze(self, params: AssetFreezeParams) -> "TransactionComposer": self.txns.append(params) return self - def add_asset_destroy(self, params: AssetDestroyParams) -> "AlgokitComposer": + def add_asset_destroy(self, params: AssetDestroyParams) -> "TransactionComposer": self.txns.append(params) return self - def add_asset_transfer(self, params: AssetTransferParams) -> "AlgokitComposer": + def add_asset_transfer(self, params: AssetTransferParams) -> "TransactionComposer": self.txns.append(params) return self - def add_asset_opt_in(self, params: AssetOptInParams) -> "AlgokitComposer": + def add_asset_opt_in(self, params: AssetOptInParams) -> "TransactionComposer": self.txns.append(params) return self - def add_app_call(self, params: AppCallParams) -> "AlgokitComposer": + def add_app_call(self, params: AppCallParams) -> "TransactionComposer": self.txns.append(params) return self - def add_online_key_reg(self, params: OnlineKeyRegParams) -> "AlgokitComposer": + def add_online_key_reg(self, params: OnlineKeyRegParams) -> "TransactionComposer": self.txns.append(params) return self - def add_atc(self, atc: AtomicTransactionComposer) -> "AlgokitComposer": + def add_atc(self, atc: AtomicTransactionComposer) -> "TransactionComposer": self.txns.append(atc) return self - def add_method_call(self, params: MethodCallParams) -> "AlgokitComposer": + def add_method_call(self, params: MethodCallParams) -> "TransactionComposer": self.txns.append(params) return self @@ -633,7 +549,7 @@ def _build_method_call( # noqa: C901, PLR0912 return self._build_atc(method_atc) - def _build_txn( # noqa: C901, PLR0912 + def _build_txn( # noqa: C901, PLR0912, PLR0911 self, txn: TransactionWithSigner | TxnParams | AtomicTransactionComposer, suggested_params: algosdk.transaction.SuggestedParams, diff --git a/tests/conftest.py b/tests/conftest.py index be23305b..e3997a2c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -24,7 +24,7 @@ ) from dotenv import load_dotenv -from tests import 
app_client_test +from legacy_v2_tests import app_client_test if TYPE_CHECKING: from algosdk.kmd import KMDClient diff --git a/tests/test_algorand_client.py b/tests/test_algorand_client.py index 5f258640..8b7c448d 100644 --- a/tests/test_algorand_client.py +++ b/tests/test_algorand_client.py @@ -3,8 +3,8 @@ import pytest from algokit_utils import Account, ApplicationClient -from algokit_utils.beta.account_manager import AddressAndSigner -from algokit_utils.beta.algorand_client import ( +from algokit_utils.accounts.account_manager import AddressAndSigner +from algokit_utils.clients.algorand_client import ( AlgorandClient, AssetCreateParams, AssetOptInParams,