diff --git a/.cargo/config.toml b/.cargo/config.toml index 0a43f11c95b1..f113e9114ace 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -4,40 +4,6 @@ rustdocflags = [ "-Arustdoc::redundant_explicit_links", # stylistic ] -# An auto defined `clippy` feature was introduced, -# but it was found to clash with user defined features, -# so was renamed to `cargo-clippy`. -# -# If you want standard clippy run: -# RUSTFLAGS= cargo clippy -[target.'cfg(feature = "cargo-clippy")'] -rustflags = [ - "-Aclippy::all", - "-Dclippy::correctness", - "-Aclippy::if-same-then-else", - "-Asuspicious-double-ref-op", - "-Dclippy::complexity", - "-Aclippy::zero-prefixed-literal", # 00_1000_000 - "-Aclippy::type_complexity", # raison d'etre - "-Aclippy::nonminimal-bool", # maybe - "-Aclippy::borrowed-box", # Reasonable to fix this one - "-Aclippy::too-many-arguments", # (Turning this on would lead to) - "-Aclippy::unnecessary_cast", # Types may change - "-Aclippy::identity-op", # One case where we do 0 + - "-Aclippy::useless_conversion", # Types may change - "-Aclippy::unit_arg", # styalistic. - "-Aclippy::option-map-unit-fn", # styalistic - "-Aclippy::bind_instead_of_map", # styalistic - "-Aclippy::erasing_op", # E.g. 0 * DOLLARS - "-Aclippy::eq_op", # In tests we test equality. - "-Aclippy::while_immutable_condition", # false positives - "-Aclippy::needless_option_as_deref", # false positives - "-Aclippy::derivable_impls", # false positives - "-Aclippy::stable_sort_primitive", # prefer stable sort - "-Aclippy::extra-unused-type-parameters", # stylistic - "-Aclippy::default_constructed_unit_structs", # stylistic -] - [env] # Needed for musl builds so user doesn't have to install musl-tools. CC_x86_64_unknown_linux_musl = { value = ".cargo/musl-gcc", force = true, relative = true } diff --git a/.config/lychee.toml b/.config/lychee.toml index 72c1e66a4dfb..70869465d679 100644 --- a/.config/lychee.toml +++ b/.config/lychee.toml @@ -12,10 +12,10 @@ exclude_all_private = true # Treat these codes as success condition: accept = [ # Ok - 200, + "200", # Rate limited - GitHub likes to throw this. 
- 429, + "429", ] exclude_path = ["./target"] diff --git a/.config/taplo.toml b/.config/taplo.toml index ffe0417e42b1..f5d0b7021ba8 100644 --- a/.config/taplo.toml +++ b/.config/taplo.toml @@ -27,7 +27,7 @@ reorder_arrays = false # don't re-order order-dependent rustflags [[rule]] include = [".cargo/config.toml"] -keys = ["build", "target.'cfg(feature = \"cargo-clippy\")'"] +keys = ["build"] [rule.formatting] reorder_arrays = false diff --git a/.github/review-bot.yml b/.github/review-bot.yml index a5155949609e..aa4ab8a69e02 100644 --- a/.github/review-bot.yml +++ b/.github/review-bot.yml @@ -1,5 +1,6 @@ rules: - name: CI files + countAuthor: true condition: include: - ^\.gitlab-ci\.yml @@ -8,24 +9,26 @@ rules: - ^\.gitlab/.* - ^\.config/nextest.toml - ^\.cargo/.* - exclude: + exclude: - ^\.gitlab/pipeline/zombienet.* - minApprovals: 2 - type: basic - teams: - - ci - - release-engineering + type: "or" + reviewers: + - minApprovals: 2 + teams: + - ci + - minApprovals: 2 + teams: + - core-devs - name: Audit rules type: basic condition: include: - - ^polkadot/runtime\/(kusama|polkadot|common)\/.* + - ^polkadot/runtime/common/.* - ^polkadot/primitives/src\/.+\.rs$ - ^substrate/primitives/.* - ^substrate/frame/.* exclude: - - ^polkadot/runtime\/(kusama|polkadot)\/src\/weights\/.+\.rs$ - ^substrate\/frame\/.+\.md$ minApprovals: 1 allowedToSkipRule: @@ -41,13 +44,8 @@ rules: - .* # excluding files from 'Runtime files' and 'CI files' rules exclude: - - ^polkadot/runtime/(kusama|polkadot)/src/[^/]+\.rs$ - - ^cumulus/parachains/runtimes/assets/(asset-hub-kusama|asset-hub-polkadot)/src/[^/]+\.rs$ - - ^cumulus/parachains/runtimes/bridge-hubs/(bridge-hub-kusama|bridge-hub-polkadot)/src/[^/]+\.rs$ - - ^cumulus/parachains/runtimes/collectives/collectives-polkadot/src/[^/]+\.rs$ - ^cumulus/parachains/common/src/[^/]+\.rs$ - ^substrate/frame/(?!.*(nfts/.*|uniques/.*|babe/.*|grandpa/.*|beefy|merkle-mountain-range/.*|contracts/.*|election|nomination-pools/.*|staking/.*|aura/.*)) - - ^polkadot/runtime/(kusama|polkadot)/src/[^/]+\.rs$ - ^\.gitlab-ci\.yml - ^docker/.* - ^\.github/.* @@ -59,24 +57,6 @@ rules: teams: - core-devs - # cumulus - - name: Runtime files cumulus - countAuthor: true - condition: - include: - - ^cumulus/parachains/runtimes/assets/(asset-hub-kusama|asset-hub-polkadot)/src/[^/]+\.rs$ - - ^cumulus/parachains/runtimes/bridge-hubs/(bridge-hub-kusama|bridge-hub-polkadot)/src/[^/]+\.rs$ - - ^cumulus/parachains/runtimes/collectives/collectives-polkadot/src/[^/]+\.rs$ - - ^cumulus/parachains/common/src/[^/]+\.rs$ - type: and-distinct - reviewers: - - minApprovals: 1 - teams: - - locks-review - - minApprovals: 1 - teams: - - polkadot-review - # if there are any changes in the bridges subtree (in case of backport changes back to bridges repo) - name: Bridges subtree files type: basic @@ -88,7 +68,6 @@ rules: - bridges-core # substrate - - name: FRAME coders substrate condition: include: @@ -104,6 +83,7 @@ rules: # Protection of THIS file - name: Review Bot + countAuthor: true condition: include: - review-bot\.yml @@ -115,9 +95,6 @@ rules: - minApprovals: 1 teams: - locks-review - - minApprovals: 1 - teams: - - ci preventReviewRequests: teams: diff --git a/.github/runtime_specs/rococo.json b/.github/runtime_specs/rococo.json new file mode 100644 index 000000000000..6568b06400c8 --- /dev/null +++ b/.github/runtime_specs/rococo.json @@ -0,0 +1,17 @@ +{ + "pallets": { + "1": { + "constants": { + "EpochDuration": { + "value": [ 88, 2, 0, 0, 0, 0, 0, 0 ]} + } + }, + + "2": { + "constants": { + 
"MinimumPeriod": { + "value": [ 184, 11, 0, 0, 0, 0, 0, 0 ]} + } + } + } + } diff --git a/.github/runtime_specs/westend.json b/.github/runtime_specs/westend.json new file mode 100644 index 000000000000..6568b06400c8 --- /dev/null +++ b/.github/runtime_specs/westend.json @@ -0,0 +1,17 @@ +{ + "pallets": { + "1": { + "constants": { + "EpochDuration": { + "value": [ 88, 2, 0, 0, 0, 0, 0, 0 ]} + } + }, + + "2": { + "constants": { + "MinimumPeriod": { + "value": [ 184, 11, 0, 0, 0, 0, 0, 0 ]} + } + } + } + } diff --git a/.github/scripts/check-runtime.py b/.github/scripts/check-runtime.py new file mode 100755 index 000000000000..9f3d047e01f8 --- /dev/null +++ b/.github/scripts/check-runtime.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python3 + +import json +import sys +import logging +import os + + +def check_constant(spec_pallet_id, spec_pallet_value, meta_constant): + """ + Check a single constant + + :param spec_pallet_id: + :param spec_pallet_value: + :param meta_constant: + :return: + """ + if meta_constant['name'] == list(spec_pallet_value.keys())[0]: + constant = meta_constant['name'] + res = list(spec_pallet_value.values())[0]["value"] == meta_constant["value"] + + logging.debug(f" Checking pallet:{spec_pallet_id}/constants/{constant}") + logging.debug(f" spec_pallet_value: {spec_pallet_value}") + logging.debug(f" meta_constant: {meta_constant}") + logging.info(f"pallet:{spec_pallet_id}/constants/{constant} -> {res}") + return res + else: + # logging.warning(f" Skipping pallet:{spec_pallet_id}/constants/{meta_constant['name']}") + pass + + +def check_pallet(metadata, spec_pallet): + """ + Check one pallet + + :param metadata: + :param spec_pallet_id: + :param spec_pallet_value: + :return: + """ + + spec_pallet_id, spec_pallet_value = spec_pallet + logging.debug(f"Pallet: {spec_pallet_id}") + + metadata_pallets = metadata["pallets"] + metadata_pallet = metadata_pallets[spec_pallet_id] + + res = map(lambda meta_constant_value: check_constant( + spec_pallet_id, spec_pallet_value["constants"], meta_constant_value), + metadata_pallet["constants"].values()) + res = list(filter(lambda item: item is not None, res)) + return all(res) + + +def check_pallets(metadata, specs): + """ + CHeck all pallets + + :param metadata: + :param specs: + :return: + """ + + res = list(map(lambda spec_pallet: check_pallet(metadata, spec_pallet), + specs['pallets'].items())) + res = list(filter(lambda item: item is not None, res)) + return all(res) + + +def check_metadata(metadata, specs): + """ + Check metadata (json) against a list of expectations + + :param metadata: Metadata in JSON format + :param expectation: Expectations + :return: Bool + """ + + res = check_pallets(metadata, specs) + return res + + +def help(): + """ Show some simple help """ + + print(f"You must pass 2 args, you passed {len(sys.argv) - 1}") + print("Sample call:") + print("check-runtime.py ") + + +def load_json(file): + """ Load json from a file """ + + f = open(file) + return json.load(f) + + +def main(): + LOGLEVEL = os.environ.get('LOGLEVEL', 'INFO').upper() + logging.basicConfig(level=LOGLEVEL) + + if len(sys.argv) != 3: + help() + exit(1) + + metadata_file = sys.argv[1] + specs_file = sys.argv[2] + print(f"Checking metadata from: {metadata_file} with specs from: {specs_file}") + + metadata = load_json(metadata_file) + specs = load_json(specs_file) + + res = check_metadata(metadata, specs) + + if res: + logging.info(f"OK") + exit(0) + else: + print("") + logging.info(f"Some errors were found, run again with LOGLEVEL=debug") + exit(1) + +if 
__name__ == "__main__": + main() diff --git a/.github/scripts/common/lib.sh b/.github/scripts/common/lib.sh index 2a835b4472b2..bd12d9c6e6ff 100755 --- a/.github/scripts/common/lib.sh +++ b/.github/scripts/common/lib.sh @@ -202,21 +202,26 @@ fetch_release_artifacts() { echo "Release ID : $RELEASE_ID" echo "Repo : $REPO" echo "Binary : $BINARY" + OUTPUT_DIR=${OUTPUT_DIR:-"./release-artifacts/${BINARY}"} + echo "OUTPUT_DIR : $OUTPUT_DIR" + echo "Fetching release info..." curl -L -s \ -H "Accept: application/vnd.github+json" \ -H "Authorization: Bearer ${GITHUB_TOKEN}" \ -H "X-GitHub-Api-Version: 2022-11-28" \ https://api.github.com/repos/${REPO}/releases/${RELEASE_ID} > release.json - # Get Asset ids + echo "Extract asset ids..." ids=($(jq -r '.assets[].id' < release.json )) + echo "Extract asset count..." count=$(jq '.assets|length' < release.json ) # Fetch artifacts - mkdir -p "./release-artifacts/${BINARY}" - pushd "./release-artifacts/${BINARY}" > /dev/null + mkdir -p "$OUTPUT_DIR" + pushd "$OUTPUT_DIR" > /dev/null + echo "Fetching assets..." iter=1 for id in "${ids[@]}" do @@ -302,3 +307,40 @@ function increment_rc_tag() { ((suffix++)) echo $suffix } + +function relative_parent() { + echo "$1" | sed -E 's/(.*)\/(.*)\/\.\./\1/g' +} + +# Find all the runtimes, it returns the result as JSON object, compatible to be +# used as Github Workflow Matrix. This call is exposed by the `scan` command and can be used as: +# podman run --rm -it -v /.../fellowship-runtimes:/build docker.io/chevdor/srtool:1.70.0-0.11.1 scan +function find_runtimes() { + libs=($(git grep -I -r --cached --max-depth 20 --files-with-matches 'construct_runtime!' -- '*lib.rs')) + re=".*-runtime$" + JSON=$(jq --null-input '{ "include": [] }') + + # EXCLUDED_RUNTIMES is a space separated list of runtime names (without the -runtime postfix) + # EXCLUDED_RUNTIMES=${EXCLUDED_RUNTIMES:-"substrate-test"} + IFS=' ' read -r -a exclusions <<< "$EXCLUDED_RUNTIMES" + + for lib in "${libs[@]}"; do + crate_dir=$(dirname "$lib") + cargo_toml="$crate_dir/../Cargo.toml" + + name=$(toml get -r $cargo_toml 'package.name') + chain=${name//-runtime/} + + if [[ "$name" =~ $re ]] && ! 
[[ ${exclusions[@]} =~ $chain ]]; then + lib_dir=$(dirname "$lib") + runtime_dir=$(relative_parent "$lib_dir/..") + ITEM=$(jq --null-input \ + --arg chain "$chain" \ + --arg name "$name" \ + --arg runtime_dir "$runtime_dir" \ + '{ "chain": $chain, "crate": $name, "runtime_dir": $runtime_dir }') + JSON=$(echo $JSON | jq ".include += [$ITEM]") + fi + done + echo $JSON +} diff --git a/.github/workflows/build-and-attach-release-runtimes.yml b/.github/workflows/build-and-attach-release-runtimes.yml index ee8c8190bc3e..680a9ecffd31 100644 --- a/.github/workflows/build-and-attach-release-runtimes.yml +++ b/.github/workflows/build-and-attach-release-runtimes.yml @@ -23,7 +23,7 @@ jobs: - { name: glutton-westend, package: glutton-westend-runtime, path: cumulus/parachains/runtimes/glutton/glutton-westend } build_config: # Release build has logging disabled and no dev features - - { type: on-chain-release, opts: --features on-chain-release-build } + - { type: on-chain-release, opts: --features on-chain-release-build } # Debug build has logging enabled and developer features - { type: dev-debug-build, opts: --features try-runtime } @@ -31,11 +31,11 @@ jobs: steps: - name: Checkout code - uses: actions/checkout@v4 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Build ${{ matrix.runtime.name }} ${{ matrix.build_config.type }} id: srtool_build - uses: chevdor/srtool-actions@v0.9.1 + uses: chevdor/srtool-actions@v0.9.2 env: BUILD_OPTS: ${{ matrix.build_config.opts }} with: diff --git a/.github/workflows/check-licenses.yml b/.github/workflows/check-licenses.yml index a5d7ba6ec278..e1e92d288cea 100644 --- a/.github/workflows/check-licenses.yml +++ b/.github/workflows/check-licenses.yml @@ -16,7 +16,7 @@ jobs: steps: - name: Checkout sources uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: actions/setup-node@v4.0.0 + - uses: actions/setup-node@v4.0.1 with: node-version: "18.x" registry-url: "https://npm.pkg.github.com" diff --git a/.github/workflows/check-links.yml b/.github/workflows/check-links.yml index 0932d38c9add..14941efce24d 100644 --- a/.github/workflows/check-links.yml +++ b/.github/workflows/check-links.yml @@ -28,7 +28,7 @@ jobs: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.0 (22. Sep 2023) - name: Lychee link checker - uses: lycheeverse/lychee-action@2ac9f030ccdea0033e2510a23a67da2a2da98492 # for v1.8.0 (15. May 2023) + uses: lycheeverse/lychee-action@fdea7032675810093199f485fe075f057cc37b3e # for v1.9.0 (5. Jan 2024) with: args: >- --config .config/lychee.toml diff --git a/.github/workflows/check-markdown.yml b/.github/workflows/check-markdown.yml index 2108f9420900..2b8a66db35b3 100644 --- a/.github/workflows/check-markdown.yml +++ b/.github/workflows/check-markdown.yml @@ -16,7 +16,7 @@ jobs: - name: Checkout sources uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - - uses: actions/setup-node@v4.0.0 + - uses: actions/setup-node@v4.0.1 with: node-version: "18.x" registry-url: "https://npm.pkg.github.com" @@ -31,4 +31,5 @@ jobs: env: CONFIG: .github/.markdownlint.yaml run: | + echo "Checking markdown formatting. More info: docs/contributor/markdown_linting.md" markdownlint --config "$CONFIG" --ignore target . 
diff --git a/.github/workflows/check-publish.yml b/.github/workflows/check-publish.yml index db0863888b83..1941bd981675 100644 --- a/.github/workflows/check-publish.yml +++ b/.github/workflows/check-publish.yml @@ -12,7 +12,7 @@ jobs: check-publish: runs-on: ubuntu-latest steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Rust Cache uses: Swatinem/rust-cache@3cf7f8cc28d1b4e7d01e3783be10a97d55d483c8 # v2.7.1 diff --git a/.github/workflows/check-runtimes.yml b/.github/workflows/check-runtimes.yml new file mode 100644 index 000000000000..0e5ad104766a --- /dev/null +++ b/.github/workflows/check-runtimes.yml @@ -0,0 +1,94 @@ +name: Check Runtimes Specs +# This GH Workflow fetches the runtimes available in a release. +# It then compares their metadata with reference specs located under +# .github/runtime_specs. + +on: + workflow_dispatch: + inputs: + release_id: + description: | + Release ID. + You can find it using the command: + curl -s \ + -H "Authorization: Bearer ${GITHUB_TOKEN}" https://api.github.com/repos/paritytech/polkadot-sdk/releases | \ + jq '.[] | { name: .name, id: .id }' + required: true + type: string + + # This trigger unfortunately does not work as expected. + # https://github.com/orgs/community/discussions/47794 + # release: + # types: [edited] + +env: + RUNTIME_SPECS_DIR: .github/runtime_specs + DATA_DIR: runtimes + RELEASE_ID: ${{ inputs.release_id }} + REPO: ${{ github.repository }} + +jobs: + find-specs: + name: Fetch runtime specs + outputs: + specs: ${{ steps.get-list.outputs.specs }} + runs-on: ubuntu-latest + steps: + - name: Checkout the repo + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + + - name: Get list + id: get-list + run: | + lst=$(ls $RUNTIME_SPECS_DIR/*.json | xargs -I{} basename "{}" .json | jq -R .| jq -sc .) + echo "Found: $lst" + echo "specs=$lst" >> $GITHUB_OUTPUT + + check-runtimes: + name: Check runtime specs + runs-on: ubuntu-latest + needs: + - find-specs + + strategy: + matrix: + specs: ${{ fromJSON(needs.find-specs.outputs.specs) }} + + steps: + - name: Checkout the repo + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 + + - name: Fetch release artifacts based on release id + env: + OUTPUT_DIR: ${{ env.DATA_DIR }} + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + . 
./.github/scripts/common/lib.sh + fetch_release_artifacts + + - name: Install tooling + env: + SUBWASM_VERSION: v0.20.0 + DL_BASE_URL: https://github.com/chevdor/subwasm/releases/download + run: | + wget $DL_BASE_URL/$SUBWASM_VERSION/subwasm_linux_amd64_$SUBWASM_VERSION.deb \ + -O subwasm.deb + sudo dpkg -i subwasm.deb + subwasm --version + + - name: Extract metadata JSON for ${{ matrix.specs }} + env: + RUNTIME: ${{ matrix.specs }} + run: | + WASM=$(ls ${DATA_DIR}/${RUNTIME}*.wasm) + echo "WASM=$WASM" + subwasm show --json "$WASM" > "${DATA_DIR}/${RUNTIME}.json" + + - name: Check specs for ${{ matrix.specs }} + id: build + env: + RUNTIME: ${{ matrix.specs }} + LOGLEVEL: info + run: | + python --version + .github/scripts/check-runtime.py "${DATA_DIR}/${RUNTIME}.json" "${RUNTIME_SPECS_DIR}/${RUNTIME}.json" diff --git a/.github/workflows/claim-crates.yml b/.github/workflows/claim-crates.yml index 0bd5593b54f2..9e2722662018 100644 --- a/.github/workflows/claim-crates.yml +++ b/.github/workflows/claim-crates.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest environment: master steps: - - uses: actions/checkout@8ade135a41bc03ea155e62e844d188df1ea18608 # v4.1.0 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Rust Cache uses: Swatinem/rust-cache@3cf7f8cc28d1b4e7d01e3783be10a97d55d483c8 # v2.7.1 diff --git a/.github/workflows/fmt-check.yml b/.github/workflows/fmt-check.yml index e4d39acabfd7..99ac5120097d 100644 --- a/.github/workflows/fmt-check.yml +++ b/.github/workflows/fmt-check.yml @@ -15,7 +15,7 @@ jobs: os: ["ubuntu-latest"] runs-on: ${{ matrix.os }} container: - image: paritytech/ci-unified:bullseye-1.73.0-2023-11-01-v20231025 + image: paritytech/ci-unified:bullseye-1.74.0-2023-11-01-v20231204 steps: - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 diff --git a/.github/workflows/srtool.yml b/.github/workflows/srtool.yml new file mode 100644 index 000000000000..eb15538f559d --- /dev/null +++ b/.github/workflows/srtool.yml @@ -0,0 +1,135 @@ +name: Srtool build + +env: + SUBWASM_VERSION: 0.20.0 + TOML_CLI_VERSION: 0.2.4 + +on: + push: + tags: + - "*" + branches: + - release-v[0-9]+.[0-9]+.[0-9]+* + - release-cumulus-v[0-9]+* + - release-polkadot-v[0-9]+* + + schedule: + - cron: "00 02 * * 1" # 2AM weekly on monday + + workflow_dispatch: + +jobs: + find-runtimes: + name: Scan repo paritytech/polkadot-sdk + outputs: + runtime: ${{ steps.get_runtimes_list.outputs.runtime }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + with: + fetch-depth: 0 + + - name: Install tooling + run: | + URL=https://github.com/chevdor/toml-cli/releases/download/v${{ env.TOML_CLI_VERSION }}/toml_linux_amd64_v${{ env.TOML_CLI_VERSION }}.deb + curl -L $URL --output toml.deb + sudo dpkg -i toml.deb + toml --version; jq --version + + - name: Scan runtimes + env: + EXCLUDED_RUNTIMES: "substrate-test" + run: | + . 
./.github/scripts/common/lib.sh + + echo "Github workspace: ${{ github.workspace }}" + echo "Current folder: $(pwd)"; ls -al + ls -al + + MATRIX=$(find_runtimes | tee runtimes_list.json) + echo $MATRIX + + - name: Get runtimes list + id: get_runtimes_list + run: | + ls -al + MATRIX=$(cat runtimes_list.json) + echo $MATRIX + echo "runtime=$MATRIX" >> $GITHUB_OUTPUT + + srtool: + runs-on: ubuntu-latest + needs: + - find-runtimes + strategy: + fail-fast: false + matrix: ${{ fromJSON(needs.find-runtimes.outputs.runtime) }} + + steps: + - uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0 + with: + fetch-depth: 0 + + - name: Srtool build + id: srtool_build + uses: chevdor/srtool-actions@v0.9.2 + with: + chain: ${{ matrix.chain }} + runtime_dir: ${{ matrix.runtime_dir }} + + - name: Summary + run: | + echo '${{ steps.srtool_build.outputs.json }}' | jq > ${{ matrix.chain }}-srtool-digest.json + cat ${{ matrix.chain }}-srtool-digest.json + echo "Compact Runtime: ${{ steps.srtool_build.outputs.wasm }}" + echo "Compressed Runtime: ${{ steps.srtool_build.outputs.wasm_compressed }}" + + # it takes a while to build the runtime, so let's save the artifact as soon as we have it + - name: Archive Artifacts for ${{ matrix.chain }} + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + with: + name: ${{ matrix.chain }}-runtime + path: | + ${{ steps.srtool_build.outputs.wasm }} + ${{ steps.srtool_build.outputs.wasm_compressed }} + ${{ matrix.chain }}-srtool-digest.json + + # We now get extra information thanks to subwasm + - name: Install subwasm + run: | + wget https://github.com/chevdor/subwasm/releases/download/v${{ env.SUBWASM_VERSION }}/subwasm_linux_amd64_v${{ env.SUBWASM_VERSION }}.deb + sudo dpkg -i subwasm_linux_amd64_v${{ env.SUBWASM_VERSION }}.deb + subwasm --version + + - name: Show Runtime information + shell: bash + run: | + subwasm info ${{ steps.srtool_build.outputs.wasm }} + subwasm info ${{ steps.srtool_build.outputs.wasm_compressed }} + subwasm --json info ${{ steps.srtool_build.outputs.wasm }} > ${{ matrix.chain }}-info.json + subwasm --json info ${{ steps.srtool_build.outputs.wasm_compressed }} > ${{ matrix.chain }}-compressed-info.json + + - name: Extract the metadata + shell: bash + run: | + subwasm meta ${{ steps.srtool_build.outputs.wasm }} + subwasm --json meta ${{ steps.srtool_build.outputs.wasm }} > ${{ matrix.chain }}-metadata.json + + - name: Check the metadata diff + shell: bash + # the following subwasm call will error for chains that are not known and/or live, that includes shell for instance + run: | + subwasm diff ${{ steps.srtool_build.outputs.wasm }} --chain-b ${{ matrix.chain }} || \ + echo "Subwasm call failed, check the logs. 
This is likely because ${{ matrix.chain }} is not known by subwasm" | \ + tee ${{ matrix.chain }}-diff.txt + + - name: Archive Subwasm results + uses: actions/upload-artifact@0b7f8abb1508181956e8e162db84b466c27e18ce # v3.1.2 + with: + name: ${{ matrix.chain }}-runtime + path: | + ${{ matrix.chain }}-info.json + ${{ matrix.chain }}-compressed-info.json + ${{ matrix.chain }}-metadata.json + ${{ matrix.chain }}-diff.txt diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml index e8a91568ccfc..dc4b3cf162e1 100644 --- a/.gitlab-ci.yml +++ b/.gitlab-ci.yml @@ -116,7 +116,7 @@ default: tags: - linux-docker -# +# .forklift-cache: before_script: - 'curl --header "PRIVATE-TOKEN: $FL_CI_GROUP_TOKEN" -o forklift -L "${CI_API_V4_URL}/projects/676/packages/generic/forklift/${FL_FORKLIFT_VERSION}/forklift_${FL_FORKLIFT_VERSION}_linux_amd64"' @@ -124,19 +124,19 @@ default: - mkdir .forklift - cp $FL_FORKLIFT_CONFIG .forklift/config.toml - export FORKLIFT_PACKAGE_SUFFIX=${CI_JOB_NAME/ [0-9 \/]*} - - shopt -s expand_aliases + - shopt -s expand_aliases - export PATH=$PATH:$(pwd) - | if [ "$FORKLIFT_BYPASS" != "true" ]; then echo "FORKLIFT_BYPASS not set, creating alias cargo='forklift cargo'" alias cargo="forklift cargo" fi - - ls -al - - rm -f forklift.sock + - ls -al + - rm -f forklift.sock - forklift clean # - echo "FL_FORKLIFT_VERSION ${FL_FORKLIFT_VERSION}" - - echo "FORKLIFT_PACKAGE_SUFFIX $FORKLIFT_PACKAGE_SUFFIX" + - echo "FORKLIFT_PACKAGE_SUFFIX $FORKLIFT_PACKAGE_SUFFIX" .common-refs: rules: diff --git a/.gitlab/pipeline/build.yml b/.gitlab/pipeline/build.yml index 377236193cc5..20aa4a5c2a28 100644 --- a/.gitlab/pipeline/build.yml +++ b/.gitlab/pipeline/build.yml @@ -220,6 +220,7 @@ build-test-parachain: # DAG: build-runtime-assets -> build-runtime-collectives -> build-runtime-bridge-hubs # DAG: build-runtime-assets -> build-runtime-collectives -> build-runtime-contracts +# DAG: build-runtime-assets -> build-runtime-coretime # DAG: build-runtime-assets -> build-runtime-starters -> build-runtime-testing build-runtime-assets: <<: *build-runtime-template @@ -235,6 +236,15 @@ build-runtime-collectives: - job: build-runtime-assets artifacts: false +build-runtime-coretime: + <<: *build-runtime-template + variables: + RUNTIME_PATH: "cumulus/parachains/runtimes/coretime" + # this is an artificial job dependency, for pipeline optimization using GitLab's DAGs + needs: + - job: build-runtime-assets + artifacts: false + build-runtime-bridge-hubs: <<: *build-runtime-template variables: diff --git a/.gitlab/pipeline/check.yml b/.gitlab/pipeline/check.yml index 7d98b9cc71c1..1ed12e68c2ce 100644 --- a/.gitlab/pipeline/check.yml +++ b/.gitlab/pipeline/check.yml @@ -4,8 +4,11 @@ cargo-clippy: - .docker-env - .common-refs - .pipeline-stopper-artifacts + variables: + RUSTFLAGS: "-D warnings" script: - - SKIP_WASM_BUILD=1 env -u RUSTFLAGS cargo clippy --all-targets --locked --workspace + - SKIP_WASM_BUILD=1 cargo clippy --all-targets --locked --workspace + - SKIP_WASM_BUILD=1 cargo clippy --all-targets --all-features --locked --workspace check-try-runtime: stage: check @@ -221,6 +224,19 @@ check-runtime-migration-collectives-westend: URI: "wss://westend-collectives-rpc.polkadot.io:443" COMMAND_EXTRA_ARGS: "--disable-spec-name-check" +# Check runtime migrations for Parity managed coretime chain +check-runtime-migration-coretime-rococo: + stage: check + extends: + - .docker-env + - .test-pr-refs + - .check-runtime-migration + variables: + NETWORK: "coretime-rococo" + PACKAGE: "coretime-rococo-runtime" + WASM: 
"coretime_rococo_runtime.compact.compressed.wasm" + URI: "wss://rococo-coretime-rpc.polkadot.io:443" + find-fail-ci-phrase: stage: check variables: diff --git a/.gitlab/pipeline/short-benchmarks.yml b/.gitlab/pipeline/short-benchmarks.yml index 97bce4799270..e9dbe2008811 100644 --- a/.gitlab/pipeline/short-benchmarks.yml +++ b/.gitlab/pipeline/short-benchmarks.yml @@ -74,6 +74,16 @@ short-benchmark-collectives-westend: variables: RUNTIME_CHAIN: collectives-westend-dev +short-benchmark-coretime-rococo: + <<: *short-bench-cumulus + variables: + RUNTIME_CHAIN: coretime-rococo-dev + +short-benchmark-coretime-westend: + <<: *short-bench-cumulus + variables: + RUNTIME_CHAIN: coretime-westend-dev + short-benchmark-glutton-westend: <<: *short-bench-cumulus variables: diff --git a/.gitlab/pipeline/test.yml b/.gitlab/pipeline/test.yml index f6dad887a68d..00d2b22c8107 100644 --- a/.gitlab/pipeline/test.yml +++ b/.gitlab/pipeline/test.yml @@ -29,7 +29,7 @@ test-linux-stable: --locked \ --release \ --no-fail-fast \ - --features try-runtime,experimental,ci-only-tests \ + --features try-runtime,experimental,riscv,ci-only-tests \ --partition count:${CI_NODE_INDEX}/${CI_NODE_TOTAL} # Upload tests results to Elasticsearch - echo "Upload test results to Elasticsearch" @@ -238,6 +238,8 @@ test-deterministic-wasm: cargo-check-benches: stage: test + artifacts: + expire_in: 10 days variables: CI_JOB_NAME: "cargo-check-benches" extends: @@ -305,6 +307,10 @@ node-bench-regression-guard: CI_IMAGE: "paritytech/node-bench-regression-guard:latest" before_script: [""] script: + - if [ $(ls -la artifacts/benches/ | grep master | wc -l) == 0 ]; then + echo "Couldn't find master artifacts"; + exit 1; + fi - echo "------- IMPORTANT -------" - echo "node-bench-regression-guard depends on the results of a cargo-check-benches job" - echo "In case of this job failure, check your pipeline's cargo-check-benches" @@ -438,6 +444,7 @@ cargo-check-each-crate: - .run-immediately # - .collect-artifacts variables: + RUSTFLAGS: "-D warnings" # $CI_JOB_NAME is set manually so that cache could be shared for all jobs # "cargo-check-each-crate I/N" jobs CI_JOB_NAME: cargo-check-each-crate @@ -519,6 +526,6 @@ test-syscalls: - ./list-syscalls.rb ../../../target/x86_64-unknown-linux-musl/production/polkadot-prepare-worker --only-used-syscalls | diff -u prepare-worker-syscalls - after_script: - if [[ "$CI_JOB_STATUS" == "failed" ]]; then - printf "The x86_64 syscalls used by the worker binaries have changed. Please review if this is expected and update polkadot/scripts/list-syscalls/*-worker-syscalls as needed.\n"; + printf "The x86_64 syscalls used by the worker binaries have changed. 
Please review if this is expected and update polkadot/scripts/list-syscalls/*-worker-syscalls as needed.\n"; fi allow_failure: false # this rarely triggers in practice diff --git a/.gitlab/pipeline/zombienet/polkadot.yml b/.gitlab/pipeline/zombienet/polkadot.yml index d1f3a201c80a..4112096a2ed7 100644 --- a/.gitlab/pipeline/zombienet/polkadot.yml +++ b/.gitlab/pipeline/zombienet/polkadot.yml @@ -131,6 +131,22 @@ zombienet-polkadot-functional-0008-dispute-old-finalized: --local-dir="${LOCAL_DIR}/functional" --test="0008-dispute-old-finalized.zndsl" +zombienet-polkadot-functional-0009-approval-voting-coalescing: + extends: + - .zombienet-polkadot-common + script: + - /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh + --local-dir="${LOCAL_DIR}/functional" + --test="0009-approval-voting-coalescing.zndsl" + +zombienet-polkadot-functional-0010-validator-disabling: + extends: + - .zombienet-polkadot-common + script: + - /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh + --local-dir="${LOCAL_DIR}/functional" + --test="0010-validator-disabling.zndsl" + zombienet-polkadot-smoke-0001-parachains-smoke-test: extends: - .zombienet-polkadot-common @@ -177,6 +193,14 @@ zombienet-polkadot-smoke-0003-deregister-register-validator: --local-dir="${LOCAL_DIR}/smoke" --test="0003-deregister-register-validator-smoke.zndsl" +zombienet-polkadot-smoke-0004-coretime-smoke-test: + extends: + - .zombienet-polkadot-common + script: + - /home/nonroot/zombie-net/scripts/ci/run-test-local-env-manager.sh + --local-dir="${LOCAL_DIR}/smoke" + --test="0004-coretime-smoke-test.zndsl" + zombienet-polkadot-misc-0001-parachains-paritydb: extends: - .zombienet-polkadot-common @@ -201,7 +225,7 @@ zombienet-polkadot-misc-0002-upgrade-node: # Exit if the job is not merge queue # - if [[ $CI_COMMIT_REF_NAME != *"gh-readonly-queue"* ]]; then echo "I will run only in a merge queue"; exit 0; fi - export ZOMBIENET_INTEGRATION_TEST_IMAGE="docker.io/parity/polkadot:latest" - - echo "Overrided poladot image ${ZOMBIENET_INTEGRATION_TEST_IMAGE}" + - echo "Overrided polkadot image ${ZOMBIENET_INTEGRATION_TEST_IMAGE}" - export COL_IMAGE="${COLANDER_IMAGE}":${PIPELINE_IMAGE_TAG} - BUILD_LINUX_JOB_ID="$(cat ./artifacts/BUILD_LINUX_JOB_ID)" - export POLKADOT_PR_ARTIFACTS_URL="https://gitlab.parity.io/parity/mirrors/polkadot-sdk/-/jobs/${BUILD_LINUX_JOB_ID}/artifacts/raw/artifacts" diff --git a/Cargo.lock b/Cargo.lock index a7bf114f9848..767c87def7c8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -68,7 +68,7 @@ version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", "once_cell", "version_check", ] @@ -80,7 +80,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" dependencies = [ "cfg-if", - "getrandom 0.2.11", + "getrandom 0.2.12", "once_cell", "version_check", "zerocopy", @@ -124,9 +124,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.5" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" +checksum = "628a8f9bd1e24b4e0db2b4bc2d000b001e7dd032d54afa60a68836aeec5aa54a" dependencies = [ "anstyle", "anstyle-parse", @@ -187,9 +187,9 @@ dependencies = [ [[package]] name = 
"aquamarine" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "074b80d14d0240b6ce94d68f059a2d26a5d77280ae142662365a21ef6e2594ef" +checksum = "21cc1548309245035eb18aa7f0967da6bc65587005170c56e6ef2788a4cf3f4e" dependencies = [ "include_dir", "itertools 0.10.5", @@ -418,9 +418,9 @@ dependencies = [ [[package]] name = "ark-scale" -version = "0.0.11" +version = "0.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "51bd73bb6ddb72630987d37fa963e99196896c0d0ea81b7c894567e74a2f83af" +checksum = "5f69c00b3b529be29528a6f2fd5fa7b1790f8bed81b9cdca17e326538545a179" dependencies = [ "ark-ec", "ark-ff", @@ -433,7 +433,7 @@ dependencies = [ [[package]] name = "ark-secret-scalar" version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=2019248#2019248785389b3246d55b1c3b0e9bdef4454cb7" +source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" dependencies = [ "ark-ec", "ark-ff", @@ -475,14 +475,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94893f1e0c6eeab764ade8dc4c0db24caf4fe7cbbaafc0eba0a9030f447b5185" dependencies = [ "num-traits", - "rand 0.8.5", + "rand", "rayon", ] [[package]] name = "ark-transcript" version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=2019248#2019248785389b3246d55b1c3b0e9bdef4454cb7" +source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" dependencies = [ "ark-ff", "ark-serialize", @@ -504,12 +504,6 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545" -[[package]] -name = "arrayvec" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" - [[package]] name = "arrayvec" version = "0.7.4" @@ -557,9 +551,9 @@ dependencies = [ [[package]] name = "assert_cmd" -version = "2.0.12" +version = "2.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "88903cb14723e4d4003335bb7f8a14f27691649105346a0f0957466c096adfe6" +checksum = "00ad3f3a942eee60335ab4342358c161ee296829e0d16ff42fc1d6cb07815467" dependencies = [ "anstyle", "bstr", @@ -591,11 +585,11 @@ dependencies = [ [[package]] name = "async-io" -version = "2.2.2" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6afaa937395a620e33dc6a742c593c01aced20aa376ffb0f628121198578ccc7" +checksum = "fb41eb19024a91746eba0773aa5e16036045bbf45733766661099e182ea6a744" dependencies = [ - "async-lock 3.2.0", + "async-lock 3.3.0", "cfg-if", "concurrent-queue", "futures-io", @@ -619,9 +613,9 @@ dependencies = [ [[package]] name = "async-lock" -version = "3.2.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7125e42787d53db9dd54261812ef17e937c95a51e4d291373b670342fa44310c" +checksum = "d034b430882f8381900d3fe6f0aaa3ad94f2cb4ac519b429692a1bc2dda4ae7b" dependencies = [ "event-listener 4.0.3", "event-listener-strategy", @@ -709,7 +703,7 @@ dependencies = [ [[package]] name = "bandersnatch_vrfs" version = "0.0.4" -source = "git+https://github.com/w3f/ring-vrf?rev=2019248#2019248785389b3246d55b1c3b0e9bdef4454cb7" +source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" dependencies = [ "ark-bls12-381", "ark-ec", @@ -719,8 +713,8 @@ 
dependencies = [ "ark-std", "dleq_vrf", "fflonk", - "merlin 3.0.0", - "rand_chacha 0.3.1", + "merlin", + "rand_chacha", "rand_core 0.6.4", "ring 0.1.0", "sha2 0.10.8", @@ -743,9 +737,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.6" +version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c79fed4cdb43e993fcdadc7e58a09fd0e3e649c4436fa11da71c9f1f3ee7feb9" +checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "base64ct" @@ -808,7 +802,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "93f2635620bf0b9d4576eb7bb9a38a55df78bd1205d26fa994b25911a69f212f" dependencies = [ "bitcoin_hashes", - "rand 0.8.5", + "rand", "rand_core 0.6.4", "serde", "unicode-normalization", @@ -828,9 +822,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "bitvec" @@ -872,7 +866,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "23285ad32269793932e830392f2fe2f83e26488fd3ec778883a93c8323735780" dependencies = [ "arrayref", - "arrayvec 0.7.4", + "arrayvec", "constant_time_eq", ] @@ -883,7 +877,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94230421e395b9920d23df13ea5d77a20e1725331f90fbbf6df6040b33f756ae" dependencies = [ "arrayref", - "arrayvec 0.7.4", + "arrayvec", "constant_time_eq", ] @@ -894,24 +888,12 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" dependencies = [ "arrayref", - "arrayvec 0.7.4", + "arrayvec", "cc", "cfg-if", "constant_time_eq", ] -[[package]] -name = "block-buffer" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b" -dependencies = [ - "block-padding", - "byte-tools", - "byteorder", - "generic-array 0.12.4", -] - [[package]] name = "block-buffer" version = "0.9.0" @@ -930,15 +912,6 @@ dependencies = [ "generic-array 0.14.7", ] -[[package]] -name = "block-padding" -version = "0.1.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa79dedbb091f449f1f39e53edf88d5dbe95f895dae6135a8d7b881fb5af73f5" -dependencies = [ - "byte-tools", -] - [[package]] name = "bounded-collections" version = "0.1.9" @@ -1226,9 +1199,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.14" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33e92c5c1a78c62968ec57dbc2440366a2d6e5a23faf829970ff1585dc6b18e2" +checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" dependencies = [ "clap_builder", "clap_derive", @@ -1236,9 +1209,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.14" +version = "4.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4323769dc8a61e2c39ad7dc26f6f2800524691a44d74fe3d1071a5c24db6370" +checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" dependencies = [ "anstream", "anstyle", @@ -1249,11 +1222,11 @@ dependencies = [ 
[[package]] name = "clap_complete" -version = "4.4.6" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97aeaa95557bd02f23fbb662f981670c3d20c5a26e69f7354b28f57092437fcd" +checksum = "dfb0d4825b75ff281318c393e8e1b80c4da9fb75a6b1d98547d389d6fe1f48d2" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", ] [[package]] @@ -1322,8 +1295,8 @@ dependencies = [ "ark-std", "fflonk", "getrandom_or_panic", - "merlin 3.0.0", - "rand_chacha 0.3.1", + "merlin", + "rand_chacha", ] [[package]] @@ -1343,15 +1316,15 @@ dependencies = [ [[package]] name = "console" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode", "lazy_static", "libc", "unicode-width", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] @@ -1375,7 +1348,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f9d839f2a20b0aee515dc581a6172f2321f96cab76c1a38a4c584a194955390e" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", "once_cell", "tiny-keccak", ] @@ -1649,19 +1622,6 @@ dependencies = [ "cipher 0.4.4", ] -[[package]] -name = "curve25519-dalek" -version = "2.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a9b85542f99a2dfa2a1b8e192662741c9859a846b296bef1c92ef9b58b5a216" -dependencies = [ - "byteorder", - "digest 0.8.1", - "rand_core 0.5.1", - "subtle 2.5.0", - "zeroize", -] - [[package]] name = "curve25519-dalek" version = "3.2.0" @@ -1932,7 +1892,7 @@ checksum = "86e3bdc80eee6e16b2b6b0f87fbc98c04bee3455e35174c0de1a125d0688c632" [[package]] name = "dleq_vrf" version = "0.0.2" -source = "git+https://github.com/w3f/ring-vrf?rev=2019248#2019248785389b3246d55b1c3b0e9bdef4454cb7" +source = "git+https://github.com/w3f/ring-vrf?rev=e9782f9#e9782f938629c90f3adb3fff2358bc8d1386af3e" dependencies = [ "ark-ec", "ark-ff", @@ -1941,7 +1901,7 @@ dependencies = [ "ark-serialize", "ark-std", "ark-transcript", - "arrayvec 0.7.4", + "arrayvec", "zeroize", ] @@ -2143,12 +2103,6 @@ dependencies = [ "syn 2.0.48", ] -[[package]] -name = "fake-simd" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed" - [[package]] name = "fallible-iterator" version = "0.3.0" @@ -2181,7 +2135,7 @@ dependencies = [ "ark-poly", "ark-serialize", "ark-std", - "merlin 3.0.0", + "merlin", ] [[package]] @@ -2215,7 +2169,7 @@ dependencies = [ "num-traits", "parity-scale-codec", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "scale-info", ] @@ -2226,7 +2180,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "835c052cb0c08c1acf6ffd71c022172e18723949c8282f2b9f27efbc51e64534" dependencies = [ "byteorder", - "rand 0.8.5", + "rand", "rustc-hex", "static_assertions", ] @@ -2298,7 +2252,6 @@ dependencies = [ "log", "parity-scale-codec", "scale-info", - "simple-mermaid", "sp-api", "sp-arithmetic", "sp-block-builder", @@ -2349,7 +2302,7 @@ dependencies = [ "Inflector", "array-bytes", "chrono", - "clap 4.4.14", + "clap 4.4.18", "comfy-table", "frame-benchmarking", "frame-support", @@ -2361,7 +2314,7 @@ dependencies = [ "linked-hash-map", "log", "parity-scale-codec", - "rand 0.8.5", + "rand", "rand_pcg", "sc-block-builder", "sc-cli", @@ -2410,7 +2363,7 @@ dependencies = 
[ "frame-election-provider-support", "frame-support", "parity-scale-codec", - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "scale-info", @@ -2427,7 +2380,7 @@ dependencies = [ "frame-support", "frame-system", "parity-scale-codec", - "rand 0.8.5", + "rand", "scale-info", "sp-arithmetic", "sp-core", @@ -2511,6 +2464,7 @@ name = "frame-support" version = "4.0.0-dev" dependencies = [ "aquamarine", + "array-bytes", "assert_matches", "bitflags 1.3.2", "docify", @@ -2559,6 +2513,7 @@ dependencies = [ "proc-macro-warning", "proc-macro2", "quote", + "regex", "sp-core-hashing", "syn 2.0.48", ] @@ -2568,7 +2523,7 @@ name = "frame-support-procedural-tools" version = "4.0.0-dev" dependencies = [ "frame-support-procedural-tools-derive", - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.48", @@ -2871,7 +2826,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "27d12c0aed7f1e24276a241aadc4cb8ea9f83000f34bc062b7cc2d51e3b0fabd" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "debugid", "fxhash", "serde", @@ -2933,9 +2888,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", @@ -2948,7 +2903,7 @@ version = "0.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6ea1015b5a70616b688dc230cfe50c8af89d972cb132d5a622814d29773b10b9" dependencies = [ - "rand 0.8.5", + "rand", "rand_core 0.6.4", ] @@ -2994,9 +2949,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.22" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" +checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ "bytes", "fnv", @@ -3019,9 +2974,9 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "handlebars" -version = "5.0.0" +version = "5.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94eae21d01d20dabef65d8eda734d83df6e2dea8166788804be9bd6bc92448fa" +checksum = "ab283476b99e66691dee3f1640fea91487a8d81f50fb5ecc75538f8f8879a1e4" dependencies = [ "log", "pest", @@ -3100,9 +3055,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "5d3d0e0f38255e7fa3cf31335b3a56f05febd18025f4db5ef7a0cfb4f8da651f" [[package]] name = "hex" @@ -3452,7 +3407,7 @@ version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0bad00257d07be169d870ab665980b06cdb366d792ad690bf2e76876dc503455" dependencies = [ - "hermit-abi 0.3.3", + "hermit-abi 0.3.4", "rustix", "windows-sys 0.52.0", ] @@ -3521,9 +3476,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.66" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +checksum = "9a1d36f1235bc969acba30b7f5990b864423a6068a10f7c90ae8f0112e3a59d1" dependencies = [ "wasm-bindgen", ] @@ -3582,7 +3537,7 @@ 
source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b5dde66c53d6dcdc8caea1874a45632ec0fcf5b437789f1e45766a1512ce803" dependencies = [ "anyhow", - "arrayvec 0.7.4", + "arrayvec", "async-lock 2.8.0", "async-trait", "beef", @@ -3593,7 +3548,7 @@ dependencies = [ "hyper", "jsonrpsee-types", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "rustc-hash", "serde", "serde_json", @@ -3685,9 +3640,9 @@ dependencies = [ [[package]] name = "keccak" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f6d5ed8676d904364de097082f4e7d240b571b67989ced0240f08b7f966f940" +checksum = "ecc2af9a1119c51f12a14607e783cb977bde58bc069ff0c3da1095e635d70654" dependencies = [ "cpufeatures", ] @@ -3876,7 +3831,7 @@ dependencies = [ "bytes", "futures", "futures-timer", - "getrandom 0.2.11", + "getrandom 0.2.12", "instant", "libp2p-allow-block-list", "libp2p-connection-limits", @@ -3944,7 +3899,7 @@ dependencies = [ "parking_lot 0.12.1", "pin-project", "quick-protobuf", - "rand 0.8.5", + "rand", "rw-stream-sink", "smallvec", "thiserror", @@ -4000,7 +3955,7 @@ dependencies = [ "multiaddr", "multihash 0.17.0", "quick-protobuf", - "rand 0.8.5", + "rand", "sha2 0.10.8", "thiserror", "zeroize", @@ -4012,7 +3967,7 @@ version = "0.43.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39d5ef876a2b2323d63c258e63c2f8e36f205fe5a11f0b3095d59635650790ff" dependencies = [ - "arrayvec 0.7.4", + "arrayvec", "asynchronous-codec", "bytes", "either", @@ -4025,7 +3980,7 @@ dependencies = [ "libp2p-swarm", "log", "quick-protobuf", - "rand 0.8.5", + "rand", "sha2 0.10.8", "smallvec", "thiserror", @@ -4047,7 +4002,7 @@ dependencies = [ "libp2p-identity", "libp2p-swarm", "log", - "rand 0.8.5", + "rand", "smallvec", "socket2 0.4.10", "tokio", @@ -4083,7 +4038,7 @@ dependencies = [ "log", "once_cell", "quick-protobuf", - "rand 0.8.5", + "rand", "sha2 0.10.8", "snow", "static_assertions", @@ -4105,7 +4060,7 @@ dependencies = [ "libp2p-core", "libp2p-swarm", "log", - "rand 0.8.5", + "rand", "void", ] @@ -4125,7 +4080,7 @@ dependencies = [ "log", "parking_lot 0.12.1", "quinn-proto", - "rand 0.8.5", + "rand", "rustls 0.20.9", "thiserror", "tokio", @@ -4143,7 +4098,7 @@ dependencies = [ "libp2p-core", "libp2p-identity", "libp2p-swarm", - "rand 0.8.5", + "rand", "smallvec", ] @@ -4162,7 +4117,7 @@ dependencies = [ "libp2p-identity", "libp2p-swarm-derive", "log", - "rand 0.8.5", + "rand", "smallvec", "tokio", "void", @@ -4266,7 +4221,7 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "libc", "redox_syscall 0.4.1", ] @@ -4288,9 +4243,9 @@ dependencies = [ [[package]] name = "libz-sys" -version = "1.1.13" +version = "1.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f526fdd09d99e19742883e43de41e1aa9e36db0c7ab7f935165d611c5cccc66" +checksum = "295c17e837573c8c821dbaeb3cceb3d745ad082f7572191409e69cbc1b3fd050" dependencies = [ "cc", "pkg-config", @@ -4332,9 +4287,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lioness" @@ -4538,18 +4493,6 @@ 
dependencies = [ "hash-db", ] -[[package]] -name = "merlin" -version = "2.0.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e261cf0f8b3c42ded9f7d2bb59dea03aa52bc8a1cbc7482f9fc3fd1229d3b42" -dependencies = [ - "byteorder", - "keccak", - "rand_core 0.5.1", - "zeroize", -] - [[package]] name = "merlin" version = "3.0.0" @@ -4572,7 +4515,7 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" name = "minimal-node" version = "4.0.0-dev" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "frame", "futures", "jsonrpsee", @@ -4644,7 +4587,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "daa3eb39495d8e2e2947a1d862852c90cc6a4a8845f8b41c8829cb9fcc047f4a" dependencies = [ "arrayref", - "arrayvec 0.7.4", + "arrayvec", "bitflags 1.3.2", "blake2 0.10.6", "c2-chacha", @@ -4654,8 +4597,8 @@ dependencies = [ "lioness", "log", "parking_lot 0.12.1", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "rand_distr", "subtle 2.5.0", "thiserror", @@ -4865,7 +4808,7 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7bddcd3bf5144b6392de80e04c347cd7fab2508f6df16a85fc496ecd5cec39bc" dependencies = [ - "rand 0.8.5", + "rand", ] [[package]] @@ -4951,7 +4894,7 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "cfg-if", "libc", ] @@ -4961,7 +4904,7 @@ name = "node-bench" version = "0.9.0-dev" dependencies = [ "array-bytes", - "clap 4.4.14", + "clap 4.4.18", "derive_more", "fs_extra", "futures", @@ -4974,7 +4917,7 @@ dependencies = [ "node-primitives", "node-testing", "parity-db", - "rand 0.8.5", + "rand", "sc-basic-authorship", "sc-client-api", "sc-transaction-pool", @@ -5034,7 +4977,7 @@ dependencies = [ name = "node-runtime-generate-bags" version = "3.0.0" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "generate-bags", "kitchensink-runtime", ] @@ -5043,7 +4986,7 @@ dependencies = [ name = "node-template" version = "4.0.0-dev" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "frame-benchmarking", "frame-benchmarking-cli", "frame-system", @@ -5086,7 +5029,7 @@ dependencies = [ name = "node-template-release" version = "3.0.0" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "flate2", "fs_extra", "glob", @@ -5234,7 +5177,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a652d9771a63711fd3c3deb670acfbe5c30a4072e664d7a3bf5a9e1056ac72c3" dependencies = [ - "arrayvec 0.7.4", + "arrayvec", "itoa", ] @@ -5287,7 +5230,7 @@ version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" dependencies = [ - "hermit-abi 0.3.3", + "hermit-abi 0.3.4", "libc", ] @@ -5523,7 +5466,7 @@ dependencies = [ "frame-election-provider-support", "honggfuzz", "pallet-bags-list", - "rand 0.8.5", + "rand", ] [[package]] @@ -5814,7 +5757,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "rand 0.8.5", + "rand", "scale-info", "sp-core", "sp-io", @@ -5851,7 +5794,7 @@ dependencies = [ "pallet-staking-reward-curve", "pallet-timestamp", "parity-scale-codec", - "rand_chacha 0.3.1", + "rand_chacha", "scale-info", "serde", "sp-core", @@ -5868,7 +5811,7 @@ dependencies = [ name = "pallet-staking-reward-curve" version = "4.0.0-dev" 
dependencies = [ - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "sp-runtime", @@ -6078,7 +6021,7 @@ dependencies = [ "lz4", "memmap2", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "siphasher", "snap", "winapi", @@ -6090,7 +6033,7 @@ version = "3.6.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "881331e34fa842a2fb61cc2db9643a8fedc615e47cfcc52597d1af0db9a7e8fe" dependencies = [ - "arrayvec 0.7.4", + "arrayvec", "bitvec", "byte-slice-cast", "bytes", @@ -6315,9 +6258,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69d3587f8a9e599cc7ec2c00e331f71c4e69a5f9a4b8a6efd5b07466b9736f9a" +checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" [[package]] name = "platforms" @@ -6355,9 +6298,9 @@ dependencies = [ [[package]] name = "polling" -version = "3.3.1" +version = "3.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf63fa624ab313c11656b4cda960bfc46c410187ad493c41f6ba2d8c1e991c9e" +checksum = "545c980a3880efd47b2e262f6a4bb6daad6555cf3367aa9c4e52895f69537a41" dependencies = [ "cfg-if", "concurrent-queue", @@ -6410,13 +6353,12 @@ checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] name = "predicates" -version = "3.0.4" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dfc28575c2e3f19cb3c73b93af36460ae898d426eba6fc15b9bd2a5220758a0" +checksum = "68b87bfd4605926cdfefc1c3b5f8fe560e3feca9d5552cf68c466d3d8236c7e8" dependencies = [ "anstyle", "difflib", - "itertools 0.11.0", "predicates-core", ] @@ -6489,6 +6431,15 @@ dependencies = [ "toml_edit 0.20.7", ] +[[package]] +name = "proc-macro-crate" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d37c51ca738a55da99dc0c4a34860fd675453b8b36209178c2249bb13651284" +dependencies = [ + "toml_edit 0.21.0", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -6667,7 +6618,7 @@ version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" dependencies = [ - "rand 0.8.5", + "rand", ] [[package]] @@ -6688,7 +6639,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94b0b33c13a79f669c85defaf4c275dc86a0c0372807d0ca3d78e0bb87274863" dependencies = [ "bytes", - "rand 0.8.5", + "rand", "ring 0.16.20", "rustc-hash", "rustls 0.20.9", @@ -6714,19 +6665,6 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" -[[package]] -name = "rand" -version = "0.7.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a6b1679d49b24bbfe0c803429aa1874472f50d9b363131f0e89fc356b544d03" -dependencies = [ - "getrandom 0.1.16", - "libc", - "rand_chacha 0.2.2", - "rand_core 0.5.1", - "rand_hc", -] - [[package]] name = "rand" version = "0.8.5" @@ -6734,20 +6672,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", - "rand_chacha 0.3.1", + "rand_chacha", "rand_core 0.6.4", ] -[[package]] -name = "rand_chacha" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f4c8ed856279c9737206bf725bf36935d8666ead7aa69b52be55af369d193402" -dependencies = [ - "ppv-lite86", - "rand_core 0.5.1", -] - [[package]] name = "rand_chacha" version = "0.3.1" @@ -6773,7 +6701,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", ] [[package]] @@ -6783,16 +6711,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" dependencies = [ "num-traits", - "rand 0.8.5", -] - -[[package]] -name = "rand_hc" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" -dependencies = [ - "rand_core 0.5.1", + "rand", ] [[package]] @@ -6812,9 +6731,9 @@ checksum = "60a357793950651c4ed0f3f52338f53b2f809f32d83a07f72909fa13e4c6c1e3" [[package]] name = "rayon" -version = "1.8.0" +version = "1.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" +checksum = "fa7237101a77a10773db45d62004a272517633fbcc3df19d96455ede1122e051" dependencies = [ "either", "rayon-core", @@ -6822,9 +6741,9 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.12.0" +version = "1.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" +checksum = "1465873a3dfdaa8ae7cb14b4383657caab0b3e8a0aa9ae8e04b044854c8dfce2" dependencies = [ "crossbeam-deque", "crossbeam-utils", @@ -6866,7 +6785,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.12", "libredox", "thiserror", ] @@ -6971,7 +6890,7 @@ dependencies = [ "blake2 0.10.6", "common", "fflonk", - "merlin 3.0.0", + "merlin", ] [[package]] @@ -6996,7 +6915,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", - "getrandom 0.2.11", + "getrandom 0.2.12", "libc", "spin 0.9.8", "untrusted 0.9.0", @@ -7096,11 +7015,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.38.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", @@ -7149,7 +7068,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.6", + "base64 0.21.7", ] [[package]] @@ -7240,7 +7159,7 @@ dependencies = [ "prost", "prost-build", "quickcheck", - "rand 0.8.5", + "rand", "sc-client-api", "sc-network", "sp-api", @@ -7326,7 +7245,7 @@ dependencies = [ name = "sc-chain-spec-derive" version = "4.0.0-dev" dependencies = [ - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.48", @@ -7339,7 +7258,7 @@ dependencies = [ "array-bytes", "bip39", "chrono", - "clap 4.4.14", + "clap 4.4.18", "fdlimit", "futures", 
"futures-timer", @@ -7348,7 +7267,7 @@ dependencies = [ "log", "names", "parity-scale-codec", - "rand 0.8.5", + "rand", "regex", "rpassword", "sc-client-api", @@ -7420,7 +7339,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", "quickcheck", - "rand 0.8.5", + "rand", "sc-client-api", "sc-state-db", "schnellru", @@ -7595,7 +7514,7 @@ dependencies = [ "log", "parity-scale-codec", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "sc-block-builder", "sc-chain-spec", "sc-client-api", @@ -7778,7 +7697,7 @@ name = "sc-mixnet" version = "0.1.0-dev" dependencies = [ "array-bytes", - "arrayvec 0.7.4", + "arrayvec", "blake2 0.10.6", "bytes", "futures", @@ -7825,7 +7744,7 @@ dependencies = [ "parking_lot 0.12.1", "partial_sort", "pin-project", - "rand 0.8.5", + "rand", "sc-client-api", "sc-network-common", "sc-network-light", @@ -7958,7 +7877,7 @@ dependencies = [ "libp2p", "log", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "sc-block-builder", "sc-client-api", "sc-consensus", @@ -8007,7 +7926,7 @@ dependencies = [ "num_cpus", "parity-scale-codec", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "sc-block-builder", "sc-client-api", "sc-client-db", @@ -8172,7 +8091,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", "pin-project", - "rand 0.8.5", + "rand", "sc-chain-spec", "sc-client-api", "sc-client-db", @@ -8268,10 +8187,9 @@ dependencies = [ name = "sc-storage-monitor" version = "0.1.0" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "fs4", "log", - "sc-client-db", "sp-core", "thiserror", "tokio", @@ -8303,7 +8221,7 @@ dependencies = [ "futures", "libc", "log", - "rand 0.8.5", + "rand", "rand_pcg", "regex", "sc-telemetry", @@ -8325,7 +8243,7 @@ dependencies = [ "log", "parking_lot 0.12.1", "pin-project", - "rand 0.8.5", + "rand", "sc-utils", "serde", "serde_json", @@ -8366,7 +8284,7 @@ dependencies = [ name = "sc-tracing-proc-macro" version = "4.0.0-dev" dependencies = [ - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.48", @@ -8471,6 +8389,17 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "schemars" +version = "0.8.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45a28f4c49489add4ce10783f7911893516f15afe45d015608d41faca6bc4d29" +dependencies = [ + "dyn-clone", + "serde", + "serde_json", +] + [[package]] name = "schnellru" version = "0.2.1" @@ -8484,18 +8413,19 @@ dependencies = [ [[package]] name = "schnorrkel" -version = "0.9.1" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "021b403afe70d81eea68f6ea12f6b3c9588e5d536a94c3bf80f15e7faa267862" +checksum = "8de18f6d8ba0aad7045f5feae07ec29899c1112584a38509a84ad7b04451eaa0" dependencies = [ + "aead", "arrayref", - "arrayvec 0.5.2", - "curve25519-dalek 2.1.3", - "getrandom 0.1.16", - "merlin 2.0.1", - "rand 0.7.3", - "rand_core 0.5.1", - "sha2 0.8.2", + "arrayvec", + "curve25519-dalek 4.1.1", + "getrandom_or_panic", + "merlin", + "rand_core 0.6.4", + "serde_bytes", + "sha2 0.10.8", "subtle 2.5.0", "zeroize", ] @@ -8563,6 +8493,15 @@ dependencies = [ "serde_derive", ] +[[package]] +name = "serde_bytes" +version = "0.11.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b8497c313fd43ab992087548117643f6fcd935cbf36f176ffda0aacf9591734" +dependencies = [ + "serde", +] + [[package]] name = "serde_derive" version = "1.0.195" @@ -8618,18 +8557,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "sha2" -version = "0.8.2" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a256f46ea78a0c0d9ff00077504903ac881a1dafdc20da66545699e7776b3e69" -dependencies = [ - "block-buffer 0.7.3", - "digest 0.8.1", - "fake-simd", - "opaque-debug 0.2.3", -] - [[package]] name = "sha2" version = "0.9.9" @@ -8710,11 +8637,6 @@ dependencies = [ "wide", ] -[[package]] -name = "simple-mermaid" -version = "0.1.0" -source = "git+https://github.com/kianenigma/simple-mermaid.git?rev=e48b187bcfd5cc75111acd9d241f1bd36604344b#e48b187bcfd5cc75111acd9d241f1bd36604344b" - [[package]] name = "siphasher" version = "0.3.11" @@ -8738,9 +8660,9 @@ checksum = "826167069c09b99d56f31e9ae5c99049e932a98c9dc2dac47645b08dbbf76ba7" [[package]] name = "smallvec" -version = "1.11.2" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "2593d31f82ead8df961d8bd23a64c2ccf2eb5dd34b0a34bfb4dd54011c72009e" [[package]] name = "snap" @@ -8798,7 +8720,7 @@ dependencies = [ "http", "httparse", "log", - "rand 0.8.5", + "rand", "sha-1", ] @@ -8831,7 +8753,7 @@ dependencies = [ "assert_matches", "blake2 0.10.6", "expander", - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.48", @@ -8892,7 +8814,7 @@ dependencies = [ "num-traits", "parity-scale-codec", "primitive-types", - "rand 0.8.5", + "rand", "scale-info", "serde", "sp-core", @@ -9080,12 +9002,12 @@ dependencies = [ "itertools 0.12.0", "lazy_static", "log", - "merlin 2.0.1", + "merlin", "parity-scale-codec", "parking_lot 0.12.1", "paste", "primitive-types", - "rand 0.8.5", + "rand", "regex", "scale-info", "schnorrkel", @@ -9229,7 +9151,6 @@ dependencies = [ name = "sp-keyring" version = "24.0.0" dependencies = [ - "lazy_static", "sp-core", "sp-runtime", "strum 0.25.0", @@ -9241,8 +9162,8 @@ version = "0.27.0" dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "sp-core", "sp-externalities", "thiserror", @@ -9282,7 +9203,7 @@ name = "sp-npos-elections" version = "4.0.0-dev" dependencies = [ "parity-scale-codec", - "rand 0.8.5", + "rand", "scale-info", "serde", "sp-arithmetic", @@ -9296,9 +9217,9 @@ dependencies = [ name = "sp-npos-elections-fuzzer" version = "2.0.0-alpha.5" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "honggfuzz", - "rand 0.8.5", + "rand", "sp-npos-elections", "sp-runtime", ] @@ -9342,7 +9263,7 @@ dependencies = [ "log", "parity-scale-codec", "paste", - "rand 0.8.5", + "rand", "scale-info", "serde", "serde_json", @@ -9388,7 +9309,7 @@ version = "11.0.0" dependencies = [ "Inflector", "expander", - "proc-macro-crate 2.0.0", + "proc-macro-crate 3.1.0", "proc-macro2", "quote", "syn 2.0.48", @@ -9470,7 +9391,7 @@ dependencies = [ "parity-scale-codec", "parking_lot 0.12.1", "pretty_assertions", - "rand 0.8.5", + "rand", "smallvec", "sp-core", "sp-externalities", @@ -9551,13 +9472,12 @@ dependencies = [ "array-bytes", "criterion", "hash-db", - "hashbrown 0.14.3", "lazy_static", "memory-db", "nohash-hasher", "parity-scale-codec", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "scale-info", "schnellru", "sp-core", @@ -9618,6 +9538,7 @@ dependencies = [ "bounded-collections", "parity-scale-codec", "scale-info", + "schemars", "serde", "smallvec", "sp-arithmetic", @@ -9689,10 +9610,8 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" name = "staging-chain-spec-builder" version = "2.0.0" dependencies = [ - "clap 4.4.14", - 
"kitchensink-runtime", + "clap 4.4.18", "log", - "rand 0.8.5", "sc-chain-spec", "serde_json", "sp-tracing", @@ -9704,7 +9623,7 @@ version = "3.0.0-dev" dependencies = [ "array-bytes", "assert_cmd", - "clap 4.4.14", + "clap 4.4.18", "clap_complete", "criterion", "frame-benchmarking", @@ -9733,7 +9652,7 @@ dependencies = [ "pallet-transaction-payment", "parity-scale-codec", "platforms", - "rand 0.8.5", + "rand", "regex", "sc-authority-discovery", "sc-basic-authorship", @@ -9801,7 +9720,7 @@ dependencies = [ name = "staging-node-inspect" version = "0.9.0-dev" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "parity-scale-codec", "sc-cli", "sc-client-api", @@ -9886,7 +9805,7 @@ dependencies = [ name = "subkey" version = "3.0.0" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "sc-cli", ] @@ -9915,7 +9834,7 @@ dependencies = [ name = "substrate-frame-cli" version = "4.0.0-dev" dependencies = [ - "clap 4.4.14", + "clap 4.4.18", "frame-support", "frame-system", "sc-cli", @@ -10239,9 +10158,9 @@ dependencies = [ [[package]] name = "termcolor" -version = "1.4.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff1bc3d3f05aff0403e8ac0d92ced918ec05b666a43f83297ccef5bea8a3d449" +checksum = "06794f8f6c5c898b3275aebefa6b8a1cb24cd2c6c79397ab15774837a0bc5755" dependencies = [ "winapi-util", ] @@ -10423,7 +10342,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7f57eb36ecbe0fc510036adff84824dd3c24bb781e21bfa67b69d556aa85214f" dependencies = [ "pin-project", - "rand 0.8.5", + "rand", "tokio", ] @@ -10559,7 +10478,7 @@ version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "bytes", "futures-core", "futures-util", @@ -10721,7 +10640,7 @@ dependencies = [ "idna 0.2.3", "ipnet", "lazy_static", - "rand 0.8.5", + "rand", "smallvec", "socket2 0.4.10", "thiserror", @@ -10763,7 +10682,7 @@ version = "0.10.0-dev" dependencies = [ "assert_cmd", "async-trait", - "clap 4.4.14", + "clap 4.4.18", "frame-remote-externalities", "frame-try-runtime", "hex", @@ -10798,9 +10717,9 @@ dependencies = [ [[package]] name = "trybuild" -version = "1.0.88" +version = "1.0.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76de4f783e610194f6c98bfd53f9fc52bb2e0d02c947621e8a0f4ecc799b2880" +checksum = "9a9d3ba662913483d6722303f619e75ea10b7855b0f8e0d72799cf8621bb488f" dependencies = [ "basic-toml", "dissimilar", @@ -10826,7 +10745,7 @@ checksum = "97fee6b57c6a41524a810daee9286c02d7752c4253064d0b05472833a438f675" dependencies = [ "cfg-if", "digest 0.10.7", - "rand 0.8.5", + "rand", "static_assertions", ] @@ -10856,9 +10775,9 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.14" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" +checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" [[package]] name = "unicode-ident" @@ -10983,8 +10902,8 @@ dependencies = [ "arrayref", "constcat", "digest 0.10.7", - "rand 0.8.5", - "rand_chacha 0.3.1", + "rand", + "rand_chacha", "rand_core 0.6.4", "sha2 0.10.8", "sha3", @@ -11034,9 +10953,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.89" +version = "0.2.90" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +checksum = "b1223296a201415c7fad14792dbefaace9bd52b62d33453ade1c5b5f07555406" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -11044,9 +10963,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +checksum = "fcdc935b63408d58a32f8cc9738a0bffd8f05cc7c002086c6ef20b7312ad9dcd" dependencies = [ "bumpalo", "log", @@ -11059,9 +10978,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.39" +version = "0.4.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" +checksum = "bde2032aeb86bdfaecc8b261eef3cba735cc426c1f3a3416d1e0791be95fc461" dependencies = [ "cfg-if", "js-sys", @@ -11071,9 +10990,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +checksum = "3e4c238561b2d428924c49815533a8b9121c664599558a5d9ec51f8a1740a999" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -11081,9 +11000,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +checksum = "bae1abb6806dc1ad9e560ed242107c0f6c84335f1749dd4e8ddb012ebd5e25a7" dependencies = [ "proc-macro2", "quote", @@ -11094,9 +11013,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.89" +version = "0.2.90" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" +checksum = "4d91413b1c31d7539ba5ef2451af3f0b833a005eb27a631cec32bc0635a8602b" [[package]] name = "wasm-encoder" @@ -11237,7 +11156,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a3ce373743892002f9391c6741ef0cb0335b55ec899d874f311222b7e36f4594" dependencies = [ "anyhow", - "base64 0.21.6", + "base64 0.21.7", "bincode", "directories-next", "log", @@ -11437,9 +11356,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.66" +version = "0.3.67" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" +checksum = "58cd2333b6e0be7a39605f0e255892fd7418a682d8da8fe042fe25128794d2ed" dependencies = [ "js-sys", "wasm-bindgen", @@ -11557,15 +11476,6 @@ dependencies = [ "windows-targets 0.52.0", ] -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -11584,21 +11494,6 @@ dependencies = [ "windows-targets 0.52.0", ] -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - 
"windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", -] - [[package]] name = "windows-targets" version = "0.48.5" @@ -11629,12 +11524,6 @@ dependencies = [ "windows_x86_64_msvc 0.52.0", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -11647,12 +11536,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" - [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -11665,12 +11548,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" - [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -11683,12 +11560,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" - [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -11701,12 +11572,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" - [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -11719,12 +11584,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" - [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -11737,12 +11596,6 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" - [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -11757,9 +11610,9 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.5.33" +version = "0.5.34" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b7520bbdec7211caa7c4e682eb1fbe07abe20cee6756b6e00f537c82c11816aa" +checksum = "b7cf47b659b318dccbd69cc4797a39ae128f533dce7902a1096044d1967b9c16" dependencies = [ "memchr", ] @@ -11814,9 +11667,9 @@ dependencies = [ [[package]] name = "xattr" -version = "1.2.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "914566e6413e7fa959cc394fb30e563ba80f3541fbd40816d4c05a0fc3f2a0f1" +checksum = "8da84f1a25939b27f6820d92aed108f83ff920fdf11a7b19366c27c4cda81d4f" dependencies = [ "libc", "linux-raw-sys", @@ -11833,7 +11686,7 @@ dependencies = [ "log", "nohash-hasher", "parking_lot 0.12.1", - "rand 0.8.5", + "rand", "static_assertions", ] diff --git a/Cargo.toml b/Cargo.toml index 02596b8f37f2..a70c0e4ee21a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -210,6 +210,35 @@ members = [ ] default-members = ["substrate/bin/node/cli"] +[workspace.lints.rust] +suspicious_double_ref_op = { level = "allow", priority = 2 } + +[workspace.lints.clippy] +all = { level = "allow", priority = 0 } +correctness = { level = "warn", priority = 1 } +complexity = { level = "warn", priority = 1 } +if-same-then-else = { level = "allow", priority = 2 } +zero-prefixed-literal = { level = "allow", priority = 2 } # 00_1000_000 +type_complexity = { level = "allow", priority = 2 } # raison d'etre +nonminimal-bool = { level = "allow", priority = 2 } # maybe +borrowed-box = { level = "allow", priority = 2 } # Reasonable to fix this one +too-many-arguments = { level = "allow", priority = 2 } # (Turning this on would lead to) +needless-lifetimes = { level = "allow", priority = 2 } # generated code +unnecessary_cast = { level = "allow", priority = 2 } # Types may change +identity-op = { level = "allow", priority = 2 } # One case where we do 0 + +useless_conversion = { level = "allow", priority = 2 } # Types may change +unit_arg = { level = "allow", priority = 2 } # stylistic +option-map-unit-fn = { level = "allow", priority = 2 } # stylistic +bind_instead_of_map = { level = "allow", priority = 2 } # stylistic +erasing_op = { level = "allow", priority = 2 } # E.g. 0 * DOLLARS +eq_op = { level = "allow", priority = 2 } # In tests we test equality. +while_immutable_condition = { level = "allow", priority = 2 } # false positives +needless_option_as_deref = { level = "allow", priority = 2 } # false positives +derivable_impls = { level = "allow", priority = 2 } # false positives +stable_sort_primitive = { level = "allow", priority = 2 } # prefer stable sort +extra-unused-type-parameters = { level = "allow", priority = 2 } # stylistic +default_constructed_unit_structs = { level = "allow", priority = 2 } # stylistic + [profile.release] # Polkadot runtime requires unwinding. panic = "unwind" diff --git a/README.md b/README.md index 1f255823b5b6..b94376b35ab0 100644 --- a/README.md +++ b/README.md @@ -22,7 +22,7 @@ currently contains runtimes for the Polkadot, Kusama, Westend, and Rococo networ relocated to the [`runtimes`](https://github.com/polkadot-fellows/runtimes/) repository. 
## [Substrate](./substrate/) - [![SubstrateRustDocs](https://img.shields.io/badge/Rust_Docs-Substrate-24CC85?logo=rust)](https://paritytech.github.io/substrate/master/substrate/index.html) + [![SubstrateRustDocs](https://img.shields.io/badge/Rust_Docs-Substrate-24CC85?logo=rust)](https://paritytech.github.io/polkadot-sdk/master/polkadot_sdk_docs/polkadot_sdk/substrate/index.html) [![Substrate-license](https://img.shields.io/badge/License-GPL3%2FApache2.0-blue)](./substrate/README.md#LICENSE) Substrate is the primary blockchain SDK used by developers to create the parachains that make up the Polkadot network. @@ -30,7 +30,7 @@ Additionally, it allows for the development of self-sovereign blockchains that o Polkadot. ## [Cumulus](./cumulus/) -[![CumulusRustDocs](https://img.shields.io/badge/Rust_Docs-Cumulus-222222?logo=rust)](https://paritytech.github.io/cumulus/cumulus_client_collator/index.html) +[![CumulusRustDocs](https://img.shields.io/badge/Rust_Docs-Cumulus-222222?logo=rust)](https://paritytech.github.io/polkadot-sdk/master/polkadot_sdk_docs/polkadot_sdk/cumulus/index.html) [![Cumulus-license](https://img.shields.io/badge/License-GPL3-blue)](./cumulus/LICENSE) Cumulus is a set of tools for writing Substrate-based Polkadot parachains. diff --git a/docs/contributor/CONTRIBUTING.md b/docs/contributor/CONTRIBUTING.md index 3350d1344149..96dc86e97805 100644 --- a/docs/contributor/CONTRIBUTING.md +++ b/docs/contributor/CONTRIBUTING.md @@ -152,3 +152,9 @@ We use [zepter](https://github.com/ggwpez/zepter) to enforce features are propag If you're member of **paritytech** org - you can use command-bot to run various of common commands in CI: Start with comment in PR: `bot help` to see the list of available commands. + + +## Deprecating code + +When deprecating and removing code you need to be mindful of how this could impact downstream developers. In order to +mitigate this impact, it is recommended to adhere to the steps outlined in the [Deprecation Checklist](./DEPRECATION_CHECKLIST.md). diff --git a/docs/contributor/DEPRECATION_CHECKLIST.md b/docs/contributor/DEPRECATION_CHECKLIST.md index ffb99e1ec3f7..687c0a7cd7da 100644 --- a/docs/contributor/DEPRECATION_CHECKLIST.md +++ b/docs/contributor/DEPRECATION_CHECKLIST.md @@ -1,9 +1,7 @@ # Deprecation Checklist -This deprecation checklist makes sense while we don’t use [SemVer](https://semver.org/). -After that, this document will most likely change. -As deprecation and removal of existing code can happen on any release, we need to be mindful that external builders -could be impacted by the changes we make. +Polkadot SDK is under constant development and improvement, thus deprecation and removal of existing code happen often. +When creating a breaking change we need to be mindful that external builders could be impacted by this. The deprecation checklist tries to mitigate this impact, while still keeping the developer experience, the DevEx, as smooth as possible. 
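Since the contributor docs above point to the Deprecation Checklist for staged removals, here is a minimal, hedged sketch of the standard Rust `#[deprecated]` attribute that this kind of migration typically relies on. The function names and the note text are hypothetical and only illustrate the mechanism; they are not taken from the checklist itself:

```rust
// Hypothetical old API kept around for one more release so downstream code keeps compiling.
// The `note` is what downstream developers see in the compiler's deprecation warning.
#[allow(dead_code)]
#[deprecated(note = "scheduled for removal; use `transfer_allow_death` instead")]
pub fn transfer(amount: u128) -> u128 {
    transfer_allow_death(amount)
}

/// The replacement API that callers are expected to migrate to.
pub fn transfer_allow_death(amount: u128) -> u128 {
    amount
}

fn main() {
    // New code calls the replacement directly; callers still using `transfer` get a
    // deprecation warning rather than an immediate breakage.
    let _ = transfer_allow_death(10);
}
```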
diff --git a/docs/mermaid/polkadot_sdk_parachain.mmd b/docs/mermaid/polkadot_sdk_parachain.mmd index 3f38fce046c2..4cee54ba3f45 100644 --- a/docs/mermaid/polkadot_sdk_parachain.mmd +++ b/docs/mermaid/polkadot_sdk_parachain.mmd @@ -5,7 +5,7 @@ flowchart LR end FRAME -.-> ParachainRuntime - Substrate[Substrate Node Libraries] -.-> ParachainNoe + Substrate[Substrate Node Libraries] -.-> ParachainNode CumulusC[Cumulus Node Libraries] -.-> ParachainNode CumulusR[Cumulus Runtime Libraries] -.-> ParachainRuntime diff --git a/docs/sdk/Cargo.toml b/docs/sdk/Cargo.toml index f5bfd80fd10d..246da2cd68c6 100644 --- a/docs/sdk/Cargo.toml +++ b/docs/sdk/Cargo.toml @@ -10,6 +10,9 @@ edition.workspace = true publish = false version = "0.0.1" +[lints] +workspace = true + [dependencies] # Needed for all FRAME-based code parity-scale-codec = { version = "3.0.0", default-features = false } @@ -19,10 +22,10 @@ pallet-examples = { path = "../../substrate/frame/examples" } pallet-default-config-example = { path = "../../substrate/frame/examples/default-config" } # How we build docs in rust-docs -simple-mermaid = { git = "https://github.com/kianenigma/simple-mermaid.git", branch = "main" } +simple-mermaid = { git = "https://github.com/kianenigma/simple-mermaid.git", rev = "e48b187bcfd5cc75111acd9d241f1bd36604344b" } docify = "0.2.6" -# Polkadot SDK deps, typically all should only be scope such that we can link to their doc item. +# Polkadot SDK deps, typically all should only be in scope such that we can link to their doc item. node-cli = { package = "staging-node-cli", path = "../../substrate/bin/node/cli" } kitchensink-runtime = { path = "../../substrate/bin/node/runtime" } chain-spec-builder = { package = "staging-chain-spec-builder", path = "../../substrate/bin/utils/chain-spec-builder" } diff --git a/docs/sdk/src/guides/mod.rs b/docs/sdk/src/guides/mod.rs index 68ca9317b907..3120f2533109 100644 --- a/docs/sdk/src/guides/mod.rs +++ b/docs/sdk/src/guides/mod.rs @@ -1,7 +1,7 @@ //! # Polkadot SDK Docs Guides //! //! This crate contains a collection of guides that are foundational to the developers of -//! Polkadot SDK. They common user-journeys that are traversed in the Polkadot ecosystem. +//! Polkadot SDK. They are common user-journeys that are traversed in the Polkadot ecosystem. /// Write your first simple pallet, learning the most most basic features of FRAME along the way. pub mod your_first_pallet; diff --git a/docs/sdk/src/guides/your_first_pallet/mod.rs b/docs/sdk/src/guides/your_first_pallet/mod.rs index 855b390aba9d..24eada44a83a 100644 --- a/docs/sdk/src/guides/your_first_pallet/mod.rs +++ b/docs/sdk/src/guides/your_first_pallet/mod.rs @@ -12,12 +12,11 @@ //! > This guide will build a currency pallet from scratch using only the lowest primitives of //! > FRAME, and is mainly intended for education, not *applicability*. For example, almost all //! > FRAME-based runtimes use various techniques to re-use a currency pallet instead of writing -//! > one. Further advance FRAME related topics are discussed in [`crate::reference_docs`]. +//! > one. Further advanced FRAME related topics are discussed in [`crate::reference_docs`]. //! //! ## Topics Covered //! -//! The following FRAME topics are covered in this guide. See the documentation of the -//! associated items to know more. +//! The following FRAME topics are covered in this guide: //! //! - [Storage](frame::pallet_macros::storage) //! - [Call](frame::pallet_macros::call) @@ -50,8 +49,10 @@ //! //! 
One should be a mapping from account-ids to a balance type, and one value that is the total //! issuance. -// -// For the rest of this guide, we will opt for a balance type of u128. +//! +//! > For the rest of this guide, we will opt for a balance type of `u128`. For the sake of +//! > simplicity, we are hardcoding this type. In a real pallet it is best practice to define it as a +//! > generic bounded type in the `Config` trait, and then specify it in the implementation. #![doc = docify::embed!("./src/guides/your_first_pallet/mod.rs", Balance)] //! //! The definition of these two storage items, based on [`frame::pallet_macros::storage`] details, @@ -160,13 +161,12 @@ #![doc = docify::embed!("./src/guides/your_first_pallet/mod.rs", first_test)] //! //! In the first test, we simply assert that there is no total issuance, and no balance associated -//! with account `1`. Then, we mint some balance into `1`, and re-check. +//! with Alice's account. Then, we mint some balance into Alice's account, and re-check. //! //! As noted above, the `T::AccountId` is now `u64`. Moreover, `Runtime` is replacing `<T>`. //! This is why for example you see `Balances::<Runtime>::get(..)`. Finally, notice that the //! dispatchables are simply functions that can be called on top of the `Pallet` struct. -//! -//! TODO: hard to explain exactly `RuntimeOrigin::signed(1)` at this point. +// TODO: hard to explain exactly `RuntimeOrigin::signed(ALICE)` at this point. //! //! Congratulations! You have written your first pallet and tested it! Next, we learn a few optional //! steps to improve our pallet. @@ -186,8 +186,8 @@ #![doc = docify::embed!("./src/guides/your_first_pallet/mod.rs", StateBuilder)] //! //! This struct is meant to contain the same list of accounts and balances that we want to have at -//! the beginning of each block. We hardcoded this to `let accounts = vec![(1, 100), (2, 100)];` so -//! far. Then, if desired, we attach a default value for this struct. +//! the beginning of each block. We hardcoded this to `let accounts = vec![(ALICE, 100), (BOB, 100)];` +//! so far. Then, if desired, we attach a default value for this struct. #![doc = docify::embed!("./src/guides/your_first_pallet/mod.rs", default_state_builder)] //! //! Like any other builder pattern, we attach functions to the type to mutate its internal @@ -222,7 +222,7 @@ //! "success path" of a dispatchable, and one test for each "failure path", such as: #![doc = docify::embed!("./src/guides/your_first_pallet/mod.rs", transfer_from_non_existent_fails)] //! -//! We leave it up to you to write a test that triggers to `InsufficientBalance` error. +//! We leave it up to you to write a test that triggers the `InsufficientBalance` error. //! //! ### Event and Error //! //! @@ -230,21 +230,24 @@ //! Errors. First, let's understand what each is. //! //! - **Error**: The static string-based error scheme we used so far is good for readability, but it -//! has a few drawbacks. These string literals will bloat the final wasm blob, and are relatively -//! heavy to transmit and encode/decode. Moreover, it is easy to mistype them by one character. -//! FRAME errors are exactly a solution to maintain readability, whilst fixing the drawbacks -//! mentioned. In short, we use an enum to represent different variants of our error. These -//! variants are then mapped in an efficient way (using only `u8` indices) to -//! [`sp_runtime::DispatchError::Module`] Read more about this in [`frame::pallet_macros::error`]. -//! -//! - **Event**: Events are akin to the return type of dispatchables.
They should represent what -//! happened at the end of a dispatch operation. Therefore, the convention is to use passive tense -//! for event names (eg. `SomethingHappened`). This allows other sub-systems or external parties -//! (eg. a light-node, a DApp) to listen to particular events happening, without needing to -//! re-execute the whole state transition function. -//! -//! TODO: both need to be improved a lot at the pallet-macro rust-doc level. Also my explanation -//! of event is probably not the best. +//! has a few drawbacks. The biggest problem with strings are that they are not type safe, e.g. a +//! match statement cannot be exhaustive. These string literals will bloat the final wasm blob, +//! and are relatively heavy to transmit and encode/decode. Moreover, it is easy to mistype them +//! by one character. FRAME errors are exactly a solution to maintain readability, whilst fixing +//! the drawbacks mentioned. In short, we use an enum to represent different variants of our +//! error. These variants are then mapped in an efficient way (using only `u8` indices) to +//! [`sp_runtime::DispatchError::Module`]. Read more about this in +//! [`frame::pallet_macros::error`]. +//! +//! - **Event**: Events are akin to the return type of dispatchables. They are mostly data blobs +//! emitted by the runtime to let outside world know what is happening inside the pallet. Since +//! otherwise, the outside world does not have an easy access to the state changes. They should +//! represent what happened at the end of a dispatch operation. Therefore, the convention is to +//! use passive tense for event names (eg. `SomethingHappened`). This allows other sub-systems or +//! external parties (eg. a light-node, a DApp) to listen to particular events happening, without +//! needing to re-execute the whole state transition function. +// TODO: both need to be improved a lot at the pallet-macro rust-doc level. Also my explanation +// of event is probably not the best. //! //! With the explanation out of the way, let's see how these components can be added. Both follow a //! fairly familiar syntax: normal Rust enums, with an extra `#[frame::event/error]` attribute @@ -362,7 +365,7 @@ pub mod pallet { // ensure sender has enough balance, and if so, calculate what is left after `amount`. let sender_balance = Balances::::get(&sender).ok_or("NonExistentAccount")?; if sender_balance < amount { - return Err("InsufficientBalance".into()) + return Err("InsufficientBalance".into()); } let reminder = sender_balance - amount; @@ -413,6 +416,9 @@ pub mod pallet { pub(crate) mod tests { use crate::guides::your_first_pallet::pallet::*; use frame::testing_prelude::*; + const ALICE: u64 = 1; + const BOB: u64 = 2; + const CHARLIE: u64 = 3; #[docify::export] mod runtime { @@ -447,7 +453,7 @@ pub mod pallet { #[docify::export] fn new_test_state_basic() -> TestState { let mut state = TestState::new_empty(); - let accounts = vec![(1, 100), (2, 100)]; + let accounts = vec![(ALICE, 100), (BOB, 100)]; state.execute_with(|| { for (who, amount) in &accounts { Balances::::insert(who, amount); @@ -466,7 +472,7 @@ pub mod pallet { #[docify::export(default_state_builder)] impl Default for StateBuilder { fn default() -> Self { - Self { balances: vec![(1, 100), (2, 100)] } + Self { balances: vec![(ALICE, 100), (BOB, 100)] } } } @@ -509,15 +515,19 @@ pub mod pallet { #[test] fn first_test() { TestState::new_empty().execute_with(|| { - // We expect account 1 to have no funds. 
- assert_eq!(Balances::::get(&1), None); + // We expect Alice's account to have no funds. + assert_eq!(Balances::::get(&ALICE), None); assert_eq!(TotalIssuance::::get(), None); - // mint some funds into 1 - assert_ok!(Pallet::::mint_unsafe(RuntimeOrigin::signed(1), 1, 100)); + // mint some funds into Alice's account. + assert_ok!(Pallet::::mint_unsafe( + RuntimeOrigin::signed(ALICE), + ALICE, + 100 + )); // re-check the above - assert_eq!(Balances::::get(&1), Some(100)); + assert_eq!(Balances::::get(&ALICE), Some(100)); assert_eq!(TotalIssuance::::get(), Some(100)); }) } @@ -526,9 +536,9 @@ pub mod pallet { #[test] fn state_builder_works() { StateBuilder::default().build_and_execute(|| { - assert_eq!(Balances::::get(&1), Some(100)); - assert_eq!(Balances::::get(&2), Some(100)); - assert_eq!(Balances::::get(&3), None); + assert_eq!(Balances::::get(&ALICE), Some(100)); + assert_eq!(Balances::::get(&BOB), Some(100)); + assert_eq!(Balances::::get(&CHARLIE), None); assert_eq!(TotalIssuance::::get(), Some(200)); }); } @@ -536,8 +546,8 @@ pub mod pallet { #[docify::export] #[test] fn state_builder_add_balance() { - StateBuilder::default().add_balance(3, 42).build_and_execute(|| { - assert_eq!(Balances::::get(&3), Some(42)); + StateBuilder::default().add_balance(CHARLIE, 42).build_and_execute(|| { + assert_eq!(Balances::::get(&CHARLIE), Some(42)); assert_eq!(TotalIssuance::::get(), Some(242)); }) } @@ -546,10 +556,10 @@ pub mod pallet { #[should_panic] fn state_builder_duplicate_genesis_fails() { StateBuilder::default() - .add_balance(3, 42) - .add_balance(3, 43) + .add_balance(CHARLIE, 42) + .add_balance(CHARLIE, 43) .build_and_execute(|| { - assert_eq!(Balances::::get(&3), None); + assert_eq!(Balances::::get(&CHARLIE), None); assert_eq!(TotalIssuance::::get(), Some(242)); }) } @@ -559,17 +569,21 @@ pub mod pallet { fn mint_works() { StateBuilder::default().build_and_execute(|| { // given the initial state, when: - assert_ok!(Pallet::::mint_unsafe(RuntimeOrigin::signed(1), 2, 100)); + assert_ok!(Pallet::::mint_unsafe(RuntimeOrigin::signed(ALICE), BOB, 100)); // then: - assert_eq!(Balances::::get(&2), Some(200)); + assert_eq!(Balances::::get(&BOB), Some(200)); assert_eq!(TotalIssuance::::get(), Some(300)); // given: - assert_ok!(Pallet::::mint_unsafe(RuntimeOrigin::signed(1), 3, 100)); + assert_ok!(Pallet::::mint_unsafe( + RuntimeOrigin::signed(ALICE), + CHARLIE, + 100 + )); // then: - assert_eq!(Balances::::get(&3), Some(100)); + assert_eq!(Balances::::get(&CHARLIE), Some(100)); assert_eq!(TotalIssuance::::get(), Some(400)); }); } @@ -579,19 +593,19 @@ pub mod pallet { fn transfer_works() { StateBuilder::default().build_and_execute(|| { // given the the initial state, when: - assert_ok!(Pallet::::transfer(RuntimeOrigin::signed(1), 2, 50)); + assert_ok!(Pallet::::transfer(RuntimeOrigin::signed(ALICE), BOB, 50)); // then: - assert_eq!(Balances::::get(&1), Some(50)); - assert_eq!(Balances::::get(&2), Some(150)); + assert_eq!(Balances::::get(&ALICE), Some(50)); + assert_eq!(Balances::::get(&BOB), Some(150)); assert_eq!(TotalIssuance::::get(), Some(200)); // when: - assert_ok!(Pallet::::transfer(RuntimeOrigin::signed(2), 1, 50)); + assert_ok!(Pallet::::transfer(RuntimeOrigin::signed(BOB), ALICE, 50)); // then: - assert_eq!(Balances::::get(&1), Some(100)); - assert_eq!(Balances::::get(&2), Some(100)); + assert_eq!(Balances::::get(&ALICE), Some(100)); + assert_eq!(Balances::::get(&BOB), Some(100)); assert_eq!(TotalIssuance::::get(), Some(200)); }); } @@ -602,14 +616,14 @@ pub mod pallet { 
StateBuilder::default().build_and_execute(|| { // given the the initial state, when: assert_err!( - Pallet::::transfer(RuntimeOrigin::signed(3), 1, 10), + Pallet::::transfer(RuntimeOrigin::signed(CHARLIE), ALICE, 10), "NonExistentAccount" ); // then nothing has changed. - assert_eq!(Balances::::get(&1), Some(100)); - assert_eq!(Balances::::get(&2), Some(100)); - assert_eq!(Balances::::get(&3), None); + assert_eq!(Balances::::get(&ALICE), Some(100)); + assert_eq!(Balances::::get(&BOB), Some(100)); + assert_eq!(Balances::::get(&CHARLIE), None); assert_eq!(TotalIssuance::::get(), Some(200)); }); } @@ -685,6 +699,8 @@ pub mod pallet_v2 { pub mod tests { use super::{super::pallet::tests::StateBuilder, *}; use frame::testing_prelude::*; + const ALICE: u64 = 1; + const BOB: u64 = 2; #[docify::export] pub mod runtime_v2 { @@ -717,20 +733,20 @@ pub mod pallet_v2 { StateBuilder::default().build_and_execute(|| { // skip the genesis block, as events are not deposited there and we need them for // the final assertion. - System::set_block_number(1); + System::set_block_number(ALICE); // given the the initial state, when: - assert_ok!(Pallet::::transfer(RuntimeOrigin::signed(1), 2, 50)); + assert_ok!(Pallet::::transfer(RuntimeOrigin::signed(ALICE), BOB, 50)); // then: - assert_eq!(Balances::::get(&1), Some(50)); - assert_eq!(Balances::::get(&2), Some(150)); + assert_eq!(Balances::::get(&ALICE), Some(50)); + assert_eq!(Balances::::get(&BOB), Some(150)); assert_eq!(TotalIssuance::::get(), Some(200)); // now we can also check that an event has been deposited: assert_eq!( System::read_events_for_pallet::>(), - vec![Event::Transferred { from: 1, to: 2, amount: 50 }] + vec![Event::Transferred { from: ALICE, to: BOB, amount: 50 }] ); }); } diff --git a/docs/sdk/src/lib.rs b/docs/sdk/src/lib.rs index b0abb50b52da..075d9ddaffe5 100644 --- a/docs/sdk/src/lib.rs +++ b/docs/sdk/src/lib.rs @@ -23,8 +23,6 @@ //! //! This section paints a picture over the high-level information architecture of this crate. #![doc = simple_mermaid::mermaid!("../../mermaid/IA.mmd")] -#![allow(rustdoc::invalid_html_tags)] // TODO: remove later. https://github.com/paritytech/polkadot-sdk-docs/issues/65 -#![allow(rustdoc::bare_urls)] // TODO: remove later. https://github.com/paritytech/polkadot-sdk-docs/issues/65 #![warn(rustdoc::broken_intra_doc_links)] #![warn(rustdoc::private_intra_doc_links)] diff --git a/docs/sdk/src/meta_contributing.rs b/docs/sdk/src/meta_contributing.rs index 0d3ecea46557..bff475f8e6bf 100644 --- a/docs/sdk/src/meta_contributing.rs +++ b/docs/sdk/src/meta_contributing.rs @@ -54,8 +54,8 @@ //! > high level tutorial. They should be explained in the rust-doc of the corresponding type or //! > macro. //! -//! 2. 🧘 Less is More: For reasons mentioned [above](#crate::why-rust-docs), the more concise this -//! crate is, the better. +//! 2. 🧘 Less is More: For reasons mentioned [above](#why-rust-docs), the more concise this crate +//! is, the better. //! 3. √ Don’t Repeat Yourself – DRY: A summary of the above two points. Authors should always //! strive to avoid any duplicate information. Every concept should ideally be documented in //! *ONE* place and one place only. This makes the task of maintaining topics significantly @@ -69,8 +69,7 @@ //! > what topics are already covered in this crate, and how you can build on top of the information //! > that they already pose, rather than repeating yourself**. //! -//! For more details about documenting guidelines, see: -//! +//! 
For more details see the [latest documenting guidelines](https://github.com/paritytech/polkadot-sdk/blob/master/docs/contributor/DOCUMENTATION_GUIDELINES.md). //! //! #### Example: Explaining `#[pallet::call]` //! @@ -133,14 +132,16 @@ //! compromise, but in the long term, we should work towards finding a way to maintain different //! revisions of this crate. //! -//! ## How to Build +//! ## How to Develop Locally //! -//! To build this crate properly, with with right HTML headers injected, run: +//! To view the docs specific [`crate`] locally for development, including the correct HTML headers +//! injected, run: //! -//! ```no_compile -//! RUSTDOCFLAGS="--html-in-header $(pwd)/docs/sdk/headers/toc.html" cargo doc -p polkadot-sdk-docs +//! ```sh +//! SKIP_WASM_BUILD=1 RUSTDOCFLAGS="--html-in-header $(pwd)/docs/sdk/headers/toc.html" cargo doc -p polkadot-sdk-docs --no-deps --open //! ``` //! -//! adding `--no-deps` would speed up the process while development. If even faster build time for -//! docs is needed, you can temporarily remove most of the substrate/cumulus dependencies that are -//! only used for linking purposes. +//! If even faster build time for docs is needed, you can temporarily remove most of the +//! substrate/cumulus dependencies that are only used for linking purposes. +//! +//! For more on local development, see [`crate::reference_docs::development_environment_advice`]. diff --git a/docs/sdk/src/polkadot_sdk/polkadot.rs b/docs/sdk/src/polkadot_sdk/polkadot.rs index d157a660e564..61a6877696cb 100644 --- a/docs/sdk/src/polkadot_sdk/polkadot.rs +++ b/docs/sdk/src/polkadot_sdk/polkadot.rs @@ -23,7 +23,7 @@ //! //! ## Platform //! -//! In this section, we examine what what platform Polkadot exactly provides to developers. +//! In this section, we examine what platform Polkadot exactly provides to developers. //! //! ### Polkadot White Paper //! @@ -47,10 +47,12 @@ //! Chain*"). //! * (heterogenous) Sharded Execution: Yet, each parachain is free to have its own execution logic //! (runtime), which also encompasses governance and sovereignty. Moreover, Polkadot ensures the -//! correct execution of all parachain, without having all of its validators re-execute all -//! parachain blocks. When seen from this perspective, the fact that Polkadot executes different -//! parachains means it is a platform that has fully delivered (the holy grail of) "Full Execution -//! Sharding". TODO: link to approval checking article. https://github.com/paritytech/polkadot-sdk-docs/issues/66 +//! correct execution of all parachains, without having all of its validators re-execute all +//! parachain blocks. When seen from this perspective, Polkadot achieves the ability to verify +//! the validity of the block execution of multiple parachains using the same set of validators as +//! the Relay Chain. In practice, this means that the shards (parachains) share the same economic +//! security as the Relay Chain. +//! Learn about this process called [Approval Checking](https://polkadot.network/blog/polkadot-v1-0-sharding-and-economic-security#approval-checking-and-finality). //! * A framework to build blockchains: In order to materialize the ecosystem of parachains, an easy //! blockchain framework must exist. This is [Substrate](crate::polkadot_sdk::substrate), //! [FRAME](crate::polkadot_sdk::frame_runtime) and [Cumulus](crate::polkadot_sdk::cumulus). @@ -60,7 +62,12 @@ //! //! > Note that the interoperability promised by Polkadot is unparalleled in that any two parachains //! 
> connected to Polkadot have the same security and can have much better guarantees about the -//! > security of the recipient of any message. TODO: weakest link in bridges systems. https://github.com/paritytech/polkadot-sdk-docs/issues/66 +//! > security of the recipient of any message. +//! > Bridges enable transaction and information flow between different consensus systems, crucial +//! > for Polkadot's multi-chain architecture. However, they can become the network's most +//! > vulnerable points. If a bridge's security measures are weaker than those of the connected +//! > blockchains, it poses a significant risk. Attackers might exploit these weaknesses to launch +//! > attacks such as theft or disruption of services. //! //! Polkadot delivers the above vision, alongside a flexible means for parachains to schedule //! themselves with the Relay Chain. To achieve this, Polkadot has been developed with an @@ -83,5 +90,5 @@ //! Agile periodic-sale-based model for assigning Coretime on the Polkadot Ubiquitous Computer. //! - RFC#5: [Coretime-interface](https://github.com/polkadot-fellows/RFCs/blob/main/text/0005-coretime-interface.md): //! Interface for manipulating the usage of cores on the Polkadot Ubiquitous Computer. -// TODO: add more context and explanations about Polkadot as the Ubiquitous Computer and related -// tech. https://github.com/paritytech/polkadot-sdk-docs/issues/66 +//! +//! Learn more about [Polkadot as a Computational Resource](https://wiki.polkadot.network/docs/polkadot-direction#polkadot-as-a-computational-resource). diff --git a/docs/sdk/src/polkadot_sdk/smart_contracts.rs b/docs/sdk/src/polkadot_sdk/smart_contracts.rs index a4916f9c9218..4052c785f417 100644 --- a/docs/sdk/src/polkadot_sdk/smart_contracts.rs +++ b/docs/sdk/src/polkadot_sdk/smart_contracts.rs @@ -1,9 +1,9 @@ //! # Smart Contracts //! -//! TODO: @cmichi https://github.com/paritytech/polkadot-sdk-docs/issues/56 +//! TODO: @cmichi //! //! - WASM and EVM based, pallet-contracts and pallet-evm. //! - single-daap-chain, transition from ink! to FRAME. //! - Link to `use.ink` //! - Link to [`crate::reference_docs::runtime_vs_smart_contract`]. -//! - https://use.ink/migrate-ink-contracts-to-polkadot-frame-parachain/ +//! - diff --git a/docs/sdk/src/polkadot_sdk/substrate.rs b/docs/sdk/src/polkadot_sdk/substrate.rs index fd172f71469f..5021c55e581f 100644 --- a/docs/sdk/src/polkadot_sdk/substrate.rs +++ b/docs/sdk/src/polkadot_sdk/substrate.rs @@ -143,7 +143,7 @@ //! - [`sc_consensus_aura`] //! - [`sc_consensus_babe`] //! - [`sc_consensus_grandpa`] -//! - [`sc_consensus_beefy`] (TODO: @adrian, add some high level docs https://github.com/paritytech/polkadot-sdk-docs/issues/57) +//! - [`sc_consensus_beefy`] (TODO: @adrian, add some high level docs ) //! - [`sc_consensus_manual_seal`] //! - [`sc_consensus_pow`] diff --git a/docs/sdk/src/polkadot_sdk/xcm.rs b/docs/sdk/src/polkadot_sdk/xcm.rs index 0d600f751c8b..fd4d7f62aa70 100644 --- a/docs/sdk/src/polkadot_sdk/xcm.rs +++ b/docs/sdk/src/polkadot_sdk/xcm.rs @@ -2,4 +2,4 @@ //! //! @KiChjang @franciscoaguirre //! TODO: RFCs, xcm-spec, the future of the repo, minimal example perhaps, forward to where actual -//! docs are hosted. https://github.com/paritytech/polkadot-sdk-docs/issues/58 +//! docs are hosted. diff --git a/docs/sdk/src/reference_docs/cli.rs b/docs/sdk/src/reference_docs/cli.rs index 9274e86b04ef..5779e0f8d049 100644 --- a/docs/sdk/src/reference_docs/cli.rs +++ b/docs/sdk/src/reference_docs/cli.rs @@ -1,7 +1,104 @@ -//! # Command Line Arguments +//! 
# Substrate CLI //! +//! Let's see some examples of typical CLI arguments used when setting up and running a +//! Substrate-based blockchain. We use the [`substrate-node-template`](https://github.com/substrate-developer-hub/substrate-node-template) +//! on these examples. //! -//! Notes: +//! #### Checking the available CLI arguments +//! ```bash +//! ./target/debug/node-template --help +//! ``` +//! - `--help`: Displays the available CLI arguments. //! -//! - Command line arguments of a typical substrate based chain, and how to find and learn them. -//! - How to extend them with your custom stuff. +//! #### Starting a Local Substrate Node in Development Mode +//! ```bash +//! ./target/release/node-template \ +//! --dev +//! ``` +//! - `--dev`: Runs the node in development mode, using a pre-defined development chain +//! specification. +//! This mode ensures a fresh state by deleting existing data on restart. +//! +//! #### Generating Custom Chain Specification +//! ```bash +//! ./target/debug/node-template \ +//! build-spec \ +//! --disable-default-bootnode \ +//! --chain local \ +//! > customSpec.json +//! ``` +//! +//! - `build-spec`: A subcommand to generate a chain specification file. +//! - `--disable-default-bootnode`: Disables the default bootnodes in the node template. +//! - `--chain local`: Indicates the chain specification is for a local development chain. +//! - `> customSpec.json`: Redirects the output into a customSpec.json file. +//! +//! #### Converting Chain Specification to Raw Format +//! ```bash +//! ./target/debug/node-template build-spec \ +//! --chain=customSpec.json \ +//! --raw \ +//! --disable-default-bootnode \ +//! > customSpecRaw.json +//! ``` +//! +//! - `--chain=customSpec.json`: Uses the custom chain specification as input. +//! - `--disable-default-bootnode`: Disables the default bootnodes in the node template. +//! - `--raw`: Converts the chain specification into a raw format with encoded storage keys. +//! - `> customSpecRaw.json`: Outputs to customSpecRaw.json. +//! +//! #### Starting the First Node in a Private Network +//! ```bash +//! ./target/debug/node-template \ +//! --base-path /tmp/node01 \ +//! --chain ./customSpecRaw.json \ +//! --port 30333 \ +//! --ws-port 9945 \ +//! --rpc-port 9933 \ +//! --telemetry-url "wss://telemetry.polkadot.io/submit/ 0" \ +//! --validator \ +//! --rpc-methods Unsafe \ +//! --name MyNode01 +//! ``` +//! +//! - `--base-path`: Sets the directory for node data. +//! - `--chain`: Specifies the chain specification file. +//! - `--port`: TCP port for peer-to-peer communication. +//! - `--ws-port`: WebSocket port for RPC. +//! - `--rpc-port`: HTTP port for JSON-RPC. +//! - `--telemetry-url`: Endpoint for sending telemetry data. +//! - `--validator`: Indicates the node’s participation in block production. +//! - `--rpc-methods Unsafe`: Allows potentially unsafe RPC methods. +//! - `--name`: Sets a human-readable name for the node. +//! +//! #### Adding a Second Node to the Network +//! ```bash +//! ./target/release/node-template \ +//! --base-path /tmp/bob \ +//! --chain local \ +//! --bob \ +//! --port 30334 \ +//! --rpc-port 9946 \ +//! --telemetry-url "wss://telemetry.polkadot.io/submit/ 0" \ +//! --validator \ +//! --bootnodes /ip4/127.0.0.1/tcp/30333/p2p/12D3KooWEyoppNCUx8Yx66oV9fJnriXwCcXwDDUA2kj6vnc6iDEp +//! ``` +//! +//! - `--base-path`: Sets the directory for node data. +//! - `--chain`: Specifies the chain specification file. +//! - `--bob`: Initializes the node with the session keys of the "Bob" account. +//! 
- `--port`: TCP port for peer-to-peer communication. +//! - `--rpc-port`: HTTP port for JSON-RPC. +//! - `--telemetry-url`: Endpoint for sending telemetry data. +//! - `--validator`: Indicates the node’s participation in block production. +//! - `--bootnodes`: Specifies the address of the first node for peer discovery. Nodes should find +//! each other using mDNS. This command needs to be used if they don't find each other. +//! +//! --- +//! +//! > If you are interested in learning how to extend the CLI with your custom arguments, you can +//! > check out the [Customize your Substrate chain CLI](https://www.youtube.com/watch?v=IVifko1fqjw) +//! > seminar. +//! > Please note that the seminar is based on an older version of Substrate, and [Clap](https://docs.rs/clap/latest/clap/) +//! > is now used instead of [StructOpt](https://docs.rs/structopt/latest/structopt/) for parsing +//! > CLI arguments. diff --git a/docs/sdk/src/reference_docs/development_environment_advice.rs b/docs/sdk/src/reference_docs/development_environment_advice.rs new file mode 100644 index 000000000000..27dd46386047 --- /dev/null +++ b/docs/sdk/src/reference_docs/development_environment_advice.rs @@ -0,0 +1,113 @@ +//! # Development Environment Advice +//! +//! Large Rust projects are known for sometimes long compile times and sluggish dev tooling, and +//! polkadot-sdk is no exception. +//! +//! This page contains some advice to improve your workflow when using common tooling. +//! +//! ## Rust Analyzer Configuration +//! +//! [Rust Analyzer](https://rust-analyzer.github.io/) is the defacto [LSP](https://langserver.org/) for Rust. Its default +//! settings are fine for smaller projects, but not well optimised for polkadot-sdk. +//! +//! Below is a suggested configuration for VSCode: +//! +//! ```json +//! { +//! // Use a separate target dir for Rust Analyzer. Helpful if you want to use Rust +//! // Analyzer and cargo on the command line at the same time. +//! "rust-analyzer.rust.analyzerTargetDir": "target/vscode-rust-analyzer", +//! // Improve stability +//! "rust-analyzer.server.extraEnv": { +//! "CHALK_OVERFLOW_DEPTH": "100000000", +//! "CHALK_SOLVER_MAX_SIZE": "10000000" +//! }, +//! // Check feature-gated code +//! "rust-analyzer.cargo.features": "all", +//! "rust-analyzer.cargo.extraEnv": { +//! // Skip building WASM, there is never need for it here +//! "SKIP_WASM_BUILD": "1" +//! }, +//! // Don't expand some problematic proc_macros +//! "rust-analyzer.procMacro.ignored": { +//! "async-trait": ["async_trait"], +//! "napi-derive": ["napi"], +//! "async-recursion": ["async_recursion"], +//! "async-std": ["async_std"] +//! }, +//! // Use nightly formatting. +//! // See the polkadot-sdk CI job that checks formatting for the current version used in +//! // polkadot-sdk. +//! "rust-analyzer.rustfmt.extraArgs": ["+nightly-2023-11-01"], +//! } +//! ``` +//! +//! and the same in Lua for `neovim/nvim-lspconfig`: +//! +//! ```lua +//! ["rust-analyzer"] = { +//! rust = { +//! # Use a separate target dir for Rust Analyzer. Helpful if you want to use Rust +//! # Analyzer and cargo on the command line at the same time. +//! analyzerTargetDir = "target/nvim-rust-analyzer", +//! }, +//! server = { +//! # Improve stability +//! extraEnv = { +//! ["CHALK_OVERFLOW_DEPTH"] = "100000000", +//! ["CHALK_SOLVER_MAX_SIZE"] = "100000000", +//! }, +//! }, +//! cargo = { +//! # Check feature-gated code +//! features = "all", +//! extraEnv = { +//! # Skip building WASM, there is never need for it here +//! ["SKIP_WASM_BUILD"] = "1", +//! 
}, +//! }, +//! procMacro = { +//! # Don't expand some problematic proc_macros +//! ignored = { +//! ["async-trait"] = { "async_trait" }, +//! ["napi-derive"] = { "napi" }, +//! ["async-recursion"] = { "async_recursion" }, +//! ["async-std"] = { "async_std" }, +//! }, +//! }, +//! rustfmt = { +//! # Use nightly formatting. +//! # See the polkadot-sdk CI job that checks formatting for the current version used in +//! # polkadot-sdk. +//! extraArgs = { "+nightly-2023-11-01" }, +//! }, +//! }, +//! ``` +//! +//! For the full set of configuration options see . +//! +//! ## Cargo Usage +//! +//! ### Using `--package` (a.k.a. `-p`) +//! +//! polkadot-sdk is a monorepo containing many crates. When you run a cargo command without +//! `-p`, you will almost certainly compile crates outside of the scope you are working. +//! +//! Instead, you should identify the name of the crate you are working on by checking the `name` +//! field in the closest `Cargo.toml` file. Then, use `-p` with your cargo commands to only compile +//! that crate. +//! +//! ### `SKIP_WASM_BUILD=1` environment variable +//! +//! When cargo touches a runtime crate, by default it will also compile the WASM binary, +//! approximately doubling the compilation time. +//! +//! The WASM binary is usually not needed, especially when running `check` or `test`. To skip the +//! WASM build, set the `SKIP_WASM_BUILD` environment variable to `1`. For example: +//! `SKIP_WASM_BUILD=1 cargo check -p frame-support`. +//! +//! ### Cargo Remote +//! +//! If you have a powerful remote server available, you may consider using +//! [cargo-remote](https://github.com/sgeisler/cargo-remote) to execute cargo commands on it, +//! freeing up local resources for other tasks like `rust-analyzer`. diff --git a/docs/sdk/src/reference_docs/fee_less_runtime.rs b/docs/sdk/src/reference_docs/fee_less_runtime.rs index 43a761a6c52c..1213c2628253 100644 --- a/docs/sdk/src/reference_docs/fee_less_runtime.rs +++ b/docs/sdk/src/reference_docs/fee_less_runtime.rs @@ -9,4 +9,4 @@ //! and some kind of rate limiting (eg. any account gets 5 free tx per day). //! - The rule of thumb is that as long as the unsigned validate does one storage read, similar to //! nonce, it is fine. -//! - This could possibly be a good tutorial/template, rather than a reference doc. +//! - This could possibly be a good guide/template, rather than a reference doc. diff --git a/docs/sdk/src/reference_docs/frame_benchmarking_weight.rs b/docs/sdk/src/reference_docs/frame_benchmarking_weight.rs index f65f4174ec66..db77547a4bf0 100644 --- a/docs/sdk/src/reference_docs/frame_benchmarking_weight.rs +++ b/docs/sdk/src/reference_docs/frame_benchmarking_weight.rs @@ -20,4 +20,4 @@ //! - how to write benchmarks, how you must think of worst case. //! - how to run benchmarks. //! -//! - https://www.shawntabrizi.com/substrate/substrate-storage-deep-dive/ +//! - diff --git a/docs/sdk/src/reference_docs/frame_currency.rs b/docs/sdk/src/reference_docs/frame_currency.rs index ba181373062f..6987d51aec82 100644 --- a/docs/sdk/src/reference_docs/frame_currency.rs +++ b/docs/sdk/src/reference_docs/frame_currency.rs @@ -5,4 +5,4 @@ //! - History, `Currency` trait. //! - `Hold` and `Freeze` with diagram. //! - `HoldReason` and `FreezeReason` -//! - This footgun: https://github.com/paritytech/polkadot-sdk/pull/1900#discussion_r1363783609 +//! 
- This footgun: diff --git a/docs/sdk/src/reference_docs/light_nodes.rs b/docs/sdk/src/reference_docs/light_nodes.rs index a6a0a828ef58..d6670bf03ab1 100644 --- a/docs/sdk/src/reference_docs/light_nodes.rs +++ b/docs/sdk/src/reference_docs/light_nodes.rs @@ -3,5 +3,5 @@ //! //! Notes: should contain only high level information about light clients, then link to how to set //! it up in PAPI and SubXT -//! https://docs.substrate.io/learn/light-clients-in-substrate-connect/ -//! https://github.com/substrate-developer-hub/substrate-front-end-template/pull/277 +//! +//! diff --git a/docs/sdk/src/reference_docs/mod.rs b/docs/sdk/src/reference_docs/mod.rs index 44284394000d..c16122ee4287 100644 --- a/docs/sdk/src/reference_docs/mod.rs +++ b/docs/sdk/src/reference_docs/mod.rs @@ -69,6 +69,9 @@ pub mod frame_system_accounts; /// Learn about the currency-related abstractions provided in FRAME. pub mod frame_currency; +/// Advice for configuring your development environment for Substrate development. +pub mod development_environment_advice; + /// Learn about benchmarking and weight. // TODO: @shawntabrizi @ggwpez https://github.com/paritytech/polkadot-sdk-docs/issues/50 pub mod frame_benchmarking_weight; diff --git a/docs/sdk/src/reference_docs/runtime_vs_smart_contract.rs b/docs/sdk/src/reference_docs/runtime_vs_smart_contract.rs index 7f96fa1800ae..099512cf4ee1 100644 --- a/docs/sdk/src/reference_docs/runtime_vs_smart_contract.rs +++ b/docs/sdk/src/reference_docs/runtime_vs_smart_contract.rs @@ -1,6 +1,216 @@ -//! Runtime vs. Smart Contracts +//! # Runtime vs. Smart Contracts //! -//! Notes: +//! *TL;DR*: If you need to create a *Blockchain*, then write a runtime. If you need to create a +//! *DApp*, then write a Smart Contract. //! -//! Why one can be weighed, and one MUST be metered. -//! https://forum.polkadot.network/t/where-contracts-fail-and-runtimes-chains-are-needed/4464/3 +//! This is a comparative analysis of Substrate-based Runtimes and Smart Contracts, highlighting +//! their main differences. Our aim is to equip you with a clear understanding of how these two +//! methods of deploying on-chain logic diverge in their design, usage, and implications. +//! +//! Both Runtimes and Smart Contracts serve distinct purposes. Runtimes offer deep customization for +//! blockchain development, while Smart Contracts provide a more accessible approach for +//! decentralized applications. Understanding their differences is crucial in choosing the right +//! approach for a specific solution. +//! +//! ## Substrate +//! Substrate is a modular framework that enables the creation of purpose-specific blockchains. In +//! the Polkadot ecosystem you can find two distinct approaches for on-chain code execution: +//! [Runtime Development](#runtime-in-substrate) and [Smart Contracts](#smart-contracts). +//! +//! #### Smart Contracts in Substrate +//! Smart Contracts are autonomous, programmable constructs deployed on the blockchain. +//! In [FRAME](frame), Smart Contracts infrastructure is implemented by the +//! [`pallet_contracts`](../../../pallet_contracts/index.html) for WASM-based contracts or the +//! [`pallet_evm`](../../../pallet_evm/index.html) for EVM-compatible contracts. These pallets +//! enable Smart Contract developers to build applications and systems on top of a Substrate-based +//! blockchain. +//! +//! #### Runtime in Substrate +//! The Runtime is the state transition function of a Substrate-based blockchain. It defines the +//! 
rules for processing transactions and blocks, essentially governing the behavior and
+//! capabilities of a blockchain.
+//!
+//! ## Comparative Table
+//!
+//! | Aspect | Runtime | Smart Contracts |
+//! |--------|---------|-----------------|
+//! | **Design Philosophy** | Core logic of a blockchain, allowing broad and deep customization. | Designed for DApps deployed on the blockchain runtime. |
+//! | **Development Complexity** | Requires in-depth knowledge of Rust and Substrate. Suitable for complex blockchain architectures. | Easier to develop with knowledge of Smart Contract languages like Solidity or [ink!](https://use.ink/). |
+//! | **Upgradeability and Flexibility** | Offers comprehensive upgradeability with migration logic and on-chain governance, allowing modifications to the entire blockchain logic without hard forks. | Less flexible in upgrade migrations but offers more straightforward deployment and iteration. |
+//! | **Performance and Efficiency** | More efficient, optimized for the specific needs of the blockchain. | Can be less efficient due to its generic nature (e.g. the overhead of a virtual machine). |
+//! | **Security Considerations** | Security flaws can affect the entire blockchain. | Security risks are usually localized to the individual contract. |
+//! | **Weighing and Metering** | Operations can be weighed, allowing for precise benchmarking. | Execution is metered, allowing for measurement of resource consumption. |
+//!
+//! We will now explore these differences in more detail.
+//!
+//! ## Design Philosophy
+//! Runtimes and Smart Contracts are designed for different purposes. Runtimes are the core logic
+//! of a blockchain, while Smart Contracts are designed for DApps on top of the blockchain.
+//! Runtimes can be more complex, but also more flexible and efficient, while Smart Contracts are
+//! easier to develop and deploy.
+//!
+//! #### Runtime Design Philosophy
+//! - **Core Blockchain Logic**: Runtimes are essentially the backbone of a blockchain. They define
+//! the fundamental rules, operations, and state transitions of the blockchain network.
+//! - **Broad and Deep Customization**: Runtimes allow for extensive customization and flexibility.
+//! Developers can tailor the most fundamental aspects of the blockchain, from introducing an
+//! efficient transaction fee model to eliminating transaction fees completely. This level of
+//! control is essential for creating specialized or application-specific blockchains.
+//!
+//! #### Smart Contract Design Philosophy
+//! - **DApps Development**: Smart contracts are designed primarily for developing DApps. They
+//! operate on top of the blockchain's infrastructure.
+//! - **Modularity and Isolation**: Smart contracts offer a more modular approach. Each contract is
+//! an isolated piece of code, executing predefined operations when triggered. This isolation
+//! simplifies development and enhances security, as flaws in one contract do not directly
+//! compromise the entire network.
+//!
+//! ## Development Complexity
+//! Runtimes and Smart Contracts differ in their development complexity, largely due to their
+//! differing purposes and technical requirements.
+//!
+//! #### Runtime Development Complexity
+//! - **In-depth Knowledge Requirements**: Developing a Runtime in Substrate requires a
+//!
comprehensive understanding of Rust, Substrate's framework, and blockchain principles.
+//! - **Complex Blockchain Architectures**: Runtime development is suitable for creating complex
+//! blockchain architectures. Developers must consider aspects like security, scalability, and
+//! network efficiency.
+//!
+//! #### Smart Contract Development Complexity
+//! - **Accessibility**: Smart Contract development is generally more accessible, especially for
+//! those already familiar with programming concepts. Knowledge of smart contract-specific
+//! languages like Solidity or ink! is required.
+//! - **Focused on Application Logic**: The development here is focused on the application logic
+//! only. This includes writing functions that execute when certain conditions are met, managing
+//! state within the contract, and ensuring security against common Smart Contract
+//! vulnerabilities.
+//!
+//! ## Upgradeability and Flexibility
+//! Runtimes and Smart Contracts differ significantly in how they handle upgrades and flexibility,
+//! each with its own advantages and constraints. Runtimes are more flexible, allowing for writing
+//! migration logic for upgrades, while Smart Contracts are less flexible but offer easier
+//! deployment and iteration.
+//!
+//! #### Runtime Upgradeability and Flexibility
+//! - **Migration Logic**: One of the key strengths of runtime development is the ability to define
+//! migration logic. This allows developers to implement changes in the state or structure of the
+//! blockchain during an upgrade. Such migrations can adapt the existing state to fit new
+//! requirements or features seamlessly.
+//! - **On-Chain Governance**: Upgrades in a Runtime environment are typically governed on-chain,
+//! involving validators or a governance mechanism. This allows for a democratic and transparent
+//! process for making substantial changes to the blockchain.
+//! - **Broad Impact of Changes**: Changes made in Runtime affect the entire blockchain. This gives
+//! developers the power to introduce significant improvements or changes but also necessitates a
+//! high level of responsibility and scrutiny; we discuss this further in the [Security
+//! Considerations](#security-considerations) section.
+//!
+//! #### Smart Contract Upgradeability and Flexibility
+//! - **Deployment and Iteration**: Smart Contracts, by nature, are designed for more
+//! straightforward deployment and iteration. Developers can quickly deploy contracts.
+//! - **Contract Code Updates**: Although typically immutable once deployed, Smart Contracts can be
+//! upgraded, but they lack migration logic. The [pallet_contracts](../../../pallet_contracts/index.html)
+//! allows contracts to be upgraded by exposing the `set_code` dispatchable. More details on this
+//! can be found in the [ink! documentation on upgradeable contracts](https://use.ink/5.x/basics/upgradeable-contracts).
+//! - **Isolated Impact**: Upgrades or changes to a smart contract generally impact only that
+//! contract and its users, unlike Runtime upgrades that have a network-wide effect.
+//! - **Simplicity and Rapid Development**: The development cycle for Smart Contracts is usually
+//! faster and less complex than Runtime development, allowing for rapid prototyping and
+//! deployment.
+//!
+//! ## Performance and Efficiency
+//! Runtimes and Smart Contracts have distinct characteristics in terms of performance and
+//! efficiency due to their inherent design and operational contexts. Runtimes are more efficient
+//!
and optimized for specific needs, while Smart Contracts are more generic and less efficient. +//! +//! #### Runtime Performance and Efficiency +//! - **Optimized for Specific Needs**: Runtime modules in Substrate are tailored to meet the +//! specific needs of the blockchain. They are integrated directly into the blockchain's core, +//! allowing them to operate with high efficiency and minimal overhead. +//! - **Direct Access to Blockchain State**: Runtime has direct access to the blockchain's state. +//! This direct access enables more efficient data processing and transaction handling, as there +//! is no additional layer between the runtime logic and the blockchain's core. +//! - **Resource Management**: Resource management is integral to runtime development to ensure that +//! the blockchain operates smoothly and efficiently. +//! +//! #### Smart Contract Performance and Efficiency +//! - **Generic Nature and Overhead**: Smart Contracts, particularly those running in virtual +//! machine environments, can be less efficient due to the generic nature of their execution +//! environment. The overhead of the virtual machine can lead to increased computational and +//! resource costs. +//! - **Isolation and Security Constraints**: Smart Contracts operate in an isolated environment to +//! ensure security and prevent unwanted interactions with the blockchain's state. This isolation, +//! while crucial for security, can introduce additional computational overhead. +//! - **Gas Mechanism and Metering**: The gas mechanism in Smart Contracts, used for metering +//! computational resources, ensures that contracts don't consume excessive resources. However, +//! this metering itself requires computational power, adding to the overall cost of contract +//! execution. +//! +//! ## Security Considerations +//! These two methodologies, while serving different purposes, come with their own unique security +//! considerations. +//! +//! #### Runtime Security Aspects +//! Runtimes, being at the core of blockchain functionality, have profound implications for the +//! security of the entire network: +//! +//! - **Broad Impact**: Security flaws in the runtime can compromise the entire blockchain, +//! affecting all network participants. +//! - **Governance and Upgradeability**: Runtime upgrades, while powerful, need rigorous governance +//! and testing to ensure security. Improperly executed upgrades can introduce vulnerabilities or +//! disrupt network operations. +//! - **Complexity and Expertise**: Developing and maintaining runtime requires a higher level of +//! expertise in blockchain architecture and security, as mistakes can be far-reaching. +//! +//! #### Smart Contract Security Aspects +//! Smart contracts, while more isolated, bring their own set of security challenges: +//! +//! - **Isolated Impact**: Security issues in a smart contract typically affect the contract itself +//! and its users, rather than the whole network. +//! - **Contract-specific Risks**: Common issues like reentrancy +//! attacks, improper handling of external calls, and gas limit vulnerabilities are specific to +//! smart contract development. +//! - **Permissionless Deployment**: Since anyone can deploy a smart contract, +//! the ecosystem is more open to potentially malicious or vulnerable code. +//! +//! ## Weighing and Metering +//! Weighing and metering are mechanisms designed to limit the resources used by external actors. +//! 
However, there are fundamental differences in how these resources are handled in FRAME-based
+//! Runtimes and in Smart Contracts: Runtime operations are weighed, while Smart Contract
+//! executions must be metered.
+//!
+//! #### Weighing
+//! In FRAME-based Runtimes, operations are *weighed*. This means that each operation in the Runtime
+//! has a fixed upper cost, known in advance, determined through
+//! [benchmarking](crate::reference_docs::frame_benchmarking_weight). Weighing is practical here
+//! because:
+//!
+//! - *Predictability*: Runtime operations are part of the blockchain's core logic, which is static
+//! until an upgrade occurs. This predictability allows for precise
+//! [benchmarking](crate::reference_docs::frame_benchmarking_weight).
+//! - *Prevention of Abuse*: By having a fixed upper cost that corresponds to the worst-case
+//! complexity scenario of its execution (and a mechanism to refund unused weight), it becomes
+//! infeasible for an attacker to create transactions that could unpredictably consume excessive
+//! resources.
+//!
+//! #### Metering
+//! For Smart Contracts, resource consumption is metered. This is essential due to:
+//!
+//! - **Untrusted Nature**: Unlike Runtime operations, Smart Contracts can be deployed by any user,
+//! and their behavior isn't known in advance. Metering dynamically measures resource consumption
+//! as the contract executes.
+//! - **Safety Against Infinite Loops**: Metering protects the blockchain from poorly designed
+//! contracts that might run into infinite loops, consuming an indefinite amount of resources.
+//!
+//! #### Implications for Developers and Users
+//! - **For Runtime Developers**: Understanding the cost of each operation is essential. Misjudging
+//! the weight of operations can lead to network congestion or vulnerability exploitation.
+//! - **For Smart Contract Developers**: Being mindful of the gas cost associated with contract
+//! execution is crucial. Efficiently written contracts save costs and are less likely to hit gas
+//! limits, ensuring smoother execution on the blockchain.
diff --git a/prdoc/pr_1234.prdoc b/prdoc/1.3.0/pr_1234.prdoc
similarity index 100%
rename from prdoc/pr_1234.prdoc
rename to prdoc/1.3.0/pr_1234.prdoc
diff --git a/prdoc/pr_1255.prdoc b/prdoc/1.3.0/pr_1255.prdoc
similarity index 100%
rename from prdoc/pr_1255.prdoc
rename to prdoc/1.3.0/pr_1255.prdoc
diff --git a/prdoc/pr_1818.prdoc b/prdoc/1.3.0/pr_1818.prdoc
similarity index 100%
rename from prdoc/pr_1818.prdoc
rename to prdoc/1.3.0/pr_1818.prdoc
diff --git a/prdoc/pr_1873.prdoc b/prdoc/1.3.0/pr_1873.prdoc
similarity index 100%
rename from prdoc/pr_1873.prdoc
rename to prdoc/1.3.0/pr_1873.prdoc
diff --git a/prdoc/pr_1913.prdoc b/prdoc/1.3.0/pr_1913.prdoc
similarity index 100%
rename from prdoc/pr_1913.prdoc
rename to prdoc/1.3.0/pr_1913.prdoc
diff --git a/prdoc/pr_1921.prdoc b/prdoc/1.3.0/pr_1921.prdoc
similarity index 100%
rename from prdoc/pr_1921.prdoc
rename to prdoc/1.3.0/pr_1921.prdoc
diff --git a/prdoc/1.3.0/readme.md b/prdoc/1.3.0/readme.md
new file mode 100644
index 000000000000..3d74fa34247c
--- /dev/null
+++ b/prdoc/1.3.0/readme.md
@@ -0,0 +1,2 @@
+Version 1.3.0 does not support `prddoc` yet.
+Some prdoc files are provided but the list is NOT complete.
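To make the weighing side of the comparison above concrete, here is a minimal, hypothetical FRAME pallet sketch. The pallet, the `do_something` call, and the hard-coded `Weight::from_parts(10_000, 0)` value are illustrative assumptions only; production runtimes derive call weights from generated benchmarking code (a `WeightInfo` implementation) rather than constants.

```rust
// Minimal sketch of a "weighed" dispatchable: the upper-bound cost is declared up front
// via `#[pallet::weight(..)]`, so it is known before the call is dispatched.
#[frame_support::pallet]
pub mod pallet {
    use frame_support::pallet_prelude::*;
    use frame_system::pallet_prelude::*;

    #[pallet::pallet]
    pub struct Pallet<T>(_);

    #[pallet::config]
    pub trait Config: frame_system::Config {}

    #[pallet::call]
    impl<T: Config> Pallet<T> {
        /// A no-op call with an illustrative, hard-coded weight.
        #[pallet::call_index(0)]
        #[pallet::weight(Weight::from_parts(10_000, 0))] // 10_000 ref-time, 0 proof-size
        pub fn do_something(origin: OriginFor<T>) -> DispatchResult {
            // Fees are charged against the declared weight; unused weight can be refunded
            // by returning a `DispatchResultWithPostInfo` carrying the actual weight instead.
            let _who = ensure_signed(origin)?;
            Ok(())
        }
    }
}
```

The point of the sketch is that the cost ceiling is attached to the call declaratively and is known before dispatch, which is exactly what makes weighing, as opposed to runtime metering, possible.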
diff --git a/prdoc/pr_1178.prdoc b/prdoc/1.4.0/pr_1178.prdoc similarity index 100% rename from prdoc/pr_1178.prdoc rename to prdoc/1.4.0/pr_1178.prdoc diff --git a/prdoc/pr_1246.prdoc b/prdoc/1.4.0/pr_1246.prdoc similarity index 100% rename from prdoc/pr_1246.prdoc rename to prdoc/1.4.0/pr_1246.prdoc diff --git a/prdoc/1.4.0/pr_1256.prdoc b/prdoc/1.4.0/pr_1256.prdoc new file mode 100644 index 000000000000..f486786fec35 --- /dev/null +++ b/prdoc/1.4.0/pr_1256.prdoc @@ -0,0 +1,19 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "`chain-spec`: getting ready for native-runtime-free world" + +doc: + - audience: Node Dev + description: | + - [`ChainSpec::from_genesis`](https://github.com/paritytech/polkadot-sdk/blob/3df6b4d00eb310900de6f4858114baf68239412c/substrate/client/chain-spec/src/chain_spec.rs#L525) becomes deprecated in favor of [`ChainSpec::builder()`](https://github.com/paritytech/polkadot-sdk/blob/3df6b4d00eb310900de6f4858114baf68239412c/substrate/client/chain-spec/src/chain_spec.rs#L432), + - The signature of [`ChainSpec::from_genesis`] method was changed by extending it with `code` argument. + +migrations: + db: [] + + runtime: [] + +crates: [] + +host_functions: [] diff --git a/prdoc/pr_1805.prdoc b/prdoc/1.4.0/pr_1805.prdoc similarity index 100% rename from prdoc/pr_1805.prdoc rename to prdoc/1.4.0/pr_1805.prdoc diff --git a/prdoc/pr_1926.prdoc b/prdoc/1.4.0/pr_1926.prdoc similarity index 100% rename from prdoc/pr_1926.prdoc rename to prdoc/1.4.0/pr_1926.prdoc diff --git a/prdoc/pr_2086.prdoc b/prdoc/1.4.0/pr_2086.prdoc similarity index 100% rename from prdoc/pr_2086.prdoc rename to prdoc/1.4.0/pr_2086.prdoc diff --git a/prdoc/pr_2107.prdoc b/prdoc/1.4.0/pr_2107.prdoc similarity index 100% rename from prdoc/pr_2107.prdoc rename to prdoc/1.4.0/pr_2107.prdoc diff --git a/prdoc/pr_2165.prdoc b/prdoc/1.4.0/pr_2165.prdoc similarity index 100% rename from prdoc/pr_2165.prdoc rename to prdoc/1.4.0/pr_2165.prdoc diff --git a/prdoc/1.4.0/readme.md b/prdoc/1.4.0/readme.md new file mode 100644 index 000000000000..e1a1055d9185 --- /dev/null +++ b/prdoc/1.4.0/readme.md @@ -0,0 +1,2 @@ +Version 1.4.0 does not support `prddoc` yet. +Some prdoc files are provided but the list is NOT complete. 
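Relatedly, the `pr_1256` entry above deprecates `ChainSpec::from_genesis` in favor of `ChainSpec::builder()`. The sketch below is a rough orientation only: the `my_runtime` crate, the WASM blob, the genesis patch contents, and the exact builder method set are assumptions that should be checked against the `sc-chain-spec` version actually in use.

```rust
// Rough sketch of builder-style chain-spec construction, replacing the deprecated
// `ChainSpec::from_genesis`. `my_runtime` is a hypothetical runtime crate and the genesis
// patch is a placeholder.
use sc_chain_spec::ChainType;
use serde_json::json;

// In a real node this alias usually also carries custom chain-spec extensions.
type ChainSpec = sc_service::GenericChainSpec<my_runtime::RuntimeGenesisConfig>;

fn development_chain_spec(wasm_binary: &[u8]) -> ChainSpec {
    ChainSpec::builder(wasm_binary, None)
        .with_name("Development")
        .with_id("dev")
        .with_chain_type(ChainType::Development)
        // Patch only the genesis fields that differ from the runtime's defaults.
        .with_genesis_config_patch(json!({
            "balances": { "balances": [] }
        }))
        .build()
}
```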
diff --git a/prdoc/1.5.0/pr_1370_special.prdoc b/prdoc/1.5.0/pr_1370_special.prdoc new file mode 100644 index 000000000000..692a6e03170b --- /dev/null +++ b/prdoc/1.5.0/pr_1370_special.prdoc @@ -0,0 +1,9 @@ +title: Rework the event system of `sc-network` +author: altonen +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/pr_1408_prodc-introduction.prdoc b/prdoc/1.5.0/pr_1408_prodc-introduction.prdoc similarity index 89% rename from prdoc/pr_1408_prodc-introduction.prdoc rename to prdoc/1.5.0/pr_1408_prodc-introduction.prdoc index 85b4661b127e..46f56068e271 100644 --- a/prdoc/pr_1408_prodc-introduction.prdoc +++ b/prdoc/1.5.0/pr_1408_prodc-introduction.prdoc @@ -1,6 +1,9 @@ # This PR does not need a prdoc but it is provided in order to test title: PRdoc check +author: chevdor +topic: documentation + doc: - audience: Node Dev description: | diff --git a/prdoc/1.5.0/pr_1497_special.prdoc b/prdoc/1.5.0/pr_1497_special.prdoc new file mode 100644 index 000000000000..3d6035482607 --- /dev/null +++ b/prdoc/1.5.0/pr_1497_special.prdoc @@ -0,0 +1,9 @@ +title: Update tick collator for async backing +author: Sophia-Gold +topic: Tests + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_1918_special.prdoc b/prdoc/1.5.0/pr_1918_special.prdoc new file mode 100644 index 000000000000..9220ee970bcb --- /dev/null +++ b/prdoc/1.5.0/pr_1918_special.prdoc @@ -0,0 +1,9 @@ +title: Preserve artifact cache unless stale +author: eagr +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/pr_1946_prdoc_new_schema.prdoc b/prdoc/1.5.0/pr_1946_prdoc_new_schema.prdoc similarity index 90% rename from prdoc/pr_1946_prdoc_new_schema.prdoc rename to prdoc/1.5.0/pr_1946_prdoc_new_schema.prdoc index c06321777382..fae063f6b1ec 100644 --- a/prdoc/pr_1946_prdoc_new_schema.prdoc +++ b/prdoc/1.5.0/pr_1946_prdoc_new_schema.prdoc @@ -3,6 +3,9 @@ title: New PRDoc Schema +author: chevdor +topic: documentation + doc: - audience: Node Dev description: &desc | diff --git a/prdoc/1.5.0/pr_1985_special.prdoc b/prdoc/1.5.0/pr_1985_special.prdoc new file mode 100644 index 000000000000..c4305d6bb295 --- /dev/null +++ b/prdoc/1.5.0/pr_1985_special.prdoc @@ -0,0 +1,9 @@ +title: Enable parallel key scraping +author: eagr +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2001_special.prdoc b/prdoc/1.5.0/pr_2001_special.prdoc new file mode 100644 index 000000000000..366b5fddb8b8 --- /dev/null +++ b/prdoc/1.5.0/pr_2001_special.prdoc @@ -0,0 +1,9 @@ +title: "cumulus-consensus-common: block import: `delayed_best_block` flag added" +author: michalkucharczyk +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2058_special.prdoc b/prdoc/1.5.0/pr_2058_special.prdoc new file mode 100644 index 000000000000..6e3c83b09fa1 --- /dev/null +++ b/prdoc/1.5.0/pr_2058_special.prdoc @@ -0,0 +1,9 @@ +title: "PVF: Add test instructions" +author: mrcnski +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/pr_2142.prdoc b/prdoc/1.5.0/pr_2142.prdoc similarity index 93% rename from prdoc/pr_2142.prdoc rename to prdoc/1.5.0/pr_2142.prdoc index 1d3794113460..9cd1b23906d0 100644 --- a/prdoc/pr_2142.prdoc +++ b/prdoc/1.5.0/pr_2142.prdoc @@ -1,5 +1,8 @@ title: Cleanup XCMP `QueueConfigData` +author: serban300 +topic: runtime + doc: - audience: Runtime Dev description: Removes obsolete 
fields from the `QueueConfigData` structure. For the remaining fields, if they use the old defaults, we replace them with the new defaults. diff --git a/prdoc/1.5.0/pr_2167_special.prdoc b/prdoc/1.5.0/pr_2167_special.prdoc new file mode 100644 index 000000000000..7bbde7002a2a --- /dev/null +++ b/prdoc/1.5.0/pr_2167_special.prdoc @@ -0,0 +1,9 @@ +title: "add pallet nomination-pools versioned migration to kitchensink" +author: brunopgalvao +topic: Tests + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2174_special.prdoc b/prdoc/1.5.0/pr_2174_special.prdoc new file mode 100644 index 000000000000..f23d2803e962 --- /dev/null +++ b/prdoc/1.5.0/pr_2174_special.prdoc @@ -0,0 +1,9 @@ +title: "chain-spec-builder: cleanup" +author: michalkucharczyk +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2182_special.prdoc b/prdoc/1.5.0/pr_2182_special.prdoc new file mode 100644 index 000000000000..ad57bf649163 --- /dev/null +++ b/prdoc/1.5.0/pr_2182_special.prdoc @@ -0,0 +1,9 @@ +title: "remove retry from backers on failed candidate validation" +author: Jpserrat +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2184_special.prdoc b/prdoc/1.5.0/pr_2184_special.prdoc new file mode 100644 index 000000000000..b838bf41ba15 --- /dev/null +++ b/prdoc/1.5.0/pr_2184_special.prdoc @@ -0,0 +1,9 @@ +title: Zombienet tests - disputes on finalized blocks +author: Overkillus +topic: Tests + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2221_special.prdoc b/prdoc/1.5.0/pr_2221_special.prdoc new file mode 100644 index 000000000000..dbd8c4a1fc14 --- /dev/null +++ b/prdoc/1.5.0/pr_2221_special.prdoc @@ -0,0 +1,9 @@ +title: "PVF worker: switch on seccomp networking restrictions" +author: mrcnski +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2250_special.prdoc b/prdoc/1.5.0/pr_2250_special.prdoc new file mode 100644 index 000000000000..d3f87b81b924 --- /dev/null +++ b/prdoc/1.5.0/pr_2250_special.prdoc @@ -0,0 +1,9 @@ +title: "crypto: `lazy_static` removed, light parser for address URI added" +author: michalkucharczyk +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/pr_2253.prdoc b/prdoc/1.5.0/pr_2253.prdoc similarity index 95% rename from prdoc/pr_2253.prdoc rename to prdoc/1.5.0/pr_2253.prdoc index 8a6dac754d1f..3f69bc2461e4 100644 --- a/prdoc/pr_2253.prdoc +++ b/prdoc/1.5.0/pr_2253.prdoc @@ -3,6 +3,9 @@ title: Different builder pattern constructors for XCM +author: franciscoaguirre +topic: runtime + doc: - audience: Runtime Dev description: | diff --git a/prdoc/1.5.0/pr_2265_special.prdoc b/prdoc/1.5.0/pr_2265_special.prdoc new file mode 100644 index 000000000000..336adec03abe --- /dev/null +++ b/prdoc/1.5.0/pr_2265_special.prdoc @@ -0,0 +1,9 @@ +title: Remove im-online pallet from Rococo and Westend +author: s0me0ne-unkn0wn +topic: Pallets + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2300_special.prdoc b/prdoc/1.5.0/pr_2300_special.prdoc new file mode 100644 index 000000000000..407f07663254 --- /dev/null +++ b/prdoc/1.5.0/pr_2300_special.prdoc @@ -0,0 +1,9 @@ +title: '[testnet] Remove Wococo stuff from BridgeHubRococo/AssetHubRococo' +author: bkontur  +topic: Bridges + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git 
a/prdoc/1.5.0/pr_2351_special.prdoc b/prdoc/1.5.0/pr_2351_special.prdoc new file mode 100644 index 000000000000..16f9e5d15a79 --- /dev/null +++ b/prdoc/1.5.0/pr_2351_special.prdoc @@ -0,0 +1,9 @@ +title: "frame-system: Add last_runtime_upgrade_spec_version" +author: bkchr +topic: Frame + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2354_special.prdoc b/prdoc/1.5.0/pr_2354_special.prdoc new file mode 100644 index 000000000000..5fbedef03615 --- /dev/null +++ b/prdoc/1.5.0/pr_2354_special.prdoc @@ -0,0 +1,9 @@ +title: "Fix Typo: `PalletXcmExtrinsicsBenchmark`" +author: joepetrowski +topic: Benchmarks + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2361_special.prdoc b/prdoc/1.5.0/pr_2361_special.prdoc new file mode 100644 index 000000000000..d44b87287c43 --- /dev/null +++ b/prdoc/1.5.0/pr_2361_special.prdoc @@ -0,0 +1,9 @@ +title: "[ci] Enable zombienet jobs in PRs" +author: alvicsam +topic: Tests + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2368_special.prdoc b/prdoc/1.5.0/pr_2368_special.prdoc new file mode 100644 index 000000000000..e8ebcb38d30a --- /dev/null +++ b/prdoc/1.5.0/pr_2368_special.prdoc @@ -0,0 +1,9 @@ +title: "implementers-guide: update github link" +author: ordian +topic: Documentation + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2369_special.prdoc b/prdoc/1.5.0/pr_2369_special.prdoc new file mode 100644 index 000000000000..ebcc533712da --- /dev/null +++ b/prdoc/1.5.0/pr_2369_special.prdoc @@ -0,0 +1,9 @@ +title: "[NPoS] Check if staker is exposed in paged exposure storage entries" +author: Ank4n +topic: Pallets + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2377_special.prdoc b/prdoc/1.5.0/pr_2377_special.prdoc new file mode 100644 index 000000000000..2985db6f3f82 --- /dev/null +++ b/prdoc/1.5.0/pr_2377_special.prdoc @@ -0,0 +1,9 @@ +title: "fix typo" +author: cuteolaf +topic: Documentation + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2378_special.prdoc b/prdoc/1.5.0/pr_2378_special.prdoc new file mode 100644 index 000000000000..bdc965000945 --- /dev/null +++ b/prdoc/1.5.0/pr_2378_special.prdoc @@ -0,0 +1,9 @@ +title: "Beefy: small fixes" +author: serban300 +topic: Bridges + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2380_special.prdoc b/prdoc/1.5.0/pr_2380_special.prdoc new file mode 100644 index 000000000000..058be28bf5dd --- /dev/null +++ b/prdoc/1.5.0/pr_2380_special.prdoc @@ -0,0 +1,9 @@ +title: Deprecate `RewardDestination::Controller` +author: rossbulat +topic: XCM + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2381_special.prdoc b/prdoc/1.5.0/pr_2381_special.prdoc new file mode 100644 index 000000000000..eb4020424d7f --- /dev/null +++ b/prdoc/1.5.0/pr_2381_special.prdoc @@ -0,0 +1,9 @@ +title: Make collator RPC mode non-experimental +author: skunert +topic: Cumulus + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2385_special.prdoc b/prdoc/1.5.0/pr_2385_special.prdoc new file mode 100644 index 000000000000..a5239d30652c --- /dev/null +++ b/prdoc/1.5.0/pr_2385_special.prdoc @@ -0,0 +1,9 @@ +title: "Relax `force_default_xcm_version` for testnet system parachains" +author: bkontur +topic: Cumulus + +doc: + - audience: Runtime Dev + 
description: n/a + +crates: [] diff --git a/prdoc/pr_2388.prdoc b/prdoc/1.5.0/pr_2388.prdoc similarity index 91% rename from prdoc/pr_2388.prdoc rename to prdoc/1.5.0/pr_2388.prdoc index fa560197aff8..8f79097b8f60 100644 --- a/prdoc/pr_2388.prdoc +++ b/prdoc/1.5.0/pr_2388.prdoc @@ -3,8 +3,11 @@ title: Add new flexible `pallet_xcm::transfer_assets()` call/extrinsic +author: acatangiu +topic: runtime + doc: - - audience: Builder + - audience: Runtime Dev description: | For complex combinations of asset transfers where assets and fees may have different reserves or different reserve/teleport trust configurations, users can use the newly added `transfer_assets()` @@ -21,6 +24,7 @@ migrations: runtime: [] -crates: pallet-xcm +crates: + - name: pallet-xcm host_functions: [] diff --git a/prdoc/1.5.0/pr_2397_special.prdoc b/prdoc/1.5.0/pr_2397_special.prdoc new file mode 100644 index 000000000000..5f07b269b1e2 --- /dev/null +++ b/prdoc/1.5.0/pr_2397_special.prdoc @@ -0,0 +1,9 @@ +title: "Pools: Add `MaxUnbonding` to metadata" +author: rossbulat +topic: Pallets + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2406_special.prdoc b/prdoc/1.5.0/pr_2406_special.prdoc new file mode 100644 index 000000000000..3fdb7ad8cf2f --- /dev/null +++ b/prdoc/1.5.0/pr_2406_special.prdoc @@ -0,0 +1,9 @@ +title: Refactor ValidationError +author: eagr +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2411_special.prdoc b/prdoc/1.5.0/pr_2411_special.prdoc new file mode 100644 index 000000000000..0bc01e66903a --- /dev/null +++ b/prdoc/1.5.0/pr_2411_special.prdoc @@ -0,0 +1,9 @@ +title: "polkadot-node-subsystems: `ChainApiBackend` added + polkadot-debug image version fixed" +author: michalkucharczyk +topic: Tests + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2413_special.prdoc b/prdoc/1.5.0/pr_2413_special.prdoc new file mode 100644 index 000000000000..38083ba845b7 --- /dev/null +++ b/prdoc/1.5.0/pr_2413_special.prdoc @@ -0,0 +1,9 @@ +title: "Update documentation for `SafeMode` and `TxPause` Pallets" +author: wilwade +topic: Documentation + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2426_special.prdoc b/prdoc/1.5.0/pr_2426_special.prdoc new file mode 100644 index 000000000000..a0f5ab8ac5b8 --- /dev/null +++ b/prdoc/1.5.0/pr_2426_special.prdoc @@ -0,0 +1,9 @@ +title: "PVF: Fix unshare `no such file or directory` error" +author: mrcnski +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2435_special.prdoc b/prdoc/1.5.0/pr_2435_special.prdoc new file mode 100644 index 000000000000..b2bb7a2b8155 --- /dev/null +++ b/prdoc/1.5.0/pr_2435_special.prdoc @@ -0,0 +1,9 @@ +title: "pallet-staking: Converts all math operations to safe" +author: gpestanaar +topic: Pallets + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2442_special.prdoc b/prdoc/1.5.0/pr_2442_special.prdoc new file mode 100644 index 000000000000..52e672e765fe --- /dev/null +++ b/prdoc/1.5.0/pr_2442_special.prdoc @@ -0,0 +1,9 @@ +title: "Fixes cumulus README instructions" +author: gpestana +topic: Documentation + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2446_special.prdoc b/prdoc/1.5.0/pr_2446_special.prdoc new file mode 100644 index 000000000000..9fec1ad139cc --- /dev/null +++ b/prdoc/1.5.0/pr_2446_special.prdoc @@ 
-0,0 +1,9 @@ +title: "sp-api: Move macro related re-exports to `__private`" +author: bkchr +topic: Runtime API + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2450_special.prdoc b/prdoc/1.5.0/pr_2450_special.prdoc new file mode 100644 index 000000000000..343e71fbf6d7 --- /dev/null +++ b/prdoc/1.5.0/pr_2450_special.prdoc @@ -0,0 +1,9 @@ +title: Adapt test worker to profile flag +author: eagr +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2455_special.prdoc b/prdoc/1.5.0/pr_2455_special.prdoc new file mode 100644 index 000000000000..928b84678074 --- /dev/null +++ b/prdoc/1.5.0/pr_2455_special.prdoc @@ -0,0 +1,9 @@ +title: "Remove `RuntimeApi` dependency on system parachain runtime code" +author: seadanda +topic: "System Parachains" + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2457_special.prdoc b/prdoc/1.5.0/pr_2457_special.prdoc new file mode 100644 index 000000000000..ca6401206f46 --- /dev/null +++ b/prdoc/1.5.0/pr_2457_special.prdoc @@ -0,0 +1,9 @@ +title: "polkadot-parachain: one chain-spec for all" +author: michalkucharczyk +topic: "System Parachains" + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2459_special.prdoc b/prdoc/1.5.0/pr_2459_special.prdoc new file mode 100644 index 000000000000..125f390f4ac9 --- /dev/null +++ b/prdoc/1.5.0/pr_2459_special.prdoc @@ -0,0 +1,9 @@ +title: '[NPoS] Use `EraInfo` to manipulate exposure in fast-unstake tests' +author: Ank4n +topic: Pallets,Tests + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2461_special.prdoc b/prdoc/1.5.0/pr_2461_special.prdoc new file mode 100644 index 000000000000..60a46714ca41 --- /dev/null +++ b/prdoc/1.5.0/pr_2461_special.prdoc @@ -0,0 +1,9 @@ +title: "PVF: remove audit log access" +author: mrcnski +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2462_special.prdoc b/prdoc/1.5.0/pr_2462_special.prdoc new file mode 100644 index 000000000000..ae1f14863276 --- /dev/null +++ b/prdoc/1.5.0/pr_2462_special.prdoc @@ -0,0 +1,9 @@ +title: "relay-chain-consensus: set a fork_choice" +author: michalkucharczyk +topic: Node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2463_special.prdoc b/prdoc/1.5.0/pr_2463_special.prdoc new file mode 100644 index 000000000000..0f35d50036f0 --- /dev/null +++ b/prdoc/1.5.0/pr_2463_special.prdoc @@ -0,0 +1,9 @@ +title: Add `on-chain-release-build` feature for Collectives Westend +author: liamaharon +topic: System Parachains + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2474_special.prdoc b/prdoc/1.5.0/pr_2474_special.prdoc new file mode 100644 index 000000000000..42d67b5efa66 --- /dev/null +++ b/prdoc/1.5.0/pr_2474_special.prdoc @@ -0,0 +1,9 @@ +title: "Pools: Add ability to configure commission claiming permissions" +author: rossbulat +topic: Pallets + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2483_special.prdoc b/prdoc/1.5.0/pr_2483_special.prdoc new file mode 100644 index 000000000000..21fb045cae88 --- /dev/null +++ b/prdoc/1.5.0/pr_2483_special.prdoc @@ -0,0 +1,9 @@ +title: Remove `dmp-queue`` pallet from Rococo Asset Hub and Bridge Hub +author: liamaharon +topic: Frame + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git 
a/prdoc/pr_2486.prdoc b/prdoc/1.5.0/pr_2486.prdoc similarity index 95% rename from prdoc/pr_2486.prdoc rename to prdoc/1.5.0/pr_2486.prdoc index 0d50a7279d10..c716f71c34e5 100644 --- a/prdoc/pr_2486.prdoc +++ b/prdoc/1.5.0/pr_2486.prdoc @@ -1,5 +1,8 @@ title: "PVF: Add Secure Validator Mode" +author: mrcnski +topic: node + doc: - audience: Node Operator description: | diff --git a/prdoc/1.5.0/pr_2487_special.prdoc b/prdoc/1.5.0/pr_2487_special.prdoc new file mode 100644 index 000000000000..3d6a2e11e268 --- /dev/null +++ b/prdoc/1.5.0/pr_2487_special.prdoc @@ -0,0 +1,9 @@ +title: "Do not pollute global base path with export genesis/wasm" +author: bkchr +topic: Cumulus + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2501_special.prdoc b/prdoc/1.5.0/pr_2501_special.prdoc new file mode 100644 index 000000000000..125b9452c984 --- /dev/null +++ b/prdoc/1.5.0/pr_2501_special.prdoc @@ -0,0 +1,9 @@ +title: "Staking: `chill_other` takes stash instead of controller" +author: rossbulat +topic: Pallets + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2509_special.prdoc b/prdoc/1.5.0/pr_2509_special.prdoc new file mode 100644 index 000000000000..03ebfd80c96d --- /dev/null +++ b/prdoc/1.5.0/pr_2509_special.prdoc @@ -0,0 +1,9 @@ +title: "Breaking: Remove long deprecated `AllPalletsWithoutSystemReversed`" +author: skunert +topic: Frame + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2515_special.prdoc b/prdoc/1.5.0/pr_2515_special.prdoc new file mode 100644 index 000000000000..4664058f86c8 --- /dev/null +++ b/prdoc/1.5.0/pr_2515_special.prdoc @@ -0,0 +1,9 @@ +title: Set `frame_system::LastRuntimeUpgrade` after running `try-runtime migrations` +author: liamaharon +topic: Frame + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2516_special.prdoc b/prdoc/1.5.0/pr_2516_special.prdoc new file mode 100644 index 000000000000..5d452b63e596 --- /dev/null +++ b/prdoc/1.5.0/pr_2516_special.prdoc @@ -0,0 +1,9 @@ +title: Remove `dmp_queue pallet` from Westend SP runtimes +author: liamaharon +topic: Frame + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2521_special.prdoc b/prdoc/1.5.0/pr_2521_special.prdoc new file mode 100644 index 000000000000..3b70150619e5 --- /dev/null +++ b/prdoc/1.5.0/pr_2521_special.prdoc @@ -0,0 +1,10 @@ +title: 'substrate-node: `NativeElseWasmExecutor` is no longer used' + +author: michalkucharczyk +topic: node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2526_special.prdoc b/prdoc/1.5.0/pr_2526_special.prdoc new file mode 100644 index 000000000000..6008d7bfa9d5 --- /dev/null +++ b/prdoc/1.5.0/pr_2526_special.prdoc @@ -0,0 +1,10 @@ +title: Remove `pov-recovery` race condition/Improve zombienet test + +author: skunert +topic: testing + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2552_special.prdoc b/prdoc/1.5.0/pr_2552_special.prdoc new file mode 100644 index 000000000000..9f0140c81421 --- /dev/null +++ b/prdoc/1.5.0/pr_2552_special.prdoc @@ -0,0 +1,10 @@ +title: Withdraw Assets Before Checking Out in OnReapIdentity impl + +author: joepetrowski +topic: xcm + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2555_special.prdoc b/prdoc/1.5.0/pr_2555_special.prdoc new file mode 100644 index 000000000000..f817810f433e --- /dev/null 
+++ b/prdoc/1.5.0/pr_2555_special.prdoc @@ -0,0 +1,10 @@ +title: Remove dependency on rand's SliceRandom shuffle implementation in `gossip-support` + +author: rphmeier +topic: node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2572_special.prdoc b/prdoc/1.5.0/pr_2572_special.prdoc new file mode 100644 index 000000000000..9d4c285798cc --- /dev/null +++ b/prdoc/1.5.0/pr_2572_special.prdoc @@ -0,0 +1,10 @@ +title: Add missing glossary to ref docs + +author: juangirini +topic: documentation + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2579_special.prdoc b/prdoc/1.5.0/pr_2579_special.prdoc new file mode 100644 index 000000000000..2992c92a8c52 --- /dev/null +++ b/prdoc/1.5.0/pr_2579_special.prdoc @@ -0,0 +1,10 @@ +title: "impl guide: update PVF host page; add diagrams" + +author: mrcnsk +topic: documentation + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2581_special.prdoc b/prdoc/1.5.0/pr_2581_special.prdoc new file mode 100644 index 000000000000..ebe5855b4016 --- /dev/null +++ b/prdoc/1.5.0/pr_2581_special.prdoc @@ -0,0 +1,10 @@ +title: 'Bandersnatch: `ring-context` generic over domain size' + +author: davxy +topic: node + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2591.prdoc b/prdoc/1.5.0/pr_2591.prdoc new file mode 100644 index 000000000000..f827e70af8bc --- /dev/null +++ b/prdoc/1.5.0/pr_2591.prdoc @@ -0,0 +1,12 @@ +title: Ensure to cleanup state in `remove_member` + +author: bkchr +topic: runtime + +doc: + - audience: Runtime Dev + description: | + Cleans up the state properly if a member of a ranked collective is removed. + +crates: + - name: pallet-ranked-collective diff --git a/prdoc/1.5.0/pr_2602_special.prdoc b/prdoc/1.5.0/pr_2602_special.prdoc new file mode 100644 index 000000000000..56896348b4f7 --- /dev/null +++ b/prdoc/1.5.0/pr_2602_special.prdoc @@ -0,0 +1,10 @@ +title: 'Bridges subtree update' + +author: bkontur +topic: bridges + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/pr_2625_special.prdoc b/prdoc/1.5.0/pr_2625_special.prdoc new file mode 100644 index 000000000000..3ffcf5986602 --- /dev/null +++ b/prdoc/1.5.0/pr_2625_special.prdoc @@ -0,0 +1,10 @@ +title: Improved `ExportXcm::validate` implementation for BridgeHubs + +author: bkontur +topic: bridges + +doc: + - audience: Runtime Dev + description: n/a + +crates: [] diff --git a/prdoc/1.5.0/readme.md b/prdoc/1.5.0/readme.md new file mode 100644 index 000000000000..14b6d6033147 --- /dev/null +++ b/prdoc/1.5.0/readme.md @@ -0,0 +1,2 @@ +Version 1.5.0 does not fully support `prddoc` yet. +While the list is complete, not all prdoc files have a valid or accurate content. diff --git a/prdoc/1.6.0/pr_1191.prdoc b/prdoc/1.6.0/pr_1191.prdoc new file mode 100644 index 000000000000..26626731be46 --- /dev/null +++ b/prdoc/1.6.0/pr_1191.prdoc @@ -0,0 +1,21 @@ +title: Approve multiple candidates with a single signature + +doc: + - audience: Node Operator + description: | + Changed approval-voting, approval-distribution to approve multiple candidate with a single message, it adds: + * A new parachains_db version. + * A new validation protocol to support the new message types. + The new logic will be disabled and will be enabled at a later date after all validators have upgraded. + +migrations: + db: + - name: Parachains database change from v4 to v5. 
+ description: | + Approval-voting column format has been updated with several new fields. All existing data will be automatically + be migrated to the new values. + +crates: + - name: "polkadot" + +host_functions: [] diff --git a/prdoc/pr_1226.prdoc b/prdoc/1.6.0/pr_1226.prdoc similarity index 100% rename from prdoc/pr_1226.prdoc rename to prdoc/1.6.0/pr_1226.prdoc diff --git a/prdoc/pr_1289.prdoc b/prdoc/1.6.0/pr_1289.prdoc similarity index 100% rename from prdoc/pr_1289.prdoc rename to prdoc/1.6.0/pr_1289.prdoc diff --git a/prdoc/1.6.0/pr_1343.prdoc b/prdoc/1.6.0/pr_1343.prdoc new file mode 100644 index 000000000000..84168230e0af --- /dev/null +++ b/prdoc/1.6.0/pr_1343.prdoc @@ -0,0 +1,29 @@ +title: Tasks API - A general system for recognizing and executing service work + +doc: + - audience: Runtime Dev + description: | + The Tasks API allows you to define some service work that can be recognized by a script or an off-chain worker. + Such a script can then create and submit all such work items at any given time. + `#[pallet:tasks_experimental]` provides a convenient way to define such work items. It can be attached to an + `impl` block inside a pallet, whose functions can then be annotated by the following attributes: + 1. `#[pallet::task_list]`: Define an iterator over the available work items for a task + 2. `#[pallet::task_condition]`: Define the conditions for a given work item to be valid + 3. `#[pallet::task_weight]`: Define the weight of a given work item + 4. `#[pallet::task_index]`: Define the index of a given work item + Each such function becomes a variant of the autogenerated enum `Task` for this pallet. + All such enums are aggregated into a `RuntimeTask` by `construct_runtime`. + An example pallet that uses the Tasks API is available at `substrate/frame/example/tasks`. + +migrations: + db: [] + + runtime: [] + +crates: + - name: frame-system + - name: frame-support + - name: frame-support-procedural + - name: pallet-example-tasks + +host_functions: [] diff --git a/prdoc/1.6.0/pr_1454.prdoc b/prdoc/1.6.0/pr_1454.prdoc new file mode 100644 index 000000000000..c96bfcde6cfe --- /dev/null +++ b/prdoc/1.6.0/pr_1454.prdoc @@ -0,0 +1,10 @@ +title: Support XCM as part of Cosmos CosmWasm contract messages + +doc: + - audience: Runtime Dev + description: | + Made XCM JSON schema behind flag, bumped bounded-collection so to ensure it has that flag too. + +crates: + - name: staging-xcm + - name: sp-weights diff --git a/prdoc/1.6.0/pr_1479.prdoc b/prdoc/1.6.0/pr_1479.prdoc new file mode 100644 index 000000000000..33b798290f83 --- /dev/null +++ b/prdoc/1.6.0/pr_1479.prdoc @@ -0,0 +1,11 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Rococo/Westend Coretime Runtime + +doc: + - audience: Runtime User + description: | + Rococo/Westend runtime for the Coretime Chain (a.k.a. "Broker Chain") described in RFC-1. 
+ +crates: [ ] \ No newline at end of file diff --git a/prdoc/1.6.0/pr_1677.prdoc b/prdoc/1.6.0/pr_1677.prdoc new file mode 100644 index 000000000000..9c5bee386ae3 --- /dev/null +++ b/prdoc/1.6.0/pr_1677.prdoc @@ -0,0 +1,22 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "pallet-asset-conversion: Swap Credit" + +doc: + - audience: Runtime Dev + description: | + Introduces a swap implementation that allows the exchange of a credit (aka Negative Imbalance) of one asset for a credit of another asset. + + This is particularly useful when a credit swap is required but may not have sufficient value to meet the ED constraint, hence cannot be deposited to temp account before. An example use case is when XCM fees are paid using an asset held in the XCM executor registry and has to be swapped for native currency. + + Additional Updates: + - encapsulates the existing `Swap` trait impl within a transactional context, since partial storage mutation is possible when an error occurs; + - supplied `Currency` and `Assets` impls must be implemented over the same `Balance` type, the `AssetBalance` generic type is dropped. This helps to avoid numerous type conversion and overflow cases. If those types are different it should be handled outside of the pallet; + - `Box` asset kind on a pallet level, unbox on a runtime level - here [why](https://substrate.stackexchange.com/questions/10039/boxed-argument-of-a-dispatchable/10103#10103); + - `path` uses `Vec` now, instead of `BoundedVec` since it is never used in PoV; + - removes the `Transfer` event due to it's redundancy with the events emitted by `fungible/s` implementations; + - modifies the `SwapExecuted` event type; + +crates: [ ] + diff --git a/prdoc/1.6.0/pr_1694.prdoc b/prdoc/1.6.0/pr_1694.prdoc new file mode 100644 index 000000000000..24797630efc9 --- /dev/null +++ b/prdoc/1.6.0/pr_1694.prdoc @@ -0,0 +1,24 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Agile Coretime Base Relaychain Functionality + +doc: + - audience: Runtime User + description: | + The relay chain is now capable of receiving assignments from the coretime + chain and will schedule parachains and on-demand orders accordingly. + Existing leases and system chains are preserved. They get a reserved + coretime core via a migration. +migrations: + db: [] + runtime: + - reference: polkadot-runtime-parachains + description: | + Claim queue in scheduler now no longer contains Option values and + assignments now contain information necessary to accomodate for coretime + features. Also all existing parachains are converted to coretime + assignments. + +crates: + - name: polkadot-runtime-parachains diff --git a/prdoc/1.6.0/pr_1841.prdoc b/prdoc/1.6.0/pr_1841.prdoc new file mode 100644 index 000000000000..c99583e6dc30 --- /dev/null +++ b/prdoc/1.6.0/pr_1841.prdoc @@ -0,0 +1,18 @@ +title: Validator disabling in Statement Distribution. + +doc: + - audience: Node Operator + description: | + Once a validator has been disabled for misbehavior, other validators + should no longer gossip its backing statements in the current era. + If they do, it might result in disconnects from the network due to low + reputation. 
+ +migrations: + db: [] + runtime: [] + +crates: + - name: polkadot-statement-distribution + +host_functions: [] diff --git a/prdoc/1.6.0/pr_2031.prdoc b/prdoc/1.6.0/pr_2031.prdoc new file mode 100644 index 000000000000..fc2695df52e1 --- /dev/null +++ b/prdoc/1.6.0/pr_2031.prdoc @@ -0,0 +1,29 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "pallet-asset-conversion: Decoupling Native Currency Dependancy" + +doc: + - audience: Runtime Dev + description: | + Decoupling Pallet from the Concept of Native Currency + + Currently, the pallet used to intrinsically linked with the concept of native currency, requiring users to provide implementations of the `fungible::*` and `fungibles::*` traits to interact with native and non native assets. This incapsulates some non-related to the pallet complexity and makes it less adaptable in contexts where the native currency concept is absent. + + With this PR, the dependence on `fungible::*` for liquidity-supplying assets has been removed. Instead, the native and non-native currencies' handling is now overseen by a single type that implements the `fungibles::*` traits. To simplify this integration, types have been introduced to facilitate the creation of a union between `fungible::*` and `fungibles::*` implementations, producing a unified `fungibles::*` type. + + One of the reasons driving these changes is the ambition to create a more user-friendly API for the `SwapCredit` implementation. Given that it interacts with two distinct credit types from `fungible` and `fungibles`, a unified type was introduced. Clients now manage potential conversion failures for those credit types. In certain contexts, it's vital to guarantee that operations are fail-safe, like in this impl - [PR](https://github.com/paritytech/polkadot-sdk/pull/1845), place in [code](https://github.com/paritytech/polkadot-sdk/blob/20b85a5fada8f55c98ba831964f5866ffeadf4da/cumulus/primitives/utility/src/lib.rs#L429). + + Additional Updates: + - abstracted the pool ID and its account derivation logic via trait bounds, along with common implementation offerings; + - removed `inc_providers` on a pool creation for the pool account; + - benchmarks: + -- swap complexity is N, not const; + -- removed `From + Into` bound from `T::Balance`; + -- removed swap/liquidity/.. amount constants, resolve them dynamically based on pallet configuration; + -- migrated to v2 API; + - `OnUnbalanced` handler for the pool creation fee, replacing direct transfers to a specified account ID; + - renamed `MultiAssetId` to `AssetKind` aligning with naming across frame crates; + +crates: + - name: pallet-asset-conversion diff --git a/prdoc/1.6.0/pr_2033.prdoc b/prdoc/1.6.0/pr_2033.prdoc new file mode 100644 index 000000000000..eeb7ff2b4eed --- /dev/null +++ b/prdoc/1.6.0/pr_2033.prdoc @@ -0,0 +1,14 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "`UnionOf` types for merged `fungible` and `fungibles` implementations" + +doc: + - audience: Runtime Dev + description: | + Introduces `UnionOf` types, crafted to merge `fungible` and `fungibles` implementations or two + `fungibles` implementations into a single type implementing `fungibles`. This also addresses + an issue where `ItemOf` initiates a double drop for an imbalance type, leading to inaccurate + total issuance accounting. 
+
+crates: [ ]
diff --git a/prdoc/1.6.0/pr_2281.prdoc b/prdoc/1.6.0/pr_2281.prdoc
new file mode 100644
index 000000000000..c5453a08f2a3
--- /dev/null
+++ b/prdoc/1.6.0/pr_2281.prdoc
@@ -0,0 +1,12 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: Rococo and Westend People Chain Runtimes
+
+doc:
+  - audience: Runtime User
+    description: |
+      Rococo and Westend runtimes for the "People Chain". This chain contains the Identity pallet
+      with plans to migrate all related data from the Relay Chain. Changes `IdentityInfo` fields.
+
+crates: [ ]
diff --git a/prdoc/1.6.0/pr_2331.prdoc b/prdoc/1.6.0/pr_2331.prdoc
new file mode 100644
index 000000000000..e3daf4c45bd4
--- /dev/null
+++ b/prdoc/1.6.0/pr_2331.prdoc
@@ -0,0 +1,17 @@
+title: Rename `ExportGenesisStateCommand` to `ExportGenesisHeadCommand`
+
+doc:
+  - audience: Node Operator
+    description: |
+      The `export-genesis-state` subcommand is now called `export-genesis-head`, but
+      `export-genesis-state` stays as an alias so existing scripts do not break.
+
+  - audience: Node Dev
+    description: |
+      The struct `ExportGenesisStateCommand` is now called `ExportGenesisHeadCommand`,
+      so you only need to rename the import and usage. The `run` function now takes
+      only a `client` argument, which it uses to fetch the genesis header. This way
+      the exported genesis head respects custom genesis block builders.
+
+crates:
+  - name: "cumulus-client-cli"
diff --git a/prdoc/1.6.0/pr_2403.prdoc b/prdoc/1.6.0/pr_2403.prdoc
new file mode 100644
index 000000000000..f1c4d3ecbaf1
--- /dev/null
+++ b/prdoc/1.6.0/pr_2403.prdoc
@@ -0,0 +1,9 @@
+title: Configurable block number provider in pallet-vesting
+
+doc:
+  - audience: Runtime Dev
+    description: |
+      Adds a `BlockNumberProvider` type to the pallet-vesting Config trait, allowing for custom providers instead of hardcoding frame-system.
+      This is particularly useful for parachains wanting to use `cumulus_pallet_parachain_system::RelaychainDataProvider` with `pallet-vesting`.
+
+crates: [ ]
diff --git a/prdoc/1.6.0/pr_2481.prdoc b/prdoc/1.6.0/pr_2481.prdoc
new file mode 100644
index 000000000000..d8736b1afd6e
--- /dev/null
+++ b/prdoc/1.6.0/pr_2481.prdoc
@@ -0,0 +1,13 @@
+# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0
+# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json
+
+title: "xcm-builder: `HaulBlobExporter` with improved XCM version check."
+
+doc:
+  - audience: Runtime Dev
+    description: |
+      The version check in `HaulBlobExporter` uses the new trait `CheckVersion` to check known/configured destination versions,
+      ensuring compatibility. `HaulBlobExporter` will attempt to downgrade the message to the destination's known version
+      instead of using the latest version.
+
+crates: [ ]
diff --git a/prdoc/1.6.0/pr_2522.prdoc b/prdoc/1.6.0/pr_2522.prdoc
new file mode 100644
index 000000000000..9a98f984bacb
--- /dev/null
+++ b/prdoc/1.6.0/pr_2522.prdoc
@@ -0,0 +1,12 @@
+title: "Adds Snowbridge to Rococo runtime"
+
+doc:
+  - audience: Runtime Dev
+    description: |
+      Adds the Snowbridge pallets as a git subtree under the bridges directory. Adds Snowbridge
+      to the Rococo Asset Hub and Bridge Hub runtimes.
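For the pallet-vesting change (pr_2403) above, the new associated type is set in the runtime's `Config` impl. The fragment below is a hedged sketch: all other associated types are elided, and the `RelaychainDataProvider<Runtime>` form is an assumption to verify against the cumulus version in use.

```rust
// Hedged sketch of a parachain runtime opting into relay-chain block numbers
// for vesting schedules (pr_2403). Every other associated type of
// `pallet_vesting::Config` stays as before and is elided here.
impl pallet_vesting::Config for Runtime {
    // ... existing associated types unchanged ...

    // Assumed generic form; check the exact signature of
    // `RelaychainDataProvider` in your cumulus release.
    type BlockNumberProvider = cumulus_pallet_parachain_system::RelaychainDataProvider<Runtime>;
}
```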
+ + +crates: + - name: asset-hub-rococo-runtime + - name: bridge-hub-rococo-runtime diff --git a/prdoc/1.6.0/pr_2532.prdoc b/prdoc/1.6.0/pr_2532.prdoc new file mode 100644 index 000000000000..d0df0ee4aca9 --- /dev/null +++ b/prdoc/1.6.0/pr_2532.prdoc @@ -0,0 +1,11 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Westend Fellowship Treasury + +doc: + - audience: Runtime User + description: | + Treasury Pallet Instance for the Fellowship in Westend Collectives. + +crates: [ ] diff --git a/prdoc/1.6.0/pr_2597.prdoc b/prdoc/1.6.0/pr_2597.prdoc new file mode 100644 index 000000000000..33d850531841 --- /dev/null +++ b/prdoc/1.6.0/pr_2597.prdoc @@ -0,0 +1,17 @@ +title: Make crate visible methods of `OverlayedChanges` public. + +doc: + - audience: Node Dev + description: | + Make some methods of `OverlayedChanges` namely `set_child_storage`, `clear_child_storage`, `clear_prefix` + and `clear_child_prefix` public which only had crate level visibility. + +migrations: + db: [] + + runtime: [] + +crates: + - name: sp-state-machine + +host_functions: [] diff --git a/prdoc/1.6.0/pr_2637.prdoc b/prdoc/1.6.0/pr_2637.prdoc new file mode 100644 index 000000000000..a7ab4f93222e --- /dev/null +++ b/prdoc/1.6.0/pr_2637.prdoc @@ -0,0 +1,18 @@ +title: Validator disabling in Dispute Participation. + +doc: + - audience: Node Operator + description: | + Once a validator has been disabled for misbehavior, other validators + should no longer participate in disputes initiated by it. + This feature is needed to ensure robust spam protection against + malicious actors. + +migrations: + db: [] + runtime: [] + +crates: + - name: polkadot-node-core-dispute-coordinator + +host_functions: [] diff --git a/prdoc/1.6.0/pr_2651.prdoc b/prdoc/1.6.0/pr_2651.prdoc new file mode 100644 index 000000000000..e28013d4330e --- /dev/null +++ b/prdoc/1.6.0/pr_2651.prdoc @@ -0,0 +1,12 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Unique Usernames for Identity + +doc: + - audience: Runtime User + description: | + Adds the ability to add unique usernames for an account with reverse lookup (as in `AccountId` + to `Username` and `Username` to `AccountId`). + +crates: [ ] diff --git a/prdoc/1.6.0/pr_2656.prdoc b/prdoc/1.6.0/pr_2656.prdoc new file mode 100644 index 000000000000..563218dbde62 --- /dev/null +++ b/prdoc/1.6.0/pr_2656.prdoc @@ -0,0 +1,10 @@ +title: "pallet-broker: Small improvements to the origin checks" + +doc: + - audience: Runtime User + description: | + Change the permissionless calls `drop_region`, `drop_contribution`, `drop_history` and + `drop_renewal` to allow any kind of origin. + +crates: + - name: "pallet-broker" diff --git a/prdoc/1.6.0/pr_2663-fix-could-not-create-temporary-drectory.prdoc b/prdoc/1.6.0/pr_2663-fix-could-not-create-temporary-drectory.prdoc new file mode 100644 index 000000000000..2119599fce11 --- /dev/null +++ b/prdoc/1.6.0/pr_2663-fix-could-not-create-temporary-drectory.prdoc @@ -0,0 +1,17 @@ +title: "PVF: fix unshare 'could not create temporary directory'" + +doc: + - audience: Node Operator + description: | + For validators: fixes the potential warning/error: + "Cannot unshare user namespace and change root, which are Linux-specific kernel security features: could not create a temporary directory in "/tmp/.tmpIcLriO". 
+ +migrations: + db: [] + + runtime: [] + +crates: + - name: polkadot-node-core-pvf + +host_functions: [] diff --git a/prdoc/1.6.0/pr_2666.prdoc b/prdoc/1.6.0/pr_2666.prdoc new file mode 100644 index 000000000000..d7fbbda51085 --- /dev/null +++ b/prdoc/1.6.0/pr_2666.prdoc @@ -0,0 +1,14 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Remove kusama and polkadot SP constants from parachains-common + +doc: + - audience: Runtime Dev + description: | + The constants for System Parachains in Kusama and Polkadot are now added to a new package in + the fellowship repo. This PR removes them from Polkadot-SDK. They are now accessible from the + `system-parachains-constants` package. + +crates: + - name: parachains-common diff --git a/prdoc/1.6.0/pr_2682.prdoc b/prdoc/1.6.0/pr_2682.prdoc new file mode 100644 index 000000000000..eaa5f5a4a9a6 --- /dev/null +++ b/prdoc/1.6.0/pr_2682.prdoc @@ -0,0 +1,21 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "Add Authorize Upgrade Pattern to Frame System" + +doc: + - audience: Runtime User + description: | + Adds the `authorize_upgrade` -> `enact_authorized_upgrade` pattern to `frame-system`. This + will be useful for upgrading bridged chains that are under the governance of Polkadot without + passing entire runtime Wasm blobs over a bridge. + + Notes: + + - Changed `enact_authorized_upgrade` to `apply_authorized_upgrade`. + - Left calls in `parachain-system` and marked as deprecated to prevent breaking the API. They + just call into the `frame-system` functions. + - Deprecated calls will be removed no earlier than June 2024. + - Updated `frame-system` benchmarks to v2 syntax. + +crates: [ ] diff --git a/prdoc/1.6.0/pr_2684.prdoc b/prdoc/1.6.0/pr_2684.prdoc new file mode 100644 index 000000000000..8960b6460f0d --- /dev/null +++ b/prdoc/1.6.0/pr_2684.prdoc @@ -0,0 +1,14 @@ +title: Add XCM FungibleAdapter + +doc: + - audience: Runtime Dev + description: | + A new AssetTransactor has been added to xcm-builder: FungibleAdapter. + It's meant to be used instead of the old CurrencyAdapter for configuring the XCM executor + to handle only one asset. + +crates: + - name: "xcm-builder" + +migrations: [] +host_functions: [] diff --git a/prdoc/1.6.0/pr_2687.prdoc b/prdoc/1.6.0/pr_2687.prdoc new file mode 100644 index 000000000000..90e635d80529 --- /dev/null +++ b/prdoc/1.6.0/pr_2687.prdoc @@ -0,0 +1,18 @@ +title: "pallet-uniques: Move migration over to `VersionedMigration`" + +doc: + - audience: Runtime Dev + description: | + Moves the migration over to `VersionedMigration`. Thus, if you had + used `migrate_to_v1` before in a custom `OnRuntimeUpgrade` implementation + you can now directly use the `MigrateV0ToV1`. + +migrations: + runtime: + - reference: MigrateV0ToV1 + description: | + Migrate the pallet storage from `0` to `1` by initializing + the `CollectionAccount` storage entry from all collections. 
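The `FungibleAdapter` entry above (pr_2684) is described as a drop-in replacement for `CurrencyAdapter` in an XCM executor config. The sketch below assumes the generic parameters mirror the old adapter (fungible implementation, asset matcher, location-to-account converter, account id type, optional checking account) and uses placeholder names for runtime-specific types; verify both against the xcm-builder release in use.

```rust
// Hedged sketch for pr_2684: replacing CurrencyAdapter with FungibleAdapter.
// `Balances`, `HereLocation`, `LocationToAccountId` and `AccountId` are
// placeholders for the runtime's own types; the parameter order is assumed
// to mirror the old CurrencyAdapter.
use xcm_builder::{FungibleAdapter, IsConcrete};

/// Transacts only the single asset matched by `IsConcrete<HereLocation>`.
pub type LocalAssetTransactor = FungibleAdapter<
    Balances,                 // fungible implementation backing the asset
    IsConcrete<HereLocation>, // match the one asset this adapter handles
    LocationToAccountId,      // convert XCM locations to local account ids
    AccountId,                // local account id type
    (),                       // no checking account
>;
```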
+ +crates: + - name: "pallet-uniques" diff --git a/prdoc/1.6.0/pr_2689.prdoc b/prdoc/1.6.0/pr_2689.prdoc new file mode 100644 index 000000000000..847c3e8026ce --- /dev/null +++ b/prdoc/1.6.0/pr_2689.prdoc @@ -0,0 +1,13 @@ +# Schema: Parity PR Documentation Schema (prdoc) +# See doc at https://github.com/paritytech/prdoc + +title: BEEFY: Support compatibility with Warp Sync - Allow Warp Sync for Validators + +doc: + - audience: Node Operator + description: | + BEEFY can now sync itself even when using Warp Sync to sync the node. This removes the limitation of not + being able to run BEEFY when warp syncing. Validators are now again able to warp sync. + +crates: + - name: sc-consensus-beefy diff --git a/prdoc/1.6.0/pr_2694.prdoc b/prdoc/1.6.0/pr_2694.prdoc new file mode 100644 index 000000000000..c393dcfeb9a8 --- /dev/null +++ b/prdoc/1.6.0/pr_2694.prdoc @@ -0,0 +1,14 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: "pallet-election-provider-multi-phase: Removes `BetterUnsignedThreshold` from pallet config" + +doc: + - audience: Runtime Dev + description: | + Removes thresholding for accepting solutions better than the last queued for unsigned phase. This is unnecessary + as even without thresholding, the number of solutions that can be submitted to on-chain which is better than the + previous one is limited. + +crates: + - name: "pallet-election-provider-multi-phase" diff --git a/prdoc/1.6.0/pr_2758.prdoc b/prdoc/1.6.0/pr_2758.prdoc new file mode 100644 index 000000000000..d8cb0557e9b6 --- /dev/null +++ b/prdoc/1.6.0/pr_2758.prdoc @@ -0,0 +1,10 @@ +title: Fix vote weights of ranked members in the Society pallet + +doc: + - audience: Runtime User + description: | + Fixes a bug in the tally accrual of approvals/rejections when + ranked members vote for Candidates and Defender in the Society pallet. + +crates: + - name: pallet-society diff --git a/prdoc/1.6.0/pr_2764.prdoc b/prdoc/1.6.0/pr_2764.prdoc new file mode 100644 index 000000000000..adfa4f47c93d --- /dev/null +++ b/prdoc/1.6.0/pr_2764.prdoc @@ -0,0 +1,16 @@ +title: Validator disabling in Backing. + +doc: + - audience: Node Operator + description: | + Once a validator has been disabled for misbehavior, it will no longer + sign backing statements in the current era. + +migrations: + db: [] + runtime: [] + +crates: + - name: polkadot-node-core-backing + +host_functions: [] diff --git a/prdoc/1.6.0/pr_2767.prdoc b/prdoc/1.6.0/pr_2767.prdoc new file mode 100644 index 000000000000..c2cd466c0097 --- /dev/null +++ b/prdoc/1.6.0/pr_2767.prdoc @@ -0,0 +1,17 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Extract PartialComponents into type alias `Service` + +doc: + - audience: Node Dev + description: | + Simplifies service definitions by extraction of a complicated type into a type alias. No breaking changes. 
+ +crates: + - name: "sc-service" + - name: "node-template" + - name: "minimal-node" + - name: "cumulus-test-service" + - name: "polkadot-parachain-bin" + - name: "parachain-template-node" diff --git a/prdoc/1.6.0/pr_2771.prdoc b/prdoc/1.6.0/pr_2771.prdoc new file mode 100644 index 000000000000..1b49162e4392 --- /dev/null +++ b/prdoc/1.6.0/pr_2771.prdoc @@ -0,0 +1,9 @@ +title: Add fallback request for req-response protocols + +doc: + - audience: Node Dev + description: | + Enable better req-response protocol versioning, by allowing for fallback requests on different protocols. + +crates: + - name: sc_network diff --git a/prdoc/1.6.0/pr_2783.prdoc b/prdoc/1.6.0/pr_2783.prdoc new file mode 100644 index 000000000000..0e4c99065414 --- /dev/null +++ b/prdoc/1.6.0/pr_2783.prdoc @@ -0,0 +1,12 @@ +title: "Accept Root origin as valid sudo" + +doc: + - audience: Runtime User + description: | + Dispatchables of `pallet-sudo` will now also accept the `Root` origin + as valid `sudo` origin. This enhancement is useful for parachains that + allow the relay chain as a superuser. It enables the relay chain to send + an XCM message to initialize the sudo key. + +crates: + - name: "pallet-sudo" diff --git a/prdoc/1.6.0/pr_2799.prdoc b/prdoc/1.6.0/pr_2799.prdoc new file mode 100644 index 000000000000..436dea643e20 --- /dev/null +++ b/prdoc/1.6.0/pr_2799.prdoc @@ -0,0 +1,10 @@ +title: Improve XCM debuggability + +doc: + - audience: Runtime User + description: | + Adds more logging to XCM execution to improve its debuggability. + +crates: + - name: "staging-xcm-builder" + - name: "staging-xcm-executor" diff --git a/prdoc/1.6.0/pr_2803.prdoc b/prdoc/1.6.0/pr_2803.prdoc new file mode 100644 index 000000000000..1ddd3dd677a1 --- /dev/null +++ b/prdoc/1.6.0/pr_2803.prdoc @@ -0,0 +1,19 @@ +title: "cumulus-primitives-parachain-inherent: Split into two crates" + +doc: + - audience: Node Dev + description: | + This splits `cumulus-primitives-parachain-inherent` into two crates. The new crate is called + `cumulus-client-parachain-inherent`. This is done to improve the compile time for runtimes, + as they are not required anymore to pull in half of the node side at compile time. + + To migrate your code you need to change + `cumulus_primitives_parachain_inherent::ParachainInherentData::create_at` to + `cumulus_client_parachain_inherent::ParachainInherentDataProvider::create_at`. + Any other code should be compatible. The mocking code also moved to the new client crate and + you may need to adapt your imports accordingly. Generally, replacing the old crate with the new + crate fix most compile errors resulting from this pull request. + +crates: + - name: "cumulus-primitives-parachain-inherent" + - name: "cumulus-client-parachain-inherent" diff --git a/prdoc/1.6.0/pr_2804.prdoc b/prdoc/1.6.0/pr_2804.prdoc new file mode 100644 index 000000000000..456120741d93 --- /dev/null +++ b/prdoc/1.6.0/pr_2804.prdoc @@ -0,0 +1,9 @@ +title: Fix malus implementation. + +doc: + - audience: Node Dev + description: | + The malus implementation is used to test security of Polkadot. + It was broken. This fixes it. + +crates: [ ] diff --git a/prdoc/1.6.0/pr_2811.prdoc b/prdoc/1.6.0/pr_2811.prdoc new file mode 100644 index 000000000000..647fb4c8ccd4 --- /dev/null +++ b/prdoc/1.6.0/pr_2811.prdoc @@ -0,0 +1,13 @@ +title: "Interlacing removes the region on which it is performed." + +doc: + - audience: Runtime User + description: | + The current implementation of the broker pallet does not remove + the region on which the interlacing is performed. 
This can create + a vulnerability, as the original region owner is still allowed to + assign a task to the region even after transferring an interlaced + part of it. + +crates: + - name: "pallet-broker" diff --git a/prdoc/1.6.0/pr_2813.prdoc b/prdoc/1.6.0/pr_2813.prdoc new file mode 100644 index 000000000000..ff6e5cf5cf6b --- /dev/null +++ b/prdoc/1.6.0/pr_2813.prdoc @@ -0,0 +1,11 @@ +title: "Implement only sending one notification at a time as per RFC 56" + +doc: + - audience: Node Dev + description: | + Transactions are now gossiped one at a time instead of as batches, as per RFC 56. This + allows decoding notifications without knowing how to decode individual transactions, and + allows for a more fine grained backpressure. + +crates: + - name: "sc-network-transactions" diff --git a/prdoc/1.6.0/pr_2823.prdoc b/prdoc/1.6.0/pr_2823.prdoc new file mode 100644 index 000000000000..64a309969efb --- /dev/null +++ b/prdoc/1.6.0/pr_2823.prdoc @@ -0,0 +1,11 @@ +title: "`fungible::Unbalanced::decrease_balance`: Handle `precision` properly" + +doc: + - audience: Runtime Dev + description: | + `fungible::Unbalanced::decrease_balance` will now handle `precision` properly. This means when + passing `Exact`, it will ensure that the available balance is bigger or equal to the `amount` + that should be deducted. + +crates: + - name: "frame-support" diff --git a/prdoc/1.6.0/pr_2834.prdoc b/prdoc/1.6.0/pr_2834.prdoc new file mode 100644 index 000000000000..3a5881659de6 --- /dev/null +++ b/prdoc/1.6.0/pr_2834.prdoc @@ -0,0 +1,13 @@ +title: "proposer: return optional block" + +doc: + - audience: Node Dev + description: | + The `ProposerInterface` trait now returns an optional `Proposal`, allowing + for no block to be created. This is a breaking change that only impacts custom + `ProposerInterface` implementations. The change allows more flexibility in choosing + when to create blocks. + +crates: + - name: "cumulus-client-consensus-aura" + - name: "cumulus-client-consensus-proposer" diff --git a/prdoc/1.6.0/pr_2835.prdoc b/prdoc/1.6.0/pr_2835.prdoc new file mode 100644 index 000000000000..037e9b8ec770 --- /dev/null +++ b/prdoc/1.6.0/pr_2835.prdoc @@ -0,0 +1,9 @@ +title: New malus variant `support-disabled` + +doc: + - audience: Node Dev + description: | + A new malicious flavor added to pretend that nobody + is disabled onchain. + +crates: [ ] diff --git a/prdoc/1.6.0/pr_2862.prdoc b/prdoc/1.6.0/pr_2862.prdoc new file mode 100644 index 000000000000..fa136b5d98ac --- /dev/null +++ b/prdoc/1.6.0/pr_2862.prdoc @@ -0,0 +1,11 @@ +title: Return latest known relay chain block number in `on_initialize` etc. + +doc: + - audience: Runtime Dev + description: | + `RelaychainDataProvider` and `RelaychainBlockNumberProvider` will now return the latest known + relay chain block number in `on_initialize`, aka when `validation_data` wasn't yet set by + the inherent. + +crates: + - name: "cumulus-pallet-parachain-system" diff --git a/prdoc/1.6.0/pr_2883.prdoc b/prdoc/1.6.0/pr_2883.prdoc new file mode 100644 index 000000000000..e2817d16a03f --- /dev/null +++ b/prdoc/1.6.0/pr_2883.prdoc @@ -0,0 +1,9 @@ +title: "pallet-core-fellowship: import an unimported on approve" + +doc: + - audience: Runtime User + description: | + To align with the documentation of the approve call, we import an untracked member on approval. 
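To illustrate pr_2823 above: with `Precision::Exact` the deduction should now fail outright when the full amount is not available, instead of silently deducting less. The sketch below assumes the current five-argument form of `fungible::Unbalanced::decrease_balance` and that `pallet-balances` implements it; treat both as assumptions to check against the frame-support version in use.

```rust
// Hedged sketch for pr_2823: charge an exact amount or fail, never a partial
// deduction. Signature assumed from `frame_support::traits::fungible::Unbalanced`.
use frame_support::traits::{
    fungible::Unbalanced,
    tokens::{Fortitude, Precision, Preservation},
};
use sp_runtime::DispatchError;

fn charge_exact<T: pallet_balances::Config>(
    who: &T::AccountId,
    amount: T::Balance,
) -> Result<T::Balance, DispatchError> {
    // With `Precision::Exact` this errors unless `who` can afford the full
    // `amount`; `BestEffort` would instead deduct whatever is available.
    <pallet_balances::Pallet<T> as Unbalanced<T::AccountId>>::decrease_balance(
        who,
        amount,
        Precision::Exact,
        Preservation::Expendable,
        Fortitude::Polite,
    )
}
```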
+ +crates: + - name: "pallet-core-fellowship" diff --git a/prdoc/1.6.0/pr_2886.prdoc b/prdoc/1.6.0/pr_2886.prdoc new file mode 100644 index 000000000000..9fd97c11e111 --- /dev/null +++ b/prdoc/1.6.0/pr_2886.prdoc @@ -0,0 +1,13 @@ +# Schema: Polkadot SDK PRDoc Schema (prdoc) v1.0.0 +# See doc at https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json + +title: Remove bounds from `PrevalidateAttests` struct definition + +doc: + - audience: Runtime Dev + description: | + Minimal change to `PrevalidateAssets` to remove some trait bounds on the struct itself while + keeping all its capabilities. + +crates: + - name: polkadot-runtime-common diff --git a/prdoc/1.6.0/pr_2899.prdoc b/prdoc/1.6.0/pr_2899.prdoc new file mode 100644 index 000000000000..0c7afc0ad088 --- /dev/null +++ b/prdoc/1.6.0/pr_2899.prdoc @@ -0,0 +1,10 @@ +title: Improve storage monitor API + +doc: + - audience: Node Dev + description: | + This removes the need to unnecessarily provide a very specific data structure DatabaseSource and removes huge + sc-client-db dependency from storage monitor. It is now possible to use storage monitor with any path. + +crates: + - name: sc-storage-monitor diff --git a/prdoc/pr_2591.prdoc b/prdoc/pr_2591.prdoc deleted file mode 100644 index fe967cb67859..000000000000 --- a/prdoc/pr_2591.prdoc +++ /dev/null @@ -1,9 +0,0 @@ -title: Ensure to cleanup state in remove_member - -doc: - - audience: Runtime Dev - description: | - Cleanes up the state properly if a member of a ranked collective is removed. - -crates: - - name: pallet-ranked-collective diff --git a/prdoc/schema_user.json b/prdoc/schema_user.json index 60ff28d36264..82215d51866b 100644 --- a/prdoc/schema_user.json +++ b/prdoc/schema_user.json @@ -17,6 +17,16 @@ "type": "string", "description": "Title for the PR. This is what will show up in the release notes.\nif needed, you may provide a different title override for each audience in the `doc` property." 
}, + "author": { + "title": "Author handle", + "type": "string", + "description": "Author handle" + }, + "topic": { + "title": "Topic", + "type": "string", + "description": "Topic" + }, "doc": { "type": "array", diff --git a/scripts/release/build-changelogs.sh b/scripts/release/build-changelogs.sh new file mode 100755 index 000000000000..a9275f45a50c --- /dev/null +++ b/scripts/release/build-changelogs.sh @@ -0,0 +1,54 @@ +#!/usr/bin/env bash + +export PRODUCT=polkadot +export VERSION=${VERSION:-1.5.0} + +PROJECT_ROOT=`git rev-parse --show-toplevel` +echo $PROJECT_ROOT + +TMP=$(mktemp -d) +TEMPLATE_AUDIENCE="${PROJECT_ROOT}/scripts/release/templates/audience.md.tera" +TEMPLATE_CHANGELOG="${PROJECT_ROOT}/scripts/release/templates/changelog.md.tera" + +DATA_JSON="${TMP}/data.json" +CONTEXT_JSON="${TMP}/context.json" +echo -e "TEMPLATE_AUDIENCE: \t$TEMPLATE_AUDIENCE" +echo -e "DATA_JSON: \t\t$DATA_JSON" +echo -e "CONTEXT_JSON: \t\t$CONTEXT_JSON" + +# Create output folder +OUTPUT="${TMP}/changelogs/$PRODUCT/$VERSION" +echo -e "OUTPUT: \t\t$OUTPUT" +mkdir -p $OUTPUT + +prdoc load -d "$PROJECT_ROOT/prdoc/$VERSION" --json > $DATA_JSON +# ls -al $DATA_JSON + +cat $DATA_JSON | jq ' { "prdoc" : .}' > $CONTEXT_JSON +# ls -al $CONTEXT_JSON + +# Fetch the list of valid audiences +SCHEMA_URL=https://raw.githubusercontent.com/paritytech/polkadot-sdk/master/prdoc/schema_user.json +SCHEMA=$(curl -s $SCHEMA_URL | sed 's|^//.*||') +AUDIENCE_ARRAY=$(echo -E $SCHEMA | jq -r '."$defs".audience.oneOf[] | .const') + +readarray -t audiences < <(echo "$AUDIENCE_ARRAY") +declare -p audiences + + +# Generate a changelog +echo "Generating changelog..." +tera -t "${TEMPLATE_CHANGELOG}" --env --env-key env "${CONTEXT_JSON}" > "$OUTPUT/changelog.md" +echo "Changelog ready in $OUTPUT/changelog.md" + +# Generate a release notes doc per audience +for audience in "${audiences[@]}"; do + audience_id="$(tr [A-Z] [a-z] <<< "$audience")" + audience_id="$(tr ' ' '_' <<< "$audience_id")" + echo "Processing audience: $audience ($audience_id)" + export TARGET_AUDIENCE=$audience + tera -t "${TEMPLATE_AUDIENCE}" --env --env-key env "${CONTEXT_JSON}" > "$OUTPUT/relnote_${audience_id}.md" +done + +# Show the files +tree -s -h -c $OUTPUT/ diff --git a/scripts/release/templates/audience.md.tera b/scripts/release/templates/audience.md.tera new file mode 100644 index 000000000000..dc507053dd5a --- /dev/null +++ b/scripts/release/templates/audience.md.tera @@ -0,0 +1,13 @@ +## Release {{ env.PRODUCT }} {{ env.VERSION }} + +Changelog for `{{ env.TARGET_AUDIENCE }}`. 
+ +{% for file in prdoc -%} +#### PR #{{file.doc_filename.number}}: {{ file.content.title }} +{% for doc_item in file.content.doc %} +{%- if doc_item.audience == env.TARGET_AUDIENCE %} +{{ doc_item.description }} +{% endif -%} + +{%- endfor %} +{%- endfor %} diff --git a/scripts/release/templates/changelog.md.tera b/scripts/release/templates/changelog.md.tera new file mode 100644 index 000000000000..aaba761e8e47 --- /dev/null +++ b/scripts/release/templates/changelog.md.tera @@ -0,0 +1,7 @@ +## Changelog for `{{ env.PRODUCT | capitalize }} v{{ env.VERSION }}` + +{% for file in prdoc | sort(attribute="doc_filename.number") -%} +{%- set author= file.content.author | default(value="n/a") -%} +{%- set topic= file.content.topic | default(value="n/a") -%} +- #{{file.doc_filename.number}}: {{ file.content.title }} (@{{ author }}) [{{ topic | capitalize }}] +{% endfor -%} diff --git a/scripts/snowbridge_update_subtree.sh b/scripts/snowbridge_update_subtree.sh new file mode 100755 index 000000000000..2276bb35469f --- /dev/null +++ b/scripts/snowbridge_update_subtree.sh @@ -0,0 +1,66 @@ +#!/bin/bash + +# A script to udpate bridges repo as subtree to Cumulus +# Usage: +# ./scripts/update_subtree_snowbridge.sh fetch +# ./scripts/update_subtree_snowbridge.sh patch + +set -e + +SNOWBRIDGE_BRANCH="${SNOWBRIDGE_BRANCH:-main}" +POLKADOT_SDK_BRANCH="${POLKADOT_SDK_BRANCH:-master}" +SNOWBRIDGE_TARGET_DIR="${TARGET_DIR:-bridges/snowbridge}" + +function fetch() { + # the script is able to work only on clean git copy + [[ -z "$(git status --porcelain)" ]] || { + echo >&2 "The git copy must be clean (stash all your changes):"; + git status --porcelain + exit 1; + } + + local snowbridge_remote=$(git remote -v | grep "snowbridge.git (fetch)" | head -n1 | awk '{print $1;}') + if [ -z "$snowbridge_remote" ]; then + echo "Adding new remote: 'snowbridge' repo..." + git remote add -f snowbridge https://github.com/Snowfork/snowbridge.git + snowbridge_remote="snowbridge" + else + echo "Fetching remote: '${snowbridge_remote}' repo..." + git fetch https://github.com/Snowfork/snowbridge.git --prune + fi + + echo "Syncing/updating subtree with remote branch '${snowbridge_remote}/$SNOWBRIDGE_BRANCH' to target directory: '$SNOWBRIDGE_TARGET_DIR'" + git subtree pull --prefix=$SNOWBRIDGE_TARGET_DIR ${snowbridge_remote} $SNOWBRIDGE_BRANCH --squash +} + +function clean() { + echo "Patching/removing unneeded stuff from subtree in target directory: '$SNOWBRIDGE_TARGET_DIR'" + chmod +x $SNOWBRIDGE_TARGET_DIR/parachain/scripts/verify-pallets-build.sh + $SNOWBRIDGE_TARGET_DIR/parachain/scripts/verify-pallets-build.sh --ignore-git-state --no-revert +} + +function create_patch() { + [[ -z "$(git status --porcelain)" ]] || { + echo >&2 "The git copy must be clean (stash all your changes):"; + git status --porcelain + exit 1; + } + echo "Creating diff patch file to apply to snowbridge. No Cargo.toml files will be included in the patch." + git diff snowbridge/$SNOWBRIDGE_BRANCH $POLKADOT_SDK_BRANCH:bridges/snowbridge --diff-filter=ACM -- . 
':(exclude)*/Cargo.toml' > snowbridge.patch +} + +case "$1" in + fetch) + fetch + ;; + clean) + clean + ;; + create_patch) + create_patch + ;; + update) + fetch + clean + ;; +esac diff --git a/substrate/bin/minimal/node/Cargo.toml b/substrate/bin/minimal/node/Cargo.toml index 0054ee919b0f..b56b58574b2e 100644 --- a/substrate/bin/minimal/node/Cargo.toml +++ b/substrate/bin/minimal/node/Cargo.toml @@ -10,6 +10,9 @@ publish = false repository = "https://github.com/substrate-developer-hub/substrate-node-template/" build = "build.rs" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,10 +20,10 @@ targets = ["x86_64-unknown-linux-gnu"] name = "minimal-node" [dependencies] -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } futures = { version = "0.3.21", features = ["thread-pool"] } jsonrpsee = { version = "0.16.2", features = ["server"] } -serde_json = "1.0.108" +serde_json = "1.0.111" sc-cli = { path = "../../../client/cli", features = ["mixnet"] } sc-executor = { path = "../../../client/executor" } diff --git a/substrate/bin/minimal/node/src/service.rs b/substrate/bin/minimal/node/src/service.rs index b32de8f857ee..8d44fae8becf 100644 --- a/substrate/bin/minimal/node/src/service.rs +++ b/substrate/bin/minimal/node/src/service.rs @@ -39,19 +39,17 @@ pub(crate) type FullClient = type FullBackend = sc_service::TFullBackend; type FullSelectChain = sc_consensus::LongestChain; -pub fn new_partial( - config: &Configuration, -) -> Result< - sc_service::PartialComponents< - FullClient, - FullBackend, - FullSelectChain, - sc_consensus::DefaultImportQueue, - sc_transaction_pool::FullPool, - Option, - >, - ServiceError, -> { +/// Assembly of PartialComponents (enough to run chain ops subcommands) +pub type Service = sc_service::PartialComponents< + FullClient, + FullBackend, + FullSelectChain, + sc_consensus::DefaultImportQueue, + sc_transaction_pool::FullPool, + Option, +>; + +pub fn new_partial(config: &Configuration) -> Result { let telemetry = config .telemetry_endpoints .clone() diff --git a/substrate/bin/minimal/runtime/Cargo.toml b/substrate/bin/minimal/runtime/Cargo.toml index f7685642d274..296106544bbf 100644 --- a/substrate/bin/minimal/runtime/Cargo.toml +++ b/substrate/bin/minimal/runtime/Cargo.toml @@ -8,6 +8,9 @@ repository.workspace = true license.workspace = true publish = false +[lints] +workspace = true + [dependencies] parity-scale-codec = { version = "3.0.0", default-features = false } scale-info = { version = "2.6.0", default-features = false } diff --git a/substrate/bin/node-template/node/Cargo.toml b/substrate/bin/node-template/node/Cargo.toml index 06b4b0490efb..d2ed182d0809 100644 --- a/substrate/bin/node-template/node/Cargo.toml +++ b/substrate/bin/node-template/node/Cargo.toml @@ -10,6 +10,9 @@ publish = false repository = "https://github.com/substrate-developer-hub/substrate-node-template/" build = "build.rs" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,9 +20,9 @@ targets = ["x86_64-unknown-linux-gnu"] name = "node-template" [dependencies] -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } futures = { version = "0.3.21", features = ["thread-pool"] } -serde_json = "1.0.108" +serde_json = "1.0.111" sc-cli = { path = "../../../client/cli", features = ["mixnet"] } sp-core = { path = "../../../primitives/core" } diff --git 
a/substrate/bin/node-template/node/src/rpc.rs b/substrate/bin/node-template/node/src/rpc.rs index f4f1540f732f..246391adcbbe 100644 --- a/substrate/bin/node-template/node/src/rpc.rs +++ b/substrate/bin/node-template/node/src/rpc.rs @@ -53,5 +53,12 @@ where // to call into the runtime. // `module.merge(YourRpcTrait::into_rpc(YourRpcStruct::new(ReferenceToClient, ...)))?;` + // You probably want to enable the `rpc v2 chainSpec` API as well + // + // let chain_name = chain_spec.name().to_string(); + // let genesis_hash = client.block_hash(0).ok().flatten().expect("Genesis block exists; qed"); + // let properties = chain_spec.properties(); + // module.merge(ChainSpec::new(chain_name, genesis_hash, properties).into_rpc())?; + Ok(module) } diff --git a/substrate/bin/node-template/node/src/service.rs b/substrate/bin/node-template/node/src/service.rs index 8dba2abbadd4..cf5260ad9f4b 100644 --- a/substrate/bin/node-template/node/src/service.rs +++ b/substrate/bin/node-template/node/src/service.rs @@ -23,29 +23,20 @@ type FullSelectChain = sc_consensus::LongestChain; /// imported and generated. const GRANDPA_JUSTIFICATION_PERIOD: u32 = 512; -#[allow(clippy::type_complexity)] -pub fn new_partial( - config: &Configuration, -) -> Result< - sc_service::PartialComponents< - FullClient, - FullBackend, - FullSelectChain, - sc_consensus::DefaultImportQueue, - sc_transaction_pool::FullPool, - ( - sc_consensus_grandpa::GrandpaBlockImport< - FullBackend, - Block, - FullClient, - FullSelectChain, - >, - sc_consensus_grandpa::LinkHalf, - Option, - ), - >, - ServiceError, -> { +pub type Service = sc_service::PartialComponents< + FullClient, + FullBackend, + FullSelectChain, + sc_consensus::DefaultImportQueue, + sc_transaction_pool::FullPool, + ( + sc_consensus_grandpa::GrandpaBlockImport, + sc_consensus_grandpa::LinkHalf, + Option, + ), +>; + +pub fn new_partial(config: &Configuration) -> Result { let telemetry = config .telemetry_endpoints .clone() diff --git a/substrate/bin/node-template/pallets/template/Cargo.toml b/substrate/bin/node-template/pallets/template/Cargo.toml index 405d9c229f88..51410a71c7bc 100644 --- a/substrate/bin/node-template/pallets/template/Cargo.toml +++ b/substrate/bin/node-template/pallets/template/Cargo.toml @@ -9,6 +9,9 @@ license = "MIT-0" publish = false repository = "https://github.com/substrate-developer-hub/substrate-node-template/" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/bin/node-template/runtime/Cargo.toml b/substrate/bin/node-template/runtime/Cargo.toml index 4c79bbfd17fb..c1c10b1c087c 100644 --- a/substrate/bin/node-template/runtime/Cargo.toml +++ b/substrate/bin/node-template/runtime/Cargo.toml @@ -9,6 +9,9 @@ license = "MIT-0" publish = false repository = "https://github.com/substrate-developer-hub/substrate-node-template/" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/bin/node/bench/Cargo.toml b/substrate/bin/node/bench/Cargo.toml index 903eb4de7e6a..e2b68b2a0e83 100644 --- a/substrate/bin/node/bench/Cargo.toml +++ b/substrate/bin/node/bench/Cargo.toml @@ -9,11 +9,14 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] array-bytes = "6.1" -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } log = "0.4.17" 
node-primitives = { path = "../primitives" } node-testing = { path = "../testing" } @@ -21,8 +24,8 @@ kitchensink-runtime = { path = "../runtime" } sc-client-api = { path = "../../../client/api" } sp-runtime = { path = "../../../primitives/runtime" } sp-state-machine = { path = "../../../primitives/state-machine" } -serde = "1.0.193" -serde_json = "1.0.108" +serde = "1.0.195" +serde_json = "1.0.111" derive_more = { version = "0.99.17", default-features = false, features = ["display"] } kvdb = "0.13.0" kvdb-rocksdb = "0.19.0" diff --git a/substrate/bin/node/cli/Cargo.toml b/substrate/bin/node/cli/Cargo.toml index eac662bbcab2..32186bc5539f 100644 --- a/substrate/bin/node/cli/Cargo.toml +++ b/substrate/bin/node/cli/Cargo.toml @@ -11,6 +11,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.wasm-pack.profile.release] # `wasm-opt` has some problems on linux, see # https://github.com/rustwasm/wasm-pack/issues/781 etc. @@ -38,9 +41,9 @@ crate-type = ["cdylib", "rlib"] [dependencies] # third-party dependencies array-bytes = "6.1" -clap = { version = "4.4.10", features = ["derive"], optional = true } +clap = { version = "4.4.14", features = ["derive"], optional = true } codec = { package = "parity-scale-codec", version = "3.6.1" } -serde = { version = "1.0.193", features = ["derive"] } +serde = { version = "1.0.195", features = ["derive"] } jsonrpsee = { version = "0.16.2", features = ["server"] } futures = "0.3.21" log = "0.4.17" @@ -103,7 +106,7 @@ sc-cli = { path = "../../../client/cli", optional = true } frame-benchmarking-cli = { path = "../../../utils/frame/benchmarking-cli", optional = true } node-inspect = { package = "staging-node-inspect", path = "../inspect", optional = true } try-runtime-cli = { path = "../../../utils/frame/try-runtime/cli", optional = true } -serde_json = "1.0.108" +serde_json = "1.0.111" [dev-dependencies] sc-keystore = { path = "../../../client/keystore" } @@ -143,13 +146,13 @@ sp-consensus-babe = { path = "../../../primitives/consensus/babe" } sp-externalities = { path = "../../../primitives/externalities" } sp-keyring = { path = "../../../primitives/keyring" } sp-runtime = { path = "../../../primitives/runtime" } -serde_json = "1.0.108" +serde_json = "1.0.111" scale-info = { version = "2.10.0", features = ["derive", "serde"] } sp-trie = { path = "../../../primitives/trie" } sp-state-machine = { path = "../../../primitives/state-machine" } [build-dependencies] -clap = { version = "4.4.10", optional = true } +clap = { version = "4.4.14", optional = true } clap_complete = { version = "4.0.2", optional = true } node-inspect = { package = "staging-node-inspect", path = "../inspect", optional = true } frame-benchmarking-cli = { path = "../../../utils/frame/benchmarking-cli", optional = true } diff --git a/substrate/bin/node/cli/src/chain_spec.rs b/substrate/bin/node/cli/src/chain_spec.rs index 023ccc459f61..17010227ab90 100644 --- a/substrate/bin/node/cli/src/chain_spec.rs +++ b/substrate/bin/node/cli/src/chain_spec.rs @@ -86,7 +86,11 @@ fn configure_accounts_for_staging_testnet( // // and // - // for i in 1 2 3 4 ; do for j in session; do subkey --ed25519 inspect "$secret"//fir//$j//$i; done; done + // for i in 1 2 3 4 ; do for j in session; do subkey inspect --scheme ed25519 "$secret"//fir//$j//$i; done; done + // + // and + // + // for i in 1 2 3 4 ; do for j in session; do subkey inspect --scheme ecdsa "$secret"//fir//$j//$i; done; done let initial_authorities: Vec<( AccountId, diff --git 
a/substrate/bin/node/cli/src/service.rs b/substrate/bin/node/cli/src/service.rs index 35d4fe9ee044..3c7261b9c684 100644 --- a/substrate/bin/node/cli/src/service.rs +++ b/substrate/bin/node/cli/src/service.rs @@ -38,7 +38,7 @@ use sc_transaction_pool_api::OffchainTransactionPoolFactory; use sp_api::ProvideRuntimeApi; use sp_core::crypto::Pair; use sp_runtime::{generic, traits::Block as BlockT, SaturatedConversion}; -use std::sync::Arc; +use std::{path::Path, sync::Arc}; /// Host functions required for kitchensink runtime and Substrate node. #[cfg(not(feature = "runtime-benchmarks"))] @@ -262,30 +262,31 @@ pub fn new_partial( let chain_spec = config.chain_spec.cloned_box(); let rpc_backend = backend.clone(); - let rpc_extensions_builder = move |deny_unsafe, subscription_executor| { - let deps = node_rpc::FullDeps { - client: client.clone(), - pool: pool.clone(), - select_chain: select_chain.clone(), - chain_spec: chain_spec.cloned_box(), - deny_unsafe, - babe: node_rpc::BabeDeps { - keystore: keystore.clone(), - babe_worker_handle: babe_worker_handle.clone(), - }, - grandpa: node_rpc::GrandpaDeps { - shared_voter_state: shared_voter_state.clone(), - shared_authority_set: shared_authority_set.clone(), - justification_stream: justification_stream.clone(), - subscription_executor, - finality_provider: finality_proof_provider.clone(), - }, - backend: rpc_backend.clone(), - mixnet_api: mixnet_api.as_ref().cloned(), - }; + let rpc_extensions_builder = + move |deny_unsafe, subscription_executor: node_rpc::SubscriptionTaskExecutor| { + let deps = node_rpc::FullDeps { + client: client.clone(), + pool: pool.clone(), + select_chain: select_chain.clone(), + chain_spec: chain_spec.cloned_box(), + deny_unsafe, + babe: node_rpc::BabeDeps { + keystore: keystore.clone(), + babe_worker_handle: babe_worker_handle.clone(), + }, + grandpa: node_rpc::GrandpaDeps { + shared_voter_state: shared_voter_state.clone(), + shared_authority_set: shared_authority_set.clone(), + justification_stream: justification_stream.clone(), + subscription_executor: subscription_executor.clone(), + finality_provider: finality_proof_provider.clone(), + }, + backend: rpc_backend.clone(), + mixnet_api: mixnet_api.as_ref().cloned(), + }; - node_rpc::create_full(deps).map_err(Into::into) - }; + node_rpc::create_full(deps).map_err(Into::into) + }; (rpc_extensions_builder, shared_voter_state2) }; @@ -328,6 +329,15 @@ pub fn new_full_base( &sc_consensus_babe::BabeLink, ), ) -> Result { + let role = config.role.clone(); + let force_authoring = config.force_authoring; + let backoff_authoring_blocks = + Some(sc_consensus_slots::BackoffAuthoringOnFinalizedHeadLagging::default()); + let name = config.network.node_name.clone(); + let enable_grandpa = !config.disable_grandpa; + let prometheus_registry = config.prometheus_registry().cloned(); + let enable_offchain_worker = config.offchain_worker.enabled; + let hwbench = (!disable_hardware_benchmarks) .then_some(config.database.path().map(|database_path| { let _ = std::fs::create_dir_all(&database_path); @@ -401,15 +411,6 @@ pub fn new_full_base( task_manager.spawn_handle().spawn("mixnet", None, mixnet); } - let role = config.role.clone(); - let force_authoring = config.force_authoring; - let backoff_authoring_blocks = - Some(sc_consensus_slots::BackoffAuthoringOnFinalizedHeadLagging::default()); - let name = config.network.node_name.clone(); - let enable_grandpa = !config.disable_grandpa; - let prometheus_registry = config.prometheus_registry().cloned(); - let enable_offchain_worker = 
config.offchain_worker.enabled; - let rpc_handlers = sc_service::spawn_tasks(sc_service::SpawnTasksParams { config, backend: backend.clone(), @@ -606,16 +607,18 @@ pub fn new_full_base( /// Builds a new service for a full client. pub fn new_full(config: Configuration, cli: Cli) -> Result { let mixnet_config = cli.mixnet_params.config(config.role.is_authority()); - let database_source = config.database.clone(); + let database_path = config.database.path().map(Path::to_path_buf); let task_manager = new_full_base(config, mixnet_config, cli.no_hardware_benchmarks, |_, _| ()) .map(|NewFullBase { task_manager, .. }| task_manager)?; - sc_storage_monitor::StorageMonitorService::try_spawn( - cli.storage_monitor, - database_source, - &task_manager.spawn_essential_handle(), - ) - .map_err(|e| ServiceError::Application(e.into()))?; + if let Some(database_path) = database_path { + sc_storage_monitor::StorageMonitorService::try_spawn( + cli.storage_monitor, + database_path, + &task_manager.spawn_essential_handle(), + ) + .map_err(|e| ServiceError::Application(e.into()))?; + } Ok(task_manager) } diff --git a/substrate/bin/node/executor/src/lib.rs b/substrate/bin/node/executor/src/lib.rs index de43ced959e6..f3eea51d4a4b 100644 --- a/substrate/bin/node/executor/src/lib.rs +++ b/substrate/bin/node/executor/src/lib.rs @@ -19,4 +19,10 @@ //! A `CodeExecutor` specialization which uses natively compiled runtime when the wasm to be //! executed is equivalent to the natively compiled code. -pub type ExtendHostFunctions = (frame_benchmarking::benchmarking::HostFunctions,); +#[no_mangle] +#[polkavm_derive::polkavm_export] +pub extern "C" fn deploy() {} + +#[no_mangle] +#[polkavm_derive::polkavm_export] +pub extern "C" fn call() {} diff --git a/substrate/bin/node/inspect/Cargo.toml b/substrate/bin/node/inspect/Cargo.toml index 7d77103f29c9..c5a364f22c67 100644 --- a/substrate/bin/node/inspect/Cargo.toml +++ b/substrate/bin/node/inspect/Cargo.toml @@ -8,11 +8,14 @@ license = "GPL-3.0-or-later WITH Classpath-exception-2.0" homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } codec = { package = "parity-scale-codec", version = "3.6.1" } thiserror = "1.0" sc-cli = { path = "../../../client/cli", features = ["mixnet"] } diff --git a/substrate/bin/node/primitives/Cargo.toml b/substrate/bin/node/primitives/Cargo.toml index 40735ff21d44..24279ad09c3d 100644 --- a/substrate/bin/node/primitives/Cargo.toml +++ b/substrate/bin/node/primitives/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/bin/node/rpc/Cargo.toml b/substrate/bin/node/rpc/Cargo.toml index ec1c6c163889..bd025b767ae1 100644 --- a/substrate/bin/node/rpc/Cargo.toml +++ b/substrate/bin/node/rpc/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/bin/node/rpc/src/lib.rs b/substrate/bin/node/rpc/src/lib.rs index 279e76b30851..76068ae5b15c 100644 --- a/substrate/bin/node/rpc/src/lib.rs +++ b/substrate/bin/node/rpc/src/lib.rs @@ -41,7 +41,7 @@ use sc_consensus_babe::BabeWorkerHandle; use 
sc_consensus_grandpa::{ FinalityProofProvider, GrandpaJustificationStream, SharedAuthoritySet, SharedVoterState, }; -use sc_rpc::SubscriptionTaskExecutor; +pub use sc_rpc::SubscriptionTaskExecutor; pub use sc_rpc_api::DenyUnsafe; use sc_transaction_pool_api::TransactionPool; use sp_api::ProvideRuntimeApi; diff --git a/substrate/bin/node/runtime/Cargo.toml b/substrate/bin/node/runtime/Cargo.toml index b9bba98cad0b..c4fdb1c319d5 100644 --- a/substrate/bin/node/runtime/Cargo.toml +++ b/substrate/bin/node/runtime/Cargo.toml @@ -10,6 +10,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -23,7 +26,7 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = scale-info = { version = "2.10.0", default-features = false, features = ["derive", "serde"] } static_assertions = "1.1.0" log = { version = "0.4.17", default-features = false } -serde_json = { version = "1.0.108", default-features = false, features = ["alloc", "arbitrary_precision"] } +serde_json = { version = "1.0.111", default-features = false, features = ["alloc", "arbitrary_precision"] } # pallet-asset-conversion: turn on "num-traits" feature primitive-types = { version = "0.12.0", default-features = false, features = ["codec", "num-traits", "scale-info"] } @@ -242,3 +245,7 @@ try-runtime = [ "pallet-utility/try-runtime", "sp-runtime/try-runtime", ] +experimental = [ + "frame-support/experimental", + "frame-system/experimental", +] diff --git a/substrate/bin/node/runtime/src/lib.rs b/substrate/bin/node/runtime/src/lib.rs index 4844de6e5fac..b9c8b4d43bfa 100644 --- a/substrate/bin/node/runtime/src/lib.rs +++ b/substrate/bin/node/runtime/src/lib.rs @@ -37,6 +37,8 @@ use frame_support::{ pallet_prelude::Get, parameter_types, traits::{ + fungible::{NativeFromLeft, NativeOrWithId, UnionOf}, + tokens::imbalance::ResolveAssetTo, AsEnsureOriginWithArg, ConstU128, ConstU32, Contains, Currency, EqualPrivilegeOnly, Imbalance, InsideBoth, KeyOwnerProofSystem, OnUnbalanced, }, @@ -54,7 +56,7 @@ use frame_system::{ }; pub use node_primitives::{AccountId, Signature}; use node_primitives::{Balance, BlockNumber, Hash, Moment, Nonce}; -use pallet_asset_conversion::{NativeOrAssetId, NativeOrAssetIdConverter}; +use pallet_asset_conversion::{Ascending, Chain, WithFirstAsset}; use pallet_im_online::sr25519::AuthorityId as ImOnlineId; use pallet_session::historical as pallet_session_historical; pub use pallet_transaction_payment::{CurrencyAdapter, Multiplier, TargetedFeeAdjustment}; @@ -113,7 +115,7 @@ include!(concat!(env!("OUT_DIR"), "/wasm_binary.rs")); /// Max size for serialized extrinsic params for this testing runtime. /// This is a quite arbitrary but empirically battle tested value. #[cfg(test)] -pub const CALL_PARAMS_MAX_SIZE: usize = 208; +pub const CALL_PARAMS_MAX_SIZE: usize = 244; /// Wasm binary unwrapped. If built with `SKIP_WASM_BUILD`, the function panics. 
#[cfg(feature = "std")] @@ -415,8 +417,11 @@ impl pallet_asset_tx_payment::Config for Runtime { impl pallet_asset_conversion_tx_payment::Config for Runtime { type RuntimeEvent = RuntimeEvent; type Fungibles = Assets; - type OnChargeAssetTransaction = - pallet_asset_conversion_tx_payment::AssetConversionAdapter; + type OnChargeAssetTransaction = pallet_asset_conversion_tx_payment::AssetConversionAdapter< + Balances, + AssetConversion, + Native, + >; } impl pallet_skip_feeless_payment::Config for Runtime { @@ -483,6 +488,7 @@ parameter_types! { pub const RewardCurve: &'static PiecewiseLinear<'static> = &REWARD_CURVE; pub const MaxNominators: u32 = 64; pub const OffendingValidatorsThreshold: Perbill = Perbill::from_percent(17); + pub const MaxControllersInDeprecationBatch: u32 = 5900; pub OffchainRepeat: BlockNumber = 5; pub HistoryDepth: u32 = 84; } @@ -522,6 +528,7 @@ impl pallet_staking::Config for Runtime { // This a placeholder, to be introduced in the next PR as an instance of bags-list type TargetList = pallet_staking::UseValidatorsMap; type MaxUnlockingChunks = ConstU32<32>; + type MaxControllersInDeprecationBatch = MaxControllersInDeprecationBatch; type HistoryDepth = HistoryDepth; type EventListeners = (); type WeightInfo = pallet_staking::weights::SubstrateWeight; @@ -539,8 +546,6 @@ parameter_types! { pub const SignedDepositIncreaseFactor: Percent = Percent::from_percent(10); pub const SignedDepositByte: Balance = 1 * CENTS; - pub BetterUnsignedThreshold: Perbill = Perbill::from_rational(1u32, 10_000); - // miner configs pub const MultiPhaseUnsignedPriority: TransactionPriority = StakingUnsignedPriority::get() - 1u64; pub MinerMaxWeight: Weight = RuntimeBlockWeights::get() @@ -817,33 +822,34 @@ impl pallet_assets::Config for Runtime { parameter_types! { pub const AssetConversionPalletId: PalletId = PalletId(*b"py/ascon"); - pub AllowMultiAssetPools: bool = true; pub const PoolSetupFee: Balance = 1 * DOLLARS; // should be more or equal to the existential deposit pub const MintMinLiquidity: Balance = 100; // 100 is good enough when the main currency has 10-12 decimals. - pub const LiquidityWithdrawalFee: Permill = Permill::from_percent(0); // should be non-zero if AllowMultiAssetPools is true, otherwise can be zero. 
+ pub const LiquidityWithdrawalFee: Permill = Permill::from_percent(0); + pub const Native: NativeOrWithId = NativeOrWithId::Native; } impl pallet_asset_conversion::Config for Runtime { type RuntimeEvent = RuntimeEvent; - type Currency = Balances; - type AssetBalance = ::Balance; - type HigherPrecisionBalance = sp_core::U256; - type Assets = Assets; type Balance = u128; - type PoolAssets = PoolAssets; - type AssetId = >::AssetId; - type MultiAssetId = NativeOrAssetId; + type HigherPrecisionBalance = sp_core::U256; + type AssetKind = NativeOrWithId; + type Assets = UnionOf, AccountId>; + type PoolId = (Self::AssetKind, Self::AssetKind); + type PoolLocator = Chain< + WithFirstAsset>, + Ascending>, + >; type PoolAssetId = >::AssetId; + type PoolAssets = PoolAssets; + type PoolSetupFee = PoolSetupFee; + type PoolSetupFeeAsset = Native; + type PoolSetupFeeTarget = ResolveAssetTo; type PalletId = AssetConversionPalletId; type LPFee = ConstU32<3>; // means 0.3% - type PoolSetupFee = PoolSetupFee; - type PoolSetupFeeReceiver = AssetConversionOrigin; type LiquidityWithdrawalFee = LiquidityWithdrawalFee; type WeightInfo = pallet_asset_conversion::weights::SubstrateWeight; - type AllowMultiAssetPools = AllowMultiAssetPools; type MaxSwapPathLength = ConstU32<4>; type MintMinLiquidity = MintMinLiquidity; - type MultiAssetIdConverter = NativeOrAssetIdConverter; #[cfg(feature = "runtime-benchmarks")] type BenchmarkHelper = (); } @@ -1245,20 +1251,19 @@ impl_runtime_apis! { impl pallet_asset_conversion::AssetConversionApi< Block, Balance, - u128, - NativeOrAssetId + NativeOrWithId > for Runtime { - fn quote_price_exact_tokens_for_tokens(asset1: NativeOrAssetId, asset2: NativeOrAssetId, amount: u128, include_fee: bool) -> Option { + fn quote_price_exact_tokens_for_tokens(asset1: NativeOrWithId, asset2: NativeOrWithId, amount: Balance, include_fee: bool) -> Option { AssetConversion::quote_price_exact_tokens_for_tokens(asset1, asset2, amount, include_fee) } - fn quote_price_tokens_for_exact_tokens(asset1: NativeOrAssetId, asset2: NativeOrAssetId, amount: u128, include_fee: bool) -> Option { + fn quote_price_tokens_for_exact_tokens(asset1: NativeOrWithId, asset2: NativeOrWithId, amount: Balance, include_fee: bool) -> Option { AssetConversion::quote_price_tokens_for_exact_tokens(asset1, asset2, amount, include_fee) } - fn get_reserves(asset1: NativeOrAssetId, asset2: NativeOrAssetId) -> Option<(Balance, Balance)> { - AssetConversion::get_reserves(&asset1, &asset2).ok() + fn get_reserves(asset1: NativeOrWithId, asset2: NativeOrWithId) -> Option<(Balance, Balance)> { + AssetConversion::get_reserves(asset1, asset2).ok() } } diff --git a/substrate/bin/node/testing/Cargo.toml b/substrate/bin/node/testing/Cargo.toml index 8dcacbfc9f7f..08acca4908dd 100644 --- a/substrate/bin/node/testing/Cargo.toml +++ b/substrate/bin/node/testing/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/bin/node/testing/src/genesis.rs b/substrate/bin/node/testing/src/genesis.rs index a883bb831cc9..350a1101f168 100644 --- a/substrate/bin/node/testing/src/genesis.rs +++ b/substrate/bin/node/testing/src/genesis.rs @@ -25,7 +25,7 @@ use kitchensink_runtime::{ GrandpaConfig, RuntimeGenesisConfig, SessionConfig, StakerStatus, StakingConfig, BABE_GENESIS_EPOCH_CONFIG, }; -use sp_keyring::{Ed25519Keyring, Sr25519Keyring}; +use sp_keyring::Ed25519Keyring; use 
sp_runtime::Perbill; /// Create genesis runtime configuration for tests. @@ -52,13 +52,9 @@ pub fn config_endowed(extra_endowed: Vec) -> RuntimeGenesisConfig { balances: BalancesConfig { balances: endowed }, session: SessionConfig { keys: vec![ - (alice(), dave(), to_session_keys(&Ed25519Keyring::Alice, &Sr25519Keyring::Alice)), - (bob(), eve(), to_session_keys(&Ed25519Keyring::Bob, &Sr25519Keyring::Bob)), - ( - charlie(), - ferdie(), - to_session_keys(&Ed25519Keyring::Charlie, &Sr25519Keyring::Charlie), - ), + (alice(), dave(), session_keys_from_seed(Ed25519Keyring::Alice.into())), + (bob(), eve(), session_keys_from_seed(Ed25519Keyring::Bob.into())), + (charlie(), ferdie(), session_keys_from_seed(Ed25519Keyring::Charlie.into())), ], }, staking: StakingConfig { diff --git a/substrate/bin/node/testing/src/keyring.rs b/substrate/bin/node/testing/src/keyring.rs index db984452a885..ace1eabfe759 100644 --- a/substrate/bin/node/testing/src/keyring.rs +++ b/substrate/bin/node/testing/src/keyring.rs @@ -21,8 +21,10 @@ use codec::Encode; use kitchensink_runtime::{CheckedExtrinsic, SessionKeys, SignedExtra, UncheckedExtrinsic}; +use node_cli::chain_spec::get_from_seed; use node_primitives::{AccountId, Balance, Nonce}; -use sp_keyring::{AccountKeyring, Ed25519Keyring, Sr25519Keyring}; +use sp_core::{ed25519, sr25519}; +use sp_keyring::AccountKeyring; use sp_runtime::generic::Era; /// Alice's account id. @@ -56,15 +58,12 @@ pub fn ferdie() -> AccountId { } /// Convert keyrings into `SessionKeys`. -pub fn to_session_keys( - ed25519_keyring: &Ed25519Keyring, - sr25519_keyring: &Sr25519Keyring, -) -> SessionKeys { +pub fn session_keys_from_seed(seed: &str) -> SessionKeys { SessionKeys { - grandpa: ed25519_keyring.to_owned().public().into(), - babe: sr25519_keyring.to_owned().public().into(), - im_online: sr25519_keyring.to_owned().public().into(), - mixnet: sr25519_keyring.to_owned().public().into(), + grandpa: get_from_seed::(seed).into(), + babe: get_from_seed::(seed).into(), + im_online: get_from_seed::(seed).into(), + mixnet: get_from_seed::(seed).into(), } } diff --git a/substrate/bin/utils/chain-spec-builder/Cargo.toml b/substrate/bin/utils/chain-spec-builder/Cargo.toml index b8402ed9f70e..e9dc4d83b6bf 100644 --- a/substrate/bin/utils/chain-spec-builder/Cargo.toml +++ b/substrate/bin/utils/chain-spec-builder/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -20,10 +23,8 @@ name = "chain-spec-builder" crate-type = ["rlib"] [dependencies] -clap = { version = "4.4.6", features = ["derive"] } -rand = "0.8" -kitchensink-runtime = { version = "3.0.0-dev", path = "../../node/runtime" } +clap = { version = "4.4.14", features = ["derive"] } log = "0.4.17" sc-chain-spec = { path = "../../../client/chain-spec" } -serde_json = "1.0.108" +serde_json = "1.0.111" sp-tracing = { version = "10.0.0", path = "../../../primitives/tracing" } diff --git a/substrate/bin/utils/chain-spec-builder/src/lib.rs b/substrate/bin/utils/chain-spec-builder/src/lib.rs index 8c678170e597..6e27956451aa 100644 --- a/substrate/bin/utils/chain-spec-builder/src/lib.rs +++ b/substrate/bin/utils/chain-spec-builder/src/lib.rs @@ -31,46 +31,52 @@ //! ## Typical use-cases. //! ##### Get default config from runtime. //! -//! Query the default genesis config from the provided `runtime.wasm` and use it in the chain -//! spec. 
Tool can also store runtime's default genesis config in given file: -//! ```text -//! chain-spec-builder create -r runtime.wasm default /dev/stdout +//! Query the default genesis config from the provided `runtime.wasm` and use it in the chain +//! spec. The tool allows specifying where to write the chain spec, and optionally also where to +//! write the default genesis state config (which is `/dev/stdout` in the following example): +//! ```text +//! chain-spec-builder --chain_spec_path ./my_chain_spec.json create -r runtime.wasm default /dev/stdout //! ``` -//! -//! _Note:_ [`GenesisBuilder::create_default_config`][sp-genesis-builder-create] runtime function is called. +//! +//! _Note:_ [`GenesisBuilder::create_default_config`][sp-genesis-builder-create] runtime function is +//! called. //! //! //! ##### Generate raw storage chain spec using genesis config patch. //! //! Patch the runtime's default genesis config with provided `patch.json` and generate raw //! storage (`-s`) version of chain spec: -//! ```text +//! +//! ```bash //! chain-spec-builder create -s -r runtime.wasm patch patch.json //! ``` -//! +//! //! _Note:_ [`GenesisBuilder::build_config`][sp-genesis-builder-build] runtime function is called. //! //! ##### Generate raw storage chain spec using full genesis config. //! //! Build the chain spec using provided full genesis config json file. No defaults will be used: -//! ```text +//! +//! ```bash //! chain-spec-builder create -s -r runtime.wasm full full-genesis-config.json //! ``` -//! +//! //! _Note_: [`GenesisBuilder::build_config`][sp-genesis-builder-build] runtime function is called. //! //! ##### Generate human readable chain spec using provided genesis config patch. -//! ```text +//! ```bash //! chain-spec-builder create -r runtime.wasm patch patch.json //! ``` -//! +//! //! ##### Generate human readable chain spec using provided full genesis config. -//! ```text +//! +//! ```bash //! chain-spec-builder create -r runtime.wasm full full-genesis-config.json //! ``` -//! +//! //! ##### Extra tools. -//! The `chain-spec-builder` provides also some extra utilities: [`VerifyCmd`], [`ConvertToRawCmd`], [`UpdateCodeCmd`]. +//! The `chain-spec-builder` also provides some extra utilities: [`VerifyCmd`], [`ConvertToRawCmd`], +//! [`UpdateCodeCmd`]. //! //! [`sc-chain-spec`]: ../sc_chain_spec/index.html //! [`node-cli`]: ../node_cli/index.html diff --git a/substrate/bin/utils/subkey/Cargo.toml b/substrate/bin/utils/subkey/Cargo.toml index 9a53b7e32fd0..822068f6614d 100644 --- a/substrate/bin/utils/subkey/Cargo.toml +++ b/substrate/bin/utils/subkey/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,5 +20,5 @@ path = "src/main.rs" name = "subkey" [dependencies] -clap = { version = "4.4.10", features = ["derive"] } -sc-cli = { path = "../../../client/cli", features = ["mixnet"] } +clap = { version = "4.4.14", features = ["derive"] } +sc-cli = { path = "../../../client/cli" } diff --git a/substrate/client/allocator/Cargo.toml b/substrate/client/allocator/Cargo.toml index 31c714180ce5..ef13c1a4573f 100644 --- a/substrate/client/allocator/Cargo.toml +++ b/substrate/client/allocator/Cargo.toml @@ -10,6 +10,9 @@ description = "Collection of allocator implementations."
documentation = "https://docs.rs/sc-allocator" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/api/Cargo.toml b/substrate/client/api/Cargo.toml index 283e0d1ec363..e6c696dae389 100644 --- a/substrate/client/api/Cargo.toml +++ b/substrate/client/api/Cargo.toml @@ -10,6 +10,9 @@ description = "Substrate client interfaces." documentation = "https://docs.rs/sc-client-api" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/authority-discovery/Cargo.toml b/substrate/client/authority-discovery/Cargo.toml index 372b6607a886..bb82b032d0c3 100644 --- a/substrate/client/authority-discovery/Cargo.toml +++ b/substrate/client/authority-discovery/Cargo.toml @@ -10,6 +10,9 @@ repository.workspace = true description = "Substrate authority discovery." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -21,7 +24,7 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = futures = "0.3.21" futures-timer = "3.0.1" ip_network = "0.4.1" -libp2p = { version = "0.51.3", features = ["ed25519", "kad"] } +libp2p = { version = "0.51.4", features = ["ed25519", "kad"] } multihash = { version = "0.18.1", default-features = false, features = [ "sha2", "std", @@ -39,7 +42,7 @@ sp-blockchain = { path = "../../primitives/blockchain" } sp-core = { path = "../../primitives/core" } sp-keystore = { path = "../../primitives/keystore" } sp-runtime = { path = "../../primitives/runtime" } -async-trait = "0.1.56" +async-trait = "0.1.74" multihash-codetable = { version = "0.1.1", features = [ "digest", "serde", diff --git a/substrate/client/basic-authorship/Cargo.toml b/substrate/client/basic-authorship/Cargo.toml index 1d60fc7f53e3..926909ec7b76 100644 --- a/substrate/client/basic-authorship/Cargo.toml +++ b/substrate/client/basic-authorship/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Basic implementation of block-authoring logic." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/block-builder/Cargo.toml b/substrate/client/block-builder/Cargo.toml index 852ee84f89b8..4477f5f1d776 100644 --- a/substrate/client/block-builder/Cargo.toml +++ b/substrate/client/block-builder/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Substrate block builder" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/chain-spec/Cargo.toml b/substrate/client/chain-spec/Cargo.toml index c4ad3778aab0..8e04f6a8f18c 100644 --- a/substrate/client/chain-spec/Cargo.toml +++ b/substrate/client/chain-spec/Cargo.toml @@ -9,13 +9,16 @@ repository.workspace = true description = "Substrate chain configurations." 
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", features = ["derive"] } -serde_json = "1.0.108" +serde = { version = "1.0.195", features = ["derive"] } +serde_json = "1.0.111" sc-client-api = { path = "../api" } sc-chain-spec-derive = { path = "derive" } sc-executor = { path = "../executor" } diff --git a/substrate/client/chain-spec/derive/Cargo.toml b/substrate/client/chain-spec/derive/Cargo.toml index 4dea856b03f2..f9e291f897c9 100644 --- a/substrate/client/chain-spec/derive/Cargo.toml +++ b/substrate/client/chain-spec/derive/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Macros to derive chain spec extension traits implementation." +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -15,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] proc-macro = true [dependencies] -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" proc-macro2 = "1.0.56" quote = "1.0.28" -syn = "2.0.39" +syn = "2.0.48" diff --git a/substrate/client/chain-spec/src/chain_spec.rs b/substrate/client/chain-spec/src/chain_spec.rs index e832bed298fc..e598d26f8b5f 100644 --- a/substrate/client/chain-spec/src/chain_spec.rs +++ b/substrate/client/chain-spec/src/chain_spec.rs @@ -769,9 +769,7 @@ fn json_eval_value_at_key( path: &mut VecDeque<&str>, fun: &dyn Fn(&json::Value) -> bool, ) -> bool { - let Some(key) = path.pop_front() else { - return false; - }; + let Some(key) = path.pop_front() else { return false }; if path.is_empty() { doc.as_object().map_or(false, |o| o.get(key).map_or(false, |v| fun(v))) diff --git a/substrate/client/cli/Cargo.toml b/substrate/client/cli/Cargo.toml index 6b51f4eb6900..2e577ef16c36 100644 --- a/substrate/client/cli/Cargo.toml +++ b/substrate/client/cli/Cargo.toml @@ -9,25 +9,28 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] array-bytes = "6.1" -chrono = "0.4.27" -clap = { version = "4.4.10", features = ["derive", "string", "wrap_help"] } +chrono = "0.4.31" +clap = { version = "4.4.14", features = ["derive", "string", "wrap_help"] } fdlimit = "0.3.0" futures = "0.3.21" itertools = "0.12" libp2p-identity = { version = "0.1.3", features = ["ed25519", "peerid"] } log = "0.4.17" -names = { version = "0.14", default-features = false } +names = { version = "0.14.0", default-features = false } parity-scale-codec = "3.6.1" rand = "0.8.5" regex = "1.6.0" rpassword = "7.0.0" -serde = "1.0.193" -serde_json = "1.0.108" +serde = "1.0.195" +serde_json = "1.0.111" thiserror = "1.0.48" bip39 = "2.0.0" tokio = { version = "1.22.0", features = ["parking_lot", "rt-multi-thread", "signal"] } diff --git a/substrate/client/consensus/aura/Cargo.toml b/substrate/client/consensus/aura/Cargo.toml index bc9648f683a8..89a63a944166 100644 --- a/substrate/client/consensus/aura/Cargo.toml +++ b/substrate/client/consensus/aura/Cargo.toml @@ -9,11 +9,14 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1" } 
futures = "0.3.21" log = "0.4.17" diff --git a/substrate/client/consensus/babe/Cargo.toml b/substrate/client/consensus/babe/Cargo.toml index c8cff0981b36..40c69d5780a5 100644 --- a/substrate/client/consensus/babe/Cargo.toml +++ b/substrate/client/consensus/babe/Cargo.toml @@ -10,17 +10,20 @@ repository.workspace = true documentation = "https://docs.rs/sc-consensus-babe" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } futures = "0.3.21" log = "0.4.17" num-bigint = "0.4.3" num-rational = "0.4.1" -num-traits = "0.2.8" +num-traits = "0.2.17" parking_lot = "0.12.1" thiserror = "1.0" fork-tree = { path = "../../../utils/fork-tree" } diff --git a/substrate/client/consensus/babe/rpc/Cargo.toml b/substrate/client/consensus/babe/rpc/Cargo.toml index 913dd990fd33..753f8fbc821d 100644 --- a/substrate/client/consensus/babe/rpc/Cargo.toml +++ b/substrate/client/consensus/babe/rpc/Cargo.toml @@ -9,13 +9,16 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] jsonrpsee = { version = "0.16.2", features = ["client-core", "macros", "server"] } futures = "0.3.21" -serde = { version = "1.0.193", features = ["derive"] } +serde = { version = "1.0.195", features = ["derive"] } thiserror = "1.0" sc-consensus-babe = { path = ".." } sc-consensus-epochs = { path = "../../epochs" } @@ -30,7 +33,7 @@ sp-keystore = { path = "../../../../primitives/keystore" } sp-runtime = { path = "../../../../primitives/runtime" } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" tokio = "1.22.0" sc-consensus = { path = "../../common" } sc-keystore = { path = "../../../keystore" } diff --git a/substrate/client/consensus/babe/src/tests.rs b/substrate/client/consensus/babe/src/tests.rs index 8a6417687679..800731ee8766 100644 --- a/substrate/client/consensus/babe/src/tests.rs +++ b/substrate/client/consensus/babe/src/tests.rs @@ -412,8 +412,7 @@ async fn run_one_test(mutator: impl Fn(&mut TestHeader, Stage) + Send + Sync + ' let mut net = net.lock(); net.poll(cx); for p in net.peers() { - #[allow(clippy::never_loop)] - for (h, e) in p.failed_verifications() { + if let Some((h, e)) = p.failed_verifications().into_iter().next() { panic!("Verification failed for {:?}: {}", h, e); } } diff --git a/substrate/client/consensus/common/Cargo.toml b/substrate/client/consensus/common/Cargo.toml index 540b12f5d5e1..f93fac2b1271 100644 --- a/substrate/client/consensus/common/Cargo.toml +++ b/substrate/client/consensus/common/Cargo.toml @@ -9,11 +9,14 @@ repository.workspace = true description = "Collection of common consensus specific imlementations for Substrate (client)" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" futures = { version = "0.3.21", features = ["thread-pool"] } futures-timer = "3.0.1" libp2p-identity = { version = "0.1.3", features = ["ed25519", "peerid"] } diff --git a/substrate/client/consensus/epochs/Cargo.toml b/substrate/client/consensus/epochs/Cargo.toml index 07de83980bcf..76e4c05a6734 100644 --- a/substrate/client/consensus/epochs/Cargo.toml +++ b/substrate/client/consensus/epochs/Cargo.toml @@ -9,6 +9,9 @@ homepage = 
"https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/consensus/grandpa/Cargo.toml b/substrate/client/consensus/grandpa/Cargo.toml index e1baff3bbf2c..a6aacd564854 100644 --- a/substrate/client/consensus/grandpa/Cargo.toml +++ b/substrate/client/consensus/grandpa/Cargo.toml @@ -10,13 +10,16 @@ description = "Integration of the GRANDPA finality gadget into substrate." documentation = "https://docs.rs/sc-consensus-grandpa" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] ahash = "0.8.2" array-bytes = "6.1" -async-trait = "0.1.57" +async-trait = "0.1.74" dyn-clone = "1.0" finality-grandpa = { version = "0.16.2", features = ["derive-codec"] } futures = "0.3.21" @@ -25,7 +28,7 @@ log = "0.4.17" parity-scale-codec = { version = "3.6.1", features = ["derive"] } parking_lot = "0.12.1" rand = "0.8.5" -serde_json = "1.0.108" +serde_json = "1.0.111" thiserror = "1.0" fork-tree = { path = "../../../utils/fork-tree" } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../../utils/prometheus" } @@ -53,7 +56,7 @@ sp-runtime = { path = "../../../primitives/runtime" } [dev-dependencies] assert_matches = "1.3.0" finality-grandpa = { version = "0.16.2", features = ["derive-codec", "test-helpers"] } -serde = "1.0.193" +serde = "1.0.195" tokio = "1.22.0" sc-network = { path = "../../network" } sc-network-test = { path = "../../network/test" } diff --git a/substrate/client/consensus/grandpa/rpc/Cargo.toml b/substrate/client/consensus/grandpa/rpc/Cargo.toml index 2a0d51dd616e..9cfc9616cbc0 100644 --- a/substrate/client/consensus/grandpa/rpc/Cargo.toml +++ b/substrate/client/consensus/grandpa/rpc/Cargo.toml @@ -9,13 +9,16 @@ license = "GPL-3.0-or-later WITH Classpath-exception-2.0" readme = "README.md" homepage = "https://substrate.io" +[lints] +workspace = true + [dependencies] finality-grandpa = { version = "0.16.2", features = ["derive-codec"] } futures = "0.3.16" jsonrpsee = { version = "0.16.2", features = ["client-core", "macros", "server"] } log = "0.4.8" parity-scale-codec = { version = "3.6.1", features = ["derive"] } -serde = { version = "1.0.193", features = ["derive"] } +serde = { version = "1.0.195", features = ["derive"] } thiserror = "1.0" sc-client-api = { path = "../../../api" } sc-consensus-grandpa = { path = ".." 
} diff --git a/substrate/client/consensus/grandpa/src/communication/tests.rs b/substrate/client/consensus/grandpa/src/communication/tests.rs index 91015130829e..ebff2fbd05e8 100644 --- a/substrate/client/consensus/grandpa/src/communication/tests.rs +++ b/substrate/client/consensus/grandpa/src/communication/tests.rs @@ -76,11 +76,15 @@ impl NetworkPeers for TestNetwork { unimplemented!(); } - fn report_peer(&self, who: PeerId, cost_benefit: ReputationChange) { - let _ = self.sender.unbounded_send(Event::Report(who, cost_benefit)); + fn report_peer(&self, peer_id: PeerId, cost_benefit: ReputationChange) { + let _ = self.sender.unbounded_send(Event::Report(peer_id, cost_benefit)); } - fn disconnect_peer(&self, _who: PeerId, _protocol: ProtocolName) {} + fn peer_reputation(&self, _peer_id: &PeerId) -> i32 { + unimplemented!() + } + + fn disconnect_peer(&self, _peer_id: PeerId, _protocol: ProtocolName) {} fn accept_unreserved_peers(&self) { unimplemented!(); diff --git a/substrate/client/consensus/slots/Cargo.toml b/substrate/client/consensus/slots/Cargo.toml index 52c528c3028a..801558a276a5 100644 --- a/substrate/client/consensus/slots/Cargo.toml +++ b/substrate/client/consensus/slots/Cargo.toml @@ -10,11 +10,14 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1" } futures = "0.3.21" futures-timer = "3.0.1" diff --git a/substrate/client/db/Cargo.toml b/substrate/client/db/Cargo.toml index bb22ff4c6c19..e833b90b3ede 100644 --- a/substrate/client/db/Cargo.toml +++ b/substrate/client/db/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Client backend that uses RocksDB database as storage." 
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/executor/Cargo.toml b/substrate/client/executor/Cargo.toml index aa919372183d..12a766cf5001 100644 --- a/substrate/client/executor/Cargo.toml +++ b/substrate/client/executor/Cargo.toml @@ -10,6 +10,9 @@ description = "A crate that provides means of executing/dispatching calls into t documentation = "https://docs.rs/sc-executor" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/executor/common/Cargo.toml b/substrate/client/executor/common/Cargo.toml index 732e60f0ebb7..6d110c528c17 100644 --- a/substrate/client/executor/common/Cargo.toml +++ b/substrate/client/executor/common/Cargo.toml @@ -10,6 +10,9 @@ description = "A set of common definitions that are needed for defining executio documentation = "https://docs.rs/sc-executor-common/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/executor/runtime-test/Cargo.toml b/substrate/client/executor/runtime-test/Cargo.toml index 84ed458fb1cd..82610c4f50c2 100644 --- a/substrate/client/executor/runtime-test/Cargo.toml +++ b/substrate/client/executor/runtime-test/Cargo.toml @@ -9,6 +9,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/executor/wasmtime/Cargo.toml b/substrate/client/executor/wasmtime/Cargo.toml index ffbf54811123..f1a5ef0cfe71 100644 --- a/substrate/client/executor/wasmtime/Cargo.toml +++ b/substrate/client/executor/wasmtime/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Defines a `WasmRuntime` that uses the Wasmtime JIT to execute." 
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/informant/Cargo.toml b/substrate/client/informant/Cargo.toml index d15a66f4a26f..6b8167cbf71d 100644 --- a/substrate/client/informant/Cargo.toml +++ b/substrate/client/informant/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/keystore/Cargo.toml b/substrate/client/keystore/Cargo.toml index 3fd88ae8b87e..8fa6221ff197 100644 --- a/substrate/client/keystore/Cargo.toml +++ b/substrate/client/keystore/Cargo.toml @@ -10,13 +10,16 @@ description = "Keystore (and session key management) for ed25519 based chains li documentation = "https://docs.rs/sc-keystore" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] array-bytes = "6.1" parking_lot = "0.12.1" -serde_json = "1.0.108" +serde_json = "1.0.111" thiserror = "1.0" sp-application-crypto = { path = "../../primitives/application-crypto" } sp-core = { path = "../../primitives/core" } diff --git a/substrate/client/mixnet/Cargo.toml b/substrate/client/mixnet/Cargo.toml index 2043f56322a2..8e289e076b36 100644 --- a/substrate/client/mixnet/Cargo.toml +++ b/substrate/client/mixnet/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository = "https://github.com/paritytech/substrate/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/network-gossip/Cargo.toml b/substrate/client/network-gossip/Cargo.toml index 0ad9dec46517..c53c53fb1350 100644 --- a/substrate/client/network-gossip/Cargo.toml +++ b/substrate/client/network-gossip/Cargo.toml @@ -10,6 +10,9 @@ repository.workspace = true documentation = "https://docs.rs/sc-network-gossip" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,7 +20,7 @@ targets = ["x86_64-unknown-linux-gnu"] ahash = "0.8.2" futures = "0.3.21" futures-timer = "3.0.1" -libp2p = "0.51.3" +libp2p = "0.51.4" log = "0.4.17" schnellru = "0.2.1" tracing = "0.1.29" @@ -29,7 +32,7 @@ sp-runtime = { path = "../../primitives/runtime" } [dev-dependencies] tokio = "1.22.0" -async-trait = "0.1.73" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.0.0", features = ["derive"] } quickcheck = { version = "1.0.3", default-features = false } substrate-test-runtime-client = { path = "../../test-utils/runtime/client" } diff --git a/substrate/client/network-gossip/src/bridge.rs b/substrate/client/network-gossip/src/bridge.rs index 02e849515714..2c6dbb613c15 100644 --- a/substrate/client/network-gossip/src/bridge.rs +++ b/substrate/client/network-gossip/src/bridge.rs @@ -395,9 +395,13 @@ mod tests { unimplemented!(); } - fn report_peer(&self, _who: PeerId, _cost_benefit: ReputationChange) {} + fn report_peer(&self, _peer_id: PeerId, _cost_benefit: ReputationChange) {} - fn disconnect_peer(&self, _who: PeerId, _protocol: ProtocolName) { + fn peer_reputation(&self, _peer_id: &PeerId) -> i32 { + unimplemented!() + } + + fn disconnect_peer(&self, _peer_id: PeerId, _protocol: ProtocolName) { unimplemented!(); } diff --git a/substrate/client/network-gossip/src/state_machine.rs b/substrate/client/network-gossip/src/state_machine.rs index 
db805a84ee2d..23f994ecd66e 100644 --- a/substrate/client/network-gossip/src/state_machine.rs +++ b/substrate/client/network-gossip/src/state_machine.rs @@ -622,11 +622,15 @@ mod tests { unimplemented!(); } - fn report_peer(&self, who: PeerId, cost_benefit: ReputationChange) { - self.inner.lock().unwrap().peer_reports.push((who, cost_benefit)); + fn report_peer(&self, peer_id: PeerId, cost_benefit: ReputationChange) { + self.inner.lock().unwrap().peer_reports.push((peer_id, cost_benefit)); } - fn disconnect_peer(&self, _who: PeerId, _protocol: ProtocolName) { + fn peer_reputation(&self, _peer_id: &PeerId) -> i32 { + unimplemented!() + } + + fn disconnect_peer(&self, _peer_id: PeerId, _protocol: ProtocolName) { unimplemented!(); } diff --git a/substrate/client/network/Cargo.toml b/substrate/client/network/Cargo.toml index af813e9e3688..33506143837f 100644 --- a/substrate/client/network/Cargo.toml +++ b/substrate/client/network/Cargo.toml @@ -10,6 +10,9 @@ repository.workspace = true documentation = "https://docs.rs/sc-network" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -25,7 +28,7 @@ fnv = "1.0.6" futures = "0.3.21" futures-timer = "3.0.2" ip_network = "0.4.1" -libp2p = { version = "0.51.3", features = ["dns", "identify", "kad", "macros", "mdns", "noise", "ping", "request-response", "tcp", "tokio", "websocket", "yamux"] } +libp2p = { version = "0.51.4", features = ["dns", "identify", "kad", "macros", "mdns", "noise", "ping", "request-response", "tcp", "tokio", "websocket", "yamux"] } linked_hash_set = "0.1.3" log = "0.4.17" mockall = "0.12" @@ -33,8 +36,8 @@ parking_lot = "0.12.1" partial_sort = "0.2.0" pin-project = "1.0.12" rand = "0.8.5" -serde = { version = "1.0.193", features = ["derive"] } -serde_json = "1.0.108" +serde = { version = "1.0.195", features = ["derive"] } +serde_json = "1.0.111" smallvec = "1.11.0" thiserror = "1.0" tokio = { version = "1.22.0", features = ["macros", "sync"] } diff --git a/substrate/client/network/common/Cargo.toml b/substrate/client/network/common/Cargo.toml index 35cff59ddfbf..d1510ec1a732 100644 --- a/substrate/client/network/common/Cargo.toml +++ b/substrate/client/network/common/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true documentation = "https://docs.rs/sc-network-sync" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/network/light/Cargo.toml b/substrate/client/network/light/Cargo.toml index a8301ab6b8f3..cafe8af65710 100644 --- a/substrate/client/network/light/Cargo.toml +++ b/substrate/client/network/light/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true documentation = "https://docs.rs/sc-network-light" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/network/src/behaviour.rs b/substrate/client/network/src/behaviour.rs index 076a0c1917d4..901f3ea67587 100644 --- a/substrate/client/network/src/behaviour.rs +++ b/substrate/client/network/src/behaviour.rs @@ -232,13 +232,20 @@ impl Behaviour { pub fn send_request( &mut self, target: &PeerId, - protocol: &str, + protocol: ProtocolName, request: Vec, - pending_response: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + pending_response: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ) { - self.request_responses - 
.send_request(target, protocol, request, pending_response, connect) + self.request_responses.send_request( + target, + protocol, + request, + fallback_request, + pending_response, + connect, + ) } /// Returns a shared reference to the user protocol. diff --git a/substrate/client/network/src/protocol/notifications/behaviour.rs b/substrate/client/network/src/protocol/notifications/behaviour.rs index 4f1bc5b65df5..33de66c7910b 100644 --- a/substrate/client/network/src/protocol/notifications/behaviour.rs +++ b/substrate/client/network/src/protocol/notifications/behaviour.rs @@ -1038,7 +1038,7 @@ impl Notifications { peerset_rejected, incoming_index, }; - return self.report_reject(index).map_or((), |_| ()); + return self.report_reject(index).map_or((), |_| ()) } trace!( diff --git a/substrate/client/network/src/protocol/notifications/upgrade.rs b/substrate/client/network/src/protocol/notifications/upgrade.rs index ad7c63542f09..cb407d076219 100644 --- a/substrate/client/network/src/protocol/notifications/upgrade.rs +++ b/substrate/client/network/src/protocol/notifications/upgrade.rs @@ -17,13 +17,14 @@ // You should have received a copy of the GNU General Public License // along with this program. If not, see . -#[allow(unused_imports)] // False positive. These are exported and therefore used +#[cfg(test)] +pub(crate) use self::notifications::{ + NotificationsInOpen, NotificationsInSubstreamHandshake, NotificationsOutOpen, +}; pub use self::{ collec::UpgradeCollec, notifications::{ - NotificationsIn, NotificationsInOpen, NotificationsInSubstream, - NotificationsInSubstreamHandshake, NotificationsOut, NotificationsOutOpen, - NotificationsOutSubstream, + NotificationsIn, NotificationsInSubstream, NotificationsOut, NotificationsOutSubstream, }, }; diff --git a/substrate/client/network/src/request_responses.rs b/substrate/client/network/src/request_responses.rs index 3809d39fee81..cdc77c556619 100644 --- a/substrate/client/network/src/request_responses.rs +++ b/substrate/client/network/src/request_responses.rs @@ -57,6 +57,7 @@ use libp2p::{ use std::{ collections::{hash_map::Entry, HashMap}, io, iter, + ops::Deref, pin::Pin, task::{Context, Poll}, time::{Duration, Instant}, @@ -173,6 +174,13 @@ pub struct OutgoingResponse { pub sent_feedback: Option>, } +/// Information stored about a pending request. +struct PendingRequest { + started_at: Instant, + response_tx: oneshot::Sender, ProtocolName), RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, +} + /// When sending a request, what to do on a disconnected recipient. #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum IfDisconnected { @@ -265,8 +273,7 @@ pub struct RequestResponsesBehaviour { >, /// Pending requests, passed down to a request-response [`Behaviour`], awaiting a reply. - pending_requests: - HashMap, RequestFailure>>)>, + pending_requests: HashMap, /// Whenever an incoming request arrives, a `Future` is added to this list and will yield the /// start time and the response to send back to the remote. 
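The hunk that follows changes `send_request` to take the protocol name by value, accept an optional fallback request, and resolve the response channel with both the payload and the protocol that actually answered. A caller-side sketch of the new shape, mirroring the tests further down in this file; `behaviour`, `peer_id` and the payload values are assumed to exist, so treat this as an illustration of the call shape rather than runnable setup code:

```rust
// Sketch only, not runnable on its own: `behaviour` (a `RequestResponsesBehaviour`),
// `peer_id` and the two payload `Vec<u8>`s are assumed to exist, and the code runs
// inside an async context. Types follow the signatures introduced in this diff.
let (tx, rx) = futures::channel::oneshot::channel();

behaviour.send_request(
    &peer_id,
    ProtocolName::from("/test/req-resp/2"),                           // preferred protocol
    preferred_payload,                                                 // Vec<u8> request body
    Some((fallback_payload, ProtocolName::from("/test/req-resp/1"))),  // optional fallback
    tx,
    IfDisconnected::ImmediateError,
);

// The channel now yields the payload together with the protocol that answered,
// so the caller knows which decoder to apply.
if let Ok(Ok((bytes, answered_on))) = rx.await {
    // decode `bytes` according to `answered_on`
    let _ = (bytes, answered_on);
}
```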
@@ -349,29 +356,25 @@ impl RequestResponsesBehaviour { pub fn send_request( &mut self, target: &PeerId, - protocol_name: &str, + protocol_name: ProtocolName, request: Vec, - pending_response: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + pending_response: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ) { log::trace!(target: "sub-libp2p", "send request to {target} ({protocol_name:?}), {} bytes", request.len()); - if let Some((protocol, _)) = self.protocols.get_mut(protocol_name) { - if protocol.is_connected(target) || connect.should_connect() { - let request_id = protocol.send_request(target, request); - let prev_req_id = self.pending_requests.insert( - (protocol_name.to_string().into(), request_id).into(), - (Instant::now(), pending_response), - ); - debug_assert!(prev_req_id.is_none(), "Expect request id to be unique."); - } else if pending_response.send(Err(RequestFailure::NotConnected)).is_err() { - log::debug!( - target: "sub-libp2p", - "Not connected to peer {:?}. At the same time local \ - node is no longer interested in the result.", - target, - ); - } + if let Some((protocol, _)) = self.protocols.get_mut(protocol_name.deref()) { + Self::send_request_inner( + protocol, + &mut self.pending_requests, + target, + protocol_name, + request, + fallback_request, + pending_response, + connect, + ) } else if pending_response.send(Err(RequestFailure::UnknownProtocol)).is_err() { log::debug!( target: "sub-libp2p", @@ -381,6 +384,37 @@ impl RequestResponsesBehaviour { ); } } + + fn send_request_inner( + behaviour: &mut Behaviour, + pending_requests: &mut HashMap, + target: &PeerId, + protocol_name: ProtocolName, + request: Vec, + fallback_request: Option<(Vec, ProtocolName)>, + pending_response: oneshot::Sender, ProtocolName), RequestFailure>>, + connect: IfDisconnected, + ) { + if behaviour.is_connected(target) || connect.should_connect() { + let request_id = behaviour.send_request(target, request); + let prev_req_id = pending_requests.insert( + (protocol_name.to_string().into(), request_id).into(), + PendingRequest { + started_at: Instant::now(), + response_tx: pending_response, + fallback_request, + }, + ); + debug_assert!(prev_req_id.is_none(), "Expect request id to be unique."); + } else if pending_response.send(Err(RequestFailure::NotConnected)).is_err() { + log::debug!( + target: "sub-libp2p", + "Not connected to peer {:?}. At the same time local \ + node is no longer interested in the result.", + target, + ); + } + } } impl NetworkBehaviour for RequestResponsesBehaviour { @@ -597,8 +631,10 @@ impl NetworkBehaviour for RequestResponsesBehaviour { } } + let mut fallback_requests = vec![]; + // Poll request-responses protocols. - for (protocol, (behaviour, resp_builder)) in &mut self.protocols { + for (protocol, (ref mut behaviour, ref mut resp_builder)) in &mut self.protocols { 'poll_protocol: while let Poll::Ready(ev) = behaviour.poll(cx, params) { let ev = match ev { // Main events we are interested in. @@ -699,17 +735,21 @@ impl NetworkBehaviour for RequestResponsesBehaviour { .pending_requests .remove(&(protocol.clone(), request_id).into()) { - Some((started, pending_response)) => { + Some(PendingRequest { started_at, response_tx, .. 
}) => { log::trace!( target: "sub-libp2p", "received response from {peer} ({protocol:?}), {} bytes", response.as_ref().map_or(0usize, |response| response.len()), ); - let delivered = pending_response - .send(response.map_err(|()| RequestFailure::Refused)) + let delivered = response_tx + .send( + response + .map_err(|()| RequestFailure::Refused) + .map(|resp| (resp, protocol.clone())), + ) .map_err(|_| RequestFailure::Obsolete); - (started, delivered) + (started_at, delivered) }, None => { log::warn!( @@ -743,8 +783,34 @@ impl NetworkBehaviour for RequestResponsesBehaviour { .pending_requests .remove(&(protocol.clone(), request_id).into()) { - Some((started, pending_response)) => { - if pending_response + Some(PendingRequest { + started_at, + response_tx, + fallback_request, + }) => { + // Try using the fallback request if the protocol was not + // supported. + if let OutboundFailure::UnsupportedProtocols = error { + if let Some((fallback_request, fallback_protocol)) = + fallback_request + { + log::trace!( + target: "sub-libp2p", + "Request with id {:?} failed. Trying the fallback protocol. {}", + request_id, + fallback_protocol.deref() + ); + fallback_requests.push(( + peer, + fallback_protocol, + fallback_request, + response_tx, + )); + continue + } + } + + if response_tx .send(Err(RequestFailure::Network(error.clone()))) .is_err() { @@ -755,7 +821,7 @@ impl NetworkBehaviour for RequestResponsesBehaviour { request_id, ); } - started + started_at }, None => { log::warn!( @@ -826,6 +892,25 @@ impl NetworkBehaviour for RequestResponsesBehaviour { } } + // Send out fallback requests. + for (peer, protocol, request, pending_response) in fallback_requests.drain(..) { + if let Some((behaviour, _)) = self.protocols.get_mut(&protocol) { + Self::send_request_inner( + behaviour, + &mut self.pending_requests, + &peer, + protocol, + request, + None, + pending_response, + // We can error if not connected because the + // previous attempt would have tried to establish a + // connection already or errored and we wouldn't have gotten here. + IfDisconnected::ImmediateError, + ); + } + } + break Poll::Pending } } @@ -977,6 +1062,7 @@ mod tests { use super::*; use crate::mock::MockPeerStore; + use assert_matches::assert_matches; use futures::{channel::oneshot, executor::LocalPool, task::Spawn}; use libp2p::{ core::{ @@ -1026,7 +1112,7 @@ mod tests { #[test] fn basic_request_response_works() { - let protocol_name = "/test/req-resp/1"; + let protocol_name = ProtocolName::from("/test/req-resp/1"); let mut pool = LocalPool::new(); // Build swarms whose behaviour is [`RequestResponsesBehaviour`]. 
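The fallback handling added to `poll` above (queue the fallback, then re-send it with `IfDisconnected::ImmediateError`) boils down to a single rule. A self-contained restatement of that decision, with names and types invented purely for illustration rather than taken from the upstream code:

```rust
/// Simplified restatement of the fallback decision above; the names and types here
/// are invented for illustration and are not the upstream ones.
enum Outcome {
    /// Re-send the supplied fallback payload on the fallback protocol.
    RetryOnFallback { payload: Vec<u8>, protocol: String },
    /// Surface the original error to the caller.
    FailToCaller,
}

fn on_outbound_failure(
    unsupported_protocol: bool,          // failure was `OutboundFailure::UnsupportedProtocols`
    fallback: Option<(Vec<u8>, String)>, // fallback payload + protocol, if one was supplied
) -> Outcome {
    match (unsupported_protocol, fallback) {
        // Only an unsupported preferred protocol with a registered fallback triggers a retry.
        (true, Some((payload, protocol))) => Outcome::RetryOnFallback { payload, protocol },
        // Everything else fails the request immediately.
        _ => Outcome::FailToCaller,
    }
}
```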
@@ -1055,7 +1141,7 @@ mod tests { .unwrap(); let protocol_config = ProtocolConfig { - name: From::from(protocol_name), + name: protocol_name.clone(), fallback_names: Vec::new(), max_request_size: 1024, max_response_size: 1024 * 1024, @@ -1104,8 +1190,9 @@ mod tests { let (sender, receiver) = oneshot::channel(); swarm.behaviour_mut().send_request( &peer_id, - protocol_name, + protocol_name.clone(), b"this is a request".to_vec(), + None, sender, IfDisconnected::ImmediateError, ); @@ -1120,13 +1207,16 @@ mod tests { } } - assert_eq!(response_receiver.unwrap().await.unwrap().unwrap(), b"this is a response"); + assert_eq!( + response_receiver.unwrap().await.unwrap().unwrap(), + (b"this is a response".to_vec(), protocol_name) + ); }); } #[test] fn max_response_size_exceeded() { - let protocol_name = "/test/req-resp/1"; + let protocol_name = ProtocolName::from("/test/req-resp/1"); let mut pool = LocalPool::new(); // Build swarms whose behaviour is [`RequestResponsesBehaviour`]. @@ -1153,7 +1243,7 @@ mod tests { .unwrap(); let protocol_config = ProtocolConfig { - name: From::from(protocol_name), + name: protocol_name.clone(), fallback_names: Vec::new(), max_request_size: 1024, max_response_size: 8, // <-- important for the test @@ -1204,8 +1294,9 @@ mod tests { let (sender, receiver) = oneshot::channel(); swarm.behaviour_mut().send_request( &peer_id, - protocol_name, + protocol_name.clone(), b"this is a request".to_vec(), + None, sender, IfDisconnected::ImmediateError, ); @@ -1239,14 +1330,14 @@ mod tests { /// See [`ProtocolRequestId`] for additional information. #[test] fn request_id_collision() { - let protocol_name_1 = "/test/req-resp-1/1"; - let protocol_name_2 = "/test/req-resp-2/1"; + let protocol_name_1 = ProtocolName::from("/test/req-resp-1/1"); + let protocol_name_2 = ProtocolName::from("/test/req-resp-2/1"); let mut pool = LocalPool::new(); let mut swarm_1 = { let protocol_configs = vec![ ProtocolConfig { - name: From::from(protocol_name_1), + name: protocol_name_1.clone(), fallback_names: Vec::new(), max_request_size: 1024, max_response_size: 1024 * 1024, @@ -1254,7 +1345,7 @@ mod tests { inbound_queue: None, }, ProtocolConfig { - name: From::from(protocol_name_2), + name: protocol_name_2.clone(), fallback_names: Vec::new(), max_request_size: 1024, max_response_size: 1024 * 1024, @@ -1272,7 +1363,7 @@ mod tests { let protocol_configs = vec![ ProtocolConfig { - name: From::from(protocol_name_1), + name: protocol_name_1.clone(), fallback_names: Vec::new(), max_request_size: 1024, max_response_size: 1024 * 1024, @@ -1280,7 +1371,7 @@ mod tests { inbound_queue: Some(tx_1), }, ProtocolConfig { - name: From::from(protocol_name_2), + name: protocol_name_2.clone(), fallback_names: Vec::new(), max_request_size: 1024, max_response_size: 1024 * 1024, @@ -1362,15 +1453,17 @@ mod tests { let (sender_2, receiver_2) = oneshot::channel(); swarm_1.behaviour_mut().send_request( &peer_id, - protocol_name_1, + protocol_name_1.clone(), b"this is a request".to_vec(), + None, sender_1, IfDisconnected::ImmediateError, ); swarm_1.behaviour_mut().send_request( &peer_id, - protocol_name_2, + protocol_name_2.clone(), b"this is a request".to_vec(), + None, sender_2, IfDisconnected::ImmediateError, ); @@ -1388,8 +1481,239 @@ mod tests { } } let (response_receiver_1, response_receiver_2) = response_receivers.unwrap(); - assert_eq!(response_receiver_1.await.unwrap().unwrap(), b"this is a response"); - assert_eq!(response_receiver_2.await.unwrap().unwrap(), b"this is a response"); + assert_eq!( + 
response_receiver_1.await.unwrap().unwrap(), + (b"this is a response".to_vec(), protocol_name_1) + ); + assert_eq!( + response_receiver_2.await.unwrap().unwrap(), + (b"this is a response".to_vec(), protocol_name_2) + ); + }); + } + + #[test] + fn request_fallback() { + let protocol_name_1 = ProtocolName::from("/test/req-resp/2"); + let protocol_name_1_fallback = ProtocolName::from("/test/req-resp/1"); + let protocol_name_2 = ProtocolName::from("/test/another"); + let mut pool = LocalPool::new(); + + let protocol_config_1 = ProtocolConfig { + name: protocol_name_1.clone(), + fallback_names: Vec::new(), + max_request_size: 1024, + max_response_size: 1024 * 1024, + request_timeout: Duration::from_secs(30), + inbound_queue: None, + }; + let protocol_config_1_fallback = ProtocolConfig { + name: protocol_name_1_fallback.clone(), + fallback_names: Vec::new(), + max_request_size: 1024, + max_response_size: 1024 * 1024, + request_timeout: Duration::from_secs(30), + inbound_queue: None, + }; + let protocol_config_2 = ProtocolConfig { + name: protocol_name_2.clone(), + fallback_names: Vec::new(), + max_request_size: 1024, + max_response_size: 1024 * 1024, + request_timeout: Duration::from_secs(30), + inbound_queue: None, + }; + + // This swarm only speaks protocol_name_1_fallback and protocol_name_2. + // It only responds to requests. + let mut older_swarm = { + let (tx_1, mut rx_1) = async_channel::bounded::(64); + let (tx_2, mut rx_2) = async_channel::bounded::(64); + let mut protocol_config_1_fallback = protocol_config_1_fallback.clone(); + protocol_config_1_fallback.inbound_queue = Some(tx_1); + + let mut protocol_config_2 = protocol_config_2.clone(); + protocol_config_2.inbound_queue = Some(tx_2); + + pool.spawner() + .spawn_obj( + async move { + for _ in 0..2 { + if let Some(rq) = rx_1.next().await { + let (fb_tx, fb_rx) = oneshot::channel(); + assert_eq!(rq.payload, b"request on protocol /test/req-resp/1"); + let _ = rq.pending_response.send(super::OutgoingResponse { + result: Ok( + b"this is a response on protocol /test/req-resp/1".to_vec() + ), + reputation_changes: Vec::new(), + sent_feedback: Some(fb_tx), + }); + fb_rx.await.unwrap(); + } + } + + if let Some(rq) = rx_2.next().await { + let (fb_tx, fb_rx) = oneshot::channel(); + assert_eq!(rq.payload, b"request on protocol /test/other"); + let _ = rq.pending_response.send(super::OutgoingResponse { + result: Ok(b"this is a response on protocol /test/other".to_vec()), + reputation_changes: Vec::new(), + sent_feedback: Some(fb_tx), + }); + fb_rx.await.unwrap(); + } + } + .boxed() + .into(), + ) + .unwrap(); + + build_swarm(vec![protocol_config_1_fallback, protocol_config_2].into_iter()) + }; + + // This swarm speaks all protocols. + let mut new_swarm = build_swarm( + vec![ + protocol_config_1.clone(), + protocol_config_1_fallback.clone(), + protocol_config_2.clone(), + ] + .into_iter(), + ); + + { + let dial_addr = older_swarm.1.clone(); + Swarm::dial(&mut new_swarm.0, dial_addr).unwrap(); + } + + // Running `older_swarm`` in the background. + pool.spawner() + .spawn_obj({ + async move { + loop { + _ = older_swarm.0.select_next_some().await; + } + } + .boxed() + .into() + }) + .unwrap(); + + // Run the newer swarm. Attempt to make requests on all protocols. + let (mut swarm, _) = new_swarm; + let mut older_peer_id = None; + + pool.run_until(async move { + let mut response_receiver = None; + // Try the new protocol with a fallback. + loop { + match swarm.select_next_some().await { + SwarmEvent::ConnectionEstablished { peer_id, .. 
} => { + older_peer_id = Some(peer_id); + let (sender, receiver) = oneshot::channel(); + swarm.behaviour_mut().send_request( + &peer_id, + protocol_name_1.clone(), + b"request on protocol /test/req-resp/2".to_vec(), + Some(( + b"request on protocol /test/req-resp/1".to_vec(), + protocol_config_1_fallback.name.clone(), + )), + sender, + IfDisconnected::ImmediateError, + ); + response_receiver = Some(receiver); + }, + SwarmEvent::Behaviour(Event::RequestFinished { result, .. }) => { + result.unwrap(); + break + }, + _ => {}, + } + } + assert_eq!( + response_receiver.unwrap().await.unwrap().unwrap(), + ( + b"this is a response on protocol /test/req-resp/1".to_vec(), + protocol_name_1_fallback.clone() + ) + ); + // Try the old protocol with a useless fallback. + let (sender, response_receiver) = oneshot::channel(); + swarm.behaviour_mut().send_request( + older_peer_id.as_ref().unwrap(), + protocol_name_1_fallback.clone(), + b"request on protocol /test/req-resp/1".to_vec(), + Some(( + b"dummy request, will fail if processed".to_vec(), + protocol_config_1_fallback.name.clone(), + )), + sender, + IfDisconnected::ImmediateError, + ); + loop { + match swarm.select_next_some().await { + SwarmEvent::Behaviour(Event::RequestFinished { result, .. }) => { + result.unwrap(); + break + }, + _ => {}, + } + } + assert_eq!( + response_receiver.await.unwrap().unwrap(), + ( + b"this is a response on protocol /test/req-resp/1".to_vec(), + protocol_name_1_fallback.clone() + ) + ); + // Try the new protocol with no fallback. Should fail. + let (sender, response_receiver) = oneshot::channel(); + swarm.behaviour_mut().send_request( + older_peer_id.as_ref().unwrap(), + protocol_name_1.clone(), + b"request on protocol /test/req-resp-2".to_vec(), + None, + sender, + IfDisconnected::ImmediateError, + ); + loop { + match swarm.select_next_some().await { + SwarmEvent::Behaviour(Event::RequestFinished { result, .. }) => { + assert_matches!( + result.unwrap_err(), + RequestFailure::Network(OutboundFailure::UnsupportedProtocols) + ); + break + }, + _ => {}, + } + } + assert!(response_receiver.await.unwrap().is_err()); + // Try the other protocol with no fallback. + let (sender, response_receiver) = oneshot::channel(); + swarm.behaviour_mut().send_request( + older_peer_id.as_ref().unwrap(), + protocol_name_2.clone(), + b"request on protocol /test/other".to_vec(), + None, + sender, + IfDisconnected::ImmediateError, + ); + loop { + match swarm.select_next_some().await { + SwarmEvent::Behaviour(Event::RequestFinished { result, .. }) => { + result.unwrap(); + break + }, + _ => {}, + } + } + assert_eq!( + response_receiver.await.unwrap().unwrap(), + (b"this is a response on protocol /test/other".to_vec(), protocol_name_2.clone()) + ); }); } } diff --git a/substrate/client/network/src/service.rs b/substrate/client/network/src/service.rs index 5d654a990b79..d1b21b2b4197 100644 --- a/substrate/client/network/src/service.rs +++ b/substrate/client/network/src/service.rs @@ -121,6 +121,8 @@ pub struct NetworkService { local_identity: Keypair, /// Bandwidth logging system. Can be queried to know the average bandwidth consumed. bandwidth: Arc, + /// Used to query and report reputation changes. + peer_store_handle: PeerStoreHandle, /// Channel that sends messages to the actual worker. to_worker: TracingUnboundedSender, /// Protocol name -> `SetId` mapping for notification protocols. 
The map never changes after @@ -131,8 +133,6 @@ pub struct NetworkService { protocol_handles: Vec, /// Shortcut to sync protocol handle (`protocol_handles[0]`). sync_protocol_handle: protocol_controller::ProtocolHandle, - /// Handle to `PeerStore`. - peer_store_handle: PeerStoreHandle, /// Marker to pin the `H` generic. Serves no purpose except to not break backwards /// compatibility. _marker: PhantomData, @@ -866,12 +866,18 @@ where .unbounded_send(ServiceToWorkerMsg::AddKnownAddress(peer_id, addr)); } - fn report_peer(&self, who: PeerId, cost_benefit: ReputationChange) { - let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::ReportPeer(who, cost_benefit)); + fn report_peer(&self, peer_id: PeerId, cost_benefit: ReputationChange) { + self.peer_store_handle.clone().report_peer(peer_id, cost_benefit); } - fn disconnect_peer(&self, who: PeerId, protocol: ProtocolName) { - let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::DisconnectPeer(who, protocol)); + fn peer_reputation(&self, peer_id: &PeerId) -> i32 { + self.peer_store_handle.peer_reputation(peer_id) + } + + fn disconnect_peer(&self, peer_id: PeerId, protocol: ProtocolName) { + let _ = self + .to_worker + .unbounded_send(ServiceToWorkerMsg::DisconnectPeer(peer_id, protocol)); } fn accept_unreserved_peers(&self) { @@ -1043,11 +1049,12 @@ where target: PeerId, protocol: ProtocolName, request: Vec, + fallback_request: Option<(Vec, ProtocolName)>, connect: IfDisconnected, - ) -> Result, RequestFailure> { + ) -> Result<(Vec, ProtocolName), RequestFailure> { let (tx, rx) = oneshot::channel(); - self.start_request(target, protocol, request, tx, connect); + self.start_request(target, protocol, request, fallback_request, tx, connect); match rx.await { Ok(v) => v, @@ -1063,13 +1070,15 @@ where target: PeerId, protocol: ProtocolName, request: Vec, - tx: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + tx: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ) { let _ = self.to_worker.unbounded_send(ServiceToWorkerMsg::Request { target, protocol: protocol.into(), request, + fallback_request, pending_response: tx, connect, }); @@ -1150,13 +1159,13 @@ enum ServiceToWorkerMsg { GetValue(KademliaKey), PutValue(KademliaKey, Vec), AddKnownAddress(PeerId, Multiaddr), - ReportPeer(PeerId, ReputationChange), EventStream(out_events::Sender), Request { target: PeerId, protocol: ProtocolName, request: Vec, - pending_response: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + pending_response: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, }, NetworkStatus { @@ -1278,20 +1287,20 @@ where self.network_service.behaviour_mut().put_value(key, value), ServiceToWorkerMsg::AddKnownAddress(peer_id, addr) => self.network_service.behaviour_mut().add_known_address(peer_id, addr), - ServiceToWorkerMsg::ReportPeer(peer_id, reputation_change) => - self.peer_store_handle.report_peer(peer_id, reputation_change), ServiceToWorkerMsg::EventStream(sender) => self.event_streams.push(sender), ServiceToWorkerMsg::Request { target, protocol, request, + fallback_request, pending_response, connect, } => { self.network_service.behaviour_mut().send_request( &target, - &protocol, + protocol, request, + fallback_request, pending_response, connect, ); diff --git a/substrate/client/network/src/service/traits.rs b/substrate/client/network/src/service/traits.rs index 037e16ec69f8..2d9bb0e26850 100644 --- a/substrate/client/network/src/service/traits.rs +++ 
b/substrate/client/network/src/service/traits.rs @@ -156,12 +156,15 @@ pub trait NetworkPeers { /// Report a given peer as either beneficial (+) or costly (-) according to the /// given scalar. - fn report_peer(&self, who: PeerId, cost_benefit: ReputationChange); + fn report_peer(&self, peer_id: PeerId, cost_benefit: ReputationChange); + + /// Get peer reputation. + fn peer_reputation(&self, peer_id: &PeerId) -> i32; /// Disconnect from a node as soon as possible. /// /// This triggers the same effects as if the connection had closed itself spontaneously. - fn disconnect_peer(&self, who: PeerId, protocol: ProtocolName); + fn disconnect_peer(&self, peer_id: PeerId, protocol: ProtocolName); /// Connect to unreserved peers and allow unreserved peers to connect for syncing purposes. fn accept_unreserved_peers(&self); @@ -255,16 +258,16 @@ where T::add_known_address(self, peer_id, addr) } - fn report_peer(&self, who: PeerId, cost_benefit: ReputationChange) { - // TODO: when we get rid of `Peerset`, we'll likely need to add some kind of async - // interface to `PeerStore`, otherwise we'll have trouble calling functions accepting - // `&mut self` via `Arc`. - // See https://github.com/paritytech/substrate/issues/14170. - T::report_peer(self, who, cost_benefit) + fn report_peer(&self, peer_id: PeerId, cost_benefit: ReputationChange) { + T::report_peer(self, peer_id, cost_benefit) + } + + fn peer_reputation(&self, peer_id: &PeerId) -> i32 { + T::peer_reputation(self, peer_id) } - fn disconnect_peer(&self, who: PeerId, protocol: ProtocolName) { - T::disconnect_peer(self, who, protocol) + fn disconnect_peer(&self, peer_id: PeerId, protocol: ProtocolName) { + T::disconnect_peer(self, peer_id, protocol) } fn accept_unreserved_peers(&self) { @@ -549,8 +552,9 @@ pub trait NetworkRequest { target: PeerId, protocol: ProtocolName, request: Vec, + fallback_request: Option<(Vec, ProtocolName)>, connect: IfDisconnected, - ) -> Result, RequestFailure>; + ) -> Result<(Vec, ProtocolName), RequestFailure>; /// Variation of `request` which starts a request whose response is delivered on a provided /// channel. 
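With `peer_reputation` added to `NetworkPeers` above, a caller can both adjust and read back a peer's score through the same trait. A hedged sketch, generic over any implementation; the penalty value and threshold are made up, and the imports assume the usual `sc_network` re-exports:

```rust
// Sketch only: assumes these items are importable as in the surrounding diff.
use libp2p::PeerId;
use sc_network::{NetworkPeers, ProtocolName, ReputationChange};

/// Penalise a peer and drop it from a protocol once its score falls below a
/// (made-up) threshold. Works against any `NetworkPeers` implementation.
fn penalise_and_maybe_disconnect<N: NetworkPeers>(
    net: &N,
    peer_id: &PeerId,
    protocol: ProtocolName,
) {
    // Negative values are costly, positive ones beneficial; -100 is illustrative.
    net.report_peer(peer_id.clone(), ReputationChange::new(-100, "illustrative penalty"));

    // New in this change: the accumulated reputation can be read back through the trait.
    if net.peer_reputation(peer_id) < -1_000 {
        net.disconnect_peer(peer_id.clone(), protocol);
    }
}
```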
@@ -567,7 +571,8 @@ pub trait NetworkRequest { target: PeerId, protocol: ProtocolName, request: Vec, - tx: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + tx: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ); } @@ -583,13 +588,20 @@ where target: PeerId, protocol: ProtocolName, request: Vec, + fallback_request: Option<(Vec, ProtocolName)>, connect: IfDisconnected, - ) -> Pin, RequestFailure>> + Send + 'async_trait>> + ) -> Pin< + Box< + dyn Future, ProtocolName), RequestFailure>> + + Send + + 'async_trait, + >, + > where 'life0: 'async_trait, Self: 'async_trait, { - T::request(self, target, protocol, request, connect) + T::request(self, target, protocol, request, fallback_request, connect) } fn start_request( @@ -597,10 +609,11 @@ where target: PeerId, protocol: ProtocolName, request: Vec, - tx: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + tx: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ) { - T::start_request(self, target, protocol, request, tx, connect) + T::start_request(self, target, protocol, request, fallback_request, tx, connect) } } diff --git a/substrate/client/network/sync/Cargo.toml b/substrate/client/network/sync/Cargo.toml index b1695094a533..7ce50fb6ea88 100644 --- a/substrate/client/network/sync/Cargo.toml +++ b/substrate/client/network/sync/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true documentation = "https://docs.rs/sc-network-sync" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,10 +21,10 @@ prost-build = "0.12" [dependencies] array-bytes = "6.1" async-channel = "2" -async-trait = "0.1.58" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } futures = "0.3.21" -libp2p = "0.51.3" +libp2p = "0.51.4" log = "0.4.17" mockall = "0.12" prost = "0.12" diff --git a/substrate/client/network/sync/src/block_relay_protocol.rs b/substrate/client/network/sync/src/block_relay_protocol.rs index bca7968ba438..32806da50c8f 100644 --- a/substrate/client/network/sync/src/block_relay_protocol.rs +++ b/substrate/client/network/sync/src/block_relay_protocol.rs @@ -18,7 +18,10 @@ use futures::channel::oneshot; use libp2p::PeerId; -use sc_network::request_responses::{ProtocolConfig, RequestFailure}; +use sc_network::{ + request_responses::{ProtocolConfig, RequestFailure}, + ProtocolName, +}; use sc_network_common::sync::message::{BlockData, BlockRequest}; use sp_runtime::traits::Block as BlockT; use std::sync::Arc; @@ -43,7 +46,7 @@ pub trait BlockDownloader: Send + Sync { &self, who: PeerId, request: BlockRequest, - ) -> Result, RequestFailure>, oneshot::Canceled>; + ) -> Result, ProtocolName), RequestFailure>, oneshot::Canceled>; /// Parses the protocol specific response to retrieve the block data. fn block_response_into_blocks( diff --git a/substrate/client/network/sync/src/block_request_handler.rs b/substrate/client/network/sync/src/block_request_handler.rs index fa7bbc18dcb0..78534af77a21 100644 --- a/substrate/client/network/sync/src/block_request_handler.rs +++ b/substrate/client/network/sync/src/block_request_handler.rs @@ -571,7 +571,7 @@ impl BlockDownloader for FullBlockDownloader { &self, who: PeerId, request: BlockRequest, - ) -> Result, RequestFailure>, oneshot::Canceled> { + ) -> Result, ProtocolName), RequestFailure>, oneshot::Canceled> { // Build the request protobuf. 
let bytes = BlockRequestSchema { fields: request.fields.to_be_u32(), diff --git a/substrate/client/network/sync/src/engine.rs b/substrate/client/network/sync/src/engine.rs index 902ac8188472..b560f92feab2 100644 --- a/substrate/client/network/sync/src/engine.rs +++ b/substrate/client/network/sync/src/engine.rs @@ -1257,7 +1257,7 @@ where let ResponseEvent { peer_id, request, response } = response_event; match response { - Ok(Ok(resp)) => match request { + Ok(Ok((resp, _))) => match request { PeerRequest::Block(req) => { match self.block_downloader.block_response_into_blocks(&req, resp) { Ok(blocks) => { diff --git a/substrate/client/network/sync/src/mock.rs b/substrate/client/network/sync/src/mock.rs index 3fbe8ed86ba1..4110b2938aa3 100644 --- a/substrate/client/network/sync/src/mock.rs +++ b/substrate/client/network/sync/src/mock.rs @@ -23,7 +23,7 @@ use crate::block_relay_protocol::{BlockDownloader as BlockDownloaderT, BlockResp use futures::channel::oneshot; use libp2p::PeerId; -use sc_network::RequestFailure; +use sc_network::{ProtocolName, RequestFailure}; use sc_network_common::sync::message::{BlockData, BlockRequest}; use sp_runtime::traits::Block as BlockT; @@ -36,7 +36,7 @@ mockall::mock! { &self, who: PeerId, request: BlockRequest, - ) -> Result, RequestFailure>, oneshot::Canceled>; + ) -> Result, ProtocolName), RequestFailure>, oneshot::Canceled>; fn block_response_into_blocks( &self, request: &BlockRequest, diff --git a/substrate/client/network/sync/src/pending_responses.rs b/substrate/client/network/sync/src/pending_responses.rs index 87833c4a5f40..b9e92a362323 100644 --- a/substrate/client/network/sync/src/pending_responses.rs +++ b/substrate/client/network/sync/src/pending_responses.rs @@ -29,7 +29,7 @@ use futures::{ }; use libp2p::PeerId; use log::error; -use sc_network::request_responses::RequestFailure; +use sc_network::{request_responses::RequestFailure, types::ProtocolName}; use sp_runtime::traits::Block as BlockT; use std::task::{Context, Poll, Waker}; use tokio_stream::StreamMap; @@ -38,7 +38,7 @@ use tokio_stream::StreamMap; const LOG_TARGET: &'static str = "sync"; /// Response result. -type ResponseResult = Result, RequestFailure>, oneshot::Canceled>; +type ResponseResult = Result, ProtocolName), RequestFailure>, oneshot::Canceled>; /// A future yielding [`ResponseResult`]. type ResponseFuture = BoxFuture<'static, ResponseResult>; diff --git a/substrate/client/network/sync/src/service/mock.rs b/substrate/client/network/sync/src/service/mock.rs index 4e088561efaf..c5ace09999b7 100644 --- a/substrate/client/network/sync/src/service/mock.rs +++ b/substrate/client/network/sync/src/service/mock.rs @@ -85,8 +85,9 @@ mockall::mock! { fn set_authorized_peers(&self, peers: HashSet); fn set_authorized_only(&self, reserved_only: bool); fn add_known_address(&self, peer_id: PeerId, addr: Multiaddr); - fn report_peer(&self, who: PeerId, cost_benefit: ReputationChange); - fn disconnect_peer(&self, who: PeerId, protocol: ProtocolName); + fn report_peer(&self, peer_id: PeerId, cost_benefit: ReputationChange); + fn peer_reputation(&self, peer_id: &PeerId) -> i32; + fn disconnect_peer(&self, peer_id: PeerId, protocol: ProtocolName); fn accept_unreserved_peers(&self); fn deny_unreserved_peers(&self); fn add_reserved_peer(&self, peer: MultiaddrWithPeerId) -> Result<(), String>; @@ -117,14 +118,16 @@ mockall::mock! 
{ target: PeerId, protocol: ProtocolName, request: Vec, + fallback_request: Option<(Vec, ProtocolName)>, connect: IfDisconnected, - ) -> Result, RequestFailure>; + ) -> Result<(Vec, ProtocolName), RequestFailure>; fn start_request( &self, target: PeerId, protocol: ProtocolName, request: Vec, - tx: oneshot::Sender, RequestFailure>>, + fallback_request: Option<(Vec, ProtocolName)>, + tx: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ); } diff --git a/substrate/client/network/sync/src/service/network.rs b/substrate/client/network/sync/src/service/network.rs index 315ee1fa2307..b210f5b4b0aa 100644 --- a/substrate/client/network/sync/src/service/network.rs +++ b/substrate/client/network/sync/src/service/network.rs @@ -55,7 +55,7 @@ pub enum ToServiceCommand { PeerId, ProtocolName, Vec, - oneshot::Sender, RequestFailure>>, + oneshot::Sender, ProtocolName), RequestFailure>>, IfDisconnected, ), @@ -95,7 +95,7 @@ impl NetworkServiceHandle { who: PeerId, protocol: ProtocolName, request: Vec, - tx: oneshot::Sender, RequestFailure>>, + tx: oneshot::Sender, ProtocolName), RequestFailure>>, connect: IfDisconnected, ) { let _ = self @@ -135,7 +135,7 @@ impl NetworkServiceProvider { ToServiceCommand::ReportPeer(peer, reputation_change) => service.report_peer(peer, reputation_change), ToServiceCommand::StartRequest(peer, protocol, request, tx, connect) => - service.start_request(peer, protocol, request, tx, connect), + service.start_request(peer, protocol, request, None, tx, connect), ToServiceCommand::WriteNotification(peer, protocol, message) => service.write_notification(peer, protocol, message), ToServiceCommand::SetNotificationHandshake(protocol, handshake) => diff --git a/substrate/client/network/test/Cargo.toml b/substrate/client/network/test/Cargo.toml index a11ed2a3ec8f..dced6ed67305 100644 --- a/substrate/client/network/test/Cargo.toml +++ b/substrate/client/network/test/Cargo.toml @@ -9,15 +9,18 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] tokio = "1.22.0" -async-trait = "0.1.57" +async-trait = "0.1.74" futures = "0.3.21" futures-timer = "3.0.1" -libp2p = "0.51.3" +libp2p = "0.51.4" log = "0.4.17" parking_lot = "0.12.1" rand = "0.8.5" diff --git a/substrate/client/network/transactions/Cargo.toml b/substrate/client/network/transactions/Cargo.toml index 2a6aa4b3a40a..24b5087af1f4 100644 --- a/substrate/client/network/transactions/Cargo.toml +++ b/substrate/client/network/transactions/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true documentation = "https://docs.rs/sc-network-transactions" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -16,7 +19,7 @@ targets = ["x86_64-unknown-linux-gnu"] array-bytes = "6.1" codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } futures = "0.3.21" -libp2p = "0.51.3" +libp2p = "0.51.4" log = "0.4.17" prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../../utils/prometheus" } sc-network = { path = ".." 
} diff --git a/substrate/client/network/transactions/src/lib.rs b/substrate/client/network/transactions/src/lib.rs index 56206355a33f..ed0b845021aa 100644 --- a/substrate/client/network/transactions/src/lib.rs +++ b/substrate/client/network/transactions/src/lib.rs @@ -476,7 +476,20 @@ where propagated_to.entry(hash).or_default().push(who.to_base58()); } trace!(target: "sync", "Sending {} transactions to {}", to_send.len(), who); - let _ = self.notification_service.send_sync_notification(who, to_send.encode()); + // Historically, the format of a notification of the transactions protocol + // consisted in a (SCALE-encoded) `Vec`. + // After RFC 56, the format was modified in a backwards-compatible way to be + // a (SCALE-encoded) tuple `(Compact(1), Transaction)`, which is the same encoding + // as a `Vec` of length one. This is no coincidence, as the change was + // intentionally done in a backwards-compatible way. + // In other words, the `Vec` that is sent below **must** always have only a single + // element in it. + // See + for to_send in to_send { + let _ = self + .notification_service + .send_sync_notification(who, vec![to_send].encode()); + } } } diff --git a/substrate/client/offchain/Cargo.toml b/substrate/client/offchain/Cargo.toml index 7e0a379d7e83..ef12c43295f7 100644 --- a/substrate/client/offchain/Cargo.toml +++ b/substrate/client/offchain/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -16,7 +19,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } futures = "0.3.21" futures-timer = "3.0.2" -libp2p = "0.51.3" +libp2p = "0.51.4" num_cpus = "1.13" parking_lot = "0.12.1" rand = "0.8.5" diff --git a/substrate/client/offchain/src/api.rs b/substrate/client/offchain/src/api.rs index affff46fc8f1..b71279bc12a8 100644 --- a/substrate/client/offchain/src/api.rs +++ b/substrate/client/offchain/src/api.rs @@ -181,11 +181,15 @@ mod tests { unimplemented!(); } - fn report_peer(&self, _who: PeerId, _cost_benefit: ReputationChange) { + fn report_peer(&self, _peer_id: PeerId, _cost_benefit: ReputationChange) { unimplemented!(); } - fn disconnect_peer(&self, _who: PeerId, _protocol: ProtocolName) { + fn peer_reputation(&self, _peer_id: &PeerId) -> i32 { + unimplemented!() + } + + fn disconnect_peer(&self, _peer_id: PeerId, _protocol: ProtocolName) { unimplemented!(); } diff --git a/substrate/client/offchain/src/lib.rs b/substrate/client/offchain/src/lib.rs index a458fcc4b278..cbda40a06afc 100644 --- a/substrate/client/offchain/src/lib.rs +++ b/substrate/client/offchain/src/lib.rs @@ -362,11 +362,15 @@ mod tests { unimplemented!(); } - fn report_peer(&self, _who: PeerId, _cost_benefit: ReputationChange) { + fn report_peer(&self, _peer_id: PeerId, _cost_benefit: ReputationChange) { unimplemented!(); } - fn disconnect_peer(&self, _who: PeerId, _protocol: ProtocolName) { + fn peer_reputation(&self, _peer_id: &PeerId) -> i32 { + unimplemented!() + } + + fn disconnect_peer(&self, _peer_id: PeerId, _protocol: ProtocolName) { unimplemented!(); } diff --git a/substrate/client/proposer-metrics/Cargo.toml b/substrate/client/proposer-metrics/Cargo.toml index b6b4452ecc64..664b72764a3b 100644 --- a/substrate/client/proposer-metrics/Cargo.toml +++ b/substrate/client/proposer-metrics/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Basic metrics for block 
production." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/rpc-api/Cargo.toml b/substrate/client/rpc-api/Cargo.toml index 768f733d5aa6..3aab4199cc09 100644 --- a/substrate/client/rpc-api/Cargo.toml +++ b/substrate/client/rpc-api/Cargo.toml @@ -9,14 +9,17 @@ repository.workspace = true description = "Substrate RPC interfaces." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1" } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", features = ["derive"] } -serde_json = "1.0.108" +serde = { version = "1.0.195", features = ["derive"] } +serde_json = "1.0.111" thiserror = "1.0" sc-chain-spec = { path = "../chain-spec" } sc-mixnet = { path = "../mixnet", optional = true } diff --git a/substrate/client/rpc-api/src/state/mod.rs b/substrate/client/rpc-api/src/state/mod.rs index 376a01cf7dbd..9d142cc865a3 100644 --- a/substrate/client/rpc-api/src/state/mod.rs +++ b/substrate/client/rpc-api/src/state/mod.rs @@ -271,8 +271,8 @@ pub trait StateApi { /// [querying substrate storage via rpc][3]. /// /// [1]: https://docs.substrate.io/main-docs/fundamentals/state-transitions-and-storage/ - /// [2]: https://www.shawntabrizi.com/substrate/transparent-keys-in-substrate/ - /// [3]: https://www.shawntabrizi.com/substrate/querying-substrate-storage-via-rpc/ + /// [2]: https://www.shawntabrizi.com/blog/substrate/transparent-keys-in-substrate/ + /// [3]: https://www.shawntabrizi.com/blog/substrate/querying-substrate-storage-via-rpc/ /// /// ### Maximum payload size /// diff --git a/substrate/client/rpc-servers/Cargo.toml b/substrate/client/rpc-servers/Cargo.toml index a7cc374f97a1..60d999863cab 100644 --- a/substrate/client/rpc-servers/Cargo.toml +++ b/substrate/client/rpc-servers/Cargo.toml @@ -9,13 +9,16 @@ repository.workspace = true description = "Substrate RPC servers." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] jsonrpsee = { version = "0.16.2", features = ["server"] } log = "0.4.17" -serde_json = "1.0.108" +serde_json = "1.0.111" tokio = { version = "1.22.0", features = ["parking_lot"] } prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus" } tower-http = { version = "0.4.0", features = ["cors"] } diff --git a/substrate/client/rpc-spec-v2/Cargo.toml b/substrate/client/rpc-spec-v2/Cargo.toml index 45a1d862f04a..ba32308e6783 100644 --- a/substrate/client/rpc-spec-v2/Cargo.toml +++ b/substrate/client/rpc-spec-v2/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Substrate RPC interface v2." 
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -39,7 +42,7 @@ log = "0.4.17" futures-util = { version = "0.3.19", default-features = false } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" tokio = { version = "1.22.0", features = ["macros"] } substrate-test-runtime-client = { path = "../../test-utils/runtime/client" } substrate-test-runtime = { path = "../../test-utils/runtime" } diff --git a/substrate/client/rpc-spec-v2/src/archive/archive.rs b/substrate/client/rpc-spec-v2/src/archive/archive.rs index 3336c23bb3f8..fa2f7c7966f3 100644 --- a/substrate/client/rpc-spec-v2/src/archive/archive.rs +++ b/substrate/client/rpc-spec-v2/src/archive/archive.rs @@ -125,7 +125,7 @@ where let finalized_num = self.client.info().finalized_number; if finalized_num >= height { - let Ok(Some(hash)) = self.client.block_hash(height.into()) else { return Ok(vec![]) }; + let Ok(Some(hash)) = self.client.block_hash(height) else { return Ok(vec![]) }; return Ok(vec![hex_string(&hash.as_ref())]) } diff --git a/substrate/client/rpc/Cargo.toml b/substrate/client/rpc/Cargo.toml index 6c606a007e23..87ec11985535 100644 --- a/substrate/client/rpc/Cargo.toml +++ b/substrate/client/rpc/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Substrate Client RPC" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +21,7 @@ futures = "0.3.21" jsonrpsee = { version = "0.16.2", features = ["server"] } log = "0.4.17" parking_lot = "0.12.1" -serde_json = "1.0.108" +serde_json = "1.0.111" sc-block-builder = { path = "../block-builder" } sc-chain-spec = { path = "../chain-spec" } sc-client-api = { path = "../api" } diff --git a/substrate/client/service/Cargo.toml b/substrate/client/service/Cargo.toml index 2eeb83fc4c3c..aecab906d72d 100644 --- a/substrate/client/service/Cargo.toml +++ b/substrate/client/service/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Substrate service. Starts a thread that spins up the network, client, and extrinsic pool. Manages communication between them." 
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -34,8 +37,8 @@ log = "0.4.17" futures-timer = "3.0.1" exit-future = "0.2.0" pin-project = "1.0.12" -serde = "1.0.193" -serde_json = "1.0.108" +serde = "1.0.195" +serde_json = "1.0.111" sc-keystore = { path = "../keystore" } sp-runtime = { path = "../../primitives/runtime" } sp-trie = { path = "../../primitives/trie" } @@ -74,7 +77,7 @@ sc-tracing = { path = "../tracing" } sc-sysinfo = { path = "../sysinfo" } tracing = "0.1.29" tracing-futures = { version = "0.2.4" } -async-trait = "0.1.57" +async-trait = "0.1.74" tokio = { version = "1.22.0", features = ["parking_lot", "rt-multi-thread", "time"] } tempfile = "3.1.0" directories-next = "2" diff --git a/substrate/client/service/test/Cargo.toml b/substrate/client/service/test/Cargo.toml index be536f98747e..c0264e46cd33 100644 --- a/substrate/client/service/test/Cargo.toml +++ b/substrate/client/service/test/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/state-db/Cargo.toml b/substrate/client/state-db/Cargo.toml index c5e8272637d4..001ada02ef2f 100644 --- a/substrate/client/state-db/Cargo.toml +++ b/substrate/client/state-db/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "State database maintenance. Handles canonicalization and pruning in the database." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/client/storage-monitor/Cargo.toml b/substrate/client/storage-monitor/Cargo.toml index c0eb9d94b929..7185c62e276e 100644 --- a/substrate/client/storage-monitor/Cargo.toml +++ b/substrate/client/storage-monitor/Cargo.toml @@ -8,11 +8,13 @@ repository.workspace = true description = "Storage monitor service for substrate" homepage = "https://substrate.io" +[lints] +workspace = true + [dependencies] -clap = { version = "4.4.10", features = ["derive", "string"] } +clap = { version = "4.4.14", features = ["derive", "string"] } log = "0.4.17" fs4 = "0.7.0" -sc-client-db = { path = "../db", default-features = false } sp-core = { path = "../../primitives/core" } -tokio = "1.22.0" +tokio = { version = "1.22.0", features = ["time"] } thiserror = "1.0.48" diff --git a/substrate/client/storage-monitor/src/lib.rs b/substrate/client/storage-monitor/src/lib.rs index 75d3bf313a5d..5e4d4ca87f98 100644 --- a/substrate/client/storage-monitor/src/lib.rs +++ b/substrate/client/storage-monitor/src/lib.rs @@ -18,7 +18,6 @@ // along with this program. If not, see . 
use clap::Args; -use sc_client_db::DatabaseSource; use sp_core::traits::SpawnEssentialNamed; use std::{ io, @@ -71,43 +70,37 @@ impl StorageMonitorService { /// Creates new StorageMonitorService for given client config pub fn try_spawn( parameters: StorageMonitorParams, - database: DatabaseSource, + path: PathBuf, spawner: &impl SpawnEssentialNamed, ) -> Result<()> { - Ok(match (parameters.threshold, database.path()) { - (0, _) => { - log::info!( - target: LOG_TARGET, - "StorageMonitorService: threshold `0` given, storage monitoring disabled", - ); - }, - (_, None) => { - log::warn!( - target: LOG_TARGET, - "StorageMonitorService: no database path to observe", - ); - }, - (threshold, Some(path)) => { - log::debug!( - target: LOG_TARGET, - "Initializing StorageMonitorService for db path: {path:?}", - ); - - Self::check_free_space(&path, threshold)?; + if parameters.threshold == 0 { + log::info!( + target: LOG_TARGET, + "StorageMonitorService: threshold `0` given, storage monitoring disabled", + ); + } else { + log::debug!( + target: LOG_TARGET, + "Initializing StorageMonitorService for db path: {}", + path.display() + ); + + Self::check_free_space(&path, parameters.threshold)?; + + let storage_monitor_service = StorageMonitorService { + path, + threshold: parameters.threshold, + polling_period: Duration::from_secs(parameters.polling_period.into()), + }; - let storage_monitor_service = StorageMonitorService { - path: path.to_path_buf(), - threshold, - polling_period: Duration::from_secs(parameters.polling_period.into()), - }; + spawner.spawn_essential( + "storage-monitor", + None, + Box::pin(storage_monitor_service.run()), + ); + } - spawner.spawn_essential( - "storage-monitor", - None, - Box::pin(storage_monitor_service.run()), - ); - }, - }) + Ok(()) } /// Main monitoring loop, intended to be spawned as essential task. 
Quits if free space drop diff --git a/substrate/client/sync-state-rpc/Cargo.toml b/substrate/client/sync-state-rpc/Cargo.toml index 746f1c754f9c..b3de9585ab45 100644 --- a/substrate/client/sync-state-rpc/Cargo.toml +++ b/substrate/client/sync-state-rpc/Cargo.toml @@ -8,14 +8,17 @@ license = "Apache-2.0" homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1" } jsonrpsee = { version = "0.16.2", features = ["client-core", "macros", "server"] } -serde = { version = "1.0.193", features = ["derive"] } -serde_json = "1.0.108" +serde = { version = "1.0.195", features = ["derive"] } +serde_json = "1.0.111" thiserror = "1.0.48" sc-chain-spec = { path = "../chain-spec" } sc-client-api = { path = "../api" } diff --git a/substrate/client/sysinfo/Cargo.toml b/substrate/client/sysinfo/Cargo.toml index 4cd1b222bc6d..18ac161f1ee1 100644 --- a/substrate/client/sysinfo/Cargo.toml +++ b/substrate/client/sysinfo/Cargo.toml @@ -10,6 +10,9 @@ description = "A crate that provides basic hardware and software telemetry infor documentation = "https://docs.rs/sc-sysinfo" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -21,8 +24,8 @@ rand = "0.8.5" rand_pcg = "0.3.1" derive_more = "0.99" regex = "1" -serde = { version = "1.0.193", features = ["derive"] } -serde_json = "1.0.108" +serde = { version = "1.0.195", features = ["derive"] } +serde_json = "1.0.111" sc-telemetry = { path = "../telemetry" } sp-core = { path = "../../primitives/core" } sp-io = { path = "../../primitives/io" } diff --git a/substrate/client/telemetry/Cargo.toml b/substrate/client/telemetry/Cargo.toml index 71119df11537..ba597ef898e0 100644 --- a/substrate/client/telemetry/Cargo.toml +++ b/substrate/client/telemetry/Cargo.toml @@ -10,19 +10,22 @@ repository.workspace = true documentation = "https://docs.rs/sc-telemetry" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -chrono = "0.4.27" +chrono = "0.4.31" futures = "0.3.21" -libp2p = { version = "0.51.3", features = ["dns", "tcp", "tokio", "wasm-ext", "websocket"] } +libp2p = { version = "0.51.4", features = ["dns", "tcp", "tokio", "wasm-ext", "websocket"] } log = "0.4.17" parking_lot = "0.12.1" pin-project = "1.0.12" sc-utils = { path = "../utils" } rand = "0.8.5" -serde = { version = "1.0.193", features = ["derive"] } -serde_json = "1.0.108" +serde = { version = "1.0.195", features = ["derive"] } +serde_json = "1.0.111" thiserror = "1.0.48" wasm-timer = "0.2.5" diff --git a/substrate/client/tracing/Cargo.toml b/substrate/client/tracing/Cargo.toml index 23918fcabb5f..fb60a177d558 100644 --- a/substrate/client/tracing/Cargo.toml +++ b/substrate/client/tracing/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Instrumentation implementation for substrate." 
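On the `sc-storage-monitor` change above: `StorageMonitorService::try_spawn` now takes the database path directly instead of a `DatabaseSource`, which is why the `sc-client-db` dependency could be dropped, and a `threshold` of `0` still disables monitoring. A minimal caller-side sketch, assuming an essential-task spawner is available (for example the one owned by the task manager):

    use std::path::PathBuf;

    use sc_storage_monitor::{StorageMonitorParams, StorageMonitorService};
    use sp_core::traits::SpawnEssentialNamed;

    // Sketch only: `parameters` normally comes from the CLI (`threshold == 0`
    // disables the monitor); the caller now passes the database path directly.
    fn spawn_storage_monitor(
        parameters: StorageMonitorParams,
        database_path: PathBuf,
        spawner: &impl SpawnEssentialNamed,
    ) {
        StorageMonitorService::try_spawn(parameters, database_path, spawner)
            .expect("storage monitor should start when enough free space is available");
    }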
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -22,7 +25,7 @@ log = { version = "0.4.17" } parking_lot = "0.12.1" regex = "1.6.0" rustc-hash = "1.1.0" -serde = "1.0.193" +serde = "1.0.195" thiserror = "1.0.48" tracing = "0.1.29" tracing-log = "0.2" diff --git a/substrate/client/tracing/proc-macro/Cargo.toml b/substrate/client/tracing/proc-macro/Cargo.toml index 1d5d638c49bc..c293c1834e83 100644 --- a/substrate/client/tracing/proc-macro/Cargo.toml +++ b/substrate/client/tracing/proc-macro/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Helper macros for Substrate's client CLI" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -15,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] proc-macro = true [dependencies] -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" proc-macro2 = "1.0.56" quote = { version = "1.0.28", features = ["proc-macro"] } -syn = { version = "2.0.39", features = ["extra-traits", "full", "parsing", "proc-macro"] } +syn = { version = "2.0.48", features = ["extra-traits", "full", "parsing", "proc-macro"] } diff --git a/substrate/client/tracing/src/logging/mod.rs b/substrate/client/tracing/src/logging/mod.rs index 7489724b7915..6c8695eb655b 100644 --- a/substrate/client/tracing/src/logging/mod.rs +++ b/substrate/client/tracing/src/logging/mod.rs @@ -34,7 +34,7 @@ pub(crate) type DefaultLogger = stderr_writer::MakeStderrWriter; pub use directives::*; pub use sc_tracing_proc_macro::*; -use std::io; +use std::io::{self, IsTerminal}; use tracing::Subscriber; use tracing_subscriber::{ filter::LevelFilter, @@ -171,9 +171,7 @@ where _ => true, } || detailed_output; - use std::io::IsTerminal; - - let enable_color = force_colors.unwrap_or_else(|| std::io::stderr().is_terminal()); + let enable_color = force_colors.unwrap_or_else(|| io::stderr().is_terminal()); let timer = fast_local_time::FastLocalTime { with_fractional: detailed_output }; let event_format = EventFormat { @@ -182,7 +180,7 @@ where display_level: detailed_output, display_thread_name: detailed_output, enable_color, - dup_to_stdout: !std::io::stderr().is_terminal() && std::io::stdout().is_terminal(), + dup_to_stdout: !io::stderr().is_terminal() && io::stdout().is_terminal(), }; let builder = FmtSubscriber::builder().with_env_filter(env_filter); diff --git a/substrate/client/transaction-pool/Cargo.toml b/substrate/client/transaction-pool/Cargo.toml index 3e90304497f3..8832c0bf5080 100644 --- a/substrate/client/transaction-pool/Cargo.toml +++ b/substrate/client/transaction-pool/Cargo.toml @@ -9,18 +9,21 @@ repository.workspace = true description = "Substrate transaction pool implementation." 
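The `sc-tracing` logging tweak above only consolidates the `std::io::IsTerminal` imports; the behaviour it relies on is the standard-library terminal check. A tiny standalone illustration of the two conditions the logger computes (colour only when stderr is a terminal by default, duplicate to stdout when stderr is not a terminal but stdout is):

    use std::io::{self, IsTerminal};

    fn main() {
        // Mirrors the default checks used for the `EventFormat` configuration above.
        let enable_color = io::stderr().is_terminal();
        let dup_to_stdout = !io::stderr().is_terminal() && io::stdout().is_terminal();
        println!("color: {enable_color}, dup_to_stdout: {dup_to_stdout}");
    }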
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1" } futures = "0.3.21" futures-timer = "3.0.2" linked-hash-map = "0.5.4" log = "0.4.17" parking_lot = "0.12.1" -serde = { version = "1.0.193", features = ["derive"] } +serde = { version = "1.0.195", features = ["derive"] } thiserror = "1.0.48" prometheus-endpoint = { package = "substrate-prometheus-endpoint", path = "../../utils/prometheus" } sc-client-api = { path = "../api" } diff --git a/substrate/client/transaction-pool/api/Cargo.toml b/substrate/client/transaction-pool/api/Cargo.toml index 89981c275113..2522739cf887 100644 --- a/substrate/client/transaction-pool/api/Cargo.toml +++ b/substrate/client/transaction-pool/api/Cargo.toml @@ -8,16 +8,19 @@ homepage = "https://substrate.io" repository.workspace = true description = "Transaction pool client facing API." +[lints] +workspace = true + [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1" } futures = "0.3.21" log = "0.4.17" -serde = { version = "1.0.193", features = ["derive"] } +serde = { version = "1.0.195", features = ["derive"] } thiserror = "1.0.48" sp-blockchain = { path = "../../../primitives/blockchain" } sp-core = { path = "../../../primitives/core", default-features = false } sp-runtime = { path = "../../../primitives/runtime", default-features = false } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" diff --git a/substrate/client/utils/Cargo.toml b/substrate/client/utils/Cargo.toml index a3f5d00eef9e..df13153d7ee5 100644 --- a/substrate/client/utils/Cargo.toml +++ b/substrate/client/utils/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "I/O for Substrate runtimes" readme = "README.md" +[lints] +workspace = true + [dependencies] async-channel = "2" futures = "0.3.21" diff --git a/substrate/frame/Cargo.toml b/substrate/frame/Cargo.toml index df3f51056b79..b143086ae252 100644 --- a/substrate/frame/Cargo.toml +++ b/substrate/frame/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "The single package to get you started with building frame pallets and runtimes" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] # enable `experimental` feature for docs features = ["experimental"] @@ -45,7 +48,7 @@ frame-executive = { default-features = false, path = "../frame/executive", optio frame-system-rpc-runtime-api = { default-features = false, path = "../frame/system/rpc/runtime-api", optional = true } docify = "0.2.0" -simple-mermaid = { git = "https://github.com/kianenigma/simple-mermaid.git", rev = "e48b187bcfd5cc75111acd9d241f1bd36604344b" } +# simple-mermaid = { git = "https://github.com/kianenigma/simple-mermaid.git", rev = "e48b187bcfd5cc75111acd9d241f1bd36604344b", optional = true } log = { version = "0.4.20", default-features = false } [features] @@ -72,6 +75,7 @@ std = [ "log/std", "parity-scale-codec/std", "scale-info/std", + # "simple-mermaid", "sp-api?/std", "sp-arithmetic/std", "sp-block-builder?/std", diff --git a/substrate/frame/asset-conversion/Cargo.toml b/substrate/frame/asset-conversion/Cargo.toml index 5df86d402e0e..0c7b06abf55d 100644 --- a/substrate/frame/asset-conversion/Cargo.toml +++ b/substrate/frame/asset-conversion/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME asset conversion pallet" readme = 
"README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/asset-conversion/src/benchmarking.rs b/substrate/frame/asset-conversion/src/benchmarking.rs index e01a9776f265..bc4cd03dc6e1 100644 --- a/substrate/frame/asset-conversion/src/benchmarking.rs +++ b/substrate/frame/asset-conversion/src/benchmarking.rs @@ -19,74 +19,142 @@ //! Asset Conversion pallet benchmarking. use super::*; -use frame_benchmarking::{benchmarks, whitelisted_caller}; +use crate::Pallet as AssetConversion; +use frame_benchmarking::{v2::*, whitelisted_caller}; use frame_support::{ assert_ok, - storage::bounded_vec::BoundedVec, traits::{ - fungible::{Inspect as InspectFungible, Mutate as MutateFungible, Unbalanced}, + fungible::NativeOrWithId, fungibles::{Create, Inspect, Mutate}, }, }; use frame_system::RawOrigin as SystemOrigin; use sp_core::Get; -use sp_runtime::traits::{Bounded, StaticLookup}; -use sp_std::{ops::Div, prelude::*}; +use sp_std::{marker::PhantomData, prelude::*}; -use crate::Pallet as AssetConversion; +/// Benchmark Helper +pub trait BenchmarkHelper { + /// Returns a valid assets pair for the pool creation. + /// + /// When a specific asset, such as the native asset, is required in every pool, it should be + /// returned for each odd-numbered seed. + fn create_pair(seed1: u32, seed2: u32) -> (AssetKind, AssetKind); +} + +impl BenchmarkHelper for () +where + AssetKind: From, +{ + fn create_pair(seed1: u32, seed2: u32) -> (AssetKind, AssetKind) { + (seed1.into(), seed2.into()) + } +} + +/// Factory for creating a valid asset pairs with [`NativeOrWithId::Native`] always leading in the +/// pair. +pub struct NativeOrWithIdFactory(PhantomData); +impl + Ord> BenchmarkHelper> + for NativeOrWithIdFactory +{ + fn create_pair(seed1: u32, seed2: u32) -> (NativeOrWithId, NativeOrWithId) { + if seed1 % 2 == 0 { + (NativeOrWithId::WithId(seed2.into()), NativeOrWithId::Native) + } else { + (NativeOrWithId::Native, NativeOrWithId::WithId(seed2.into())) + } + } +} -const INITIAL_ASSET_BALANCE: u128 = 1_000_000_000_000; -type AccountIdLookupOf = <::Lookup as StaticLookup>::Source; -type BalanceOf = - <::Currency as InspectFungible<::AccountId>>::Balance; +/// Provides a pair of amounts expected to serve as sufficient initial liquidity for a pool. +fn valid_liquidity_amount(ed1: T::Balance, ed2: T::Balance) -> (T::Balance, T::Balance) +where + T::Assets: Inspect, +{ + let l = + ed1.max(ed2) + T::MintMinLiquidity::get() + T::MintMinLiquidity::get() + T::Balance::one(); + (l, l) +} -fn get_lp_token_id() -> T::PoolAssetId +/// Create the `asset` and mint the `amount` for the `caller`. +fn create_asset(caller: &T::AccountId, asset: &T::AssetKind, amount: T::Balance) where - T::PoolAssetId: Into, + T::Assets: Create + Mutate, { - let next_id: u32 = AssetConversion::::get_next_pool_asset_id().into(); - (next_id - 1).into() + if !T::Assets::asset_exists(asset.clone()) { + assert_ok!(T::Assets::create(asset.clone(), caller.clone(), true, T::Balance::one())); + } + assert_ok!(T::Assets::mint_into( + asset.clone(), + &caller, + amount + T::Assets::minimum_balance(asset.clone()) + )); } -fn create_asset(asset: &T::MultiAssetId) -> (T::AccountId, AccountIdLookupOf) +/// Create the designated fee asset for pool creation. 
+fn create_fee_asset(caller: &T::AccountId) where - T::AssetBalance: From, - T::Currency: Unbalanced, T::Assets: Create + Mutate, { - let caller: T::AccountId = whitelisted_caller(); - let caller_lookup = T::Lookup::unlookup(caller.clone()); - if let MultiAssetIdConversionResult::Converted(asset_id) = - T::MultiAssetIdConverter::try_convert(asset) - { - T::Currency::set_balance(&caller, BalanceOf::::max_value().div(1000u32.into())); - assert_ok!(T::Assets::create(asset_id.clone(), caller.clone(), true, 1.into())); - assert_ok!(T::Assets::mint_into(asset_id, &caller, INITIAL_ASSET_BALANCE.into())); + let fee_asset = T::PoolSetupFeeAsset::get(); + if !T::Assets::asset_exists(fee_asset.clone()) { + assert_ok!(T::Assets::create(fee_asset.clone(), caller.clone(), true, T::Balance::one())); } - (caller, caller_lookup) + assert_ok!(T::Assets::mint_into( + fee_asset.clone(), + &caller, + T::Assets::minimum_balance(fee_asset) + )); } +/// Mint the fee asset for the `caller` sufficient to cover the fee for creating a new pool. +fn mint_setup_fee_asset( + caller: &T::AccountId, + asset1: &T::AssetKind, + asset2: &T::AssetKind, + lp_token: &T::PoolAssetId, +) where + T::Assets: Create + Mutate, +{ + assert_ok!(T::Assets::mint_into( + T::PoolSetupFeeAsset::get(), + &caller, + T::PoolSetupFee::get() + + T::Assets::deposit_required(asset1.clone()) + + T::Assets::deposit_required(asset2.clone()) + + T::PoolAssets::deposit_required(lp_token.clone()) + )); +} + +/// Creates a pool for a given asset pair. +/// +/// This action mints the necessary amounts of the given assets for the `caller` to provide initial +/// liquidity. It returns the LP token ID along with a pair of amounts sufficient for the pool's +/// initial liquidity. fn create_asset_and_pool( - asset1: &T::MultiAssetId, - asset2: &T::MultiAssetId, -) -> (T::PoolAssetId, T::AccountId, AccountIdLookupOf) + caller: &T::AccountId, + asset1: &T::AssetKind, + asset2: &T::AssetKind, +) -> (T::PoolAssetId, T::Balance, T::Balance) where - T::AssetBalance: From, - T::Currency: Unbalanced, T::Assets: Create + Mutate, - T::PoolAssetId: Into, { - let (_, _) = create_asset::(asset1); - let (caller, caller_lookup) = create_asset::(asset2); + let (liquidity1, liquidity2) = valid_liquidity_amount::( + T::Assets::minimum_balance(asset1.clone()), + T::Assets::minimum_balance(asset2.clone()), + ); + create_asset::(caller, asset1, liquidity1); + create_asset::(caller, asset2, liquidity2); + let lp_token = AssetConversion::::get_next_pool_asset_id(); + + mint_setup_fee_asset::(caller, asset1, asset2, &lp_token); assert_ok!(AssetConversion::::create_pool( SystemOrigin::Signed(caller.clone()).into(), - asset1.clone(), - asset2.clone() + Box::new(asset1.clone()), + Box::new(asset2.clone()) )); - let lp_token = get_lp_token_id::(); - (lp_token, caller, caller_lookup) + (lp_token, liquidity1, liquidity2) } fn assert_last_event(generic_event: ::RuntimeEvent) { @@ -97,242 +165,198 @@ fn assert_last_event(generic_event: ::RuntimeEvent) { assert_eq!(event, &system_event); } -benchmarks! 
{ - where_clause { - where - T::AssetBalance: From + Into, - T::Currency: Unbalanced, - T::Balance: From + Into, - T::Assets: Create + Mutate, - T::PoolAssetId: Into, - } +#[benchmarks(where T::Assets: Create + Mutate, T::PoolAssetId: Into,)] +mod benchmarks { + use super::*; - create_pool { - let asset1 = T::MultiAssetIdConverter::get_native(); - let asset2 = T::BenchmarkHelper::multiasset_id(0); - let (caller, _) = create_asset::(&asset2); - }: _(SystemOrigin::Signed(caller.clone()), asset1.clone(), asset2.clone()) - verify { - let lp_token = get_lp_token_id::(); - let pool_id = (asset1.clone(), asset2.clone()); - assert_last_event::(Event::PoolCreated { - creator: caller.clone(), - pool_account: AssetConversion::::get_pool_account(&pool_id), - pool_id, - lp_token, - }.into()); - } + #[benchmark] + fn create_pool() { + let caller: T::AccountId = whitelisted_caller(); + let (asset1, asset2) = T::BenchmarkHelper::create_pair(0, 1); + create_asset::(&caller, &asset1, T::Assets::minimum_balance(asset1.clone())); + create_asset::(&caller, &asset2, T::Assets::minimum_balance(asset2.clone())); - add_liquidity { - let asset1 = T::MultiAssetIdConverter::get_native(); - let asset2 = T::BenchmarkHelper::multiasset_id(0); - let (lp_token, caller, _) = create_asset_and_pool::(&asset1, &asset2); - let ed: u128 = T::Currency::minimum_balance().into(); - let add_amount = 1000 + ed; - }: _(SystemOrigin::Signed(caller.clone()), asset1.clone(), asset2.clone(), add_amount.into(), 1000.into(), 0.into(), 0.into(), caller.clone()) - verify { - let pool_id = (asset1.clone(), asset2.clone()); - let lp_minted = AssetConversion::::calc_lp_amount_for_zero_supply(&add_amount.into(), &1000.into()).unwrap().into(); - assert_eq!( - T::PoolAssets::balance(lp_token, &caller), - lp_minted.into() - ); - assert_eq!( - T::Currency::balance(&AssetConversion::::get_pool_account(&pool_id)), - add_amount.into() - ); - assert_eq!( - T::Assets::balance(T::BenchmarkHelper::asset_id(0), &AssetConversion::::get_pool_account(&pool_id)), - 1000.into() + let lp_token = AssetConversion::::get_next_pool_asset_id(); + create_fee_asset::(&caller); + mint_setup_fee_asset::(&caller, &asset1, &asset2, &lp_token); + + #[extrinsic_call] + _(SystemOrigin::Signed(caller.clone()), Box::new(asset1.clone()), Box::new(asset2.clone())); + + let pool_id = T::PoolLocator::pool_id(&asset1, &asset2).unwrap(); + let pool_account = T::PoolLocator::address(&pool_id).unwrap(); + assert_last_event::( + Event::PoolCreated { creator: caller, pool_account, pool_id, lp_token }.into(), ); } - remove_liquidity { - let asset1 = T::MultiAssetIdConverter::get_native(); - let asset2 = T::BenchmarkHelper::multiasset_id(0); - let (lp_token, caller, _) = create_asset_and_pool::(&asset1, &asset2); - let ed: u128 = T::Currency::minimum_balance().into(); - let add_amount = 100 * ed; - let lp_minted = AssetConversion::::calc_lp_amount_for_zero_supply(&add_amount.into(), &1000.into()).unwrap().into(); - let remove_lp_amount = lp_minted.checked_div(10).unwrap(); + #[benchmark] + fn add_liquidity() { + let caller: T::AccountId = whitelisted_caller(); + let (asset1, asset2) = T::BenchmarkHelper::create_pair(0, 1); - AssetConversion::::add_liquidity( - SystemOrigin::Signed(caller.clone()).into(), - asset1.clone(), - asset2.clone(), - add_amount.into(), - 1000.into(), - 0.into(), - 0.into(), + create_fee_asset::(&caller); + let (lp_token, liquidity1, liquidity2) = + create_asset_and_pool::(&caller, &asset1, &asset2); + + #[extrinsic_call] + _( + SystemOrigin::Signed(caller.clone()), 
+ Box::new(asset1.clone()), + Box::new(asset2.clone()), + liquidity1, + liquidity2, + T::Balance::one(), + T::Balance::zero(), caller.clone(), - )?; - let total_supply = >::total_issuance(lp_token.clone()); - }: _(SystemOrigin::Signed(caller.clone()), asset1, asset2, remove_lp_amount.into(), 0.into(), 0.into(), caller.clone()) - verify { - let new_total_supply = >::total_issuance(lp_token.clone()); - assert_eq!( - new_total_supply, - total_supply - remove_lp_amount.into() ); + + let pool_account = T::PoolLocator::pool_address(&asset1, &asset2).unwrap(); + let lp_minted = + AssetConversion::::calc_lp_amount_for_zero_supply(&liquidity1, &liquidity2).unwrap(); + assert_eq!(T::PoolAssets::balance(lp_token, &caller), lp_minted); + assert_eq!(T::Assets::balance(asset1, &pool_account), liquidity1); + assert_eq!(T::Assets::balance(asset2, &pool_account), liquidity2); } - swap_exact_tokens_for_tokens { - let native = T::MultiAssetIdConverter::get_native(); - let asset1 = T::BenchmarkHelper::multiasset_id(1); - let asset2 = T::BenchmarkHelper::multiasset_id(2); - let (_, caller, _) = create_asset_and_pool::(&native, &asset1); - let (_, _) = create_asset::(&asset2); - let ed: u128 = T::Currency::minimum_balance().into(); + #[benchmark] + fn remove_liquidity() { + let caller: T::AccountId = whitelisted_caller(); + let (asset1, asset2) = T::BenchmarkHelper::create_pair(0, 1); - AssetConversion::::add_liquidity( + create_fee_asset::(&caller); + let (lp_token, liquidity1, liquidity2) = + create_asset_and_pool::(&caller, &asset1, &asset2); + + let remove_lp_amount = T::Balance::one(); + + assert_ok!(AssetConversion::::add_liquidity( SystemOrigin::Signed(caller.clone()).into(), - native.clone(), - asset1.clone(), - (100 * ed).into(), - 200.into(), - 0.into(), - 0.into(), + Box::new(asset1.clone()), + Box::new(asset2.clone()), + liquidity1, + liquidity2, + T::Balance::one(), + T::Balance::zero(), caller.clone(), - )?; - - let path; - let swap_amount; - // if we only allow the native-asset pools, then the worst case scenario would be to swap - // asset1-native-asset2 - if !T::AllowMultiAssetPools::get() { - AssetConversion::::create_pool(SystemOrigin::Signed(caller.clone()).into(), native.clone(), asset2.clone())?; - AssetConversion::::add_liquidity( - SystemOrigin::Signed(caller.clone()).into(), - native.clone(), - asset2.clone(), - (500 * ed).into(), - 1000.into(), - 0.into(), - 0.into(), - caller.clone(), - )?; - path = vec![asset1.clone(), native.clone(), asset2.clone()]; - swap_amount = 100.into(); - } else { - let asset3 = T::BenchmarkHelper::multiasset_id(3); - AssetConversion::::create_pool(SystemOrigin::Signed(caller.clone()).into(), asset1.clone(), asset2.clone())?; - let (_, _) = create_asset::(&asset3); - AssetConversion::::create_pool(SystemOrigin::Signed(caller.clone()).into(), asset2.clone(), asset3.clone())?; + )); + let total_supply = + >::total_issuance(lp_token.clone()); - AssetConversion::::add_liquidity( - SystemOrigin::Signed(caller.clone()).into(), - asset1.clone(), - asset2.clone(), - 200.into(), - 2000.into(), - 0.into(), - 0.into(), - caller.clone(), - )?; - AssetConversion::::add_liquidity( + #[extrinsic_call] + _( + SystemOrigin::Signed(caller.clone()), + Box::new(asset1), + Box::new(asset2), + remove_lp_amount, + T::Balance::zero(), + T::Balance::zero(), + caller.clone(), + ); + + let new_total_supply = >::total_issuance(lp_token); + assert_eq!(new_total_supply, total_supply - remove_lp_amount); + } + + #[benchmark] + fn swap_exact_tokens_for_tokens(n: Linear<2, { 
T::MaxSwapPathLength::get() }>) { + let mut swap_amount = T::Balance::one(); + let mut path = vec![]; + + let caller: T::AccountId = whitelisted_caller(); + create_fee_asset::(&caller); + for n in 1..n { + let (asset1, asset2) = T::BenchmarkHelper::create_pair(n - 1, n); + swap_amount = swap_amount + T::Balance::one(); + if path.len() == 0 { + path = vec![Box::new(asset1.clone()), Box::new(asset2.clone())]; + } else { + path.push(Box::new(asset2.clone())); + } + + let (_, liquidity1, liquidity2) = create_asset_and_pool::(&caller, &asset1, &asset2); + + assert_ok!(AssetConversion::::add_liquidity( SystemOrigin::Signed(caller.clone()).into(), - asset2.clone(), - asset3.clone(), - 2000.into(), - 2000.into(), - 0.into(), - 0.into(), + Box::new(asset1.clone()), + Box::new(asset2.clone()), + liquidity1, + liquidity2, + T::Balance::one(), + T::Balance::zero(), caller.clone(), - )?; - path = vec![native.clone(), asset1.clone(), asset2.clone(), asset3.clone()]; - swap_amount = ed.into(); + )); } - let path: BoundedVec<_, T::MaxSwapPathLength> = BoundedVec::try_from(path).unwrap(); - let native_balance = T::Currency::balance(&caller); - let asset1_balance = T::Assets::balance(T::BenchmarkHelper::asset_id(1), &caller); - }: _(SystemOrigin::Signed(caller.clone()), path, swap_amount, 1.into(), caller.clone(), false) - verify { - if !T::AllowMultiAssetPools::get() { - let new_asset1_balance = T::Assets::balance(T::BenchmarkHelper::asset_id(1), &caller); - assert_eq!(new_asset1_balance, asset1_balance - 100.into()); - } else { - let new_native_balance = T::Currency::balance(&caller); - assert_eq!(new_native_balance, native_balance - ed.into()); - } + let asset_in = *path.first().unwrap().clone(); + assert_ok!(T::Assets::mint_into( + asset_in.clone(), + &caller, + swap_amount + T::Balance::one() + )); + let init_caller_balance = T::Assets::balance(asset_in.clone(), &caller); + + #[extrinsic_call] + _( + SystemOrigin::Signed(caller.clone()), + path, + swap_amount, + T::Balance::one(), + caller.clone(), + true, + ); + + let actual_balance = T::Assets::balance(asset_in, &caller); + assert_eq!(actual_balance, init_caller_balance - swap_amount); } - swap_tokens_for_exact_tokens { - let native = T::MultiAssetIdConverter::get_native(); - let asset1 = T::BenchmarkHelper::multiasset_id(1); - let asset2 = T::BenchmarkHelper::multiasset_id(2); - let (_, caller, _) = create_asset_and_pool::(&native, &asset1); - let (_, _) = create_asset::(&asset2); - let ed: u128 = T::Currency::minimum_balance().into(); + #[benchmark] + fn swap_tokens_for_exact_tokens(n: Linear<2, { T::MaxSwapPathLength::get() }>) { + let mut max_swap_amount = T::Balance::one(); + let mut path = vec![]; - AssetConversion::::add_liquidity( - SystemOrigin::Signed(caller.clone()).into(), - native.clone(), - asset1.clone(), - (1000 * ed).into(), - 500.into(), - 0.into(), - 0.into(), - caller.clone(), - )?; - - let path; - // if we only allow the native-asset pools, then the worst case scenario would be to swap - // asset1-native-asset2 - if !T::AllowMultiAssetPools::get() { - AssetConversion::::create_pool(SystemOrigin::Signed(caller.clone()).into(), native.clone(), asset2.clone())?; - AssetConversion::::add_liquidity( - SystemOrigin::Signed(caller.clone()).into(), - native.clone(), - asset2.clone(), - (500 * ed).into(), - 1000.into(), - 0.into(), - 0.into(), - caller.clone(), - )?; - path = vec![asset1.clone(), native.clone(), asset2.clone()]; - } else { - AssetConversion::::create_pool(SystemOrigin::Signed(caller.clone()).into(), asset1.clone(), 
asset2.clone())?; - let asset3 = T::BenchmarkHelper::multiasset_id(3); - let (_, _) = create_asset::(&asset3); - AssetConversion::::create_pool(SystemOrigin::Signed(caller.clone()).into(), asset2.clone(), asset3.clone())?; + let caller: T::AccountId = whitelisted_caller(); + create_fee_asset::(&caller); + for n in 1..n { + let (asset1, asset2) = T::BenchmarkHelper::create_pair(n - 1, n); + max_swap_amount = max_swap_amount + T::Balance::one() + T::Balance::one(); + if path.len() == 0 { + path = vec![Box::new(asset1.clone()), Box::new(asset2.clone())]; + } else { + path.push(Box::new(asset2.clone())); + } - AssetConversion::::add_liquidity( - SystemOrigin::Signed(caller.clone()).into(), - asset1.clone(), - asset2.clone(), - 2000.into(), - 2000.into(), - 0.into(), - 0.into(), - caller.clone(), - )?; - AssetConversion::::add_liquidity( + let (_, liquidity1, liquidity2) = create_asset_and_pool::(&caller, &asset1, &asset2); + + assert_ok!(AssetConversion::::add_liquidity( SystemOrigin::Signed(caller.clone()).into(), - asset2.clone(), - asset3.clone(), - 2000.into(), - 2000.into(), - 0.into(), - 0.into(), + Box::new(asset1.clone()), + Box::new(asset2.clone()), + liquidity1, + liquidity2, + T::Balance::one(), + T::Balance::zero(), caller.clone(), - )?; - path = vec![native.clone(), asset1.clone(), asset2.clone(), asset3.clone()]; + )); } - let path: BoundedVec<_, T::MaxSwapPathLength> = BoundedVec::try_from(path).unwrap(); - let asset2_balance = T::Assets::balance(T::BenchmarkHelper::asset_id(2), &caller); - let asset3_balance = T::Assets::balance(T::BenchmarkHelper::asset_id(3), &caller); - }: _(SystemOrigin::Signed(caller.clone()), path.clone(), 100.into(), (1000 * ed).into(), caller.clone(), false) - verify { - if !T::AllowMultiAssetPools::get() { - let new_asset2_balance = T::Assets::balance(T::BenchmarkHelper::asset_id(2), &caller); - assert_eq!(new_asset2_balance, asset2_balance + 100.into()); - } else { - let new_asset3_balance = T::Assets::balance(T::BenchmarkHelper::asset_id(3), &caller); - assert_eq!(new_asset3_balance, asset3_balance + 100.into()); - } + let asset_in = *path.first().unwrap().clone(); + let asset_out = *path.last().unwrap().clone(); + assert_ok!(T::Assets::mint_into(asset_in, &caller, max_swap_amount)); + let init_caller_balance = T::Assets::balance(asset_out.clone(), &caller); + + #[extrinsic_call] + _( + SystemOrigin::Signed(caller.clone()), + path, + T::Balance::one(), + max_swap_amount, + caller.clone(), + true, + ); + + let actual_balance = T::Assets::balance(asset_out, &caller); + assert_eq!(actual_balance, init_caller_balance + T::Balance::one()); } impl_benchmark_test_suite!(AssetConversion, crate::mock::new_test_ext(), crate::mock::Test); diff --git a/substrate/frame/asset-conversion/src/lib.rs b/substrate/frame/asset-conversion/src/lib.rs index d21de34c5ee0..397acf4bf3b6 100644 --- a/substrate/frame/asset-conversion/src/lib.rs +++ b/substrate/frame/asset-conversion/src/lib.rs @@ -54,63 +54,54 @@ //! (This can be run against the kitchen sync node in the `node` folder of this repo.) 
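On the rewritten benchmarks above: `BenchmarkHelper::create_pair(seed1, seed2)` lets a runtime decide which asset pair each benchmark round uses, and `NativeOrWithIdFactory` is the ready-made helper for runtimes whose pools must always contain the native asset. A small sketch of what the factory produces, assuming the `runtime-benchmarks` feature is enabled (that is what re-exports both items from the pallet) and using `u32` as a stand-in asset id:

    use frame_support::traits::fungible::NativeOrWithId;
    use pallet_asset_conversion::{BenchmarkHelper, NativeOrWithIdFactory};

    // Sketch only: `u32` stands in for the runtime's asset id type.
    type Factory = NativeOrWithIdFactory<u32>;

    fn demo() {
        // Odd first seed: the native asset leads the pair.
        assert_eq!(
            Factory::create_pair(1, 7),
            (NativeOrWithId::Native, NativeOrWithId::WithId(7))
        );
        // Even first seed: the native asset is the second element.
        assert_eq!(
            Factory::create_pair(2, 7),
            (NativeOrWithId::WithId(7), NativeOrWithId::Native)
        );
    }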
#![deny(missing_docs)] #![cfg_attr(not(feature = "std"), no_std)] -use frame_support::traits::{DefensiveOption, Incrementable}; #[cfg(feature = "runtime-benchmarks")] mod benchmarking; - -mod types; -pub mod weights; - -#[cfg(test)] -mod tests; - #[cfg(test)] mod mock; +mod swap; +#[cfg(test)] +mod tests; +mod types; +pub mod weights; +#[cfg(feature = "runtime-benchmarks")] +pub use benchmarking::{BenchmarkHelper, NativeOrWithIdFactory}; +pub use pallet::*; +pub use swap::*; +pub use types::*; +pub use weights::WeightInfo; use codec::Codec; use frame_support::{ - ensure, - traits::tokens::{AssetId, Balance}, -}; -use frame_system::{ - ensure_signed, - pallet_prelude::{BlockNumberFor, OriginFor}, + storage::{with_storage_layer, with_transaction}, + traits::{ + fungibles::{Balanced, Create, Credit, Inspect, Mutate}, + tokens::{ + AssetId, Balance, + Fortitude::Polite, + Precision::Exact, + Preservation::{Expendable, Preserve}, + }, + AccountTouch, Incrementable, OnUnbalanced, + }, + PalletId, }; -pub use pallet::*; -use sp_arithmetic::traits::Unsigned; +use sp_core::Get; use sp_runtime::{ traits::{ - CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Ensure, MaybeDisplay, TrailingZeroInput, + CheckedAdd, CheckedDiv, CheckedMul, CheckedSub, Ensure, IntegerSquareRoot, MaybeDisplay, + One, TrailingZeroInput, Zero, }, - DispatchError, + DispatchError, Saturating, TokenError, TransactionOutcome, }; -use sp_std::prelude::*; -pub use types::*; -pub use weights::WeightInfo; +use sp_std::{boxed::Box, collections::btree_set::BTreeSet, vec::Vec}; #[frame_support::pallet] pub mod pallet { use super::*; - use frame_support::{ - pallet_prelude::*, - traits::{ - fungible::{Inspect as InspectFungible, Mutate as MutateFungible}, - fungibles::{Create, Inspect, Mutate}, - tokens::{ - Fortitude::Polite, - Precision::Exact, - Preservation::{Expendable, Preserve}, - }, - AccountTouch, ContainsPair, - }, - BoundedBTreeSet, PalletId, - }; - use sp_arithmetic::Permill; - use sp_runtime::{ - traits::{IntegerSquareRoot, One, Zero}, - Saturating, - }; + use frame_support::pallet_prelude::*; + use frame_system::pallet_prelude::*; + use sp_arithmetic::{traits::Unsigned, Permill}; #[pallet::pallet] pub struct Pallet(_); @@ -120,57 +111,46 @@ pub mod pallet { /// Overarching event type. type RuntimeEvent: From> + IsType<::RuntimeEvent>; - /// Currency type that this works on. - type Currency: InspectFungible - + MutateFungible; - - /// The `Currency::Balance` type of the native currency. + /// The type in which the assets for swapping are measured. type Balance: Balance; - /// The type used to describe the amount of fractions converted into assets. - type AssetBalance: Balance; - - /// A type used for conversions between `Balance` and `AssetBalance`. + /// A type used for calculations concerning the `Balance` type to avoid possible overflows. type HigherPrecisionBalance: IntegerSquareRoot + One + Ensure + Unsigned + From - + From + From - + TryInto + TryInto; - /// Identifier for the class of non-native asset. - /// Note: A `From` bound here would prevent `MultiLocation` from being used as an - /// `AssetId`. - type AssetId: AssetId; + /// Type of asset class, sourced from [`Config::Assets`], utilized to offer liquidity to a + /// pool. + type AssetKind: Parameter + MaxEncodedLen; - /// Type that identifies either the native currency or a token class from `Assets`. - /// `Ord` is added because of `get_pool_id`. - /// - /// The pool's `AccountId` is derived from this type. 
Any changes to the type may - /// necessitate a migration. - type MultiAssetId: AssetId + Ord + From; + /// Registry of assets utilized for providing liquidity to pools. + type Assets: Inspect + + Mutate + + AccountTouch + + Balanced; - /// Type to convert an `AssetId` into `MultiAssetId`. - type MultiAssetIdConverter: MultiAssetIdConverter; + /// Liquidity pool identifier. + type PoolId: Parameter + MaxEncodedLen + Ord; - /// `AssetId` to address the lp tokens by. - type PoolAssetId: AssetId + PartialOrd + Incrementable + From; + /// Provides means to resolve the [`Config::PoolId`] and it's `AccountId` from a pair + /// of [`Config::AssetKind`]s. + /// + /// Examples: [`crate::types::WithFirstAsset`], [`crate::types::Ascending`]. + type PoolLocator: PoolLocator; - /// Registry for the assets. - type Assets: Inspect - + Mutate - + AccountTouch - + ContainsPair; + /// Asset class for the lp tokens from [`Self::PoolAssets`]. + type PoolAssetId: AssetId + PartialOrd + Incrementable + From; /// Registry for the lp tokens. Ideally only this pallet should have create permissions on /// the assets. - type PoolAssets: Inspect + type PoolAssets: Inspect + Create + Mutate - + AccountTouch; + + AccountTouch; /// A % the liquidity providers will take of every swap. Represents 10ths of a percent. #[pallet::constant] @@ -180,8 +160,12 @@ pub mod pallet { #[pallet::constant] type PoolSetupFee: Get; - /// An account that receives the pool setup fee. - type PoolSetupFeeReceiver: Get; + /// Asset class from [`Config::Assets`] used to pay the [`Config::PoolSetupFee`]. + #[pallet::constant] + type PoolSetupFeeAsset: Get; + + /// Handler for the [`Config::PoolSetupFee`]. + type PoolSetupFeeTarget: OnUnbalanced>; /// A fee to withdraw the liquidity. #[pallet::constant] @@ -189,7 +173,7 @@ pub mod pallet { /// The minimum LP token amount that could be minted. Ameliorates rounding errors. #[pallet::constant] - type MintMinLiquidity: Get; + type MintMinLiquidity: Get; /// The max number of hops in a swap. #[pallet::constant] @@ -199,23 +183,19 @@ pub mod pallet { #[pallet::constant] type PalletId: Get; - /// A setting to allow creating pools with both non-native assets. - #[pallet::constant] - type AllowMultiAssetPools: Get; - /// Weight information for extrinsics in this pallet. type WeightInfo: WeightInfo; /// The benchmarks need a way to create asset ids from u32s. #[cfg(feature = "runtime-benchmarks")] - type BenchmarkHelper: BenchmarkHelper; + type BenchmarkHelper: BenchmarkHelper; } /// Map from `PoolAssetId` to `PoolInfo`. This establishes whether a pool has been officially /// created rather than people sending tokens directly to a pool's public account. #[pallet::storage] pub type Pools = - StorageMap<_, Blake2_128Concat, PoolIdOf, PoolInfo, OptionQuery>; + StorageMap<_, Blake2_128Concat, T::PoolId, PoolInfo, OptionQuery>; /// Stores the `PoolAssetId` that is going to be used for the next lp token. /// This gets incremented whenever a new lp pool is created. @@ -232,7 +212,7 @@ pub mod pallet { creator: T::AccountId, /// The pool id associated with the pool. Note that the order of the assets may not be /// the same as the order specified in the create pool extrinsic. - pool_id: PoolIdOf, + pool_id: T::PoolId, /// The account ID of the pool. pool_account: T::AccountId, /// The id of the liquidity tokens that will be minted when assets are added to this @@ -247,15 +227,15 @@ pub mod pallet { /// The account that the liquidity tokens were minted to. 
mint_to: T::AccountId, /// The pool id of the pool that the liquidity was added to. - pool_id: PoolIdOf, + pool_id: T::PoolId, /// The amount of the first asset that was added to the pool. - amount1_provided: T::AssetBalance, + amount1_provided: T::Balance, /// The amount of the second asset that was added to the pool. - amount2_provided: T::AssetBalance, + amount2_provided: T::Balance, /// The id of the lp token that was minted. lp_token: T::PoolAssetId, /// The amount of lp tokens that were minted of that id. - lp_token_minted: T::AssetBalance, + lp_token_minted: T::Balance, }, /// A successful call of the `RemoveLiquidity` extrinsic will create this event. @@ -265,15 +245,15 @@ pub mod pallet { /// The account that the assets were transferred to. withdraw_to: T::AccountId, /// The pool id that the liquidity was removed from. - pool_id: PoolIdOf, + pool_id: T::PoolId, /// The amount of the first asset that was removed from the pool. - amount1: T::AssetBalance, + amount1: T::Balance, /// The amount of the second asset that was removed from the pool. - amount2: T::AssetBalance, + amount2: T::Balance, /// The id of the lp token that was burned. lp_token: T::PoolAssetId, /// The amount of lp tokens that were burned of that id. - lp_token_burned: T::AssetBalance, + lp_token_burned: T::Balance, /// Liquidity withdrawal fee (%). withdrawal_fee: Permill, }, @@ -284,33 +264,30 @@ pub mod pallet { who: T::AccountId, /// The account that the assets were transferred to. send_to: T::AccountId, - /// The route of asset ids that the swap went through. - /// E.g. A -> Dot -> B - path: BoundedVec, /// The amount of the first asset that was swapped. - amount_in: T::AssetBalance, + amount_in: T::Balance, /// The amount of the second asset that was received. - amount_out: T::AssetBalance, + amount_out: T::Balance, + /// The route of asset IDs with amounts that the swap went through. + /// E.g. (A, amount_in) -> (Dot, amount_out) -> (B, amount_out) + path: BalancePath, }, - /// An amount has been transferred from one account to another. - Transfer { - /// The account that the assets were transferred from. - from: T::AccountId, - /// The account that the assets were transferred to. - to: T::AccountId, - /// The asset that was transferred. - asset: T::MultiAssetId, - /// The amount of the asset that was transferred. - amount: T::AssetBalance, + /// Assets have been converted from one to another. + SwapCreditExecuted { + /// The amount of the first asset that was swapped. + amount_in: T::Balance, + /// The amount of the second asset that was received. + amount_out: T::Balance, + /// The route of asset IDs with amounts that the swap went through. + /// E.g. (A, amount_in) -> (Dot, amount_out) -> (B, amount_out) + path: BalancePath, }, } #[pallet::error] pub enum Error { - /// Provided assets are equal. - EqualAssets, - /// Provided asset is not supported for pool. - UnsupportedAsset, + /// Provided asset pair is not supported for pool. + InvalidAssetPair, /// Pool already exists. PoolExists, /// Desired amount can't be zero. @@ -346,26 +323,18 @@ pub mod pallet { ZeroLiquidity, /// Amount can't be zero. ZeroAmount, - /// Insufficient liquidity in the pool. - InsufficientLiquidity, /// Calculated amount out is less than provided minimum amount. ProvidedMinimumNotSufficientForSwap, /// Provided maximum amount is not sufficient for swap. ProvidedMaximumNotSufficientForSwap, - /// Only pools with native on one side are valid. - PoolMustContainNativeCurrency, /// The provided path must consists of 2 assets at least. 
InvalidPath, - /// It was not possible to calculate path data. - PathError, /// The provided path must consists of unique assets. NonUniquePath, /// It was not possible to get or increment the Id of the pool. IncorrectPoolAssetId, - /// Unable to find an element in an array/vec that should have one-to-one correspondence - /// with another. For example, an array of assets constituting a `path` should have a - /// corresponding array of `amounts` along the path. - CorrespondenceError, + /// The destination account cannot exist with the swapped funds. + BelowMinimum, } #[pallet::hooks] @@ -389,48 +358,32 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::create_pool())] pub fn create_pool( origin: OriginFor, - asset1: T::MultiAssetId, - asset2: T::MultiAssetId, + asset1: Box, + asset2: Box, ) -> DispatchResult { let sender = ensure_signed(origin)?; - ensure!(asset1 != asset2, Error::::EqualAssets); + ensure!(asset1 != asset2, Error::::InvalidAssetPair); // prepare pool_id - let pool_id = Self::get_pool_id(asset1, asset2); + let pool_id = T::PoolLocator::pool_id(&asset1, &asset2) + .map_err(|_| Error::::InvalidAssetPair)?; ensure!(!Pools::::contains_key(&pool_id), Error::::PoolExists); - let (asset1, asset2) = &pool_id; - if !T::AllowMultiAssetPools::get() && !T::MultiAssetIdConverter::is_native(asset1) { - Err(Error::::PoolMustContainNativeCurrency)?; - } - let pool_account = Self::get_pool_account(&pool_id); - frame_system::Pallet::::inc_providers(&pool_account); + let pool_account = + T::PoolLocator::address(&pool_id).map_err(|_| Error::::InvalidAssetPair)?; // pay the setup fee - T::Currency::transfer( - &sender, - &T::PoolSetupFeeReceiver::get(), - T::PoolSetupFee::get(), - Preserve, - )?; + let fee = + Self::withdraw(T::PoolSetupFeeAsset::get(), &sender, T::PoolSetupFee::get(), true)?; + T::PoolSetupFeeTarget::on_unbalanced(fee); - // try to convert both assets - match T::MultiAssetIdConverter::try_convert(asset1) { - MultiAssetIdConversionResult::Converted(asset) => - if !T::Assets::contains(&asset, &pool_account) { - T::Assets::touch(asset, pool_account.clone(), sender.clone())? - }, - MultiAssetIdConversionResult::Unsupported(_) => Err(Error::::UnsupportedAsset)?, - MultiAssetIdConversionResult::Native => (), - } - match T::MultiAssetIdConverter::try_convert(asset2) { - MultiAssetIdConversionResult::Converted(asset) => - if !T::Assets::contains(&asset, &pool_account) { - T::Assets::touch(asset, pool_account.clone(), sender.clone())? - }, - MultiAssetIdConversionResult::Unsupported(_) => Err(Error::::UnsupportedAsset)?, - MultiAssetIdConversionResult::Native => (), - } + if T::Assets::should_touch(*asset1.clone(), &pool_account) { + T::Assets::touch(*asset1, &pool_account, &sender)? + }; + + if T::Assets::should_touch(*asset2.clone(), &pool_account) { + T::Assets::touch(*asset2, &pool_account, &sender)? 
+ }; let lp_token = NextPoolAssetId::::get() .or(T::PoolAssetId::initial_value()) @@ -439,7 +392,7 @@ pub mod pallet { NextPoolAssetId::::set(Some(next_lp_token_id)); T::PoolAssets::create(lp_token.clone(), pool_account.clone(), false, 1u32.into())?; - T::PoolAssets::touch(lp_token.clone(), pool_account.clone(), sender.clone())?; + T::PoolAssets::touch(lp_token.clone(), &pool_account, &sender)?; let pool_info = PoolInfo { lp_token: lp_token.clone() }; Pools::::insert(pool_id.clone(), pool_info); @@ -467,39 +420,33 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::add_liquidity())] pub fn add_liquidity( origin: OriginFor, - asset1: T::MultiAssetId, - asset2: T::MultiAssetId, - amount1_desired: T::AssetBalance, - amount2_desired: T::AssetBalance, - amount1_min: T::AssetBalance, - amount2_min: T::AssetBalance, + asset1: Box, + asset2: Box, + amount1_desired: T::Balance, + amount2_desired: T::Balance, + amount1_min: T::Balance, + amount2_min: T::Balance, mint_to: T::AccountId, ) -> DispatchResult { let sender = ensure_signed(origin)?; - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - // swap params if needed - let (amount1_desired, amount2_desired, amount1_min, amount2_min) = - if pool_id.0 == asset1 { - (amount1_desired, amount2_desired, amount1_min, amount2_min) - } else { - (amount2_desired, amount1_desired, amount2_min, amount1_min) - }; + let pool_id = T::PoolLocator::pool_id(&asset1, &asset2) + .map_err(|_| Error::::InvalidAssetPair)?; + ensure!( amount1_desired > Zero::zero() && amount2_desired > Zero::zero(), Error::::WrongDesiredAmount ); - let maybe_pool = Pools::::get(&pool_id); - let pool = maybe_pool.as_ref().ok_or(Error::::PoolNotFound)?; - let pool_account = Self::get_pool_account(&pool_id); + let pool = Pools::::get(&pool_id).ok_or(Error::::PoolNotFound)?; + let pool_account = + T::PoolLocator::address(&pool_id).map_err(|_| Error::::InvalidAssetPair)?; - let (asset1, asset2) = &pool_id; - let reserve1 = Self::get_balance(&pool_account, asset1)?; - let reserve2 = Self::get_balance(&pool_account, asset2)?; + let reserve1 = Self::get_balance(&pool_account, *asset1.clone()); + let reserve2 = Self::get_balance(&pool_account, *asset2.clone()); - let amount1: T::AssetBalance; - let amount2: T::AssetBalance; + let amount1: T::Balance; + let amount2: T::Balance; if reserve1.is_zero() || reserve2.is_zero() { amount1 = amount1_desired; amount2 = amount2_desired; @@ -528,17 +475,21 @@ pub mod pallet { } } - Self::validate_minimal_amount(amount1.saturating_add(reserve1), asset1) - .map_err(|_| Error::::AmountOneLessThanMinimal)?; - Self::validate_minimal_amount(amount2.saturating_add(reserve2), asset2) - .map_err(|_| Error::::AmountTwoLessThanMinimal)?; + ensure!( + amount1.saturating_add(reserve1) >= T::Assets::minimum_balance(*asset1.clone()), + Error::::AmountOneLessThanMinimal + ); + ensure!( + amount2.saturating_add(reserve2) >= T::Assets::minimum_balance(*asset2.clone()), + Error::::AmountTwoLessThanMinimal + ); - Self::transfer(asset1, &sender, &pool_account, amount1, true)?; - Self::transfer(asset2, &sender, &pool_account, amount2, true)?; + T::Assets::transfer(*asset1, &sender, &pool_account, amount1, Preserve)?; + T::Assets::transfer(*asset2, &sender, &pool_account, amount2, Preserve)?; let total_supply = T::PoolAssets::total_issuance(pool.lp_token.clone()); - let lp_token_amount: T::AssetBalance; + let lp_token_amount: T::Balance; if total_supply.is_zero() { lp_token_amount = Self::calc_lp_amount_for_zero_supply(&amount1, &amount2)?; T::PoolAssets::mint_into( 
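// Simplified model of how `add_liquidity` settles on the deposited amounts once
// a pool already has reserves. The unchanged branch is elided from this diff,
// so treat the fallback logic below as an assumption: quote amount2 from
// amount1 as amount1 * reserve2 / reserve1 and, if that exceeds the caller's
// desired amount2, quote the other way around instead.
fn quote(amount: u128, reserve1: u128, reserve2: u128) -> Option<u128> {
    amount.checked_mul(reserve2)?.checked_div(reserve1)
}

fn settle(
    amount1_desired: u128,
    amount2_desired: u128,
    reserve1: u128,
    reserve2: u128,
) -> Option<(u128, u128)> {
    if reserve1 == 0 || reserve2 == 0 {
        // the first liquidity provider sets the price
        return Some((amount1_desired, amount2_desired));
    }
    let amount2_optimal = quote(amount1_desired, reserve1, reserve2)?;
    if amount2_optimal <= amount2_desired {
        Some((amount1_desired, amount2_optimal))
    } else {
        let amount1_optimal = quote(amount2_desired, reserve2, reserve1)?;
        Some((amount1_optimal, amount2_desired))
    }
}

fn main() {
    // a pool holding 10_000 native / 10 of asset 2 prices them 1000:1
    assert_eq!(settle(5_000, 100, 10_000, 10), Some((5_000, 5)));
}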
@@ -565,7 +516,7 @@ pub mod pallet { pool_id, amount1_provided: amount1, amount2_provided: amount2, - lp_token: pool.lp_token.clone(), + lp_token: pool.lp_token, lp_token_minted: lp_token_amount, }); @@ -579,32 +530,26 @@ pub mod pallet { #[pallet::weight(T::WeightInfo::remove_liquidity())] pub fn remove_liquidity( origin: OriginFor, - asset1: T::MultiAssetId, - asset2: T::MultiAssetId, - lp_token_burn: T::AssetBalance, - amount1_min_receive: T::AssetBalance, - amount2_min_receive: T::AssetBalance, + asset1: Box, + asset2: Box, + lp_token_burn: T::Balance, + amount1_min_receive: T::Balance, + amount2_min_receive: T::Balance, withdraw_to: T::AccountId, ) -> DispatchResult { let sender = ensure_signed(origin)?; - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - // swap params if needed - let (amount1_min_receive, amount2_min_receive) = if pool_id.0 == asset1 { - (amount1_min_receive, amount2_min_receive) - } else { - (amount2_min_receive, amount1_min_receive) - }; - let (asset1, asset2) = pool_id.clone(); + let pool_id = T::PoolLocator::pool_id(&asset1, &asset2) + .map_err(|_| Error::::InvalidAssetPair)?; ensure!(lp_token_burn > Zero::zero(), Error::::ZeroLiquidity); - let maybe_pool = Pools::::get(&pool_id); - let pool = maybe_pool.as_ref().ok_or(Error::::PoolNotFound)?; + let pool = Pools::::get(&pool_id).ok_or(Error::::PoolNotFound)?; - let pool_account = Self::get_pool_account(&pool_id); - let reserve1 = Self::get_balance(&pool_account, &asset1)?; - let reserve2 = Self::get_balance(&pool_account, &asset2)?; + let pool_account = + T::PoolLocator::address(&pool_id).map_err(|_| Error::::InvalidAssetPair)?; + let reserve1 = Self::get_balance(&pool_account, *asset1.clone()); + let reserve2 = Self::get_balance(&pool_account, *asset2.clone()); let total_supply = T::PoolAssets::total_issuance(pool.lp_token.clone()); let withdrawal_fee_amount = T::LiquidityWithdrawalFee::get() * lp_token_burn; @@ -623,16 +568,20 @@ pub mod pallet { ); let reserve1_left = reserve1.saturating_sub(amount1); let reserve2_left = reserve2.saturating_sub(amount2); - Self::validate_minimal_amount(reserve1_left, &asset1) - .map_err(|_| Error::::ReserveLeftLessThanMinimal)?; - Self::validate_minimal_amount(reserve2_left, &asset2) - .map_err(|_| Error::::ReserveLeftLessThanMinimal)?; + ensure!( + reserve1_left >= T::Assets::minimum_balance(*asset1.clone()), + Error::::ReserveLeftLessThanMinimal + ); + ensure!( + reserve2_left >= T::Assets::minimum_balance(*asset2.clone()), + Error::::ReserveLeftLessThanMinimal + ); // burn the provided lp token amount that includes the fee T::PoolAssets::burn_from(pool.lp_token.clone(), &sender, lp_token_burn, Exact, Polite)?; - Self::transfer(&asset1, &pool_account, &withdraw_to, amount1, false)?; - Self::transfer(&asset2, &pool_account, &withdraw_to, amount2, false)?; + T::Assets::transfer(*asset1, &pool_account, &withdraw_to, amount1, Expendable)?; + T::Assets::transfer(*asset2, &pool_account, &withdraw_to, amount2, Expendable)?; Self::deposit_event(Event::LiquidityRemoved { who: sender, @@ -640,7 +589,7 @@ pub mod pallet { pool_id, amount1, amount2, - lp_token: pool.lp_token.clone(), + lp_token: pool.lp_token, lp_token_burned: lp_token_burn, withdrawal_fee: T::LiquidityWithdrawalFee::get(), }); @@ -655,19 +604,19 @@ pub mod pallet { /// [`AssetConversionApi::quote_price_exact_tokens_for_tokens`] runtime call can be called /// for a quote. 
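// Rough model of the `remove_liquidity` payout around the checks above: the
// withdrawal fee is taken in LP tokens first, then the remaining share redeems
// the reserves pro rata. The pro-rata division itself is unchanged context not
// shown in this diff, so it is an assumption here; `Permill` is modelled as
// integer parts per million.
fn remove_liquidity_payout(
    lp_token_burn: u128,
    withdrawal_fee_ppm: u128, // 0 in the mock runtime's `LiquidityWithdrawalFee`
    total_supply: u128,
    reserve1: u128,
    reserve2: u128,
) -> Option<(u128, u128)> {
    if total_supply == 0 {
        return None;
    }
    let fee = lp_token_burn.checked_mul(withdrawal_fee_ppm)? / 1_000_000;
    let lp_redeemed = lp_token_burn.checked_sub(fee)?;
    let amount1 = lp_redeemed.checked_mul(reserve1)? / total_supply;
    let amount2 = lp_redeemed.checked_mul(reserve2)? / total_supply;
    Some((amount1, amount2))
}

fn main() {
    // burning 10% of the LP supply with a zero fee returns 10% of each reserve
    assert_eq!(remove_liquidity_payout(100, 0, 1_000, 10_000, 10), Some((1_000, 1)));
}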
#[pallet::call_index(3)] - #[pallet::weight(T::WeightInfo::swap_exact_tokens_for_tokens())] + #[pallet::weight(T::WeightInfo::swap_exact_tokens_for_tokens(path.len() as u32))] pub fn swap_exact_tokens_for_tokens( origin: OriginFor, - path: BoundedVec, - amount_in: T::AssetBalance, - amount_out_min: T::AssetBalance, + path: Vec>, + amount_in: T::Balance, + amount_out_min: T::Balance, send_to: T::AccountId, keep_alive: bool, ) -> DispatchResult { let sender = ensure_signed(origin)?; Self::do_swap_exact_tokens_for_tokens( sender, - path, + path.into_iter().map(|a| *a).collect(), amount_in, Some(amount_out_min), send_to, @@ -683,19 +632,19 @@ pub mod pallet { /// [`AssetConversionApi::quote_price_tokens_for_exact_tokens`] runtime call can be called /// for a quote. #[pallet::call_index(4)] - #[pallet::weight(T::WeightInfo::swap_tokens_for_exact_tokens())] + #[pallet::weight(T::WeightInfo::swap_tokens_for_exact_tokens(path.len() as u32))] pub fn swap_tokens_for_exact_tokens( origin: OriginFor, - path: BoundedVec, - amount_out: T::AssetBalance, - amount_in_max: T::AssetBalance, + path: Vec>, + amount_out: T::Balance, + amount_in_max: T::Balance, send_to: T::AccountId, keep_alive: bool, ) -> DispatchResult { let sender = ensure_signed(origin)?; Self::do_swap_tokens_for_exact_tokens( sender, - path, + path.into_iter().map(|a| *a).collect(), amount_out, Some(amount_in_max), send_to, @@ -714,25 +663,27 @@ pub mod pallet { /// respecting `keep_alive`. /// /// If successful, returns the amount of `path[1]` acquired for the `amount_in`. - pub fn do_swap_exact_tokens_for_tokens( + /// + /// WARNING: This may return an error after a partial storage mutation. It should be used + /// only inside a transactional storage context and an Err result must imply a storage + /// rollback. + pub(crate) fn do_swap_exact_tokens_for_tokens( sender: T::AccountId, - path: BoundedVec, - amount_in: T::AssetBalance, - amount_out_min: Option, + path: Vec, + amount_in: T::Balance, + amount_out_min: Option, send_to: T::AccountId, keep_alive: bool, - ) -> Result { + ) -> Result { ensure!(amount_in > Zero::zero(), Error::::ZeroAmount); if let Some(amount_out_min) = amount_out_min { ensure!(amount_out_min > Zero::zero(), Error::::ZeroAmount); } Self::validate_swap_path(&path)?; + let path = Self::balance_path_from_amount_in(amount_in, path)?; - let amounts = Self::get_amounts_out(&amount_in, &path)?; - let amount_out = - *amounts.last().defensive_ok_or("get_amounts_out() returned an empty result")?; - + let amount_out = path.last().map(|(_, a)| *a).ok_or(Error::::InvalidPath)?; if let Some(amount_out_min) = amount_out_min { ensure!( amount_out >= amount_out_min, @@ -740,7 +691,15 @@ pub mod pallet { ); } - Self::do_swap(sender, &amounts, path, send_to, keep_alive)?; + Self::swap(&sender, &path, &send_to, keep_alive)?; + + Self::deposit_event(Event::SwapExecuted { + who: sender, + send_to, + amount_in, + amount_out, + path, + }); Ok(amount_out) } @@ -752,25 +711,27 @@ pub mod pallet { /// respecting `keep_alive`. /// /// If successful returns the amount of the `path[0]` taken to provide `path[1]`. - pub fn do_swap_tokens_for_exact_tokens( + /// + /// WARNING: This may return an error after a partial storage mutation. It should be used + /// only inside a transactional storage context and an Err result must imply a storage + /// rollback. 
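// A small standalone illustration of the two slippage guards used by the swap
// paths above (error names taken from the pallet, numbers illustrative):
// "exact in" enforces a floor on what is received, "exact out" a ceiling on
// what is paid.
fn check_exact_in(amount_out: u128, amount_out_min: Option<u128>) -> Result<u128, &'static str> {
    match amount_out_min {
        Some(min) if amount_out < min => Err("ProvidedMinimumNotSufficientForSwap"),
        _ => Ok(amount_out),
    }
}

fn check_exact_out(amount_in: u128, amount_in_max: Option<u128>) -> Result<u128, &'static str> {
    match amount_in_max {
        Some(max) if amount_in > max => Err("ProvidedMaximumNotSufficientForSwap"),
        _ => Ok(amount_in),
    }
}

fn main() {
    assert_eq!(check_exact_in(995, Some(990)), Ok(995));
    assert!(check_exact_in(985, Some(990)).is_err());
    assert_eq!(check_exact_out(1_005, Some(1_010)), Ok(1_005));
    assert!(check_exact_out(1_015, Some(1_010)).is_err());
}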
+ pub(crate) fn do_swap_tokens_for_exact_tokens( sender: T::AccountId, - path: BoundedVec, - amount_out: T::AssetBalance, - amount_in_max: Option, + path: Vec, + amount_out: T::Balance, + amount_in_max: Option, send_to: T::AccountId, keep_alive: bool, - ) -> Result { + ) -> Result { ensure!(amount_out > Zero::zero(), Error::::ZeroAmount); if let Some(amount_in_max) = amount_in_max { ensure!(amount_in_max > Zero::zero(), Error::::ZeroAmount); } Self::validate_swap_path(&path)?; + let path = Self::balance_path_from_amount_out(amount_out, path)?; - let amounts = Self::get_amounts_in(&amount_out, &path)?; - let amount_in = - *amounts.first().defensive_ok_or("get_amounts_in() returned an empty result")?; - + let amount_in = path.first().map(|(_, a)| *a).ok_or(Error::::InvalidPath)?; if let Some(amount_in_max) = amount_in_max { ensure!( amount_in <= amount_in_max, @@ -778,198 +739,236 @@ pub mod pallet { ); } - Self::do_swap(sender, &amounts, path, send_to, keep_alive)?; + Self::swap(&sender, &path, &send_to, keep_alive)?; + + Self::deposit_event(Event::SwapExecuted { + who: sender, + send_to, + amount_in, + amount_out, + path, + }); + Ok(amount_in) } - /// Transfer an `amount` of `asset_id`, respecting the `keep_alive` requirements. - fn transfer( - asset_id: &T::MultiAssetId, - from: &T::AccountId, - to: &T::AccountId, - amount: T::AssetBalance, - keep_alive: bool, - ) -> Result { - let result = match T::MultiAssetIdConverter::try_convert(asset_id) { - MultiAssetIdConversionResult::Converted(asset_id) => - T::Assets::transfer(asset_id, from, to, amount, Expendable), - MultiAssetIdConversionResult::Native => { - let preservation = match keep_alive { - true => Preserve, - false => Expendable, - }; - let amount = Self::convert_asset_balance_to_native_balance(amount)?; - Ok(Self::convert_native_balance_to_asset_balance(T::Currency::transfer( - from, - to, - amount, - preservation, - )?)?) - }, - MultiAssetIdConversionResult::Unsupported(_) => - Err(Error::::UnsupportedAsset.into()), + /// Swap exactly `credit_in` of asset `path[0]` for asset `path[last]`. If `amount_out_min` + /// is provided and the swap can't achieve at least this amount, an error is returned. + /// + /// On a successful swap, the function returns the `credit_out` of `path[last]` obtained + /// from the `credit_in`. On failure, it returns an `Err` containing the original + /// `credit_in` and the associated error code. + /// + /// WARNING: This may return an error after a partial storage mutation. It should be used + /// only inside a transactional storage context and an Err result must imply a storage + /// rollback. 
+ pub(crate) fn do_swap_exact_credit_tokens_for_tokens( + path: Vec, + credit_in: CreditOf, + amount_out_min: Option, + ) -> Result, (CreditOf, DispatchError)> { + let amount_in = credit_in.peek(); + let inspect_path = |credit_asset| { + ensure!( + path.first().map_or(false, |a| *a == credit_asset), + Error::::InvalidPath + ); + ensure!(!amount_in.is_zero(), Error::::ZeroAmount); + ensure!(amount_out_min.map_or(true, |a| !a.is_zero()), Error::::ZeroAmount); + + Self::validate_swap_path(&path)?; + let path = Self::balance_path_from_amount_in(amount_in, path)?; + + let amount_out = path.last().map(|(_, a)| *a).ok_or(Error::::InvalidPath)?; + ensure!( + amount_out_min.map_or(true, |a| amount_out >= a), + Error::::ProvidedMinimumNotSufficientForSwap + ); + Ok((path, amount_out)) + }; + let (path, amount_out) = match inspect_path(credit_in.asset()) { + Ok((p, a)) => (p, a), + Err(e) => return Err((credit_in, e)), }; - if result.is_ok() { - Self::deposit_event(Event::Transfer { - from: from.clone(), - to: to.clone(), - asset: (*asset_id).clone(), - amount, - }); - } - result - } + let credit_out = Self::credit_swap(credit_in, &path)?; - /// Convert a `Balance` type to an `AssetBalance`. - pub(crate) fn convert_native_balance_to_asset_balance( - amount: T::Balance, - ) -> Result> { - T::HigherPrecisionBalance::from(amount) - .try_into() - .map_err(|_| Error::::Overflow) - } + Self::deposit_event(Event::SwapCreditExecuted { amount_in, amount_out, path }); - /// Convert an `AssetBalance` type to a `Balance`. - pub(crate) fn convert_asset_balance_to_native_balance( - amount: T::AssetBalance, - ) -> Result> { - T::HigherPrecisionBalance::from(amount) - .try_into() - .map_err(|_| Error::::Overflow) + Ok(credit_out) } - /// Convert a `HigherPrecisionBalance` type to an `AssetBalance`. - pub(crate) fn convert_hpb_to_asset_balance( - amount: T::HigherPrecisionBalance, - ) -> Result> { - amount.try_into().map_err(|_| Error::::Overflow) + /// Swaps a portion of `credit_in` of `path[0]` asset to obtain the desired `amount_out` of + /// the `path[last]` asset. The provided `credit_in` must be adequate to achieve the target + /// `amount_out`, or an error will occur. + /// + /// On success, the function returns a (`credit_out`, `credit_change`) tuple, where + /// `credit_out` represents the acquired amount of the `path[last]` asset, and + /// `credit_change` is the remaining portion from the `credit_in`. On failure, an `Err` with + /// the initial `credit_in` and error code is returned. + /// + /// WARNING: This may return an error after a partial storage mutation. It should be used + /// only inside a transactional storage context and an Err result must imply a storage + /// rollback. 
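// Standalone model of the "credit" plumbing these functions are built on (not
// frame_support's real imbalance types): withdrawing produces a credit that
// must either be resolved into an account or handed back to the caller on
// error, and the exact-out variant splits the incoming credit into the spent
// part and the change.
#[derive(Debug, PartialEq)]
struct Credit {
    asset: u32,
    amount: u128,
}

impl Credit {
    fn peek(&self) -> u128 {
        self.amount
    }

    // Split off `amount`, returning (taken, change), mirroring how the pallet
    // splits `credit_in` before an exact-out swap.
    fn split(self, amount: u128) -> (Credit, Credit) {
        let taken = amount.min(self.amount);
        (
            Credit { asset: self.asset, amount: taken },
            Credit { asset: self.asset, amount: self.amount - taken },
        )
    }
}

fn main() {
    let credit_in = Credit { asset: 1, amount: 1_000 };
    let (spent, change) = credit_in.split(800);
    assert_eq!(spent.peek(), 800);
    assert_eq!(change.peek(), 200);
}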
+ pub(crate) fn do_swap_credit_tokens_for_exact_tokens( + path: Vec, + credit_in: CreditOf, + amount_out: T::Balance, + ) -> Result<(CreditOf, CreditOf), (CreditOf, DispatchError)> { + let amount_in_max = credit_in.peek(); + let inspect_path = |credit_asset| { + ensure!( + path.first().map_or(false, |a| a == &credit_asset), + Error::::InvalidPath + ); + ensure!(amount_in_max > Zero::zero(), Error::::ZeroAmount); + ensure!(amount_out > Zero::zero(), Error::::ZeroAmount); + + Self::validate_swap_path(&path)?; + let path = Self::balance_path_from_amount_out(amount_out, path)?; + + let amount_in = path.first().map(|(_, a)| *a).ok_or(Error::::InvalidPath)?; + ensure!( + amount_in <= amount_in_max, + Error::::ProvidedMaximumNotSufficientForSwap + ); + + Ok((path, amount_in)) + }; + let (path, amount_in) = match inspect_path(credit_in.asset()) { + Ok((p, a)) => (p, a), + Err(e) => return Err((credit_in, e)), + }; + + let (credit_in, credit_change) = credit_in.split(amount_in); + let credit_out = Self::credit_swap(credit_in, &path)?; + + Self::deposit_event(Event::SwapCreditExecuted { amount_in, amount_out, path }); + + Ok((credit_out, credit_change)) } - /// Swap assets along a `path`, depositing in `send_to`. - pub(crate) fn do_swap( - sender: T::AccountId, - amounts: &Vec, - path: BoundedVec, - send_to: T::AccountId, + /// Swap assets along the `path`, withdrawing from `sender` and depositing in `send_to`. + /// + /// Note: It's assumed that the provided `path` is valid. + /// + /// WARNING: This may return an error after a partial storage mutation. It should be used + /// only inside a transactional storage context and an Err result must imply a storage + /// rollback. + fn swap( + sender: &T::AccountId, + path: &BalancePath, + send_to: &T::AccountId, keep_alive: bool, ) -> Result<(), DispatchError> { - ensure!(amounts.len() > 1, Error::::CorrespondenceError); - if let Some([asset1, asset2]) = &path.get(0..2) { - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - let pool_account = Self::get_pool_account(&pool_id); - // amounts should always contain a corresponding element to path. - let first_amount = amounts.first().ok_or(Error::::CorrespondenceError)?; - - Self::transfer(asset1, &sender, &pool_account, *first_amount, keep_alive)?; - - let mut i = 0; - let path_len = path.len() as u32; - for assets_pair in path.windows(2) { - if let [asset1, asset2] = assets_pair { - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - let pool_account = Self::get_pool_account(&pool_id); - - let amount_out = - amounts.get((i + 1) as usize).ok_or(Error::::CorrespondenceError)?; - - let to = if i < path_len - 2 { - let asset3 = path.get((i + 2) as usize).ok_or(Error::::PathError)?; - Self::get_pool_account(&Self::get_pool_id( + let (asset_in, amount_in) = path.first().ok_or(Error::::InvalidPath)?; + let credit_in = Self::withdraw(asset_in.clone(), sender, *amount_in, keep_alive)?; + + let credit_out = Self::credit_swap(credit_in, path).map_err(|(_, e)| e)?; + T::Assets::resolve(send_to, credit_out).map_err(|_| Error::::BelowMinimum)?; + + Ok(()) + } + + /// Swap assets along the specified `path`, consuming `credit_in` and producing + /// `credit_out`. + /// + /// If an error occurs, `credit_in` is returned back. + /// + /// Note: It's assumed that the provided `path` is valid and `credit_in` corresponds to the + /// first asset in the `path`. + /// + /// WARNING: This may return an error after a partial storage mutation. 
It should be used + /// only inside a transactional storage context and an Err result must imply a storage + /// rollback. + fn credit_swap( + credit_in: CreditOf, + path: &BalancePath, + ) -> Result, (CreditOf, DispatchError)> { + let resolve_path = || -> Result, DispatchError> { + for pos in 0..=path.len() { + if let Some([(asset1, _), (asset2, amount_out)]) = path.get(pos..=pos + 1) { + let pool_from = T::PoolLocator::pool_address(asset1, asset2) + .map_err(|_| Error::::InvalidAssetPair)?; + + if let Some((asset3, _)) = path.get(pos + 2) { + let pool_to = T::PoolLocator::pool_address(asset2, asset3) + .map_err(|_| Error::::InvalidAssetPair)?; + + T::Assets::transfer( asset2.clone(), - asset3.clone(), - )) + &pool_from, + &pool_to, + *amount_out, + Preserve, + )?; } else { - send_to.clone() - }; + let credit_out = + Self::withdraw(asset2.clone(), &pool_from, *amount_out, true)?; + return Ok(credit_out) + } + } + } + Err(Error::::InvalidPath.into()) + }; - let reserve = Self::get_balance(&pool_account, asset2)?; - let reserve_left = reserve.saturating_sub(*amount_out); - Self::validate_minimal_amount(reserve_left, asset2) - .map_err(|_| Error::::ReserveLeftLessThanMinimal)?; + let credit_out = match resolve_path() { + Ok(c) => c, + Err(e) => return Err((credit_in, e)), + }; - Self::transfer(asset2, &pool_account, &to, *amount_out, true)?; - } - i.saturating_inc(); + let pool_to = if let Some([(asset1, _), (asset2, _)]) = path.get(0..2) { + match T::PoolLocator::pool_address(asset1, asset2) { + Ok(address) => address, + Err(_) => return Err((credit_in, Error::::InvalidAssetPair.into())), } - Self::deposit_event(Event::SwapExecuted { - who: sender, - send_to, - path, - amount_in: *first_amount, - amount_out: *amounts.last().expect("Always has more than 1 element"), - }); } else { - return Err(Error::::InvalidPath.into()) - } - Ok(()) - } + return Err((credit_in, Error::::InvalidPath.into())) + }; - /// The account ID of the pool. - /// - /// This actually does computation. If you need to keep using it, then make sure you cache - /// the value and only call this once. - pub fn get_pool_account(pool_id: &PoolIdOf) -> T::AccountId { - let encoded_pool_id = sp_io::hashing::blake2_256(&Encode::encode(pool_id)[..]); + T::Assets::resolve(&pool_to, credit_in) + .map_err(|c| (c, Error::::BelowMinimum.into()))?; - Decode::decode(&mut TrailingZeroInput::new(encoded_pool_id.as_ref())) - .expect("infinite length input; no invalid inputs for type") + Ok(credit_out) } - /// Get the `owner`'s balance of `asset`, which could be the chain's native asset or another - /// fungible. Returns a value in the form of an `AssetBalance`. - fn get_balance( - owner: &T::AccountId, - asset: &T::MultiAssetId, - ) -> Result> { - match T::MultiAssetIdConverter::try_convert(asset) { - MultiAssetIdConversionResult::Converted(asset_id) => Ok( - <::Assets>::reducible_balance(asset_id, owner, Expendable, Polite), - ), - MultiAssetIdConversionResult::Native => - Self::convert_native_balance_to_asset_balance( - <::Currency>::reducible_balance(owner, Expendable, Polite), - ), - MultiAssetIdConversionResult::Unsupported(_) => - Err(Error::::UnsupportedAsset.into()), + /// Removes `value` balance of `asset` from `who` account if possible. + fn withdraw( + asset: T::AssetKind, + who: &T::AccountId, + value: T::Balance, + keep_alive: bool, + ) -> Result, DispatchError> { + let preservation = match keep_alive { + true => Preserve, + false => Expendable, + }; + if preservation == Preserve { + // TODO drop the ensure! 
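// Standalone model of the path walk performed by `credit_swap` above: for each
// adjacent pair of hops, the intermediate amount moves from the pool of
// (asset1, asset2) to the pool of (asset2, asset3), and the final hop's amount
// is what leaves the last pool. Pool balances are a plain map here, the amounts
// along the path are taken as given (they come from the quoting step), and
// resolving the input credit into the first pool is left out, as the pallet
// does that separately after the walk.
use std::collections::HashMap;

type Asset = u32;
type Balance = u128;
type Pools = HashMap<(Asset, Asset), HashMap<Asset, Balance>>;

fn pool_key(a: Asset, b: Asset) -> (Asset, Asset) {
    if a < b { (a, b) } else { (b, a) }
}

fn walk_path(path: &[(Asset, Balance)], pools: &mut Pools) -> Result<Balance, &'static str> {
    for pos in 0..path.len().saturating_sub(1) {
        let (asset1, _) = path[pos];
        let (asset2, amount_out) = path[pos + 1];
        // debit the hop amount from the pool the swap is executed against
        *pools
            .get_mut(&pool_key(asset1, asset2))
            .and_then(|p| p.get_mut(&asset2))
            .ok_or("unknown pool")? -= amount_out;
        match path.get(pos + 2) {
            // intermediate hop: credit the amount straight into the next pool
            Some(&(asset3, _)) => {
                *pools
                    .get_mut(&pool_key(asset2, asset3))
                    .and_then(|p| p.get_mut(&asset2))
                    .ok_or("unknown pool")? += amount_out;
            }
            // final hop: this amount is what the caller receives
            None => return Ok(amount_out),
        }
    }
    Err("path needs at least two entries")
}

fn main() {
    // route: asset 1 -> native (0) -> asset 2, through pools (0, 1) and (0, 2)
    let mut pools: Pools = HashMap::from([
        ((0, 1), HashMap::from([(0, 10_000), (1, 10_000)])),
        ((0, 2), HashMap::from([(0, 10_000), (2, 10_000)])),
    ]);
    let path = [(1, 1_000), (0, 905), (2, 820)];
    assert_eq!(walk_path(&path, &mut pools), Ok(820));
    // the native leg has moved from the first pool into the second
    assert_eq!(pools[&(0, 1)][&0], 9_095);
    assert_eq!(pools[&(0, 2)][&0], 10_905);
}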
when this issue addressed + // https://github.com/paritytech/polkadot-sdk/issues/1698 + let free = T::Assets::reducible_balance(asset.clone(), who, preservation, Polite); + ensure!(free >= value, TokenError::NotExpendable); } + T::Assets::withdraw(asset, who, value, Exact, preservation, Polite) } - /// Returns a pool id constructed from 2 assets. - /// 1. Native asset should be lower than the other asset ids. - /// 2. Two native or two non-native assets are compared by their `Ord` implementation. - /// - /// We expect deterministic order, so (asset1, asset2) or (asset2, asset1) returns the same - /// result. - pub fn get_pool_id(asset1: T::MultiAssetId, asset2: T::MultiAssetId) -> PoolIdOf { - match ( - T::MultiAssetIdConverter::is_native(&asset1), - T::MultiAssetIdConverter::is_native(&asset2), - ) { - (true, false) => return (asset1, asset2), - (false, true) => return (asset2, asset1), - _ => { - // else we want to be deterministic based on `Ord` implementation - if asset1 <= asset2 { - (asset1, asset2) - } else { - (asset2, asset1) - } - }, - } + /// Get the `owner`'s balance of `asset`, which could be the chain's native asset or another + /// fungible. Returns a value in the form of an `Balance`. + fn get_balance(owner: &T::AccountId, asset: T::AssetKind) -> T::Balance { + T::Assets::reducible_balance(asset, owner, Expendable, Polite) } /// Returns the balance of each asset in the pool. /// The tuple result is in the order requested (not necessarily the same as pool order). pub fn get_reserves( - asset1: &T::MultiAssetId, - asset2: &T::MultiAssetId, - ) -> Result<(T::AssetBalance, T::AssetBalance), Error> { - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - let pool_account = Self::get_pool_account(&pool_id); + asset1: T::AssetKind, + asset2: T::AssetKind, + ) -> Result<(T::Balance, T::Balance), Error> { + let pool_account = T::PoolLocator::pool_address(&asset1, &asset2) + .map_err(|_| Error::::InvalidAssetPair)?; - let balance1 = Self::get_balance(&pool_account, asset1)?; - let balance2 = Self::get_balance(&pool_account, asset2)?; + let balance1 = Self::get_balance(&pool_account, asset1); + let balance2 = Self::get_balance(&pool_account, asset2); if balance1.is_zero() || balance2.is_zero() { Err(Error::::PoolNotFound)?; @@ -979,56 +978,66 @@ pub mod pallet { } /// Leading to an amount at the end of a `path`, get the required amounts in. 
- pub(crate) fn get_amounts_in( - amount_out: &T::AssetBalance, - path: &BoundedVec, - ) -> Result, DispatchError> { - let mut amounts: Vec = vec![*amount_out]; - - for assets_pair in path.windows(2).rev() { - if let [asset1, asset2] = assets_pair { - let (reserve_in, reserve_out) = Self::get_reserves(asset1, asset2)?; - let prev_amount = amounts.last().expect("Always has at least one element"); - let amount_in = Self::get_amount_in(prev_amount, &reserve_in, &reserve_out)?; - amounts.push(amount_in); - } + pub(crate) fn balance_path_from_amount_out( + amount_out: T::Balance, + path: Vec, + ) -> Result, DispatchError> { + let mut balance_path: BalancePath = Vec::with_capacity(path.len()); + let mut amount_in: T::Balance = amount_out; + + let mut iter = path.into_iter().rev().peekable(); + while let Some(asset2) = iter.next() { + let asset1 = match iter.peek() { + Some(a) => a, + None => { + balance_path.push((asset2, amount_in)); + break + }, + }; + let (reserve_in, reserve_out) = Self::get_reserves(asset1.clone(), asset2.clone())?; + balance_path.push((asset2, amount_in)); + amount_in = Self::get_amount_in(&amount_in, &reserve_in, &reserve_out)?; } + balance_path.reverse(); - amounts.reverse(); - Ok(amounts) + Ok(balance_path) } /// Following an amount into a `path`, get the corresponding amounts out. - pub(crate) fn get_amounts_out( - amount_in: &T::AssetBalance, - path: &BoundedVec, - ) -> Result, DispatchError> { - let mut amounts: Vec = vec![*amount_in]; - - for assets_pair in path.windows(2) { - if let [asset1, asset2] = assets_pair { - let (reserve_in, reserve_out) = Self::get_reserves(asset1, asset2)?; - let prev_amount = amounts.last().expect("Always has at least one element"); - let amount_out = Self::get_amount_out(prev_amount, &reserve_in, &reserve_out)?; - amounts.push(amount_out); - } + pub(crate) fn balance_path_from_amount_in( + amount_in: T::Balance, + path: Vec, + ) -> Result, DispatchError> { + let mut balance_path: BalancePath = Vec::with_capacity(path.len()); + let mut amount_out: T::Balance = amount_in; + + let mut iter = path.into_iter().peekable(); + while let Some(asset1) = iter.next() { + let asset2 = match iter.peek() { + Some(a) => a, + None => { + balance_path.push((asset1, amount_out)); + break + }, + }; + let (reserve_in, reserve_out) = Self::get_reserves(asset1.clone(), asset2.clone())?; + balance_path.push((asset1, amount_out)); + amount_out = Self::get_amount_out(&amount_out, &reserve_in, &reserve_out)?; } - - Ok(amounts) + Ok(balance_path) } /// Used by the RPC service to provide current prices. pub fn quote_price_exact_tokens_for_tokens( - asset1: T::MultiAssetId, - asset2: T::MultiAssetId, - amount: T::AssetBalance, + asset1: T::AssetKind, + asset2: T::AssetKind, + amount: T::Balance, include_fee: bool, - ) -> Option { - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - let pool_account = Self::get_pool_account(&pool_id); + ) -> Option { + let pool_account = T::PoolLocator::pool_address(&asset1, &asset2).ok()?; - let balance1 = Self::get_balance(&pool_account, &asset1).ok()?; - let balance2 = Self::get_balance(&pool_account, &asset2).ok()?; + let balance1 = Self::get_balance(&pool_account, asset1); + let balance2 = Self::get_balance(&pool_account, asset2); if !balance1.is_zero() { if include_fee { Self::get_amount_out(&amount, &balance1, &balance2).ok() @@ -1042,16 +1051,15 @@ pub mod pallet { /// Used by the RPC service to provide current prices. 
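// Sketch of building the per-hop balance path in the "amount in" direction, in
// the spirit of `balance_path_from_amount_in`: walk the route, look up each
// pool's reserves, and carry the running amount forward. The LP fee is ignored
// here for brevity, so the quote reduces to the plain constant-product rule
// out = in * reserve_out / (reserve_in + in).
use std::collections::HashMap;

type Asset = u32;
type Balance = u128;

fn balance_path_from_amount_in(
    amount_in: Balance,
    route: &[Asset],
    reserves: &HashMap<(Asset, Asset), (Balance, Balance)>, // keyed by (in, out)
) -> Option<Vec<(Asset, Balance)>> {
    let mut path = Vec::with_capacity(route.len());
    let mut amount = amount_in;
    for pair in route.windows(2) {
        let (asset_in, asset_out) = (pair[0], pair[1]);
        let (reserve_in, reserve_out) = *reserves.get(&(asset_in, asset_out))?;
        path.push((asset_in, amount));
        amount = amount.checked_mul(reserve_out)? / reserve_in.checked_add(amount)?;
    }
    path.push((*route.last()?, amount));
    Some(path)
}

fn main() {
    let reserves = HashMap::from([
        ((1, 0), (10_000, 10_000)), // asset 1 -> native
        ((0, 2), (10_000, 10_000)), // native -> asset 2
    ]);
    let path = balance_path_from_amount_in(1_000, &[1, 0, 2], &reserves).unwrap();
    assert_eq!(path, vec![(1, 1_000), (0, 909), (2, 833)]);
}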
pub fn quote_price_tokens_for_exact_tokens( - asset1: T::MultiAssetId, - asset2: T::MultiAssetId, - amount: T::AssetBalance, + asset1: T::AssetKind, + asset2: T::AssetKind, + amount: T::Balance, include_fee: bool, - ) -> Option { - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - let pool_account = Self::get_pool_account(&pool_id); + ) -> Option { + let pool_account = T::PoolLocator::pool_address(&asset1, &asset2).ok()?; - let balance1 = Self::get_balance(&pool_account, &asset1).ok()?; - let balance2 = Self::get_balance(&pool_account, &asset2).ok()?; + let balance1 = Self::get_balance(&pool_account, asset1); + let balance2 = Self::get_balance(&pool_account, asset2); if !balance1.is_zero() { if include_fee { Self::get_amount_in(&amount, &balance1, &balance2).ok() @@ -1065,18 +1073,18 @@ pub mod pallet { /// Calculates the optimal amount from the reserves. pub fn quote( - amount: &T::AssetBalance, - reserve1: &T::AssetBalance, - reserve2: &T::AssetBalance, - ) -> Result> { - // amount * reserve2 / reserve1 + amount: &T::Balance, + reserve1: &T::Balance, + reserve2: &T::Balance, + ) -> Result> { + // (amount * reserve2) / reserve1 Self::mul_div(amount, reserve2, reserve1) } pub(super) fn calc_lp_amount_for_zero_supply( - amount1: &T::AssetBalance, - amount2: &T::AssetBalance, - ) -> Result> { + amount1: &T::Balance, + amount2: &T::Balance, + ) -> Result> { let amount1 = T::HigherPrecisionBalance::from(*amount1); let amount2 = T::HigherPrecisionBalance::from(*amount2); @@ -1090,11 +1098,7 @@ pub mod pallet { result.try_into().map_err(|_| Error::::Overflow) } - fn mul_div( - a: &T::AssetBalance, - b: &T::AssetBalance, - c: &T::AssetBalance, - ) -> Result> { + fn mul_div(a: &T::Balance, b: &T::Balance, c: &T::Balance) -> Result> { let a = T::HigherPrecisionBalance::from(*a); let b = T::HigherPrecisionBalance::from(*b); let c = T::HigherPrecisionBalance::from(*c); @@ -1113,16 +1117,16 @@ pub mod pallet { /// Given an input amount of an asset and pair reserves, returns the maximum output amount /// of the other asset. pub fn get_amount_out( - amount_in: &T::AssetBalance, - reserve_in: &T::AssetBalance, - reserve_out: &T::AssetBalance, - ) -> Result> { + amount_in: &T::Balance, + reserve_in: &T::Balance, + reserve_out: &T::Balance, + ) -> Result> { let amount_in = T::HigherPrecisionBalance::from(*amount_in); let reserve_in = T::HigherPrecisionBalance::from(*reserve_in); let reserve_out = T::HigherPrecisionBalance::from(*reserve_out); if reserve_in.is_zero() || reserve_out.is_zero() { - return Err(Error::::ZeroLiquidity.into()) + return Err(Error::::ZeroLiquidity) } let amount_in_with_fee = amount_in @@ -1148,20 +1152,20 @@ pub mod pallet { /// Given an output amount of an asset and pair reserves, returns a required input amount /// of the other asset. pub fn get_amount_in( - amount_out: &T::AssetBalance, - reserve_in: &T::AssetBalance, - reserve_out: &T::AssetBalance, - ) -> Result> { + amount_out: &T::Balance, + reserve_in: &T::Balance, + reserve_out: &T::Balance, + ) -> Result> { let amount_out = T::HigherPrecisionBalance::from(*amount_out); let reserve_in = T::HigherPrecisionBalance::from(*reserve_in); let reserve_out = T::HigherPrecisionBalance::from(*reserve_out); if reserve_in.is_zero() || reserve_out.is_zero() { - Err(Error::::ZeroLiquidity.into())? + Err(Error::::ZeroLiquidity)? } if amount_out >= reserve_out { - Err(Error::::AmountOutTooHigh.into())? + Err(Error::::AmountOutTooHigh)? 
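// Worked model of the fee-adjusted quotes above. The multiplication and
// rounding details are unchanged context elided from this diff, so the exact
// constants here are assumptions: the usual Uniswap-v2-style formulas with the
// mock runtime's `LPFee = 3` (0.3%, expressed per mille), with `get_amount_in`
// rounding up so the pool is never underpaid.
const FEE_PER_MILLE: u128 = 3;

fn get_amount_out(amount_in: u128, reserve_in: u128, reserve_out: u128) -> u128 {
    let amount_in_with_fee = amount_in * (1_000 - FEE_PER_MILLE);
    let numerator = amount_in_with_fee * reserve_out;
    let denominator = reserve_in * 1_000 + amount_in_with_fee;
    numerator / denominator
}

fn get_amount_in(amount_out: u128, reserve_in: u128, reserve_out: u128) -> u128 {
    // assumes amount_out < reserve_out, which the pallet guards as `AmountOutTooHigh`
    let numerator = reserve_in * amount_out * 1_000;
    let denominator = (reserve_out - amount_out) * (1_000 - FEE_PER_MILLE);
    numerator / denominator + 1
}

fn main() {
    // a balanced 10_000 / 10_000 pool
    assert_eq!(get_amount_out(1_000, 10_000, 10_000), 906);
    // buying those 906 back requires roughly the original 1_000 as input
    assert_eq!(get_amount_in(906, 10_000, 10_000), 1_000);
}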
} let numerator = reserve_in @@ -1185,42 +1189,19 @@ pub mod pallet { result.try_into().map_err(|_| Error::::Overflow) } - /// Ensure that a `value` meets the minimum balance requirements of an `asset` class. - fn validate_minimal_amount( - value: T::AssetBalance, - asset: &T::MultiAssetId, - ) -> Result<(), ()> { - if T::MultiAssetIdConverter::is_native(asset) { - let ed = T::Currency::minimum_balance(); - ensure!( - T::HigherPrecisionBalance::from(value) >= T::HigherPrecisionBalance::from(ed), - () - ); - } else { - let MultiAssetIdConversionResult::Converted(asset_id) = - T::MultiAssetIdConverter::try_convert(asset) - else { - return Err(()) - }; - let minimal = T::Assets::minimum_balance(asset_id); - ensure!(value >= minimal, ()); - } - Ok(()) - } - /// Ensure that a path is valid. - fn validate_swap_path( - path: &BoundedVec, - ) -> Result<(), DispatchError> { + fn validate_swap_path(path: &Vec) -> Result<(), DispatchError> { ensure!(path.len() >= 2, Error::::InvalidPath); + ensure!(path.len() as u32 <= T::MaxSwapPathLength::get(), Error::::InvalidPath); // validate all the pools in the path are unique - let mut pools = BoundedBTreeSet::, T::MaxSwapPathLength>::new(); + let mut pools = BTreeSet::::new(); for assets_pair in path.windows(2) { if let [asset1, asset2] = assets_pair { - let pool_id = Self::get_pool_id(asset1.clone(), asset2.clone()); - let new_element = - pools.try_insert(pool_id).map_err(|_| Error::::Overflow)?; + let pool_id = T::PoolLocator::pool_id(asset1, asset2) + .map_err(|_| Error::::InvalidAssetPair)?; + + let new_element = pools.insert(pool_id); if !new_element { return Err(Error::::NonUniquePath.into()) } @@ -1239,69 +1220,35 @@ pub mod pallet { } } -impl Swap for Pallet { - fn swap_exact_tokens_for_tokens( - sender: T::AccountId, - path: Vec, - amount_in: T::HigherPrecisionBalance, - amount_out_min: Option, - send_to: T::AccountId, - keep_alive: bool, - ) -> Result { - let path = path.try_into().map_err(|_| Error::::PathError)?; - let amount_out_min = amount_out_min.map(Self::convert_hpb_to_asset_balance).transpose()?; - let amount_out = Self::do_swap_exact_tokens_for_tokens( - sender, - path, - Self::convert_hpb_to_asset_balance(amount_in)?, - amount_out_min, - send_to, - keep_alive, - )?; - Ok(amount_out.into()) - } - - fn swap_tokens_for_exact_tokens( - sender: T::AccountId, - path: Vec, - amount_out: T::HigherPrecisionBalance, - amount_in_max: Option, - send_to: T::AccountId, - keep_alive: bool, - ) -> Result { - let path = path.try_into().map_err(|_| Error::::PathError)?; - let amount_in_max = amount_in_max.map(Self::convert_hpb_to_asset_balance).transpose()?; - let amount_in = Self::do_swap_tokens_for_exact_tokens( - sender, - path, - Self::convert_hpb_to_asset_balance(amount_out)?, - amount_in_max, - send_to, - keep_alive, - )?; - Ok(amount_in.into()) - } -} - sp_api::decl_runtime_apis! { /// This runtime api allows people to query the size of the liquidity pools /// and quote prices for swaps. - pub trait AssetConversionApi where - Balance: Codec + MaybeDisplay, - AssetBalance: frame_support::traits::tokens::Balance, - AssetId: Codec + pub trait AssetConversionApi + where + Balance: frame_support::traits::tokens::Balance + MaybeDisplay, + AssetId: Codec, { /// Provides a quote for [`Pallet::swap_tokens_for_exact_tokens`]. /// /// Note that the price may have changed by the time the transaction is executed. /// (Use `amount_in_max` to control slippage.) 
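// Minimal model of `validate_swap_path`: at least two assets, no longer than
// the configured maximum (4 in the mock runtime), and no pool visited twice,
// since a repeated pool would be quoted against reserves the earlier hop has
// already moved.
use std::collections::BTreeSet;

const MAX_SWAP_PATH_LENGTH: usize = 4;

fn validate_swap_path(path: &[u32]) -> Result<(), &'static str> {
    if path.len() < 2 || path.len() > MAX_SWAP_PATH_LENGTH {
        return Err("InvalidPath");
    }
    let mut pools = BTreeSet::new();
    for pair in path.windows(2) {
        let (a, b) = (pair[0], pair[1]);
        if a == b {
            return Err("InvalidAssetPair");
        }
        // canonical pool id, standing in for `T::PoolLocator::pool_id`
        let pool_id = if a < b { (a, b) } else { (b, a) };
        if !pools.insert(pool_id) {
            return Err("NonUniquePath");
        }
    }
    Ok(())
}

fn main() {
    assert!(validate_swap_path(&[1, 0, 2]).is_ok());
    assert_eq!(validate_swap_path(&[1, 0, 1, 0]), Err("NonUniquePath"));
    assert_eq!(validate_swap_path(&[1]), Err("InvalidPath"));
}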
- fn quote_price_tokens_for_exact_tokens(asset1: AssetId, asset2: AssetId, amount: AssetBalance, include_fee: bool) -> Option; + fn quote_price_tokens_for_exact_tokens( + asset1: AssetId, + asset2: AssetId, + amount: Balance, + include_fee: bool, + ) -> Option; /// Provides a quote for [`Pallet::swap_exact_tokens_for_tokens`]. /// /// Note that the price may have changed by the time the transaction is executed. /// (Use `amount_out_min` to control slippage.) - fn quote_price_exact_tokens_for_tokens(asset1: AssetId, asset2: AssetId, amount: AssetBalance, include_fee: bool) -> Option; + fn quote_price_exact_tokens_for_tokens( + asset1: AssetId, + asset2: AssetId, + amount: Balance, + include_fee: bool, + ) -> Option; /// Returns the size of the liquidity pool for the given asset pair. fn get_reserves(asset1: AssetId, asset2: AssetId) -> Option<(Balance, Balance)>; diff --git a/substrate/frame/asset-conversion/src/mock.rs b/substrate/frame/asset-conversion/src/mock.rs index 64e755b18f74..bfc23309cb78 100644 --- a/substrate/frame/asset-conversion/src/mock.rs +++ b/substrate/frame/asset-conversion/src/mock.rs @@ -20,12 +20,17 @@ use super::*; use crate as pallet_asset_conversion; - use frame_support::{ construct_runtime, derive_impl, instances::{Instance1, Instance2}, ord_parameter_types, parameter_types, - traits::{AsEnsureOriginWithArg, ConstU128, ConstU32, ConstU64}, + traits::{ + tokens::{ + fungible::{NativeFromLeft, NativeOrWithId, UnionOf}, + imbalance::ResolveAssetTo, + }, + AsEnsureOriginWithArg, ConstU128, ConstU32, ConstU64, + }, PalletId, }; use frame_system::{EnsureSigned, EnsureSignedBy}; @@ -35,6 +40,7 @@ use sp_runtime::{ traits::{AccountIdConversion, BlakeTwo256, IdentityLookup}, BuildStorage, }; +use sp_std::default::Default; type Block = frame_system::mocking::MockBlock; @@ -140,38 +146,37 @@ impl pallet_assets::Config for Test { parameter_types! { pub const AssetConversionPalletId: PalletId = PalletId(*b"py/ascon"); - pub storage AllowMultiAssetPools: bool = true; - pub storage LiquidityWithdrawalFee: Permill = Permill::from_percent(0); // should be non-zero if AllowMultiAssetPools is true, otherwise can be zero + pub const Native: NativeOrWithId = NativeOrWithId::Native; + pub storage LiquidityWithdrawalFee: Permill = Permill::from_percent(0); } ord_parameter_types! 
{ pub const AssetConversionOrigin: u128 = AccountIdConversion::::into_account_truncating(&AssetConversionPalletId::get()); } +pub type NativeAndAssets = UnionOf, u128>; +pub type AscendingLocator = Ascending>; +pub type WithFirstAssetLocator = WithFirstAsset>; + impl Config for Test { type RuntimeEvent = RuntimeEvent; - type Currency = Balances; - type AssetBalance = ::Balance; - type AssetId = u32; + type Balance = ::Balance; + type HigherPrecisionBalance = sp_core::U256; + type AssetKind = NativeOrWithId; + type Assets = NativeAndAssets; + type PoolId = (Self::AssetKind, Self::AssetKind); + type PoolLocator = Chain; type PoolAssetId = u32; - type Assets = Assets; type PoolAssets = PoolAssets; + type PoolSetupFee = ConstU128<100>; // should be more or equal to the existential deposit + type PoolSetupFeeAsset = Native; + type PoolSetupFeeTarget = ResolveAssetTo; type PalletId = AssetConversionPalletId; type WeightInfo = (); type LPFee = ConstU32<3>; // means 0.3% - type PoolSetupFee = ConstU128<100>; // should be more or equal to the existential deposit - type PoolSetupFeeReceiver = AssetConversionOrigin; type LiquidityWithdrawalFee = LiquidityWithdrawalFee; - type AllowMultiAssetPools = AllowMultiAssetPools; type MaxSwapPathLength = ConstU32<4>; type MintMinLiquidity = ConstU128<100>; // 100 is good enough when the main currency has 12 decimals. - - type Balance = u128; - type HigherPrecisionBalance = sp_core::U256; - - type MultiAssetId = NativeOrAssetId; - type MultiAssetIdConverter = NativeOrAssetIdConverter; - #[cfg(feature = "runtime-benchmarks")] type BenchmarkHelper = (); } diff --git a/substrate/frame/asset-conversion/src/swap.rs b/substrate/frame/asset-conversion/src/swap.rs new file mode 100644 index 000000000000..a6154e294147 --- /dev/null +++ b/substrate/frame/asset-conversion/src/swap.rs @@ -0,0 +1,212 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Traits and implementations for swap between the various asset classes. + +use super::*; + +/// Trait for providing methods to swap between the various asset classes. +pub trait Swap { + /// Measure units of the asset classes for swapping. + type Balance: Balance; + /// Kind of assets that are going to be swapped. + type AssetKind; + + /// Returns the upper limit on the length of the swap path. + fn max_path_len() -> u32; + + /// Swap exactly `amount_in` of asset `path[0]` for asset `path[last]`. + /// If an `amount_out_min` is specified, it will return an error if it is unable to acquire + /// the amount desired. + /// + /// Withdraws the `path[0]` asset from `sender`, deposits the `path[last]` asset to `send_to`, + /// respecting `keep_alive`. + /// + /// If successful, returns the amount of `path[last]` acquired for the `amount_in`. + /// + /// This operation is expected to be atomic. 
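// Conceptual model of the mock's pool locator setup, assuming `Chain` simply
// tries `WithFirstAssetLocator` and falls back to `AscendingLocator` (which is
// consistent with the locator tests further down): native/asset pools keep the
// native side first, asset/asset pools are ordered ascending, and identical
// pairs are rejected.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
enum NativeOrWithId {
    Native,
    WithId(u32),
}
use NativeOrWithId::{Native, WithId};

type PoolId = (NativeOrWithId, NativeOrWithId);

fn with_first_asset(a: NativeOrWithId, b: NativeOrWithId) -> Result<PoolId, ()> {
    match (a, b) {
        (Native, WithId(x)) | (WithId(x), Native) => Ok((Native, WithId(x))),
        _ => Err(()),
    }
}

fn ascending(a: NativeOrWithId, b: NativeOrWithId) -> Result<PoolId, ()> {
    if a == b {
        Err(())
    } else if a < b {
        Ok((a, b))
    } else {
        Ok((b, a))
    }
}

fn chained(a: NativeOrWithId, b: NativeOrWithId) -> Result<PoolId, ()> {
    with_first_asset(a, b).or_else(|_| ascending(a, b))
}

fn main() {
    assert_eq!(chained(WithId(2), Native), Ok((Native, WithId(2))));
    assert_eq!(chained(WithId(2), WithId(1)), Ok((WithId(1), WithId(2))));
    assert_eq!(chained(Native, Native), Err(()));
}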
+ fn swap_exact_tokens_for_tokens( + sender: AccountId, + path: Vec, + amount_in: Self::Balance, + amount_out_min: Option, + send_to: AccountId, + keep_alive: bool, + ) -> Result; + + /// Take the `path[0]` asset and swap some amount for `amount_out` of the `path[last]`. If an + /// `amount_in_max` is specified, it will return an error if acquiring `amount_out` would be + /// too costly. + /// + /// Withdraws `path[0]` asset from `sender`, deposits `path[last]` asset to `send_to`, + /// respecting `keep_alive`. + /// + /// If successful returns the amount of the `path[0]` taken to provide `path[last]`. + /// + /// This operation is expected to be atomic. + fn swap_tokens_for_exact_tokens( + sender: AccountId, + path: Vec, + amount_out: Self::Balance, + amount_in_max: Option, + send_to: AccountId, + keep_alive: bool, + ) -> Result; +} + +/// Trait providing methods to swap between the various asset classes. +pub trait SwapCredit { + /// Measure units of the asset classes for swapping. + type Balance: Balance; + /// Kind of assets that are going to be swapped. + type AssetKind; + /// Credit implying a negative imbalance in the system that can be placed into an account or + /// alter the total supply. + type Credit; + + /// Returns the upper limit on the length of the swap path. + fn max_path_len() -> u32; + + /// Swap exactly `credit_in` of asset `path[0]` for asset `path[last]`. If `amount_out_min` is + /// provided and the swap can't achieve at least this amount, an error is returned. + /// + /// On a successful swap, the function returns the `credit_out` of `path[last]` obtained from + /// the `credit_in`. On failure, it returns an `Err` containing the original `credit_in` and the + /// associated error code. + /// + /// This operation is expected to be atomic. + fn swap_exact_tokens_for_tokens( + path: Vec, + credit_in: Self::Credit, + amount_out_min: Option, + ) -> Result; + + /// Swaps a portion of `credit_in` of `path[0]` asset to obtain the desired `amount_out` of + /// the `path[last]` asset. The provided `credit_in` must be adequate to achieve the target + /// `amount_out`, or an error will occur. + /// + /// On success, the function returns a (`credit_out`, `credit_change`) tuple, where `credit_out` + /// represents the acquired amount of the `path[last]` asset, and `credit_change` is the + /// remaining portion from the `credit_in`. On failure, an `Err` with the initial `credit_in` + /// and error code is returned. + /// + /// This operation is expected to be atomic. 
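// Hypothetical consumer of this kind of interface, with the trait mirrored in
// shortened, concrete form (fixed u64 accounts, u32 assets, u128 balances,
// string errors) purely for illustration, not the pallet's actual signature:
// fee-charging logic can stay generic over "something that can swap" and only
// needs a route, the fixed side of the trade, and a slippage bound.
trait Swap {
    type Balance;
    type AssetKind;

    fn swap_tokens_for_exact_tokens(
        sender: u64,
        path: Vec<Self::AssetKind>,
        amount_out: Self::Balance,
        amount_in_max: Option<Self::Balance>,
        send_to: u64,
        keep_alive: bool,
    ) -> Result<Self::Balance, &'static str>;
}

// A dummy implementation that trades every pair 1:1 with no fee.
struct OneToOneDex;

impl Swap for OneToOneDex {
    type Balance = u128;
    type AssetKind = u32;

    fn swap_tokens_for_exact_tokens(
        _sender: u64,
        path: Vec<u32>,
        amount_out: u128,
        amount_in_max: Option<u128>,
        _send_to: u64,
        _keep_alive: bool,
    ) -> Result<u128, &'static str> {
        if path.len() < 2 {
            return Err("InvalidPath");
        }
        match amount_in_max {
            Some(max) if amount_out > max => Err("ProvidedMaximumNotSufficientForSwap"),
            _ => Ok(amount_out),
        }
    }
}

// Swap just enough of `fee_asset` into the native asset (id 0 here) to cover `fee`.
fn charge_fee_in_asset<D: Swap<Balance = u128, AssetKind = u32>>(
    who: u64,
    fee_asset: u32,
    fee: u128,
    limit: u128,
) -> Result<u128, &'static str> {
    D::swap_tokens_for_exact_tokens(who, vec![fee_asset, 0], fee, Some(limit), who, true)
}

fn main() {
    assert_eq!(charge_fee_in_asset::<OneToOneDex>(1, 7, 250, 300), Ok(250));
    assert!(charge_fee_in_asset::<OneToOneDex>(1, 7, 250, 200).is_err());
}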
+ fn swap_tokens_for_exact_tokens( + path: Vec, + credit_in: Self::Credit, + amount_out: Self::Balance, + ) -> Result<(Self::Credit, Self::Credit), (Self::Credit, DispatchError)>; +} + +impl Swap for Pallet { + type Balance = T::Balance; + type AssetKind = T::AssetKind; + + fn max_path_len() -> u32 { + T::MaxSwapPathLength::get() + } + + fn swap_exact_tokens_for_tokens( + sender: T::AccountId, + path: Vec, + amount_in: Self::Balance, + amount_out_min: Option, + send_to: T::AccountId, + keep_alive: bool, + ) -> Result { + let amount_out = with_storage_layer(|| { + Self::do_swap_exact_tokens_for_tokens( + sender, + path, + amount_in, + amount_out_min, + send_to, + keep_alive, + ) + })?; + Ok(amount_out) + } + + fn swap_tokens_for_exact_tokens( + sender: T::AccountId, + path: Vec, + amount_out: Self::Balance, + amount_in_max: Option, + send_to: T::AccountId, + keep_alive: bool, + ) -> Result { + let amount_in = with_storage_layer(|| { + Self::do_swap_tokens_for_exact_tokens( + sender, + path, + amount_out, + amount_in_max, + send_to, + keep_alive, + ) + })?; + Ok(amount_in) + } +} + +impl SwapCredit for Pallet { + type Balance = T::Balance; + type AssetKind = T::AssetKind; + type Credit = CreditOf; + + fn max_path_len() -> u32 { + T::MaxSwapPathLength::get() + } + + fn swap_exact_tokens_for_tokens( + path: Vec, + credit_in: Self::Credit, + amount_out_min: Option, + ) -> Result { + let credit_asset = credit_in.asset(); + with_transaction(|| -> TransactionOutcome> { + let res = Self::do_swap_exact_credit_tokens_for_tokens(path, credit_in, amount_out_min); + match &res { + Ok(_) => TransactionOutcome::Commit(Ok(res)), + // wrapping `res` with `Ok`, since our `Err` doesn't satisfy the + // `From` bound of the `with_transaction` function. + Err(_) => TransactionOutcome::Rollback(Ok(res)), + } + }) + // should never map an error since `with_transaction` above never returns it. + .map_err(|_| (Self::Credit::zero(credit_asset), DispatchError::Corruption))? + } + + fn swap_tokens_for_exact_tokens( + path: Vec, + credit_in: Self::Credit, + amount_out: Self::Balance, + ) -> Result<(Self::Credit, Self::Credit), (Self::Credit, DispatchError)> { + let credit_asset = credit_in.asset(); + with_transaction(|| -> TransactionOutcome> { + let res = Self::do_swap_credit_tokens_for_exact_tokens(path, credit_in, amount_out); + match &res { + Ok(_) => TransactionOutcome::Commit(Ok(res)), + // wrapping `res` with `Ok`, since our `Err` doesn't satisfy the + // `From` bound of the `with_transaction` function. + Err(_) => TransactionOutcome::Rollback(Ok(res)), + } + }) + // should never map an error since `with_transaction` above never returns it. + .map_err(|_| (Self::Credit::zero(credit_asset), DispatchError::Corruption))? 
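// Standalone model of the transactional wrapping used here: run the inner swap,
// commit on success, and restore a snapshot on failure, so an `Err` can never
// leave a partial storage mutation behind (the role `with_transaction` and
// `TransactionOutcome` play for real runtime storage).
#[derive(Clone, Debug, PartialEq)]
struct State {
    balances: Vec<u128>,
}

fn with_transaction<T, E>(
    state: &mut State,
    f: impl FnOnce(&mut State) -> Result<T, E>,
) -> Result<T, E> {
    let snapshot = state.clone();
    match f(state) {
        // commit: keep the mutated state
        Ok(v) => Ok(v),
        // rollback: restore the snapshot before reporting the error
        Err(e) => {
            *state = snapshot;
            Err(e)
        }
    }
}

fn main() {
    let mut state = State { balances: vec![100, 0] };
    // a failing "swap" mutates the state before erroring out...
    let res: Result<(), &str> = with_transaction(&mut state, |s| {
        s.balances[0] -= 40;
        Err("ProvidedMinimumNotSufficientForSwap")
    });
    // ...but the wrapper rolls the mutation back
    assert!(res.is_err());
    assert_eq!(state.balances, vec![100, 0]);
}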
+ } +} diff --git a/substrate/frame/asset-conversion/src/tests.rs b/substrate/frame/asset-conversion/src/tests.rs index 95d3d5f087a3..66ae57deb397 100644 --- a/substrate/frame/asset-conversion/src/tests.rs +++ b/substrate/frame/asset-conversion/src/tests.rs @@ -18,9 +18,15 @@ use crate::{mock::*, *}; use frame_support::{ - assert_noop, assert_ok, + assert_noop, assert_ok, assert_storage_noop, instances::Instance1, - traits::{fungible::Inspect, fungibles::InspectEnumerable, Get}, + traits::{ + fungible, + fungible::{Inspect as FungibleInspect, NativeOrWithId}, + fungibles, + fungibles::{Inspect, InspectEnumerable}, + Get, + }, }; use sp_arithmetic::Permill; use sp_runtime::{DispatchError, TokenError}; @@ -43,18 +49,14 @@ fn events() -> Vec> { result } -fn pools() -> Vec> { +fn pools() -> Vec<::PoolId> { let mut s: Vec<_> = Pools::::iter().map(|x| x.0).collect(); s.sort(); s } -fn assets() -> Vec> { - // if the storage would be public: - // let mut s: Vec<_> = pallet_assets::pallet::Asset::::iter().map(|x| x.0).collect(); - let mut s: Vec<_> = <::Assets>::asset_ids() - .map(|id| NativeOrAssetId::Asset(id)) - .collect(); +fn assets() -> Vec> { + let mut s: Vec<_> = Assets::asset_ids().map(|id| NativeOrWithId::WithId(id)).collect(); s.sort(); s } @@ -65,36 +67,71 @@ fn pool_assets() -> Vec { s } -fn create_tokens(owner: u128, tokens: Vec>) { +fn create_tokens(owner: u128, tokens: Vec>) { + create_tokens_with_ed(owner, tokens, 1) +} + +fn create_tokens_with_ed(owner: u128, tokens: Vec>, ed: u128) { for token_id in tokens { - let MultiAssetIdConversionResult::Converted(asset_id) = - NativeOrAssetIdConverter::try_convert(&token_id) - else { - unreachable!("invalid token") + let asset_id = match token_id { + NativeOrWithId::WithId(id) => id, + _ => unreachable!("invalid token"), }; - assert_ok!(Assets::force_create(RuntimeOrigin::root(), asset_id, owner, false, 1)); + assert_ok!(Assets::force_create(RuntimeOrigin::root(), asset_id, owner, false, ed)); } } -fn balance(owner: u128, token_id: NativeOrAssetId) -> u128 { - match token_id { - NativeOrAssetId::Native => <::Currency>::free_balance(owner), - NativeOrAssetId::Asset(token_id) => <::Assets>::balance(token_id, owner), - } +fn balance(owner: u128, token_id: NativeOrWithId) -> u128 { + <::Assets>::balance(token_id, &owner) } fn pool_balance(owner: u128, token_id: u32) -> u128 { <::PoolAssets>::balance(token_id, owner) } -fn get_ed() -> u128 { - <::Currency>::minimum_balance() +fn get_native_ed() -> u128 { + <::Assets>::minimum_balance(NativeOrWithId::Native) } macro_rules! bvec { - ($( $x:tt )*) => { - vec![$( $x )*].try_into().unwrap() - } + ($($x:expr),+ $(,)?) 
=> ( + vec![$( Box::new( $x ), )*] + ) +} + +#[test] +fn validate_with_first_asset_pool_id_locator() { + new_test_ext().execute_with(|| { + use NativeOrWithId::{Native, WithId}; + assert_eq!(WithFirstAssetLocator::pool_id(&Native, &WithId(2)), Ok((Native, WithId(2)))); + assert_eq!(WithFirstAssetLocator::pool_id(&WithId(2), &Native), Ok((Native, WithId(2)))); + assert_noop!(WithFirstAssetLocator::pool_id(&Native, &Native), ()); + assert_noop!(WithFirstAssetLocator::pool_id(&WithId(2), &WithId(1)), ()); + }); +} + +#[test] +fn validate_ascending_pool_id_locator() { + new_test_ext().execute_with(|| { + use NativeOrWithId::{Native, WithId}; + assert_eq!(AscendingLocator::pool_id(&Native, &WithId(2)), Ok((Native, WithId(2)))); + assert_eq!(AscendingLocator::pool_id(&WithId(2), &Native), Ok((Native, WithId(2)))); + assert_eq!(AscendingLocator::pool_id(&WithId(2), &WithId(1)), Ok((WithId(1), WithId(2)))); + assert_eq!(AscendingLocator::pool_id(&Native, &Native), Err(())); + assert_eq!(AscendingLocator::pool_id(&WithId(1), &WithId(1)), Err(())); + }); +} + +#[test] +fn validate_native_or_with_id_sorting() { + new_test_ext().execute_with(|| { + use NativeOrWithId::{Native, WithId}; + assert!(WithId(2) > WithId(1)); + assert!(WithId(1) <= WithId(1)); + assert_eq!(WithId(1), WithId(1)); + assert_eq!(Native::, Native::); + assert!(Native < WithId(1)); + }); } #[test] @@ -103,10 +140,11 @@ fn check_pool_accounts_dont_collide() { let mut map = HashSet::new(); for i in 0..1_000_000u32 { - let account = AssetConversion::get_pool_account(&( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(i), - )); + let account: u128 = ::PoolLocator::address(&( + NativeOrWithId::Native, + NativeOrWithId::WithId(i), + )) + .unwrap(); if map.contains(&account) { panic!("Collision at {}", i); } @@ -138,59 +176,67 @@ fn can_create_pool() { let asset_account_deposit: u128 = >::AssetAccountDeposit::get(); let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let pool_id = (token_1, token_2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let pool_id = (token_1.clone(), token_2.clone()); - create_tokens(user, vec![token_2]); + create_tokens(user, vec![token_2.clone()]); let lp_token = AssetConversion::get_next_pool_asset_id(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 1000)); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_1)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_1.clone()) + )); let setup_fee = <::PoolSetupFee as Get<::Balance>>::get(); - let pool_account = <::PoolSetupFeeReceiver as Get>::get(); + let pool_account = AssetConversionOrigin::get(); assert_eq!( - balance(user, NativeOrAssetId::Native), + balance(user, NativeOrWithId::Native), 1000 - (setup_fee + asset_account_deposit) ); - assert_eq!(balance(pool_account, NativeOrAssetId::Native), setup_fee); + assert_eq!(balance(pool_account, NativeOrWithId::Native), setup_fee); assert_eq!(lp_token + 1, AssetConversion::get_next_pool_asset_id()); assert_eq!( events(), [Event::::PoolCreated { creator: user, - pool_id, - pool_account: AssetConversion::get_pool_account(&pool_id), + pool_id: pool_id.clone(), + pool_account: ::PoolLocator::address(&pool_id).unwrap(), lp_token }] ); assert_eq!(pools(), vec![pool_id]); - assert_eq!(assets(), vec![token_2]); + assert_eq!(assets(), vec![token_2.clone()]); assert_eq!(pool_assets(), vec![lp_token]); 
assert_noop!( - AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_1), - Error::::EqualAssets + AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_1.clone()) + ), + Error::::InvalidAssetPair ); assert_noop!( - AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_2), - Error::::EqualAssets + AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_2.clone()) + ), + Error::::InvalidAssetPair ); - // validate we can create Asset(1)/Asset(2) pool - let token_1 = NativeOrAssetId::Asset(1); - create_tokens(user, vec![token_1]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); - - // validate we can force the first asset to be the Native currency only - AllowMultiAssetPools::set(&false); - let token_1 = NativeOrAssetId::Asset(3); - assert_noop!( - AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2), - Error::::PoolMustContainNativeCurrency - ); + // validate we cannot create WithId(1)/WithId(2) pool + let token_1 = NativeOrWithId::WithId(1); + create_tokens(user, vec![token_1.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); }); } @@ -198,25 +244,37 @@ fn can_create_pool() { fn create_same_pool_twice_should_fail() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); + create_tokens(user, vec![token_2.clone()]); let lp_token = AssetConversion::get_next_pool_asset_id(); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_1)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_1.clone()) + )); let expected_free = lp_token + 1; assert_eq!(expected_free, AssetConversion::get_next_pool_asset_id()); assert_noop!( - AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_1), + AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_1.clone()) + ), Error::::PoolExists ); assert_eq!(expected_free, AssetConversion::get_next_pool_asset_id()); // Try switching the same tokens around: assert_noop!( - AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2), + AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + ), Error::::PoolExists ); assert_eq!(expected_free, AssetConversion::get_next_pool_asset_id()); @@ -227,35 +285,43 @@ fn create_same_pool_twice_should_fail() { fn different_pools_should_have_different_lp_tokens() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let token_3 = NativeOrAssetId::Asset(3); - let pool_id_1_2 = (token_1, token_2); - let pool_id_1_3 = (token_1, token_3); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); + let pool_id_1_2 = (token_1.clone(), token_2.clone()); + let pool_id_1_3 = (token_1.clone(), token_3.clone()); - create_tokens(user, vec![token_2, token_3]); + create_tokens(user, vec![token_2.clone(), token_3.clone()]); let lp_token2_1 = 
AssetConversion::get_next_pool_asset_id(); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_1)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_1.clone()) + )); let lp_token3_1 = AssetConversion::get_next_pool_asset_id(); assert_eq!( events(), [Event::::PoolCreated { creator: user, - pool_id: pool_id_1_2, - pool_account: AssetConversion::get_pool_account(&pool_id_1_2), + pool_id: pool_id_1_2.clone(), + pool_account: ::PoolLocator::address(&pool_id_1_2).unwrap(), lp_token: lp_token2_1 }] ); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_3, token_1)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_3.clone()), + Box::new(token_1.clone()) + )); assert_eq!( events(), [Event::::PoolCreated { creator: user, - pool_id: pool_id_1_3, - pool_account: AssetConversion::get_pool_account(&pool_id_1_3), + pool_id: pool_id_1_3.clone(), + pool_account: ::PoolLocator::address(&pool_id_1_3).unwrap(), lp_token: lp_token3_1, }] ); @@ -268,25 +334,33 @@ fn different_pools_should_have_different_lp_tokens() { fn can_add_liquidity() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let token_3 = NativeOrAssetId::Asset(3); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); - create_tokens(user, vec![token_2, token_3]); + create_tokens(user, vec![token_2.clone(), token_3.clone()]); let lp_token1 = AssetConversion::get_next_pool_asset_id(); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); let lp_token2 = AssetConversion::get_next_pool_asset_id(); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_3)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_3.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 * 2 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 3, user, 1000)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 10, 10000, @@ -294,28 +368,28 @@ fn can_add_liquidity() { user, )); - let pool_id = (token_1, token_2); + let pool_id = (token_1.clone(), token_2.clone()); assert!(events().contains(&Event::::LiquidityAdded { who: user, mint_to: user, - pool_id, + pool_id: pool_id.clone(), amount1_provided: 10000, amount2_provided: 10, lp_token: lp_token1, lp_token_minted: 216, })); - let pallet_account = AssetConversion::get_pool_account(&pool_id); - assert_eq!(balance(pallet_account, token_1), 10000); - assert_eq!(balance(pallet_account, token_2), 10); - assert_eq!(balance(user, token_1), 10000 + ed); - assert_eq!(balance(user, token_2), 1000 - 10); + let pallet_account = ::PoolLocator::address(&pool_id).unwrap(); + assert_eq!(balance(pallet_account, token_1.clone()), 10000); + assert_eq!(balance(pallet_account, token_2.clone()), 10); + assert_eq!(balance(user, token_1.clone()), 10000 + ed); + assert_eq!(balance(user, 
token_2.clone()), 1000 - 10); assert_eq!(pool_balance(user, lp_token1), 216); // try to pass the non-native - native assets, the result should be the same assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_3, - token_1, + Box::new(token_3.clone()), + Box::new(token_1.clone()), 10, 10000, 10, @@ -323,21 +397,21 @@ fn can_add_liquidity() { user, )); - let pool_id = (token_1, token_3); + let pool_id = (token_1.clone(), token_3.clone()); assert!(events().contains(&Event::::LiquidityAdded { who: user, mint_to: user, - pool_id, - amount1_provided: 10000, - amount2_provided: 10, + pool_id: pool_id.clone(), + amount1_provided: 10, + amount2_provided: 10000, lp_token: lp_token2, lp_token_minted: 216, })); - let pallet_account = AssetConversion::get_pool_account(&pool_id); - assert_eq!(balance(pallet_account, token_1), 10000); - assert_eq!(balance(pallet_account, token_3), 10); - assert_eq!(balance(user, token_1), ed); - assert_eq!(balance(user, token_3), 1000 - 10); + let pallet_account = ::PoolLocator::address(&pool_id).unwrap(); + assert_eq!(balance(pallet_account, token_1.clone()), 10000); + assert_eq!(balance(pallet_account, token_3.clone()), 10); + assert_eq!(balance(user, token_1.clone()), ed); + assert_eq!(balance(user, token_3.clone()), 1000 - 10); assert_eq!(pool_balance(user, lp_token2), 216); }); } @@ -346,11 +420,15 @@ fn can_add_liquidity() { fn add_tiny_liquidity_leads_to_insufficient_liquidity_minted_error() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 1000)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); @@ -358,8 +436,8 @@ fn add_tiny_liquidity_leads_to_insufficient_liquidity_minted_error() { assert_noop!( AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 1, 1, 1, @@ -372,9 +450,9 @@ fn add_tiny_liquidity_leads_to_insufficient_liquidity_minted_error() { assert_noop!( AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, - get_ed(), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + get_native_ed(), 1, 1, 1, @@ -389,27 +467,37 @@ fn add_tiny_liquidity_leads_to_insufficient_liquidity_minted_error() { fn add_tiny_liquidity_directly_to_pool_address() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let token_3 = NativeOrAssetId::Asset(3); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); - create_tokens(user, vec![token_2, token_3]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_3)); + create_tokens(user, vec![token_2.clone(), token_3.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); 
+ assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_3.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 * 2 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 3, user, 1000)); - // check we're still able to add the liquidity even when the pool already has some token_1 - let pallet_account = AssetConversion::get_pool_account(&(token_1, token_2)); + // check we're still able to add the liquidity even when the pool already has some + // token_1 + let pallet_account = + ::PoolLocator::address(&(token_1.clone(), token_2.clone())).unwrap(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), pallet_account, 1000)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 10, 10000, @@ -417,13 +505,11 @@ fn add_tiny_liquidity_directly_to_pool_address() { user, )); - // check the same but for token_3 (non-native token) - let pallet_account = AssetConversion::get_pool_account(&(token_1, token_3)); - assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, pallet_account, 1)); + // check the same but for token_3 (non-native token) assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_3, + Box::new(token_1.clone()), + Box::new(token_3.clone()), 10000, 10, 10000, @@ -437,21 +523,31 @@ fn add_tiny_liquidity_directly_to_pool_address() { user, )); }); } fn can_remove_liquidity() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let pool_id = (token_1, token_2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let pool_id = (token_1.clone(), token_2.clone()); - create_tokens(user, vec![token_2]); + create_tokens(user, vec![token_2.clone()]); let lp_token = AssetConversion::get_next_pool_asset_id(); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000000000)); - assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 100000)); + let ed_token_1 = >::minimum_balance(); + let ed_token_2 = >::minimum_balance(2); + assert_ok!(Balances::force_set_balance( + RuntimeOrigin::root(), + user, + 10000000000 + ed_token_1 + )); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 100000 + ed_token_2)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 1000000000, 100000, 1000000000, @@ -464,8 +560,8 @@ fn can_remove_liquidity() { assert_ok!(AssetConversion::remove_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), total_lp_received, 0, 0, @@ -475,7 +571,7 @@ fn can_remove_liquidity() { assert!(events().contains(&Event::::LiquidityRemoved { who: user, withdraw_to: user, - pool_id, + pool_id: pool_id.clone(), amount1: 899991000, amount2: 89999, lp_token, @@ -483,13 +579,16 @@ fn can_remove_liquidity() { withdrawal_fee: ::LiquidityWithdrawalFee::get() })); - let pool_account =
AssetConversion::get_pool_account(&pool_id); - assert_eq!(balance(pool_account, token_1), 100009000); - assert_eq!(balance(pool_account, token_2), 10001); + let pool_account = ::PoolLocator::address(&pool_id).unwrap(); + assert_eq!(balance(pool_account, token_1.clone()), 100009000); + assert_eq!(balance(pool_account, token_2.clone()), 10001); assert_eq!(pool_balance(pool_account, lp_token), 100); - assert_eq!(balance(user, token_1), 10000000000 - 1000000000 + 899991000); - assert_eq!(balance(user, token_2), 89999); + assert_eq!( + balance(user, token_1.clone()), + 10000000000 - 1000000000 + 899991000 + ed_token_1 + ); + assert_eq!(balance(user, token_2.clone()), 89999 + ed_token_2); assert_eq!(pool_balance(user, lp_token), 0); }); } @@ -498,20 +597,28 @@ fn can_remove_liquidity() { fn can_not_redeem_more_lp_tokens_than_were_minted() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); let lp_token = AssetConversion::get_next_pool_asset_id(); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 + get_ed())); + assert_ok!(Balances::force_set_balance( + RuntimeOrigin::root(), + user, + 10000 + get_native_ed() + )); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 10, 10000, @@ -525,8 +632,8 @@ fn can_not_redeem_more_lp_tokens_than_were_minted() { assert_noop!( AssetConversion::remove_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 216 + 1, // Try and redeem 10 lp tokens while only 9 minted. 0, 0, @@ -541,19 +648,23 @@ fn can_not_redeem_more_lp_tokens_than_were_minted() { fn can_quote_price() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 100000)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 200, 1, @@ -563,8 +674,8 @@ fn can_quote_price() { assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 3000, false, ), @@ -573,8 +684,8 @@ fn can_quote_price() { // including fee so should get less out... 
assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 3000, true, ), @@ -584,8 +695,8 @@ fn can_quote_price() { // (if the above accidentally exchanged then it would not give same quote as before) assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 3000, false, ), @@ -594,8 +705,8 @@ fn can_quote_price() { // including fee so should get less out... assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 3000, true, ), @@ -605,8 +716,8 @@ fn can_quote_price() { // Check inverse: assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, 60, false, ), @@ -615,8 +726,8 @@ fn can_quote_price() { // including fee so should get less out... assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, 60, true, ), @@ -628,8 +739,8 @@ fn can_quote_price() { // assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 60, false, ), @@ -638,8 +749,8 @@ fn can_quote_price() { // including fee so should need to put more in... assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 60, true, ), @@ -649,8 +760,8 @@ fn can_quote_price() { // (if the above accidentally exchanged then it would not give same quote as before) assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 60, false, ), @@ -659,8 +770,8 @@ fn can_quote_price() { // including fee so should need to put more in... assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), 60, true, ), @@ -670,8 +781,8 @@ fn can_quote_price() { // Check inverse: assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, 3000, false, ), @@ -680,8 +791,8 @@ fn can_quote_price() { // including fee so should need to put more in... 
assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, 3000, true, ), @@ -695,14 +806,14 @@ fn can_quote_price() { assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, amount_in, false, ) .and_then(|amount| AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), amount, false, )), @@ -710,14 +821,14 @@ fn can_quote_price() { ); assert_eq!( AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), amount_in, false, ) .and_then(|amount| AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, amount, false, )), @@ -726,14 +837,14 @@ fn can_quote_price() { assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, amount_in, false, ) .and_then(|amount| AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), amount, false, )), @@ -741,14 +852,14 @@ fn can_quote_price() { ); assert_eq!( AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(2), + NativeOrWithId::Native, + NativeOrWithId::WithId(2), amount_in, false, ) .and_then(|amount| AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(2), - NativeOrAssetId::Native, + NativeOrWithId::WithId(2), + NativeOrWithId::Native, amount, false, )), @@ -762,19 +873,23 @@ fn quote_price_exact_tokens_for_tokens_matches_execution() { new_test_ext().execute_with(|| { let user = 1; let user2 = 2; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 100000)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 200, 1, @@ -785,23 +900,28 @@ fn quote_price_exact_tokens_for_tokens_matches_execution() { let amount = 1; let quoted_price = 49; assert_eq!( - AssetConversion::quote_price_exact_tokens_for_tokens(token_2, token_1, amount, true,), + AssetConversion::quote_price_exact_tokens_for_tokens( + token_2.clone(), + token_1.clone(), + amount, + true, + ), Some(quoted_price) ); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user2, amount)); let prior_dot_balance = 20000; - assert_eq!(prior_dot_balance, balance(user2, token_1)); + assert_eq!(prior_dot_balance, balance(user2, token_1.clone())); 
assert_ok!(AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user2), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], amount, 1, user2, false, )); - assert_eq!(prior_dot_balance + quoted_price, balance(user2, token_1)); + assert_eq!(prior_dot_balance + quoted_price, balance(user2, token_1.clone())); }); } @@ -810,19 +930,23 @@ fn quote_price_tokens_for_exact_tokens_matches_execution() { new_test_ext().execute_with(|| { let user = 1; let user2 = 2; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 100000)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 200, 1, @@ -833,26 +957,31 @@ fn quote_price_tokens_for_exact_tokens_matches_execution() { let amount = 49; let quoted_price = 1; assert_eq!( - AssetConversion::quote_price_tokens_for_exact_tokens(token_2, token_1, amount, true,), + AssetConversion::quote_price_tokens_for_exact_tokens( + token_2.clone(), + token_1.clone(), + amount, + true, + ), Some(quoted_price) ); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user2, amount)); let prior_dot_balance = 20000; - assert_eq!(prior_dot_balance, balance(user2, token_1)); + assert_eq!(prior_dot_balance, balance(user2, token_1.clone())); let prior_asset_balance = 49; - assert_eq!(prior_asset_balance, balance(user2, token_2)); + assert_eq!(prior_asset_balance, balance(user2, token_2.clone())); assert_ok!(AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user2), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], amount, 1, user2, false, )); - assert_eq!(prior_dot_balance + amount, balance(user2, token_1)); - assert_eq!(prior_asset_balance - quoted_price, balance(user2, token_2)); + assert_eq!(prior_dot_balance + amount, balance(user2, token_1.clone())); + assert_eq!(prior_asset_balance - quoted_price, balance(user2, token_2.clone())); }); } @@ -860,14 +989,18 @@ fn quote_price_tokens_for_exact_tokens_matches_execution() { fn can_swap_with_native() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let pool_id = (token_1, token_2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let pool_id = (token_1.clone(), token_2.clone()); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); @@ -876,8 +1009,8 @@ fn can_swap_with_native() { assert_ok!(AssetConversion::add_liquidity( 
RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -893,18 +1026,18 @@ fn can_swap_with_native() { assert_ok!(AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], input_amount, 1, user, false, )); - let pallet_account = AssetConversion::get_pool_account(&pool_id); - assert_eq!(balance(user, token_1), expect_receive + ed); - assert_eq!(balance(user, token_2), 1000 - liquidity2 - input_amount); - assert_eq!(balance(pallet_account, token_1), liquidity1 - expect_receive); - assert_eq!(balance(pallet_account, token_2), liquidity2 + input_amount); + let pallet_account = ::PoolLocator::address(&pool_id).unwrap(); + assert_eq!(balance(user, token_1.clone()), expect_receive + ed); + assert_eq!(balance(user, token_2.clone()), 1000 - liquidity2 - input_amount); + assert_eq!(balance(pallet_account, token_1.clone()), liquidity1 - expect_receive); + assert_eq!(balance(pallet_account, token_2.clone()), liquidity2 + input_amount); }); } @@ -912,10 +1045,14 @@ fn can_swap_with_native() { fn can_swap_with_realistic_values() { new_test_ext().execute_with(|| { let user = 1; - let dot = NativeOrAssetId::Native; - let usd = NativeOrAssetId::Asset(2); - create_tokens(user, vec![usd]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), dot, usd)); + let dot = NativeOrWithId::Native; + let usd = NativeOrWithId::WithId(2); + create_tokens(user, vec![usd.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(dot.clone()), + Box::new(usd.clone()) + )); const UNIT: u128 = 1_000_000_000; @@ -926,8 +1063,8 @@ fn can_swap_with_realistic_values() { let liquidity_usd = 1_000_000 * UNIT; assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - dot, - usd, + Box::new(dot.clone()), + Box::new(usd.clone()), liquidity_dot, liquidity_usd, 1, @@ -939,7 +1076,7 @@ fn can_swap_with_realistic_values() { assert_ok!(AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![usd, dot], + bvec![usd.clone(), dot.clone()], input_amount, 1, user, @@ -949,9 +1086,9 @@ fn can_swap_with_realistic_values() { assert!(events().contains(&Event::::SwapExecuted { who: user, send_to: user, - path: bvec![usd, dot], amount_in: 10 * UNIT, // usd amount_out: 1_993_980_120, // About 2 dot after div by UNIT. 
+ path: vec![(usd, 10 * UNIT), (dot, 1_993_980_120)], })); }); } @@ -960,17 +1097,21 @@ fn can_swap_with_realistic_values() { fn can_not_swap_in_pool_with_no_liquidity_added_yet() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); // Check can't swap an empty pool assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], 10, 1, user, @@ -985,15 +1126,19 @@ fn can_not_swap_in_pool_with_no_liquidity_added_yet() { fn check_no_panic_when_try_swap_close_to_empty_pool() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let pool_id = (token_1, token_2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let pool_id = (token_1.clone(), token_2.clone()); let lp_token = AssetConversion::get_next_pool_asset_id(); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); @@ -1002,8 +1147,8 @@ fn check_no_panic_when_try_swap_close_to_empty_pool() { assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1015,21 +1160,21 @@ fn check_no_panic_when_try_swap_close_to_empty_pool() { assert!(events().contains(&Event::::LiquidityAdded { who: user, mint_to: user, - pool_id, + pool_id: pool_id.clone(), amount1_provided: liquidity1, amount2_provided: liquidity2, lp_token, lp_token_minted, })); - let pallet_account = AssetConversion::get_pool_account(&pool_id); - assert_eq!(balance(pallet_account, token_1), liquidity1); - assert_eq!(balance(pallet_account, token_2), liquidity2); + let pallet_account = ::PoolLocator::address(&pool_id).unwrap(); + assert_eq!(balance(pallet_account, token_1.clone()), liquidity1); + assert_eq!(balance(pallet_account, token_2.clone()), liquidity2); assert_ok!(AssetConversion::remove_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), lp_token_minted, 1, 1, @@ -1038,33 +1183,33 @@ fn check_no_panic_when_try_swap_close_to_empty_pool() { // Now, the pool should exist but be almost empty. // Let's try and drain it. 
- assert_eq!(balance(pallet_account, token_1), 708); - assert_eq!(balance(pallet_account, token_2), 15); + assert_eq!(balance(pallet_account, token_1.clone()), 708); + assert_eq!(balance(pallet_account, token_2.clone()), 15); // validate the reserve should always stay above the ED assert_noop!( AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], 708 - ed + 1, // amount_out 500, // amount_in_max user, false, ), - Error::::ReserveLeftLessThanMinimal + TokenError::NotExpendable, ); assert_ok!(AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], 608, // amount_out 500, // amount_in_max user, false, )); - let token_1_left = balance(pallet_account, token_1); - let token_2_left = balance(pallet_account, token_2); + let token_1_left = balance(pallet_account, token_1.clone()); + let token_2_left = balance(pallet_account, token_2.clone()); assert_eq!(token_1_left, 708 - 608); // The price for the last tokens should be very high @@ -1078,7 +1223,7 @@ fn check_no_panic_when_try_swap_close_to_empty_pool() { assert_noop!( AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], token_1_left - 1, // amount_out 1000, // amount_in_max user, @@ -1091,7 +1236,7 @@ fn check_no_panic_when_try_swap_close_to_empty_pool() { assert_noop!( AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], token_1_left, // amount_out 1000, // amount_in_max user, @@ -1106,13 +1251,21 @@ fn check_no_panic_when_try_swap_close_to_empty_pool() { fn swap_should_not_work_if_too_much_slippage() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 + get_ed())); + assert_ok!(Balances::force_set_balance( + RuntimeOrigin::root(), + user, + 10000 + get_native_ed() + )); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); let liquidity1 = 10000; @@ -1120,8 +1273,8 @@ fn swap_should_not_work_if_too_much_slippage() { assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1134,7 +1287,7 @@ fn swap_should_not_work_if_too_much_slippage() { assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_2, token_1], + bvec![token_2.clone(), token_1.clone()], exchange_amount, // amount_in 4000, // amount_out_min user, @@ -1149,28 +1302,32 @@ fn swap_should_not_work_if_too_much_slippage() { fn can_swap_tokens_for_exact_tokens() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let pool_id = (token_1, token_2); + let token_1 = NativeOrWithId::Native; + let token_2 = 
NativeOrWithId::WithId(2); + let pool_id = (token_1.clone(), token_2.clone()); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); - let pallet_account = AssetConversion::get_pool_account(&pool_id); - let before1 = balance(pallet_account, token_1) + balance(user, token_1); - let before2 = balance(pallet_account, token_2) + balance(user, token_2); + let pallet_account = ::PoolLocator::address(&pool_id).unwrap(); + let before1 = balance(pallet_account, token_1.clone()) + balance(user, token_1.clone()); + let before2 = balance(pallet_account, token_2.clone()) + balance(user, token_2.clone()); let liquidity1 = 10000; let liquidity2 = 200; assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1185,23 +1342,29 @@ fn can_swap_tokens_for_exact_tokens() { assert_ok!(AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2], + bvec![token_1.clone(), token_2.clone()], exchange_out, // amount_out 3500, // amount_in_max user, true, )); - assert_eq!(balance(user, token_1), 10000 + ed - expect_in); - assert_eq!(balance(user, token_2), 1000 - liquidity2 + exchange_out); - assert_eq!(balance(pallet_account, token_1), liquidity1 + expect_in); - assert_eq!(balance(pallet_account, token_2), liquidity2 - exchange_out); + assert_eq!(balance(user, token_1.clone()), 10000 + ed - expect_in); + assert_eq!(balance(user, token_2.clone()), 1000 - liquidity2 + exchange_out); + assert_eq!(balance(pallet_account, token_1.clone()), liquidity1 + expect_in); + assert_eq!(balance(pallet_account, token_2.clone()), liquidity2 - exchange_out); // check invariants: // native and asset totals should be preserved. 
- assert_eq!(before1, balance(pallet_account, token_1) + balance(user, token_1)); - assert_eq!(before2, balance(pallet_account, token_2) + balance(user, token_2)); + assert_eq!( + before1, + balance(pallet_account, token_1.clone()) + balance(user, token_1.clone()) + ); + assert_eq!( + before2, + balance(pallet_account, token_2.clone()) + balance(user, token_2.clone()) + ); }); } @@ -1210,34 +1373,40 @@ fn can_swap_tokens_for_exact_tokens_when_not_liquidity_provider() { new_test_ext().execute_with(|| { let user = 1; let user2 = 2; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let pool_id = (token_1, token_2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let pool_id = (token_1.clone(), token_2.clone()); let lp_token = AssetConversion::get_next_pool_asset_id(); - create_tokens(user2, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user2), token_1, token_2)); + create_tokens(user2, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user2), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); let base1 = 10000; let base2 = 1000; assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, base1 + ed)); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, base1 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 2, user2, base2)); - let pallet_account = AssetConversion::get_pool_account(&pool_id); - let before1 = - balance(pallet_account, token_1) + balance(user, token_1) + balance(user2, token_1); - let before2 = - balance(pallet_account, token_2) + balance(user, token_2) + balance(user2, token_2); + let pallet_account = ::PoolLocator::address(&pool_id).unwrap(); + let before1 = balance(pallet_account, token_1.clone()) + + balance(user, token_1.clone()) + + balance(user2, token_1.clone()); + let before2 = balance(pallet_account, token_2.clone()) + + balance(user, token_2.clone()) + + balance(user2, token_2.clone()); let liquidity1 = 10000; let liquidity2 = 200; assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user2), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1245,8 +1414,8 @@ fn can_swap_tokens_for_exact_tokens_when_not_liquidity_provider() { user2, )); - assert_eq!(balance(user, token_1), base1 + ed); - assert_eq!(balance(user, token_2), 0); + assert_eq!(balance(user, token_1.clone()), base1 + ed); + assert_eq!(balance(user, token_2.clone()), 0); let exchange_out = 50; let expect_in = AssetConversion::get_amount_in(&exchange_out, &liquidity1, &liquidity2) @@ -1255,28 +1424,32 @@ fn can_swap_tokens_for_exact_tokens_when_not_liquidity_provider() { assert_ok!(AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2], + bvec![token_1.clone(), token_2.clone()], exchange_out, // amount_out 3500, // amount_in_max user, true, )); - assert_eq!(balance(user, token_1), base1 + ed - expect_in); - assert_eq!(balance(pallet_account, token_1), liquidity1 + expect_in); - assert_eq!(balance(user, token_2), exchange_out); - assert_eq!(balance(pallet_account, token_2), liquidity2 - exchange_out); + assert_eq!(balance(user, token_1.clone()), base1 + ed - expect_in); + assert_eq!(balance(pallet_account, token_1.clone()), liquidity1 + expect_in); + assert_eq!(balance(user, token_2.clone()), exchange_out); + assert_eq!(balance(pallet_account, 
token_2.clone()), liquidity2 - exchange_out); // check invariants: // native and asset totals should be preserved. assert_eq!( before1, - balance(pallet_account, token_1) + balance(user, token_1) + balance(user2, token_1) + balance(pallet_account, token_1.clone()) + + balance(user, token_1.clone()) + + balance(user2, token_1.clone()) ); assert_eq!( before2, - balance(pallet_account, token_2) + balance(user, token_2) + balance(user2, token_2) + balance(pallet_account, token_2.clone()) + + balance(user, token_2.clone()) + + balance(user2, token_2.clone()) ); let lp_token_minted = pool_balance(user2, lp_token); @@ -1284,8 +1457,8 @@ fn can_swap_tokens_for_exact_tokens_when_not_liquidity_provider() { assert_ok!(AssetConversion::remove_liquidity( RuntimeOrigin::signed(user2), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), lp_token_minted, 0, 0, @@ -1299,21 +1472,26 @@ fn swap_when_existential_deposit_would_cause_reaping_but_keep_alive_set() { new_test_ext().execute_with(|| { let user = 1; let user2 = 2; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user2, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user2), token_1, token_2)); + create_tokens(user2, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user2), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 101)); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, 10000 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 2, user2, 1000)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 2, user, 2)); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user2), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 200, 1, @@ -1324,7 +1502,7 @@ fn swap_when_existential_deposit_would_cause_reaping_but_keep_alive_set() { assert_noop!( AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2], + bvec![token_1.clone(), token_2.clone()], 1, // amount_out 101, // amount_in_max user, @@ -1336,7 +1514,7 @@ fn swap_when_existential_deposit_would_cause_reaping_but_keep_alive_set() { assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2], + bvec![token_1.clone(), token_2.clone()], 51, // amount_in 1, // amount_out_min user, @@ -1344,6 +1522,197 @@ fn swap_when_existential_deposit_would_cause_reaping_but_keep_alive_set() { ), DispatchError::Token(TokenError::NotExpendable) ); + + assert_noop!( + AssetConversion::swap_tokens_for_exact_tokens( + RuntimeOrigin::signed(user), + bvec![token_2.clone(), token_1.clone()], + 51, // amount_out + 2, // amount_in_max + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + + assert_noop!( + AssetConversion::swap_exact_tokens_for_tokens( + RuntimeOrigin::signed(user), + bvec![token_2.clone(), token_1.clone()], + 2, // amount_in + 1, // amount_out_min + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + }); +} + +#[test] +fn swap_when_existential_deposit_would_cause_reaping_pool_account() { + new_test_ext().execute_with(|| { + let user = 1; + let user2 = 2; + let token_1 = NativeOrWithId::Native; + let token_2 = 
NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); + + let ed_assets = 100; + create_tokens_with_ed(user2, vec![token_2.clone(), token_3.clone()], ed_assets); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user2), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user2), + Box::new(token_1.clone()), + Box::new(token_3.clone()) + )); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user2), + Box::new(token_2.clone()), + Box::new(token_3.clone()) + )); + + let ed = get_native_ed(); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 2, user2, 400 + ed_assets)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 3, user2, 20000 + ed_assets)); + + assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 2, user, 400 + ed_assets)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user2), 3, user, 20000 + ed_assets)); + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user2), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + 10000, + 200, + 1, + 1, + user2, + )); + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user2), + Box::new(token_1.clone()), + Box::new(token_3.clone()), + 200, + 10000, + 1, + 1, + user2, + )); + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user2), + Box::new(token_2.clone()), + Box::new(token_3.clone()), + 200, + 10000, + 1, + 1, + user2, + )); + + // causes an account removal for asset token 2 + assert_noop!( + AssetConversion::swap_tokens_for_exact_tokens( + RuntimeOrigin::signed(user), + bvec![token_1.clone(), token_2.clone()], + 110, // amount_out + 20000, // amount_in_max + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + + // causes an account removal for asset token 2 + assert_noop!( + AssetConversion::swap_exact_tokens_for_tokens( + RuntimeOrigin::signed(user), + bvec![token_1.clone(), token_2.clone()], + 15000, // amount_in + 110, // amount_out_min + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + + // causes an account removal for native token 1 + assert_noop!( + AssetConversion::swap_tokens_for_exact_tokens( + RuntimeOrigin::signed(user), + bvec![token_3.clone(), token_1.clone()], + 110, // amount_out + 20000, // amount_in_max + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + + // causes an account removal for native token 1 + assert_noop!( + AssetConversion::swap_exact_tokens_for_tokens( + RuntimeOrigin::signed(user), + bvec![token_3.clone(), token_1.clone()], + 15000, // amount_in + 110, // amount_out_min + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + + // causes an account removal for native token 1 located in the middle of a swap path + let amount_in = AssetConversion::balance_path_from_amount_out( + 110, + vec![token_3.clone(), token_1.clone()], + ) + .unwrap() + .first() + .map(|(_, a)| *a) + .unwrap(); + + assert_noop!( + AssetConversion::swap_exact_tokens_for_tokens( + RuntimeOrigin::signed(user), + bvec![token_3.clone(), token_1.clone(), token_2.clone()], + amount_in, // amount_in + 1, // amount_out_min + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); + + // causes an account removal for asset token 2 located in the middle of a swap path
+ let amount_in = AssetConversion::balance_path_from_amount_out( + 110, + vec![token_1.clone(), token_2.clone()], + ) + .unwrap() + .first() + .map(|(_, a)| *a) + .unwrap(); + + assert_noop!( + AssetConversion::swap_exact_tokens_for_tokens( + RuntimeOrigin::signed(user), + bvec![token_1.clone(), token_2.clone(), token_3.clone()], + amount_in, // amount_in + 1, // amount_out_min + user, + true, + ), + DispatchError::Token(TokenError::NotExpendable) + ); }); } @@ -1351,13 +1720,21 @@ fn swap_when_existential_deposit_would_cause_reaping_but_keep_alive_set() { fn swap_tokens_for_exact_tokens_should_not_work_if_too_much_slippage() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + get_ed())); + assert_ok!(Balances::force_set_balance( + RuntimeOrigin::root(), + user, + 20000 + get_native_ed() + )); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); let liquidity1 = 10000; @@ -1365,8 +1742,8 @@ fn swap_tokens_for_exact_tokens_should_not_work_if_too_much_slippage() { assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1379,7 +1756,7 @@ fn swap_tokens_for_exact_tokens_should_not_work_if_too_much_slippage() { assert_noop!( AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2], + bvec![token_1.clone(), token_2.clone()], exchange_out, // amount_out 50, // amount_in_max just greater than slippage. 
user, @@ -1394,15 +1771,23 @@ fn swap_tokens_for_exact_tokens_should_not_work_if_too_much_slippage() { fn swap_exact_tokens_for_tokens_in_multi_hops() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let token_3 = NativeOrAssetId::Asset(3); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); - create_tokens(user, vec![token_2, token_3]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_3)); + create_tokens(user, vec![token_2.clone(), token_3.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_3.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); let base1 = 10000; let base2 = 10000; assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, base1 * 2 + ed)); @@ -1415,8 +1800,8 @@ fn swap_exact_tokens_for_tokens_in_multi_hops() { assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1425,8 +1810,8 @@ fn swap_exact_tokens_for_tokens_in_multi_hops() { )); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_2, - token_3, + Box::new(token_2.clone()), + Box::new(token_3.clone()), liquidity2, liquidity3, 1, @@ -1445,7 +1830,7 @@ fn swap_exact_tokens_for_tokens_in_multi_hops() { assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_1], + bvec![token_1.clone()], input_amount, 80, user, @@ -1457,7 +1842,7 @@ fn swap_exact_tokens_for_tokens_in_multi_hops() { assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2, token_3, token_2], + bvec![token_1.clone(), token_2.clone(), token_3.clone(), token_2.clone()], input_amount, 80, user, @@ -1468,24 +1853,24 @@ fn swap_exact_tokens_for_tokens_in_multi_hops() { assert_ok!(AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2, token_3], + bvec![token_1.clone(), token_2.clone(), token_3.clone()], input_amount, // amount_in 80, // amount_out_min user, true, )); - let pool_id1 = (token_1, token_2); - let pool_id2 = (token_2, token_3); - let pallet_account1 = AssetConversion::get_pool_account(&pool_id1); - let pallet_account2 = AssetConversion::get_pool_account(&pool_id2); - - assert_eq!(balance(user, token_1), base1 + ed - input_amount); - assert_eq!(balance(pallet_account1, token_1), liquidity1 + input_amount); - assert_eq!(balance(pallet_account1, token_2), liquidity2 - expect_out2); - assert_eq!(balance(pallet_account2, token_2), liquidity2 + expect_out2); - assert_eq!(balance(pallet_account2, token_3), liquidity3 - expect_out3); - assert_eq!(balance(user, token_3), 10000 - liquidity3 + expect_out3); + let pool_id1 = (token_1.clone(), token_2.clone()); + let pool_id2 = (token_2.clone(), token_3.clone()); + let pallet_account1 = ::PoolLocator::address(&pool_id1).unwrap(); + let pallet_account2 = ::PoolLocator::address(&pool_id2).unwrap(); + + assert_eq!(balance(user, token_1.clone()), base1 + ed - input_amount); + 
assert_eq!(balance(pallet_account1, token_1.clone()), liquidity1 + input_amount); + assert_eq!(balance(pallet_account1, token_2.clone()), liquidity2 - expect_out2); + assert_eq!(balance(pallet_account2, token_2.clone()), liquidity2 + expect_out2); + assert_eq!(balance(pallet_account2, token_3.clone()), liquidity3 - expect_out3); + assert_eq!(balance(user, token_3.clone()), 10000 - liquidity3 + expect_out3); }); } @@ -1493,15 +1878,23 @@ fn swap_exact_tokens_for_tokens_in_multi_hops() { fn swap_tokens_for_exact_tokens_in_multi_hops() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); - let token_3 = NativeOrAssetId::Asset(3); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); - create_tokens(user, vec![token_2, token_3]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_2, token_3)); + create_tokens(user, vec![token_2.clone(), token_3.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_2.clone()), + Box::new(token_3.clone()) + )); - let ed = get_ed(); + let ed = get_native_ed(); let base1 = 10000; let base2 = 10000; assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, base1 * 2 + ed)); @@ -1514,8 +1907,8 @@ fn swap_tokens_for_exact_tokens_in_multi_hops() { assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), liquidity1, liquidity2, 1, @@ -1524,8 +1917,8 @@ fn swap_tokens_for_exact_tokens_in_multi_hops() { )); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_2, - token_3, + Box::new(token_2.clone()), + Box::new(token_3.clone()), liquidity2, liquidity3, 1, @@ -1543,24 +1936,24 @@ fn swap_tokens_for_exact_tokens_in_multi_hops() { assert_ok!(AssetConversion::swap_tokens_for_exact_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_2, token_3], + bvec![token_1.clone(), token_2.clone(), token_3.clone()], exchange_out3, // amount_out 1000, // amount_in_max user, true, )); - let pool_id1 = (token_1, token_2); - let pool_id2 = (token_2, token_3); - let pallet_account1 = AssetConversion::get_pool_account(&pool_id1); - let pallet_account2 = AssetConversion::get_pool_account(&pool_id2); - - assert_eq!(balance(user, token_1), base1 + ed - expect_in1); - assert_eq!(balance(pallet_account1, token_1), liquidity1 + expect_in1); - assert_eq!(balance(pallet_account1, token_2), liquidity2 - expect_in2); - assert_eq!(balance(pallet_account2, token_2), liquidity2 + expect_in2); - assert_eq!(balance(pallet_account2, token_3), liquidity3 - exchange_out3); - assert_eq!(balance(user, token_3), 10000 - liquidity3 + exchange_out3); + let pool_id1 = (token_1.clone(), token_2.clone()); + let pool_id2 = (token_2.clone(), token_3.clone()); + let pallet_account1 = ::PoolLocator::address(&pool_id1).unwrap(); + let pallet_account2 = ::PoolLocator::address(&pool_id2).unwrap(); + + assert_eq!(balance(user, token_1.clone()), base1 + ed - expect_in1); + assert_eq!(balance(pallet_account1, token_1.clone()), liquidity1 + expect_in1); + assert_eq!(balance(pallet_account1, token_2.clone()), liquidity2 - expect_in2); + 
assert_eq!(balance(pallet_account2, token_2.clone()), liquidity2 + expect_in2); + assert_eq!(balance(pallet_account2, token_3.clone()), liquidity3 - exchange_out3); + assert_eq!(balance(user, token_3.clone()), 10000 - liquidity3 + exchange_out3); }); } @@ -1568,9 +1961,10 @@ fn swap_tokens_for_exact_tokens_in_multi_hops() { fn can_not_swap_same_asset() { new_test_ext().execute_with(|| { let user = 1; - let token_1 = NativeOrAssetId::Asset(1); + let token_1 = NativeOrWithId::WithId(1); + let token_2 = NativeOrWithId::Native; - create_tokens(user, vec![token_1]); + create_tokens(user, vec![token_1.clone()]); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 1, user, 1000)); let liquidity1 = 1000; @@ -1578,60 +1972,44 @@ fn can_not_swap_same_asset() { assert_noop!( AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_1, + Box::new(token_1.clone()), + Box::new(token_1.clone()), liquidity1, liquidity2, 1, 1, user, ), - Error::::PoolNotFound + Error::::InvalidAssetPair ); let exchange_amount = 10; assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![token_1, token_1], + bvec![token_1.clone(), token_1.clone()], exchange_amount, 1, user, true, ), - Error::::PoolNotFound + Error::::InvalidAssetPair ); assert_noop!( AssetConversion::swap_exact_tokens_for_tokens( RuntimeOrigin::signed(user), - bvec![NativeOrAssetId::Native, NativeOrAssetId::Native], + bvec![token_2.clone(), token_2.clone()], exchange_amount, 1, user, true, ), - Error::::PoolNotFound + Error::::InvalidAssetPair ); }); } -#[test] -fn validate_pool_id_sorting() { - new_test_ext().execute_with(|| { - use crate::NativeOrAssetId::{Asset, Native}; - assert_eq!(AssetConversion::get_pool_id(Native, Asset(2)), (Native, Asset(2))); - assert_eq!(AssetConversion::get_pool_id(Asset(2), Native), (Native, Asset(2))); - assert_eq!(AssetConversion::get_pool_id(Native, Native), (Native, Native)); - assert_eq!(AssetConversion::get_pool_id(Asset(2), Asset(1)), (Asset(1), Asset(2))); - assert!(Asset(2) > Asset(1)); - assert!(Asset(1) <= Asset(1)); - assert_eq!(Asset(1), Asset(1)); - assert_eq!(Native::, Native::); - assert!(Native < Asset(1)); - }); -} - #[test] fn cannot_block_pool_creation() { new_test_ext().execute_with(|| { @@ -1640,16 +2018,16 @@ fn cannot_block_pool_creation() { // User 2 is the attacker let attacker = 2; - let ed = get_ed(); + let ed = get_native_ed(); assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), attacker, 10000 + ed)); - // The target pool the user wants to create is Native <=> Asset(2) - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(2); + // The target pool the user wants to create is Native <=> WithId(2) + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); // Attacker computes the still non-existing pool account for the target pair let pool_account = - AssetConversion::get_pool_account(&AssetConversion::get_pool_id(token_2, token_1)); + ::PoolLocator::address(&(token_1.clone(), token_2.clone())).unwrap(); // And transfers the ED to that pool account assert_ok!(Balances::transfer_allow_death( RuntimeOrigin::signed(attacker), @@ -1658,17 +2036,21 @@ fn cannot_block_pool_creation() { )); // Then, the attacker creates 14 tokens and sends one of each to the pool account for i in 10..25 { - create_tokens(attacker, vec![NativeOrAssetId::Asset(i)]); + create_tokens(attacker, vec![NativeOrWithId::WithId(i)]); assert_ok!(Assets::mint(RuntimeOrigin::signed(attacker), i, attacker, 1000)); 
assert_ok!(Assets::transfer(RuntimeOrigin::signed(attacker), i, pool_account, 1)); } // User can still create the pool - create_tokens(user, vec![token_2]); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(user), token_1, token_2)); + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); - // User has to transfer one Asset(2) token to the pool account (otherwise add_liquidity will - // fail with `AssetTwoDepositDidNotMeetMinimum`) + // User has to transfer one WithId(2) token to the pool account (otherwise add_liquidity + // will fail with `AssetTwoDepositDidNotMeetMinimum`) assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 10000 + ed)); assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 10000)); assert_ok!(Assets::transfer(RuntimeOrigin::signed(user), 2, pool_account, 1)); @@ -1676,8 +2058,8 @@ fn cannot_block_pool_creation() { // add_liquidity shouldn't fail because of the number of consumers assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(user), - token_1, - token_2, + Box::new(token_1.clone()), + Box::new(token_2.clone()), 10000, 100, 10000, @@ -1686,3 +2068,429 @@ fn cannot_block_pool_creation() { )); }); } + +#[test] +fn swap_transactional() { + new_test_ext().execute_with(|| { + let user = 1; + let user2 = 2; + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + let token_3 = NativeOrWithId::WithId(3); + + let asset_ed = 150; + create_tokens_with_ed(user, vec![token_2.clone(), token_3.clone()], asset_ed); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_3.clone()) + )); + + let ed = get_native_ed(); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 3, user, 1000)); + + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user2, 1000)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 3, user2, 1000)); + + let liquidity1 = 10000; + let liquidity2 = 200; + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + liquidity1, + liquidity2, + 1, + 1, + user, + )); + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_3.clone()), + liquidity1, + liquidity2, + 1, + 1, + user, + )); + + let pool_1 = + ::PoolLocator::address(&(token_1.clone(), token_2.clone())).unwrap(); + let pool_2 = + ::PoolLocator::address(&(token_1.clone(), token_3.clone())).unwrap(); + + assert_eq!(Balances::balance(&pool_1), liquidity1); + assert_eq!(Assets::balance(2, pool_1), liquidity2); + assert_eq!(Balances::balance(&pool_2), liquidity1); + assert_eq!(Assets::balance(3, pool_2), liquidity2); + + // the amount that would cause a transfer from the last pool in the path to fail + let expected_out = liquidity2 - asset_ed + 1; + let amount_in = AssetConversion::balance_path_from_amount_out( + expected_out, + vec![token_2.clone(), token_1.clone(), token_3.clone()], + ) + .unwrap() + .first() + .map(|(_, 
a)| *a) + .unwrap(); + + // swap credit with `swap_tokens_for_exact_tokens` transactional + let credit_in = NativeAndAssets::issue(token_2.clone(), amount_in); + let credit_in_err_expected = NativeAndAssets::issue(token_2.clone(), amount_in); + // avoiding drop of any credit, to assert any storage mutation from an actual call. + let error; + assert_storage_noop!( + error = >::swap_tokens_for_exact_tokens( + vec![token_2.clone(), token_1.clone(), token_3.clone()], + credit_in, + expected_out, + ) + .unwrap_err() + ); + assert_eq!(error, (credit_in_err_expected, TokenError::NotExpendable.into())); + + // swap credit with `swap_exact_tokens_for_tokens` transactional + let credit_in = NativeAndAssets::issue(token_2.clone(), amount_in); + let credit_in_err_expected = NativeAndAssets::issue(token_2.clone(), amount_in); + // avoiding drop of any credit, to assert any storage mutation from an actual call. + let error; + assert_storage_noop!( + error = >::swap_exact_tokens_for_tokens( + vec![token_2.clone(), token_1.clone(), token_3.clone()], + credit_in, + Some(expected_out), + ) + .unwrap_err() + ); + assert_eq!(error, (credit_in_err_expected, TokenError::NotExpendable.into())); + + // swap with `swap_exact_tokens_for_tokens` transactional + assert_noop!( + >::swap_exact_tokens_for_tokens( + user2, + vec![token_2.clone(), token_1.clone(), token_3.clone()], + amount_in, + Some(expected_out), + user2, + true, + ), + TokenError::NotExpendable + ); + + // swap with `swap_exact_tokens_for_tokens` transactional + assert_noop!( + >::swap_tokens_for_exact_tokens( + user2, + vec![token_2.clone(), token_1.clone(), token_3.clone()], + expected_out, + Some(amount_in), + user2, + true, + ), + TokenError::NotExpendable + ); + + assert_eq!(Balances::balance(&pool_1), liquidity1); + assert_eq!(Assets::balance(2, pool_1), liquidity2); + assert_eq!(Balances::balance(&pool_2), liquidity1); + assert_eq!(Assets::balance(3, pool_2), liquidity2); + }) +} + +#[test] +fn swap_credit_returns_change() { + new_test_ext().execute_with(|| { + let user = 1; + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + + let ed = get_native_ed(); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); + + let liquidity1 = 10000; + let liquidity2 = 200; + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + liquidity1, + liquidity2, + 1, + 1, + user, + )); + + let expected_change = NativeAndAssets::issue(token_1.clone(), 100); + let expected_credit_out = NativeAndAssets::issue(token_2.clone(), 20); + + let amount_in_max = + AssetConversion::get_amount_in(&expected_credit_out.peek(), &liquidity1, &liquidity2) + .unwrap(); + + let credit_in = + NativeAndAssets::issue(token_1.clone(), amount_in_max + expected_change.peek()); + assert_ok!( + >::swap_tokens_for_exact_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + expected_credit_out.peek(), + ), + (expected_credit_out, expected_change) + ); + }) +} + +#[test] +fn swap_credit_insufficient_amount_bounds() { + new_test_ext().execute_with(|| { + let user = 1; + let user2 = 2; + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + + create_tokens(user, 
vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + + let ed = get_native_ed(); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); + + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user2, 1000)); + + let liquidity1 = 10000; + let liquidity2 = 200; + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + liquidity1, + liquidity2, + 1, + 1, + user, + )); + + // provided `credit_in` is not sufficient to swap for desired `amount_out_min` + let amount_out_min = 20; + let amount_in = + AssetConversion::get_amount_in(&(amount_out_min - 1), &liquidity2, &liquidity1) + .unwrap(); + let credit_in = NativeAndAssets::issue(token_1.clone(), amount_in); + let expected_credit_in = NativeAndAssets::issue(token_1.clone(), amount_in); + let error = >::swap_exact_tokens_for_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + Some(amount_out_min), + ) + .unwrap_err(); + assert_eq!( + error, + (expected_credit_in, Error::::ProvidedMinimumNotSufficientForSwap.into()) + ); + + // provided `credit_in` is not sufficient to swap for desired `amount_out` + let amount_out = 20; + let amount_in_max = + AssetConversion::get_amount_in(&(amount_out - 1), &liquidity2, &liquidity1).unwrap(); + let credit_in = NativeAndAssets::issue(token_1.clone(), amount_in_max); + let expected_credit_in = NativeAndAssets::issue(token_1.clone(), amount_in_max); + let error = >::swap_tokens_for_exact_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + amount_out, + ) + .unwrap_err(); + assert_eq!( + error, + (expected_credit_in, Error::::ProvidedMaximumNotSufficientForSwap.into()) + ); + }) +} + +#[test] +fn swap_credit_zero_amount() { + new_test_ext().execute_with(|| { + let user = 1; + let user2 = 2; + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + + let ed = get_native_ed(); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); + + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user2, 1000)); + + let liquidity1 = 10000; + let liquidity2 = 200; + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + liquidity1, + liquidity2, + 1, + 1, + user, + )); + + // swap with zero credit fails for `swap_exact_tokens_for_tokens` + let credit_in = CreditOf::::zero(token_1.clone()); + let expected_credit_in = CreditOf::::zero(token_1.clone()); + let error = >::swap_exact_tokens_for_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + None, + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::ZeroAmount.into())); + + // swap with zero credit fails for `swap_tokens_for_exact_tokens` + let credit_in = CreditOf::::zero(token_1.clone()); + let expected_credit_in = CreditOf::::zero(token_1.clone()); + let error = 
>::swap_tokens_for_exact_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + 10, + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::ZeroAmount.into())); + + // swap with zero amount_out_min fails for `swap_exact_tokens_for_tokens` + let credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let expected_credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let error = >::swap_exact_tokens_for_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + Some(0), + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::ZeroAmount.into())); + + // swap with zero amount_out fails with `swap_tokens_for_exact_tokens` fails + let credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let expected_credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let error = >::swap_tokens_for_exact_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + 0, + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::ZeroAmount.into())); + }); +} + +#[test] +fn swap_credit_invalid_path() { + new_test_ext().execute_with(|| { + let user = 1; + let user2 = 2; + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(2); + + create_tokens(user, vec![token_2.clone()]); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); + + let ed = get_native_ed(); + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user, 1000)); + + assert_ok!(Balances::force_set_balance(RuntimeOrigin::root(), user2, 20000 + ed)); + assert_ok!(Assets::mint(RuntimeOrigin::signed(user), 2, user2, 1000)); + + let liquidity1 = 10000; + let liquidity2 = 200; + + assert_ok!(AssetConversion::add_liquidity( + RuntimeOrigin::signed(user), + Box::new(token_1.clone()), + Box::new(token_2.clone()), + liquidity1, + liquidity2, + 1, + 1, + user, + )); + + // swap with credit_in.asset different from path[0] asset fails + let credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let expected_credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let error = >::swap_exact_tokens_for_tokens( + vec![token_2.clone(), token_1.clone()], + credit_in, + None, + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::InvalidPath.into())); + + // swap with credit_in.asset different from path[0] asset fails + let credit_in = NativeAndAssets::issue(token_2.clone(), 10); + let expected_credit_in = NativeAndAssets::issue(token_2.clone(), 10); + let error = >::swap_tokens_for_exact_tokens( + vec![token_1.clone(), token_2.clone()], + credit_in, + 10, + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::InvalidPath.into())); + + // swap with path.len < 2 fails + let credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let expected_credit_in = NativeAndAssets::issue(token_1.clone(), 10); + let error = >::swap_exact_tokens_for_tokens( + vec![token_2.clone()], + credit_in, + None, + ) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::InvalidPath.into())); + + // swap with path.len < 2 fails + let credit_in = NativeAndAssets::issue(token_2.clone(), 10); + let expected_credit_in = NativeAndAssets::issue(token_2.clone(), 10); + let error = + >::swap_tokens_for_exact_tokens(vec![], credit_in, 10) + .unwrap_err(); + assert_eq!(error, (expected_credit_in, Error::::InvalidPath.into())); + }); +} diff --git 
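The swap-credit tests above lean on two conventions of the new credit-based interface: a failed swap hands the unspent credit back alongside the error (which is what lets `assert_storage_noop!` hold), and `swap_tokens_for_exact_tokens` returns any surplus of the incoming credit as change. What follows is a minimal standalone sketch of the error-tuple shape only, with a hypothetical `Credit` wrapper and `SwapError` enum standing in for the pallet's imbalance and error types; it is not the pallet's implementation.

#[derive(Debug, PartialEq)]
struct Credit(u64); // stand-in for the pallet's credit/imbalance type

#[derive(Debug, PartialEq)]
enum SwapError {
    InvalidPath,
    ZeroAmount,
}

// On failure the unspent credit travels back with the error, so the caller
// can refund or retry instead of silently losing funds.
fn swap_exact_tokens_for_tokens(
    path: &[u32],
    credit_in: Credit,
) -> Result<Credit, (Credit, SwapError)> {
    if path.len() < 2 {
        return Err((credit_in, SwapError::InvalidPath));
    }
    if credit_in.0 == 0 {
        return Err((credit_in, SwapError::ZeroAmount));
    }
    // A real implementation would route the credit through each pool here;
    // the halving below is only a placeholder exchange rate.
    Ok(Credit(credit_in.0 / 2))
}

fn main() {
    let err = swap_exact_tokens_for_tokens(&[2], Credit(10)).unwrap_err();
    assert_eq!(err, (Credit(10), SwapError::InvalidPath));
    let out = swap_exact_tokens_for_tokens(&[2, 1], Credit(10)).unwrap();
    assert_eq!(out, Credit(5));
}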
a/substrate/frame/asset-conversion/src/types.rs b/substrate/frame/asset-conversion/src/types.rs index 9861ad6da08a..6be14970c14d 100644 --- a/substrate/frame/asset-conversion/src/types.rs +++ b/substrate/frame/asset-conversion/src/types.rs @@ -17,16 +17,22 @@ // limitations under the License. use super::*; - use codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; -use sp_std::{cmp::Ordering, marker::PhantomData}; +use sp_std::marker::PhantomData; -/// Pool ID. +/// Represents a swap path with associated asset amounts indicating how much of the asset needs to +/// be deposited to get the following asset's amount withdrawn (this is inclusive of fees). /// -/// The pool's `AccountId` is derived from this type. Any changes to the type may necessitate a -/// migration. -pub(super) type PoolIdOf = (::MultiAssetId, ::MultiAssetId); +/// Example: +/// Given path [(asset1, amount_in), (asset2, amount_out2), (asset3, amount_out3)], can be resolved: +/// 1. `asset(asset1, amount_in)` take from `user` and move to the pool(asset1, asset2); +/// 2. `asset(asset2, amount_out2)` transfer from pool(asset1, asset2) to pool(asset2, asset3); +/// 3. `asset(asset3, amount_out3)` move from pool(asset2, asset3) to `user`. +pub(super) type BalancePath = Vec<(::AssetKind, ::Balance)>; + +/// Credit of [Config::Assets]. +pub type CreditOf = Credit<::AccountId, ::Assets>; /// Stores the lp_token asset id a particular pool has been assigned. #[derive(Decode, Encode, Default, PartialEq, Eq, MaxEncodedLen, TypeInfo)] @@ -35,155 +41,94 @@ pub struct PoolInfo { pub lp_token: PoolAssetId, } -/// A trait that converts between a MultiAssetId and either the native currency or an AssetId. -pub trait MultiAssetIdConverter { - /// Returns the MultiAssetId representing the native currency of the chain. - fn get_native() -> MultiAssetId; - - /// Returns true if the given MultiAssetId is the native currency. - fn is_native(asset: &MultiAssetId) -> bool; - - /// If it's not native, returns the AssetId for the given MultiAssetId. - fn try_convert(asset: &MultiAssetId) -> MultiAssetIdConversionResult; -} - -/// Result of `MultiAssetIdConverter::try_convert`. -#[cfg_attr(feature = "std", derive(PartialEq, Debug))] -pub enum MultiAssetIdConversionResult { - /// Input asset is successfully converted. Means that converted asset is supported. - Converted(AssetId), - /// Means that input asset is the chain's native asset, if it has one, so no conversion (see - /// `MultiAssetIdConverter::get_native`). - Native, - /// Means input asset is not supported for pool. - Unsupported(MultiAssetId), -} - -/// Benchmark Helper -#[cfg(feature = "runtime-benchmarks")] -pub trait BenchmarkHelper { - /// Returns an `AssetId` from a given integer. - fn asset_id(asset_id: u32) -> AssetId; - - /// Returns a `MultiAssetId` from a given integer. - fn multiasset_id(asset_id: u32) -> MultiAssetId; +/// Provides means to resolve the `PoolId` and `AccountId` from a pair of assets. +/// +/// Resulting `PoolId` remains consistent whether the asset pair is presented as (asset1, asset2) +/// or (asset2, asset1). The derived `AccountId` may serve as an address for liquidity provider +/// tokens. +pub trait PoolLocator { + /// Retrieves the account address associated with a valid `PoolId`. + fn address(id: &PoolId) -> Result; + /// Identifies the `PoolId` for a given pair of assets. + /// + /// Returns an error if the asset pair isn't supported. 
+ fn pool_id(asset1: &AssetKind, asset2: &AssetKind) -> Result; + /// Retrieves the account address associated with a given asset pair. + /// + /// Returns an error if the asset pair isn't supported. + fn pool_address(asset1: &AssetKind, asset2: &AssetKind) -> Result { + if let Ok(id) = Self::pool_id(asset1, asset2) { + Self::address(&id) + } else { + Err(()) + } + } } -#[cfg(feature = "runtime-benchmarks")] -impl BenchmarkHelper for () +/// Pool locator that mandates the inclusion of the specified `FirstAsset` in every asset pair. +/// +/// The `PoolId` is represented as a tuple of `AssetKind`s with `FirstAsset` always positioned as +/// the first element. +pub struct WithFirstAsset( + PhantomData<(FirstAsset, AccountId, AssetKind)>, +); +impl PoolLocator + for WithFirstAsset where - AssetId: From, - MultiAssetId: From, + AssetKind: Eq + Clone + Encode, + AccountId: Decode, + FirstAsset: Get, { - fn asset_id(asset_id: u32) -> AssetId { - asset_id.into() + fn pool_id(asset1: &AssetKind, asset2: &AssetKind) -> Result<(AssetKind, AssetKind), ()> { + let first = FirstAsset::get(); + match true { + _ if asset1 == asset2 => Err(()), + _ if first == *asset1 => Ok((first, asset2.clone())), + _ if first == *asset2 => Ok((first, asset1.clone())), + _ => Err(()), + } } - - fn multiasset_id(asset_id: u32) -> MultiAssetId { - asset_id.into() + fn address(id: &(AssetKind, AssetKind)) -> Result { + let encoded = sp_io::hashing::blake2_256(&Encode::encode(id)[..]); + Decode::decode(&mut TrailingZeroInput::new(encoded.as_ref())).map_err(|_| ()) } } -/// Trait for providing methods to swap between the various asset classes. -pub trait Swap { - /// Swap exactly `amount_in` of asset `path[0]` for asset `path[1]`. - /// If an `amount_out_min` is specified, it will return an error if it is unable to acquire - /// the amount desired. - /// - /// Withdraws the `path[0]` asset from `sender`, deposits the `path[1]` asset to `send_to`, - /// respecting `keep_alive`. - /// - /// If successful, returns the amount of `path[1]` acquired for the `amount_in`. - fn swap_exact_tokens_for_tokens( - sender: AccountId, - path: Vec, - amount_in: Balance, - amount_out_min: Option, - send_to: AccountId, - keep_alive: bool, - ) -> Result; - - /// Take the `path[0]` asset and swap some amount for `amount_out` of the `path[1]`. If an - /// `amount_in_max` is specified, it will return an error if acquiring `amount_out` would be - /// too costly. - /// - /// Withdraws `path[0]` asset from `sender`, deposits `path[1]` asset to `send_to`, - /// respecting `keep_alive`. - /// - /// If successful returns the amount of the `path[0]` taken to provide `path[1]`. - fn swap_tokens_for_exact_tokens( - sender: AccountId, - path: Vec, - amount_out: Balance, - amount_in_max: Option, - send_to: AccountId, - keep_alive: bool, - ) -> Result; -} - -/// An implementation of MultiAssetId that can be either Native or an asset. -#[derive(Decode, Encode, Default, MaxEncodedLen, TypeInfo, Clone, Copy, Debug)] -pub enum NativeOrAssetId +/// Pool locator where the `PoolId` is a tuple of `AssetKind`s arranged in ascending order. +pub struct Ascending(PhantomData<(AccountId, AssetKind)>); +impl PoolLocator + for Ascending where - AssetId: Ord, + AssetKind: Ord + Clone + Encode, + AccountId: Decode, { - /// Native asset. For example, on the Polkadot Asset Hub this would be DOT. - #[default] - Native, - /// A non-native asset id. 
- Asset(AssetId), -} - -impl From for NativeOrAssetId { - fn from(asset: AssetId) -> Self { - Self::Asset(asset) - } -} - -impl Ord for NativeOrAssetId { - fn cmp(&self, other: &Self) -> Ordering { - match (self, other) { - (Self::Native, Self::Native) => Ordering::Equal, - (Self::Native, Self::Asset(_)) => Ordering::Less, - (Self::Asset(_), Self::Native) => Ordering::Greater, - (Self::Asset(id1), Self::Asset(id2)) => ::cmp(id1, id2), + fn pool_id(asset1: &AssetKind, asset2: &AssetKind) -> Result<(AssetKind, AssetKind), ()> { + match true { + _ if asset1 > asset2 => Ok((asset2.clone(), asset1.clone())), + _ if asset1 < asset2 => Ok((asset1.clone(), asset2.clone())), + _ => Err(()), } } -} -impl PartialOrd for NativeOrAssetId { - fn partial_cmp(&self, other: &Self) -> Option { - Some(::cmp(self, other)) - } -} -impl PartialEq for NativeOrAssetId { - fn eq(&self, other: &Self) -> bool { - self.cmp(other) == Ordering::Equal + fn address(id: &(AssetKind, AssetKind)) -> Result { + let encoded = sp_io::hashing::blake2_256(&Encode::encode(id)[..]); + Decode::decode(&mut TrailingZeroInput::new(encoded.as_ref())).map_err(|_| ()) } } -impl Eq for NativeOrAssetId {} -/// Converts between a MultiAssetId and an AssetId (or the native currency). -pub struct NativeOrAssetIdConverter { - _phantom: PhantomData, -} - -impl MultiAssetIdConverter, AssetId> - for NativeOrAssetIdConverter +/// Pool locator that chains the `First` and `Second` implementations of [`PoolLocator`]. +/// +/// If the `First` implementation fails, it falls back to the `Second`. +pub struct Chain(PhantomData<(First, Second)>); +impl PoolLocator + for Chain +where + First: PoolLocator, + Second: PoolLocator, { - fn get_native() -> NativeOrAssetId { - NativeOrAssetId::Native - } - - fn is_native(asset: &NativeOrAssetId) -> bool { - *asset == Self::get_native() + fn pool_id(asset1: &AssetKind, asset2: &AssetKind) -> Result<(AssetKind, AssetKind), ()> { + First::pool_id(asset1, asset2).or(Second::pool_id(asset1, asset2)) } - - fn try_convert( - asset: &NativeOrAssetId, - ) -> MultiAssetIdConversionResult, AssetId> { - match asset { - NativeOrAssetId::Asset(asset) => MultiAssetIdConversionResult::Converted(asset.clone()), - NativeOrAssetId::Native => MultiAssetIdConversionResult::Native, - } + fn address(id: &(AssetKind, AssetKind)) -> Result { + First::address(id).or(Second::address(id)) } } diff --git a/substrate/frame/asset-conversion/src/weights.rs b/substrate/frame/asset-conversion/src/weights.rs index 4341c9c2f2c6..40d03c7b6fe2 100644 --- a/substrate/frame/asset-conversion/src/weights.rs +++ b/substrate/frame/asset-conversion/src/weights.rs @@ -16,29 +16,27 @@ // See the License for the specific language governing permissions and // limitations under the License. -//! Autogenerated weights for pallet_asset_conversion +//! Autogenerated weights for `pallet_asset_conversion` //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-07-18, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2023-10-30, STEPS: `5`, REPEAT: `2`, LOW RANGE: `[]`, HIGH RANGE: `[]` //! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `runner-gghbxkbs-project-145-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz` -//! EXECUTION: ``, WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: `1024` +//! HOSTNAME: `cob`, CPU: `` +//! 
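The locators introduced above share one contract: `pool_id` must map (asset1, asset2) and (asset2, asset1) to the same `PoolId` and reject identical assets, and `address` then derives the pool account deterministically from the encoded id (via `blake2_256` in the real code). A standalone sketch of the `Ascending` ordering rule, with `u32` standing in for `AssetKind`:

use core::cmp::Ordering;

// Same normalisation rule as `Ascending::pool_id`, detached from the pallet
// generics: order the pair, reject equal assets.
fn ascending_pool_id(asset1: u32, asset2: u32) -> Result<(u32, u32), ()> {
    match asset1.cmp(&asset2) {
        Ordering::Less => Ok((asset1, asset2)),
        Ordering::Greater => Ok((asset2, asset1)),
        Ordering::Equal => Err(()),
    }
}

fn main() {
    assert_eq!(ascending_pool_id(2, 1), Ok((1, 2)));
    assert_eq!(ascending_pool_id(1, 2), ascending_pool_id(2, 1));
    assert!(ascending_pool_id(7, 7).is_err());
}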
WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: `1024` // Executed Command: -// target/production/substrate +// ./target/debug/substrate-node // benchmark // pallet -// --steps=50 -// --repeat=20 +// --chain=dev +// --steps=5 +// --repeat=2 +// --pallet=pallet-asset-conversion // --extrinsic=* // --wasm-execution=compiled // --heap-pages=4096 -// --json-file=/builds/parity/mirrors/substrate/.git/.artifacts/bench.json -// --pallet=pallet_asset_conversion -// --chain=dev -// --header=./HEADER-APACHE2 -// --output=./frame/asset-conversion/src/weights.rs -// --template=./.maintain/frame-weight-template.hbs +// --output=./substrate/frame/asset-conversion/src/weights.rs +// --template=./substrate/.maintain/frame-weight-template.hbs #![cfg_attr(rustfmt, rustfmt_skip)] #![allow(unused_parens)] @@ -48,25 +46,25 @@ use frame_support::{traits::Get, weights::{Weight, constants::RocksDbWeight}}; use core::marker::PhantomData; -/// Weight functions needed for pallet_asset_conversion. +/// Weight functions needed for `pallet_asset_conversion`. pub trait WeightInfo { fn create_pool() -> Weight; fn add_liquidity() -> Weight; fn remove_liquidity() -> Weight; - fn swap_exact_tokens_for_tokens() -> Weight; - fn swap_tokens_for_exact_tokens() -> Weight; + fn swap_exact_tokens_for_tokens(n: u32, ) -> Weight; + fn swap_tokens_for_exact_tokens(n: u32, ) -> Weight; } -/// Weights for pallet_asset_conversion using the Substrate node and recommended hardware. +/// Weights for `pallet_asset_conversion` using the Substrate node and recommended hardware. pub struct SubstrateWeight(PhantomData); impl WeightInfo for SubstrateWeight { /// Storage: `AssetConversion::Pools` (r:1 w:1) /// Proof: `AssetConversion::Pools` (`max_values`: None, `max_size`: Some(30), added: 2505, mode: `MaxEncodedLen`) /// Storage: `System::Account` (r:2 w:2) /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:1 w:1) + /// Storage: `Assets::Account` (r:2 w:2) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:1 w:1) + /// Storage: `Assets::Asset` (r:2 w:2) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) /// Storage: `AssetConversion::NextPoolAssetId` (r:1 w:1) /// Proof: `AssetConversion::NextPoolAssetId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) @@ -76,20 +74,18 @@ impl WeightInfo for SubstrateWeight { /// Proof: `PoolAssets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) fn create_pool() -> Weight { // Proof Size summary in bytes: - // Measured: `729` - // Estimated: `6196` - // Minimum execution time: 131_688_000 picoseconds. - Weight::from_parts(134_092_000, 6196) - .saturating_add(T::DbWeight::get().reads(8_u64)) - .saturating_add(T::DbWeight::get().writes(8_u64)) + // Measured: `1081` + // Estimated: `6360` + // Minimum execution time: 1_576_000_000 picoseconds. 
+ Weight::from_parts(1_668_000_000, 6360) + .saturating_add(T::DbWeight::get().reads(10_u64)) + .saturating_add(T::DbWeight::get().writes(10_u64)) } /// Storage: `AssetConversion::Pools` (r:1 w:0) /// Proof: `AssetConversion::Pools` (`max_values`: None, `max_size`: Some(30), added: 2505, mode: `MaxEncodedLen`) - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:1 w:1) + /// Storage: `Assets::Asset` (r:2 w:2) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:2 w:2) + /// Storage: `Assets::Account` (r:4 w:4) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) /// Storage: `PoolAssets::Asset` (r:1 w:1) /// Proof: `PoolAssets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) @@ -97,20 +93,18 @@ impl WeightInfo for SubstrateWeight { /// Proof: `PoolAssets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) fn add_liquidity() -> Weight { // Proof Size summary in bytes: - // Measured: `1382` - // Estimated: `6208` - // Minimum execution time: 157_310_000 picoseconds. - Weight::from_parts(161_547_000, 6208) - .saturating_add(T::DbWeight::get().reads(8_u64)) - .saturating_add(T::DbWeight::get().writes(7_u64)) + // Measured: `1761` + // Estimated: `11426` + // Minimum execution time: 1_636_000_000 picoseconds. + Weight::from_parts(1_894_000_000, 11426) + .saturating_add(T::DbWeight::get().reads(10_u64)) + .saturating_add(T::DbWeight::get().writes(9_u64)) } /// Storage: `AssetConversion::Pools` (r:1 w:0) /// Proof: `AssetConversion::Pools` (`max_values`: None, `max_size`: Some(30), added: 2505, mode: `MaxEncodedLen`) - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:1 w:1) + /// Storage: `Assets::Asset` (r:2 w:2) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:2 w:2) + /// Storage: `Assets::Account` (r:4 w:4) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) /// Storage: `PoolAssets::Asset` (r:1 w:1) /// Proof: `PoolAssets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) @@ -118,42 +112,46 @@ impl WeightInfo for SubstrateWeight { /// Proof: `PoolAssets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) fn remove_liquidity() -> Weight { // Proof Size summary in bytes: - // Measured: `1371` - // Estimated: `6208` - // Minimum execution time: 142_769_000 picoseconds. - Weight::from_parts(145_139_000, 6208) - .saturating_add(T::DbWeight::get().reads(7_u64)) - .saturating_add(T::DbWeight::get().writes(6_u64)) + // Measured: `1750` + // Estimated: `11426` + // Minimum execution time: 1_507_000_000 picoseconds. 
+ Weight::from_parts(1_524_000_000, 11426) + .saturating_add(T::DbWeight::get().reads(9_u64)) + .saturating_add(T::DbWeight::get().writes(8_u64)) } - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:3 w:3) + /// Storage: `Assets::Asset` (r:4 w:4) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:6 w:6) + /// Storage: `Assets::Account` (r:8 w:8) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) - fn swap_exact_tokens_for_tokens() -> Weight { + /// The range of component `n` is `[2, 4]`. + fn swap_exact_tokens_for_tokens(n: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `1738` - // Estimated: `16644` - // Minimum execution time: 213_186_000 picoseconds. - Weight::from_parts(217_471_000, 16644) - .saturating_add(T::DbWeight::get().reads(10_u64)) - .saturating_add(T::DbWeight::get().writes(10_u64)) + // Measured: `0 + n * (522 ±0)` + // Estimated: `990 + n * (5218 ±0)` + // Minimum execution time: 937_000_000 picoseconds. + Weight::from_parts(941_000_000, 990) + // Standard Error: 40_863_477 + .saturating_add(Weight::from_parts(205_862_068, 0).saturating_mul(n.into())) + .saturating_add(T::DbWeight::get().reads((3_u64).saturating_mul(n.into()))) + .saturating_add(T::DbWeight::get().writes((3_u64).saturating_mul(n.into()))) + .saturating_add(Weight::from_parts(0, 5218).saturating_mul(n.into())) } - /// Storage: `Assets::Asset` (r:3 w:3) + /// Storage: `Assets::Asset` (r:4 w:4) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:6 w:6) + /// Storage: `Assets::Account` (r:8 w:8) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - fn swap_tokens_for_exact_tokens() -> Weight { + /// The range of component `n` is `[2, 4]`. + fn swap_tokens_for_exact_tokens(n: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `1738` - // Estimated: `16644` - // Minimum execution time: 213_793_000 picoseconds. - Weight::from_parts(218_584_000, 16644) - .saturating_add(T::DbWeight::get().reads(10_u64)) - .saturating_add(T::DbWeight::get().writes(10_u64)) + // Measured: `0 + n * (522 ±0)` + // Estimated: `990 + n * (5218 ±0)` + // Minimum execution time: 935_000_000 picoseconds. 
+ Weight::from_parts(947_000_000, 990) + // Standard Error: 46_904_620 + .saturating_add(Weight::from_parts(218_275_862, 0).saturating_mul(n.into())) + .saturating_add(T::DbWeight::get().reads((3_u64).saturating_mul(n.into()))) + .saturating_add(T::DbWeight::get().writes((3_u64).saturating_mul(n.into()))) + .saturating_add(Weight::from_parts(0, 5218).saturating_mul(n.into())) } } @@ -163,9 +161,9 @@ impl WeightInfo for () { /// Proof: `AssetConversion::Pools` (`max_values`: None, `max_size`: Some(30), added: 2505, mode: `MaxEncodedLen`) /// Storage: `System::Account` (r:2 w:2) /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:1 w:1) + /// Storage: `Assets::Account` (r:2 w:2) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:1 w:1) + /// Storage: `Assets::Asset` (r:2 w:2) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) /// Storage: `AssetConversion::NextPoolAssetId` (r:1 w:1) /// Proof: `AssetConversion::NextPoolAssetId` (`max_values`: Some(1), `max_size`: Some(4), added: 499, mode: `MaxEncodedLen`) @@ -175,20 +173,18 @@ impl WeightInfo for () { /// Proof: `PoolAssets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) fn create_pool() -> Weight { // Proof Size summary in bytes: - // Measured: `729` - // Estimated: `6196` - // Minimum execution time: 131_688_000 picoseconds. - Weight::from_parts(134_092_000, 6196) - .saturating_add(RocksDbWeight::get().reads(8_u64)) - .saturating_add(RocksDbWeight::get().writes(8_u64)) + // Measured: `1081` + // Estimated: `6360` + // Minimum execution time: 1_576_000_000 picoseconds. + Weight::from_parts(1_668_000_000, 6360) + .saturating_add(RocksDbWeight::get().reads(10_u64)) + .saturating_add(RocksDbWeight::get().writes(10_u64)) } /// Storage: `AssetConversion::Pools` (r:1 w:0) /// Proof: `AssetConversion::Pools` (`max_values`: None, `max_size`: Some(30), added: 2505, mode: `MaxEncodedLen`) - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:1 w:1) + /// Storage: `Assets::Asset` (r:2 w:2) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:2 w:2) + /// Storage: `Assets::Account` (r:4 w:4) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) /// Storage: `PoolAssets::Asset` (r:1 w:1) /// Proof: `PoolAssets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) @@ -196,20 +192,18 @@ impl WeightInfo for () { /// Proof: `PoolAssets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) fn add_liquidity() -> Weight { // Proof Size summary in bytes: - // Measured: `1382` - // Estimated: `6208` - // Minimum execution time: 157_310_000 picoseconds. - Weight::from_parts(161_547_000, 6208) - .saturating_add(RocksDbWeight::get().reads(8_u64)) - .saturating_add(RocksDbWeight::get().writes(7_u64)) + // Measured: `1761` + // Estimated: `11426` + // Minimum execution time: 1_636_000_000 picoseconds. 
+ Weight::from_parts(1_894_000_000, 11426) + .saturating_add(RocksDbWeight::get().reads(10_u64)) + .saturating_add(RocksDbWeight::get().writes(9_u64)) } /// Storage: `AssetConversion::Pools` (r:1 w:0) /// Proof: `AssetConversion::Pools` (`max_values`: None, `max_size`: Some(30), added: 2505, mode: `MaxEncodedLen`) - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:1 w:1) + /// Storage: `Assets::Asset` (r:2 w:2) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:2 w:2) + /// Storage: `Assets::Account` (r:4 w:4) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) /// Storage: `PoolAssets::Asset` (r:1 w:1) /// Proof: `PoolAssets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) @@ -217,41 +211,45 @@ impl WeightInfo for () { /// Proof: `PoolAssets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) fn remove_liquidity() -> Weight { // Proof Size summary in bytes: - // Measured: `1371` - // Estimated: `6208` - // Minimum execution time: 142_769_000 picoseconds. - Weight::from_parts(145_139_000, 6208) - .saturating_add(RocksDbWeight::get().reads(7_u64)) - .saturating_add(RocksDbWeight::get().writes(6_u64)) + // Measured: `1750` + // Estimated: `11426` + // Minimum execution time: 1_507_000_000 picoseconds. + Weight::from_parts(1_524_000_000, 11426) + .saturating_add(RocksDbWeight::get().reads(9_u64)) + .saturating_add(RocksDbWeight::get().writes(8_u64)) } - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - /// Storage: `Assets::Asset` (r:3 w:3) + /// Storage: `Assets::Asset` (r:4 w:4) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:6 w:6) + /// Storage: `Assets::Account` (r:8 w:8) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) - fn swap_exact_tokens_for_tokens() -> Weight { + /// The range of component `n` is `[2, 4]`. + fn swap_exact_tokens_for_tokens(n: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `1738` - // Estimated: `16644` - // Minimum execution time: 213_186_000 picoseconds. - Weight::from_parts(217_471_000, 16644) - .saturating_add(RocksDbWeight::get().reads(10_u64)) - .saturating_add(RocksDbWeight::get().writes(10_u64)) + // Measured: `0 + n * (522 ±0)` + // Estimated: `990 + n * (5218 ±0)` + // Minimum execution time: 937_000_000 picoseconds. 
+ Weight::from_parts(941_000_000, 990) + // Standard Error: 40_863_477 + .saturating_add(Weight::from_parts(205_862_068, 0).saturating_mul(n.into())) + .saturating_add(RocksDbWeight::get().reads((3_u64).saturating_mul(n.into()))) + .saturating_add(RocksDbWeight::get().writes((3_u64).saturating_mul(n.into()))) + .saturating_add(Weight::from_parts(0, 5218).saturating_mul(n.into())) } - /// Storage: `Assets::Asset` (r:3 w:3) + /// Storage: `Assets::Asset` (r:4 w:4) /// Proof: `Assets::Asset` (`max_values`: None, `max_size`: Some(210), added: 2685, mode: `MaxEncodedLen`) - /// Storage: `Assets::Account` (r:6 w:6) + /// Storage: `Assets::Account` (r:8 w:8) /// Proof: `Assets::Account` (`max_values`: None, `max_size`: Some(134), added: 2609, mode: `MaxEncodedLen`) - /// Storage: `System::Account` (r:1 w:1) - /// Proof: `System::Account` (`max_values`: None, `max_size`: Some(128), added: 2603, mode: `MaxEncodedLen`) - fn swap_tokens_for_exact_tokens() -> Weight { + /// The range of component `n` is `[2, 4]`. + fn swap_tokens_for_exact_tokens(n: u32, ) -> Weight { // Proof Size summary in bytes: - // Measured: `1738` - // Estimated: `16644` - // Minimum execution time: 213_793_000 picoseconds. - Weight::from_parts(218_584_000, 16644) - .saturating_add(RocksDbWeight::get().reads(10_u64)) - .saturating_add(RocksDbWeight::get().writes(10_u64)) + // Measured: `0 + n * (522 ±0)` + // Estimated: `990 + n * (5218 ±0)` + // Minimum execution time: 935_000_000 picoseconds. + Weight::from_parts(947_000_000, 990) + // Standard Error: 46_904_620 + .saturating_add(Weight::from_parts(218_275_862, 0).saturating_mul(n.into())) + .saturating_add(RocksDbWeight::get().reads((3_u64).saturating_mul(n.into()))) + .saturating_add(RocksDbWeight::get().writes((3_u64).saturating_mul(n.into()))) + .saturating_add(Weight::from_parts(0, 5218).saturating_mul(n.into())) } } diff --git a/substrate/frame/assets/Cargo.toml b/substrate/frame/assets/Cargo.toml index 87709af27274..7b0af2421eaa 100644 --- a/substrate/frame/assets/Cargo.toml +++ b/substrate/frame/assets/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME asset management pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/assets/src/benchmarking.rs b/substrate/frame/assets/src/benchmarking.rs index f844037c238f..49477f7f8d7c 100644 --- a/substrate/frame/assets/src/benchmarking.rs +++ b/substrate/frame/assets/src/benchmarking.rs @@ -103,7 +103,7 @@ fn add_sufficients, I: 'static>(minter: T::AccountId, n: u32) { fn add_approvals, I: 'static>(minter: T::AccountId, n: u32) { let asset_id = default_asset_id::(); - T::Currency::deposit_creating( + let _ = T::Currency::deposit_creating( &minter, T::ApprovalDeposit::get() * n.into() + T::Currency::minimum_balance(), ); diff --git a/substrate/frame/assets/src/functions.rs b/substrate/frame/assets/src/functions.rs index f258ebc6edcd..8044b5d5bfd1 100644 --- a/substrate/frame/assets/src/functions.rs +++ b/substrate/frame/assets/src/functions.rs @@ -78,7 +78,7 @@ impl, I: 'static> Pallet { } } else if d.is_sufficient { frame_system::Pallet::::inc_sufficients(who); - d.sufficients += 1; + d.sufficients.saturating_inc(); ExistenceReason::Sufficient } else { frame_system::Pallet::::inc_consumers(who) diff --git a/substrate/frame/assets/src/lib.rs b/substrate/frame/assets/src/lib.rs index 3672a60a892e..fee86ac0c8ac 100644 --- a/substrate/frame/assets/src/lib.rs +++ b/substrate/frame/assets/src/lib.rs @@ -1649,8 
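The `swap_*` weights above now scale with a component `n` (ranged `[2, 4]`, presumably the number of assets in the swap path) rather than being flat: ref-time is a base plus a per-unit slope, and storage reads/writes grow by three per unit of `n`. A quick arithmetic check against the generated coefficients for `swap_exact_tokens_for_tokens`:

// ref_time(n) = 941_000_000 ps + 205_862_068 ps * n, reads = writes = 3 * n.
// Numbers are copied from the debug-build table above (STEPS=5, REPEAT=2),
// so they are indicative only, not production weights.
fn swap_exact_tokens_ref_time_ps(n: u64) -> u64 {
    941_000_000 + 205_862_068 * n
}

fn main() {
    assert_eq!(swap_exact_tokens_ref_time_ps(2), 1_352_724_136);
    assert_eq!(swap_exact_tokens_ref_time_ps(4), 1_764_448_272);
}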
+1649,20 @@ pub mod pallet { T::AssetAccountDeposit::get() } - fn touch(asset: T::AssetId, who: T::AccountId, depositor: T::AccountId) -> DispatchResult { - Self::do_touch(asset, who, depositor, false) + fn should_touch(asset: T::AssetId, who: &T::AccountId) -> bool { + match Asset::::get(&asset) { + Some(info) if info.is_sufficient => false, + Some(_) => !Account::::contains_key(asset, who), + _ => true, + } + } + + fn touch( + asset: T::AssetId, + who: &T::AccountId, + depositor: &T::AccountId, + ) -> DispatchResult { + Self::do_touch(asset, who.clone(), depositor.clone(), false) } } diff --git a/substrate/frame/assets/src/tests.rs b/substrate/frame/assets/src/tests.rs index aa6794026222..e9e2fe57a486 100644 --- a/substrate/frame/assets/src/tests.rs +++ b/substrate/frame/assets/src/tests.rs @@ -29,6 +29,8 @@ use pallet_balances::Error as BalancesError; use sp_io::storage; use sp_runtime::{traits::ConvertInto, TokenError}; +mod sets; + fn asset_ids() -> Vec { let mut s: Vec<_> = Assets::asset_ids().collect(); s.sort(); diff --git a/substrate/frame/assets/src/tests/sets.rs b/substrate/frame/assets/src/tests/sets.rs new file mode 100644 index 000000000000..bdff5175185f --- /dev/null +++ b/substrate/frame/assets/src/tests/sets.rs @@ -0,0 +1,346 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Tests for [`ItemOf`], [`fungible::UnionOf`] and [`fungibles::UnionOf`] set types. + +use super::*; +use frame_support::{ + parameter_types, + traits::{ + fungible, + fungible::ItemOf, + fungibles, + tokens::{ + fungibles::{ + Balanced as FungiblesBalanced, Create as FungiblesCreate, + Inspect as FungiblesInspect, Mutate as FungiblesMutate, + }, + Fortitude, Precision, Preservation, + }, + }, +}; +use sp_runtime::{traits::ConvertToValue, Either}; + +const FIRST_ASSET: u32 = 0; +const UNKNOWN_ASSET: u32 = 10; + +parameter_types! { + pub const LeftAsset: Either<(), u32> = Either::Left(()); + pub const RightAsset: Either = Either::Right(()); + pub const RightUnitAsset: Either<(), ()> = Either::Right(()); +} + +/// Implementation of the `fungible` traits through the [`ItemOf`] type, specifically for a +/// single asset class from [`T`] identified by [`FIRST_ASSET`]. +type FirstFungible = ItemOf, u64>; + +/// Implementation of the `fungible` traits through the [`ItemOf`] type, specifically for a +/// single asset class from [`T`] identified by [`UNKNOWN_ASSET`]. +type UnknownFungible = ItemOf, u64>; + +/// Implementation of `fungibles` traits using [`fungibles::UnionOf`] that exclusively utilizes +/// the [`FirstFungible`] from the left. +type LeftFungible = fungible::UnionOf, T, ConvertToValue, (), u64>; + +/// Implementation of `fungibles` traits using [`fungibles::UnionOf`] that exclusively utilizes +/// the [`LeftFungible`] from the right. 
+type RightFungible = + fungible::UnionOf, LeftFungible, ConvertToValue, (), u64>; + +/// Implementation of `fungibles` traits using [`fungibles::UnionOf`] that exclusively utilizes +/// the [`RightFungible`] from the left. +type LeftFungibles = fungibles::UnionOf, T, ConvertToValue, (), u64>; + +/// Implementation of `fungibles` traits using [`fungibles::UnionOf`] that exclusively utilizes +/// the [`LeftFungibles`] from the right. +/// +/// By using this type, we can navigate through each branch of [`fungible::UnionOf`], +/// [`fungibles::UnionOf`], and [`ItemOf`] to access the underlying `fungibles::*` +/// implementation provided by the pallet. +type First = fungibles::UnionOf, ConvertToValue, (), u64>; + +#[test] +fn deposit_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1 = 0; + let account1 = 1; + let account2 = 2; + + assert_ok!(>::create(asset1, account1, true, 1)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let imb = First::::deposit((), &account2, 50, Precision::Exact).unwrap(); + assert_eq!(First::::balance((), &account2), 50); + assert_eq!(First::::total_issuance(()), 100); + + assert_eq!(imb.peek(), 50); + + let (imb1, imb2) = imb.split(30); + assert_eq!(imb1.peek(), 30); + assert_eq!(imb2.peek(), 20); + + drop(imb2); + assert_eq!(First::::total_issuance(()), 120); + + assert!(First::::settle(&account1, imb1, Preservation::Preserve).is_ok()); + assert_eq!(First::::balance((), &account1), 70); + assert_eq!(First::::balance((), &account2), 50); + assert_eq!(First::::total_issuance(()), 120); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} + +#[test] +fn issue_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1: u32 = 0; + let account1: u64 = 1; + + assert_ok!(>::create(asset1, account1, true, 1)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + + assert_eq!(First::::balance((), &account1), 100); + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let imb = First::::issue((), 100); + assert_eq!(First::::total_issuance(()), 200); + assert_eq!(imb.peek(), 100); + + let (imb1, imb2) = imb.split(30); + assert_eq!(imb1.peek(), 30); + assert_eq!(imb2.peek(), 70); + + drop(imb2); + assert_eq!(First::::total_issuance(()), 130); + + assert!(First::::resolve(&account1, imb1).is_ok()); + assert_eq!(First::::balance((), &account1), 130); + assert_eq!(First::::total_issuance(()), 130); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} + +#[test] +fn pair_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1: u32 = 0; + let account1: u64 = 1; + + assert_ok!(>::create(asset1, account1, true, 1)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + + assert_eq!(First::::balance((), &account1), 100); + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let (debt, credit) = First::::pair((), 100); + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(debt.peek(), 100); + assert_eq!(credit.peek(), 100); + + let (debt1, debt2) = debt.split(30); + assert_eq!(debt1.peek(), 30); + assert_eq!(debt2.peek(), 70); + + drop(debt2); + assert_eq!(First::::total_issuance(()), 170); + + assert!(First::::settle(&account1, debt1, Preservation::Preserve).is_ok()); + 
assert_eq!(First::::balance((), &account1), 70); + assert_eq!(First::::total_issuance(()), 170); + + let (credit1, credit2) = credit.split(40); + assert_eq!(credit1.peek(), 40); + assert_eq!(credit2.peek(), 60); + + drop(credit2); + assert_eq!(First::::total_issuance(()), 110); + + assert!(First::::resolve(&account1, credit1).is_ok()); + assert_eq!(First::::balance((), &account1), 110); + assert_eq!(First::::total_issuance(()), 110); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} + +#[test] +fn rescind_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1: u32 = 0; + let account1: u64 = 1; + + assert_ok!(>::create(asset1, account1, true, 1)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let imb = First::::rescind((), 20); + assert_eq!(First::::total_issuance(()), 80); + + assert_eq!(imb.peek(), 20); + + let (imb1, imb2) = imb.split(15); + assert_eq!(imb1.peek(), 15); + assert_eq!(imb2.peek(), 5); + + drop(imb2); + assert_eq!(First::::total_issuance(()), 85); + + assert!(First::::settle(&account1, imb1, Preservation::Preserve).is_ok()); + assert_eq!(First::::balance((), &account1), 85); + assert_eq!(First::::total_issuance(()), 85); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} + +#[test] +fn resolve_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1: u32 = 0; + let account1: u64 = 1; + let account2: u64 = 2; + let ed = 11; + + assert_ok!(>::create(asset1, account1, true, ed)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + + assert_eq!(First::::balance((), &account1), 100); + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let imb = First::::issue((), 100); + assert_eq!(First::::total_issuance(()), 200); + assert_eq!(imb.peek(), 100); + + let (imb1, imb2) = imb.split(10); + assert_eq!(imb1.peek(), 10); + assert_eq!(imb2.peek(), 90); + assert_eq!(First::::total_issuance(()), 200); + + // ed requirements not met. + let imb1 = First::::resolve(&account2, imb1).unwrap_err(); + assert_eq!(imb1.peek(), 10); + drop(imb1); + assert_eq!(First::::total_issuance(()), 190); + assert_eq!(First::::balance((), &account2), 0); + + // resolve to new account `2`. + assert_ok!(First::::resolve(&account2, imb2)); + assert_eq!(First::::total_issuance(()), 190); + assert_eq!(First::::balance((), &account2), 90); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} + +#[test] +fn settle_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1: u32 = 0; + let account1: u64 = 1; + let account2: u64 = 2; + let ed = 11; + + assert_ok!(>::create(asset1, account1, true, ed)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + assert_ok!(Assets::mint_into(asset1, &account2, 100)); + + assert_eq!(First::::balance((), &account2), 100); + assert_eq!(First::::total_issuance(()), 200); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let imb = First::::rescind((), 100); + assert_eq!(First::::total_issuance(()), 100); + assert_eq!(imb.peek(), 100); + + let (imb1, imb2) = imb.split(10); + assert_eq!(imb1.peek(), 10); + assert_eq!(imb2.peek(), 90); + assert_eq!(First::::total_issuance(()), 100); + + // ed requirements not met. 
+ let imb2 = First::::settle(&account2, imb2, Preservation::Preserve).unwrap_err(); + assert_eq!(imb2.peek(), 90); + drop(imb2); + assert_eq!(First::::total_issuance(()), 190); + assert_eq!(First::::balance((), &account2), 100); + + // settle to account `1`. + assert_ok!(First::::settle(&account2, imb1, Preservation::Preserve)); + assert_eq!(First::::total_issuance(()), 190); + assert_eq!(First::::balance((), &account2), 90); + + let imb = First::::rescind((), 85); + assert_eq!(First::::total_issuance(()), 105); + assert_eq!(imb.peek(), 85); + + // settle to account `1` and expect some dust. + let imb = First::::settle(&account2, imb, Preservation::Expendable).unwrap(); + assert_eq!(imb.peek(), 5); + assert_eq!(First::::total_issuance(()), 105); + assert_eq!(First::::balance((), &account2), 0); + + drop(imb); + assert_eq!(First::::total_issuance(()), 100); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} + +#[test] +fn withdraw_from_set_types_works() { + new_test_ext().execute_with(|| { + let asset1 = 0; + let account1 = 1; + let account2 = 2; + + assert_ok!(>::create(asset1, account1, true, 1)); + assert_ok!(Assets::mint_into(asset1, &account1, 100)); + assert_ok!(Assets::mint_into(asset1, &account2, 100)); + + assert_eq!(First::::total_issuance(()), 200); + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + + let imb = First::::withdraw( + (), + &account2, + 50, + Precision::Exact, + Preservation::Preserve, + Fortitude::Polite, + ) + .unwrap(); + assert_eq!(First::::balance((), &account2), 50); + assert_eq!(First::::total_issuance(()), 200); + + assert_eq!(imb.peek(), 50); + drop(imb); + assert_eq!(First::::total_issuance(()), 150); + assert_eq!(First::::balance((), &account2), 50); + + assert_eq!(First::::total_issuance(()), Assets::total_issuance(asset1)); + }); +} diff --git a/substrate/frame/aura/Cargo.toml b/substrate/frame/aura/Cargo.toml index 1b9c1c2573bc..3ae42fe21c16 100644 --- a/substrate/frame/aura/Cargo.toml +++ b/substrate/frame/aura/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME AURA consensus pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/authorship/Cargo.toml b/substrate/frame/authorship/Cargo.toml index 737c8da1361c..41d4cf139721 100644 --- a/substrate/frame/authorship/Cargo.toml +++ b/substrate/frame/authorship/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/babe/Cargo.toml b/substrate/frame/babe/Cargo.toml index 50c633f6a9fd..e74f6f4f110d 100644 --- a/substrate/frame/babe/Cargo.toml +++ b/substrate/frame/babe/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Consensus extension module for BABE consensus. Collects on-chain randomness from VRF outputs and manages epoch transitions." 
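The assertions above around `drop(imb2)` follow from the imbalance model used by `ItemOf` and the `UnionOf` adapters: issuing a credit raises total issuance immediately, and whatever part of the credit is dropped un-issues itself again (debts from `rescind` behave symmetrically). A minimal sketch of that drop behaviour, using a thread-local counter in place of pallet storage and a hypothetical `CreditSketch` type:

use std::cell::Cell;

thread_local! {
    // Stand-in for the pallet's total-issuance storage item.
    static TOTAL_ISSUANCE: Cell<u64> = Cell::new(0);
}

struct CreditSketch(u64);

// `issue` bumps total issuance up front and hands out a credit for it.
fn issue(amount: u64) -> CreditSketch {
    TOTAL_ISSUANCE.with(|t| t.set(t.get() + amount));
    CreditSketch(amount)
}

impl Drop for CreditSketch {
    // An unused credit burns itself on drop, shrinking total issuance again.
    fn drop(&mut self) {
        TOTAL_ISSUANCE.with(|t| t.set(t.get() - self.0));
    }
}

fn main() {
    let credit = issue(100);
    assert_eq!(TOTAL_ISSUANCE.with(|t| t.get()), 100);
    drop(credit);
    assert_eq!(TOTAL_ISSUANCE.with(|t| t.get()), 0);
}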
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/babe/src/mock.rs b/substrate/frame/babe/src/mock.rs index fd376bff1fca..c7224823501c 100644 --- a/substrate/frame/babe/src/mock.rs +++ b/substrate/frame/babe/src/mock.rs @@ -184,6 +184,7 @@ impl pallet_staking::Config for Test { type TargetList = pallet_staking::UseValidatorsMap; type NominationsQuota = FixedNominationsQuota<16>; type MaxUnlockingChunks = ConstU32<32>; + type MaxControllersInDeprecationBatch = ConstU32<100>; type HistoryDepth = ConstU32<84>; type EventListeners = (); type BenchmarkingConfig = pallet_staking::TestBenchmarkingConfig; diff --git a/substrate/frame/bags-list/Cargo.toml b/substrate/frame/bags-list/Cargo.toml index ae369603b788..5b6a13edad72 100644 --- a/substrate/frame/bags-list/Cargo.toml +++ b/substrate/frame/bags-list/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "FRAME pallet bags list" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -32,7 +35,7 @@ frame-election-provider-support = { path = "../election-provider-support", defau # third party log = { version = "0.4.17", default-features = false } docify = "0.2.6" -aquamarine = { version = "0.4" } +aquamarine = { version = "0.5.0" } # Optional imports for benchmarking frame-benchmarking = { path = "../benchmarking", default-features = false, optional = true } diff --git a/substrate/frame/bags-list/fuzzer/Cargo.toml b/substrate/frame/bags-list/fuzzer/Cargo.toml index f3785dd1beff..20760141b236 100644 --- a/substrate/frame/bags-list/fuzzer/Cargo.toml +++ b/substrate/frame/bags-list/fuzzer/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Fuzzer for FRAME pallet bags list" publish = false +[lints] +workspace = true + [dependencies] honggfuzz = "0.5" rand = { version = "0.8", features = ["small_rng", "std"] } diff --git a/substrate/frame/bags-list/remote-tests/Cargo.toml b/substrate/frame/bags-list/remote-tests/Cargo.toml index 8652ff8d26d7..b065e4305a4d 100644 --- a/substrate/frame/bags-list/remote-tests/Cargo.toml +++ b/substrate/frame/bags-list/remote-tests/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME pallet bags list remote test" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/balances/Cargo.toml b/substrate/frame/balances/Cargo.toml index a148684e1fb7..23fe6e583222 100644 --- a/substrate/frame/balances/Cargo.toml +++ b/substrate/frame/balances/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME pallet to manage balances" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/balances/src/impl_fungible.rs b/substrate/frame/balances/src/impl_fungible.rs index 52975f4622ea..ca56f1a16077 100644 --- a/substrate/frame/balances/src/impl_fungible.rs +++ b/substrate/frame/balances/src/impl_fungible.rs @@ -18,10 +18,13 @@ //! Implementation of `fungible` traits for Balances pallet. 
use super::*; -use frame_support::traits::tokens::{ - Fortitude, - Preservation::{self, Preserve, Protect}, - Provenance::{self, Minted}, +use frame_support::traits::{ + tokens::{ + Fortitude, + Preservation::{self, Preserve, Protect}, + Provenance::{self, Minted}, + }, + AccountTouch, }; impl, I: 'static> fungible::Inspect for Pallet { @@ -305,3 +308,16 @@ impl, I: 'static> fungible::Balanced for Pallet } impl, I: 'static> fungible::BalancedHold for Pallet {} + +impl, I: 'static> AccountTouch<(), T::AccountId> for Pallet { + type Balance = T::Balance; + fn deposit_required(_: ()) -> Self::Balance { + Self::Balance::zero() + } + fn should_touch(_: (), _: &T::AccountId) -> bool { + false + } + fn touch(_: (), _: &T::AccountId, _: &T::AccountId) -> DispatchResult { + Ok(()) + } +} diff --git a/substrate/frame/balances/src/tests/fungible_tests.rs b/substrate/frame/balances/src/tests/fungible_tests.rs index 400d43367f66..a7856f91f414 100644 --- a/substrate/frame/balances/src/tests/fungible_tests.rs +++ b/substrate/frame/balances/src/tests/fungible_tests.rs @@ -132,7 +132,7 @@ fn unbalanced_trait_decrease_balance_works_2() { assert_eq!(Balances::total_balance_on_hold(&1337), 60); assert_noop!( Balances::decrease_balance(&1337, 40, Exact, Expendable, Polite), - Error::::InsufficientBalance + TokenError::FundsUnavailable ); assert_eq!(Balances::decrease_balance(&1337, 39, Exact, Expendable, Polite), Ok(39)); assert_eq!(>::balance(&1337), 1); diff --git a/substrate/frame/benchmarking/Cargo.toml b/substrate/frame/benchmarking/Cargo.toml index 9cfaac1abfd0..42001c2d08ca 100644 --- a/substrate/frame/benchmarking/Cargo.toml +++ b/substrate/frame/benchmarking/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Macro for benchmarking a FRAME runtime." 
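Two pieces above make `AccountTouch` usable as a generic pre-flight check: pallet-assets' new `should_touch` only asks for a touch when the asset is non-sufficient and the account does not exist yet, while the Balances implementation is a no-op (zero deposit, nothing ever needs touching). A standalone restatement of the assets-side decision, with plain stand-ins for the pallet storage:

struct AssetInfo {
    is_sufficient: bool, // sufficient assets create accounts on their own
}

// Mirrors the `should_touch` match added to pallet-assets above: touch only
// non-sufficient assets for accounts that do not exist yet.
fn should_touch(asset: Option<&AssetInfo>, account_exists: bool) -> bool {
    match asset {
        Some(info) if info.is_sufficient => false,
        Some(_) => !account_exists,
        // Unknown asset: report `true` and let the subsequent `touch` fail.
        None => true,
    }
}

fn main() {
    assert!(!should_touch(Some(&AssetInfo { is_sufficient: true }), false));
    assert!(should_touch(Some(&AssetInfo { is_sufficient: false }), false));
    assert!(!should_touch(Some(&AssetInfo { is_sufficient: false }), true));
}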
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +21,7 @@ linregress = { version = "0.5.1", optional = true } log = { version = "0.4.17", default-features = false } paste = "1.0" scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", optional = true } +serde = { version = "1.0.195", optional = true } frame-support = { path = "../support", default-features = false } frame-support-procedural = { path = "../support/procedural", default-features = false } frame-system = { path = "../system", default-features = false } diff --git a/substrate/frame/benchmarking/pov/Cargo.toml b/substrate/frame/benchmarking/pov/Cargo.toml index 1ec028285587..7c36b2f8eec3 100644 --- a/substrate/frame/benchmarking/pov/Cargo.toml +++ b/substrate/frame/benchmarking/pov/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Pallet for testing FRAME PoV benchmarking" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/election-provider-support/Cargo.toml b/substrate/frame/election-provider-support/Cargo.toml index 7062e54cdbca..8182863d7966 100644 --- a/substrate/frame/election-provider-support/Cargo.toml +++ b/substrate/frame/election-provider-support/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "election provider supporting traits" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/election-provider-support/benchmarking/Cargo.toml b/substrate/frame/election-provider-support/benchmarking/Cargo.toml index d3cc5dc09dd9..562191d34f8a 100644 --- a/substrate/frame/election-provider-support/benchmarking/Cargo.toml +++ b/substrate/frame/election-provider-support/benchmarking/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Benchmarking for election provider support onchain config trait" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/election-provider-support/solution-type/Cargo.toml b/substrate/frame/election-provider-support/solution-type/Cargo.toml index 5bf84daf52d7..508c049e490c 100644 --- a/substrate/frame/election-provider-support/solution-type/Cargo.toml +++ b/substrate/frame/election-provider-support/solution-type/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "NPoS Solution Type" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -15,10 +18,10 @@ targets = ["x86_64-unknown-linux-gnu"] proc-macro = true [dependencies] -syn = { version = "2.0.39", features = ["full", "visit"] } +syn = { version = "2.0.48", features = ["full", "visit"] } quote = "1.0.28" proc-macro2 = "1.0.56" -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" [dev-dependencies] parity-scale-codec = "3.6.1" @@ -27,4 +30,4 @@ sp-arithmetic = { path = "../../../primitives/arithmetic" } # used by generate_solution_type: frame-election-provider-support = { path = ".." 
} frame-support = { path = "../../support" } -trybuild = "1.0.74" +trybuild = "1.0.88" diff --git a/substrate/frame/election-provider-support/solution-type/fuzzer/Cargo.toml b/substrate/frame/election-provider-support/solution-type/fuzzer/Cargo.toml index 046e93ce7f0f..02123182cee2 100644 --- a/substrate/frame/election-provider-support/solution-type/fuzzer/Cargo.toml +++ b/substrate/frame/election-provider-support/solution-type/fuzzer/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Fuzzer for phragmén solution type implementation." publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/executive/Cargo.toml b/substrate/frame/executive/Cargo.toml index c2a92ad3d729..b98ceb0ba9a5 100644 --- a/substrate/frame/executive/Cargo.toml +++ b/substrate/frame/executive/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME executives engine" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/executive/src/lib.rs b/substrate/frame/executive/src/lib.rs index 75b1e33ca700..0371009a26a6 100644 --- a/substrate/frame/executive/src/lib.rs +++ b/substrate/frame/executive/src/lib.rs @@ -410,9 +410,10 @@ where ) -> Result<(), TryRuntimeError> { match res { Ok(bytes) => { - log::debug!( + log::info!( target: LOG_TARGET, - "decoded the entire state ({bytes} bytes)", + "✅ Entire runtime state decodes without error. {} bytes total.", + bytes ); Ok(()) diff --git a/substrate/frame/glutton/Cargo.toml b/substrate/frame/glutton/Cargo.toml index 70963a92ba33..bc8d48fb9f1f 100644 --- a/substrate/frame/glutton/Cargo.toml +++ b/substrate/frame/glutton/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME pallet for pushing a chain to its weight limits" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/grandpa/Cargo.toml b/substrate/frame/grandpa/Cargo.toml index 7123fa1fa6ba..8206bd18f04c 100644 --- a/substrate/frame/grandpa/Cargo.toml +++ b/substrate/frame/grandpa/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME pallet for GRANDPA finality gadget" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/grandpa/src/mock.rs b/substrate/frame/grandpa/src/mock.rs index c23967596500..510f835de67c 100644 --- a/substrate/frame/grandpa/src/mock.rs +++ b/substrate/frame/grandpa/src/mock.rs @@ -203,6 +203,7 @@ impl pallet_staking::Config for Test { type TargetList = pallet_staking::UseValidatorsMap; type NominationsQuota = pallet_staking::FixedNominationsQuota<16>; type MaxUnlockingChunks = ConstU32<32>; + type MaxControllersInDeprecationBatch = ConstU32<100>; type HistoryDepth = ConstU32<84>; type EventListeners = (); type BenchmarkingConfig = pallet_staking::TestBenchmarkingConfig; diff --git a/substrate/frame/im-online/Cargo.toml b/substrate/frame/im-online/Cargo.toml index 5ec260c9b5be..b5b01858c898 100644 --- a/substrate/frame/im-online/Cargo.toml +++ b/substrate/frame/im-online/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME's I'm online pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/mixnet/Cargo.toml b/substrate/frame/mixnet/Cargo.toml index 
c423f6daab55..520a4ce18552 100644 --- a/substrate/frame/mixnet/Cargo.toml +++ b/substrate/frame/mixnet/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository = "https://github.com/paritytech/substrate/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +21,7 @@ frame-support = { default-features = false, path = "../support" } frame-system = { default-features = false, path = "../system" } log = { version = "0.4.17", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["derive"] } +serde = { version = "1.0.195", default-features = false, features = ["derive"] } sp-application-crypto = { default-features = false, path = "../../primitives/application-crypto" } sp-arithmetic = { default-features = false, path = "../../primitives/arithmetic" } sp-io = { default-features = false, path = "../../primitives/io" } diff --git a/substrate/frame/offences/Cargo.toml b/substrate/frame/offences/Cargo.toml index 6cc002ad6bde..c8c59b710b2f 100644 --- a/substrate/frame/offences/Cargo.toml +++ b/substrate/frame/offences/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME offences pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -16,7 +19,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } log = { version = "0.4.17", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", optional = true } +serde = { version = "1.0.195", optional = true } frame-support = { path = "../support", default-features = false } frame-system = { path = "../system", default-features = false } sp-runtime = { path = "../../primitives/runtime", default-features = false } diff --git a/substrate/frame/offences/benchmarking/Cargo.toml b/substrate/frame/offences/benchmarking/Cargo.toml index ceba5eb43b58..edb1ee5df6b2 100644 --- a/substrate/frame/offences/benchmarking/Cargo.toml +++ b/substrate/frame/offences/benchmarking/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME offences pallet benchmarking" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/offences/benchmarking/src/mock.rs b/substrate/frame/offences/benchmarking/src/mock.rs index 101d1d0c5a58..c1ba61068c66 100644 --- a/substrate/frame/offences/benchmarking/src/mock.rs +++ b/substrate/frame/offences/benchmarking/src/mock.rs @@ -182,6 +182,7 @@ impl pallet_staking::Config for Test { type TargetList = pallet_staking::UseValidatorsMap; type NominationsQuota = pallet_staking::FixedNominationsQuota<16>; type MaxUnlockingChunks = ConstU32<32>; + type MaxControllersInDeprecationBatch = ConstU32<100>; type HistoryDepth = ConstU32<84>; type EventListeners = (); type BenchmarkingConfig = pallet_staking::TestBenchmarkingConfig; diff --git a/substrate/frame/preimage/Cargo.toml b/substrate/frame/preimage/Cargo.toml index 1806976ac963..2aa21d2a7136 100644 --- a/substrate/frame/preimage/Cargo.toml +++ b/substrate/frame/preimage/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "FRAME pallet for storing preimages of hashes" 
+[lints] +workspace = true + [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } diff --git a/substrate/frame/remark/Cargo.toml b/substrate/frame/remark/Cargo.toml index a58473fe97eb..3ac57025ef94 100644 --- a/substrate/frame/remark/Cargo.toml +++ b/substrate/frame/remark/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Remark storage pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/root-testing/Cargo.toml b/substrate/frame/root-testing/Cargo.toml index 896d2f6ef550..91d8f0fc2bf8 100644 --- a/substrate/frame/root-testing/Cargo.toml +++ b/substrate/frame/root-testing/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME root testing pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/safe-mode/Cargo.toml b/substrate/frame/safe-mode/Cargo.toml index 1466759b91cb..95ca13d0aa8e 100644 --- a/substrate/frame/safe-mode/Cargo.toml +++ b/substrate/frame/safe-mode/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "FRAME safe-mode pallet" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/sassafras/Cargo.toml b/substrate/frame/sassafras/Cargo.toml index 745297bd416d..ad4c0ba12f0b 100644 --- a/substrate/frame/sassafras/Cargo.toml +++ b/substrate/frame/sassafras/Cargo.toml @@ -10,6 +10,9 @@ description = "Consensus extension module for Sassafras consensus." readme = "README.md" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/sassafras/src/benchmarking.rs b/substrate/frame/sassafras/src/benchmarking.rs index 95a2b4bbce4e..921f2f0793d3 100644 --- a/substrate/frame/sassafras/src/benchmarking.rs +++ b/substrate/frame/sassafras/src/benchmarking.rs @@ -260,7 +260,7 @@ mod benchmarks { // Update metadata let mut meta = TicketsMeta::::get(); meta.unsorted_tickets_count = tickets_count; - TicketsMeta::::set(meta.clone()); + TicketsMeta::::set(meta); log::debug!(target: LOG_TARGET, "Before sort: {:?}", meta); #[block] diff --git a/substrate/frame/scheduler/Cargo.toml b/substrate/frame/scheduler/Cargo.toml index 3f945616e27d..411ee44294e6 100644 --- a/substrate/frame/scheduler/Cargo.toml +++ b/substrate/frame/scheduler/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME Scheduler pallet" readme = "README.md" +[lints] +workspace = true + [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } log = { version = "0.4.17", default-features = false } diff --git a/substrate/frame/scheduler/src/mock.rs b/substrate/frame/scheduler/src/mock.rs index 5e890778cd16..56847a0a4c62 100644 --- a/substrate/frame/scheduler/src/mock.rs +++ b/substrate/frame/scheduler/src/mock.rs @@ -22,7 +22,7 @@ use super::*; use crate as scheduler; use frame_support::{ - ord_parameter_types, parameter_types, + derive_impl, ord_parameter_types, parameter_types, traits::{ ConstU32, ConstU64, Contains, EitherOfDiverse, EqualPrivilegeOnly, OnFinalize, OnInitialize, }, @@ -119,6 +119,8 @@ parameter_types! 
{ Weight::from_parts(2_000_000_000_000, u64::MAX), ); } + +#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] impl system::Config for Test { type BaseCallFilter = BaseFilter; type BlockWeights = BlockWeights; diff --git a/substrate/frame/session/Cargo.toml b/substrate/frame/session/Cargo.toml index 0a997f6ddb3e..4589dbb427a0 100644 --- a/substrate/frame/session/Cargo.toml +++ b/substrate/frame/session/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME sessions pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/session/benchmarking/Cargo.toml b/substrate/frame/session/benchmarking/Cargo.toml index db2b8b72209d..16f85048d8d2 100644 --- a/substrate/frame/session/benchmarking/Cargo.toml +++ b/substrate/frame/session/benchmarking/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME sessions pallet benchmarking" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/session/benchmarking/src/mock.rs b/substrate/frame/session/benchmarking/src/mock.rs index 385aca37afdd..d774c93442e9 100644 --- a/substrate/frame/session/benchmarking/src/mock.rs +++ b/substrate/frame/session/benchmarking/src/mock.rs @@ -176,6 +176,7 @@ impl pallet_staking::Config for Test { type ElectionProvider = onchain::OnChainExecution; type GenesisElectionProvider = Self::ElectionProvider; type MaxUnlockingChunks = ConstU32<32>; + type MaxControllersInDeprecationBatch = ConstU32<100>; type HistoryDepth = ConstU32<84>; type VoterList = pallet_staking::UseNominatorsAndValidatorsMap; type TargetList = pallet_staking::UseValidatorsMap; diff --git a/substrate/frame/staking/Cargo.toml b/substrate/frame/staking/Cargo.toml index 1db6952b6622..80e82bd6b352 100644 --- a/substrate/frame/staking/Cargo.toml +++ b/substrate/frame/staking/Cargo.toml @@ -9,11 +9,14 @@ repository.workspace = true description = "FRAME pallet staking" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"] } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"] } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = [ "derive", ] } diff --git a/substrate/frame/staking/reward-curve/Cargo.toml b/substrate/frame/staking/reward-curve/Cargo.toml index d3a1f439cf90..35bf240907a8 100644 --- a/substrate/frame/staking/reward-curve/Cargo.toml +++ b/substrate/frame/staking/reward-curve/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Reward Curve for FRAME staking pallet" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -15,10 +18,10 @@ targets = ["x86_64-unknown-linux-gnu"] proc-macro = true [dependencies] -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" proc-macro2 = "1.0.56" quote = "1.0.28" -syn = { version = "2.0.39", features = ["full", "visit"] } +syn = { version = "2.0.48", features = ["full", "visit"] } [dev-dependencies] sp-runtime = { path = "../../../primitives/runtime" } diff --git a/substrate/frame/staking/reward-fn/Cargo.toml b/substrate/frame/staking/reward-fn/Cargo.toml index 001c2b626566..80a27cc0f534 100644 --- 
a/substrate/frame/staking/reward-fn/Cargo.toml +++ b/substrate/frame/staking/reward-fn/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Reward function for FRAME staking pallet" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/staking/runtime-api/Cargo.toml b/substrate/frame/staking/runtime-api/Cargo.toml index 061124fd1845..b3fd4cfda017 100644 --- a/substrate/frame/staking/runtime-api/Cargo.toml +++ b/substrate/frame/staking/runtime-api/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "RPC runtime API for transaction payment FRAME pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/staking/src/benchmarking.rs b/substrate/frame/staking/src/benchmarking.rs index 3abc3e291b81..a07cd1600b03 100644 --- a/substrate/frame/staking/src/benchmarking.rs +++ b/substrate/frame/staking/src/benchmarking.rs @@ -26,6 +26,7 @@ use codec::Decode; use frame_election_provider_support::{bounds::DataProviderBounds, SortedListProvider}; use frame_support::{ pallet_prelude::*, + storage::bounded_vec::BoundedVec, traits::{Currency, Get, Imbalance, UnfilteredDispatchable}, }; use sp_runtime::{ @@ -250,7 +251,7 @@ benchmarks! { let original_bonded: BalanceOf = Ledger::::get(&controller).map(|l| l.active).ok_or("ledger not created after")?; - T::Currency::deposit_into_existing(&stash, max_additional).unwrap(); + let _ = T::Currency::deposit_into_existing(&stash, max_additional).unwrap(); whitelist_account!(stash); }: _(RawOrigin::Signed(stash), max_additional) @@ -526,6 +527,39 @@ benchmarks! { assert_eq!(Invulnerables::::get().len(), v as usize); } + deprecate_controller_batch { + // We pass a dynamic number of controllers to the benchmark, up to + // `MaxControllersInDeprecationBatch`. + let i in 0 .. T::MaxControllersInDeprecationBatch::get(); + + let mut controllers: Vec<_> = vec![]; + let mut stashes: Vec<_> = vec![]; + for n in 0..i as u32 { + let (stash, controller) = create_unique_stash_controller::( + n, + 100, + RewardDestination::Staked, + false + )?; + controllers.push(controller); + stashes.push(stash); + } + let bounded_controllers: BoundedVec<_, T::MaxControllersInDeprecationBatch> = + BoundedVec::try_from(controllers.clone()).unwrap(); + }: _(RawOrigin::Root, bounded_controllers) + verify { + for n in 0..i as u32 { + let stash = &stashes[n as usize]; + let controller = &controllers[n as usize]; + // Ledger no longer keyed by controller. + assert_eq!(Ledger::::get(controller), None); + // Bonded now maps to the stash. + assert_eq!(Bonded::::get(stash), Some(stash.clone())); + // Ledger is now keyed by stash. + assert_eq!(Ledger::::get(stash).unwrap().stash, *stash); + } + } + force_unstake { // Slashing Spans let s in 0 .. MAX_SPANS; diff --git a/substrate/frame/staking/src/migrations.rs b/substrate/frame/staking/src/migrations.rs index 4519b869b62f..b30da6a0cfc6 100644 --- a/substrate/frame/staking/src/migrations.rs +++ b/substrate/frame/staking/src/migrations.rs @@ -16,7 +16,7 @@ // See the License for the specific language governing permissions and //! Storage migrations for the Staking pallet. The changelog for this is maintained at -//! [CHANGELOG.md](https://github.com/paritytech/substrate/blob/master/frame/staking/CHANGELOG.md). +//! [CHANGELOG.md](https://github.com/paritytech/polkadot-sdk/blob/master/substrate/frame/staking/CHANGELOG.md). 
use super::*; use frame_election_provider_support::SortedListProvider; diff --git a/substrate/frame/staking/src/mock.rs b/substrate/frame/staking/src/mock.rs index 10ba6c626500..6a66a2678e77 100644 --- a/substrate/frame/staking/src/mock.rs +++ b/substrate/frame/staking/src/mock.rs @@ -123,6 +123,7 @@ parameter_types! { pub static SlashDeferDuration: EraIndex = 0; pub static Period: BlockNumber = 5; pub static Offset: BlockNumber = 0; + pub static MaxControllersInDeprecationBatch: u32 = 5900; } #[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] @@ -313,6 +314,7 @@ impl crate::pallet::pallet::Config for Test { type NominationsQuota = WeightedNominationsQuota<16>; type MaxUnlockingChunks = MaxUnlockingChunks; type HistoryDepth = HistoryDepth; + type MaxControllersInDeprecationBatch = MaxControllersInDeprecationBatch; type EventListeners = EventListenerMock; type BenchmarkingConfig = TestBenchmarkingConfig; type WeightInfo = (); diff --git a/substrate/frame/staking/src/pallet/mod.rs b/substrate/frame/staking/src/pallet/mod.rs index fbbfbab41e1a..1f1975fddb7f 100644 --- a/substrate/frame/staking/src/pallet/mod.rs +++ b/substrate/frame/staking/src/pallet/mod.rs @@ -270,6 +270,9 @@ pub mod pallet { #[pallet::constant] type MaxUnlockingChunks: Get; + /// The maximum amount of controller accounts that can be deprecated in one call. + type MaxControllersInDeprecationBatch: Get; + /// Something that listens to staking updates and performs actions based on the data it /// receives. /// @@ -1324,7 +1327,7 @@ pub mod pallet { pub fn set_controller(origin: OriginFor) -> DispatchResult { let stash = ensure_signed(origin)?; - // the bonded map and ledger are mutated directly as this extrinsic is related to a + // The bonded map and ledger are mutated directly as this extrinsic is related to a // (temporary) passive migration. Self::ledger(StakingAccount::Stash(stash.clone())).map(|ledger| { let controller = ledger.controller() @@ -1332,10 +1335,9 @@ pub mod pallet { .ok_or(Error::::NotController)?; if controller == stash { - // stash is already its own controller. + // Stash is already its own controller. return Err(Error::::AlreadyPaired.into()) } - // update bond and ledger. >::remove(controller); >::insert(&stash, &stash); >::insert(&stash, ledger); @@ -1921,6 +1923,54 @@ pub mod pallet { Ok(Pays::No.into()) } + + /// Updates a batch of controller accounts to their corresponding stash account if they are + /// not the same. Ignores any controller accounts that do not exist, and does not operate if + /// the stash and controller are already the same. + /// + /// Effects will be felt instantly (as soon as this function is completed successfully). + /// + /// The dispatch origin must be `T::AdminOrigin`. + #[pallet::call_index(28)] + #[pallet::weight(T::WeightInfo::deprecate_controller_batch(controllers.len() as u32))] + pub fn deprecate_controller_batch( + origin: OriginFor, + controllers: BoundedVec, + ) -> DispatchResultWithPostInfo { + T::AdminOrigin::ensure_origin(origin)?; + + // Ignore controllers that do not exist or are already the same as stash. + let filtered_batch_with_ledger: Vec<_> = controllers + .iter() + .filter_map(|controller| { + let ledger = Self::ledger(StakingAccount::Controller(controller.clone())); + ledger.ok().map_or(None, |ledger| { + // If the controller `RewardDestination` is still the deprecated + // `Controller` variant, skip deprecating this account. 
+ let payee_deprecated = Payee::::get(&ledger.stash) == { + #[allow(deprecated)] + RewardDestination::Controller + }; + + if ledger.stash != *controller && !payee_deprecated { + Some((controller.clone(), ledger)) + } else { + None + } + }) + }) + .collect(); + + // Update unique pairs. + for (controller, ledger) in filtered_batch_with_ledger { + let stash = ledger.stash.clone(); + + >::insert(&stash, &stash); + >::remove(controller); + >::insert(stash, ledger); + } + Ok(Some(T::WeightInfo::deprecate_controller_batch(controllers.len() as u32)).into()) + } } } diff --git a/substrate/frame/staking/src/tests.rs b/substrate/frame/staking/src/tests.rs index cd6bf935a2cf..b0e6a3c3b1c9 100644 --- a/substrate/frame/staking/src/tests.rs +++ b/substrate/frame/staking/src/tests.rs @@ -6207,7 +6207,7 @@ fn proportional_ledger_slash_works() { #[test] fn reducing_max_unlocking_chunks_abrupt() { // Concern is on validators only - // By Default 11, 10 are stash and ctrl and 21,20 + // By Default 11, 10 are stash and ctlr and 21,20 ExtBuilder::default().build_and_execute(|| { // given a staker at era=10 and MaxUnlockChunks set to 2 MaxUnlockingChunks::set(2); @@ -6868,4 +6868,181 @@ mod ledger { assert_eq!(Payee::::get(&21), RewardDestination::Stash); }) } + + #[test] + fn deprecate_controller_batch_works_full_weight() { + ExtBuilder::default().build_and_execute(|| { + // Given: + + let start = 1001; + let mut controllers: Vec<_> = vec![]; + for n in start..(start + MaxControllersInDeprecationBatch::get()).into() { + let ctlr: u64 = n.into(); + let stash: u64 = (n + 10000).into(); + + Ledger::::insert( + ctlr, + StakingLedger { + controller: None, + total: (10 + ctlr).into(), + active: (10 + ctlr).into(), + ..StakingLedger::default_from(stash) + }, + ); + Bonded::::insert(stash, ctlr); + Payee::::insert(stash, RewardDestination::Staked); + + controllers.push(ctlr); + } + + // When: + + let bounded_controllers: BoundedVec< + _, + ::MaxControllersInDeprecationBatch, + > = BoundedVec::try_from(controllers).unwrap(); + + // Only `AdminOrigin` can sign. + assert_noop!( + Staking::deprecate_controller_batch( + RuntimeOrigin::signed(2), + bounded_controllers.clone() + ), + BadOrigin + ); + + let result = + Staking::deprecate_controller_batch(RuntimeOrigin::root(), bounded_controllers); + assert_ok!(result); + assert_eq!( + result.unwrap().actual_weight.unwrap(), + ::WeightInfo::deprecate_controller_batch( + ::MaxControllersInDeprecationBatch::get() + ) + ); + + // Then: + + for n in start..(start + MaxControllersInDeprecationBatch::get()).into() { + let ctlr: u64 = n.into(); + let stash: u64 = (n + 10000).into(); + + // Ledger no longer keyed by controller. + assert_eq!(Ledger::::get(ctlr), None); + // Bonded now maps to the stash. + assert_eq!(Bonded::::get(stash), Some(stash)); + + // Ledger is now keyed by stash. + let ledger_updated = Ledger::::get(stash).unwrap(); + assert_eq!(ledger_updated.stash, stash); + + // Check `active` and `total` values match the original ledger set by controller. + assert_eq!(ledger_updated.active, (10 + ctlr).into()); + assert_eq!(ledger_updated.total, (10 + ctlr).into()); + } + }) + } + + #[test] + fn deprecate_controller_batch_works_half_weight() { + ExtBuilder::default().build_and_execute(|| { + // Given: + + let start = 1001; + let mut controllers: Vec<_> = vec![]; + for n in start..(start + MaxControllersInDeprecationBatch::get()).into() { + let ctlr: u64 = n.into(); + + // Only half of entries are unique pairs. 
+ let stash: u64 = if n % 2 == 0 { (n + 10000).into() } else { ctlr }; + + Ledger::::insert( + ctlr, + StakingLedger { controller: None, ..StakingLedger::default_from(stash) }, + ); + Bonded::::insert(stash, ctlr); + Payee::::insert(stash, RewardDestination::Staked); + + controllers.push(ctlr); + } + + // When: + let bounded_controllers: BoundedVec< + _, + ::MaxControllersInDeprecationBatch, + > = BoundedVec::try_from(controllers.clone()).unwrap(); + + let result = + Staking::deprecate_controller_batch(RuntimeOrigin::root(), bounded_controllers); + assert_ok!(result); + assert_eq!( + result.unwrap().actual_weight.unwrap(), + ::WeightInfo::deprecate_controller_batch(controllers.len() as u32) + ); + + // Then: + + for n in start..(start + MaxControllersInDeprecationBatch::get()).into() { + let unique_pair = n % 2 == 0; + let ctlr: u64 = n.into(); + let stash: u64 = if unique_pair { (n + 10000).into() } else { ctlr }; + + // Side effect of migration for unique pair. + if unique_pair { + assert_eq!(Ledger::::get(ctlr), None); + } + // Bonded maps to the stash. + assert_eq!(Bonded::::get(stash), Some(stash)); + + // Ledger is keyed by stash. + let ledger_updated = Ledger::::get(stash).unwrap(); + assert_eq!(ledger_updated.stash, stash); + } + }) + } + + #[test] + fn deprecate_controller_batch_skips_unmigrated_controller_payees() { + ExtBuilder::default().build_and_execute(|| { + // Given: + + let stash: u64 = 1000; + let ctlr: u64 = 1001; + + Ledger::::insert( + ctlr, + StakingLedger { controller: None, ..StakingLedger::default_from(stash) }, + ); + Bonded::::insert(stash, ctlr); + #[allow(deprecated)] + Payee::::insert(stash, RewardDestination::Controller); + + // When: + + let bounded_controllers: BoundedVec< + _, + ::MaxControllersInDeprecationBatch, + > = BoundedVec::try_from(vec![ctlr]).unwrap(); + + let result = + Staking::deprecate_controller_batch(RuntimeOrigin::root(), bounded_controllers); + assert_ok!(result); + assert_eq!( + result.unwrap().actual_weight.unwrap(), + ::WeightInfo::deprecate_controller_batch(1 as u32) + ); + + // Then: + + // Ensure deprecation did not happen. + assert_eq!(Ledger::::get(ctlr).is_some(), true); + + // Bonded still keyed by controller. + assert_eq!(Bonded::::get(stash), Some(ctlr)); + + // Ledger is still keyed by controller. + let ledger_updated = Ledger::::get(ctlr).unwrap(); + assert_eq!(ledger_updated.stash, stash); + }) + } } diff --git a/substrate/frame/staking/src/weights.rs b/substrate/frame/staking/src/weights.rs index c7d42e222da3..bc2b05c65da4 100644 --- a/substrate/frame/staking/src/weights.rs +++ b/substrate/frame/staking/src/weights.rs @@ -19,9 +19,9 @@ //! Autogenerated weights for `pallet_staking` //! //! THIS FILE WAS AUTO-GENERATED USING THE SUBSTRATE BENCHMARK CLI VERSION 4.0.0-dev -//! DATE: 2023-11-27, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` +//! DATE: 2023-12-10, STEPS: `50`, REPEAT: `20`, LOW RANGE: `[]`, HIGH RANGE: `[]` //! WORST CASE MAP SIZE: `1000000` -//! HOSTNAME: `runner-yprdrvc7-project-674-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz` +//! HOSTNAME: `runner-itmxxexx-project-674-concurrent-0`, CPU: `Intel(R) Xeon(R) CPU @ 2.60GHz` //!
WASM-EXECUTION: `Compiled`, CHAIN: `Some("dev")`, DB CACHE: `1024` // Executed Command: @@ -67,6 +67,7 @@ pub trait WeightInfo { fn force_new_era() -> Weight; fn force_new_era_always() -> Weight; fn set_invulnerables(v: u32, ) -> Weight; + fn deprecate_controller_batch(i: u32, ) -> Weight; fn force_unstake(s: u32, ) -> Weight; fn cancel_deferred_slash(s: u32, ) -> Weight; fn payout_stakers_alive_staked(n: u32, ) -> Weight; @@ -99,8 +100,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `927` // Estimated: `4764` - // Minimum execution time: 42_895_000 picoseconds. - Weight::from_parts(44_924_000, 4764) + // Minimum execution time: 42_491_000 picoseconds. + Weight::from_parts(44_026_000, 4764) .saturating_add(T::DbWeight::get().reads(3_u64)) .saturating_add(T::DbWeight::get().writes(4_u64)) } @@ -120,8 +121,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1990` // Estimated: `8877` - // Minimum execution time: 87_734_000 picoseconds. - Weight::from_parts(90_762_000, 8877) + // Minimum execution time: 88_756_000 picoseconds. + Weight::from_parts(91_000_000, 8877) .saturating_add(T::DbWeight::get().reads(9_u64)) .saturating_add(T::DbWeight::get().writes(7_u64)) } @@ -147,8 +148,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `2195` // Estimated: `8877` - // Minimum execution time: 90_914_000 picoseconds. - Weight::from_parts(94_156_000, 8877) + // Minimum execution time: 91_331_000 picoseconds. + Weight::from_parts(94_781_000, 8877) .saturating_add(T::DbWeight::get().reads(12_u64)) .saturating_add(T::DbWeight::get().writes(7_u64)) } @@ -167,10 +168,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1115` // Estimated: `4764` - // Minimum execution time: 43_141_000 picoseconds. - Weight::from_parts(45_081_969, 4764) - // Standard Error: 1_010 - .saturating_add(Weight::from_parts(39_539, 0).saturating_mul(s.into())) + // Minimum execution time: 42_495_000 picoseconds. + Weight::from_parts(44_189_470, 4764) + // Standard Error: 1_389 + .saturating_add(Weight::from_parts(47_484, 0).saturating_mul(s.into())) .saturating_add(T::DbWeight::get().reads(5_u64)) .saturating_add(T::DbWeight::get().writes(2_u64)) } @@ -207,10 +208,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `2196 + s * (4 ±0)` // Estimated: `6248 + s * (4 ±0)` - // Minimum execution time: 87_743_000 picoseconds. - Weight::from_parts(96_983_484, 6248) - // Standard Error: 4_271 - .saturating_add(Weight::from_parts(1_382_993, 0).saturating_mul(s.into())) + // Minimum execution time: 89_004_000 picoseconds. + Weight::from_parts(96_677_570, 6248) + // Standard Error: 4_635 + .saturating_add(Weight::from_parts(1_387_718, 0).saturating_mul(s.into())) .saturating_add(T::DbWeight::get().reads(13_u64)) .saturating_add(T::DbWeight::get().writes(11_u64)) .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(s.into()))) @@ -242,8 +243,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1372` // Estimated: `4556` - // Minimum execution time: 51_888_000 picoseconds. - Weight::from_parts(54_353_000, 4556) + // Minimum execution time: 51_532_000 picoseconds. 
+ Weight::from_parts(53_308_000, 4556) .saturating_add(T::DbWeight::get().reads(11_u64)) .saturating_add(T::DbWeight::get().writes(5_u64)) } @@ -256,10 +257,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1280 + k * (569 ±0)` // Estimated: `4556 + k * (3033 ±0)` - // Minimum execution time: 28_944_000 picoseconds. - Weight::from_parts(31_116_533, 4556) - // Standard Error: 11_848 - .saturating_add(Weight::from_parts(6_422_601, 0).saturating_mul(k.into())) + // Minimum execution time: 28_955_000 picoseconds. + Weight::from_parts(29_609_869, 4556) + // Standard Error: 6_793 + .saturating_add(Weight::from_parts(6_412_124, 0).saturating_mul(k.into())) .saturating_add(T::DbWeight::get().reads(1_u64)) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(k.into()))) .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(k.into()))) @@ -292,10 +293,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1866 + n * (102 ±0)` // Estimated: `6248 + n * (2520 ±0)` - // Minimum execution time: 63_921_000 picoseconds. - Weight::from_parts(62_662_863, 6248) - // Standard Error: 15_071 - .saturating_add(Weight::from_parts(3_950_084, 0).saturating_mul(n.into())) + // Minimum execution time: 64_080_000 picoseconds. + Weight::from_parts(61_985_382, 6248) + // Standard Error: 13_320 + .saturating_add(Weight::from_parts(4_030_513, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads(12_u64)) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(T::DbWeight::get().writes(6_u64)) @@ -319,8 +320,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1650` // Estimated: `6248` - // Minimum execution time: 54_605_000 picoseconds. - Weight::from_parts(56_406_000, 6248) + // Minimum execution time: 54_194_000 picoseconds. + Weight::from_parts(55_578_000, 6248) .saturating_add(T::DbWeight::get().reads(8_u64)) .saturating_add(T::DbWeight::get().writes(6_u64)) } @@ -334,8 +335,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `902` // Estimated: `4556` - // Minimum execution time: 16_826_000 picoseconds. - Weight::from_parts(17_326_000, 4556) + // Minimum execution time: 16_597_000 picoseconds. + Weight::from_parts(16_980_000, 4556) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } @@ -349,8 +350,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `969` // Estimated: `4556` - // Minimum execution time: 20_831_000 picoseconds. - Weight::from_parts(21_615_000, 4556) + // Minimum execution time: 20_626_000 picoseconds. + Weight::from_parts(21_242_000, 4556) .saturating_add(T::DbWeight::get().reads(3_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } @@ -362,8 +363,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `902` // Estimated: `4556` - // Minimum execution time: 20_190_000 picoseconds. - Weight::from_parts(20_993_000, 4556) + // Minimum execution time: 19_972_000 picoseconds. + Weight::from_parts(20_470_000, 4556) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(T::DbWeight::get().writes(3_u64)) } @@ -373,8 +374,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 2_603_000 picoseconds. 
- Weight::from_parts(2_747_000, 0) + // Minimum execution time: 2_571_000 picoseconds. + Weight::from_parts(2_720_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } /// Storage: `Staking::ForceEra` (r:0 w:1) @@ -383,8 +384,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 8_070_000 picoseconds. - Weight::from_parts(8_745_000, 0) + // Minimum execution time: 8_056_000 picoseconds. + Weight::from_parts(8_413_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } /// Storage: `Staking::ForceEra` (r:0 w:1) @@ -393,8 +394,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 7_999_000 picoseconds. - Weight::from_parts(8_624_000, 0) + // Minimum execution time: 8_162_000 picoseconds. + Weight::from_parts(8_497_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } /// Storage: `Staking::ForceEra` (r:0 w:1) @@ -403,8 +404,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 8_131_000 picoseconds. - Weight::from_parts(8_467_000, 0) + // Minimum execution time: 8_320_000 picoseconds. + Weight::from_parts(8_564_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } /// Storage: `Staking::Invulnerables` (r:0 w:1) @@ -414,12 +415,31 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 2_731_000 picoseconds. - Weight::from_parts(3_298_421, 0) - // Standard Error: 31 - .saturating_add(Weight::from_parts(10_075, 0).saturating_mul(v.into())) + // Minimum execution time: 2_470_000 picoseconds. + Weight::from_parts(3_110_242, 0) + // Standard Error: 63 + .saturating_add(Weight::from_parts(11_786, 0).saturating_mul(v.into())) .saturating_add(T::DbWeight::get().writes(1_u64)) } + /// Storage: `Staking::Ledger` (r:5900 w:11800) + /// Proof: `Staking::Ledger` (`max_values`: None, `max_size`: Some(1091), added: 3566, mode: `MaxEncodedLen`) + /// Storage: `Staking::Payee` (r:5900 w:0) + /// Proof: `Staking::Payee` (`max_values`: None, `max_size`: Some(73), added: 2548, mode: `MaxEncodedLen`) + /// Storage: `Staking::Bonded` (r:0 w:5900) + /// Proof: `Staking::Bonded` (`max_values`: None, `max_size`: Some(72), added: 2547, mode: `MaxEncodedLen`) + /// The range of component `i` is `[0, 5900]`. + fn deprecate_controller_batch(i: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `1356 + i * (151 ±0)` + // Estimated: `990 + i * (3566 ±0)` + // Minimum execution time: 2_101_000 picoseconds. + Weight::from_parts(2_238_000, 990) + // Standard Error: 56_753 + .saturating_add(Weight::from_parts(18_404_902, 0).saturating_mul(i.into())) + .saturating_add(T::DbWeight::get().reads((2_u64).saturating_mul(i.into()))) + .saturating_add(T::DbWeight::get().writes((3_u64).saturating_mul(i.into()))) + .saturating_add(Weight::from_parts(0, 3566).saturating_mul(i.into())) + } /// Storage: `Staking::SlashingSpans` (r:1 w:1) /// Proof: `Staking::SlashingSpans` (`max_values`: None, `max_size`: None, mode: `Measured`) /// Storage: `Staking::Bonded` (r:1 w:1) @@ -453,10 +473,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `2196 + s * (4 ±0)` // Estimated: `6248 + s * (4 ±0)` - // Minimum execution time: 86_305_000 picoseconds. 
- Weight::from_parts(94_494_401, 6248) - // Standard Error: 3_602 - .saturating_add(Weight::from_parts(1_339_477, 0).saturating_mul(s.into())) + // Minimum execution time: 86_765_000 picoseconds. + Weight::from_parts(95_173_565, 6248) + // Standard Error: 4_596 + .saturating_add(Weight::from_parts(1_354_849, 0).saturating_mul(s.into())) .saturating_add(T::DbWeight::get().reads(13_u64)) .saturating_add(T::DbWeight::get().writes(12_u64)) .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(s.into()))) @@ -469,10 +489,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `66672` // Estimated: `70137` - // Minimum execution time: 100_007_000 picoseconds. - Weight::from_parts(894_033_025, 70137) - // Standard Error: 57_584 - .saturating_add(Weight::from_parts(4_870_504, 0).saturating_mul(s.into())) + // Minimum execution time: 104_490_000 picoseconds. + Weight::from_parts(1_162_956_951, 70137) + // Standard Error: 76_760 + .saturating_add(Weight::from_parts(6_485_569, 0).saturating_mul(s.into())) .saturating_add(T::DbWeight::get().reads(1_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } @@ -509,10 +529,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `33297 + n * (377 ±0)` // Estimated: `30944 + n * (3774 ±0)` - // Minimum execution time: 142_575_000 picoseconds. - Weight::from_parts(196_320_577, 30944) - // Standard Error: 29_330 - .saturating_add(Weight::from_parts(45_325_062, 0).saturating_mul(n.into())) + // Minimum execution time: 144_790_000 picoseconds. + Weight::from_parts(36_764_791, 30944) + // Standard Error: 89_592 + .saturating_add(Weight::from_parts(49_620_105, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads(14_u64)) .saturating_add(T::DbWeight::get().reads((6_u64).saturating_mul(n.into()))) .saturating_add(T::DbWeight::get().writes(4_u64)) @@ -536,10 +556,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1991 + l * (7 ±0)` // Estimated: `8877` - // Minimum execution time: 81_113_000 picoseconds. - Weight::from_parts(84_470_927, 8877) - // Standard Error: 5_588 - .saturating_add(Weight::from_parts(97_606, 0).saturating_mul(l.into())) + // Minimum execution time: 81_768_000 picoseconds. + Weight::from_parts(85_332_982, 8877) + // Standard Error: 5_380 + .saturating_add(Weight::from_parts(70_298, 0).saturating_mul(l.into())) .saturating_add(T::DbWeight::get().reads(9_u64)) .saturating_add(T::DbWeight::get().writes(7_u64)) } @@ -574,10 +594,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `2196 + s * (4 ±0)` // Estimated: `6248 + s * (4 ±0)` - // Minimum execution time: 94_810_000 picoseconds. - Weight::from_parts(99_292_156, 6248) - // Standard Error: 3_677 - .saturating_add(Weight::from_parts(1_345_598, 0).saturating_mul(s.into())) + // Minimum execution time: 96_123_000 picoseconds. + Weight::from_parts(100_278_672, 6248) + // Standard Error: 3_487 + .saturating_add(Weight::from_parts(1_326_503, 0).saturating_mul(s.into())) .saturating_add(T::DbWeight::get().reads(12_u64)) .saturating_add(T::DbWeight::get().writes(11_u64)) .saturating_add(T::DbWeight::get().writes((1_u64).saturating_mul(s.into()))) @@ -622,13 +642,13 @@ impl WeightInfo for SubstrateWeight { fn new_era(v: u32, n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `0 + n * (720 ±0) + v * (3598 ±0)` - // Estimated: `512390 + n * (3566 ±4) + v * (3566 ±40)` - // Minimum execution time: 583_230_000 picoseconds. 
- Weight::from_parts(585_794_000, 512390) - // Standard Error: 1_984_644 - .saturating_add(Weight::from_parts(65_914_551, 0).saturating_mul(v.into())) - // Standard Error: 197_758 - .saturating_add(Weight::from_parts(18_105_424, 0).saturating_mul(n.into())) + // Estimated: `512390 + n * (3566 ±0) + v * (3566 ±0)` + // Minimum execution time: 572_893_000 picoseconds. + Weight::from_parts(578_010_000, 512390) + // Standard Error: 2_094_268 + .saturating_add(Weight::from_parts(68_419_710, 0).saturating_mul(v.into())) + // Standard Error: 208_682 + .saturating_add(Weight::from_parts(18_826_175, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads(206_u64)) .saturating_add(T::DbWeight::get().reads((5_u64).saturating_mul(v.into()))) .saturating_add(T::DbWeight::get().reads((4_u64).saturating_mul(n.into()))) @@ -659,12 +679,12 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `3175 + n * (911 ±0) + v * (395 ±0)` // Estimated: `512390 + n * (3566 ±0) + v * (3566 ±0)` - // Minimum execution time: 33_312_958_000 picoseconds. - Weight::from_parts(4_949_866_209, 512390) - // Standard Error: 402_931 - .saturating_add(Weight::from_parts(16_448_367, 0).saturating_mul(v.into())) - // Standard Error: 402_931 - .saturating_add(Weight::from_parts(25_361_503, 0).saturating_mul(n.into())) + // Minimum execution time: 33_836_205_000 picoseconds. + Weight::from_parts(34_210_443_000, 512390) + // Standard Error: 441_692 + .saturating_add(Weight::from_parts(6_122_533, 0).saturating_mul(v.into())) + // Standard Error: 441_692 + .saturating_add(Weight::from_parts(4_418_264, 0).saturating_mul(n.into())) .saturating_add(T::DbWeight::get().reads(201_u64)) .saturating_add(T::DbWeight::get().reads((5_u64).saturating_mul(v.into()))) .saturating_add(T::DbWeight::get().reads((4_u64).saturating_mul(n.into()))) @@ -681,10 +701,10 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `979 + v * (50 ±0)` // Estimated: `3510 + v * (2520 ±0)` - // Minimum execution time: 2_474_646_000 picoseconds. - Weight::from_parts(2_512_113_000, 3510) - // Standard Error: 33_996 - .saturating_add(Weight::from_parts(1_992_173, 0).saturating_mul(v.into())) + // Minimum execution time: 2_454_689_000 picoseconds. + Weight::from_parts(161_771_064, 3510) + // Standard Error: 31_022 + .saturating_add(Weight::from_parts(4_820_158, 0).saturating_mul(v.into())) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(T::DbWeight::get().reads((1_u64).saturating_mul(v.into()))) .saturating_add(Weight::from_parts(0, 2520).saturating_mul(v.into())) @@ -705,8 +725,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 5_466_000 picoseconds. - Weight::from_parts(5_861_000, 0) + // Minimum execution time: 5_073_000 picoseconds. + Weight::from_parts(5_452_000, 0) .saturating_add(T::DbWeight::get().writes(6_u64)) } /// Storage: `Staking::MinCommission` (r:0 w:1) @@ -725,8 +745,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 4_780_000 picoseconds. - Weight::from_parts(4_998_000, 0) + // Minimum execution time: 4_465_000 picoseconds. 
+ Weight::from_parts(4_832_000, 0) .saturating_add(T::DbWeight::get().writes(6_u64)) } /// Storage: `Staking::Bonded` (r:1 w:0) @@ -755,8 +775,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `1939` // Estimated: `6248` - // Minimum execution time: 71_261_000 picoseconds. - Weight::from_parts(72_778_000, 6248) + // Minimum execution time: 71_239_000 picoseconds. + Weight::from_parts(74_649_000, 6248) .saturating_add(T::DbWeight::get().reads(12_u64)) .saturating_add(T::DbWeight::get().writes(6_u64)) } @@ -768,8 +788,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `691` // Estimated: `3510` - // Minimum execution time: 12_497_000 picoseconds. - Weight::from_parts(13_049_000, 3510) + // Minimum execution time: 12_525_000 picoseconds. + Weight::from_parts(13_126_000, 3510) .saturating_add(T::DbWeight::get().reads(2_u64)) .saturating_add(T::DbWeight::get().writes(1_u64)) } @@ -779,8 +799,8 @@ impl WeightInfo for SubstrateWeight { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 3_044_000 picoseconds. - Weight::from_parts(3_278_000, 0) + // Minimum execution time: 2_918_000 picoseconds. + Weight::from_parts(3_176_000, 0) .saturating_add(T::DbWeight::get().writes(1_u64)) } } @@ -801,8 +821,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `927` // Estimated: `4764` - // Minimum execution time: 42_895_000 picoseconds. - Weight::from_parts(44_924_000, 4764) + // Minimum execution time: 42_491_000 picoseconds. + Weight::from_parts(44_026_000, 4764) .saturating_add(RocksDbWeight::get().reads(3_u64)) .saturating_add(RocksDbWeight::get().writes(4_u64)) } @@ -822,8 +842,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1990` // Estimated: `8877` - // Minimum execution time: 87_734_000 picoseconds. - Weight::from_parts(90_762_000, 8877) + // Minimum execution time: 88_756_000 picoseconds. + Weight::from_parts(91_000_000, 8877) .saturating_add(RocksDbWeight::get().reads(9_u64)) .saturating_add(RocksDbWeight::get().writes(7_u64)) } @@ -849,8 +869,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `2195` // Estimated: `8877` - // Minimum execution time: 90_914_000 picoseconds. - Weight::from_parts(94_156_000, 8877) + // Minimum execution time: 91_331_000 picoseconds. + Weight::from_parts(94_781_000, 8877) .saturating_add(RocksDbWeight::get().reads(12_u64)) .saturating_add(RocksDbWeight::get().writes(7_u64)) } @@ -869,10 +889,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1115` // Estimated: `4764` - // Minimum execution time: 43_141_000 picoseconds. - Weight::from_parts(45_081_969, 4764) - // Standard Error: 1_010 - .saturating_add(Weight::from_parts(39_539, 0).saturating_mul(s.into())) + // Minimum execution time: 42_495_000 picoseconds. + Weight::from_parts(44_189_470, 4764) + // Standard Error: 1_389 + .saturating_add(Weight::from_parts(47_484, 0).saturating_mul(s.into())) .saturating_add(RocksDbWeight::get().reads(5_u64)) .saturating_add(RocksDbWeight::get().writes(2_u64)) } @@ -909,10 +929,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `2196 + s * (4 ±0)` // Estimated: `6248 + s * (4 ±0)` - // Minimum execution time: 87_743_000 picoseconds. - Weight::from_parts(96_983_484, 6248) - // Standard Error: 4_271 - .saturating_add(Weight::from_parts(1_382_993, 0).saturating_mul(s.into())) + // Minimum execution time: 89_004_000 picoseconds. 
+ Weight::from_parts(96_677_570, 6248) + // Standard Error: 4_635 + .saturating_add(Weight::from_parts(1_387_718, 0).saturating_mul(s.into())) .saturating_add(RocksDbWeight::get().reads(13_u64)) .saturating_add(RocksDbWeight::get().writes(11_u64)) .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(s.into()))) @@ -944,8 +964,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1372` // Estimated: `4556` - // Minimum execution time: 51_888_000 picoseconds. - Weight::from_parts(54_353_000, 4556) + // Minimum execution time: 51_532_000 picoseconds. + Weight::from_parts(53_308_000, 4556) .saturating_add(RocksDbWeight::get().reads(11_u64)) .saturating_add(RocksDbWeight::get().writes(5_u64)) } @@ -958,10 +978,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1280 + k * (569 ±0)` // Estimated: `4556 + k * (3033 ±0)` - // Minimum execution time: 28_944_000 picoseconds. - Weight::from_parts(31_116_533, 4556) - // Standard Error: 11_848 - .saturating_add(Weight::from_parts(6_422_601, 0).saturating_mul(k.into())) + // Minimum execution time: 28_955_000 picoseconds. + Weight::from_parts(29_609_869, 4556) + // Standard Error: 6_793 + .saturating_add(Weight::from_parts(6_412_124, 0).saturating_mul(k.into())) .saturating_add(RocksDbWeight::get().reads(1_u64)) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(k.into()))) .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(k.into()))) @@ -994,10 +1014,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1866 + n * (102 ±0)` // Estimated: `6248 + n * (2520 ±0)` - // Minimum execution time: 63_921_000 picoseconds. - Weight::from_parts(62_662_863, 6248) - // Standard Error: 15_071 - .saturating_add(Weight::from_parts(3_950_084, 0).saturating_mul(n.into())) + // Minimum execution time: 64_080_000 picoseconds. + Weight::from_parts(61_985_382, 6248) + // Standard Error: 13_320 + .saturating_add(Weight::from_parts(4_030_513, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads(12_u64)) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(n.into()))) .saturating_add(RocksDbWeight::get().writes(6_u64)) @@ -1021,8 +1041,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1650` // Estimated: `6248` - // Minimum execution time: 54_605_000 picoseconds. - Weight::from_parts(56_406_000, 6248) + // Minimum execution time: 54_194_000 picoseconds. + Weight::from_parts(55_578_000, 6248) .saturating_add(RocksDbWeight::get().reads(8_u64)) .saturating_add(RocksDbWeight::get().writes(6_u64)) } @@ -1036,8 +1056,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `902` // Estimated: `4556` - // Minimum execution time: 16_826_000 picoseconds. - Weight::from_parts(17_326_000, 4556) + // Minimum execution time: 16_597_000 picoseconds. + Weight::from_parts(16_980_000, 4556) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } @@ -1051,8 +1071,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `969` // Estimated: `4556` - // Minimum execution time: 20_831_000 picoseconds. - Weight::from_parts(21_615_000, 4556) + // Minimum execution time: 20_626_000 picoseconds. 
+ Weight::from_parts(21_242_000, 4556) .saturating_add(RocksDbWeight::get().reads(3_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } @@ -1064,8 +1084,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `902` // Estimated: `4556` - // Minimum execution time: 20_190_000 picoseconds. - Weight::from_parts(20_993_000, 4556) + // Minimum execution time: 19_972_000 picoseconds. + Weight::from_parts(20_470_000, 4556) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(RocksDbWeight::get().writes(3_u64)) } @@ -1075,8 +1095,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 2_603_000 picoseconds. - Weight::from_parts(2_747_000, 0) + // Minimum execution time: 2_571_000 picoseconds. + Weight::from_parts(2_720_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } /// Storage: `Staking::ForceEra` (r:0 w:1) @@ -1085,8 +1105,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 8_070_000 picoseconds. - Weight::from_parts(8_745_000, 0) + // Minimum execution time: 8_056_000 picoseconds. + Weight::from_parts(8_413_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } /// Storage: `Staking::ForceEra` (r:0 w:1) @@ -1095,8 +1115,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 7_999_000 picoseconds. - Weight::from_parts(8_624_000, 0) + // Minimum execution time: 8_162_000 picoseconds. + Weight::from_parts(8_497_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } /// Storage: `Staking::ForceEra` (r:0 w:1) @@ -1105,8 +1125,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 8_131_000 picoseconds. - Weight::from_parts(8_467_000, 0) + // Minimum execution time: 8_320_000 picoseconds. + Weight::from_parts(8_564_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } /// Storage: `Staking::Invulnerables` (r:0 w:1) @@ -1116,12 +1136,31 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 2_731_000 picoseconds. - Weight::from_parts(3_298_421, 0) - // Standard Error: 31 - .saturating_add(Weight::from_parts(10_075, 0).saturating_mul(v.into())) + // Minimum execution time: 2_470_000 picoseconds. + Weight::from_parts(3_110_242, 0) + // Standard Error: 63 + .saturating_add(Weight::from_parts(11_786, 0).saturating_mul(v.into())) .saturating_add(RocksDbWeight::get().writes(1_u64)) } + /// Storage: `Staking::Ledger` (r:5900 w:11800) + /// Proof: `Staking::Ledger` (`max_values`: None, `max_size`: Some(1091), added: 3566, mode: `MaxEncodedLen`) + /// Storage: `Staking::Payee` (r:5900 w:0) + /// Proof: `Staking::Payee` (`max_values`: None, `max_size`: Some(73), added: 2548, mode: `MaxEncodedLen`) + /// Storage: `Staking::Bonded` (r:0 w:5900) + /// Proof: `Staking::Bonded` (`max_values`: None, `max_size`: Some(72), added: 2547, mode: `MaxEncodedLen`) + /// The range of component `i` is `[0, 5900]`. + fn deprecate_controller_batch(i: u32, ) -> Weight { + // Proof Size summary in bytes: + // Measured: `1356 + i * (151 ±0)` + // Estimated: `990 + i * (3566 ±0)` + // Minimum execution time: 2_101_000 picoseconds. 
+ Weight::from_parts(2_238_000, 990) + // Standard Error: 56_753 + .saturating_add(Weight::from_parts(18_404_902, 0).saturating_mul(i.into())) + .saturating_add(RocksDbWeight::get().reads((2_u64).saturating_mul(i.into()))) + .saturating_add(RocksDbWeight::get().writes((3_u64).saturating_mul(i.into()))) + .saturating_add(Weight::from_parts(0, 3566).saturating_mul(i.into())) + } /// Storage: `Staking::SlashingSpans` (r:1 w:1) /// Proof: `Staking::SlashingSpans` (`max_values`: None, `max_size`: None, mode: `Measured`) /// Storage: `Staking::Bonded` (r:1 w:1) @@ -1155,10 +1194,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `2196 + s * (4 ±0)` // Estimated: `6248 + s * (4 ±0)` - // Minimum execution time: 86_305_000 picoseconds. - Weight::from_parts(94_494_401, 6248) - // Standard Error: 3_602 - .saturating_add(Weight::from_parts(1_339_477, 0).saturating_mul(s.into())) + // Minimum execution time: 86_765_000 picoseconds. + Weight::from_parts(95_173_565, 6248) + // Standard Error: 4_596 + .saturating_add(Weight::from_parts(1_354_849, 0).saturating_mul(s.into())) .saturating_add(RocksDbWeight::get().reads(13_u64)) .saturating_add(RocksDbWeight::get().writes(12_u64)) .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(s.into()))) @@ -1171,10 +1210,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `66672` // Estimated: `70137` - // Minimum execution time: 100_007_000 picoseconds. - Weight::from_parts(894_033_025, 70137) - // Standard Error: 57_584 - .saturating_add(Weight::from_parts(4_870_504, 0).saturating_mul(s.into())) + // Minimum execution time: 104_490_000 picoseconds. + Weight::from_parts(1_162_956_951, 70137) + // Standard Error: 76_760 + .saturating_add(Weight::from_parts(6_485_569, 0).saturating_mul(s.into())) .saturating_add(RocksDbWeight::get().reads(1_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } @@ -1211,10 +1250,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `33297 + n * (377 ±0)` // Estimated: `30944 + n * (3774 ±0)` - // Minimum execution time: 142_575_000 picoseconds. - Weight::from_parts(196_320_577, 30944) - // Standard Error: 29_330 - .saturating_add(Weight::from_parts(45_325_062, 0).saturating_mul(n.into())) + // Minimum execution time: 144_790_000 picoseconds. + Weight::from_parts(36_764_791, 30944) + // Standard Error: 89_592 + .saturating_add(Weight::from_parts(49_620_105, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads(14_u64)) .saturating_add(RocksDbWeight::get().reads((6_u64).saturating_mul(n.into()))) .saturating_add(RocksDbWeight::get().writes(4_u64)) @@ -1238,10 +1277,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1991 + l * (7 ±0)` // Estimated: `8877` - // Minimum execution time: 81_113_000 picoseconds. - Weight::from_parts(84_470_927, 8877) - // Standard Error: 5_588 - .saturating_add(Weight::from_parts(97_606, 0).saturating_mul(l.into())) + // Minimum execution time: 81_768_000 picoseconds. + Weight::from_parts(85_332_982, 8877) + // Standard Error: 5_380 + .saturating_add(Weight::from_parts(70_298, 0).saturating_mul(l.into())) .saturating_add(RocksDbWeight::get().reads(9_u64)) .saturating_add(RocksDbWeight::get().writes(7_u64)) } @@ -1276,10 +1315,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `2196 + s * (4 ±0)` // Estimated: `6248 + s * (4 ±0)` - // Minimum execution time: 94_810_000 picoseconds. 
- Weight::from_parts(99_292_156, 6248) - // Standard Error: 3_677 - .saturating_add(Weight::from_parts(1_345_598, 0).saturating_mul(s.into())) + // Minimum execution time: 96_123_000 picoseconds. + Weight::from_parts(100_278_672, 6248) + // Standard Error: 3_487 + .saturating_add(Weight::from_parts(1_326_503, 0).saturating_mul(s.into())) .saturating_add(RocksDbWeight::get().reads(12_u64)) .saturating_add(RocksDbWeight::get().writes(11_u64)) .saturating_add(RocksDbWeight::get().writes((1_u64).saturating_mul(s.into()))) @@ -1324,13 +1363,13 @@ impl WeightInfo for () { fn new_era(v: u32, n: u32, ) -> Weight { // Proof Size summary in bytes: // Measured: `0 + n * (720 ±0) + v * (3598 ±0)` - // Estimated: `512390 + n * (3566 ±4) + v * (3566 ±40)` - // Minimum execution time: 583_230_000 picoseconds. - Weight::from_parts(585_794_000, 512390) - // Standard Error: 1_984_644 - .saturating_add(Weight::from_parts(65_914_551, 0).saturating_mul(v.into())) - // Standard Error: 197_758 - .saturating_add(Weight::from_parts(18_105_424, 0).saturating_mul(n.into())) + // Estimated: `512390 + n * (3566 ±0) + v * (3566 ±0)` + // Minimum execution time: 572_893_000 picoseconds. + Weight::from_parts(578_010_000, 512390) + // Standard Error: 2_094_268 + .saturating_add(Weight::from_parts(68_419_710, 0).saturating_mul(v.into())) + // Standard Error: 208_682 + .saturating_add(Weight::from_parts(18_826_175, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads(206_u64)) .saturating_add(RocksDbWeight::get().reads((5_u64).saturating_mul(v.into()))) .saturating_add(RocksDbWeight::get().reads((4_u64).saturating_mul(n.into()))) @@ -1361,12 +1400,12 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `3175 + n * (911 ±0) + v * (395 ±0)` // Estimated: `512390 + n * (3566 ±0) + v * (3566 ±0)` - // Minimum execution time: 33_312_958_000 picoseconds. - Weight::from_parts(4_949_866_209, 512390) - // Standard Error: 402_931 - .saturating_add(Weight::from_parts(16_448_367, 0).saturating_mul(v.into())) - // Standard Error: 402_931 - .saturating_add(Weight::from_parts(25_361_503, 0).saturating_mul(n.into())) + // Minimum execution time: 33_836_205_000 picoseconds. + Weight::from_parts(34_210_443_000, 512390) + // Standard Error: 441_692 + .saturating_add(Weight::from_parts(6_122_533, 0).saturating_mul(v.into())) + // Standard Error: 441_692 + .saturating_add(Weight::from_parts(4_418_264, 0).saturating_mul(n.into())) .saturating_add(RocksDbWeight::get().reads(201_u64)) .saturating_add(RocksDbWeight::get().reads((5_u64).saturating_mul(v.into()))) .saturating_add(RocksDbWeight::get().reads((4_u64).saturating_mul(n.into()))) @@ -1383,10 +1422,10 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `979 + v * (50 ±0)` // Estimated: `3510 + v * (2520 ±0)` - // Minimum execution time: 2_474_646_000 picoseconds. - Weight::from_parts(2_512_113_000, 3510) - // Standard Error: 33_996 - .saturating_add(Weight::from_parts(1_992_173, 0).saturating_mul(v.into())) + // Minimum execution time: 2_454_689_000 picoseconds. 
+ Weight::from_parts(161_771_064, 3510) + // Standard Error: 31_022 + .saturating_add(Weight::from_parts(4_820_158, 0).saturating_mul(v.into())) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(RocksDbWeight::get().reads((1_u64).saturating_mul(v.into()))) .saturating_add(Weight::from_parts(0, 2520).saturating_mul(v.into())) @@ -1407,8 +1446,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 5_466_000 picoseconds. - Weight::from_parts(5_861_000, 0) + // Minimum execution time: 5_073_000 picoseconds. + Weight::from_parts(5_452_000, 0) .saturating_add(RocksDbWeight::get().writes(6_u64)) } /// Storage: `Staking::MinCommission` (r:0 w:1) @@ -1427,8 +1466,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 4_780_000 picoseconds. - Weight::from_parts(4_998_000, 0) + // Minimum execution time: 4_465_000 picoseconds. + Weight::from_parts(4_832_000, 0) .saturating_add(RocksDbWeight::get().writes(6_u64)) } /// Storage: `Staking::Bonded` (r:1 w:0) @@ -1457,8 +1496,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `1939` // Estimated: `6248` - // Minimum execution time: 71_261_000 picoseconds. - Weight::from_parts(72_778_000, 6248) + // Minimum execution time: 71_239_000 picoseconds. + Weight::from_parts(74_649_000, 6248) .saturating_add(RocksDbWeight::get().reads(12_u64)) .saturating_add(RocksDbWeight::get().writes(6_u64)) } @@ -1470,8 +1509,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `691` // Estimated: `3510` - // Minimum execution time: 12_497_000 picoseconds. - Weight::from_parts(13_049_000, 3510) + // Minimum execution time: 12_525_000 picoseconds. + Weight::from_parts(13_126_000, 3510) .saturating_add(RocksDbWeight::get().reads(2_u64)) .saturating_add(RocksDbWeight::get().writes(1_u64)) } @@ -1481,8 +1520,8 @@ impl WeightInfo for () { // Proof Size summary in bytes: // Measured: `0` // Estimated: `0` - // Minimum execution time: 3_044_000 picoseconds. - Weight::from_parts(3_278_000, 0) + // Minimum execution time: 2_918_000 picoseconds. 
+ Weight::from_parts(3_176_000, 0) .saturating_add(RocksDbWeight::get().writes(1_u64)) } } diff --git a/substrate/frame/state-trie-migration/Cargo.toml b/substrate/frame/state-trie-migration/Cargo.toml index d71dceba39d6..9cbdddc3e89d 100644 --- a/substrate/frame/state-trie-migration/Cargo.toml +++ b/substrate/frame/state-trie-migration/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "FRAME pallet migration of trie" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -15,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } log = { version = "0.4.17", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", optional = true } +serde = { version = "1.0.195", optional = true } thousands = { version = "0.2.0", optional = true } zstd = { version = "0.13", default-features = false, optional = true } frame-benchmarking = { path = "../benchmarking", default-features = false, optional = true } diff --git a/substrate/frame/state-trie-migration/src/lib.rs b/substrate/frame/state-trie-migration/src/lib.rs index 84a15738ba1c..7ca91cc5197a 100644 --- a/substrate/frame/state-trie-migration/src/lib.rs +++ b/substrate/frame/state-trie-migration/src/lib.rs @@ -477,7 +477,7 @@ pub mod pallet { /// - [`frame_support::storage::StorageDoubleMap`]: 96 byte /// /// For more info see - /// + /// #[pallet::constant] type MaxKeyLen: Get; diff --git a/substrate/frame/sudo/Cargo.toml b/substrate/frame/sudo/Cargo.toml index 70323590085e..027716ce3179 100644 --- a/substrate/frame/sudo/Cargo.toml +++ b/substrate/frame/sudo/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME pallet for sudo" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/sudo/src/lib.rs b/substrate/frame/sudo/src/lib.rs index 9012b152dbb5..5a79bacfc1b5 100644 --- a/substrate/frame/sudo/src/lib.rs +++ b/substrate/frame/sudo/src/lib.rs @@ -350,12 +350,16 @@ pub mod pallet { impl Pallet { /// Ensure that the caller is the sudo key. 
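/// With this change, both the root origin and the configured sudo key pass this
/// check; any other signed origin still fails with `Error::<T>::RequireSudo`.
///
/// A minimal sketch of the resulting behaviour, using the mock runtime from the
/// tests below (`new_test_ext(1)` sets the sudo key to account `1`):
///
/// ```ignore
/// assert_ok!(Sudo::set_key(RuntimeOrigin::root(), 1));    // root passes
/// assert_ok!(Sudo::remove_key(RuntimeOrigin::signed(1))); // the sudo key passes
/// assert_noop!(
///     Sudo::set_key(RuntimeOrigin::signed(2), 2),         // anyone else is rejected
///     Error::<Test>::RequireSudo,
/// );
/// ```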
pub(crate) fn ensure_sudo(origin: OriginFor<T>) -> DispatchResult { - let sender = ensure_signed(origin)?; - - if Self::key().map_or(false, |k| k == sender) { - Ok(()) + let sender = ensure_signed_or_root(origin)?; + + if let Some(sender) = sender { + if Self::key().map_or(false, |k| k == sender) { + Ok(()) + } else { + Err(Error::<T>::RequireSudo.into()) + } } else { - Err(Error::<T>::RequireSudo.into()) + Ok(()) } } }
diff --git a/substrate/frame/sudo/src/tests.rs b/substrate/frame/sudo/src/tests.rs index 80e280269e15..e955493736f1 100644 --- a/substrate/frame/sudo/src/tests.rs +++ b/substrate/frame/sudo/src/tests.rs @@ -170,6 +170,18 @@ fn remove_key_works() { }); } +#[test] +fn using_root_origin_works() { + new_test_ext(1).execute_with(|| { + assert_ok!(Sudo::remove_key(RuntimeOrigin::root())); + assert!(Sudo::key().is_none()); + System::assert_has_event(TestEvent::Sudo(Event::KeyRemoved {})); + + assert_ok!(Sudo::set_key(RuntimeOrigin::root(), 1)); + assert_eq!(Some(1), Sudo::key()); + }); +} + #[test] fn sudo_as_basics() { new_test_ext(1).execute_with(|| {
diff --git a/substrate/frame/support/Cargo.toml b/substrate/frame/support/Cargo.toml index a0d311b2ab30..db06eac5f44b 100644 --- a/substrate/frame/support/Cargo.toml +++ b/substrate/frame/support/Cargo.toml @@ -9,11 +9,15 @@ repository.workspace = true description = "Support code for the runtime." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"] } +array-bytes = { version = "6.1", default-features = false } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"] } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } frame-metadata = { version = "16.0.0", default-features = false, features = ["current"] } @@ -39,11 +43,11 @@ log = { version = "0.4.17", default-features = false } sp-core-hashing-proc-macro = { path = "../../primitives/core/hashing/proc-macro" } environmental = { version = "1.1.4", default-features = false } sp-genesis-builder = { path = "../../primitives/genesis-builder", default-features = false } -serde_json = { version = "1.0.108", default-features = false, features = ["alloc"] } +serde_json = { version = "1.0.111", default-features = false, features = ["alloc"] } docify = "0.2.6" static_assertions = "1.1.0" -aquamarine = { version = "0.4" } +aquamarine = { version = "0.5.0" } [dev-dependencies] assert_matches = "1.3.0"
diff --git a/substrate/frame/support/procedural/Cargo.toml b/substrate/frame/support/procedural/Cargo.toml index ec9a4e216297..423b65e9a241 100644 --- a/substrate/frame/support/procedural/Cargo.toml +++ b/substrate/frame/support/procedural/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Proc macro of Support code for the runtime."
+[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -21,13 +24,16 @@ cfg-expr = "0.15.5" itertools = "0.12" proc-macro2 = "1.0.56" quote = "1.0.28" -syn = { version = "2.0.39", features = ["full"] } +syn = { version = "2.0.48", features = ["full"] } frame-support-procedural-tools = { path = "tools" } macro_magic = { version = "0.5.0", features = ["proc_support"] } proc-macro-warning = { version = "1.0.0", default-features = false } expander = "2.0.0" sp-core-hashing = { path = "../../../primitives/core/hashing" } +[dev-dependencies] +regex = "1" + [features] default = ["std"] std = [] diff --git a/substrate/frame/support/procedural/src/construct_runtime/expand/mod.rs b/substrate/frame/support/procedural/src/construct_runtime/expand/mod.rs index 10eb63bb2db3..4a91d0d4e53a 100644 --- a/substrate/frame/support/procedural/src/construct_runtime/expand/mod.rs +++ b/substrate/frame/support/procedural/src/construct_runtime/expand/mod.rs @@ -27,6 +27,7 @@ mod metadata; mod origin; mod outer_enums; mod slash_reason; +mod task; mod unsigned; pub use call::expand_outer_dispatch; @@ -39,4 +40,5 @@ pub use metadata::expand_runtime_metadata; pub use origin::expand_outer_origin; pub use outer_enums::{expand_outer_enum, OuterEnumType}; pub use slash_reason::expand_outer_slash_reason; +pub use task::expand_outer_task; pub use unsigned::expand_outer_validate_unsigned; diff --git a/substrate/frame/support/procedural/src/construct_runtime/expand/task.rs b/substrate/frame/support/procedural/src/construct_runtime/expand/task.rs new file mode 100644 index 000000000000..6531c0e9e070 --- /dev/null +++ b/substrate/frame/support/procedural/src/construct_runtime/expand/task.rs @@ -0,0 +1,131 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License + +use crate::construct_runtime::Pallet; +use proc_macro2::{Ident, TokenStream as TokenStream2}; +use quote::quote; + +/// Expands aggregate `RuntimeTask` enum. +pub fn expand_outer_task( + runtime_name: &Ident, + pallet_decls: &[Pallet], + scrate: &TokenStream2, +) -> TokenStream2 { + let mut from_impls = Vec::new(); + let mut task_variants = Vec::new(); + let mut variant_names = Vec::new(); + let mut task_paths = Vec::new(); + for decl in pallet_decls { + if decl.find_part("Task").is_none() { + continue + } + + let variant_name = &decl.name; + let path = &decl.path; + let index = decl.index; + + from_impls.push(quote! { + impl From<#path::Task<#runtime_name>> for RuntimeTask { + fn from(hr: #path::Task<#runtime_name>) -> Self { + RuntimeTask::#variant_name(hr) + } + } + + impl TryInto<#path::Task<#runtime_name>> for RuntimeTask { + type Error = (); + + fn try_into(self) -> Result<#path::Task<#runtime_name>, Self::Error> { + match self { + RuntimeTask::#variant_name(hr) => Ok(hr), + _ => Err(()), + } + } + } + }); + + task_variants.push(quote! 
{ + #[codec(index = #index)] + #variant_name(#path::Task<#runtime_name>), + }); + + variant_names.push(quote!(#variant_name)); + + task_paths.push(quote!(#path::Task)); + } + + let prelude = quote!(#scrate::traits::tasks::__private); + + const INCOMPLETE_MATCH_QED: &'static str = + "cannot have an instantiated RuntimeTask without some Task variant in the runtime. QED"; + + let output = quote! { + /// An aggregation of all `Task` enums across all pallets included in the current runtime. + #[derive( + Clone, Eq, PartialEq, + #scrate::__private::codec::Encode, + #scrate::__private::codec::Decode, + #scrate::__private::scale_info::TypeInfo, + #scrate::__private::RuntimeDebug, + )] + pub enum RuntimeTask { + #( #task_variants )* + } + + #[automatically_derived] + impl #scrate::traits::Task for RuntimeTask { + type Enumeration = #prelude::IntoIter; + + fn is_valid(&self) -> bool { + match self { + #(RuntimeTask::#variant_names(val) => val.is_valid(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn run(&self) -> Result<(), #scrate::traits::tasks::__private::DispatchError> { + match self { + #(RuntimeTask::#variant_names(val) => val.run(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self { + #(RuntimeTask::#variant_names(val) => val.weight(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn task_index(&self) -> u32 { + match self { + #(RuntimeTask::#variant_names(val) => val.task_index(),)* + _ => unreachable!(#INCOMPLETE_MATCH_QED), + } + } + + fn iter() -> Self::Enumeration { + let mut all_tasks = Vec::new(); + #(all_tasks.extend(#task_paths::iter().map(RuntimeTask::from).collect::>());)* + all_tasks.into_iter() + } + } + + #( #from_impls )* + }; + + output +} diff --git a/substrate/frame/support/procedural/src/construct_runtime/mod.rs b/substrate/frame/support/procedural/src/construct_runtime/mod.rs index 5f3ce923e86e..96597d2e1310 100644 --- a/substrate/frame/support/procedural/src/construct_runtime/mod.rs +++ b/substrate/frame/support/procedural/src/construct_runtime/mod.rs @@ -387,6 +387,7 @@ fn construct_runtime_final_expansion( let pallet_to_index = decl_pallet_runtime_setup(&pallets, &scrate); let dispatch = expand::expand_outer_dispatch(&name, system_pallet, &pallets, &scrate); + let tasks = expand::expand_outer_task(&name, &pallets, &scrate); let metadata = expand::expand_runtime_metadata( &name, &pallets, @@ -476,6 +477,8 @@ fn construct_runtime_final_expansion( #dispatch + #tasks + #metadata #outer_config diff --git a/substrate/frame/support/procedural/src/construct_runtime/parse.rs b/substrate/frame/support/procedural/src/construct_runtime/parse.rs index 53fbc37aa48d..f02cb7d06f48 100644 --- a/substrate/frame/support/procedural/src/construct_runtime/parse.rs +++ b/substrate/frame/support/procedural/src/construct_runtime/parse.rs @@ -43,6 +43,7 @@ mod keyword { syn::custom_keyword!(ValidateUnsigned); syn::custom_keyword!(FreezeReason); syn::custom_keyword!(HoldReason); + syn::custom_keyword!(Task); syn::custom_keyword!(LockId); syn::custom_keyword!(SlashReason); syn::custom_keyword!(exclude_parts); @@ -405,6 +406,7 @@ pub enum PalletPartKeyword { ValidateUnsigned(keyword::ValidateUnsigned), FreezeReason(keyword::FreezeReason), HoldReason(keyword::HoldReason), + Task(keyword::Task), LockId(keyword::LockId), SlashReason(keyword::SlashReason), } @@ -435,6 +437,8 @@ impl Parse for PalletPartKeyword { Ok(Self::FreezeReason(input.parse()?)) } else if lookahead.peek(keyword::HoldReason) { 
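For a runtime that declares a pallet with a `Task` part, `expand_outer_task` above expands to roughly the following (a minimal sketch; `Runtime`, `ExamplePallet` and `pallet_example` are hypothetical names, and the derives plus the generated `Task` trait impl are elided):

// One variant per pallet that exposes tasks, keyed by its pallet index.
pub enum RuntimeTask {
    #[codec(index = 5)]
    ExamplePallet(pallet_example::Task<Runtime>),
}

impl From<pallet_example::Task<Runtime>> for RuntimeTask {
    fn from(task: pallet_example::Task<Runtime>) -> Self {
        RuntimeTask::ExamplePallet(task)
    }
}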
Ok(Self::HoldReason(input.parse()?)) + } else if lookahead.peek(keyword::Task) { + Ok(Self::Task(input.parse()?)) } else if lookahead.peek(keyword::LockId) { Ok(Self::LockId(input.parse()?)) } else if lookahead.peek(keyword::SlashReason) { @@ -460,6 +464,7 @@ impl PalletPartKeyword { Self::ValidateUnsigned(_) => "ValidateUnsigned", Self::FreezeReason(_) => "FreezeReason", Self::HoldReason(_) => "HoldReason", + Self::Task(_) => "Task", Self::LockId(_) => "LockId", Self::SlashReason(_) => "SlashReason", } @@ -472,7 +477,7 @@ impl PalletPartKeyword { /// Returns the names of all pallet parts that allow to have a generic argument. fn all_generic_arg() -> &'static [&'static str] { - &["Event", "Error", "Origin", "Config"] + &["Event", "Error", "Origin", "Config", "Task"] } } @@ -490,6 +495,7 @@ impl ToTokens for PalletPartKeyword { Self::ValidateUnsigned(inner) => inner.to_tokens(tokens), Self::FreezeReason(inner) => inner.to_tokens(tokens), Self::HoldReason(inner) => inner.to_tokens(tokens), + Self::Task(inner) => inner.to_tokens(tokens), Self::LockId(inner) => inner.to_tokens(tokens), Self::SlashReason(inner) => inner.to_tokens(tokens), } diff --git a/substrate/frame/support/procedural/src/derive_impl.rs b/substrate/frame/support/procedural/src/derive_impl.rs index bf7cddfe3f74..fffe012f42cc 100644 --- a/substrate/frame/support/procedural/src/derive_impl.rs +++ b/substrate/frame/support/procedural/src/derive_impl.rs @@ -137,9 +137,15 @@ fn combine_impls( return None } if let ImplItem::Type(typ) = item.clone() { + let cfg_attrs = typ + .attrs + .iter() + .filter(|attr| attr.path().get_ident().map_or(false, |ident| ident == "cfg")) + .map(|attr| attr.to_token_stream()); if is_runtime_type(&typ) { let item: ImplItem = if inject_runtime_types { parse_quote! { + #( #cfg_attrs )* type #ident = #ident; } } else { @@ -149,6 +155,7 @@ fn combine_impls( } // modify and insert uncolliding type items let modified_item: ImplItem = parse_quote! 
{ + #( #cfg_attrs )* type #ident = <#default_impl_path as #disambiguation_path>::#ident; }; return Some(modified_item) diff --git a/substrate/frame/support/procedural/src/lib.rs b/substrate/frame/support/procedural/src/lib.rs index 7b5a89d60f83..e59bfdc1faa8 100644 --- a/substrate/frame/support/procedural/src/lib.rs +++ b/substrate/frame/support/procedural/src/lib.rs @@ -628,7 +628,6 @@ pub fn storage_alias(attributes: TokenStream, input: TokenStream) -> TokenStream /// ``` /// /// where `TestDefaultConfig` was defined and registered as follows: -/// /// ```ignore /// pub struct TestDefaultConfig; /// @@ -652,7 +651,6 @@ pub fn storage_alias(attributes: TokenStream, input: TokenStream) -> TokenStream /// ``` /// /// The above call to `derive_impl` would expand to roughly the following: -/// /// ```ignore /// impl frame_system::Config for Test { /// use frame_system::config_preludes::TestDefaultConfig; @@ -855,6 +853,7 @@ pub fn inject_runtime_type(_: TokenStream, tokens: TokenStream) -> TokenStream { let item = syn::parse_macro_input!(item as TraitItemType); if item.ident != "RuntimeCall" && item.ident != "RuntimeEvent" && + item.ident != "RuntimeTask" && item.ident != "RuntimeOrigin" && item.ident != "RuntimeHoldReason" && item.ident != "RuntimeFreezeReason" && @@ -862,10 +861,11 @@ pub fn inject_runtime_type(_: TokenStream, tokens: TokenStream) -> TokenStream { { return syn::Error::new_spanned( item, - "`#[inject_runtime_type]` can only be attached to `RuntimeCall`, `RuntimeEvent`, `RuntimeOrigin` or `PalletInfo`", + "`#[inject_runtime_type]` can only be attached to `RuntimeCall`, `RuntimeEvent`, \ + `RuntimeTask`, `RuntimeOrigin` or `PalletInfo`", ) .to_compile_error() - .into(); + .into() } tokens } @@ -1492,6 +1492,56 @@ pub fn composite_enum(_: TokenStream, _: TokenStream) -> TokenStream { pallet_macro_stub() } +/// +/// --- +/// +/// **Rust-Analyzer users**: See the documentation of the Rust item in +/// `frame_support::pallet_macros::tasks_experimental`. +#[proc_macro_attribute] +pub fn tasks_experimental(_: TokenStream, _: TokenStream) -> TokenStream { + pallet_macro_stub() +} + +/// +/// --- +/// +/// **Rust-Analyzer users**: See the documentation of the Rust item in +/// `frame_support::pallet_macros::task_list`. +#[proc_macro_attribute] +pub fn task_list(_: TokenStream, _: TokenStream) -> TokenStream { + pallet_macro_stub() +} + +/// +/// --- +/// +/// **Rust-Analyzer users**: See the documentation of the Rust item in +/// `frame_support::pallet_macros::task_condition`. +#[proc_macro_attribute] +pub fn task_condition(_: TokenStream, _: TokenStream) -> TokenStream { + pallet_macro_stub() +} + +/// +/// --- +/// +/// **Rust-Analyzer users**: See the documentation of the Rust item in +/// `frame_support::pallet_macros::task_weight`. +#[proc_macro_attribute] +pub fn task_weight(_: TokenStream, _: TokenStream) -> TokenStream { + pallet_macro_stub() +} + +/// +/// --- +/// +/// **Rust-Analyzer users**: See the documentation of the Rust item in +/// `frame_support::pallet_macros::task_index`. +#[proc_macro_attribute] +pub fn task_index(_: TokenStream, _: TokenStream) -> TokenStream { + pallet_macro_stub() +} + /// Can be attached to a module. Doing so will declare that module as importable into a pallet /// via [`#[import_section]`](`macro@import_section`). 
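The five attribute stubs above are the user-facing surface of the Tasks API. A minimal sketch of a pallet using them (the storage items `Numbers` and `Total`, the `NotFound` error variant and the weight function are hypothetical):

#[pallet::tasks_experimental]
impl<T: Config> Pallet<T> {
    /// Fold a stored number into the running total and remove it.
    #[pallet::task_list(Numbers::<T>::iter_keys())]
    #[pallet::task_condition(|i| Numbers::<T>::contains_key(i))]
    #[pallet::task_weight(T::WeightInfo::add_number_into_total())]
    #[pallet::task_index(0)]
    pub fn add_number_into_total(i: u32) -> DispatchResult {
        let value = Numbers::<T>::take(i).ok_or(Error::<T>::NotFound)?;
        Total::<T>::mutate(|total| *total += value);
        Ok(())
    }
}

Every task function carries all four of `task_index`, `task_condition`, `task_list` and `task_weight`; the parsing code further below rejects missing or duplicated attributes.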
/// diff --git a/substrate/frame/support/procedural/src/pallet/expand/call.rs b/substrate/frame/support/procedural/src/pallet/expand/call.rs index 599facb4500d..cebeb97ffebc 100644 --- a/substrate/frame/support/procedural/src/pallet/expand/call.rs +++ b/substrate/frame/support/procedural/src/pallet/expand/call.rs @@ -242,6 +242,15 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { }) .collect::>(); + let cfg_attrs = methods + .iter() + .map(|method| { + let attrs = + method.cfg_attrs.iter().map(|attr| attr.to_token_stream()).collect::>(); + quote::quote!( #( #attrs )* ) + }) + .collect::>(); + let feeless_check = methods.iter().map(|method| &method.feeless_check).collect::>(); let feeless_check_result = feeless_check.iter().zip(args_name.iter()).map(|(feeless_check, arg_name)| { @@ -298,6 +307,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::Never, ), #( + #cfg_attrs #[doc = #fn_doc] #[codec(index = #call_index)] #fn_name { @@ -311,6 +321,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { impl<#type_impl_gen> #call_ident<#type_use_gen> #where_clause { #( + #cfg_attrs #[doc = #new_call_variant_doc] pub fn #new_call_variant_fn_name( #( #args_name_stripped: #args_type ),* @@ -329,6 +340,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { fn get_dispatch_info(&self) -> #frame_support::dispatch::DispatchInfo { match *self { #( + #cfg_attrs Self::#fn_name { #( #args_name_pattern_ref, )* } => { let __pallet_base_weight = #fn_weight; @@ -366,6 +378,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { fn is_feeless(&self, origin: &Self::Origin) -> bool { match *self { #( + #cfg_attrs Self::#fn_name { #( #args_name_pattern_ref, )* } => { #feeless_check_result }, @@ -380,13 +393,13 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { { fn get_call_name(&self) -> &'static str { match *self { - #( Self::#fn_name { .. } => stringify!(#fn_name), )* + #( #cfg_attrs Self::#fn_name { .. } => stringify!(#fn_name), )* Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), } } fn get_call_names() -> &'static [&'static str] { - &[ #( stringify!(#fn_name), )* ] + &[ #( #cfg_attrs stringify!(#fn_name), )* ] } } @@ -395,13 +408,13 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { { fn get_call_index(&self) -> u8 { match *self { - #( Self::#fn_name { .. } => #call_index, )* + #( #cfg_attrs Self::#fn_name { .. } => #call_index, )* Self::__Ignore(_, _) => unreachable!("__PhantomItem cannot be used."), } } fn get_call_indices() -> &'static [u8] { - &[ #( #call_index, )* ] + &[ #( #cfg_attrs #call_index, )* ] } } @@ -417,6 +430,7 @@ pub fn expand_call(def: &mut Def) -> proc_macro2::TokenStream { #frame_support::dispatch_context::run_in_context(|| { match self { #( + #cfg_attrs Self::#fn_name { #( #args_name_pattern, )* } => { #frame_support::__private::sp_tracing::enter_span!( #frame_support::__private::sp_tracing::trace_span!(stringify!(#fn_name)) diff --git a/substrate/frame/support/procedural/src/pallet/expand/error.rs b/substrate/frame/support/procedural/src/pallet/expand/error.rs index 9494b10f3419..72ca5eb6a38d 100644 --- a/substrate/frame/support/procedural/src/pallet/expand/error.rs +++ b/substrate/frame/support/procedural/src/pallet/expand/error.rs @@ -17,10 +17,14 @@ // limitations under the License. 
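The `cfg_attrs` plumbing in the call expansion above, together with the error-variant change below, means conditionally compiled dispatchables and error variants no longer break the generated enums. A minimal sketch of what is now supported (the `experimental` feature name is hypothetical):

#[pallet::call]
impl<T: Config> Pallet<T> {
    // The generated `Call::noop` variant, its call index and its entry in
    // `get_call_names`/`get_call_indices` are all gated behind the same `cfg`.
    #[cfg(feature = "experimental")]
    #[pallet::call_index(0)]
    #[pallet::weight(Weight::zero())]
    pub fn noop(origin: OriginFor<T>) -> DispatchResult {
        ensure_signed(origin)?;
        Ok(())
    }
}

#[pallet::error]
pub enum Error<T> {
    /// Only present (and only reported by `as_str`) when the feature is enabled.
    #[cfg(feature = "experimental")]
    ExperimentalOnly,
}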
use crate::{ - pallet::{parse::error::VariantField, Def}, + pallet::{ + parse::error::{VariantDef, VariantField}, + Def, + }, COUNTER, }; use frame_support_procedural_tools::get_doc_literals; +use quote::ToTokens; use syn::spanned::Spanned; /// @@ -68,20 +72,23 @@ pub fn expand_error(def: &mut Def) -> proc_macro2::TokenStream { ) ); - let as_str_matches = error.variants.iter().map(|(variant, field_ty, _)| { - let variant_str = variant.to_string(); - match field_ty { - Some(VariantField { is_named: true }) => { - quote::quote_spanned!(error.attr_span => Self::#variant { .. } => #variant_str,) - }, - Some(VariantField { is_named: false }) => { - quote::quote_spanned!(error.attr_span => Self::#variant(..) => #variant_str,) - }, - None => { - quote::quote_spanned!(error.attr_span => Self::#variant => #variant_str,) - }, - } - }); + let as_str_matches = error.variants.iter().map( + |VariantDef { ident: variant, field: field_ty, docs: _, cfg_attrs }| { + let variant_str = variant.to_string(); + let cfg_attrs = cfg_attrs.iter().map(|attr| attr.to_token_stream()); + match field_ty { + Some(VariantField { is_named: true }) => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant { .. } => #variant_str,) + }, + Some(VariantField { is_named: false }) => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant(..) => #variant_str,) + }, + None => { + quote::quote_spanned!(error.attr_span => #( #cfg_attrs )* Self::#variant => #variant_str,) + }, + } + }, + ); let error_item = { let item = &mut def.item.content.as_mut().expect("Checked by def parser").1[error.index]; diff --git a/substrate/frame/support/procedural/src/pallet/expand/mod.rs b/substrate/frame/support/procedural/src/pallet/expand/mod.rs index 0a2738fdbf84..e98e66dd7dfc 100644 --- a/substrate/frame/support/procedural/src/pallet/expand/mod.rs +++ b/substrate/frame/support/procedural/src/pallet/expand/mod.rs @@ -32,6 +32,7 @@ mod origin; mod pallet_struct; mod storage; mod store_trait; +mod tasks; mod tt_default_parts; mod type_value; mod validate_unsigned; @@ -61,6 +62,7 @@ pub fn expand(mut def: Def) -> proc_macro2::TokenStream { let pallet_struct = pallet_struct::expand_pallet_struct(&mut def); let config = config::expand_config(&mut def); let call = call::expand_call(&mut def); + let tasks = tasks::expand_tasks(&mut def); let error = error::expand_error(&mut def); let event = event::expand_event(&mut def); let storages = storage::expand_storages(&mut def); @@ -101,6 +103,7 @@ storage item. Otherwise, all storage items are listed among [*Type Definitions*] #pallet_struct #config #call + #tasks #error #event #storages diff --git a/substrate/frame/support/procedural/src/pallet/expand/tasks.rs b/substrate/frame/support/procedural/src/pallet/expand/tasks.rs new file mode 100644 index 000000000000..6697e5c822a3 --- /dev/null +++ b/substrate/frame/support/procedural/src/pallet/expand/tasks.rs @@ -0,0 +1,267 @@ +//! Contains logic for expanding task-related items. + +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Home of the expansion code for the Tasks API + +use crate::pallet::{parse::tasks::*, Def}; +use derive_syn_parse::Parse; +use inflector::Inflector; +use proc_macro2::TokenStream as TokenStream2; +use quote::{format_ident, quote, ToTokens}; +use syn::{parse_quote, spanned::Spanned, ItemEnum, ItemImpl}; + +impl TaskEnumDef { + /// Since we optionally allow users to manually specify a `#[pallet::task_enum]`, in the + /// event they _don't_ specify one (which is actually the most common behavior) we have to + /// generate one based on the existing [`TasksDef`]. This method performs that generation. + pub fn generate( + tasks: &TasksDef, + type_decl_bounded_generics: TokenStream2, + type_use_generics: TokenStream2, + ) -> Self { + let variants = if tasks.tasks_attr.is_some() { + tasks + .tasks + .iter() + .map(|task| { + let ident = &task.item.sig.ident; + let ident = + format_ident!("{}", ident.to_string().to_class_case(), span = ident.span()); + + let args = task.item.sig.inputs.iter().collect::>(); + + if args.is_empty() { + quote!(#ident) + } else { + quote!(#ident { + #(#args),* + }) + } + }) + .collect::>() + } else { + Vec::new() + }; + let mut task_enum_def: TaskEnumDef = parse_quote! { + /// Auto-generated enum that encapsulates all tasks defined by this pallet. + /// + /// Conceptually similar to the [`Call`] enum, but for tasks. This is only + /// generated if there are tasks present in this pallet. + #[pallet::task_enum] + pub enum Task<#type_decl_bounded_generics> { + #( + #variants, + )* + } + }; + task_enum_def.type_use_generics = type_use_generics; + task_enum_def + } +} + +impl ToTokens for TaskEnumDef { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let item_enum = &self.item_enum; + let ident = &item_enum.ident; + let vis = &item_enum.vis; + let attrs = &item_enum.attrs; + let generics = &item_enum.generics; + let variants = &item_enum.variants; + let scrate = &self.scrate; + let type_use_generics = &self.type_use_generics; + if self.attr.is_some() { + // `item_enum` is short-hand / generated enum + tokens.extend(quote! { + #(#attrs)* + #[derive( + #scrate::CloneNoBound, + #scrate::EqNoBound, + #scrate::PartialEqNoBound, + #scrate::pallet_prelude::Encode, + #scrate::pallet_prelude::Decode, + #scrate::pallet_prelude::TypeInfo, + )] + #[codec(encode_bound())] + #[codec(decode_bound())] + #[scale_info(skip_type_params(#type_use_generics))] + #vis enum #ident #generics { + #variants + #[doc(hidden)] + #[codec(skip)] + __Ignore(core::marker::PhantomData, #scrate::Never), + } + + impl core::fmt::Debug for #ident<#type_use_generics> { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + f.debug_struct(stringify!(#ident)).field("value", self).finish() + } + } + }); + } else { + // `item_enum` is a manually specified enum (no attribute) + tokens.extend(item_enum.to_token_stream()); + } + } +} + +/// Represents an already-expanded [`TasksDef`]. 
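For reference, `TaskEnumDef::generate` above turns each task function into a class-cased, struct-like variant of an auto-generated `Task` enum. A minimal sketch for a single task `fn add_number_into_total(i: u32)` (hypothetical name; the derives and the hidden, codec-skipped `__Ignore` variant added during expansion are abbreviated):

#[pallet::task_enum]
pub enum Task<T: Config> {
    AddNumberIntoTotal { i: u32 },
    // ...plus a hidden `__Ignore` variant and the usual derives.
}

In the `Task` trait impl emitted further below, `task_list` backs `iter`, `task_condition` backs `is_valid`, `task_weight` backs `weight`, `task_index` backs `task_index`, and the task function body becomes `run`.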
+#[derive(Parse)] +pub struct ExpandedTasksDef { + pub task_item_impl: ItemImpl, + pub task_trait_impl: ItemImpl, +} + +impl ToTokens for TasksDef { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let scrate = &self.scrate; + let enum_ident = syn::Ident::new("Task", self.enum_ident.span()); + let enum_arguments = &self.enum_arguments; + let enum_use = quote!(#enum_ident #enum_arguments); + + let task_fn_idents = self + .tasks + .iter() + .map(|task| { + format_ident!( + "{}", + &task.item.sig.ident.to_string().to_class_case(), + span = task.item.sig.ident.span() + ) + }) + .collect::>(); + let task_indices = self.tasks.iter().map(|task| &task.index_attr.meta.index); + let task_conditions = self.tasks.iter().map(|task| &task.condition_attr.meta.expr); + let task_weights = self.tasks.iter().map(|task| &task.weight_attr.meta.expr); + let task_iters = self.tasks.iter().map(|task| &task.list_attr.meta.expr); + + let task_fn_impls = self.tasks.iter().map(|task| { + let mut task_fn_impl = task.item.clone(); + task_fn_impl.attrs = vec![]; + task_fn_impl + }); + + let task_fn_names = self.tasks.iter().map(|task| &task.item.sig.ident); + let task_arg_names = self.tasks.iter().map(|task| &task.arg_names).collect::>(); + + let sp_std = quote!(#scrate::__private::sp_std); + let impl_generics = &self.item_impl.generics; + tokens.extend(quote! { + impl #impl_generics #enum_use + { + #(#task_fn_impls)* + } + + impl #impl_generics #scrate::traits::Task for #enum_use + { + type Enumeration = #sp_std::vec::IntoIter<#enum_use>; + + fn iter() -> Self::Enumeration { + let mut all_tasks = #sp_std::vec![]; + #(all_tasks + .extend(#task_iters.map(|(#(#task_arg_names),*)| #enum_ident::#task_fn_idents { #(#task_arg_names: #task_arg_names.clone()),* }) + .collect::<#sp_std::vec::Vec<_>>()); + )* + all_tasks.into_iter() + } + + fn task_index(&self) -> u32 { + match self.clone() { + #(#enum_ident::#task_fn_idents { .. } => #task_indices,)* + Task::__Ignore(_, _) => unreachable!(), + } + } + + fn is_valid(&self) -> bool { + match self.clone() { + #(#enum_ident::#task_fn_idents { #(#task_arg_names),* } => (#task_conditions)(#(#task_arg_names),* ),)* + Task::__Ignore(_, _) => unreachable!(), + } + } + + fn run(&self) -> Result<(), #scrate::pallet_prelude::DispatchError> { + match self.clone() { + #(#enum_ident::#task_fn_idents { #(#task_arg_names),* } => { + <#enum_use>::#task_fn_names(#( #task_arg_names, )* ) + },)* + Task::__Ignore(_, _) => unreachable!(), + } + } + + #[allow(unused_variables)] + fn weight(&self) -> #scrate::pallet_prelude::Weight { + match self.clone() { + #(#enum_ident::#task_fn_idents { #(#task_arg_names),* } => #task_weights,)* + Task::__Ignore(_, _) => unreachable!(), + } + } + } + }); + } +} + +/// Expands the [`TasksDef`] in the enclosing [`Def`], if present, and returns its tokens. +/// +/// This modifies the underlying [`Def`] in addition to returning any tokens that were added. +pub fn expand_tasks_impl(def: &mut Def) -> TokenStream2 { + let Some(tasks) = &mut def.tasks else { return quote!() }; + let ExpandedTasksDef { task_item_impl, task_trait_impl } = parse_quote!(#tasks); + quote! { + #task_item_impl + #task_trait_impl + } +} + +/// Represents a fully-expanded [`TaskEnumDef`]. +#[derive(Parse)] +pub struct ExpandedTaskEnum { + pub item_enum: ItemEnum, + pub debug_impl: ItemImpl, +} + +/// Modifies a [`Def`] to expand the underlying [`TaskEnumDef`] if present, and also returns +/// its tokens. 
A blank [`TokenStream2`] is returned if no [`TaskEnumDef`] has been generated +/// or defined. +pub fn expand_task_enum(def: &mut Def) -> TokenStream2 { + let Some(task_enum) = &mut def.task_enum else { return quote!() }; + let ExpandedTaskEnum { item_enum, debug_impl } = parse_quote!(#task_enum); + quote! { + #item_enum + #debug_impl + } +} + +/// Modifies a [`Def`] to expand the underlying [`TasksDef`] and also generate a +/// [`TaskEnumDef`] if applicable. The tokens for these items are returned if they are created. +pub fn expand_tasks(def: &mut Def) -> TokenStream2 { + if let Some(tasks_def) = &def.tasks { + if def.task_enum.is_none() { + def.task_enum = Some(TaskEnumDef::generate( + &tasks_def, + def.type_decl_bounded_generics(tasks_def.item_impl.span()), + def.type_use_generics(tasks_def.item_impl.span()), + )); + } + } + let tasks_extra_output = expand_tasks_impl(def); + let task_enum_extra_output = expand_task_enum(def); + quote! { + #tasks_extra_output + #task_enum_extra_output + } +} diff --git a/substrate/frame/support/procedural/src/pallet/expand/tt_default_parts.rs b/substrate/frame/support/procedural/src/pallet/expand/tt_default_parts.rs index bb66814da69d..f05959dd548d 100644 --- a/substrate/frame/support/procedural/src/pallet/expand/tt_default_parts.rs +++ b/substrate/frame/support/procedural/src/pallet/expand/tt_default_parts.rs @@ -32,6 +32,8 @@ pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { let call_part = def.call.as_ref().map(|_| quote::quote!(Call,)); + let task_part = def.task_enum.as_ref().map(|_| quote::quote!(Task,)); + let storage_part = (!def.storages.is_empty()).then(|| quote::quote!(Storage,)); let event_part = def.event.as_ref().map(|event| { @@ -100,7 +102,7 @@ pub fn expand_tt_default_parts(def: &mut Def) -> proc_macro2::TokenStream { tokens = [{ expanded::{ Pallet, #call_part #storage_part #event_part #error_part #origin_part #config_part - #inherent_part #validate_unsigned_part #freeze_reason_part + #inherent_part #validate_unsigned_part #freeze_reason_part #task_part #hold_reason_part #lock_id_part #slash_reason_part } }] diff --git a/substrate/frame/support/procedural/src/pallet/parse/call.rs b/substrate/frame/support/procedural/src/pallet/parse/call.rs index 423a8b22a1fb..ec269865c916 100644 --- a/substrate/frame/support/procedural/src/pallet/parse/call.rs +++ b/substrate/frame/support/procedural/src/pallet/parse/call.rs @@ -86,6 +86,8 @@ pub struct CallVariantDef { pub docs: Vec, /// Attributes annotated at the top of the dispatchable function. pub attrs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, /// The optional `feeless_if` attribute on the `pallet::call`. 
pub feeless_check: Option, } @@ -267,6 +269,7 @@ impl CallDef { return Err(syn::Error::new(method.sig.span(), msg)) } + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&method.attrs); let mut call_idx_attrs = vec![]; let mut weight_attrs = vec![]; let mut feeless_attrs = vec![]; @@ -287,8 +290,7 @@ impl CallDef { if weight_attrs.is_empty() && dev_mode { // inject a default O(1) weight when dev mode is enabled and no weight has // been specified on the call - let empty_weight: syn::Expr = syn::parse(quote::quote!(0).into()) - .expect("we are parsing a quoted string"); + let empty_weight: syn::Expr = syn::parse_quote!(0); weight_attrs.push(FunctionAttr::Weight(empty_weight)); } @@ -444,6 +446,7 @@ impl CallDef { args, docs, attrs: method.attrs.clone(), + cfg_attrs, feeless_check, }); } else { diff --git a/substrate/frame/support/procedural/src/pallet/parse/composite.rs b/substrate/frame/support/procedural/src/pallet/parse/composite.rs index b0d031d3efdd..1899a19663bd 100644 --- a/substrate/frame/support/procedural/src/pallet/parse/composite.rs +++ b/substrate/frame/support/procedural/src/pallet/parse/composite.rs @@ -27,11 +27,14 @@ pub mod keyword { syn::custom_keyword!(HoldReason); syn::custom_keyword!(LockId); syn::custom_keyword!(SlashReason); + syn::custom_keyword!(Task); + pub enum CompositeKeyword { FreezeReason(FreezeReason), HoldReason(HoldReason), LockId(LockId), SlashReason(SlashReason), + Task(Task), } impl ToTokens for CompositeKeyword { @@ -42,6 +45,7 @@ pub mod keyword { HoldReason(inner) => inner.to_tokens(tokens), LockId(inner) => inner.to_tokens(tokens), SlashReason(inner) => inner.to_tokens(tokens), + Task(inner) => inner.to_tokens(tokens), } } } @@ -57,6 +61,8 @@ pub mod keyword { Ok(Self::LockId(input.parse()?)) } else if lookahead.peek(SlashReason) { Ok(Self::SlashReason(input.parse()?)) + } else if lookahead.peek(Task) { + Ok(Self::Task(input.parse()?)) } else { Err(lookahead.error()) } @@ -72,6 +78,7 @@ pub mod keyword { match self { FreezeReason(_) => "FreezeReason", HoldReason(_) => "HoldReason", + Task(_) => "Task", LockId(_) => "LockId", SlashReason(_) => "SlashReason", } @@ -81,7 +88,7 @@ pub mod keyword { } pub struct CompositeDef { - /// The index of the HoldReason item in the pallet module. + /// The index of the CompositeDef item in the pallet module. pub index: usize, /// The composite keyword used (contains span). pub composite_keyword: keyword::CompositeKeyword, diff --git a/substrate/frame/support/procedural/src/pallet/parse/error.rs b/substrate/frame/support/procedural/src/pallet/parse/error.rs index e54b1f084b7c..5e1f998171e7 100644 --- a/substrate/frame/support/procedural/src/pallet/parse/error.rs +++ b/substrate/frame/support/procedural/src/pallet/parse/error.rs @@ -26,19 +26,31 @@ mod keyword { syn::custom_keyword!(Error); } -/// Records information about the error enum variants. +/// Records information about the error enum variant field. pub struct VariantField { /// Whether or not the field is named, i.e. whether it is a tuple variant or struct variant. pub is_named: bool, } +/// Records information about the error enum variants. +pub struct VariantDef { + /// The variant ident. + pub ident: syn::Ident, + /// The variant field, if any. + pub field: Option, + /// The variant doc literals. + pub docs: Vec, + /// The `cfg` attributes. + pub cfg_attrs: Vec, +} + /// This checks error declaration as a enum declaration with only variants without fields nor /// discriminant. pub struct ErrorDef { /// The index of error item in pallet module. 
pub index: usize, - /// Variants ident, optional field and doc literals (ordered as declaration order) - pub variants: Vec<(syn::Ident, Option, Vec)>, + /// Variant definitions. + pub variants: Vec, /// A set of usage of instance, must be check for consistency with trait. pub instances: Vec, /// The keyword error used (contains span). @@ -88,8 +100,14 @@ impl ErrorDef { let span = variant.discriminant.as_ref().unwrap().0.span(); return Err(syn::Error::new(span, msg)) } + let cfg_attrs: Vec = helper::get_item_cfg_attrs(&variant.attrs); - Ok((variant.ident.clone(), field_ty, get_doc_literals(&variant.attrs))) + Ok(VariantDef { + ident: variant.ident.clone(), + field: field_ty, + docs: get_doc_literals(&variant.attrs), + cfg_attrs, + }) }) .collect::>()?; diff --git a/substrate/frame/support/procedural/src/pallet/parse/mod.rs b/substrate/frame/support/procedural/src/pallet/parse/mod.rs index 5a22d1a72fc2..3734b5c72de3 100644 --- a/substrate/frame/support/procedural/src/pallet/parse/mod.rs +++ b/substrate/frame/support/procedural/src/pallet/parse/mod.rs @@ -34,11 +34,16 @@ pub mod inherent; pub mod origin; pub mod pallet_struct; pub mod storage; +pub mod tasks; pub mod type_value; pub mod validate_unsigned; +#[cfg(test)] +pub mod tests; + use composite::{keyword::CompositeKeyword, CompositeDef}; use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use quote::ToTokens; use syn::spanned::Spanned; /// Parsed definition of a pallet. @@ -50,6 +55,8 @@ pub struct Def { pub pallet_struct: pallet_struct::PalletStructDef, pub hooks: Option, pub call: Option, + pub tasks: Option, + pub task_enum: Option, pub storages: Vec, pub error: Option, pub event: Option, @@ -85,6 +92,8 @@ impl Def { let mut pallet_struct = None; let mut hooks = None; let mut call = None; + let mut tasks = None; + let mut task_enum = None; let mut error = None; let mut event = None; let mut origin = None; @@ -119,6 +128,32 @@ impl Def { }, Some(PalletAttr::RuntimeCall(cw, span)) if call.is_none() => call = Some(call::CallDef::try_from(span, index, item, dev_mode, cw)?), + Some(PalletAttr::Tasks(_)) if tasks.is_none() => { + let item_tokens = item.to_token_stream(); + // `TasksDef::parse` needs to know if attr was provided so we artificially + // re-insert it here + tasks = Some(syn::parse2::(quote::quote! { + #[pallet::tasks_experimental] + #item_tokens + })?); + + // replace item with a no-op because it will be handled by the expansion of tasks + *item = syn::Item::Verbatim(quote::quote!()); + } + Some(PalletAttr::TaskCondition(span)) => return Err(syn::Error::new( + span, + "`#[pallet::task_condition]` can only be used on items within an `impl` statement." + )), + Some(PalletAttr::TaskIndex(span)) => return Err(syn::Error::new( + span, + "`#[pallet::task_index]` can only be used on items within an `impl` statement." + )), + Some(PalletAttr::TaskList(span)) => return Err(syn::Error::new( + span, + "`#[pallet::task_list]` can only be used on items within an `impl` statement." 
+ )), + Some(PalletAttr::RuntimeTask(_)) if task_enum.is_none() => + task_enum = Some(syn::parse2::(item.to_token_stream())?), Some(PalletAttr::Error(span)) if error.is_none() => error = Some(error::ErrorDef::try_from(span, index, item)?), Some(PalletAttr::RuntimeEvent(span)) if event.is_none() => @@ -191,6 +226,8 @@ impl Def { return Err(syn::Error::new(item_span, msg)) } + Self::resolve_tasks(&item_span, &mut tasks, &mut task_enum, items)?; + let def = Def { item, config: config @@ -199,6 +236,8 @@ impl Def { .ok_or_else(|| syn::Error::new(item_span, "Missing `#[pallet::pallet]`"))?, hooks, call, + tasks, + task_enum, extra_constants, genesis_config, genesis_build, @@ -221,6 +260,99 @@ impl Def { Ok(def) } + /// Performs extra logic checks necessary for the `#[pallet::tasks_experimental]` feature. + fn resolve_tasks( + item_span: &proc_macro2::Span, + tasks: &mut Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + // fallback for manual (without macros) definition of tasks impl + Self::resolve_manual_tasks_impl(tasks, task_enum, items)?; + + // fallback for manual (without macros) definition of task enum + Self::resolve_manual_task_enum(tasks, task_enum, items)?; + + // ensure that if `task_enum` is specified, `tasks` is also specified + match (&task_enum, &tasks) { + (Some(_), None) => + return Err(syn::Error::new( + *item_span, + "Missing `#[pallet::tasks_experimental]` impl", + )), + (None, Some(tasks)) => + if tasks.tasks_attr.is_none() { + return Err(syn::Error::new( + tasks.item_impl.impl_token.span(), + "A `#[pallet::tasks_experimental]` attribute must be attached to your `Task` impl if the \ + task enum has been omitted", + )) + } else { + }, + _ => (), + } + + Ok(()) + } + + /// Tries to locate task enum based on the tasks impl target if attribute is not specified + /// but impl is present. If one is found, `task_enum` is set appropriately. + fn resolve_manual_task_enum( + tasks: &Option, + task_enum: &mut Option, + items: &mut Vec, + ) -> syn::Result<()> { + let (None, Some(tasks)) = (&task_enum, &tasks) else { return Ok(()) }; + let syn::Type::Path(type_path) = &*tasks.item_impl.self_ty else { return Ok(()) }; + let type_path = type_path.path.segments.iter().collect::>(); + let (Some(seg), None) = (type_path.get(0), type_path.get(1)) else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Enum(item_enum) = item else { continue }; + if item_enum.ident == seg.ident { + result = Some(syn::parse2::(item_enum.to_token_stream())?); + // replace item with a no-op because it will be handled by the expansion of + // `task_enum`. 
We use a no-op instead of simply removing it from the vec + // so that any indices collected by `Def::try_from` remain accurate + *item = syn::Item::Verbatim(quote::quote!()); + break + } + } + *task_enum = result; + Ok(()) + } + + /// Tries to locate a manual tasks impl (an impl impling a trait whose last path segment is + /// `Task`) in the event that one has not been found already via the attribute macro + pub fn resolve_manual_tasks_impl( + tasks: &mut Option, + task_enum: &Option, + items: &Vec, + ) -> syn::Result<()> { + let None = tasks else { return Ok(()) }; + let mut result = None; + for item in items { + let syn::Item::Impl(item_impl) = item else { continue }; + let Some((_, path, _)) = &item_impl.trait_ else { continue }; + let Some(trait_last_seg) = path.segments.last() else { continue }; + let syn::Type::Path(target_path) = &*item_impl.self_ty else { continue }; + let target_path = target_path.path.segments.iter().collect::>(); + let (Some(target_ident), None) = (target_path.get(0), target_path.get(1)) else { + continue + }; + let matches_task_enum = match task_enum { + Some(task_enum) => task_enum.item_enum.ident == target_ident.ident, + None => true, + }; + if trait_last_seg.ident == "Task" && matches_task_enum { + result = Some(syn::parse2::(item_impl.to_token_stream())?); + break + } + } + *tasks = result; + Ok(()) + } + /// Check that usage of trait `Event` is consistent with the definition, i.e. it is declared /// and trait defines type RuntimeEvent, or not declared and no trait associated type. fn check_event_usage(&self) -> syn::Result<()> { @@ -409,6 +541,11 @@ impl GenericKind { mod keyword { syn::custom_keyword!(origin); syn::custom_keyword!(call); + syn::custom_keyword!(tasks_experimental); + syn::custom_keyword!(task_enum); + syn::custom_keyword!(task_list); + syn::custom_keyword!(task_condition); + syn::custom_keyword!(task_index); syn::custom_keyword!(weight); syn::custom_keyword!(event); syn::custom_keyword!(config); @@ -473,6 +610,11 @@ enum PalletAttr { /// instead of the zero weight. So to say: it works together with `dev_mode`. 
RuntimeCall(Option, proc_macro2::Span), Error(proc_macro2::Span), + Tasks(proc_macro2::Span), + TaskList(proc_macro2::Span), + TaskCondition(proc_macro2::Span), + TaskIndex(proc_macro2::Span), + RuntimeTask(proc_macro2::Span), RuntimeEvent(proc_macro2::Span), RuntimeOrigin(proc_macro2::Span), Inherent(proc_macro2::Span), @@ -491,8 +633,13 @@ impl PalletAttr { Self::Config(span, _) => *span, Self::Pallet(span) => *span, Self::Hooks(span) => *span, - Self::RuntimeCall(_, span) => *span, + Self::Tasks(span) => *span, + Self::TaskCondition(span) => *span, + Self::TaskIndex(span) => *span, + Self::TaskList(span) => *span, Self::Error(span) => *span, + Self::RuntimeTask(span) => *span, + Self::RuntimeCall(_, span) => *span, Self::RuntimeEvent(span) => *span, Self::RuntimeOrigin(span) => *span, Self::Inherent(span) => *span, @@ -536,6 +683,16 @@ impl syn::parse::Parse for PalletAttr { false => Some(InheritedCallWeightAttr::parse(&content)?), }; Ok(PalletAttr::RuntimeCall(attr, span)) + } else if lookahead.peek(keyword::tasks_experimental) { + Ok(PalletAttr::Tasks(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_enum) { + Ok(PalletAttr::RuntimeTask(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_condition) { + Ok(PalletAttr::TaskCondition(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_index) { + Ok(PalletAttr::TaskIndex(content.parse::()?.span())) + } else if lookahead.peek(keyword::task_list) { + Ok(PalletAttr::TaskList(content.parse::()?.span())) } else if lookahead.peek(keyword::error) { Ok(PalletAttr::Error(content.parse::()?.span())) } else if lookahead.peek(keyword::event) { diff --git a/substrate/frame/support/procedural/src/pallet/parse/tasks.rs b/substrate/frame/support/procedural/src/pallet/parse/tasks.rs new file mode 100644 index 000000000000..6405bb415a6f --- /dev/null +++ b/substrate/frame/support/procedural/src/pallet/parse/tasks.rs @@ -0,0 +1,968 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! 
Home of the parsing code for the Tasks API + +use std::collections::HashSet; + +#[cfg(test)] +use crate::assert_parse_error_matches; + +#[cfg(test)] +use crate::pallet::parse::tests::simulate_manifest_dir; + +use derive_syn_parse::Parse; +use frame_support_procedural_tools::generate_access_from_frame_or_crate; +use proc_macro2::TokenStream as TokenStream2; +use quote::{quote, ToTokens}; +use syn::{ + parse::ParseStream, + parse2, + spanned::Spanned, + token::{Bracket, Paren, PathSep, Pound}, + Attribute, Error, Expr, Ident, ImplItem, ImplItemFn, ItemEnum, ItemImpl, LitInt, Path, + PathArguments, Result, TypePath, +}; + +pub mod keywords { + use syn::custom_keyword; + + custom_keyword!(tasks_experimental); + custom_keyword!(task_enum); + custom_keyword!(task_list); + custom_keyword!(task_condition); + custom_keyword!(task_index); + custom_keyword!(task_weight); + custom_keyword!(pallet); +} + +/// Represents the `#[pallet::tasks_experimental]` attribute and its attached item. Also includes +/// metadata about the linked [`TaskEnumDef`] if applicable. +#[derive(Clone, Debug)] +pub struct TasksDef { + pub tasks_attr: Option, + pub tasks: Vec, + pub item_impl: ItemImpl, + /// Path to `frame_support` + pub scrate: Path, + pub enum_ident: Ident, + pub enum_arguments: PathArguments, +} + +impl syn::parse::Parse for TasksDef { + fn parse(input: ParseStream) -> Result { + let item_impl: ItemImpl = input.parse()?; + let (tasks_attrs, normal_attrs) = partition_tasks_attrs(&item_impl); + let tasks_attr = match tasks_attrs.first() { + Some(attr) => Some(parse2::(attr.to_token_stream())?), + None => None, + }; + if let Some(extra_tasks_attr) = tasks_attrs.get(1) { + return Err(Error::new( + extra_tasks_attr.span(), + "unexpected extra `#[pallet::tasks_experimental]` attribute", + )) + } + let tasks: Vec = if tasks_attr.is_some() { + item_impl + .items + .clone() + .into_iter() + .filter(|impl_item| matches!(impl_item, ImplItem::Fn(_))) + .map(|item| parse2::(item.to_token_stream())) + .collect::>()? + } else { + Vec::new() + }; + let mut task_indices = HashSet::::new(); + for task in tasks.iter() { + let task_index = &task.index_attr.meta.index; + if !task_indices.insert(task_index.clone()) { + return Err(Error::new( + task_index.span(), + format!("duplicate task index `{}`", task_index), + )) + } + } + let mut item_impl = item_impl; + item_impl.attrs = normal_attrs; + + // we require the path on the impl to be a TypePath + let enum_path = parse2::(item_impl.self_ty.to_token_stream())?; + let segments = enum_path.path.segments.iter().collect::>(); + let (Some(last_seg), None) = (segments.get(0), segments.get(1)) else { + return Err(Error::new( + enum_path.span(), + "if specified manually, the task enum must be defined locally in this \ + pallet and cannot be a re-export", + )) + }; + let enum_ident = last_seg.ident.clone(); + let enum_arguments = last_seg.arguments.clone(); + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + Ok(TasksDef { tasks_attr, item_impl, tasks, scrate, enum_ident, enum_arguments }) + } +} + +/// Parsing for a `#[pallet::tasks_experimental]` attr. +pub type PalletTasksAttr = PalletTaskAttr; + +/// Parsing for any of the attributes that can be used within a `#[pallet::tasks_experimental]` +/// [`ItemImpl`]. +pub type TaskAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_index]` attr. 
+pub type TaskIndexAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_condition]` attr. +pub type TaskConditionAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_list]` attr. +pub type TaskListAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet::task_weight]` attr. +pub type TaskWeightAttr = PalletTaskAttr; + +/// Parsing for a `#[pallet:task_enum]` attr. +pub type PalletTaskEnumAttr = PalletTaskAttr; + +/// Parsing for a manually-specified (or auto-generated) task enum, optionally including the +/// attached `#[pallet::task_enum]` attribute. +#[derive(Clone, Debug)] +pub struct TaskEnumDef { + pub attr: Option, + pub item_enum: ItemEnum, + pub scrate: Path, + pub type_use_generics: TokenStream2, +} + +impl syn::parse::Parse for TaskEnumDef { + fn parse(input: ParseStream) -> Result { + let mut item_enum = input.parse::()?; + let attr = extract_pallet_attr(&mut item_enum)?; + let attr = match attr { + Some(attr) => Some(parse2(attr)?), + None => None, + }; + + // We do this here because it would be improper to do something fallible like this at + // the expansion phase. Fallible stuff should happen during parsing. + let scrate = generate_access_from_frame_or_crate("frame-support")?; + + let type_use_generics = quote!(T); + + Ok(TaskEnumDef { attr, item_enum, scrate, type_use_generics }) + } +} + +/// Represents an individual tasks within a [`TasksDef`]. +#[derive(Debug, Clone)] +pub struct TaskDef { + pub index_attr: TaskIndexAttr, + pub condition_attr: TaskConditionAttr, + pub list_attr: TaskListAttr, + pub weight_attr: TaskWeightAttr, + pub normal_attrs: Vec, + pub item: ImplItemFn, + pub arg_names: Vec, +} + +impl syn::parse::Parse for TaskDef { + fn parse(input: ParseStream) -> Result { + let item = input.parse::()?; + // we only want to activate TaskAttrType parsing errors for tasks-related attributes, + // so we filter them here + let (task_attrs, normal_attrs) = partition_task_attrs(&item); + + let task_attrs: Vec = task_attrs + .into_iter() + .map(|attr| parse2(attr.to_token_stream())) + .collect::>()?; + + let Some(index_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_index(..)]` attribute", + )) + }; + + let Some(condition_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_condition(..)]` attribute", + )) + }; + + let Some(list_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_list(..)]` attribute", + )) + }; + + let Some(weight_attr) = task_attrs + .iter() + .find(|attr| matches!(attr.meta, TaskAttrMeta::TaskWeight(_))) + .cloned() + else { + return Err(Error::new( + item.sig.ident.span(), + "missing `#[pallet::task_weight(..)]` attribute", + )) + }; + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskCondition(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_condition(..)]` attribute", + )) + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskList(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra 
`#[pallet::task_list(..)]` attribute", + )) + } + + if let Some(duplicate) = task_attrs + .iter() + .filter(|attr| matches!(attr.meta, TaskAttrMeta::TaskIndex(_))) + .collect::>() + .get(1) + { + return Err(Error::new( + duplicate.span(), + "unexpected extra `#[pallet::task_index(..)]` attribute", + )) + } + + let mut arg_names = vec![]; + for input in item.sig.inputs.iter() { + match input { + syn::FnArg::Typed(pat_type) => match &*pat_type.pat { + syn::Pat::Ident(ident) => arg_names.push(ident.ident.clone()), + _ => return Err(Error::new(input.span(), "unexpected pattern type")), + }, + _ => return Err(Error::new(input.span(), "unexpected function argument type")), + } + } + + let index_attr = index_attr.try_into().expect("we check the type above; QED"); + let condition_attr = condition_attr.try_into().expect("we check the type above; QED"); + let list_attr = list_attr.try_into().expect("we check the type above; QED"); + let weight_attr = weight_attr.try_into().expect("we check the type above; QED"); + + Ok(TaskDef { + index_attr, + condition_attr, + list_attr, + weight_attr, + normal_attrs, + item, + arg_names, + }) + } +} + +/// The contents of a [`TasksDef`]-related attribute. +#[derive(Parse, Debug, Clone)] +pub enum TaskAttrMeta { + #[peek(keywords::task_list, name = "#[pallet::task_list(..)]")] + TaskList(TaskListAttrMeta), + #[peek(keywords::task_index, name = "#[pallet::task_index(..)")] + TaskIndex(TaskIndexAttrMeta), + #[peek(keywords::task_condition, name = "#[pallet::task_condition(..)")] + TaskCondition(TaskConditionAttrMeta), + #[peek(keywords::task_weight, name = "#[pallet::task_weight(..)")] + TaskWeight(TaskWeightAttrMeta), +} + +/// The contents of a `#[pallet::task_list]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskListAttrMeta { + pub task_list: keywords::task_list, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, +} + +/// The contents of a `#[pallet::task_index]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskIndexAttrMeta { + pub task_index: keywords::task_index, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub index: LitInt, +} + +/// The contents of a `#[pallet::task_condition]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskConditionAttrMeta { + pub task_condition: keywords::task_condition, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, +} + +/// The contents of a `#[pallet::task_weight]` attribute. +#[derive(Parse, Debug, Clone)] +pub struct TaskWeightAttrMeta { + pub task_weight: keywords::task_weight, + #[paren] + _paren: Paren, + #[inside(_paren)] + pub expr: Expr, +} + +/// The contents of a `#[pallet::task]` attribute. 
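+///
+/// This is the generic wrapper shared by all task-related attributes: it parses the leading
+/// `#[pallet::` tokens and delegates whatever follows the path separator to `T`. For instance,
+/// with `T` = [`TaskAttrMeta`] (i.e. [`TaskAttr`]), something like the following can be parsed
+/// (a sketch based on the tests in this module):
+///
+/// ```ignore
+/// let attr: TaskAttr = parse2(quote!(#[pallet::task_weight(0)]))?;
+/// ```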
+#[derive(Parse, Debug, Clone)] +pub struct PalletTaskAttr { + pub pound: Pound, + #[bracket] + _bracket: Bracket, + #[inside(_bracket)] + pub pallet: keywords::pallet, + #[inside(_bracket)] + pub colons: PathSep, + #[inside(_bracket)] + pub meta: T, +} + +impl ToTokens for TaskListAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_list = self.task_list; + let expr = &self.expr; + tokens.extend(quote!(#task_list(#expr))); + } +} + +impl ToTokens for TaskConditionAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_condition = self.task_condition; + let expr = &self.expr; + tokens.extend(quote!(#task_condition(#expr))); + } +} + +impl ToTokens for TaskWeightAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_weight = self.task_weight; + let expr = &self.expr; + tokens.extend(quote!(#task_weight(#expr))); + } +} + +impl ToTokens for TaskIndexAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let task_index = self.task_index; + let index = &self.index; + tokens.extend(quote!(#task_index(#index))) + } +} + +impl ToTokens for TaskAttrMeta { + fn to_tokens(&self, tokens: &mut TokenStream2) { + match self { + TaskAttrMeta::TaskList(list) => tokens.extend(list.to_token_stream()), + TaskAttrMeta::TaskIndex(index) => tokens.extend(index.to_token_stream()), + TaskAttrMeta::TaskCondition(condition) => tokens.extend(condition.to_token_stream()), + TaskAttrMeta::TaskWeight(weight) => tokens.extend(weight.to_token_stream()), + } + } +} + +impl ToTokens for PalletTaskAttr { + fn to_tokens(&self, tokens: &mut TokenStream2) { + let pound = self.pound; + let pallet = self.pallet; + let colons = self.colons; + let meta = &self.meta; + tokens.extend(quote!(#pound[#pallet #colons #meta])); + } +} + +impl TryFrom> for TaskIndexAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskIndex(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskIndexAttr`", value.meta), + )), + } + } +} + +impl TryFrom> for TaskConditionAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskCondition(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskConditionAttr`", value.meta), + )), + } + } +} + +impl TryFrom> for TaskWeightAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskWeight(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskWeightAttr`", value.meta), + )), + } + } +} + +impl TryFrom> for TaskListAttr { + type Error = syn::Error; + + fn try_from(value: PalletTaskAttr) -> Result { + let pound = value.pound; + let pallet = value.pallet; + let colons = value.colons; + match value.meta { + TaskAttrMeta::TaskList(meta) => parse2(quote!(#pound[#pallet #colons #meta])), + _ => + return Err(Error::new( + value.span(), + format!("`{:?}` cannot be converted to a `TaskListAttr`", value.meta), + )), + } + 
} +} + +fn extract_pallet_attr(item_enum: &mut ItemEnum) -> Result> { + let mut duplicate = None; + let mut attr = None; + item_enum.attrs = item_enum + .attrs + .iter() + .filter(|found_attr| { + let segs = found_attr + .path() + .segments + .iter() + .map(|seg| seg.ident.clone()) + .collect::>(); + let (Some(seg1), Some(_), None) = (segs.get(0), segs.get(1), segs.get(2)) else { + return true + }; + if seg1 != "pallet" { + return true + } + if attr.is_some() { + duplicate = Some(found_attr.span()); + } + attr = Some(found_attr.to_token_stream()); + false + }) + .cloned() + .collect(); + if let Some(span) = duplicate { + return Err(Error::new(span, "only one `#[pallet::_]` attribute is supported on this item")) + } + Ok(attr) +} + +fn partition_tasks_attrs(item_impl: &ItemImpl) -> (Vec, Vec) { + item_impl.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix), None) = + (path_segs.next(), path_segs.next(), path_segs.next()) + else { + return false + }; + prefix.ident == "pallet" && suffix.ident == "tasks_experimental" + }) +} + +fn partition_task_attrs(item: &ImplItemFn) -> (Vec, Vec) { + item.attrs.clone().into_iter().partition(|attr| { + let mut path_segs = attr.path().segments.iter(); + let (Some(prefix), Some(suffix)) = (path_segs.next(), path_segs.next()) else { + return false + }; + // N.B: the `PartialEq` impl between `Ident` and `&str` is more efficient than + // parsing and makes no stack or heap allocations + prefix.ident == "pallet" && + (suffix.ident == "tasks_experimental" || + suffix.ident == "task_list" || + suffix.ident == "task_condition" || + suffix.ident == "task_weight" || + suffix.ident == "task_index") + }) +} + +#[test] +fn test_parse_task_list_() { + parse2::(quote!(#[pallet::task_list(Something::iter())])).unwrap(); + parse2::(quote!(#[pallet::task_list(Numbers::::iter_keys())])).unwrap(); + parse2::(quote!(#[pallet::task_list(iter())])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list()])), + "expected an expression" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_list])), + "expected parentheses" + ); +} + +#[test] +fn test_parse_task_index() { + parse2::(quote!(#[pallet::task_index(3)])).unwrap(); + parse2::(quote!(#[pallet::task_index(0)])).unwrap(); + parse2::(quote!(#[pallet::task_index(17)])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index])), + "expected parentheses" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index("hey")])), + "expected integer literal" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::task_index(0.3)])), + "expected integer literal" + ); +} + +#[test] +fn test_parse_task_condition() { + parse2::(quote!(#[pallet::task_condition(|x| x.is_some())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|_x| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(|| some_expr())])).unwrap(); + parse2::(quote!(#[pallet::task_condition(some_expr())])).unwrap(); +} + +#[test] +fn test_parse_tasks_attr() { + parse2::(quote!(#[pallet::tasks_experimental])).unwrap(); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::taskss])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pallet::tasks_])), + "expected `tasks_experimental`" + ); + assert_parse_error_matches!( + parse2::(quote!(#[pal::tasks])), + "expected `pallet`" + ); + assert_parse_error_matches!( + 
parse2::(quote!(#[pallet::tasks_experimental()])), + "unexpected token" + ); +} + +#[test] +fn test_parse_tasks_def_basic() { + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Add a pair of numbers into the totals and remove them. + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn add_number_into_total(i: u32) -> DispatchResult { + let v = Numbers::::take(i).ok_or(Error::::NotFound)?; + Total::::mutate(|(total_keys, total_values)| { + *total_keys += i; + *total_values += v; + }); + Ok(()) + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 1); + }); +} + +#[test] +fn test_parse_tasks_def_basic_increment_decrement() { + simulate_manifest_dir("../../examples/basic", || { + let parsed = parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + /// Get the value and check if it can be incremented + #[pallet::task_index(0)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value < 255 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn increment() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value >= 255 { + Err(Error::::ValueOverflow.into()) + } else { + let new_val = value.checked_add(1).ok_or(Error::::ValueOverflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Incremented { new_val }); + Ok(()) + } + } + + // Get the value and check if it can be decremented + #[pallet::task_index(1)] + #[pallet::task_condition(|| { + let value = Value::::get().unwrap(); + value > 0 + })] + #[pallet::task_list(Vec::>::new())] + #[pallet::task_weight(0)] + fn decrement() -> DispatchResult { + let value = Value::::get().unwrap_or_default(); + if value == 0 { + Err(Error::::ValueUnderflow.into()) + } else { + let new_val = value.checked_sub(1).ok_or(Error::::ValueUnderflow)?; + Value::::put(new_val); + Pallet::::deposit_event(Event::Decremented { new_val }); + Ok(()) + } + } + } + }) + .unwrap(); + assert_eq!(parsed.tasks.len(), 2); + }); +} + +#[test] +fn test_parse_tasks_def_duplicate_index() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + pub fn bar(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + "duplicate task index `0`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_list() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_list\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_condition() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! 
{ + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_index() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_index\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_missing_task_weight() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_index(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"missing `#\[pallet::task_weight\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_unexpected_extra_task_list_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_weight(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_list(SomethingElse::iter())] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_list\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_unexpected_extra_task_condition_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_condition(|i| i % 4 == 0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_condition\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_unexpected_extra_task_index_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + impl, I: 'static> Pallet { + #[pallet::task_condition(|i| i % 2 == 0)] + #[pallet::task_index(0)] + #[pallet::task_index(0)] + #[pallet::task_list(Something::iter())] + #[pallet::task_weight(0)] + pub fn foo(i: u32) -> DispatchResult { + Ok(()) + } + } + }), + r"unexpected extra `#\[pallet::task_index\(\.\.\)\]`" + ); + }); +} + +#[test] +fn test_parse_tasks_def_extra_tasks_attribute() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::tasks_experimental] + #[pallet::tasks_experimental] + impl, I: 'static> Pallet {} + }), + r"unexpected extra `#\[pallet::tasks_experimental\]` attribute" + ); + }); +} + +#[test] +fn test_parse_task_enum_def_basic() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + #[pallet::task_enum] + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_non_task_name() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! 
{ + #[pallet::task_enum] + pub enum Something { + Foo + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_missing_attr_allowed() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Task { + Increment, + Decrement, + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_missing_attr_alternate_name_allowed() { + simulate_manifest_dir("../../examples/basic", || { + parse2::(quote! { + pub enum Foo { + Red, + } + }) + .unwrap(); + }); +} + +#[test] +fn test_parse_task_enum_def_wrong_attr() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::something] + pub enum Task { + Increment, + Decrement, + } + }), + "expected `task_enum`" + ); + }); +} + +#[test] +fn test_parse_task_enum_def_wrong_item() { + simulate_manifest_dir("../../examples/basic", || { + assert_parse_error_matches!( + parse2::(quote! { + #[pallet::task_enum] + pub struct Something; + }), + "expected `enum`" + ); + }); +} diff --git a/substrate/frame/support/procedural/src/pallet/parse/tests/mod.rs b/substrate/frame/support/procedural/src/pallet/parse/tests/mod.rs new file mode 100644 index 000000000000..a3661f3076d9 --- /dev/null +++ b/substrate/frame/support/procedural/src/pallet/parse/tests/mod.rs @@ -0,0 +1,264 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use std::{panic, sync::Mutex}; +use syn::parse_quote; + +#[doc(hidden)] +pub mod __private { + pub use regex; +} + +/// Allows you to assert that the input expression resolves to an error whose string +/// representation matches the specified regex literal. +/// +/// ## Example: +/// +/// ``` +/// use super::tasks::*; +/// +/// assert_parse_error_matches!( +/// parse2::(quote! { +/// #[pallet::task_enum] +/// pub struct Something; +/// }), +/// "expected `enum`" +/// ); +/// ``` +/// +/// More complex regular expressions are also possible (anything that could pass as a regex for +/// use with the [`regex`] crate.): +/// +/// ```ignore +/// assert_parse_error_matches!( +/// parse2::(quote! { +/// #[pallet::tasks_experimental] +/// impl, I: 'static> Pallet { +/// #[pallet::task_condition(|i| i % 2 == 0)] +/// #[pallet::task_index(0)] +/// pub fn foo(i: u32) -> DispatchResult { +/// Ok(()) +/// } +/// } +/// }), +/// r"missing `#\[pallet::task_list\(\.\.\)\]`" +/// ); +/// ``` +/// +/// Although this is primarily intended to be used with parsing errors, this macro is general +/// enough that it will work with any error with a reasonable [`core::fmt::Display`] impl. +#[macro_export] +macro_rules! 
assert_parse_error_matches { + ($expr:expr, $reg:literal) => { + match $expr { + Ok(_) => panic!("Expected an `Error(..)`, but got Ok(..)"), + Err(e) => { + let error_message = e.to_string(); + let re = $crate::pallet::parse::tests::__private::regex::Regex::new($reg) + .expect("Invalid regex pattern"); + assert!( + re.is_match(&error_message), + "Error message \"{}\" does not match the pattern \"{}\"", + error_message, + $reg + ); + }, + } + }; +} + +/// Allows you to assert that an entire pallet parses successfully. A custom syntax is used for +/// specifying arguments so please pay attention to the docs below. +/// +/// The general syntax is: +/// +/// ```ignore +/// assert_pallet_parses! { +/// #[manifest_dir("../../examples/basic")] +/// #[frame_support::pallet] +/// pub mod pallet { +/// #[pallet::config] +/// pub trait Config: frame_system::Config {} +/// +/// #[pallet::pallet] +/// pub struct Pallet(_); +/// } +/// }; +/// ``` +/// +/// The `#[manifest_dir(..)]` attribute _must_ be specified as the _first_ attribute on the +/// pallet module, and should reference the relative (to your current directory) path of a +/// directory containing containing the `Cargo.toml` of a valid pallet. Typically you will only +/// ever need to use the `examples/basic` pallet, but sometimes it might be advantageous to +/// specify a different one that has additional dependencies. +/// +/// The reason this must be specified is that our underlying parsing of pallets depends on +/// reaching out into the file system to look for particular `Cargo.toml` dependencies via the +/// [`generate_access_from_frame_or_crate`] method, so to simulate this properly in a proc +/// macro crate, we need to temporarily convince this function that we are running from the +/// directory of a valid pallet. +#[macro_export] +macro_rules! assert_pallet_parses { + ( + #[manifest_dir($manifest_dir:literal)] + $($tokens:tt)* + ) => { + { + let mut pallet: Option<$crate::pallet::parse::Def> = None; + $crate::pallet::parse::tests::simulate_manifest_dir($manifest_dir, core::panic::AssertUnwindSafe(|| { + pallet = Some($crate::pallet::parse::Def::try_from(syn::parse_quote! { + $($tokens)* + }, false).unwrap()); + })); + pallet.unwrap() + } + } +} + +/// Similar to [`assert_pallet_parses`], except this instead expects the pallet not to parse, +/// and allows you to specify a regex matching the expected parse error. +/// +/// This is identical syntactically to [`assert_pallet_parses`] in every way except there is a +/// second attribute that must be specified immediately after `#[manifest_dir(..)]` which is +/// `#[error_regex(..)]` which should contain a string/regex literal designed to match what you +/// consider to be the correct parsing error we should see when we try to parse this particular +/// pallet. +/// +/// ## Example: +/// +/// ``` +/// assert_pallet_parse_error! { +/// #[manifest_dir("../../examples/basic")] +/// #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] +/// #[frame_support::pallet] +/// pub mod pallet { +/// #[pallet::config] +/// pub trait Config: frame_system::Config {} +/// } +/// } +/// ``` +#[macro_export] +macro_rules! assert_pallet_parse_error { + ( + #[manifest_dir($manifest_dir:literal)] + #[error_regex($reg:literal)] + $($tokens:tt)* + ) => { + $crate::pallet::parse::tests::simulate_manifest_dir($manifest_dir, || { + $crate::assert_parse_error_matches!( + $crate::pallet::parse::Def::try_from( + parse_quote! 
{ + $($tokens)* + }, + false + ), + $reg + ); + }); + } +} + +/// Safely runs the specified `closure` while simulating an alternative `CARGO_MANIFEST_DIR`, +/// restoring `CARGO_MANIFEST_DIR` to its original value upon completion regardless of whether +/// the closure panics. +/// +/// This is useful in tests of `Def::try_from` and other pallet-related methods that internally +/// make use of [`generate_access_from_frame_or_crate`], which is sensitive to entries in the +/// "current" `Cargo.toml` files. +/// +/// This function uses a [`Mutex`] to avoid a race condition created when multiple tests try to +/// modify and then restore the `CARGO_MANIFEST_DIR` ENV var in an overlapping way. +pub fn simulate_manifest_dir, F: FnOnce() + std::panic::UnwindSafe>( + path: P, + closure: F, +) { + use std::{env::*, path::*}; + + /// Ensures that only one thread can modify/restore the `CARGO_MANIFEST_DIR` ENV var at a time, + /// avoiding a race condition because `cargo test` runs tests in parallel. + /// + /// Although this forces all tests that use [`simulate_manifest_dir`] to run sequentially with + /// respect to each other, this is still several orders of magnitude faster than using UI + /// tests, even if they are run in parallel. + static MANIFEST_DIR_LOCK: Mutex<()> = Mutex::new(()); + + // avoid race condition when swapping out `CARGO_MANIFEST_DIR` + let guard = MANIFEST_DIR_LOCK.lock().unwrap(); + + // obtain the current/original `CARGO_MANIFEST_DIR` + let orig = PathBuf::from( + var("CARGO_MANIFEST_DIR").expect("failed to read ENV var `CARGO_MANIFEST_DIR`"), + ); + + // set `CARGO_MANIFEST_DIR` to the provided path, relative to current working dir + set_var("CARGO_MANIFEST_DIR", orig.join(path.as_ref())); + + // safely run closure catching any panics + let result = panic::catch_unwind(closure); + + // restore original `CARGO_MANIFEST_DIR` before unwinding + set_var("CARGO_MANIFEST_DIR", &orig); + + // unlock the mutex so we don't poison it if there is a panic + drop(guard); + + // unwind any panics originally encountered when running closure + result.unwrap(); +} + +mod tasks; + +#[test] +fn test_parse_minimal_pallet() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_missing_pallet() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::pallet\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::config] + pub trait Config: frame_system::Config {} + } + } +} + +#[test] +fn test_parse_pallet_missing_config() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::config\\]`")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::pallet] + pub struct Pallet(_); + } + } +} diff --git a/substrate/frame/support/procedural/src/pallet/parse/tests/tasks.rs b/substrate/frame/support/procedural/src/pallet/parse/tests/tasks.rs new file mode 100644 index 000000000000..9f1436284047 --- /dev/null +++ b/substrate/frame/support/procedural/src/pallet/parse/tests/tasks.rs @@ -0,0 +1,240 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use syn::parse_quote; + +#[test] +fn test_parse_pallet_with_task_enum_missing_impl() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("Missing `\\#\\[pallet::tasks_experimental\\]` impl")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum Task { + Something, + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_with_task_enum_wrong_attribute() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("expected one of")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::wrong_attribute] + pub enum Task { + Something, + } + + #[pallet::task_list] + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_missing_task_enum() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::tasks_experimental] + #[cfg(test)] // aha, this means it's being eaten + impl frame_support::traits::Task for Task + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_task_list_in_wrong_place() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex("can only be used on items within an `impl` statement.")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::task_list] + pub fn something() { + println!("hey"); + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_manual_tasks_impl_without_manual_tasks_enum() { + assert_pallet_parse_error! { + #[manifest_dir("../../examples/basic")] + #[error_regex(".*attribute must be attached to your.*")] + #[frame_support::pallet] + pub mod pallet { + + impl frame_support::traits::Task for Task + where + T: TypeInfo, + { + type Enumeration = sp_std::vec::IntoIter>; + + fn iter() -> Self::Enumeration { + sp_std::vec![Task::increment, Task::decrement].into_iter() + } + } + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + } +} + +#[test] +fn test_parse_pallet_manual_task_enum_non_manual_impl() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_non_manual_task_enum_manual_impl() { + assert_pallet_parses! 
{ + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + #[pallet::task_enum] + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_manual_task_enum_manual_impl() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum MyCustomTaskEnum { + Something, + } + + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} + +#[test] +fn test_parse_pallet_manual_task_enum_mismatch_ident() { + assert_pallet_parses! { + #[manifest_dir("../../examples/basic")] + #[frame_support::pallet] + pub mod pallet { + pub enum WrongIdent { + Something, + } + + #[pallet::tasks_experimental] + impl frame_support::traits::Task for MyCustomTaskEnum + where + T: TypeInfo, + {} + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(_); + } + }; +} diff --git a/substrate/frame/support/procedural/tools/Cargo.toml b/substrate/frame/support/procedural/tools/Cargo.toml index bc5cc7fdda5c..6d1b9507797d 100644 --- a/substrate/frame/support/procedural/tools/Cargo.toml +++ b/substrate/frame/support/procedural/tools/Cargo.toml @@ -8,12 +8,15 @@ homepage = "https://substrate.io" repository.workspace = true description = "Proc macro helpers for procedural macros" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" proc-macro2 = "1.0.56" quote = "1.0.28" -syn = { version = "2.0.39", features = ["extra-traits", "full", "visit"] } +syn = { version = "2.0.48", features = ["extra-traits", "full", "visit"] } frame-support-procedural-tools-derive = { path = "derive" } diff --git a/substrate/frame/support/procedural/tools/derive/Cargo.toml b/substrate/frame/support/procedural/tools/derive/Cargo.toml index 6040449df656..d2d34d477146 100644 --- a/substrate/frame/support/procedural/tools/derive/Cargo.toml +++ b/substrate/frame/support/procedural/tools/derive/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Use to derive parsing for parsing struct." 
+[lints]
+workspace = true
+
 [package.metadata.docs.rs]
 targets = ["x86_64-unknown-linux-gnu"]
 
@@ -17,4 +20,4 @@ proc-macro = true
 [dependencies]
 proc-macro2 = "1.0.56"
 quote = { version = "1.0.28", features = ["proc-macro"] }
-syn = { version = "2.0.39", features = ["extra-traits", "full", "parsing", "proc-macro"] }
+syn = { version = "2.0.48", features = ["extra-traits", "full", "parsing", "proc-macro"] }
diff --git a/substrate/frame/support/src/dispatch.rs b/substrate/frame/support/src/dispatch.rs
index d857c0a00670..14e60f8f2efe 100644
--- a/substrate/frame/support/src/dispatch.rs
+++ b/substrate/frame/support/src/dispatch.rs
@@ -698,6 +698,7 @@ mod weight_tests {
 		type BaseCallFilter: crate::traits::Contains;
 		type RuntimeOrigin;
 		type RuntimeCall;
+		type RuntimeTask;
 		type PalletInfo: crate::traits::PalletInfo;
 		type DbWeight: Get;
 	}
@@ -794,6 +795,7 @@ mod weight_tests {
 		type BaseCallFilter = crate::traits::Everything;
 		type RuntimeOrigin = RuntimeOrigin;
 		type RuntimeCall = RuntimeCall;
+		type RuntimeTask = RuntimeTask;
 		type DbWeight = DbWeight;
 		type PalletInfo = PalletInfo;
 	}
diff --git a/substrate/frame/support/src/lib.rs b/substrate/frame/support/src/lib.rs
index bd57a82d2d25..0ab736878fde 100644
--- a/substrate/frame/support/src/lib.rs
+++ b/substrate/frame/support/src/lib.rs
@@ -834,7 +834,7 @@ pub mod pallet_prelude {
 		},
 		traits::{
 			BuildGenesisConfig, ConstU32, EnsureOrigin, Get, GetDefault, GetStorageVersion, Hooks,
-			IsType, PalletInfoAccess, StorageInfoTrait, StorageVersion, TypedGet,
+			IsType, PalletInfoAccess, StorageInfoTrait, StorageVersion, Task, TypedGet,
 		},
 		Blake2_128, Blake2_128Concat, Blake2_256, CloneNoBound, DebugNoBound, EqNoBound, Identity,
 		PartialEqNoBound, RuntimeDebugNoBound, Twox128, Twox256, Twox64Concat,
@@ -2659,6 +2659,61 @@ pub mod pallet_macros {
 	/// }
 	/// ```
 	pub use frame_support_procedural::storage;
+	/// This attribute is attached to a function inside an `impl` block annotated with
+	/// [`pallet::tasks_experimental`](`tasks_experimental`) to define the conditions for a
+	/// given work item to be valid.
+	///
+	/// It takes a closure as input, which is then used to define the condition. The closure
+	/// should have the same signature as the function it is attached to, except that it should
+	/// return a `bool` instead.
+	pub use frame_support_procedural::task_condition;
+	/// This attribute is attached to a function inside an `impl` block annotated with
+	/// [`pallet::tasks_experimental`](`tasks_experimental`) to define the index of a given
+	/// work item.
+	///
+	/// It takes an integer literal as input, which is then used to define the index. This
+	/// index should be unique for each function in the `impl` block.
+	pub use frame_support_procedural::task_index;
+	/// This attribute is attached to a function inside an `impl` block annotated with
+	/// [`pallet::tasks_experimental`](`tasks_experimental`) to define an iterator over the
+	/// available work items for a task.
+	///
+	/// It takes an iterator as input that yields a tuple with the same types as the function
+	/// arguments.
+	pub use frame_support_procedural::task_list;
+	/// This attribute is attached to a function inside an `impl` block annotated with
+	/// [`pallet::tasks_experimental`](`tasks_experimental`) to define the weight of a given
+	/// work item.
+	///
+	/// It takes a closure as input, which should return a `Weight` value.
+	pub use frame_support_procedural::task_weight;
+	/// Allows you to define some service work that can be recognized by a script or an
+	/// off-chain worker.
+	/// Such a script can then create and submit all such work items at any given time.
+	///
+	/// These work items are defined as instances of the [`Task`](frame_support::traits::Task)
+	/// trait. [`pallet::tasks_experimental`](`tasks_experimental`), when attached to an `impl`
+	/// block inside a pallet, will generate an enum `Task` whose variants are mapped to
+	/// functions inside this `impl` block.
+	///
+	/// Each such function must have the following set of attributes:
+	///
+	/// * [`pallet::task_list`](`task_list`)
+	/// * [`pallet::task_condition`](`task_condition`)
+	/// * [`pallet::task_weight`](`task_weight`)
+	/// * [`pallet::task_index`](`task_index`)
+	///
+	/// All such tasks are then aggregated into a `RuntimeTask` by
+	/// [`construct_runtime`](frame_support::construct_runtime).
+	///
+	/// Finally, the `RuntimeTask` can be used by a script or off-chain worker to create and
+	/// submit such tasks via an extrinsic defined in `frame_system` called `do_task`.
+	///
+	/// ## Example
+	#[doc = docify::embed!("src/tests/tasks.rs", tasks_example)]
+	/// Now, this can be executed as follows:
+	#[doc = docify::embed!("src/tests/tasks.rs", tasks_work)]
+	pub use frame_support_procedural::tasks_experimental;
 }
 
 #[deprecated(note = "Will be removed after July 2023; Use `sp_runtime::traits` directly instead.")]
diff --git a/substrate/frame/support/src/storage/generator/mod.rs b/substrate/frame/support/src/storage/generator/mod.rs
index e6ea48201ab3..6e68ad479b96 100644
--- a/substrate/frame/support/src/storage/generator/mod.rs
+++ b/substrate/frame/support/src/storage/generator/mod.rs
@@ -64,6 +64,7 @@ mod tests {
 		type BaseCallFilter: crate::traits::Contains;
 		type RuntimeOrigin;
 		type RuntimeCall;
+		type RuntimeTask;
 		type PalletInfo: crate::traits::PalletInfo;
 		type DbWeight: Get;
 	}
@@ -130,6 +131,7 @@ mod tests {
 		type BaseCallFilter = crate::traits::Everything;
 		type RuntimeOrigin = RuntimeOrigin;
 		type RuntimeCall = RuntimeCall;
+		type RuntimeTask = RuntimeTask;
 		type PalletInfo = PalletInfo;
 		type DbWeight = ();
 	}
diff --git a/substrate/frame/support/src/storage/unhashed.rs b/substrate/frame/support/src/storage/unhashed.rs
index 0f87e4ed40a2..083098db9523 100644
--- a/substrate/frame/support/src/storage/unhashed.rs
+++ b/substrate/frame/support/src/storage/unhashed.rs
@@ -28,8 +28,8 @@ pub fn get(key: &[u8]) -> Option {
 	// TODO #3700: error should be handleable.
 	log::error!(
 		target: "runtime::storage",
-		"Corrupted state at `{:?}: {:?}`",
-		key,
+		"Corrupted state at `{}`: {:?}",
+		array_bytes::bytes2hex("0x", key),
 		e,
 	);
 	None
diff --git a/substrate/frame/support/src/tests/mod.rs b/substrate/frame/support/src/tests/mod.rs
index 4be7c8abcdbf..19cf982cab44 100644
--- a/substrate/frame/support/src/tests/mod.rs
+++ b/substrate/frame/support/src/tests/mod.rs
@@ -17,6 +17,7 @@
 // limitations under the License.
use super::*; +use frame_support_procedural::import_section; use sp_io::{MultiRemovalResults, TestExternalities}; use sp_metadata_ir::{ PalletStorageMetadataIR, StorageEntryMetadataIR, StorageEntryModifierIR, StorageEntryTypeIR, @@ -28,13 +29,15 @@ pub use self::frame_system::{pallet_prelude::*, Config, Pallet}; mod inject_runtime_type; mod storage_alias; +mod tasks; +#[import_section(tasks::tasks_example)] #[pallet] pub mod frame_system { #[allow(unused)] use super::{frame_system, frame_system::pallet_prelude::*}; pub use crate::dispatch::RawOrigin; - use crate::pallet_prelude::*; + use crate::{pallet_prelude::*, traits::tasks::Task as TaskTrait}; pub mod config_preludes { use super::{inject_runtime_type, DefaultConfig}; @@ -50,6 +53,8 @@ pub mod frame_system { type RuntimeCall = (); #[inject_runtime_type] type PalletInfo = (); + #[inject_runtime_type] + type RuntimeTask = (); type DbWeight = (); } } @@ -70,6 +75,8 @@ pub mod frame_system { #[pallet::no_default_bounds] type RuntimeCall; #[pallet::no_default_bounds] + type RuntimeTask: crate::traits::tasks::Task; + #[pallet::no_default_bounds] type PalletInfo: crate::traits::PalletInfo; type DbWeight: Get; } @@ -78,13 +85,33 @@ pub mod frame_system { pub enum Error { /// Required by construct_runtime CallFiltered, + /// Used in tasks example. + NotFound, + /// The specified [`Task`] is not valid. + InvalidTask, + /// The specified [`Task`] failed during execution. + FailedTask, } #[pallet::origin] pub type Origin = RawOrigin<::AccountId>; #[pallet::call] - impl Pallet {} + impl Pallet { + #[pallet::call_index(0)] + #[pallet::weight(task.weight())] + pub fn do_task(_origin: OriginFor, task: T::RuntimeTask) -> DispatchResultWithPostInfo { + if !task.is_valid() { + return Err(Error::::InvalidTask.into()) + } + + if let Err(_err) = task.run() { + return Err(Error::::FailedTask.into()) + } + + Ok(().into()) + } + } #[pallet::storage] pub type Data = StorageMap<_, Twox64Concat, u32, u64, ValueQuery>; @@ -170,6 +197,14 @@ pub mod frame_system { } } + /// Some running total. + #[pallet::storage] + pub type Total = StorageValue<_, (u32, u32), ValueQuery>; + + /// Numbers to be added into the total. + #[pallet::storage] + pub type Numbers = StorageMap<_, Twox64Concat, u32, u32, OptionQuery>; + pub mod pallet_prelude { pub type OriginFor = ::RuntimeOrigin; @@ -623,6 +658,24 @@ fn expected_metadata() -> PalletStorageMetadataIR { default: vec![0], docs: vec![], }, + StorageEntryMetadataIR { + name: "Total", + modifier: StorageEntryModifierIR::Default, + ty: StorageEntryTypeIR::Plain(scale_info::meta_type::<(u32, u32)>()), + default: vec![0, 0, 0, 0, 0, 0, 0, 0], + docs: vec![" Some running total."], + }, + StorageEntryMetadataIR { + name: "Numbers", + modifier: StorageEntryModifierIR::Optional, + ty: StorageEntryTypeIR::Map { + hashers: vec![StorageHasherIR::Twox64Concat], + key: scale_info::meta_type::(), + value: scale_info::meta_type::(), + }, + default: vec![0], + docs: vec![" Numbers to be added into the total."], + }, ], } } diff --git a/substrate/frame/support/src/tests/tasks.rs b/substrate/frame/support/src/tests/tasks.rs new file mode 100644 index 000000000000..2774c1300757 --- /dev/null +++ b/substrate/frame/support/src/tests/tasks.rs @@ -0,0 +1,62 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +use crate::{ + assert_ok, + tests::{ + frame_system::{Numbers, Total}, + new_test_ext, Runtime, RuntimeOrigin, RuntimeTask, System, + }, +}; +use frame_support_procedural::pallet_section; + +#[pallet_section] +mod tasks_example { + #[docify::export(tasks_example)] + #[pallet::tasks_experimental] + impl Pallet { + /// Add a pair of numbers into the totals and remove them. + #[pallet::task_list(Numbers::::iter_keys())] + #[pallet::task_condition(|i| Numbers::::contains_key(i))] + #[pallet::task_weight(0.into())] + #[pallet::task_index(0)] + pub fn add_number_into_total(i: u32) -> DispatchResult { + let v = Numbers::::take(i).ok_or(Error::::NotFound)?; + Total::::mutate(|(total_keys, total_values)| { + *total_keys += i; + *total_values += v; + }); + Ok(()) + } + } +} + +#[docify::export] +#[test] +fn tasks_work() { + new_test_ext().execute_with(|| { + Numbers::::insert(0, 1); + + let task = RuntimeTask::System(super::frame_system::Task::::AddNumberIntoTotal { + i: 0u32, + }); + + assert_ok!(System::do_task(RuntimeOrigin::signed(1), task.clone(),)); + assert_eq!(Numbers::::get(0), None); + assert_eq!(Total::::get(), (0, 1)); + }); +} diff --git a/substrate/frame/support/src/traits.rs b/substrate/frame/support/src/traits.rs index c3ee5e8b9c42..60b59faa2fd5 100644 --- a/substrate/frame/support/src/traits.rs +++ b/substrate/frame/support/src/traits.rs @@ -112,6 +112,9 @@ pub use safe_mode::{SafeMode, SafeModeError, SafeModeNotify}; mod tx_pause; pub use tx_pause::{TransactionPause, TransactionPauseError}; +pub mod tasks; +pub use tasks::Task; + #[cfg(feature = "try-runtime")] mod try_runtime; #[cfg(feature = "try-runtime")] diff --git a/substrate/frame/support/src/traits/misc.rs b/substrate/frame/support/src/traits/misc.rs index 6fbb44416b4c..dbe2276b39ca 100644 --- a/substrate/frame/support/src/traits/misc.rs +++ b/substrate/frame/support/src/traits/misc.rs @@ -1153,17 +1153,26 @@ impl PreimageRecipient for () { fn unnote_preimage(_: &Hash) {} } -/// Trait for creating an asset account with a deposit taken from a designated depositor specified -/// by the client. +/// Trait for touching/creating an asset account with a deposit taken from a designated depositor +/// specified by the client. +/// +/// Ensures that transfers to the touched account will succeed without being denied by the account +/// creation requirements. For example, it is useful for the account creation of non-sufficient +/// assets when its system account may not have the free consumer reference required for it. If +/// there is no risk of failing to meet those requirements, the touch operation can be a no-op, as +/// is common for native assets. pub trait AccountTouch { /// The type for currency units of the deposit. type Balance; - /// The deposit amount of a native currency required for creating an account of the `asset`. + /// The deposit amount of a native currency required for touching an account of the `asset`. fn deposit_required(asset: AssetId) -> Self::Balance; + /// Check if an account for a given asset should be touched to meet the existence requirements. 
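+	///
+	/// Implementations for which touching is a no-op (e.g. a native asset, as described above)
+	/// can simply return `false` here. A caller would then typically pair this check with
+	/// [`touch`](AccountTouch::touch), for example (an illustrative sketch, where `Assets`
+	/// stands for any implementer of this trait):
+	///
+	/// ```ignore
+	/// if Assets::should_touch(asset.clone(), &who) {
+	/// 	Assets::touch(asset, &who, &depositor)?;
+	/// }
+	/// ```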
+	fn should_touch(asset: AssetId, who: &AccountId) -> bool;
+
 	/// Create an account for `who` of the `asset` with a deposit taken from the `depositor`.
-	fn touch(asset: AssetId, who: AccountId, depositor: AccountId) -> DispatchResult;
+	fn touch(asset: AssetId, who: &AccountId, depositor: &AccountId) -> DispatchResult;
 }
 
 #[cfg(test)]
diff --git a/substrate/frame/support/src/traits/tasks.rs b/substrate/frame/support/src/traits/tasks.rs
new file mode 100644
index 000000000000..24f3430cf50b
--- /dev/null
+++ b/substrate/frame/support/src/traits/tasks.rs
@@ -0,0 +1,87 @@
+// This file is part of Substrate.
+
+// Copyright (C) Parity Technologies (UK) Ltd.
+// SPDX-License-Identifier: Apache-2.0
+
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// 	http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//! Contains the [`Task`] trait, which defines a general-purpose way for defining and executing
+//! service work, and supporting types.
+
+use codec::FullCodec;
+use scale_info::TypeInfo;
+use sp_runtime::DispatchError;
+use sp_std::{fmt::Debug, iter::Iterator, vec, vec::IntoIter};
+use sp_weights::Weight;
+
+/// Contains re-exports of all the supporting types for the [`Task`] trait. Used in the macro
+/// expansion of `RuntimeTask`.
+#[doc(hidden)]
+pub mod __private {
+	pub use codec::FullCodec;
+	pub use scale_info::TypeInfo;
+	pub use sp_runtime::DispatchError;
+	pub use sp_std::{fmt::Debug, iter::Iterator, vec, vec::IntoIter};
+	pub use sp_weights::Weight;
+}
+
+/// A general-purpose trait which defines a type of service work (i.e., work to be performed by
+/// an off-chain worker) including methods for enumerating, validating, indexing, and running
+/// tasks of this type.
+pub trait Task: Sized + FullCodec + TypeInfo + Clone + Debug + PartialEq + Eq {
+	/// An [`Iterator`] over tasks of this type used as the return type for [`iter`](Self::iter).
+	type Enumeration: Iterator<Item = Self>;
+
+	/// Inspects the pallet's state and enumerates tasks of this type.
+	fn iter() -> Self::Enumeration;
+
+	/// Checks if a particular instance of this `Task` variant is a valid piece of work.
+	fn is_valid(&self) -> bool;
+
+	/// Performs the work for this particular `Task` variant.
+	fn run(&self) -> Result<(), DispatchError>;
+
+	/// Returns the weight of executing this `Task`.
+	fn weight(&self) -> Weight;
+
+	/// A unique value representing this `Task` within the current pallet. Analogous to
+	/// `call_index`, but for tasks.
+	///
+	/// This value should be unique within the current pallet and can overlap with task indices
+	/// in other pallets.
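+	///
+	/// For a task generated from a function annotated with `#[pallet::task_index(0)]`, this is
+	/// expected to return `0`.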
+ fn task_index(&self) -> u32; +} + +impl Task for () { + type Enumeration = IntoIter; + + fn iter() -> Self::Enumeration { + vec![].into_iter() + } + + fn is_valid(&self) -> bool { + true + } + + fn run(&self) -> Result<(), DispatchError> { + Ok(()) + } + + fn weight(&self) -> Weight { + Weight::default() + } + + fn task_index(&self) -> u32 { + 0 + } +} diff --git a/substrate/frame/support/src/traits/tokens/fungible/imbalance.rs b/substrate/frame/support/src/traits/tokens/fungible/imbalance.rs index d76c5f6bbb79..4c53812748e8 100644 --- a/substrate/frame/support/src/traits/tokens/fungible/imbalance.rs +++ b/substrate/frame/support/src/traits/tokens/fungible/imbalance.rs @@ -21,8 +21,9 @@ use super::{super::Imbalance as ImbalanceT, Balanced, *}; use crate::traits::{ + fungibles, misc::{SameOrOther, TryDrop}, - tokens::Balance, + tokens::{AssetId, Balance}, }; use frame_support_procedural::{EqNoBound, PartialEqNoBound, RuntimeDebugNoBound}; use sp_runtime::traits::Zero; @@ -88,6 +89,11 @@ impl, OppositeOnDrop: HandleImbalance pub(crate) fn new(amount: B) -> Self { Self { amount, _phantom: PhantomData } } + + /// Forget the imbalance without invoking the on-drop handler. + pub(crate) fn forget(imbalance: Self) { + sp_std::mem::forget(imbalance); + } } impl, OppositeOnDrop: HandleImbalanceDrop> @@ -150,6 +156,27 @@ impl, OppositeOnDrop: HandleImbalance } } +/// Converts a `fungibles` `imbalance` instance to an instance of a `fungible` imbalance type. +/// +/// This function facilitates imbalance conversions within the implementations of +/// [`frame_support::traits::fungibles::UnionOf`], [`frame_support::traits::fungible::UnionOf`], and +/// [`frame_support::traits::fungible::ItemOf`] adapters. It is intended only for internal use +/// within the current crate. +pub(crate) fn from_fungibles< + A: AssetId, + B: Balance, + OnDropIn: fungibles::HandleImbalanceDrop, + OppositeIn: fungibles::HandleImbalanceDrop, + OnDropOut: HandleImbalanceDrop, + OppositeOut: HandleImbalanceDrop, +>( + imbalance: fungibles::Imbalance, +) -> Imbalance { + let new = Imbalance::new(imbalance.peek()); + fungibles::Imbalance::forget(imbalance); + new +} + /// Imbalance implying that the total_issuance value is less than the sum of all account balances. 
pub type Debt = Imbalance< >::Balance, diff --git a/substrate/frame/support/src/traits/tokens/fungible/mod.rs b/substrate/frame/support/src/traits/tokens/fungible/mod.rs index 86b7e67a3f56..c13a8a01661e 100644 --- a/substrate/frame/support/src/traits/tokens/fungible/mod.rs +++ b/substrate/frame/support/src/traits/tokens/fungible/mod.rs @@ -39,8 +39,9 @@ pub mod conformance_tests; pub mod hold; -mod imbalance; +pub(crate) mod imbalance; mod regular; +mod union_of; use codec::{Decode, Encode, MaxEncodedLen}; use frame_support_procedural::{CloneNoBound, EqNoBound, PartialEqNoBound, RuntimeDebugNoBound}; @@ -59,6 +60,7 @@ pub use regular::{ use sp_arithmetic::traits::Zero; use sp_core::Get; use sp_runtime::{traits::Convert, DispatchError}; +pub use union_of::{NativeFromLeft, NativeOrWithId, UnionOf}; use crate::{ ensure, diff --git a/substrate/frame/support/src/traits/tokens/fungible/regular.rs b/substrate/frame/support/src/traits/tokens/fungible/regular.rs index 4bc2c080f206..795081be5598 100644 --- a/substrate/frame/support/src/traits/tokens/fungible/regular.rs +++ b/substrate/frame/support/src/traits/tokens/fungible/regular.rs @@ -182,9 +182,16 @@ pub trait Unbalanced: Inspect { ) -> Result { let old_balance = Self::balance(who); let free = Self::reducible_balance(who, preservation, force); - if let BestEffort = precision { - amount = amount.min(free); + match precision { + BestEffort => { + amount = amount.min(free); + }, + Exact => + if free < amount { + return Err(TokenError::FundsUnavailable.into()) + }, } + let new_balance = old_balance.checked_sub(&amount).ok_or(TokenError::FundsUnavailable)?; if let Some(dust) = Self::write_balance(who, new_balance)? { Self::handle_dust(Dust(dust)); @@ -515,3 +522,47 @@ pub trait Balanced: Inspect + Unbalanced { fn done_deposit(_who: &AccountId, _amount: Self::Balance) {} fn done_withdraw(_who: &AccountId, _amount: Self::Balance) {} } + +/// Dummy implementation of [`Inspect`] +#[cfg(feature = "std")] +impl Inspect for () { + type Balance = u32; + fn total_issuance() -> Self::Balance { + 0 + } + fn minimum_balance() -> Self::Balance { + 0 + } + fn total_balance(_: &AccountId) -> Self::Balance { + 0 + } + fn balance(_: &AccountId) -> Self::Balance { + 0 + } + fn reducible_balance(_: &AccountId, _: Preservation, _: Fortitude) -> Self::Balance { + 0 + } + fn can_deposit(_: &AccountId, _: Self::Balance, _: Provenance) -> DepositConsequence { + DepositConsequence::Success + } + fn can_withdraw(_: &AccountId, _: Self::Balance) -> WithdrawConsequence { + WithdrawConsequence::Success + } +} + +/// Dummy implementation of [`Unbalanced`] +#[cfg(feature = "std")] +impl Unbalanced for () { + fn handle_dust(_: Dust) {} + fn write_balance( + _: &AccountId, + _: Self::Balance, + ) -> Result, DispatchError> { + Ok(None) + } + fn set_total_issuance(_: Self::Balance) {} +} + +/// Dummy implementation of [`Mutate`] +#[cfg(feature = "std")] +impl Mutate for () {} diff --git a/substrate/frame/support/src/traits/tokens/fungible/union_of.rs b/substrate/frame/support/src/traits/tokens/fungible/union_of.rs new file mode 100644 index 000000000000..58572f4ef8ad --- /dev/null +++ b/substrate/frame/support/src/traits/tokens/fungible/union_of.rs @@ -0,0 +1,855 @@ +// This file is part of Substrate. + +// Copyright (Criterion) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Types to combine some `fungible::*` and `fungibles::*` implementations into one union +//! `fungibles::*` implementation. + +use codec::{Decode, Encode, MaxEncodedLen}; +use frame_support::traits::{ + fungible::imbalance, + tokens::{ + fungible, fungibles, AssetId, DepositConsequence, Fortitude, Precision, Preservation, + Provenance, Restriction, WithdrawConsequence, + }, + AccountTouch, +}; +use scale_info::TypeInfo; +use sp_runtime::{ + traits::Convert, + DispatchError, DispatchResult, Either, + Either::{Left, Right}, + RuntimeDebug, +}; +use sp_std::cmp::Ordering; + +/// The `NativeOrWithId` enum classifies an asset as either `Native` to the current chain or as an +/// asset with a specific ID. +#[derive(Decode, Encode, Default, MaxEncodedLen, TypeInfo, Clone, RuntimeDebug, Eq)] +pub enum NativeOrWithId +where + AssetId: Ord, +{ + /// Represents the native asset of the current chain. + /// + /// E.g., DOT for the Polkadot Asset Hub. + #[default] + Native, + /// Represents an asset identified by its underlying `AssetId`. + WithId(AssetId), +} +impl From for NativeOrWithId { + fn from(asset: AssetId) -> Self { + Self::WithId(asset) + } +} +impl Ord for NativeOrWithId { + fn cmp(&self, other: &Self) -> Ordering { + match (self, other) { + (Self::Native, Self::Native) => Ordering::Equal, + (Self::Native, Self::WithId(_)) => Ordering::Less, + (Self::WithId(_), Self::Native) => Ordering::Greater, + (Self::WithId(id1), Self::WithId(id2)) => ::cmp(id1, id2), + } + } +} +impl PartialOrd for NativeOrWithId { + fn partial_cmp(&self, other: &Self) -> Option { + Some(::cmp(self, other)) + } +} +impl PartialEq for NativeOrWithId { + fn eq(&self, other: &Self) -> bool { + self.cmp(other) == Ordering::Equal + } +} + +/// Criterion for [`UnionOf`] where a set for [`NativeOrWithId::Native`] asset located from the left +/// and for [`NativeOrWithId::WithId`] from the right. +pub struct NativeFromLeft; +impl Convert, Either<(), AssetId>> for NativeFromLeft { + fn convert(asset: NativeOrWithId) -> Either<(), AssetId> { + match asset { + NativeOrWithId::Native => Either::Left(()), + NativeOrWithId::WithId(id) => Either::Right(id), + } + } +} + +/// Type to combine some `fungible::*` and `fungibles::*` implementations into one union +/// `fungibles::*` implementation. +/// +/// ### Parameters: +/// - `Left` is `fungible::*` implementation that is incorporated into the resulting union. +/// - `Right` is `fungibles::*` implementation that is incorporated into the resulting union. +/// - `Criterion` determines whether the `AssetKind` belongs to the `Left` or `Right` set. +/// - `AssetKind` is a superset type encompassing asset kinds from `Left` and `Right` sets. +/// - `AccountId` is an account identifier type. 
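// Editorial note (not part of this patch): a minimal usage sketch of the adapter
// documented above. It assumes a runtime that already has a `Balances` pallet
// (a `fungible::*` implementation) and an `Assets` pallet keyed by `u32`
// (a `fungibles::*` implementation); the alias name is hypothetical.
//
// type NativeAndAssets = UnionOf<
//     Balances,            // `Left`: the chain's native currency (fungible)
//     Assets,              // `Right`: the id-keyed asset set (fungibles)
//     NativeFromLeft,      // `Criterion`: `Native` -> Left, `WithId(id)` -> Right
//     NativeOrWithId<u32>, // `AssetKind`: the superset asset identifier
//     AccountId,
// >;
//
// With such an alias, `<NativeAndAssets as fungibles::Inspect<_>>::balance(NativeOrWithId::Native, &who)`
// reads the native balance, while `balance(NativeOrWithId::WithId(1), &who)` reads asset `1`,
// mirroring the `Criterion::convert` dispatch in the impls that follow.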
+pub struct UnionOf( + sp_std::marker::PhantomData<(Left, Right, Criterion, AssetKind, AccountId)>, +); + +impl< + Left: fungible::Inspect, + Right: fungibles::Inspect, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Inspect for UnionOf +{ + type AssetId = AssetKind; + type Balance = Left::Balance; + + fn total_issuance(asset: Self::AssetId) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::total_issuance(), + Right(a) => >::total_issuance(a), + } + } + fn active_issuance(asset: Self::AssetId) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::active_issuance(), + Right(a) => >::active_issuance(a), + } + } + fn minimum_balance(asset: Self::AssetId) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::minimum_balance(), + Right(a) => >::minimum_balance(a), + } + } + fn balance(asset: Self::AssetId, who: &AccountId) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::balance(who), + Right(a) => >::balance(a, who), + } + } + fn total_balance(asset: Self::AssetId, who: &AccountId) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::total_balance(who), + Right(a) => >::total_balance(a, who), + } + } + fn reducible_balance( + asset: Self::AssetId, + who: &AccountId, + preservation: Preservation, + force: Fortitude, + ) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => + >::reducible_balance(who, preservation, force), + Right(a) => >::reducible_balance( + a, + who, + preservation, + force, + ), + } + } + fn can_deposit( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + provenance: Provenance, + ) -> DepositConsequence { + match Criterion::convert(asset) { + Left(()) => + >::can_deposit(who, amount, provenance), + Right(a) => + >::can_deposit(a, who, amount, provenance), + } + } + fn can_withdraw( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> WithdrawConsequence { + match Criterion::convert(asset) { + Left(()) => >::can_withdraw(who, amount), + Right(a) => >::can_withdraw(a, who, amount), + } + } + fn asset_exists(asset: Self::AssetId) -> bool { + match Criterion::convert(asset) { + Left(()) => true, + Right(a) => >::asset_exists(a), + } + } +} + +impl< + Left: fungible::InspectHold, + Right: fungibles::InspectHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::InspectHold for UnionOf +{ + type Reason = Left::Reason; + + fn reducible_total_balance_on_hold( + asset: Self::AssetId, + who: &AccountId, + force: Fortitude, + ) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => + >::reducible_total_balance_on_hold( + who, force, + ), + Right(a) => + >::reducible_total_balance_on_hold( + a, who, force, + ), + } + } + fn hold_available(asset: Self::AssetId, reason: &Self::Reason, who: &AccountId) -> bool { + match Criterion::convert(asset) { + Left(()) => >::hold_available(reason, who), + Right(a) => + >::hold_available(a, reason, who), + } + } + fn total_balance_on_hold(asset: Self::AssetId, who: &AccountId) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::total_balance_on_hold(who), + Right(a) => >::total_balance_on_hold(a, who), + } + } + fn balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + ) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::balance_on_hold(reason, who), + Right(a) => + >::balance_on_hold(a, reason, who), + } + } + fn can_hold( + asset: Self::AssetId, + reason: 
&Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> bool { + match Criterion::convert(asset) { + Left(()) => >::can_hold(reason, who, amount), + Right(a) => + >::can_hold(a, reason, who, amount), + } + } +} + +impl< + Left: fungible::Unbalanced, + Right: fungibles::Unbalanced, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Unbalanced for UnionOf +{ + fn handle_dust(dust: fungibles::Dust) + where + Self: Sized, + { + match Criterion::convert(dust.0) { + Left(()) => + >::handle_dust(fungible::Dust(dust.1)), + Right(a) => + >::handle_dust(fungibles::Dust(a, dust.1)), + } + } + fn write_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result, DispatchError> { + match Criterion::convert(asset) { + Left(()) => >::write_balance(who, amount), + Right(a) => >::write_balance(a, who, amount), + } + } + fn set_total_issuance(asset: Self::AssetId, amount: Self::Balance) -> () { + match Criterion::convert(asset) { + Left(()) => >::set_total_issuance(amount), + Right(a) => >::set_total_issuance(a, amount), + } + } + fn decrease_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + preservation: Preservation, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::decrease_balance( + who, + amount, + precision, + preservation, + force, + ), + Right(a) => >::decrease_balance( + a, + who, + amount, + precision, + preservation, + force, + ), + } + } + fn increase_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => + >::increase_balance(who, amount, precision), + Right(a) => >::increase_balance( + a, who, amount, precision, + ), + } + } +} + +impl< + Left: fungible::UnbalancedHold, + Right: fungibles::UnbalancedHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::UnbalancedHold for UnionOf +{ + fn set_balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> DispatchResult { + match Criterion::convert(asset) { + Left(()) => >::set_balance_on_hold( + reason, who, amount, + ), + Right(a) => >::set_balance_on_hold( + a, reason, who, amount, + ), + } + } + fn decrease_balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::decrease_balance_on_hold( + reason, who, amount, precision, + ), + Right(a) => >::decrease_balance_on_hold( + a, reason, who, amount, precision, + ), + } + } + fn increase_balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::increase_balance_on_hold( + reason, who, amount, precision, + ), + Right(a) => >::increase_balance_on_hold( + a, reason, who, amount, precision, + ), + } + } +} + +impl< + Left: fungible::Mutate, + Right: fungibles::Mutate, + Criterion: Convert>, + AssetKind: AssetId, + AccountId: Eq, + > fungibles::Mutate for UnionOf +{ + fn mint_into( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::mint_into(who, amount), + Right(a) => >::mint_into(a, who, amount), + } + } + fn burn_from( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + 
precision: Precision, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => + >::burn_from(who, amount, precision, force), + Right(a) => + >::burn_from(a, who, amount, precision, force), + } + } + fn shelve( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::shelve(who, amount), + Right(a) => >::shelve(a, who, amount), + } + } + fn restore( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::restore(who, amount), + Right(a) => >::restore(a, who, amount), + } + } + fn transfer( + asset: Self::AssetId, + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + preservation: Preservation, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => + >::transfer(source, dest, amount, preservation), + Right(a) => >::transfer( + a, + source, + dest, + amount, + preservation, + ), + } + } + + fn set_balance(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::set_balance(who, amount), + Right(a) => >::set_balance(a, who, amount), + } + } +} + +impl< + Left: fungible::MutateHold, + Right: fungibles::MutateHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::MutateHold for UnionOf +{ + fn hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> DispatchResult { + match Criterion::convert(asset) { + Left(()) => >::hold(reason, who, amount), + Right(a) => >::hold(a, reason, who, amount), + } + } + fn release( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => + >::release(reason, who, amount, precision), + Right(a) => >::release( + a, reason, who, amount, precision, + ), + } + } + fn burn_held( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::burn_held( + reason, who, amount, precision, force, + ), + Right(a) => >::burn_held( + a, reason, who, amount, precision, force, + ), + } + } + fn transfer_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + precision: Precision, + mode: Restriction, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::transfer_on_hold( + reason, source, dest, amount, precision, mode, force, + ), + Right(a) => >::transfer_on_hold( + a, reason, source, dest, amount, precision, mode, force, + ), + } + } + fn transfer_and_hold( + asset: Self::AssetId, + reason: &Self::Reason, + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + precision: Precision, + preservation: Preservation, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(()) => >::transfer_and_hold( + reason, + source, + dest, + amount, + precision, + preservation, + force, + ), + Right(a) => >::transfer_and_hold( + a, + reason, + source, + dest, + amount, + precision, + preservation, + force, + ), + } + } +} + +pub struct ConvertImbalanceDropHandler< + Left, + Right, + Criterion, + AssetKind, + Balance, + AssetId, + AccountId, +>(sp_std::marker::PhantomData<(Left, Right, Criterion, AssetKind, Balance, AssetId, 
AccountId)>); + +impl< + Left: fungible::HandleImbalanceDrop, + Right: fungibles::HandleImbalanceDrop, + Criterion: Convert>, + AssetKind, + Balance, + AssetId, + AccountId, + > fungibles::HandleImbalanceDrop + for ConvertImbalanceDropHandler +{ + fn handle(asset: AssetKind, amount: Balance) { + match Criterion::convert(asset) { + Left(()) => Left::handle(amount), + Right(a) => Right::handle(a, amount), + } + } +} + +impl< + Left: fungible::Balanced, + Right: fungibles::Balanced, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Balanced for UnionOf +{ + type OnDropDebt = ConvertImbalanceDropHandler< + Left::OnDropDebt, + Right::OnDropDebt, + Criterion, + AssetKind, + Left::Balance, + Right::AssetId, + AccountId, + >; + type OnDropCredit = ConvertImbalanceDropHandler< + Left::OnDropCredit, + Right::OnDropCredit, + Criterion, + AssetKind, + Left::Balance, + Right::AssetId, + AccountId, + >; + + fn deposit( + asset: Self::AssetId, + who: &AccountId, + value: Self::Balance, + precision: Precision, + ) -> Result, DispatchError> { + match Criterion::convert(asset.clone()) { + Left(()) => >::deposit(who, value, precision) + .map(|d| fungibles::imbalance::from_fungible(d, asset)), + Right(a) => + >::deposit(a, who, value, precision) + .map(|d| fungibles::imbalance::from_fungibles(d, asset)), + } + } + fn issue(asset: Self::AssetId, amount: Self::Balance) -> fungibles::Credit { + match Criterion::convert(asset.clone()) { + Left(()) => { + let credit = >::issue(amount); + fungibles::imbalance::from_fungible(credit, asset) + }, + Right(a) => { + let credit = >::issue(a, amount); + fungibles::imbalance::from_fungibles(credit, asset) + }, + } + } + fn pair( + asset: Self::AssetId, + amount: Self::Balance, + ) -> (fungibles::Debt, fungibles::Credit) { + match Criterion::convert(asset.clone()) { + Left(()) => { + let (a, b) = >::pair(amount); + ( + fungibles::imbalance::from_fungible(a, asset.clone()), + fungibles::imbalance::from_fungible(b, asset), + ) + }, + Right(a) => { + let (a, b) = >::pair(a, amount); + ( + fungibles::imbalance::from_fungibles(a, asset.clone()), + fungibles::imbalance::from_fungibles(b, asset), + ) + }, + } + } + fn rescind(asset: Self::AssetId, amount: Self::Balance) -> fungibles::Debt { + match Criterion::convert(asset.clone()) { + Left(()) => { + let debt = >::rescind(amount); + fungibles::imbalance::from_fungible(debt, asset) + }, + Right(a) => { + let debt = >::rescind(a, amount); + fungibles::imbalance::from_fungibles(debt, asset) + }, + } + } + fn resolve( + who: &AccountId, + credit: fungibles::Credit, + ) -> Result<(), fungibles::Credit> { + let asset = credit.asset(); + match Criterion::convert(asset.clone()) { + Left(()) => { + let credit = imbalance::from_fungibles(credit); + >::resolve(who, credit) + .map_err(|credit| fungibles::imbalance::from_fungible(credit, asset)) + }, + Right(a) => { + let credit = fungibles::imbalance::from_fungibles(credit, a); + >::resolve(who, credit) + .map_err(|credit| fungibles::imbalance::from_fungibles(credit, asset)) + }, + } + } + fn settle( + who: &AccountId, + debt: fungibles::Debt, + preservation: Preservation, + ) -> Result, fungibles::Debt> { + let asset = debt.asset(); + match Criterion::convert(asset.clone()) { + Left(()) => { + let debt = imbalance::from_fungibles(debt); + match >::settle(who, debt, preservation) { + Ok(c) => Ok(fungibles::imbalance::from_fungible(c, asset)), + Err(d) => Err(fungibles::imbalance::from_fungible(d, asset)), + } + }, + Right(a) => { + let debt = 
fungibles::imbalance::from_fungibles(debt, a); + match >::settle(who, debt, preservation) { + Ok(c) => Ok(fungibles::imbalance::from_fungibles(c, asset)), + Err(d) => Err(fungibles::imbalance::from_fungibles(d, asset)), + } + }, + } + } + fn withdraw( + asset: Self::AssetId, + who: &AccountId, + value: Self::Balance, + precision: Precision, + preservation: Preservation, + force: Fortitude, + ) -> Result, DispatchError> { + match Criterion::convert(asset.clone()) { + Left(()) => >::withdraw( + who, + value, + precision, + preservation, + force, + ) + .map(|c| fungibles::imbalance::from_fungible(c, asset)), + Right(a) => >::withdraw( + a, + who, + value, + precision, + preservation, + force, + ) + .map(|c| fungibles::imbalance::from_fungibles(c, asset)), + } + } +} + +impl< + Left: fungible::BalancedHold, + Right: fungibles::BalancedHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::BalancedHold for UnionOf +{ + fn slash( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> (fungibles::Credit, Self::Balance) { + match Criterion::convert(asset.clone()) { + Left(()) => { + let (credit, amount) = + >::slash(reason, who, amount); + (fungibles::imbalance::from_fungible(credit, asset), amount) + }, + Right(a) => { + let (credit, amount) = + >::slash(a, reason, who, amount); + (fungibles::imbalance::from_fungibles(credit, asset), amount) + }, + } + } +} + +impl< + Left: fungible::Inspect, + Right: fungibles::Inspect + fungibles::Create, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Create for UnionOf +{ + fn create( + asset: AssetKind, + admin: AccountId, + is_sufficient: bool, + min_balance: Self::Balance, + ) -> DispatchResult { + match Criterion::convert(asset) { + // no-op for `Left` since `Create` trait is not defined within `fungible::*`. + Left(()) => Ok(()), + Right(a) => >::create( + a, + admin, + is_sufficient, + min_balance, + ), + } + } +} + +impl< + Left: fungible::Inspect + + AccountTouch<(), AccountId, Balance = >::Balance>, + Right: fungibles::Inspect + + AccountTouch< + Right::AssetId, + AccountId, + Balance = >::Balance, + >, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > AccountTouch for UnionOf +{ + type Balance = >::Balance; + + fn deposit_required(asset: AssetKind) -> Self::Balance { + match Criterion::convert(asset) { + Left(()) => >::deposit_required(()), + Right(a) => >::deposit_required(a), + } + } + + fn should_touch(asset: AssetKind, who: &AccountId) -> bool { + match Criterion::convert(asset) { + Left(()) => >::should_touch((), who), + Right(a) => >::should_touch(a, who), + } + } + + fn touch(asset: AssetKind, who: &AccountId, depositor: &AccountId) -> DispatchResult { + match Criterion::convert(asset) { + Left(()) => >::touch((), who, depositor), + Right(a) => + >::touch(a, who, depositor), + } + } +} diff --git a/substrate/frame/support/src/traits/tokens/fungibles/freeze.rs b/substrate/frame/support/src/traits/tokens/fungibles/freeze.rs deleted file mode 100644 index 68e9ab7ea407..000000000000 --- a/substrate/frame/support/src/traits/tokens/fungibles/freeze.rs +++ /dev/null @@ -1,151 +0,0 @@ -// This file is part of a fork of Substrate which has had various changes. - -// Copyright (C) Parity Technologies (UK) Ltd. -// Copyright (C) 2022-2023 Luke Parker -// SPDX-License-Identifier: Apache-2.0 - -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -//! The traits for putting freezes within a single fungible token class. - -use crate::{ensure, traits::tokens::Fortitude}; -use scale_info::TypeInfo; -use sp_arithmetic::{ - traits::{CheckedAdd, CheckedSub}, - ArithmeticError, -}; -use sp_runtime::{DispatchResult, TokenError}; - -/// Trait for inspecting a fungible asset which can be frozen. Freezing is essentially setting a -/// minimum balance below which the total balance (inclusive of any funds placed on hold) may not -/// be normally allowed to drop. Generally, freezers will provide an "update" function such that -/// if the total balance does drop below the limit, then the freezer can update their housekeeping -/// accordingly. -pub trait Inspect: super::Inspect { - /// An identifier for a freeze. - type Id: codec::Encode + TypeInfo + 'static; - - /// Amount of funds held in reserve by `who` for the given `id`. - fn balance_frozen(asset: Self::AssetId, id: &Self::Id, who: &AccountId) -> Self::Balance; - - /// The amount of the balance which can become frozen. Defaults to `total_balance()`. - fn balance_freezable(asset: Self::AssetId, who: &AccountId) -> Self::Balance { - Self::total_balance(asset, who) - } - - /// Returns `true` if it's possible to introduce a freeze for the given `id` onto the - /// account of `who`. This will be true as long as the implementor supports as many - /// concurrent freeze locks as there are possible values of `id`. - fn can_freeze(asset: Self::AssetId, id: &Self::Id, who: &AccountId) -> bool; -} - -/// Trait for introducing, altering and removing locks to freeze an account's funds so they never -/// go below a set minimum. -pub trait Mutate: Inspect { - /// Prevent actions which would reduce the balance of the account of `who` below the given - /// `amount` and identify this restriction though the given `id`. Unlike `extend_freeze`, any - /// outstanding freeze in place for `who` under the `id` are dropped. - /// - /// If `amount` is zero, it is equivalent to using `thaw`. - /// - /// Note that `amount` can be greater than the total balance, if desired. - fn set_freeze( - asset: Self::AssetId, - id: &Self::Id, - who: &AccountId, - amount: Self::Balance, - ) -> DispatchResult; - - /// Prevent the balance of the account of `who` from being reduced below the given `amount` and - /// identify this restriction though the given `id`. Unlike `set_freeze`, this does not - /// counteract any pre-existing freezes in place for `who` under the `id`. Also unlike - /// `set_freeze`, in the case that `amount` is zero, this is no-op and never fails. - /// - /// Note that more funds can be locked than the total balance, if desired. - fn extend_freeze( - asset: Self::AssetId, - id: &Self::Id, - who: &AccountId, - amount: Self::Balance, - ) -> DispatchResult; - - /// Remove an existing lock. - fn thaw(asset: Self::AssetId, id: &Self::Id, who: &AccountId) -> DispatchResult; - - /// Attempt to alter the amount frozen under the given `id` to `amount`. - /// - /// Fail if the account of `who` has fewer freezable funds than `amount`, unless `fortitude` is - /// `Fortitude::Force`. 
- fn set_frozen( - asset: Self::AssetId, - id: &Self::Id, - who: &AccountId, - amount: Self::Balance, - fortitude: Fortitude, - ) -> DispatchResult { - let force = fortitude == Fortitude::Force; - ensure!( - force || Self::balance_freezable(asset.clone(), who) >= amount, - TokenError::FundsUnavailable - ); - Self::set_freeze(asset, id, who, amount) - } - - /// Attempt to set the amount frozen under the given `id` to `amount`, iff this would increase - /// the amount frozen under `id`. Do nothing otherwise. - /// - /// Fail if the account of `who` has fewer freezable funds than `amount`, unless `fortitude` is - /// `Fortitude::Force`. - fn ensure_frozen( - asset: Self::AssetId, - id: &Self::Id, - who: &AccountId, - amount: Self::Balance, - fortitude: Fortitude, - ) -> DispatchResult { - let force = fortitude == Fortitude::Force; - ensure!( - force || Self::balance_freezable(asset.clone(), who) >= amount, - TokenError::FundsUnavailable - ); - Self::extend_freeze(asset, id, who, amount) - } - - /// Decrease the amount which is being frozen for a particular lock, failing in the case of - /// underflow. - fn decrease_frozen( - asset: Self::AssetId, - id: &Self::Id, - who: &AccountId, - amount: Self::Balance, - ) -> DispatchResult { - let a = Self::balance_frozen(asset.clone(), id, who) - .checked_sub(&amount) - .ok_or(ArithmeticError::Underflow)?; - Self::set_frozen(asset, id, who, a, Fortitude::Polite) - } - - /// Increase the amount which is being frozen for a particular lock, failing in the case that - /// too little balance is available for being frozen. - fn increase_frozen( - asset: Self::AssetId, - id: &Self::Id, - who: &AccountId, - amount: Self::Balance, - ) -> DispatchResult { - let a = Self::balance_frozen(asset.clone(), id, who) - .checked_add(&amount) - .ok_or(ArithmeticError::Overflow)?; - Self::set_frozen(asset, id, who, a, Fortitude::Polite) - } -} diff --git a/substrate/frame/support/src/traits/tokens/fungibles/imbalance.rs b/substrate/frame/support/src/traits/tokens/fungibles/imbalance.rs index f01adc5012f8..9973e1f8184d 100644 --- a/substrate/frame/support/src/traits/tokens/fungibles/imbalance.rs +++ b/substrate/frame/support/src/traits/tokens/fungibles/imbalance.rs @@ -21,8 +21,9 @@ use super::*; use crate::traits::{ + fungible, misc::{SameOrOther, TryDrop}, - tokens::{AssetId, Balance}, + tokens::{imbalance::Imbalance as ImbalanceT, AssetId, Balance}, }; use frame_support_procedural::{EqNoBound, PartialEqNoBound, RuntimeDebugNoBound}; use sp_runtime::traits::Zero; @@ -94,6 +95,11 @@ impl< Self { asset, amount, _phantom: PhantomData } } + /// Forget the imbalance without invoking the on-drop handler. + pub(crate) fn forget(imbalance: Self) { + sp_std::mem::forget(imbalance); + } + pub fn drop_zero(self) -> Result<(), Self> { if self.amount.is_zero() { sp_std::mem::forget(self); @@ -169,6 +175,52 @@ impl< } } +/// Converts a `fungible` `imbalance` instance to an instance of a `fungibles` imbalance type using +/// a specified `asset`. +/// +/// This function facilitates imbalance conversions within the implementations of +/// [`frame_support::traits::fungibles::UnionOf`], [`frame_support::traits::fungible::UnionOf`], and +/// [`frame_support::traits::fungible::ItemOf`] adapters. It is intended only for internal use +/// within the current crate. 
+pub(crate) fn from_fungible< + A: AssetId, + B: Balance, + OnDropIn: fungible::HandleImbalanceDrop, + OppositeIn: fungible::HandleImbalanceDrop, + OnDropOut: HandleImbalanceDrop, + OppositeOut: HandleImbalanceDrop, +>( + imbalance: fungible::Imbalance, + asset: A, +) -> Imbalance { + let new = Imbalance::new(asset, imbalance.peek()); + fungible::Imbalance::forget(imbalance); + new +} + +/// Converts a `fungibles` `imbalance` instance of one type to another using a specified `asset`. +/// +/// This function facilitates imbalance conversions within the implementations of +/// [`frame_support::traits::fungibles::UnionOf`], [`frame_support::traits::fungible::UnionOf`], and +/// [`frame_support::traits::fungible::ItemOf`] adapters. It is intended only for internal use +/// within the current crate. +pub(crate) fn from_fungibles< + A: AssetId, + B: Balance, + OnDropIn: HandleImbalanceDrop, + OppositeIn: HandleImbalanceDrop, + AssetOut: AssetId, + OnDropOut: HandleImbalanceDrop, + OppositeOut: HandleImbalanceDrop, +>( + imbalance: Imbalance, + asset: AssetOut, +) -> Imbalance { + let new = Imbalance::new(asset, imbalance.peek()); + Imbalance::forget(imbalance); + new +} + /// Imbalance implying that the total_issuance value is less than the sum of all account balances. pub type Debt = Imbalance< >::AssetId, diff --git a/substrate/frame/support/src/traits/tokens/fungibles/mod.rs b/substrate/frame/support/src/traits/tokens/fungibles/mod.rs index 5502c7da0442..07583fde2d6c 100644 --- a/substrate/frame/support/src/traits/tokens/fungibles/mod.rs +++ b/substrate/frame/support/src/traits/tokens/fungibles/mod.rs @@ -21,11 +21,12 @@ pub mod approvals; mod enumerable; pub mod hold; -mod imbalance; +pub(crate) mod imbalance; mod lifetime; pub mod metadata; mod regular; pub mod roles; +mod union_of; pub use enumerable::Inspect as InspectEnumerable; pub use hold::{ @@ -37,3 +38,4 @@ pub use lifetime::{Create, Destroy}; pub use regular::{ Balanced, DecreaseIssuance, Dust, IncreaseIssuance, Inspect, Mutate, Unbalanced, }; +pub use union_of::UnionOf; diff --git a/substrate/frame/support/src/traits/tokens/fungibles/regular.rs b/substrate/frame/support/src/traits/tokens/fungibles/regular.rs index d56485745c60..c4bf25fd873c 100644 --- a/substrate/frame/support/src/traits/tokens/fungibles/regular.rs +++ b/substrate/frame/support/src/traits/tokens/fungibles/regular.rs @@ -594,3 +594,66 @@ pub trait Balanced: Inspect + Unbalanced { fn done_deposit(_asset: Self::AssetId, _who: &AccountId, _amount: Self::Balance) {} fn done_withdraw(_asset: Self::AssetId, _who: &AccountId, _amount: Self::Balance) {} } + +/// Dummy implementation of [`Inspect`] +#[cfg(feature = "std")] +impl Inspect for () { + type AssetId = u32; + type Balance = u32; + fn total_issuance(_: Self::AssetId) -> Self::Balance { + 0 + } + fn minimum_balance(_: Self::AssetId) -> Self::Balance { + 0 + } + fn total_balance(_: Self::AssetId, _: &AccountId) -> Self::Balance { + 0 + } + fn balance(_: Self::AssetId, _: &AccountId) -> Self::Balance { + 0 + } + fn reducible_balance( + _: Self::AssetId, + _: &AccountId, + _: Preservation, + _: Fortitude, + ) -> Self::Balance { + 0 + } + fn can_deposit( + _: Self::AssetId, + _: &AccountId, + _: Self::Balance, + _: Provenance, + ) -> DepositConsequence { + DepositConsequence::Success + } + fn can_withdraw( + _: Self::AssetId, + _: &AccountId, + _: Self::Balance, + ) -> WithdrawConsequence { + WithdrawConsequence::Success + } + fn asset_exists(_: Self::AssetId) -> bool { + false + } +} + +/// Dummy implementation of 
[`Unbalanced`] +#[cfg(feature = "std")] +impl Unbalanced for () { + fn handle_dust(_: Dust) {} + fn write_balance( + _: Self::AssetId, + _: &AccountId, + _: Self::Balance, + ) -> Result, DispatchError> { + Ok(None) + } + fn set_total_issuance(_: Self::AssetId, _: Self::Balance) {} +} + +/// Dummy implementation of [`Mutate`] +#[cfg(feature = "std")] +impl Mutate for () {} diff --git a/substrate/frame/support/src/traits/tokens/fungibles/union_of.rs b/substrate/frame/support/src/traits/tokens/fungibles/union_of.rs new file mode 100644 index 000000000000..4185bf8da3cf --- /dev/null +++ b/substrate/frame/support/src/traits/tokens/fungibles/union_of.rs @@ -0,0 +1,828 @@ +// This file is part of Substrate. + +// Copyright (Criterion) Parity Technologies (UK) Ltd. +// Copyright (C) 2024 Luke Parker +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Type to combine two `fungibles::*` implementations into one union `fungibles::*` implementation. + +use frame_support::traits::{ + tokens::{ + fungibles, fungibles::imbalance, AssetId, DepositConsequence, Fortitude, Precision, + Preservation, Provenance, Restriction, WithdrawConsequence, + }, + AccountTouch, +}; +use sp_runtime::{ + traits::Convert, + DispatchError, DispatchResult, Either, + Either::{Left, Right}, +}; + +/// Type to combine two `fungibles::*` implementations into one union `fungibles::*` implementation. +/// +/// ### Parameters: +/// - `Left` is `fungibles::*` implementation that is incorporated into the resulting union. +/// - `Right` is `fungibles::*` implementation that is incorporated into the resulting union. +/// - `Criterion` determines whether the `AssetKind` belongs to the `Left` or `Right` set. +/// - `AssetKind` is a superset type encompassing asset kinds from `Left` and `Right` sets. +/// - `AccountId` is an account identifier type. 
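// Editorial note (not part of this patch): a sketch of wiring the two-`fungibles`
// variant. It assumes two `pallet_assets` instances, `Assets` and `ForeignAssets`,
// both keyed by `u32`, plus a hypothetical superset id enum and criterion; all of
// the names below are assumptions for illustration only.
//
// enum SupersetId { Local(u32), Foreign(u32) }
//
// pub struct LocalFromLeft;
// impl Convert<SupersetId, Either<u32, u32>> for LocalFromLeft {
//     fn convert(id: SupersetId) -> Either<u32, u32> {
//         match id {
//             SupersetId::Local(i) => Either::Left(i),   // handled by `Assets`
//             SupersetId::Foreign(i) => Either::Right(i), // handled by `ForeignAssets`
//         }
//     }
// }
//
// type AllAssets = UnionOf<Assets, ForeignAssets, LocalFromLeft, SupersetId, AccountId>;
//
// The criterion is the only piece the integrator writes: every trait method of the
// union simply converts the superset id and forwards to the matching side.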
+pub struct UnionOf( + sp_std::marker::PhantomData<(Left, Right, Criterion, AssetKind, AccountId)>, +); + +impl< + Left: fungibles::Inspect, + Right: fungibles::Inspect, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Inspect for UnionOf +{ + type AssetId = AssetKind; + type Balance = Left::Balance; + + fn total_issuance(asset: Self::AssetId) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::total_issuance(a), + Right(a) => >::total_issuance(a), + } + } + fn active_issuance(asset: Self::AssetId) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::active_issuance(a), + Right(a) => >::active_issuance(a), + } + } + fn minimum_balance(asset: Self::AssetId) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::minimum_balance(a), + Right(a) => >::minimum_balance(a), + } + } + fn balance(asset: Self::AssetId, who: &AccountId) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::balance(a, who), + Right(a) => >::balance(a, who), + } + } + fn total_balance(asset: Self::AssetId, who: &AccountId) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::total_balance(a, who), + Right(a) => >::total_balance(a, who), + } + } + fn reducible_balance( + asset: Self::AssetId, + who: &AccountId, + preservation: Preservation, + force: Fortitude, + ) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::reducible_balance( + a, + who, + preservation, + force, + ), + Right(a) => >::reducible_balance( + a, + who, + preservation, + force, + ), + } + } + fn can_deposit( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + provenance: Provenance, + ) -> DepositConsequence { + match Criterion::convert(asset) { + Left(a) => + >::can_deposit(a, who, amount, provenance), + Right(a) => + >::can_deposit(a, who, amount, provenance), + } + } + fn can_withdraw( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> WithdrawConsequence { + match Criterion::convert(asset) { + Left(a) => >::can_withdraw(a, who, amount), + Right(a) => >::can_withdraw(a, who, amount), + } + } + fn asset_exists(asset: Self::AssetId) -> bool { + match Criterion::convert(asset) { + Left(a) => >::asset_exists(a), + Right(a) => >::asset_exists(a), + } + } +} + +impl< + Left: fungibles::InspectHold, + Right: fungibles::InspectHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::InspectHold for UnionOf +{ + type Reason = Left::Reason; + + fn reducible_total_balance_on_hold( + asset: Self::AssetId, + who: &AccountId, + force: Fortitude, + ) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => + >::reducible_total_balance_on_hold( + a, who, force, + ), + Right(a) => + >::reducible_total_balance_on_hold( + a, who, force, + ), + } + } + fn hold_available(asset: Self::AssetId, reason: &Self::Reason, who: &AccountId) -> bool { + match Criterion::convert(asset) { + Left(a) => >::hold_available(a, reason, who), + Right(a) => + >::hold_available(a, reason, who), + } + } + fn total_balance_on_hold(asset: Self::AssetId, who: &AccountId) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::total_balance_on_hold(a, who), + Right(a) => >::total_balance_on_hold(a, who), + } + } + fn balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + ) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::balance_on_hold(a, reason, who), + Right(a) => + >::balance_on_hold(a, reason, who), + } + } + fn 
can_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> bool { + match Criterion::convert(asset) { + Left(a) => + >::can_hold(a, reason, who, amount), + Right(a) => + >::can_hold(a, reason, who, amount), + } + } +} + +impl< + Left: fungibles::Unbalanced, + Right: fungibles::Unbalanced, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Unbalanced for UnionOf +{ + fn handle_dust(dust: fungibles::Dust) + where + Self: Sized, + { + match Criterion::convert(dust.0) { + Left(a) => + >::handle_dust(fungibles::Dust(a, dust.1)), + Right(a) => + >::handle_dust(fungibles::Dust(a, dust.1)), + } + } + fn write_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result, DispatchError> { + match Criterion::convert(asset) { + Left(a) => >::write_balance(a, who, amount), + Right(a) => >::write_balance(a, who, amount), + } + } + fn set_total_issuance(asset: Self::AssetId, amount: Self::Balance) -> () { + match Criterion::convert(asset) { + Left(a) => >::set_total_issuance(a, amount), + Right(a) => >::set_total_issuance(a, amount), + } + } + fn decrease_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + preservation: Preservation, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::decrease_balance( + a, + who, + amount, + precision, + preservation, + force, + ), + Right(a) => >::decrease_balance( + a, + who, + amount, + precision, + preservation, + force, + ), + } + } + fn increase_balance( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::increase_balance( + a, who, amount, precision, + ), + Right(a) => >::increase_balance( + a, who, amount, precision, + ), + } + } +} + +impl< + Left: fungibles::UnbalancedHold, + Right: fungibles::UnbalancedHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::UnbalancedHold for UnionOf +{ + fn set_balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> DispatchResult { + match Criterion::convert(asset) { + Left(a) => >::set_balance_on_hold( + a, reason, who, amount, + ), + Right(a) => >::set_balance_on_hold( + a, reason, who, amount, + ), + } + } + fn decrease_balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::decrease_balance_on_hold( + a, reason, who, amount, precision, + ), + Right(a) => >::decrease_balance_on_hold( + a, reason, who, amount, precision, + ), + } + } + fn increase_balance_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::increase_balance_on_hold( + a, reason, who, amount, precision, + ), + Right(a) => >::increase_balance_on_hold( + a, reason, who, amount, precision, + ), + } + } +} + +impl< + Left: fungibles::Mutate, + Right: fungibles::Mutate, + Criterion: Convert>, + AssetKind: AssetId, + AccountId: Eq, + > fungibles::Mutate for UnionOf +{ + fn mint_into( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::mint_into(a, who, amount), + Right(a) => >::mint_into(a, who, amount), + } + } + fn 
burn_from( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => + >::burn_from(a, who, amount, precision, force), + Right(a) => + >::burn_from(a, who, amount, precision, force), + } + } + fn shelve( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::shelve(a, who, amount), + Right(a) => >::shelve(a, who, amount), + } + } + fn restore( + asset: Self::AssetId, + who: &AccountId, + amount: Self::Balance, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::restore(a, who, amount), + Right(a) => >::restore(a, who, amount), + } + } + fn transfer( + asset: Self::AssetId, + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + preservation: Preservation, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::transfer( + a, + source, + dest, + amount, + preservation, + ), + Right(a) => >::transfer( + a, + source, + dest, + amount, + preservation, + ), + } + } + + fn set_balance(asset: Self::AssetId, who: &AccountId, amount: Self::Balance) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::set_balance(a, who, amount), + Right(a) => >::set_balance(a, who, amount), + } + } +} + +impl< + Left: fungibles::MutateHold, + Right: fungibles::MutateHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::MutateHold for UnionOf +{ + fn hold( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> DispatchResult { + match Criterion::convert(asset) { + Left(a) => >::hold(a, reason, who, amount), + Right(a) => >::hold(a, reason, who, amount), + } + } + fn release( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::release( + a, reason, who, amount, precision, + ), + Right(a) => >::release( + a, reason, who, amount, precision, + ), + } + } + fn burn_held( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + precision: Precision, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::burn_held( + a, reason, who, amount, precision, force, + ), + Right(a) => >::burn_held( + a, reason, who, amount, precision, force, + ), + } + } + fn transfer_on_hold( + asset: Self::AssetId, + reason: &Self::Reason, + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + precision: Precision, + mode: Restriction, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::transfer_on_hold( + a, reason, source, dest, amount, precision, mode, force, + ), + Right(a) => >::transfer_on_hold( + a, reason, source, dest, amount, precision, mode, force, + ), + } + } + fn transfer_and_hold( + asset: Self::AssetId, + reason: &Self::Reason, + source: &AccountId, + dest: &AccountId, + amount: Self::Balance, + precision: Precision, + preservation: Preservation, + force: Fortitude, + ) -> Result { + match Criterion::convert(asset) { + Left(a) => >::transfer_and_hold( + a, + reason, + source, + dest, + amount, + precision, + preservation, + force, + ), + Right(a) => >::transfer_and_hold( + a, + reason, + source, + dest, + amount, + precision, + preservation, + force, + ), + } + } +} + +pub struct ConvertImbalanceDropHandler< + Left, + Right, + LeftAssetId, + RightAssetId, + 
Criterion, + AssetKind, + Balance, + AccountId, +>( + sp_std::marker::PhantomData<( + Left, + Right, + LeftAssetId, + RightAssetId, + Criterion, + AssetKind, + Balance, + AccountId, + )>, +); + +impl< + Left: fungibles::HandleImbalanceDrop, + Right: fungibles::HandleImbalanceDrop, + LeftAssetId, + RightAssetId, + Criterion: Convert>, + AssetKind, + Balance, + AccountId, + > fungibles::HandleImbalanceDrop + for ConvertImbalanceDropHandler< + Left, + Right, + LeftAssetId, + RightAssetId, + Criterion, + AssetKind, + Balance, + AccountId, + > +{ + fn handle(asset: AssetKind, amount: Balance) { + match Criterion::convert(asset) { + Left(a) => Left::handle(a, amount), + Right(a) => Right::handle(a, amount), + } + } +} + +impl< + Left: fungibles::Balanced, + Right: fungibles::Balanced, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Balanced for UnionOf +{ + type OnDropDebt = ConvertImbalanceDropHandler< + Left::OnDropDebt, + Right::OnDropDebt, + Left::AssetId, + Right::AssetId, + Criterion, + AssetKind, + Left::Balance, + AccountId, + >; + type OnDropCredit = ConvertImbalanceDropHandler< + Left::OnDropCredit, + Right::OnDropCredit, + Left::AssetId, + Right::AssetId, + Criterion, + AssetKind, + Left::Balance, + AccountId, + >; + + fn deposit( + asset: Self::AssetId, + who: &AccountId, + value: Self::Balance, + precision: Precision, + ) -> Result, DispatchError> { + match Criterion::convert(asset.clone()) { + Left(a) => >::deposit(a, who, value, precision) + .map(|debt| imbalance::from_fungibles(debt, asset)), + Right(a) => + >::deposit(a, who, value, precision) + .map(|debt| imbalance::from_fungibles(debt, asset)), + } + } + fn issue(asset: Self::AssetId, amount: Self::Balance) -> fungibles::Credit { + match Criterion::convert(asset.clone()) { + Left(a) => { + let credit = >::issue(a, amount); + imbalance::from_fungibles(credit, asset) + }, + Right(a) => { + let credit = >::issue(a, amount); + imbalance::from_fungibles(credit, asset) + }, + } + } + fn pair( + asset: Self::AssetId, + amount: Self::Balance, + ) -> (fungibles::Debt, fungibles::Credit) { + match Criterion::convert(asset.clone()) { + Left(a) => { + let (a, b) = >::pair(a, amount); + (imbalance::from_fungibles(a, asset.clone()), imbalance::from_fungibles(b, asset)) + }, + Right(a) => { + let (a, b) = >::pair(a, amount); + (imbalance::from_fungibles(a, asset.clone()), imbalance::from_fungibles(b, asset)) + }, + } + } + fn rescind(asset: Self::AssetId, amount: Self::Balance) -> fungibles::Debt { + match Criterion::convert(asset.clone()) { + Left(a) => { + let debt = >::rescind(a, amount); + imbalance::from_fungibles(debt, asset) + }, + Right(a) => { + let debt = >::rescind(a, amount); + imbalance::from_fungibles(debt, asset) + }, + } + } + fn resolve( + who: &AccountId, + credit: fungibles::Credit, + ) -> Result<(), fungibles::Credit> { + let asset = credit.asset(); + match Criterion::convert(asset.clone()) { + Left(a) => { + let credit = imbalance::from_fungibles(credit, a); + >::resolve(who, credit) + .map_err(|credit| imbalance::from_fungibles(credit, asset)) + }, + Right(a) => { + let credit = imbalance::from_fungibles(credit, a); + >::resolve(who, credit) + .map_err(|credit| imbalance::from_fungibles(credit, asset)) + }, + } + } + fn settle( + who: &AccountId, + debt: fungibles::Debt, + preservation: Preservation, + ) -> Result, fungibles::Debt> { + let asset = debt.asset(); + match Criterion::convert(asset.clone()) { + Left(a) => { + let debt = imbalance::from_fungibles(debt, a); + match >::settle(who, 
debt, preservation) { + Ok(credit) => Ok(imbalance::from_fungibles(credit, asset)), + Err(debt) => Err(imbalance::from_fungibles(debt, asset)), + } + }, + Right(a) => { + let debt = imbalance::from_fungibles(debt, a); + match >::settle(who, debt, preservation) { + Ok(credit) => Ok(imbalance::from_fungibles(credit, asset)), + Err(debt) => Err(imbalance::from_fungibles(debt, asset)), + } + }, + } + } + fn withdraw( + asset: Self::AssetId, + who: &AccountId, + value: Self::Balance, + precision: Precision, + preservation: Preservation, + force: Fortitude, + ) -> Result, DispatchError> { + match Criterion::convert(asset.clone()) { + Left(a) => >::withdraw( + a, + who, + value, + precision, + preservation, + force, + ) + .map(|credit| imbalance::from_fungibles(credit, asset)), + Right(a) => >::withdraw( + a, + who, + value, + precision, + preservation, + force, + ) + .map(|credit| imbalance::from_fungibles(credit, asset)), + } + } +} + +impl< + Left: fungibles::BalancedHold, + Right: fungibles::BalancedHold, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::BalancedHold for UnionOf +{ + fn slash( + asset: Self::AssetId, + reason: &Self::Reason, + who: &AccountId, + amount: Self::Balance, + ) -> (fungibles::Credit, Self::Balance) { + match Criterion::convert(asset.clone()) { + Left(a) => { + let (credit, amount) = + >::slash(a, reason, who, amount); + (imbalance::from_fungibles(credit, asset), amount) + }, + Right(a) => { + let (credit, amount) = + >::slash(a, reason, who, amount); + (imbalance::from_fungibles(credit, asset), amount) + }, + } + } +} + +impl< + Left: fungibles::Inspect + fungibles::Create, + Right: fungibles::Inspect + fungibles::Create, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > fungibles::Create for UnionOf +{ + fn create( + asset: AssetKind, + admin: AccountId, + is_sufficient: bool, + min_balance: Self::Balance, + ) -> DispatchResult { + match Criterion::convert(asset) { + Left(a) => + >::create(a, admin, is_sufficient, min_balance), + Right(a) => >::create( + a, + admin, + is_sufficient, + min_balance, + ), + } + } +} + +impl< + Left: fungibles::Inspect + AccountTouch, + Right: fungibles::Inspect + + AccountTouch< + Right::AssetId, + AccountId, + Balance = >::Balance, + >, + Criterion: Convert>, + AssetKind: AssetId, + AccountId, + > AccountTouch for UnionOf +{ + type Balance = >::Balance; + + fn deposit_required(asset: AssetKind) -> Self::Balance { + match Criterion::convert(asset) { + Left(a) => >::deposit_required(a), + Right(a) => >::deposit_required(a), + } + } + + fn should_touch(asset: AssetKind, who: &AccountId) -> bool { + match Criterion::convert(asset) { + Left(a) => >::should_touch(a, who), + Right(a) => >::should_touch(a, who), + } + } + + fn touch(asset: AssetKind, who: &AccountId, depositor: &AccountId) -> DispatchResult { + match Criterion::convert(asset) { + Left(a) => >::touch(a, who, depositor), + Right(a) => + >::touch(a, who, depositor), + } + } +} diff --git a/substrate/frame/support/src/traits/try_runtime/decode_entire_state.rs b/substrate/frame/support/src/traits/try_runtime/decode_entire_state.rs index c77427abf4a3..dc3cd71a2de3 100644 --- a/substrate/frame/support/src/traits/try_runtime/decode_entire_state.rs +++ b/substrate/frame/support/src/traits/try_runtime/decode_entire_state.rs @@ -65,7 +65,7 @@ impl TryDecodeEntireStorage for Tuple { } /// A value could not be decoded. 
-#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Clone, PartialEq, Eq)] pub struct TryDecodeEntireStorageError { /// The key of the undecodable value. pub key: Vec, @@ -79,9 +79,22 @@ impl core::fmt::Display for TryDecodeEntireStorageError { fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { write!( f, - "Failed to decode storage item `{}::{}`", + "`{}::{}` key `{}` is undecodable", &sp_std::str::from_utf8(&self.info.pallet_name).unwrap_or(""), &sp_std::str::from_utf8(&self.info.storage_name).unwrap_or(""), + array_bytes::bytes2hex("0x", &self.key) + ) + } +} + +impl core::fmt::Debug for TryDecodeEntireStorageError { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!( + f, + "key: {} value: {} info: {:?}", + array_bytes::bytes2hex("0x", &self.key), + array_bytes::bytes2hex("0x", self.raw.clone().unwrap_or_default()), + self.info ) } } @@ -94,7 +107,6 @@ impl core::fmt::Display for TryDecodeEntireStorageError { fn decode_storage_info( info: StorageInfo, ) -> Result> { - let mut next_key = info.prefix.clone(); let mut decoded = 0; let decode_key = |key: &[u8]| match sp_io::storage::get(key) { @@ -102,29 +114,39 @@ fn decode_storage_info( Some(bytes) => { let len = bytes.len(); let _ = ::decode_all(&mut bytes.as_ref()).map_err(|_| { - vec![TryDecodeEntireStorageError { + TryDecodeEntireStorageError { key: key.to_vec(), raw: Some(bytes.to_vec()), info: info.clone(), - }] + } })?; - Ok::>(len) + Ok::(len) }, }; - decoded += decode_key(&next_key)?; + let mut errors = vec![]; + let mut next_key = Some(info.prefix.clone()); loop { - match sp_io::storage::next_key(&next_key) { + match next_key { Some(key) if key.starts_with(&info.prefix) => { - decoded += decode_key(&key)?; - next_key = key; + match decode_key(&key) { + Ok(bytes) => { + decoded += bytes; + }, + Err(e) => errors.push(e), + }; + next_key = sp_io::storage::next_key(&key); }, _ => break, } } - Ok(decoded) + if errors.is_empty() { + Ok(decoded) + } else { + Err(errors) + } } impl TryDecodeEntireStorage @@ -322,6 +344,12 @@ mod tests { // two bytes, cannot be decoded into u32. 
sp_io::storage::set(&Map::hashed_key_for(2), &[0u8, 1]); assert!(Map::try_decode_entire_state().is_err()); + assert_eq!(Map::try_decode_entire_state().unwrap_err().len(), 1); + + // multiple errs in the same map are be detected + sp_io::storage::set(&Map::hashed_key_for(3), &[0u8, 1]); + assert!(Map::try_decode_entire_state().is_err()); + assert_eq!(Map::try_decode_entire_state().unwrap_err().len(), 2); }) } diff --git a/substrate/frame/support/test/Cargo.toml b/substrate/frame/support/test/Cargo.toml index 7ed6e48abf0b..9a342bfdff3f 100644 --- a/substrate/frame/support/test/Cargo.toml +++ b/substrate/frame/support/test/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -26,7 +29,7 @@ sp-core = { path = "../../../primitives/core", default-features = false } sp-std = { path = "../../../primitives/std", default-features = false } sp-version = { path = "../../../primitives/version", default-features = false } sp-metadata-ir = { path = "../../../primitives/metadata-ir", default-features = false } -trybuild = { version = "1.0.74", features = ["diff"] } +trybuild = { version = "1.0.88", features = ["diff"] } pretty_assertions = "1.3.0" rustversion = "1.0.6" frame-system = { path = "../../system", default-features = false } @@ -54,7 +57,10 @@ std = [ "sp-version/std", "test-pallet/std", ] -experimental = ["frame-support/experimental"] +experimental = [ + "frame-support/experimental", + "frame-system/experimental", +] try-runtime = [ "frame-executive/try-runtime", "frame-support/try-runtime", diff --git a/substrate/frame/support/test/compile_pass/Cargo.toml b/substrate/frame/support/test/compile_pass/Cargo.toml index 916771bd4717..0617aa105a21 100644 --- a/substrate/frame/support/test/compile_pass/Cargo.toml +++ b/substrate/frame/support/test/compile_pass/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/support/test/compile_pass/src/lib.rs b/substrate/frame/support/test/compile_pass/src/lib.rs index 5f8a53fb7bfe..789807f11fe2 100644 --- a/substrate/frame/support/test/compile_pass/src/lib.rs +++ b/substrate/frame/support/test/compile_pass/src/lib.rs @@ -23,7 +23,7 @@ #![cfg_attr(not(feature = "std"), no_std)] use renamed_frame_support::{ - construct_runtime, parameter_types, + construct_runtime, derive_impl, parameter_types, traits::{ConstU32, ConstU64, Everything}, }; use sp_core::{sr25519, H256}; @@ -49,6 +49,7 @@ parameter_types! 
{ pub const Version: RuntimeVersion = VERSION; } +#[derive_impl(renamed_frame_system::config_preludes::TestDefaultConfig as renamed_frame_system::DefaultConfig)] impl renamed_frame_system::Config for Runtime { type BaseCallFilter = Everything; type BlockWeights = (); diff --git a/substrate/frame/support/test/pallet/Cargo.toml b/substrate/frame/support/test/pallet/Cargo.toml index 3d71ef2070d5..3debf6e9065b 100644 --- a/substrate/frame/support/test/pallet/Cargo.toml +++ b/substrate/frame/support/test/pallet/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/support/test/src/lib.rs b/substrate/frame/support/test/src/lib.rs index 0ed75d1fc57f..3e45a52d0990 100644 --- a/substrate/frame/support/test/src/lib.rs +++ b/substrate/frame/support/test/src/lib.rs @@ -51,6 +51,8 @@ pub mod pallet { + From>; /// The runtime call type. type RuntimeCall; + /// Contains an aggregation of all tasks in this runtime. + type RuntimeTask; /// The runtime event type. type RuntimeEvent: Parameter + Member diff --git a/substrate/frame/support/test/stg_frame_crate/Cargo.toml b/substrate/frame/support/test/stg_frame_crate/Cargo.toml index 0b3b584910a9..632ea4e794f6 100644 --- a/substrate/frame/support/test/stg_frame_crate/Cargo.toml +++ b/substrate/frame/support/test/stg_frame_crate/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/support/test/tests/construct_runtime_ui/deprecated_where_block.stderr b/substrate/frame/support/test/tests/construct_runtime_ui/deprecated_where_block.stderr index a791c313b4a8..b08efb3a8421 100644 --- a/substrate/frame/support/test/tests/construct_runtime_ui/deprecated_where_block.stderr +++ b/substrate/frame/support/test/tests/construct_runtime_ui/deprecated_where_block.stderr @@ -17,6 +17,7 @@ error: use of deprecated constant `WhereSection::_w`: | |_^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` = note: this error originates in the macro `frame_support::match_and_insert` which comes from the expansion of the macro `construct_runtime` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0277]: the trait bound `Runtime: Config` is not satisfied diff --git a/substrate/frame/support/test/tests/construct_runtime_ui/generics_in_invalid_module.stderr b/substrate/frame/support/test/tests/construct_runtime_ui/generics_in_invalid_module.stderr index bf53f43b9ba7..8458de97f6d3 100644 --- a/substrate/frame/support/test/tests/construct_runtime_ui/generics_in_invalid_module.stderr +++ b/substrate/frame/support/test/tests/construct_runtime_ui/generics_in_invalid_module.stderr @@ -1,4 +1,4 @@ -error: `Call` is not allowed to have generics. Only the following pallets are allowed to have generics: `Event`, `Error`, `Origin`, `Config`. +error: `Call` is not allowed to have generics. Only the following pallets are allowed to have generics: `Event`, `Error`, `Origin`, `Config`, `Task`. 
--> tests/construct_runtime_ui/generics_in_invalid_module.rs:24:36 | 24 | Balance: balances::::{Call, Origin}, diff --git a/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_details_keyword.stderr b/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_details_keyword.stderr index ad631de204e6..feb61793151d 100644 --- a/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_details_keyword.stderr +++ b/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_details_keyword.stderr @@ -1,4 +1,4 @@ -error: expected one of: `Pallet`, `Call`, `Storage`, `Event`, `Error`, `Config`, `Origin`, `Inherent`, `ValidateUnsigned`, `FreezeReason`, `HoldReason`, `LockId`, `SlashReason` +error: expected one of: `Pallet`, `Call`, `Storage`, `Event`, `Error`, `Config`, `Origin`, `Inherent`, `ValidateUnsigned`, `FreezeReason`, `HoldReason`, `Task`, `LockId`, `SlashReason` --> tests/construct_runtime_ui/invalid_module_details_keyword.rs:23:20 | 23 | system: System::{enum}, diff --git a/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_entry.stderr b/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_entry.stderr index b5b89a5a270c..97943dfc1763 100644 --- a/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_entry.stderr +++ b/substrate/frame/support/test/tests/construct_runtime_ui/invalid_module_entry.stderr @@ -1,4 +1,4 @@ -error: expected one of: `Pallet`, `Call`, `Storage`, `Event`, `Error`, `Config`, `Origin`, `Inherent`, `ValidateUnsigned`, `FreezeReason`, `HoldReason`, `LockId`, `SlashReason` +error: expected one of: `Pallet`, `Call`, `Storage`, `Event`, `Error`, `Config`, `Origin`, `Inherent`, `ValidateUnsigned`, `FreezeReason`, `HoldReason`, `Task`, `LockId`, `SlashReason` --> tests/construct_runtime_ui/invalid_module_entry.rs:24:23 | 24 | Balance: balances::{Unexpected}, diff --git a/substrate/frame/support/test/tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.stderr b/substrate/frame/support/test/tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.stderr index 7ed77e75da4c..3b6329c650fa 100644 --- a/substrate/frame/support/test/tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.stderr +++ b/substrate/frame/support/test/tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.stderr @@ -31,19 +31,34 @@ error[E0412]: cannot find type `RuntimeOrigin` in this scope --> tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.rs:42:23 | 42 | type RuntimeOrigin = RuntimeOrigin; - | ^^^^^^^^^^^^^ help: you might have meant to use the associated type: `Self::RuntimeOrigin` + | ^^^^^^^^^^^^^ + | +help: you might have meant to use the associated type + | +42 | type RuntimeOrigin = Self::RuntimeOrigin; + | ++++++ error[E0412]: cannot find type `RuntimeCall` in this scope --> tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.rs:44:21 | 44 | type RuntimeCall = RuntimeCall; - | ^^^^^^^^^^^ help: you might have meant to use the associated type: `Self::RuntimeCall` + | ^^^^^^^^^^^ + | +help: you might have meant to use the associated type + | +44 | type RuntimeCall = Self::RuntimeCall; + | ++++++ error[E0412]: cannot find type `RuntimeEvent` in this scope --> tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.rs:50:22 | 50 | type RuntimeEvent = RuntimeEvent; - | ^^^^^^^^^^^^ help: you might have meant to use the associated type: `Self::RuntimeEvent` + | ^^^^^^^^^^^^ + | +help: you might have meant to use the 
associated type + | +50 | type RuntimeEvent = Self::RuntimeEvent; + | ++++++ error[E0412]: cannot find type `PalletInfo` in this scope --> tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.rs:56:20 @@ -54,10 +69,23 @@ error[E0412]: cannot find type `PalletInfo` in this scope help: you might have meant to use the associated type | 56 | type PalletInfo = Self::PalletInfo; - | ~~~~~~~~~~~~~~~~ + | ++++++ help: consider importing one of these items | 18 + use frame_benchmarking::__private::traits::PalletInfo; | 18 + use frame_support::traits::PalletInfo; | + +error[E0412]: cannot find type `RuntimeTask` in this scope + --> tests/construct_runtime_ui/number_of_pallets_exceeds_tuple_size.rs:39:1 + | +39 | #[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the macro `frame_system::config_preludes::TestDefaultConfig` which comes from the expansion of the macro `frame_support::macro_magic::forward_tokens_verbatim` (in Nightly builds, run with -Z macro-backtrace for more info) +help: you might have meant to use the associated type + --> $WORKSPACE/substrate/frame/system/src/lib.rs + | + | type Self::RuntimeTask = (); + | ++++++ diff --git a/substrate/frame/support/test/tests/derive_impl.rs b/substrate/frame/support/test/tests/derive_impl.rs new file mode 100644 index 000000000000..675e85f4bfce --- /dev/null +++ b/substrate/frame/support/test/tests/derive_impl.rs @@ -0,0 +1,52 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +use frame_support::derive_impl; + +trait Shape { + fn area(&self) -> u32; +} + +struct SomeRectangle {} + +#[frame_support::register_default_impl(SomeRectangle)] +impl Shape for SomeRectangle { + #[cfg(not(feature = "feature-frame-testing"))] + fn area(&self) -> u32 { + 10 + } + + #[cfg(feature = "feature-frame-testing")] + fn area(&self) -> u32 { + 0 + } +} + +struct SomeSquare {} + +#[derive_impl(SomeRectangle)] +impl Shape for SomeSquare {} + +#[test] +fn test_feature_parsing() { + let square = SomeSquare {}; + #[cfg(not(feature = "feature-frame-testing"))] + assert_eq!(square.area(), 10); + + #[cfg(feature = "feature-frame-testing")] + assert_eq!(square.area(), 0); +} diff --git a/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_fails_when_type_not_in_scope.stderr b/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_fails_when_type_not_in_scope.stderr index f3ac6b232811..58aae713f752 100644 --- a/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_fails_when_type_not_in_scope.stderr +++ b/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_fails_when_type_not_in_scope.stderr @@ -2,9 +2,13 @@ error[E0412]: cannot find type `RuntimeCall` in this scope --> tests/derive_impl_ui/inject_runtime_type_fails_when_type_not_in_scope.rs:30:10 | 30 | type RuntimeCall = (); - | ^^^^^^^^^^^ help: you might have meant to use the associated type: `Self::RuntimeCall` + | ^^^^^^^^^^^ ... 35 | #[derive_impl(Pallet)] // Injects type RuntimeCall = RuntimeCall; | ---------------------- in this macro invocation | = note: this error originates in the macro `Pallet` which comes from the expansion of the macro `frame_support::macro_magic::forward_tokens_verbatim` (in Nightly builds, run with -Z macro-backtrace for more info) +help: you might have meant to use the associated type + | +30 | type Self::RuntimeCall = (); + | ++++++ diff --git a/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_invalid.stderr b/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_invalid.stderr index 501aad0419f8..cda20288984a 100644 --- a/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_invalid.stderr +++ b/substrate/frame/support/test/tests/derive_impl_ui/inject_runtime_type_invalid.stderr @@ -1,4 +1,4 @@ -error: `#[inject_runtime_type]` can only be attached to `RuntimeCall`, `RuntimeEvent`, `RuntimeOrigin` or `PalletInfo` +error: `#[inject_runtime_type]` can only be attached to `RuntimeCall`, `RuntimeEvent`, `RuntimeTask`, `RuntimeOrigin` or `PalletInfo` --> tests/derive_impl_ui/inject_runtime_type_invalid.rs:32:5 | 32 | type RuntimeInfo = (); diff --git a/substrate/frame/support/test/tests/pallet.rs b/substrate/frame/support/test/tests/pallet.rs index 0d3c088d4c4f..c44b6f035fb9 100644 --- a/substrate/frame/support/test/tests/pallet.rs +++ b/substrate/frame/support/test/tests/pallet.rs @@ -258,6 +258,13 @@ pub mod pallet { pub fn check_for_dispatch_context(_origin: OriginFor) -> DispatchResult { with_context::<(), _>(|_| ()).ok_or_else(|| DispatchError::Unavailable) } + + #[cfg(feature = "frame-feature-testing")] + #[pallet::call_index(5)] + #[pallet::weight({1})] + pub fn foo_feature_test(_origin: OriginFor) -> DispatchResult { + Ok(()) + } } #[pallet::error] @@ -270,6 +277,8 @@ pub mod pallet { #[codec(skip)] Skipped(u128), CompactU8(#[codec(compact)] u8), + #[cfg(feature = "frame-feature-testing")] + FeatureTest, } #[pallet::event] @@ -763,6 +772,7 @@ fn call_expand() { } ); 
assert_eq!(call_foo.get_call_name(), "foo"); + #[cfg(not(feature = "frame-feature-testing"))] assert_eq!( pallet::Call::::get_call_names(), &[ @@ -773,9 +783,24 @@ fn call_expand() { "check_for_dispatch_context" ], ); + #[cfg(feature = "frame-feature-testing")] + assert_eq!( + pallet::Call::::get_call_names(), + &[ + "foo", + "foo_storage_layer", + "foo_index_out_of_order", + "foo_no_post_info", + "check_for_dispatch_context", + "foo_feature_test" + ], + ); assert_eq!(call_foo.get_call_index(), 0u8); - assert_eq!(pallet::Call::::get_call_indices(), &[0u8, 1u8, 4u8, 2u8, 3u8]) + #[cfg(not(feature = "frame-feature-testing"))] + assert_eq!(pallet::Call::::get_call_indices(), &[0u8, 1u8, 4u8, 2u8, 3u8]); + #[cfg(feature = "frame-feature-testing")] + assert_eq!(pallet::Call::::get_call_indices(), &[0u8, 1u8, 4u8, 2u8, 3u8, 5u8]); } #[test] @@ -783,7 +808,10 @@ fn call_expand_index() { let call_foo = pallet::Call::::foo_index_out_of_order {}; assert_eq!(call_foo.get_call_index(), 4u8); - assert_eq!(pallet::Call::::get_call_indices(), &[0u8, 1u8, 4u8, 2u8, 3u8]) + #[cfg(not(feature = "frame-feature-testing"))] + assert_eq!(pallet::Call::::get_call_indices(), &[0u8, 1u8, 4u8, 2u8, 3u8]); + #[cfg(feature = "frame-feature-testing")] + assert_eq!(pallet::Call::::get_call_indices(), &[0u8, 1u8, 4u8, 2u8, 3u8, 5u8]); } #[test] @@ -805,6 +833,8 @@ fn error_expand() { }), ); assert_eq!( as PalletError>::MAX_ENCODED_SIZE, 3); + #[cfg(feature = "frame-feature-testing")] + assert_eq!(format!("{:?}", pallet::Error::::FeatureTest), String::from("FeatureTest"),); } #[test] @@ -2157,3 +2187,33 @@ fn test_dispatch_context() { .dispatch(RuntimeOrigin::root())); }); } + +#[test] +fn test_call_feature_parsing() { + let call = pallet::Call::::check_for_dispatch_context {}; + match call { + pallet::Call::::check_for_dispatch_context {} | + pallet::Call::::foo { .. } | + pallet::Call::foo_storage_layer { .. 
} | + pallet::Call::foo_index_out_of_order {} | + pallet::Call::foo_no_post_info {} => (), + #[cfg(feature = "frame-feature-testing")] + pallet::Call::foo_feature_test {} => (), + pallet::Call::__Ignore(_, _) => (), + } +} + +#[test] +fn test_error_feature_parsing() { + let err = pallet::Error::::InsufficientProposersBalance; + match err { + pallet::Error::InsufficientProposersBalance | + pallet::Error::NonExistentStorageValue | + pallet::Error::Code(_) | + pallet::Error::Skipped(_) | + pallet::Error::CompactU8(_) => (), + #[cfg(feature = "frame-feature-testing")] + pallet::Error::FeatureTest => (), + pallet::Error::__Ignore(_, _) => (), + } +} diff --git a/substrate/frame/support/test/tests/pallet_outer_enums_explicit.rs b/substrate/frame/support/test/tests/pallet_outer_enums_explicit.rs index bb6bc02eaa13..b72b44179585 100644 --- a/substrate/frame/support/test/tests/pallet_outer_enums_explicit.rs +++ b/substrate/frame/support/test/tests/pallet_outer_enums_explicit.rs @@ -90,6 +90,12 @@ fn module_error_outer_enum_expand_explicit() { frame_system::Error::NonDefaultComposite => (), frame_system::Error::NonZeroRefCount => (), frame_system::Error::CallFiltered => (), + #[cfg(feature = "experimental")] + frame_system::Error::InvalidTask => (), + #[cfg(feature = "experimental")] + frame_system::Error::FailedTask => (), + frame_system::Error::NothingAuthorized => (), + frame_system::Error::Unauthorized => (), frame_system::Error::__Ignore(_, _) => (), }, diff --git a/substrate/frame/support/test/tests/pallet_outer_enums_implicit.rs b/substrate/frame/support/test/tests/pallet_outer_enums_implicit.rs index 913620adcbbe..cd280de5b340 100644 --- a/substrate/frame/support/test/tests/pallet_outer_enums_implicit.rs +++ b/substrate/frame/support/test/tests/pallet_outer_enums_implicit.rs @@ -90,6 +90,12 @@ fn module_error_outer_enum_expand_implicit() { frame_system::Error::NonDefaultComposite => (), frame_system::Error::NonZeroRefCount => (), frame_system::Error::CallFiltered => (), + #[cfg(feature = "experimental")] + frame_system::Error::InvalidTask => (), + #[cfg(feature = "experimental")] + frame_system::Error::FailedTask => (), + frame_system::Error::NothingAuthorized => (), + frame_system::Error::Unauthorized => (), frame_system::Error::__Ignore(_, _) => (), }, diff --git a/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr b/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr index 08ea7c0bec3a..40f8f1298304 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound.stderr @@ -10,6 +10,7 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: | ^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` --> tests/pallet_ui/call_argument_invalid_bound.rs:38:36 diff --git a/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr b/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr index 80316fcd2489..5744c6362350 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_2.stderr @@ -10,6 +10,7 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: | ^ | = note: `-D deprecated` implied by `-D warnings` + = 
help: to override `-D warnings` add `#[allow(deprecated)]` error[E0277]: `::Bar` doesn't implement `std::fmt::Debug` --> tests/pallet_ui/call_argument_invalid_bound_2.rs:38:36 @@ -45,9 +46,9 @@ error[E0277]: the trait bound `::Bar: WrapperTypeEncode` is = note: required for `::Bar` to implement `Encode` error[E0277]: the trait bound `::Bar: WrapperTypeDecode` is not satisfied - --> tests/pallet_ui/call_argument_invalid_bound_2.rs:34:12 + --> tests/pallet_ui/call_argument_invalid_bound_2.rs:38:42 | -34 | #[pallet::call] - | ^^^^ the trait `WrapperTypeDecode` is not implemented for `::Bar` +38 | pub fn foo(origin: OriginFor, _bar: T::Bar) -> DispatchResultWithPostInfo { + | ^^^^^^ the trait `WrapperTypeDecode` is not implemented for `::Bar` | = note: required for `::Bar` to implement `Decode` diff --git a/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr b/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr index d45b74bad842..b58e4516bceb 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_argument_invalid_bound_3.stderr @@ -10,6 +10,7 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: | ^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error[E0277]: `Bar` doesn't implement `std::fmt::Debug` --> tests/pallet_ui/call_argument_invalid_bound_3.rs:40:36 diff --git a/substrate/frame/support/test/tests/pallet_ui/call_missing_index.stderr b/substrate/frame/support/test/tests/pallet_ui/call_missing_index.stderr index 4d55ef798569..ba06285bdeea 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_missing_index.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_missing_index.stderr @@ -11,6 +11,7 @@ error: use of deprecated constant `pallet::warnings::ImplicitCallIndex_0::_w`: | ^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error: use of deprecated constant `pallet::warnings::ImplicitCallIndex_1::_w`: It is deprecated to use implicit call indices. 
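Most of the .stderr churn in this area is mechanical: these UI expectations were regenerated (alongside the trybuild bump to 1.0.88 earlier in the patch), and the newer compiler output appends a "help: to override `-D warnings` add `#[allow(...)]`" note to each deny-by-default lint diagnostic, so every expectation file gains that line verbatim. As a self-contained illustration of what the hint refers to (plain Rust, not code from the patch):

// Not from the patch: building with `RUSTFLAGS="-D warnings"` turns the
// deprecation warning into an error unless the use site opts back in.
#[deprecated(note = "use `new_api` instead")]
fn old_api() -> u32 {
    1
}

fn new_api() -> u32 {
    2
}

#[allow(deprecated)] // overrides `-D warnings` for this item, as the help text suggests
fn migration_shim() -> u32 {
    old_api() + new_api()
}

fn main() {
    assert_eq!(migration_shim(), 3);
}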
diff --git a/substrate/frame/support/test/tests/pallet_ui/call_weight_argument_has_suffix.stderr b/substrate/frame/support/test/tests/pallet_ui/call_weight_argument_has_suffix.stderr index cf23a76f8ea0..4b5abdcd0e6d 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_weight_argument_has_suffix.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_weight_argument_has_suffix.stderr @@ -18,3 +18,4 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: | ^^^^^^^^^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` diff --git a/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning.stderr b/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning.stderr index ccd5a935773c..d399df4d85bd 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning.stderr @@ -10,3 +10,4 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: | ^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` diff --git a/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning_twice.stderr b/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning_twice.stderr index aadb939b6454..d7e4951e49fc 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning_twice.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_weight_const_warning_twice.stderr @@ -18,6 +18,7 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: | ^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error: use of deprecated constant `pallet::warnings::ConstantWeight_1::_w`: It is deprecated to use hard-coded constant as call weight. 
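Stepping back from the regenerated expectations for a moment: the new tests/derive_impl.rs above exercises `#[register_default_impl]` together with `#[derive_impl]` on a plain trait. A rough, self-contained sketch of the same mechanism with one item overridden locally (illustrative names, assuming only the frame_support crate; a sketch, not code from the patch):

// Sketch: register a default impl once, then derive from it elsewhere.
use frame_support::derive_impl;

trait Animal {
    fn legs(&self) -> u32;
    fn sound(&self) -> &'static str;
}

struct DefaultAnimal;

#[frame_support::register_default_impl(DefaultAnimal)]
impl Animal for DefaultAnimal {
    fn legs(&self) -> u32 {
        4
    }
    fn sound(&self) -> &'static str {
        "..."
    }
}

struct Dog;

// Items not written here are pulled in from `DefaultAnimal`; locally
// defined items (here `sound`) take precedence over the default impl.
#[derive_impl(DefaultAnimal)]
impl Animal for Dog {
    fn sound(&self) -> &'static str {
        "woof"
    }
}

fn main() {
    assert_eq!(Dog.legs(), 4);
    assert_eq!(Dog.sound(), "woof");
}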
diff --git a/substrate/frame/support/test/tests/pallet_ui/call_weight_inherited_invalid3.stderr b/substrate/frame/support/test/tests/pallet_ui/call_weight_inherited_invalid3.stderr index e8e6f2fe6df0..339551d9811c 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_weight_inherited_invalid3.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_weight_inherited_invalid3.stderr @@ -17,3 +17,4 @@ error: unused import: `frame_system::pallet_prelude::*` | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D unused-imports` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unused_imports)]` diff --git a/substrate/frame/support/test/tests/pallet_ui/call_weight_unchecked_warning.stderr b/substrate/frame/support/test/tests/pallet_ui/call_weight_unchecked_warning.stderr index 89fc1e0820f5..33302a2a0278 100644 --- a/substrate/frame/support/test/tests/pallet_ui/call_weight_unchecked_warning.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/call_weight_unchecked_warning.stderr @@ -10,3 +10,4 @@ error: use of deprecated constant `pallet::warnings::UncheckedWeightWitness_0::_ | ^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` diff --git a/substrate/frame/support/test/tests/pallet_ui/composite_enum_unsupported_identifier.stderr b/substrate/frame/support/test/tests/pallet_ui/composite_enum_unsupported_identifier.stderr index cdc8f623142b..8de9c8990b00 100644 --- a/substrate/frame/support/test/tests/pallet_ui/composite_enum_unsupported_identifier.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/composite_enum_unsupported_identifier.stderr @@ -1,4 +1,4 @@ -error: expected one of: `FreezeReason`, `HoldReason`, `LockId`, `SlashReason` +error: expected one of: `FreezeReason`, `HoldReason`, `LockId`, `SlashReason`, `Task` --> tests/pallet_ui/composite_enum_unsupported_identifier.rs:27:11 | 27 | pub enum HoldReasons {} diff --git a/substrate/frame/support/test/tests/pallet_ui/deprecated_store_attr.stderr b/substrate/frame/support/test/tests/pallet_ui/deprecated_store_attr.stderr index 942db0ab4699..e227033d3646 100644 --- a/substrate/frame/support/test/tests/pallet_ui/deprecated_store_attr.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/deprecated_store_attr.stderr @@ -7,3 +7,4 @@ error: use of deprecated struct `pallet::_::Store`: | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` diff --git a/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_call_index.stderr b/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_call_index.stderr index bcfe43d008f8..01254584c626 100644 --- a/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_call_index.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_call_index.stderr @@ -11,6 +11,7 @@ error: use of deprecated constant `pallet::warnings::ImplicitCallIndex_0::_w`: | ^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: It is deprecated to use hard-coded constant as call weight. 
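The long run of new task_* UI tests further below (task_valid.rs, task_invalid_*.rs, task_missing_*.rs) pins down the surface of the experimental `#[pallet::tasks_experimental]` block, whose `Task` identifier the composite-enum and construct_runtime parsers above now accept. For orientation, this is the shape the passing case accepts, essentially task_valid.rs restated with its generics spelled out (a pallet fragment, not standalone code):

#[frame_support::pallet(dev_mode)]
mod pallet {
    use frame_support::{ensure, pallet_prelude::DispatchResult};

    #[pallet::config]
    pub trait Config: frame_system::Config {}

    #[pallet::pallet]
    pub struct Pallet<T>(core::marker::PhantomData<T>);

    // A task declares an index, a condition over its arguments, an iterator
    // producing the argument tuples to run, and a weight; the invalid/missing
    // variants below each drop or mistype one of these and must fail to compile.
    #[pallet::tasks_experimental]
    impl<T: Config> Pallet<T> {
        #[pallet::task_index(0)]
        #[pallet::task_condition(|i, j| i == 0u32 && j == 2u64)]
        #[pallet::task_list(vec![(0u32, 2u64), (2u32, 4u64)].iter())]
        #[pallet::task_weight(0.into())]
        fn foo(i: u32, j: u64) -> DispatchResult {
            ensure!(i == 0, "i must be 0");
            ensure!(j == 2, "j must be 2");
            Ok(())
        }
    }
}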
diff --git a/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_max_encoded_len.stderr b/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_max_encoded_len.stderr index 531e8bdffeb0..02ead305dd81 100644 --- a/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_max_encoded_len.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/dev_mode_without_arg_max_encoded_len.stderr @@ -11,6 +11,7 @@ error: use of deprecated constant `pallet::warnings::ImplicitCallIndex_0::_w`: | ^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: It is deprecated to use hard-coded constant as call weight. @@ -26,8 +27,15 @@ error: use of deprecated constant `pallet::warnings::ConstantWeight_0::_w`: error[E0277]: the trait bound `Vec: MaxEncodedLen` is not satisfied --> tests/pallet_ui/dev_mode_without_arg_max_encoded_len.rs:28:12 | -28 | #[pallet::pallet] - | ^^^^^^ the trait `MaxEncodedLen` is not implemented for `Vec` +28 | #[pallet::pallet] + | _______________^ +29 | | pub struct Pallet(_); +30 | | +31 | | // Your Pallet's configuration trait, representing custom external types and interfaces. +... | +35 | | #[pallet::storage] +36 | | type MyStorage = StorageValue<_, Vec>; + | |__________________^ the trait `MaxEncodedLen` is not implemented for `Vec` | = help: the following other types implement trait `MaxEncodedLen`: bool diff --git a/substrate/frame/support/test/tests/pallet_ui/error_does_not_derive_pallet_error.stderr b/substrate/frame/support/test/tests/pallet_ui/error_does_not_derive_pallet_error.stderr index ea1d0ed99cd3..9cefd2f4899a 100644 --- a/substrate/frame/support/test/tests/pallet_ui/error_does_not_derive_pallet_error.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/error_does_not_derive_pallet_error.stderr @@ -1,8 +1,8 @@ error[E0277]: the trait bound `MyError: PalletError` is not satisfied - --> tests/pallet_ui/error_does_not_derive_pallet_error.rs:18:1 + --> tests/pallet_ui/error_does_not_derive_pallet_error.rs:28:15 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `PalletError` is not implemented for `MyError` +28 | CustomError(crate::MyError), + | ^^^^^^^^^^^^^^ the trait `PalletError` is not implemented for `MyError` | = help: the following other types implement trait `PalletError`: bool @@ -14,4 +14,3 @@ error[E0277]: the trait bound `MyError: PalletError` is not satisfied u8 u16 and $N others - = note: this error originates in the derive macro `frame_support::PalletError` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/substrate/frame/support/test/tests/pallet_ui/pass/task_valid.rs b/substrate/frame/support/test/tests/pallet_ui/pass/task_valid.rs new file mode 100644 index 000000000000..234e220f49d8 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/pass/task_valid.rs @@ -0,0 +1,43 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::{ensure, pallet_prelude::DispatchResult}; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(|i, j| i == 0u32 && j == 2u64)] + #[pallet::task_list(vec![(0u32, 2u64), (2u32, 4u64)].iter())] + #[pallet::task_weight(0.into())] + fn foo(i: u32, j: u64) -> DispatchResult { + ensure!(i == 0, "i must be 0"); + ensure!(j == 2, "j must be 2"); + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr b/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr index b5d108275249..4229d1e8a545 100644 --- a/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.stderr @@ -1,8 +1,15 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:27:12 | -27 | #[pallet::without_storage_info] - | ^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +27 | #[pallet::without_storage_info] + | _______________^ +28 | | pub struct Pallet(core::marker::PhantomData); +29 | | +30 | | #[pallet::hooks] +... | +38 | | #[pallet::storage] +39 | | type Foo = StorageValue; + | |____________^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeDecode`: Box @@ -16,8 +23,15 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:27:12 | -27 | #[pallet::without_storage_info] - | ^^^^^^^^^^^^^^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +27 | #[pallet::without_storage_info] + | _______________^ +28 | | pub struct Pallet(core::marker::PhantomData); +29 | | +30 | | #[pallet::hooks] +... | +38 | | #[pallet::storage] +39 | | type Foo = StorageValue; + | |____________^ the trait `EncodeLike` is not implemented for `Bar` | = help: the following other types implement trait `EncodeLike`: @@ -36,8 +50,15 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:27:12 | -27 | #[pallet::without_storage_info] - | ^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +27 | #[pallet::without_storage_info] + | _______________^ +28 | | pub struct Pallet(core::marker::PhantomData); +29 | | +30 | | #[pallet::hooks] +... 
| +38 | | #[pallet::storage] +39 | | type Foo = StorageValue; + | |____________^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeEncode`: Box @@ -57,8 +78,10 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `TypeInfo` is not implemented for `Bar` | = help: the following other types implement trait `TypeInfo`: bool @@ -76,8 +99,10 @@ error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeDecode`: Box @@ -91,8 +116,10 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `EncodeLike` is not implemented for `Bar` | = help: the following other types implement trait `EncodeLike`: @@ -111,8 +138,10 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeEncode`: Box @@ -130,10 +159,12 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `StorageEntryMetadataBuilder` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied - --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:18:1 + --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeDecode`: Box @@ -143,13 +174,14 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied = note: required for `Bar` to implement `Decode` = note: required for `Bar` to implement `FullCodec` = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `TryDecodeEntireStorage` - = note: this 
error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied - --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:18:1 + --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `EncodeLike` is not implemented for `Bar` | = help: the following other types implement trait `EncodeLike`: @@ -164,13 +196,14 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied = note: required for `Bar` to implement `FullEncode` = note: required for `Bar` to implement `FullCodec` = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `TryDecodeEntireStorage` - = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied - --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:18:1 + --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen.rs:38:12 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue; + | |____________^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeEncode`: Box @@ -186,4 +219,3 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied = note: required for `Bar` to implement `FullEncode` = note: required for `Bar` to implement `FullCodec` = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `TryDecodeEntireStorage` - = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr b/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr index b58902590b85..855d289d0a16 100644 --- a/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.stderr @@ -1,8 +1,15 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:27:12 | -27 | #[pallet::without_storage_info] - | ^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +27 | #[pallet::without_storage_info] + | _______________^ +28 | | pub struct Pallet(core::marker::PhantomData); +29 | | +30 | | #[pallet::hooks] +... 
| +38 | | #[pallet::storage] +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeDecode`: Box @@ -16,8 +23,15 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:27:12 | -27 | #[pallet::without_storage_info] - | ^^^^^^^^^^^^^^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +27 | #[pallet::without_storage_info] + | _______________^ +28 | | pub struct Pallet(core::marker::PhantomData); +29 | | +30 | | #[pallet::hooks] +... | +38 | | #[pallet::storage] +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `EncodeLike` is not implemented for `Bar` | = help: the following other types implement trait `EncodeLike`: @@ -36,8 +50,15 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:27:12 | -27 | #[pallet::without_storage_info] - | ^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +27 | #[pallet::without_storage_info] + | _______________^ +28 | | pub struct Pallet(core::marker::PhantomData); +29 | | +30 | | #[pallet::hooks] +... | +38 | | #[pallet::storage] +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeEncode`: Box @@ -57,8 +78,10 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `TypeInfo` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `TypeInfo` is not implemented for `Bar` | = help: the following other types implement trait `TypeInfo`: bool @@ -76,8 +99,10 @@ error[E0277]: the trait bound `Bar: TypeInfo` is not satisfied error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeDecode`: Box @@ -91,8 +116,10 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `EncodeLike` is not implemented for `Bar` | = help: the following other types implement trait `EncodeLike`: @@ -111,8 +138,10 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied --> 
tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -38 | #[pallet::storage] - | ^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeEncode`: Box @@ -130,10 +159,12 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `StorageEntryMetadataBuilder` error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied - --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:18:1 + --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeDecode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `WrapperTypeDecode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeDecode`: Box @@ -143,13 +174,14 @@ error[E0277]: the trait bound `Bar: WrapperTypeDecode` is not satisfied = note: required for `Bar` to implement `Decode` = note: required for `Bar` to implement `FullCodec` = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `TryDecodeEntireStorage` - = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied - --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:18:1 + --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `EncodeLike` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `EncodeLike` is not implemented for `Bar` | = help: the following other types implement trait `EncodeLike`: @@ -164,13 +196,14 @@ error[E0277]: the trait bound `Bar: EncodeLike` is not satisfied = note: required for `Bar` to implement `FullEncode` = note: required for `Bar` to implement `FullCodec` = note: required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `TryDecodeEntireStorage` - = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied - --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:18:1 + --> tests/pallet_ui/storage_ensure_span_are_ok_on_wrong_gen_unnamed.rs:38:12 | -18 | #[frame_support::pallet] - | ^^^^^^^^^^^^^^^^^^^^^^^^ the trait `WrapperTypeEncode` is not implemented for `Bar` +38 | #[pallet::storage] + | _______________^ +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `WrapperTypeEncode` is not implemented for `Bar` | = help: the following other types implement trait `WrapperTypeEncode`: Box @@ -186,4 +219,3 @@ error[E0277]: the trait bound `Bar: WrapperTypeEncode` is not satisfied = note: required for `Bar` to implement `FullEncode` = note: required for `Bar` to implement `FullCodec` = note: 
required for `frame_support::pallet_prelude::StorageValue<_GeneratedPrefixForStorageFoo, Bar>` to implement `TryDecodeEntireStorage` - = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) diff --git a/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr b/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr index e04de98800ec..504db21feeb2 100644 --- a/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied.stderr @@ -1,8 +1,15 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied --> tests/pallet_ui/storage_info_unsatisfied.rs:26:12 | -26 | #[pallet::pallet] - | ^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` +26 | #[pallet::pallet] + | _______________^ +27 | | pub struct Pallet(core::marker::PhantomData); +28 | | +29 | | #[pallet::hooks] +... | +38 | | #[pallet::storage] +39 | | type Foo = StorageValue<_, Bar>; + | |____________^ the trait `MaxEncodedLen` is not implemented for `Bar` | = help: the following other types implement trait `MaxEncodedLen`: bool diff --git a/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr b/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr index 31fe3b573389..6fd0b1959c86 100644 --- a/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/storage_info_unsatisfied_nmap.stderr @@ -1,8 +1,15 @@ error[E0277]: the trait bound `Bar: MaxEncodedLen` is not satisfied --> tests/pallet_ui/storage_info_unsatisfied_nmap.rs:29:12 | -29 | #[pallet::pallet] - | ^^^^^^ the trait `MaxEncodedLen` is not implemented for `Bar` +29 | #[pallet::pallet] + | _______________^ +30 | | pub struct Pallet(core::marker::PhantomData); +31 | | +32 | | #[pallet::hooks] +... | +41 | | #[pallet::storage] +42 | | type Foo = StorageNMap<_, Key, u32>; + | |____________^ the trait `MaxEncodedLen` is not implemented for `Bar` | = help: the following other types implement trait `MaxEncodedLen`: bool diff --git a/substrate/frame/support/test/tests/pallet_ui/store_trait_leak_private.stderr b/substrate/frame/support/test/tests/pallet_ui/store_trait_leak_private.stderr index 20144d825e83..ccb55122e816 100644 --- a/substrate/frame/support/test/tests/pallet_ui/store_trait_leak_private.stderr +++ b/substrate/frame/support/test/tests/pallet_ui/store_trait_leak_private.stderr @@ -7,6 +7,7 @@ error: use of deprecated struct `pallet::_::Store`: | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | = note: `-D deprecated` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(deprecated)]` error[E0446]: private type `_GeneratedPrefixForStorageFoo` in public interface --> tests/pallet_ui/store_trait_leak_private.rs:28:37 diff --git a/substrate/frame/support/test/tests/pallet_ui/task_can_only_be_attached_to_impl.rs b/substrate/frame/support/test/tests/pallet_ui/task_can_only_be_attached_to_impl.rs new file mode 100644 index 000000000000..95f5655af198 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_can_only_be_attached_to_impl.rs @@ -0,0 +1,34 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. 
+// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + pub struct Task; +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_can_only_be_attached_to_impl.stderr b/substrate/frame/support/test/tests/pallet_ui/task_can_only_be_attached_to_impl.stderr new file mode 100644 index 000000000000..eaa8e718840e --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_can_only_be_attached_to_impl.stderr @@ -0,0 +1,5 @@ +error: expected `impl` + --> tests/pallet_ui/task_can_only_be_attached_to_impl.rs:30:5 + | +30 | pub struct Task; + | ^^^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_condition_invalid_arg.rs b/substrate/frame/support/test/tests/pallet_ui/task_condition_invalid_arg.rs new file mode 100644 index 000000000000..1db96869155b --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_condition_invalid_arg.rs @@ -0,0 +1,42 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(|flag: bool| flag)] + #[pallet::task_list(vec![1, 2].iter())] + #[pallet::task_weight(0.into())] + fn foo(_i: u32) -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_condition_invalid_arg.stderr b/substrate/frame/support/test/tests/pallet_ui/task_condition_invalid_arg.stderr new file mode 100644 index 000000000000..9c7bad8119f5 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_condition_invalid_arg.stderr @@ -0,0 +1,23 @@ +error: unused import: `frame_system::pallet_prelude::OriginFor` + --> tests/pallet_ui/task_condition_invalid_arg.rs:21:6 + | +21 | use frame_system::pallet_prelude::OriginFor; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D unused-imports` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unused_imports)]` + +error[E0308]: mismatched types + --> tests/pallet_ui/task_condition_invalid_arg.rs:35:10 + | +32 | #[pallet::task_condition(|flag: bool| flag)] + | ----------------- arguments to this function are incorrect +... +35 | fn foo(_i: u32) -> DispatchResult { + | ^^ expected `bool`, found `u32` + | +note: closure parameter defined here + --> tests/pallet_ui/task_condition_invalid_arg.rs:32:29 + | +32 | #[pallet::task_condition(|flag: bool| flag)] + | ^^^^^^^^^^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_condition.rs b/substrate/frame/support/test/tests/pallet_ui/task_invalid_condition.rs new file mode 100644 index 000000000000..6875bc13b8fa --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_condition.rs @@ -0,0 +1,42 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(0)] + #[pallet::task_list(vec![1, 2].iter())] + #[pallet::task_weight(0.into())] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_condition.stderr b/substrate/frame/support/test/tests/pallet_ui/task_invalid_condition.stderr new file mode 100644 index 000000000000..05c0ba5eecf2 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_condition.stderr @@ -0,0 +1,28 @@ +error: unused import: `frame_system::pallet_prelude::OriginFor` + --> tests/pallet_ui/task_invalid_condition.rs:21:6 + | +21 | use frame_system::pallet_prelude::OriginFor; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D unused-imports` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unused_imports)]` + +error[E0308]: mismatched types + --> tests/pallet_ui/task_invalid_condition.rs:18:1 + | +18 | #[frame_support::pallet(dev_mode)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | expected integer, found `()` + | expected due to this + | + = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0618]: expected function, found `{integer}` + --> tests/pallet_ui/task_invalid_condition.rs:32:28 + | +18 | #[frame_support::pallet(dev_mode)] + | ---------------------------------- call expression requires function +... +32 | #[pallet::task_condition(0)] + | ^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_index.rs b/substrate/frame/support/test/tests/pallet_ui/task_invalid_index.rs new file mode 100644 index 000000000000..2a4b40523a68 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_index.rs @@ -0,0 +1,39 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index("0")] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_index.stderr b/substrate/frame/support/test/tests/pallet_ui/task_invalid_index.stderr new file mode 100644 index 000000000000..d33600455bf8 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_index.stderr @@ -0,0 +1,5 @@ +error: expected integer literal + --> tests/pallet_ui/task_invalid_index.rs:31:24 + | +31 | #[pallet::task_index("0")] + | ^^^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_list.rs b/substrate/frame/support/test/tests/pallet_ui/task_invalid_list.rs new file mode 100644 index 000000000000..bb6438aaf105 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_list.rs @@ -0,0 +1,42 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(|| true)] + #[pallet::task_list(0)] + #[pallet::task_weight(0.into())] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_list.stderr b/substrate/frame/support/test/tests/pallet_ui/task_invalid_list.stderr new file mode 100644 index 000000000000..536d02610cb9 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_list.stderr @@ -0,0 +1,20 @@ +error: unused import: `frame_system::pallet_prelude::OriginFor` + --> tests/pallet_ui/task_invalid_list.rs:21:6 + | +21 | use frame_system::pallet_prelude::OriginFor; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D unused-imports` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unused_imports)]` + +error[E0689]: can't call method `map` on ambiguous numeric type `{integer}` + --> tests/pallet_ui/task_invalid_list.rs:18:1 + | +18 | #[frame_support::pallet(dev_mode)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) +help: you must specify a concrete type for this numeric value, like `i32` + | +33 | #[pallet::task_list(0_i32)] + | ~~~~~ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_weight.rs b/substrate/frame/support/test/tests/pallet_ui/task_invalid_weight.rs new file mode 100644 index 000000000000..a0c4040347a0 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_weight.rs @@ -0,0 +1,42 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(|| true)] + #[pallet::task_list(vec![1, 2].iter())] + #[pallet::task_weight("0")] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_invalid_weight.stderr b/substrate/frame/support/test/tests/pallet_ui/task_invalid_weight.stderr new file mode 100644 index 000000000000..24e925a06992 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_invalid_weight.stderr @@ -0,0 +1,28 @@ +error: unused import: `frame_system::pallet_prelude::OriginFor` + --> tests/pallet_ui/task_invalid_weight.rs:21:6 + | +21 | use frame_system::pallet_prelude::OriginFor; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `-D unused-imports` implied by `-D warnings` + = help: to override `-D warnings` add `#[allow(unused_imports)]` + +error[E0308]: mismatched types + --> tests/pallet_ui/task_invalid_weight.rs:18:1 + | +18 | #[frame_support::pallet(dev_mode)] + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | expected integer, found `()` + | expected due to this + | + = note: this error originates in the attribute macro `frame_support::pallet` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0308]: mismatched types + --> tests/pallet_ui/task_invalid_weight.rs:34:25 + | +18 | #[frame_support::pallet(dev_mode)] + | ---------------------------------- expected `Weight` because of return type +... +34 | #[pallet::task_weight("0")] + | ^^^ expected `Weight`, found `&str` diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_condition.rs b/substrate/frame/support/test/tests/pallet_ui/task_missing_condition.rs new file mode 100644 index 000000000000..6ca6e37a5bdb --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_condition.rs @@ -0,0 +1,39 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
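Together, the two expected-error files above fix the argument types of the remaining attributes: the `task_list` expression is fed into iterator combinators (hence the ambiguous-numeric-type error for a bare `0`), and the `task_weight` expression has to evaluate to a `Weight` (a `"0"` string literal fails to type-check). A fragment sketching argument shapes consistent with what these tests exercise, with `Weight` taken from `frame_support::weights`:

```rust
// Fragment only: argument shapes for the task attributes, mirroring the values
// these UI tests treat as well-formed.
#[pallet::task_index(0)]
#[pallet::task_condition(|| true)]
#[pallet::task_list(vec![1u32, 2u32].iter())] // any expression yielding an iterator
#[pallet::task_weight(frame_support::weights::Weight::zero())] // any `Weight`-typed expression
fn foo() -> DispatchResult {
    Ok(())
}
```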
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_condition.stderr b/substrate/frame/support/test/tests/pallet_ui/task_missing_condition.stderr new file mode 100644 index 000000000000..c709ec7eac94 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_condition.stderr @@ -0,0 +1,5 @@ +error: missing `#[pallet::task_condition(..)]` attribute + --> tests/pallet_ui/task_missing_condition.rs:32:6 + | +32 | fn foo() -> DispatchResult { + | ^^^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_index.rs b/substrate/frame/support/test/tests/pallet_ui/task_missing_index.rs new file mode 100644 index 000000000000..ed98d229f18b --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_index.rs @@ -0,0 +1,38 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_index.stderr b/substrate/frame/support/test/tests/pallet_ui/task_missing_index.stderr new file mode 100644 index 000000000000..ba3c9d132b81 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_index.stderr @@ -0,0 +1,5 @@ +error: missing `#[pallet::task_index(..)]` attribute + --> tests/pallet_ui/task_missing_index.rs:31:6 + | +31 | fn foo() -> DispatchResult { + | ^^^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_list.rs b/substrate/frame/support/test/tests/pallet_ui/task_missing_list.rs new file mode 100644 index 000000000000..427efe127634 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_list.rs @@ -0,0 +1,40 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(|| true)] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_list.stderr b/substrate/frame/support/test/tests/pallet_ui/task_missing_list.stderr new file mode 100644 index 000000000000..f4ae26a75add --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_list.stderr @@ -0,0 +1,5 @@ +error: missing `#[pallet::task_list(..)]` attribute + --> tests/pallet_ui/task_missing_list.rs:33:6 + | +33 | fn foo() -> DispatchResult { + | ^^^ diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_weight.rs b/substrate/frame/support/test/tests/pallet_ui/task_missing_weight.rs new file mode 100644 index 000000000000..704be1f1e0b8 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_weight.rs @@ -0,0 +1,41 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
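The `task_missing_*` cases (including the missing-weight file that follows) establish that `task_index`, `task_condition`, `task_list` and `task_weight` are all mandatory on a task function. Assembled from the attribute values used across these tests, a sketch of a fully annotated task with its scaffolding looks roughly like this (not code from this diff, and not asserted to be canonical usage):

```rust
// Hedged sketch: a dev_mode pallet declaring one task with all four required
// attributes, modeled on the scaffolding shared by these UI tests.
#[frame_support::pallet(dev_mode)]
mod pallet {
    use frame_support::{pallet_prelude::DispatchResult, weights::Weight};

    #[pallet::config]
    pub trait Config: frame_system::Config {}

    #[pallet::pallet]
    pub struct Pallet<T>(core::marker::PhantomData<T>);

    #[pallet::tasks_experimental]
    impl<T: Config> Pallet<T> {
        #[pallet::task_index(0)]
        #[pallet::task_condition(|| true)]
        #[pallet::task_list(vec![1u32, 2u32].iter())]
        #[pallet::task_weight(Weight::zero())]
        fn foo() -> DispatchResult {
            Ok(())
        }
    }
}
```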
+ +#[frame_support::pallet(dev_mode)] +mod pallet { + use frame_support::pallet_prelude::DispatchResult; + use frame_system::pallet_prelude::OriginFor; + + #[pallet::config] + pub trait Config: frame_system::Config {} + + #[pallet::pallet] + pub struct Pallet(core::marker::PhantomData); + + #[pallet::tasks_experimental] + impl Pallet { + #[pallet::task_index(0)] + #[pallet::task_condition(|| true)] + #[pallet::task_list(vec![1, 2].iter())] + fn foo() -> DispatchResult { + Ok(()) + } + } +} + +fn main() { +} diff --git a/substrate/frame/support/test/tests/pallet_ui/task_missing_weight.stderr b/substrate/frame/support/test/tests/pallet_ui/task_missing_weight.stderr new file mode 100644 index 000000000000..de7b2eb17205 --- /dev/null +++ b/substrate/frame/support/test/tests/pallet_ui/task_missing_weight.stderr @@ -0,0 +1,5 @@ +error: missing `#[pallet::task_weight(..)]` attribute + --> tests/pallet_ui/task_missing_weight.rs:34:6 + | +34 | fn foo() -> DispatchResult { + | ^^^ diff --git a/substrate/frame/system/Cargo.toml b/substrate/frame/system/Cargo.toml index 3b454ac18f98..b0bab4ec756a 100644 --- a/substrate/frame/system/Cargo.toml +++ b/substrate/frame/system/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME system module" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,7 +20,7 @@ cfg-if = "1.0" codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } log = { version = "0.4.17", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive", "serde"] } -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"] } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"] } frame-support = { path = "../support", default-features = false } sp-core = { path = "../../primitives/core", default-features = false, features = ["serde"] } sp-io = { path = "../../primitives/io", default-features = false } @@ -53,6 +56,7 @@ runtime-benchmarks = [ "sp-runtime/runtime-benchmarks", ] try-runtime = ["frame-support/try-runtime", "sp-runtime/try-runtime"] +experimental = ["frame-support/experimental"] [[bench]] name = "bench" diff --git a/substrate/frame/system/benchmarking/Cargo.toml b/substrate/frame/system/benchmarking/Cargo.toml index 3e92c56408e5..8b9873f44b86 100644 --- a/substrate/frame/system/benchmarking/Cargo.toml +++ b/substrate/frame/system/benchmarking/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME System benchmarking" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/system/benchmarking/src/lib.rs b/substrate/frame/system/benchmarking/src/lib.rs index 61b697ee0cab..a19d44c0bf35 100644 --- a/substrate/frame/system/benchmarking/src/lib.rs +++ b/substrate/frame/system/benchmarking/src/lib.rs @@ -21,10 +21,7 @@ #![cfg_attr(not(feature = "std"), no_std)] #![cfg(feature = "runtime-benchmarks")] -use frame_benchmarking::{ - v1::{benchmarks, whitelisted_caller}, - BenchmarkError, -}; +use frame_benchmarking::{impl_benchmark_test_suite, v2::*}; use frame_support::{dispatch::DispatchClass, traits::Get}; use frame_system::{Call, Pallet as System, RawOrigin}; use sp_std::{prelude::*, vec}; @@ -53,19 +50,33 @@ pub trait Config: frame_system::Config { } } -benchmarks! { - remark { - let b in 0 .. 
*T::BlockLength::get().max.get(DispatchClass::Normal) as u32; +#[benchmarks] +mod benchmarks { + use super::*; + + #[benchmark] + fn remark( + b: Linear<0, { *T::BlockLength::get().max.get(DispatchClass::Normal) as u32 }>, + ) -> Result<(), BenchmarkError> { let remark_message = vec![1; b as usize]; let caller = whitelisted_caller(); - }: _(RawOrigin::Signed(caller), remark_message) - set_code { + #[extrinsic_call] + remark(RawOrigin::Signed(caller), remark_message); + + Ok(()) + } + + #[benchmark] + fn set_code() -> Result<(), BenchmarkError> { let runtime_blob = T::prepare_set_code_data(); T::setup_set_code_requirements(&runtime_blob)?; - }: _(RawOrigin::Root, runtime_blob) - verify { - T::verify_set_code() + + #[extrinsic_call] + set_code(RawOrigin::Root, runtime_blob); + + T::verify_set_code(); + Ok(()) } impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::Test); diff --git a/substrate/frame/system/rpc/runtime-api/Cargo.toml b/substrate/frame/system/rpc/runtime-api/Cargo.toml index 68dc7fc99059..8cec5de8d1e5 100644 --- a/substrate/frame/system/rpc/runtime-api/Cargo.toml +++ b/substrate/frame/system/rpc/runtime-api/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Runtime API definition required by System RPC extensions." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/system/src/lib.rs b/substrate/frame/system/src/lib.rs index ddbde79c618a..fe6efda6e4cb 100644 --- a/substrate/frame/system/src/lib.rs +++ b/substrate/frame/system/src/lib.rs @@ -18,27 +18,49 @@ //! # System Pallet //! -//! The System pallet provides low-level access to core types and cross-cutting utilities. -//! It acts as the base layer for other pallets to interact with the Substrate framework components. +//! The System pallet provides low-level access to core types and cross-cutting utilities. It acts +//! as the base layer for other pallets to interact with the Substrate framework components. //! //! - [`Config`] //! //! ## Overview //! -//! The System pallet defines the core data types used in a Substrate runtime. -//! It also provides several utility functions (see [`Pallet`]) for other FRAME pallets. +//! The System pallet defines the core data types used in a Substrate runtime. It also provides +//! several utility functions (see [`Pallet`]) for other FRAME pallets. //! -//! In addition, it manages the storage items for extrinsics data, indexes, event records, and -//! digest items, among other things that support the execution of the current block. +//! In addition, it manages the storage items for extrinsic data, indices, event records, and digest +//! items, among other things that support the execution of the current block. //! -//! It also handles low-level tasks like depositing logs, basic set up and take down of -//! temporary storage entries, and access to previous block hashes. +//! It also handles low-level tasks like depositing logs, basic set up and take down of temporary +//! storage entries, and access to previous block hashes. //! //! ## Interface //! //! ### Dispatchable Functions //! -//! The System pallet does not implement any dispatchable functions. +//! The System pallet provides dispatchable functions that, with the exception of `remark`, manage +//! low-level or privileged functionality of a Substrate-based runtime. +//! +//! - `remark`: Make some on-chain remark. +//! - `set_code`: Set the new runtime code. +//! +//! #### A Note on Upgrades +//! 
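The conversion above is a mechanical port from the old `benchmarks!` macro to benchmarking v2: each benchmark becomes a function inside a `#[benchmarks]` module, ranged parameters are typed as `Linear<A, B>`, the dispatch under test is marked `#[extrinsic_call]`, and anything after that call plays the role of the old `verify` block. A reduced sketch of the same layout for a hypothetical pallet (`do_something`, the mock paths and the parameter range are placeholders, not from this diff):

```rust
// Sketch of the benchmarking v2 layout used in the conversion above.
use frame_benchmarking::{impl_benchmark_test_suite, v2::*};
use frame_system::RawOrigin;

#[benchmarks]
mod benchmarks {
    use super::*;

    #[benchmark]
    fn do_something(n: Linear<0, 1_000>) -> Result<(), BenchmarkError> {
        // Setup code runs before the `#[extrinsic_call]` marker.
        let caller = whitelisted_caller();
        let payload = vec![0u8; n as usize];

        #[extrinsic_call]
        do_something(RawOrigin::Signed(caller), payload);

        // Everything after the call corresponds to the old `verify` section.
        Ok(())
    }

    impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(), crate::mock::Test);
}
```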
+//! The pallet provides two primary means of upgrading the runtime, a single-phase means using +//! `set_code` and a two-phase means using `authorize_upgrade` followed by +//! `apply_authorized_upgrade`. The first will directly attempt to apply the provided `code` +//! (application may have to be scheduled, depending on the context and implementation of the +//! `OnSetCode` trait). +//! +//! The `authorize_upgrade` route allows the authorization of a runtime's code hash. Once +//! authorized, anyone may upload the correct runtime to apply the code. This pattern is useful when +//! providing the runtime ahead of time may be unwieldy, for example when a large preimage (the +//! code) would need to be stored on-chain or sent over a message transport protocol such as a +//! bridge. +//! +//! The `*_without_checks` variants do not perform any version checks, so using them runs the risk +//! of applying a downgrade or entirely other chain specification. They will still validate that the +//! `code` meets the authorized hash. //! //! ### Public Functions //! @@ -58,7 +80,7 @@ //! - [`CheckSpecVersion`]: Checks that the runtime version is the same as the one used to sign //! the transaction. //! -//! Lookup the runtime aggregator file (e.g. `node/runtime`) to see the full list of signed +//! Look up the runtime aggregator file (e.g. `node/runtime`) to see the full list of signed //! extensions included in a chain. #![cfg_attr(not(feature = "std"), no_std)] @@ -223,6 +245,8 @@ pub mod pallet { type RuntimeCall = (); #[inject_runtime_type] type PalletInfo = (); + #[inject_runtime_type] + type RuntimeTask = (); type BaseCallFilter = frame_support::traits::Everything; type BlockHashCount = frame_support::traits::ConstU64<10>; } @@ -293,6 +317,10 @@ pub mod pallet { #[inject_runtime_type] type RuntimeCall = (); + /// The aggregated Task type, injected by `construct_runtime!`. + #[inject_runtime_type] + type RuntimeTask = (); + /// Converts a module to the index of the module, injected by `construct_runtime!`. #[inject_runtime_type] type PalletInfo = (); @@ -366,6 +394,10 @@ pub mod pallet { #[pallet::no_default_bounds] type RuntimeCall: Parameter + Dispatchable + Debug; + /// The aggregated `RuntimeTask` type. + #[pallet::no_default_bounds] + type RuntimeTask: Task; + /// This stores the number of previous transactions associated with a sender account. type Nonce: Parameter + Member @@ -1701,6 +1733,11 @@ impl BlockNumberProvider for Pallet { fn current_block_number() -> Self::BlockNumber { Pallet::::block_number() } + + #[cfg(feature = "runtime-benchmarks")] + fn set_block_number(n: BlockNumberFor) { + Self::set_block_number(n) + } } /// Implement StoredMap for a simple single-item, provide-when-not-default system. This works fine diff --git a/substrate/frame/system/src/mock.rs b/substrate/frame/system/src/mock.rs index 55a028dd3ad0..74937a9ec5f4 100644 --- a/substrate/frame/system/src/mock.rs +++ b/substrate/frame/system/src/mock.rs @@ -18,7 +18,7 @@ use crate::{self as frame_system, *}; use frame_support::{ - parameter_types, + derive_impl, parameter_types, traits::{ConstU32, ConstU64}, }; use sp_core::H256; @@ -72,6 +72,7 @@ parameter_types! 
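The note on upgrades added above names two entry points; a rough sketch of the two-phase flow in terms of those `frame_system` calls follows. `Runtime`, `RuntimeOrigin`, `AccountId` and the chosen origins are assumptions about a concrete runtime and its test context, not something this diff defines:

```rust
// Hedged sketch of the two-phase upgrade flow described in the new docs.
use sp_runtime::traits::Hash;

fn two_phase_upgrade(new_code: Vec<u8>, submitter: AccountId) {
    // Phase 1: a privileged origin authorizes only the *hash* of the new code.
    let code_hash = <Runtime as frame_system::Config>::Hashing::hash(&new_code);
    let _ = frame_system::Pallet::<Runtime>::authorize_upgrade(RuntimeOrigin::root(), code_hash);

    // Phase 2: anyone may later submit the full blob; it is checked against the
    // authorized hash (and, unless the `_without_checks` variant was used, the
    // usual version checks) before `OnSetCode` takes over.
    let _ = frame_system::Pallet::<Runtime>::apply_authorized_upgrade(
        RuntimeOrigin::signed(submitter),
        new_code,
    );
}
```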
{ limits::BlockLength::max_with_normal_ratio(1024, NORMAL_DISPATCH_RATIO); } +#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] impl Config for Test { type BaseCallFilter = frame_support::traits::Everything; type BlockWeights = RuntimeBlockWeights; diff --git a/substrate/frame/system/src/tests.rs b/substrate/frame/system/src/tests.rs index caa461e182a9..9b24cf60d04e 100644 --- a/substrate/frame/system/src/tests.rs +++ b/substrate/frame/system/src/tests.rs @@ -714,7 +714,7 @@ fn last_runtime_upgrade_spec_version_usage() { // a runtime upgrade in the pipeline of being applied, you should use the spec version // of this upgrade. if System::last_runtime_upgrade_spec_version() > 1337 { - return Weight::zero(); + return Weight::zero() } // Do the migration. diff --git a/substrate/frame/timestamp/Cargo.toml b/substrate/frame/timestamp/Cargo.toml index 5de9002dd17b..78471d17abd5 100644 --- a/substrate/frame/timestamp/Cargo.toml +++ b/substrate/frame/timestamp/Cargo.toml @@ -10,6 +10,9 @@ description = "FRAME Timestamp Module" documentation = "https://docs.rs/pallet-timestamp" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/transaction-payment/Cargo.toml b/substrate/frame/transaction-payment/Cargo.toml index c431f7f82434..d52e8e11c829 100644 --- a/substrate/frame/transaction-payment/Cargo.toml +++ b/substrate/frame/transaction-payment/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME pallet to manage transaction payments" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,7 +20,7 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = "derive", ] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", optional = true } +serde = { version = "1.0.195", optional = true } frame-support = { path = "../support", default-features = false } frame-system = { path = "../system", default-features = false } sp-core = { path = "../../primitives/core", default-features = false } @@ -26,7 +29,7 @@ sp-runtime = { path = "../../primitives/runtime", default-features = false } sp-std = { path = "../../primitives/std", default-features = false } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" pallet-balances = { path = "../balances" } [features] diff --git a/substrate/frame/transaction-payment/asset-conversion-tx-payment/Cargo.toml b/substrate/frame/transaction-payment/asset-conversion-tx-payment/Cargo.toml index 7d15740e8249..0bfe37a52679 100644 --- a/substrate/frame/transaction-payment/asset-conversion-tx-payment/Cargo.toml +++ b/substrate/frame/transaction-payment/asset-conversion-tx-payment/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Pallet to manage transaction payments in assets by converting them to native assets." 
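The mock above now derives the bulk of its `frame_system::Config` items from `TestDefaultConfig` via `derive_impl`, so only the associated types a test actually cares about need to be written out. A reduced sketch of the pattern (the overrides shown are illustrative, not the ones in this mock):

```rust
// Sketch: pull frame_system test defaults and override only what matters here.
use frame_support::derive_impl;

#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)]
impl frame_system::Config for Test {
    // Everything not listed is taken from `TestDefaultConfig`.
    type Block = Block;
    type AccountData = pallet_balances::AccountData<u64>;
}
```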
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/lib.rs b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/lib.rs index 7abef84810c8..2f456ab13f64 100644 --- a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/lib.rs +++ b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/lib.rs @@ -70,7 +70,6 @@ mod tests; mod payment; use frame_support::traits::tokens::AssetId; -use pallet_asset_conversion::MultiAssetIdConverter; pub use payment::*; /// Type aliases used for interaction with `OnChargeTransaction`. diff --git a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/mock.rs b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/mock.rs index b0e439462d55..72161dd16f2f 100644 --- a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/mock.rs +++ b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/mock.rs @@ -17,7 +17,6 @@ use super::*; use crate as pallet_asset_conversion_tx_payment; -use codec; use frame_support::{ derive_impl, dispatch::DispatchClass, @@ -25,13 +24,19 @@ use frame_support::{ ord_parameter_types, pallet_prelude::*, parameter_types, - traits::{AsEnsureOriginWithArg, ConstU32, ConstU64, ConstU8, Imbalance, OnUnbalanced}, + traits::{ + tokens::{ + fungible::{NativeFromLeft, NativeOrWithId, UnionOf}, + imbalance::ResolveAssetTo, + }, + AsEnsureOriginWithArg, ConstU32, ConstU64, ConstU8, Imbalance, OnUnbalanced, + }, weights::{Weight, WeightToFee as WeightToFeeT}, PalletId, }; use frame_system as system; use frame_system::{EnsureRoot, EnsureSignedBy}; -use pallet_asset_conversion::{NativeOrAssetId, NativeOrAssetIdConverter}; +use pallet_asset_conversion::{Ascending, Chain, WithFirstAsset}; use pallet_transaction_payment::CurrencyAdapter; use sp_core::H256; use sp_runtime::{ @@ -222,10 +227,9 @@ impl pallet_assets::Config for Runtime { parameter_types! { pub const AssetConversionPalletId: PalletId = PalletId(*b"py/ascon"); - pub storage AllowMultiAssetPools: bool = false; - // should be non-zero if AllowMultiAssetPools is true, otherwise can be zero pub storage LiquidityWithdrawalFee: Permill = Permill::from_percent(0); pub const MaxSwapPathLength: u32 = 4; + pub const Native: NativeOrWithId = NativeOrWithId::Native; } ord_parameter_types! { @@ -234,28 +238,26 @@ ord_parameter_types! 
{ impl pallet_asset_conversion::Config for Runtime { type RuntimeEvent = RuntimeEvent; - type Currency = Balances; - type AssetBalance = ::Balance; - type AssetId = u32; + type Balance = Balance; + type HigherPrecisionBalance = u128; + type AssetKind = NativeOrWithId; + type Assets = UnionOf, AccountId>; + type PoolId = (Self::AssetKind, Self::AssetKind); + type PoolLocator = Chain< + WithFirstAsset>, + Ascending>, + >; type PoolAssetId = u32; - type Assets = Assets; type PoolAssets = PoolAssets; + type PoolSetupFee = ConstU64<100>; // should be more or equal to the existential deposit + type PoolSetupFeeAsset = Native; + type PoolSetupFeeTarget = ResolveAssetTo; type PalletId = AssetConversionPalletId; - type WeightInfo = (); type LPFee = ConstU32<3>; // means 0.3% - type PoolSetupFee = ConstU64<100>; // should be more or equal to the existential deposit - type PoolSetupFeeReceiver = AssetConversionOrigin; type LiquidityWithdrawalFee = LiquidityWithdrawalFee; - type AllowMultiAssetPools = AllowMultiAssetPools; type MaxSwapPathLength = MaxSwapPathLength; type MintMinLiquidity = ConstU64<100>; // 100 is good enough when the main currency has 12 decimals. - - type Balance = u64; - type HigherPrecisionBalance = u128; - - type MultiAssetId = NativeOrAssetId; - type MultiAssetIdConverter = NativeOrAssetIdConverter; - + type WeightInfo = (); pallet_asset_conversion::runtime_benchmarks_enabled! { type BenchmarkHelper = (); } @@ -264,5 +266,5 @@ impl pallet_asset_conversion::Config for Runtime { impl Config for Runtime { type RuntimeEvent = RuntimeEvent; type Fungibles = Assets; - type OnChargeAssetTransaction = AssetConversionAdapter; + type OnChargeAssetTransaction = AssetConversionAdapter; } diff --git a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/payment.rs b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/payment.rs index 0690536a058f..5acb4d8e4235 100644 --- a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/payment.rs +++ b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/payment.rs @@ -25,7 +25,7 @@ use frame_support::{ }; use pallet_asset_conversion::Swap; use sp_runtime::{ - traits::{DispatchInfoOf, PostDispatchInfoOf, Zero}, + traits::{DispatchInfoOf, Get, PostDispatchInfoOf, Zero}, transaction_validity::InvalidTransaction, Saturating, }; @@ -77,16 +77,17 @@ pub trait OnChargeAssetTransaction { /// Implements the asset transaction for a balance to asset converter (implementing [`Swap`]). /// /// The converter is given the complete fee in terms of the asset used for the transaction. -pub struct AssetConversionAdapter(PhantomData<(C, CON)>); +pub struct AssetConversionAdapter(PhantomData<(C, CON, N)>); /// Default implementation for a runtime instantiating this pallet, an asset to native swapper. 
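With the reworked configuration above, the fee logic no longer goes through `MultiAssetId`/`MultiAssetIdConverter`; asset kinds are written directly as `NativeOrWithId<u32>` and pools pair the native asset with an asset id. A small sketch of how call sites name assets under this scheme, mirroring the swap path used by the adapter (`asset_id` being a plain `u32` here):

```rust
// Sketch: naming assets under the `NativeOrWithId` scheme of the new config.
use frame_support::traits::tokens::fungible::NativeOrWithId;

fn fee_swap_path(asset_id: u32) -> Vec<NativeOrWithId<u32>> {
    // Paying fees means swapping (asset -> native); the pool locator above pins
    // the native asset as the first member of every pool.
    vec![NativeOrWithId::WithId(asset_id), NativeOrWithId::Native]
}
```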
-impl OnChargeAssetTransaction for AssetConversionAdapter +impl OnChargeAssetTransaction for AssetConversionAdapter where + N: Get, T: Config, C: Inspect<::AccountId>, - CON: Swap, - T::HigherPrecisionBalance: From> + TryInto>, - T::MultiAssetId: From>, + CON: Swap, AssetKind = T::AssetKind>, + BalanceOf: Into>, + T::AssetKind: From>, BalanceOf: IsType<::AccountId>>::Balance>, { type Balance = BalanceOf; @@ -117,23 +118,19 @@ where let asset_consumed = CON::swap_tokens_for_exact_tokens( who.clone(), - vec![asset_id.into(), T::MultiAssetIdConverter::get_native()], - T::HigherPrecisionBalance::from(native_asset_required), + vec![asset_id.into(), N::get()], + native_asset_required, None, who.clone(), true, ) .map_err(|_| TransactionValidityError::from(InvalidTransaction::Payment))?; - let asset_consumed = asset_consumed - .try_into() - .map_err(|_| TransactionValidityError::from(InvalidTransaction::Payment))?; - ensure!(asset_consumed > Zero::zero(), InvalidTransaction::Payment); // charge the fee in native currency ::withdraw_fee(who, call, info, fee, tip) - .map(|r| (r, native_asset_required, asset_consumed)) + .map(|r| (r, native_asset_required, asset_consumed.into())) } /// Correct the fee and swap the refund back to asset. @@ -173,11 +170,10 @@ where match CON::swap_exact_tokens_for_tokens( who.clone(), // we already deposited the native to `who` vec![ - T::MultiAssetIdConverter::get_native(), // we provide the native - asset_id.into(), // we want asset_id back + N::get(), // we provide the native + asset_id.into(), // we want asset_id back ], - T::HigherPrecisionBalance::from(swap_back), /* amount of the native asset to - * convert to `asset_id` */ + swap_back, // amount of the native asset to convert to `asset_id` None, // no minimum amount back who.clone(), // we will refund to `who` false, // no need to keep alive diff --git a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/tests.rs b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/tests.rs index d3802987ed9b..b5492eba8aed 100644 --- a/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/tests.rs +++ b/substrate/frame/transaction-payment/asset-conversion-tx-payment/src/tests.rs @@ -20,12 +20,14 @@ use frame_support::{ assert_ok, dispatch::{DispatchInfo, PostDispatchInfo}, pallet_prelude::*, - traits::{fungible::Inspect, fungibles::Mutate}, + traits::{ + fungible::{Inspect, NativeOrWithId}, + fungibles::{Inspect as FungiblesInspect, Mutate}, + }, weights::Weight, }; use frame_system as system; use mock::{ExtrinsicBaseWeight, *}; -use pallet_asset_conversion::NativeOrAssetId; use pallet_balances::Call as BalancesCall; use sp_runtime::{traits::StaticLookup, BuildStorage}; @@ -111,22 +113,32 @@ fn default_post_info() -> PostDispatchInfo { fn setup_lp(asset_id: u32, balance_factor: u64) { let lp_provider = 5; + let ed = Balances::minimum_balance(); + let ed_asset = Assets::minimum_balance(asset_id); assert_ok!(Balances::force_set_balance( RuntimeOrigin::root(), lp_provider, - 10_000 * balance_factor + 10_000 * balance_factor + ed, )); let lp_provider_account = ::Lookup::unlookup(lp_provider); - assert_ok!(Assets::mint_into(asset_id.into(), &lp_provider_account, 10_000 * balance_factor)); + assert_ok!(Assets::mint_into( + asset_id.into(), + &lp_provider_account, + 10_000 * balance_factor + ed_asset + )); - let token_1 = NativeOrAssetId::Native; - let token_2 = NativeOrAssetId::Asset(asset_id); - assert_ok!(AssetConversion::create_pool(RuntimeOrigin::signed(lp_provider), token_1, 
token_2)); + let token_1 = NativeOrWithId::Native; + let token_2 = NativeOrWithId::WithId(asset_id); + assert_ok!(AssetConversion::create_pool( + RuntimeOrigin::signed(lp_provider), + Box::new(token_1.clone()), + Box::new(token_2.clone()) + )); assert_ok!(AssetConversion::add_liquidity( RuntimeOrigin::signed(lp_provider), - token_1, - token_2, + Box::new(token_1), + Box::new(token_2), 1_000 * balance_factor, // 1 desired 10_000 * balance_factor, // 2 desired 1, // 1 min @@ -216,8 +228,8 @@ fn transaction_payment_in_asset_possible() { let fee_in_native = base_weight + tx_weight + len as u64; let input_quote = AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(asset_id), - NativeOrAssetId::Native, + NativeOrWithId::WithId(asset_id), + NativeOrWithId::Native, fee_in_native, true, ); @@ -325,8 +337,8 @@ fn transaction_payment_without_fee() { let len = 10; let fee_in_native = base_weight + weight + len as u64; let input_quote = AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(asset_id), - NativeOrAssetId::Native, + NativeOrWithId::WithId(asset_id), + NativeOrWithId::Native, fee_in_native, true, ); @@ -343,8 +355,8 @@ fn transaction_payment_without_fee() { assert_eq!(Assets::balance(asset_id, caller), balance - fee_in_asset); let refund = AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(asset_id), + NativeOrWithId::Native, + NativeOrWithId::WithId(asset_id), fee_in_native, true, ) @@ -400,8 +412,8 @@ fn asset_transaction_payment_with_tip_and_refund() { let len = 10; let fee_in_native = base_weight + weight + len as u64 + tip; let input_quote = AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(asset_id), - NativeOrAssetId::Native, + NativeOrWithId::WithId(asset_id), + NativeOrWithId::Native, fee_in_native, true, ); @@ -416,8 +428,8 @@ fn asset_transaction_payment_with_tip_and_refund() { let final_weight = 50; let expected_fee = fee_in_native - final_weight - tip; let expected_token_refund = AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(asset_id), + NativeOrWithId::Native, + NativeOrWithId::WithId(asset_id), fee_in_native - expected_fee - tip, true, ) @@ -481,8 +493,8 @@ fn payment_from_account_with_only_assets() { let fee_in_native = base_weight + weight + len as u64; let ed = Balances::minimum_balance(); let fee_in_asset = AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(asset_id), - NativeOrAssetId::Native, + NativeOrWithId::WithId(asset_id), + NativeOrWithId::Native, fee_in_native + ed, true, ) @@ -497,8 +509,8 @@ fn payment_from_account_with_only_assets() { assert_eq!(Assets::balance(asset_id, caller), balance - fee_in_asset); let refund = AssetConversion::quote_price_exact_tokens_for_tokens( - NativeOrAssetId::Native, - NativeOrAssetId::Asset(asset_id), + NativeOrWithId::Native, + NativeOrWithId::WithId(asset_id), ed, true, ) @@ -573,8 +585,8 @@ fn converted_fee_is_never_zero_if_input_fee_is_not() { // validate even a small fee gets converted to asset. 
let fee_in_native = base_weight + weight + len as u64; let fee_in_asset = AssetConversion::quote_price_tokens_for_exact_tokens( - NativeOrAssetId::Asset(asset_id), - NativeOrAssetId::Native, + NativeOrWithId::WithId(asset_id), + NativeOrWithId::Native, fee_in_native, true, ) diff --git a/substrate/frame/transaction-payment/asset-tx-payment/Cargo.toml b/substrate/frame/transaction-payment/asset-tx-payment/Cargo.toml index 4fd22e85cc59..60f807cd18fd 100644 --- a/substrate/frame/transaction-payment/asset-tx-payment/Cargo.toml +++ b/substrate/frame/transaction-payment/asset-tx-payment/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "pallet to manage transaction payments in assets" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -29,7 +32,7 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" sp-storage = { path = "../../../primitives/storage", default-features = false } diff --git a/substrate/frame/transaction-payment/rpc/Cargo.toml b/substrate/frame/transaction-payment/rpc/Cargo.toml index 048b7da63f6c..5a574a944d82 100644 --- a/substrate/frame/transaction-payment/rpc/Cargo.toml +++ b/substrate/frame/transaction-payment/rpc/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "RPC interface for the transaction payment pallet." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/transaction-payment/rpc/runtime-api/Cargo.toml b/substrate/frame/transaction-payment/rpc/runtime-api/Cargo.toml index 17213392e1c4..e384fcef692e 100644 --- a/substrate/frame/transaction-payment/rpc/runtime-api/Cargo.toml +++ b/substrate/frame/transaction-payment/rpc/runtime-api/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "RPC runtime API for transaction payment FRAME pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/transaction-payment/skip-feeless-payment/Cargo.toml b/substrate/frame/transaction-payment/skip-feeless-payment/Cargo.toml index 25a708d69de6..0e3744626d3f 100644 --- a/substrate/frame/transaction-payment/skip-feeless-payment/Cargo.toml +++ b/substrate/frame/transaction-payment/skip-feeless-payment/Cargo.toml @@ -7,6 +7,9 @@ license.workspace = true repository.workspace = true description = "Pallet to skip payments for calls annotated with `feeless_if` if the respective conditions are satisfied." 
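For context on the `skip-feeless-payment` crate whose manifest is touched here: it skips the payment-related signed extension for calls whose `#[pallet::feeless_if(..)]` predicate returns true. A hedged sketch of such an annotation on a hypothetical call (the pallet, call name and predicate are illustrative, and the closure signature is an assumption rather than something this diff shows):

```rust
// Sketch: a call that is feeless whenever its argument is zero.
#[pallet::call]
impl<T: Config> Pallet<T> {
    #[pallet::call_index(0)]
    #[pallet::weight(Weight::zero())]
    #[pallet::feeless_if(|_origin: &OriginFor<T>, amount: &u32| -> bool { *amount == 0 })]
    pub fn do_something(origin: OriginFor<T>, amount: u32) -> DispatchResult {
        let _who = ensure_signed(origin)?;
        let _ = amount;
        Ok(())
    }
}
```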
+[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/try-runtime/Cargo.toml b/substrate/frame/try-runtime/Cargo.toml index ba61fd352d1b..f688fee4e237 100644 --- a/substrate/frame/try-runtime/Cargo.toml +++ b/substrate/frame/try-runtime/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "FRAME pallet for democracy" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/tx-pause/Cargo.toml b/substrate/frame/tx-pause/Cargo.toml index c4ffaff85df6..b4d7dca4f914 100644 --- a/substrate/frame/tx-pause/Cargo.toml +++ b/substrate/frame/tx-pause/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "FRAME transaction pause pallet" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/uniques/Cargo.toml b/substrate/frame/uniques/Cargo.toml index 300b319ede0f..218b4ffe4c05 100644 --- a/substrate/frame/uniques/Cargo.toml +++ b/substrate/frame/uniques/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME NFT asset management pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/frame/uniques/src/lib.rs b/substrate/frame/uniques/src/lib.rs index 74c2d96b14cf..f1207e8d22a3 100644 --- a/substrate/frame/uniques/src/lib.rs +++ b/substrate/frame/uniques/src/lib.rs @@ -166,7 +166,7 @@ pub mod pallet { #[pallet::storage] #[pallet::storage_prefix = "Class"] /// Details of a collection. - pub(super) type Collection, I: 'static = ()> = StorageMap< + pub type Collection, I: 'static = ()> = StorageMap< _, Blake2_128Concat, T::CollectionId, @@ -175,7 +175,7 @@ pub mod pallet { #[pallet::storage] /// The collection, if any, of which an account is willing to take ownership. - pub(super) type OwnershipAcceptance, I: 'static = ()> = + pub type OwnershipAcceptance, I: 'static = ()> = StorageMap<_, Blake2_128Concat, T::AccountId, T::CollectionId>; #[pallet::storage] @@ -209,7 +209,7 @@ pub mod pallet { #[pallet::storage] #[pallet::storage_prefix = "Asset"] /// The items in existence and their ownership details. - pub(super) type Item, I: 'static = ()> = StorageDoubleMap< + pub type Item, I: 'static = ()> = StorageDoubleMap< _, Blake2_128Concat, T::CollectionId, @@ -258,7 +258,7 @@ pub mod pallet { #[pallet::storage] /// Price of an asset instance. 
- pub(super) type ItemPriceOf, I: 'static = ()> = StorageDoubleMap< + pub type ItemPriceOf, I: 'static = ()> = StorageDoubleMap< _, Blake2_128Concat, T::CollectionId, @@ -857,34 +857,37 @@ pub mod pallet { pub fn transfer_ownership( origin: OriginFor, collection: T::CollectionId, - owner: AccountIdLookupOf, + new_owner: AccountIdLookupOf, ) -> DispatchResult { let origin = ensure_signed(origin)?; - let owner = T::Lookup::lookup(owner)?; + let new_owner = T::Lookup::lookup(new_owner)?; - let acceptable_collection = OwnershipAcceptance::::get(&owner); + let acceptable_collection = OwnershipAcceptance::::get(&new_owner); ensure!(acceptable_collection.as_ref() == Some(&collection), Error::::Unaccepted); Collection::::try_mutate(collection.clone(), |maybe_details| { let details = maybe_details.as_mut().ok_or(Error::::UnknownCollection)?; ensure!(origin == details.owner, Error::::NoPermission); - if details.owner == owner { + if details.owner == new_owner { return Ok(()) } // Move the deposit to the new owner. T::Currency::repatriate_reserved( &details.owner, - &owner, + &new_owner, details.total_deposit, Reserved, )?; + CollectionAccount::::remove(&details.owner, &collection); - CollectionAccount::::insert(&owner, &collection, ()); - details.owner = owner.clone(); - OwnershipAcceptance::::remove(&owner); + CollectionAccount::::insert(&new_owner, &collection, ()); + + details.owner = new_owner.clone(); + OwnershipAcceptance::::remove(&new_owner); + frame_system::Pallet::::dec_consumers(&new_owner); - Self::deposit_event(Event::OwnerChanged { collection, new_owner: owner }); + Self::deposit_event(Event::OwnerChanged { collection, new_owner }); Ok(()) }) } @@ -1431,8 +1434,8 @@ pub mod pallet { maybe_collection: Option, ) -> DispatchResult { let who = ensure_signed(origin)?; - let old = OwnershipAcceptance::::get(&who); - match (old.is_some(), maybe_collection.is_some()) { + let exists = OwnershipAcceptance::::contains_key(&who); + match (exists, maybe_collection.is_some()) { (false, true) => { frame_system::Pallet::::inc_consumers(&who)?; }, diff --git a/substrate/frame/uniques/src/migration.rs b/substrate/frame/uniques/src/migration.rs index 456ea6b59b5f..821a3ab5a538 100644 --- a/substrate/frame/uniques/src/migration.rs +++ b/substrate/frame/uniques/src/migration.rs @@ -18,38 +18,39 @@ //! Various pieces of common functionality. use super::*; -use frame_support::traits::{Get, GetStorageVersion, PalletInfoAccess, StorageVersion}; - -/// Migrate the pallet storage to v1. -pub fn migrate_to_v1, I: 'static, P: GetStorageVersion + PalletInfoAccess>( -) -> frame_support::weights::Weight { - let on_chain_storage_version =
<P as GetStorageVersion>
::on_chain_storage_version(); - log::info!( - target: LOG_TARGET, - "Running migration storage v1 for uniques with storage version {:?}", - on_chain_storage_version, - ); - - if on_chain_storage_version < 1 { - let mut count = 0; - for (collection, detail) in Collection::::iter() { - CollectionAccount::::insert(&detail.owner, &collection, ()); - count += 1; +use frame_support::traits::{Get, OnRuntimeUpgrade}; +use sp_std::marker::PhantomData; + +mod v1 { + use super::*; + + /// Actual implementation of the storage migration. + pub struct MigrateToV1Impl(PhantomData<(T, I)>); + + impl, I: 'static> OnRuntimeUpgrade for MigrateToV1Impl { + fn on_runtime_upgrade() -> frame_support::weights::Weight { + let mut count = 0; + for (collection, detail) in Collection::::iter() { + CollectionAccount::::insert(&detail.owner, &collection, ()); + count += 1; + } + + log::info!( + target: LOG_TARGET, + "Storage migration v1 for uniques finished.", + ); + + // calculate and return migration weights + T::DbWeight::get().reads_writes(count as u64 + 1, count as u64 + 1) } - StorageVersion::new(1).put::
<P>
(); - log::info!( - target: LOG_TARGET, - "Running migration storage v1 for uniques with storage version {:?} was complete", - on_chain_storage_version, - ); - // calculate and return migration weights - T::DbWeight::get().reads_writes(count as u64 + 1, count as u64 + 1) - } else { - log::warn!( - target: LOG_TARGET, - "Attempted to apply migration to v1 but failed because storage version is {:?}", - on_chain_storage_version, - ); - T::DbWeight::get().reads(1) } } + +/// Migrate the pallet storage from `0` to `1`. +pub type MigrateV0ToV1 = frame_support::migrations::VersionedMigration< + 0, + 1, + v1::MigrateToV1Impl, + Pallet, + ::DbWeight, +>; diff --git a/substrate/frame/uniques/src/tests.rs b/substrate/frame/uniques/src/tests.rs index 29ff2a0f354b..8c717a8fff77 100644 --- a/substrate/frame/uniques/src/tests.rs +++ b/substrate/frame/uniques/src/tests.rs @@ -255,8 +255,11 @@ fn transfer_owner_should_work() { Uniques::transfer_ownership(RuntimeOrigin::signed(1), 0, 2), Error::::Unaccepted ); + assert_eq!(System::consumers(&2), 0); assert_ok!(Uniques::set_accept_ownership(RuntimeOrigin::signed(2), Some(0))); + assert_eq!(System::consumers(&2), 1); assert_ok!(Uniques::transfer_ownership(RuntimeOrigin::signed(1), 0, 2)); + assert_eq!(System::consumers(&2), 1); assert_eq!(collections(), vec![(2, 0)]); assert_eq!(Balances::total_balance(&1), 98); diff --git a/substrate/frame/utility/Cargo.toml b/substrate/frame/utility/Cargo.toml index c2db351e8969..dc22e8b43478 100644 --- a/substrate/frame/utility/Cargo.toml +++ b/substrate/frame/utility/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME utilities pallet" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/api/Cargo.toml b/substrate/primitives/api/Cargo.toml index 1be131a7b4fb..345647cec25d 100644 --- a/substrate/primitives/api/Cargo.toml +++ b/substrate/primitives/api/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Substrate runtime api primitives" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/api/proc-macro/Cargo.toml b/substrate/primitives/api/proc-macro/Cargo.toml index 46d05c78b4aa..eff66d3fb29d 100644 --- a/substrate/primitives/api/proc-macro/Cargo.toml +++ b/substrate/primitives/api/proc-macro/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Macros for declaring and implementing runtime apis." 
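The rewritten migration above moves the version bookkeeping out of the migration body: `VersionedMigration` only runs `MigrateToV1Impl` when the on-chain storage version is 0 and bumps it to 1 afterwards. A sketch of how a runtime would schedule it (`Runtime`, `Block` and `AllPalletsWithSystem` are the conventional names of a concrete runtime, not taken from this diff):

```rust
// Sketch: wiring the versioned uniques migration into a runtime's Executive.
pub type Migrations = (pallet_uniques::migration::MigrateV0ToV1<Runtime, ()>,);

pub type Executive = frame_executive::Executive<
    Runtime,
    Block,
    frame_system::ChainContext<Runtime>,
    Runtime,
    AllPalletsWithSystem,
    Migrations,
>;
```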
documentation = "https://docs.rs/sp-api-proc-macro" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,10 +20,10 @@ proc-macro = true [dependencies] quote = "1.0.28" -syn = { version = "2.0.39", features = ["extra-traits", "fold", "full", "visit"] } +syn = { version = "2.0.48", features = ["extra-traits", "fold", "full", "visit"] } proc-macro2 = "1.0.56" blake2 = { version = "0.10.4", default-features = false } -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" expander = "2.0.0" Inflector = "0.11.4" diff --git a/substrate/primitives/api/proc-macro/src/utils.rs b/substrate/primitives/api/proc-macro/src/utils.rs index 6d63d7cf7387..d307144bae5c 100644 --- a/substrate/primitives/api/proc-macro/src/utils.rs +++ b/substrate/primitives/api/proc-macro/src/utils.rs @@ -20,7 +20,7 @@ use crate::common::API_VERSION_ATTRIBUTE; use inflector::Inflector; use proc_macro2::{Span, TokenStream}; use proc_macro_crate::{crate_name, FoundCrate}; -use quote::{format_ident, quote, ToTokens}; +use quote::{format_ident, quote}; use syn::{ parse_quote, spanned::Spanned, token::And, Attribute, Error, FnArg, GenericArgument, Ident, ImplItem, ItemImpl, Pat, Path, PathArguments, Result, ReturnType, Signature, Type, TypePath, @@ -262,6 +262,7 @@ pub fn versioned_trait_name(trait_ident: &Ident, version: u64) -> Ident { /// Extract the documentation from the provided attributes. #[cfg(feature = "frame-metadata")] pub fn get_doc_literals(attrs: &[syn::Attribute]) -> Vec { + use quote::ToTokens; attrs .iter() .filter_map(|attr| { diff --git a/substrate/primitives/api/test/Cargo.toml b/substrate/primitives/api/test/Cargo.toml index f207f5ff02dd..b0975082c44e 100644 --- a/substrate/primitives/api/test/Cargo.toml +++ b/substrate/primitives/api/test/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -21,7 +24,7 @@ sp-consensus = { path = "../../consensus/common" } sc-block-builder = { path = "../../../client/block-builder" } codec = { package = "parity-scale-codec", version = "3.6.1" } sp-state-machine = { path = "../../state-machine" } -trybuild = "1.0.74" +trybuild = "1.0.88" rustversion = "1.0.6" scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } diff --git a/substrate/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr b/substrate/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr index 4bd64c974f29..788d1807f3ba 100644 --- a/substrate/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr +++ b/substrate/primitives/api/test/tests/ui/impl_incorrect_method_signature.stderr @@ -1,3 +1,21 @@ +error[E0603]: struct `RuntimeVersion` is private + --> tests/ui/impl_incorrect_method_signature.rs:37:27 + | +37 | fn version() -> sp_api::RuntimeVersion { + | ^^^^^^^^^^^^^^ private struct + | +note: the struct `RuntimeVersion` is defined here + --> $WORKSPACE/substrate/primitives/api/src/lib.rs + | + | use sp_version::RuntimeVersion; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: consider importing one of these items instead + | +37 | fn version() -> sp_api::__private::RuntimeVersion { + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +37 | fn version() -> sp_version::RuntimeVersion { + | ~~~~~~~~~~~~~~~~~~~~~~~~~~ + error[E0053]: method `test` has an incompatible type for trait --> tests/ui/impl_incorrect_method_signature.rs:33:17 | diff --git 
a/substrate/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr b/substrate/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr index 4c21a3afb9b6..b4df7c068768 100644 --- a/substrate/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr +++ b/substrate/primitives/api/test/tests/ui/type_reference_in_impl_runtime_apis_call.stderr @@ -1,3 +1,21 @@ +error[E0603]: struct `RuntimeVersion` is private + --> tests/ui/type_reference_in_impl_runtime_apis_call.rs:39:27 + | +39 | fn version() -> sp_api::RuntimeVersion { + | ^^^^^^^^^^^^^^ private struct + | +note: the struct `RuntimeVersion` is defined here + --> $WORKSPACE/substrate/primitives/api/src/lib.rs + | + | use sp_version::RuntimeVersion; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ +help: consider importing one of these items instead + | +39 | fn version() -> sp_api::__private::RuntimeVersion { + | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +39 | fn version() -> sp_version::RuntimeVersion { + | ~~~~~~~~~~~~~~~~~~~~~~~~~~ + error[E0053]: method `test` has an incompatible type for trait --> tests/ui/type_reference_in_impl_runtime_apis_call.rs:33:17 | diff --git a/substrate/primitives/application-crypto/Cargo.toml b/substrate/primitives/application-crypto/Cargo.toml index a6c937a3469e..d8aa2689aa27 100644 --- a/substrate/primitives/application-crypto/Cargo.toml +++ b/substrate/primitives/application-crypto/Cargo.toml @@ -10,6 +10,9 @@ repository.workspace = true documentation = "https://docs.rs/sp-application-crypto" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +21,7 @@ targets = ["x86_64-unknown-linux-gnu"] sp-core = { path = "../core", default-features = false } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, optional = true, features = ["alloc", "derive"] } +serde = { version = "1.0.195", default-features = false, optional = true, features = ["alloc", "derive"] } sp-std = { path = "../std", default-features = false } sp-io = { path = "../io", default-features = false } diff --git a/substrate/primitives/application-crypto/test/Cargo.toml b/substrate/primitives/application-crypto/test/Cargo.toml index d9fb743e8cd7..0057606b38e5 100644 --- a/substrate/primitives/application-crypto/test/Cargo.toml +++ b/substrate/primitives/application-crypto/test/Cargo.toml @@ -9,6 +9,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/arithmetic/Cargo.toml b/substrate/primitives/arithmetic/Cargo.toml index 8634dabe854f..47d2902e267d 100644 --- a/substrate/primitives/arithmetic/Cargo.toml +++ b/substrate/primitives/arithmetic/Cargo.toml @@ -10,6 +10,9 @@ description = "Minimal fixed point arithmetic primitives and types for runtime." 
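Both expected-error files above record the same API change: `RuntimeVersion` is no longer publicly re-exported from `sp_api`, so runtime code should name it through `sp_version` (the `__private` path the compiler also offers is not meant for downstream use). A fragment of the corrected signature inside an `impl_runtime_apis!` block, with the remaining `Core` items elided:

```rust
// Sketch: use the public `sp_version` path instead of `sp_api::RuntimeVersion`.
impl sp_api::Core<Block> for Runtime {
    fn version() -> sp_version::RuntimeVersion {
        VERSION
    }
    // ... `execute_block` / `initialize_block` unchanged.
}
```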
documentation = "https://docs.rs/sp-arithmetic" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -19,9 +22,9 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = "max-encoded-len", ] } integer-sqrt = "0.1.2" -num-traits = { version = "0.2.8", default-features = false } +num-traits = { version = "0.2.17", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } static_assertions = "1.1.0" sp-std = { path = "../std", default-features = false } diff --git a/substrate/primitives/arithmetic/fuzzer/Cargo.toml b/substrate/primitives/arithmetic/fuzzer/Cargo.toml index 5984779ddb6d..061b82ccadc2 100644 --- a/substrate/primitives/arithmetic/fuzzer/Cargo.toml +++ b/substrate/primitives/arithmetic/fuzzer/Cargo.toml @@ -10,6 +10,9 @@ description = "Fuzzer for fixed point arithmetic primitives." documentation = "https://docs.rs/sp-arithmetic-fuzzer" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/authority-discovery/Cargo.toml b/substrate/primitives/authority-discovery/Cargo.toml index 5b9e034a7c65..77d1633c72d8 100644 --- a/substrate/primitives/authority-discovery/Cargo.toml +++ b/substrate/primitives/authority-discovery/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/block-builder/Cargo.toml b/substrate/primitives/block-builder/Cargo.toml index a574689811be..de1ffd9d9e64 100644 --- a/substrate/primitives/block-builder/Cargo.toml +++ b/substrate/primitives/block-builder/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "The block builder runtime api." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/blockchain/Cargo.toml b/substrate/primitives/blockchain/Cargo.toml index 33db09ce0ac2..38b3b2030dc6 100644 --- a/substrate/primitives/blockchain/Cargo.toml +++ b/substrate/primitives/blockchain/Cargo.toml @@ -10,6 +10,9 @@ description = "Substrate blockchain traits and primitives." 
documentation = "https://docs.rs/sp-blockchain" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/consensus/aura/Cargo.toml b/substrate/primitives/consensus/aura/Cargo.toml index 4a19999a469a..6c797e15ae80 100644 --- a/substrate/primitives/consensus/aura/Cargo.toml +++ b/substrate/primitives/consensus/aura/Cargo.toml @@ -9,11 +9,14 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = { version = "0.1.57", optional = true } +async-trait = { version = "0.1.74", optional = true } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } sp-api = { path = "../../api", default-features = false } diff --git a/substrate/primitives/consensus/babe/Cargo.toml b/substrate/primitives/consensus/babe/Cargo.toml index 6ec50ea022b7..e48b4b4817b7 100644 --- a/substrate/primitives/consensus/babe/Cargo.toml +++ b/substrate/primitives/consensus/babe/Cargo.toml @@ -9,14 +9,17 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = { version = "0.1.57", optional = true } +async-trait = { version = "0.1.74", optional = true } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } sp-api = { path = "../../api", default-features = false } sp-application-crypto = { path = "../../application-crypto", default-features = false } sp-consensus-slots = { path = "../slots", default-features = false } diff --git a/substrate/primitives/consensus/common/Cargo.toml b/substrate/primitives/consensus/common/Cargo.toml index c273e0efc341..6db46d8497b6 100644 --- a/substrate/primitives/consensus/common/Cargo.toml +++ b/substrate/primitives/consensus/common/Cargo.toml @@ -10,11 +10,14 @@ description = "Common utilities for building and using consensus engines in subs documentation = "https://docs.rs/sp-consensus/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = "0.1.57" +async-trait = "0.1.74" futures = { version = "0.3.21", features = ["thread-pool"] } log = "0.4.17" thiserror = "1.0.48" diff --git a/substrate/primitives/consensus/grandpa/Cargo.toml b/substrate/primitives/consensus/grandpa/Cargo.toml index 1ddc89df9836..be22f5b23df3 100644 --- a/substrate/primitives/consensus/grandpa/Cargo.toml +++ b/substrate/primitives/consensus/grandpa/Cargo.toml @@ -10,6 +10,9 @@ description = "Primitives for GRANDPA integration, suitable for WASM compilation documentation = "https://docs.rs/sp-consensus-grandpa" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +21,7 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = grandpa = { package = "finality-grandpa", version = "0.16.2", default-features = 
false, features = ["derive-codec"] } log = { version = "0.4.17", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", features = ["alloc", "derive"], default-features = false, optional = true } +serde = { version = "1.0.195", features = ["alloc", "derive"], default-features = false, optional = true } sp-api = { path = "../../api", default-features = false } sp-application-crypto = { path = "../../application-crypto", default-features = false } sp-core = { path = "../../core", default-features = false } diff --git a/substrate/primitives/consensus/sassafras/Cargo.toml b/substrate/primitives/consensus/sassafras/Cargo.toml index b1110023f52c..56a923f8587e 100644 --- a/substrate/primitives/consensus/sassafras/Cargo.toml +++ b/substrate/primitives/consensus/sassafras/Cargo.toml @@ -11,13 +11,16 @@ documentation = "https://docs.rs/sp-consensus-sassafras" readme = "README.md" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] scale-codec = { package = "parity-scale-codec", version = "3.2.2", default-features = false } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["derive"], optional = true } sp-api = { path = "../../api", default-features = false } sp-application-crypto = { path = "../../application-crypto", default-features = false, features = ["bandersnatch-experimental"] } sp-consensus-slots = { path = "../slots", default-features = false } diff --git a/substrate/primitives/consensus/slots/Cargo.toml b/substrate/primitives/consensus/slots/Cargo.toml index 129405837574..91bbd1663a9c 100644 --- a/substrate/primitives/consensus/slots/Cargo.toml +++ b/substrate/primitives/consensus/slots/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/core/Cargo.toml b/substrate/primitives/core/Cargo.toml index 4ee176048b62..1028161f7088 100644 --- a/substrate/primitives/core/Cargo.toml +++ b/substrate/primitives/core/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Shareable Substrate types." 
documentation = "https://docs.rs/sp-core" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -16,7 +19,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive", "max-encoded-len"] } scale-info = { version = "2.5.0", default-features = false, features = ["derive"] } log = { version = "0.4.17", default-features = false } -serde = { version = "1.0.193", optional = true, default-features = false, features = ["alloc", "derive"] } +serde = { version = "1.0.195", optional = true, default-features = false, features = ["alloc", "derive"] } bounded-collections = { version = "0.1.8", default-features = false } primitive-types = { version = "0.12.0", default-features = false, features = ["codec", "scale-info"] } impl-serde = { version = "0.4.0", default-features = false, optional = true } @@ -44,19 +47,19 @@ itertools = { version = "0.12", optional = true } array-bytes = { version = "6.1", optional = true } ed25519-zebra = { version = "3.1.0", default-features = false, optional = true } blake2 = { version = "0.10.4", default-features = false, optional = true } -schnorrkel = { version = "0.9.1", features = ["preaudit_deprecated", "u64_backend"], default-features = false } -merlin = { version = "2.0", default-features = false } +schnorrkel = { version = "0.11.4", features = ["preaudit_deprecated"], default-features = false } +merlin = { version = "3.0", default-features = false } sp-core-hashing = { path = "hashing", default-features = false, optional = true } sp-runtime-interface = { path = "../runtime-interface", default-features = false } # bls crypto w3f-bls = { version = "0.1.3", default-features = false, optional = true } # bandersnatch crypto -bandersnatch_vrfs = { git = "https://github.com/w3f/ring-vrf", rev = "2019248", default-features = false, features = ["substrate-curves"], optional = true } +bandersnatch_vrfs = { git = "https://github.com/w3f/ring-vrf", rev = "e9782f9", default-features = false, features = ["substrate-curves"], optional = true } [dev-dependencies] criterion = "0.4.0" -serde_json = "1.0.108" +serde_json = "1.0.111" lazy_static = "1.4.0" regex = "1.6.0" sp-core-hashing-proc-macro = { path = "hashing/proc-macro" } diff --git a/substrate/primitives/core/fuzz/Cargo.toml b/substrate/primitives/core/fuzz/Cargo.toml index 9a094b07d4a1..c6b5a065b6dc 100644 --- a/substrate/primitives/core/fuzz/Cargo.toml +++ b/substrate/primitives/core/fuzz/Cargo.toml @@ -3,6 +3,9 @@ name = "sp-core-fuzz" version = "0.0.0" publish = false +[lints] +workspace = true + [package.metadata] cargo-fuzz = true diff --git a/substrate/primitives/core/hashing/Cargo.toml b/substrate/primitives/core/hashing/Cargo.toml index bb261efdfd37..9b2f548d17be 100644 --- a/substrate/primitives/core/hashing/Cargo.toml +++ b/substrate/primitives/core/hashing/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Primitive core crate hashing implementation." 
documentation = "https://docs.rs/sp-core-hashing" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/core/hashing/proc-macro/Cargo.toml b/substrate/primitives/core/hashing/proc-macro/Cargo.toml index a5e5956e94ff..5c215bc77993 100644 --- a/substrate/primitives/core/hashing/proc-macro/Cargo.toml +++ b/substrate/primitives/core/hashing/proc-macro/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "This crate provides procedural macros for calculating static hash." documentation = "https://docs.rs/sp-core-hashing-proc-macro" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,5 +20,5 @@ proc-macro = true [dependencies] quote = "1.0.28" -syn = { version = "2.0.39", features = ["full", "parsing"] } +syn = { version = "2.0.48", features = ["full", "parsing"] } sp-core-hashing = { path = "..", default-features = false } diff --git a/substrate/primitives/core/src/address_uri.rs b/substrate/primitives/core/src/address_uri.rs index 862747c9a4b6..211d47c0093d 100644 --- a/substrate/primitives/core/src/address_uri.rs +++ b/substrate/primitives/core/src/address_uri.rs @@ -184,7 +184,7 @@ impl<'a> AddressUri<'a> { Error::in_pass(initial_input, initial_input_len - input.len()) } else { Error::in_phrase(initial_input, initial_input_len - input.len()) - }); + }) } } diff --git a/substrate/primitives/core/src/const_hex2array.rs b/substrate/primitives/core/src/const_hex2array.rs new file mode 100644 index 000000000000..cd6071028e6c --- /dev/null +++ b/substrate/primitives/core/src/const_hex2array.rs @@ -0,0 +1,162 @@ +// This file is part of Substrate. + +// Copyright (C) Parity Technologies (UK) Ltd. +// SPDX-License-Identifier: Apache-2.0 + +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +//! Provides a const function for converting a hex string to a `u8` array at compile time, when used +//! in the proper context. + +/// Provides a const array from given string literal. +/// +/// Valid characters are `[0-9a-fA-F]`, and the hex string should not start +/// with the `0x` prefix. +#[macro_export] +macro_rules! hex2array { + ($input:expr) => {{ + const BYTES: [u8; $input.len() / 2] = $crate::const_hex2array::private_hex2array($input); + BYTES + }}; +} + +/// Generates array from (static) string literal. +/// +/// Valid characters are `[0-9a-fA-F]`, and the hex string should not start +/// with the `0x` prefix. +/// +/// # Panics +/// +/// The function will panic at compile time when used in a const context if: +/// - The given hex string has an invalid length. +/// - It contains invalid characters. +/// +/// The function will panic at runtime when used in a non-const context if the above conditions are +/// met. 
+#[doc(hidden)] +pub const fn private_hex2array(hex: &str) -> [u8; N] { + const fn c2b(c: u8) -> u8 { + match c as char { + '0'..='9' => c - b'0', + 'a'..='f' => c - (b'a' - 10), + 'A'..='F' => c - (b'A' - 10), + _ => panic!("hex string contains invalid character"), + } + } + let mut output = [0; N]; + let mut i = 0; + if hex.len() != 2 * N { + panic!("hex string length is not valid"); + } + while i < N { + output[i] = 16 * c2b(hex.as_bytes()[2 * i]) + c2b(hex.as_bytes()[2 * i + 1]); + i += 1; + } + output +} + +#[cfg(test)] +mod testh2b { + use super::private_hex2array; + + #[test] + fn t00() { + const T0: [u8; 0] = private_hex2array(""); + const EMPTY: [u8; 0] = []; + assert_eq!(T0, EMPTY); + } + + macro_rules! test_byte { + ($a:expr, $b:expr) => {{ + const X: [u8; 1] = private_hex2array($a); + assert_eq!(X, [$b]); + }}; + } + + #[test] + fn t01() { + test_byte!("00", 0); + test_byte!("01", 1); + test_byte!("02", 2); + test_byte!("03", 3); + test_byte!("04", 4); + test_byte!("05", 5); + test_byte!("06", 6); + test_byte!("07", 7); + test_byte!("08", 8); + test_byte!("09", 9); + test_byte!("0a", 10); + test_byte!("0A", 10); + test_byte!("0b", 11); + test_byte!("0B", 11); + test_byte!("0c", 12); + test_byte!("0C", 12); + test_byte!("0d", 13); + test_byte!("0D", 13); + test_byte!("0e", 14); + test_byte!("0E", 14); + test_byte!("0f", 15); + test_byte!("0F", 15); + } + + #[test] + fn t02() { + const T0: [u8; 2] = private_hex2array("0a10"); + assert_eq!(T0, [10, 16]); + const T1: [u8; 2] = private_hex2array("4545"); + assert_eq!(T1, [69, 69]); + } + + #[test] + fn t02m() { + assert_eq!(hex2array!("0a10"), [10, 16]); + assert_eq!(hex2array!("4545"), [69, 69]); + assert_eq!( + hex2array!("000102030405060708090a0b0c0d0e0f"), + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15] + ); + } + + #[test] + fn t16() { + const T16: [u8; 16] = private_hex2array("000102030405060708090a0b0c0d0e0f"); + + assert_eq!(T16, [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]); + } + + #[test] + fn t33() { + const T33: [u8; 33] = + private_hex2array("9c8af77d3a4e3f6f076853922985b9e6724fc9675329087f47aff1ceaaae772180"); + + assert_eq!( + T33, + [ + 156, 138, 247, 125, 58, 78, 63, 111, 7, 104, 83, 146, 41, 133, 185, 230, 114, 79, + 201, 103, 83, 41, 8, 127, 71, 175, 241, 206, 170, 174, 119, 33, 128 + ] + ); + } + + #[test] + #[should_panic = "hex string length is not valid"] + fn t_panic_incorrect_length2() { + let _ = private_hex2array::<2>("454"); + } + + #[test] + #[should_panic = "hex string contains invalid character"] + fn t_panic_invalid_character() { + let _ = private_hex2array::<2>("45ag"); + } +} diff --git a/substrate/primitives/core/src/crypto.rs b/substrate/primitives/core/src/crypto.rs index 7abe68a16941..2694d10dbe01 100644 --- a/substrate/primitives/core/src/crypto.rs +++ b/substrate/primitives/core/src/crypto.rs @@ -448,7 +448,7 @@ impl + AsRef<[u8]> + Public + Derive> Ss58Codec for T { fn from_string(s: &str) -> Result { let cap = AddressUri::parse(s)?; if cap.pass.is_some() { - return Err(PublicError::PasswordNotAllowed); + return Err(PublicError::PasswordNotAllowed) } let s = cap.phrase.unwrap_or(DEV_ADDRESS); let addr = if let Some(stripped) = s.strip_prefix("0x") { @@ -468,7 +468,7 @@ impl + AsRef<[u8]> + Public + Derive> Ss58Codec for T { fn from_string_with_version(s: &str) -> Result<(Self, Ss58AddressFormat), PublicError> { let cap = AddressUri::parse(s)?; if cap.pass.is_some() { - return Err(PublicError::PasswordNotAllowed); + return Err(PublicError::PasswordNotAllowed) } let (addr, 
v) = Self::from_ss58check_with_version(cap.phrase.unwrap_or(DEV_ADDRESS))?; if cap.paths.is_empty() { diff --git a/substrate/primitives/core/src/lib.rs b/substrate/primitives/core/src/lib.rs index ba47f9d6ade9..c1d7f06b74a5 100644 --- a/substrate/primitives/core/src/lib.rs +++ b/substrate/primitives/core/src/lib.rs @@ -52,6 +52,7 @@ pub mod hashing; #[cfg(feature = "full_crypto")] pub use hashing::{blake2_128, blake2_256, keccak_256, twox_128, twox_256, twox_64}; +pub mod const_hex2array; pub mod crypto; pub mod hexdisplay; pub use paste; diff --git a/substrate/primitives/core/src/sr25519.rs b/substrate/primitives/core/src/sr25519.rs index 5ba48f4eb237..a7f9e8837549 100644 --- a/substrate/primitives/core/src/sr25519.rs +++ b/substrate/primitives/core/src/sr25519.rs @@ -556,7 +556,7 @@ pub mod vrf { use crate::crypto::{VrfCrypto, VrfPublic}; use schnorrkel::{ errors::MultiSignatureStage, - vrf::{VRF_OUTPUT_LENGTH, VRF_PROOF_LENGTH}, + vrf::{VRF_PREOUT_LENGTH, VRF_PROOF_LENGTH}, SignatureError, }; @@ -637,7 +637,7 @@ pub mod vrf { /// VRF pre-output type suitable for schnorrkel operations. #[derive(Clone, Debug, PartialEq, Eq)] - pub struct VrfPreOutput(pub schnorrkel::vrf::VRFOutput); + pub struct VrfPreOutput(pub schnorrkel::vrf::VRFPreOut); impl Encode for VrfPreOutput { fn encode(&self) -> Vec { @@ -647,19 +647,19 @@ pub mod vrf { impl Decode for VrfPreOutput { fn decode(i: &mut R) -> Result { - let decoded = <[u8; VRF_OUTPUT_LENGTH]>::decode(i)?; - Ok(Self(schnorrkel::vrf::VRFOutput::from_bytes(&decoded).map_err(convert_error)?)) + let decoded = <[u8; VRF_PREOUT_LENGTH]>::decode(i)?; + Ok(Self(schnorrkel::vrf::VRFPreOut::from_bytes(&decoded).map_err(convert_error)?)) } } impl MaxEncodedLen for VrfPreOutput { fn max_encoded_len() -> usize { - <[u8; VRF_OUTPUT_LENGTH]>::max_encoded_len() + <[u8; VRF_PREOUT_LENGTH]>::max_encoded_len() } } impl TypeInfo for VrfPreOutput { - type Identity = [u8; VRF_OUTPUT_LENGTH]; + type Identity = [u8; VRF_PREOUT_LENGTH]; fn type_info() -> scale_info::Type { Self::Identity::type_info() @@ -718,11 +718,11 @@ pub mod vrf { let proof = self.0.dleq_proove(extra, &inout, true).0; - VrfSignature { pre_output: VrfPreOutput(inout.to_output()), proof: VrfProof(proof) } + VrfSignature { pre_output: VrfPreOutput(inout.to_preout()), proof: VrfProof(proof) } } fn vrf_pre_output(&self, input: &Self::VrfInput) -> Self::VrfPreOutput { - let pre_output = self.0.vrf_create_hash(input.0.clone()).to_output(); + let pre_output = self.0.vrf_create_hash(input.0.clone()).to_preout(); VrfPreOutput(pre_output) } } @@ -763,6 +763,7 @@ pub mod vrf { ScalarFormatError => "Signature error: `ScalarFormatError`".into(), NotMarkedSchnorrkel => "Signature error: `NotMarkedSchnorrkel`".into(), BytesLengthError { .. 
} => "Signature error: `BytesLengthError`".into(), + InvalidKey => "Signature error: `InvalidKey`".into(), MuSigAbsent { musig_stage: Commitment } => "Signature error: `MuSigAbsent` at stage `Commitment`".into(), MuSigAbsent { musig_stage: Reveal } => @@ -1142,7 +1143,7 @@ mod tests { }) .unwrap(); let signature2 = - VrfSignature { pre_output: VrfPreOutput(inout.to_output()), proof: VrfProof(proof) }; + VrfSignature { pre_output: VrfPreOutput(inout.to_preout()), proof: VrfProof(proof) }; assert!(public.vrf_verify(&data, &signature2)); assert_eq!(signature.pre_output, signature2.pre_output); diff --git a/substrate/primitives/crypto/ec-utils/Cargo.toml b/substrate/primitives/crypto/ec-utils/Cargo.toml index 548328fec3c1..3baa8ea5b784 100644 --- a/substrate/primitives/crypto/ec-utils/Cargo.toml +++ b/substrate/primitives/crypto/ec-utils/Cargo.toml @@ -8,6 +8,9 @@ license = "Apache-2.0" homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -23,7 +26,7 @@ ark-ed-on-bls12-381-bandersnatch-ext = { version = "0.4.1", default-features = f ark-ed-on-bls12-381-bandersnatch = { version = "0.4.0", default-features = false, optional = true } ark-ed-on-bls12-377-ext = { version = "0.4.1", default-features = false, optional = true } ark-ed-on-bls12-377 = { version = "0.4.0", default-features = false, optional = true } -ark-scale = { version = "0.0.11", default-features = false, features = ["hazmat"], optional = true } +ark-scale = { version = "0.0.12", default-features = false, features = ["hazmat"], optional = true } sp-runtime-interface = { path = "../../runtime-interface", default-features = false, optional = true } sp-std = { path = "../../std", default-features = false, optional = true } diff --git a/substrate/primitives/database/Cargo.toml b/substrate/primitives/database/Cargo.toml index 430895236d4f..00ccf97c83e9 100644 --- a/substrate/primitives/database/Cargo.toml +++ b/substrate/primitives/database/Cargo.toml @@ -10,6 +10,9 @@ description = "Substrate database trait." documentation = "https://docs.rs/sp-database" readme = "README.md" +[lints] +workspace = true + [dependencies] kvdb = "0.13.0" parking_lot = "0.12.1" diff --git a/substrate/primitives/database/src/lib.rs b/substrate/primitives/database/src/lib.rs index 58a336cf3b5f..790b8ba29055 100644 --- a/substrate/primitives/database/src/lib.rs +++ b/substrate/primitives/database/src/lib.rs @@ -102,7 +102,9 @@ pub trait Database>: Send + Sync { /// This may be faster than `get` since it doesn't allocate. /// Use `with_get` helper function if you need `f` to return a value from `f` fn with_get(&self, col: ColumnId, key: &[u8], f: &mut dyn FnMut(&[u8])) { - self.get(col, key).map(|v| f(&v)); + if let Some(v) = self.get(col, key) { + f(&v) + } } /// Check if database supports internal ref counting for state data. diff --git a/substrate/primitives/debug-derive/Cargo.toml b/substrate/primitives/debug-derive/Cargo.toml index 1f739c256d09..b7a65d658cfc 100644 --- a/substrate/primitives/debug-derive/Cargo.toml +++ b/substrate/primitives/debug-derive/Cargo.toml @@ -9,6 +9,8 @@ repository.workspace = true description = "Macros to derive runtime debug implementation." 
documentation = "https://docs.rs/sp-debug-derive" +[lints] +workspace = true [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +20,7 @@ proc-macro = true [dependencies] quote = "1.0.28" -syn = "2.0.39" +syn = "2.0.48" proc-macro2 = "1.0.56" [features] diff --git a/substrate/primitives/externalities/Cargo.toml b/substrate/primitives/externalities/Cargo.toml index 86d31c31cbae..4c7afc38b815 100644 --- a/substrate/primitives/externalities/Cargo.toml +++ b/substrate/primitives/externalities/Cargo.toml @@ -10,6 +10,9 @@ description = "Substrate externalities abstraction" documentation = "https://docs.rs/sp-externalities" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/genesis-builder/Cargo.toml b/substrate/primitives/genesis-builder/Cargo.toml index dacfe0b53c8c..8e6286488f4f 100644 --- a/substrate/primitives/genesis-builder/Cargo.toml +++ b/substrate/primitives/genesis-builder/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Substrate GenesisConfig builder API" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/inherents/Cargo.toml b/substrate/primitives/inherents/Cargo.toml index 2b5bad5d7463..5c13694ceac1 100644 --- a/substrate/primitives/inherents/Cargo.toml +++ b/substrate/primitives/inherents/Cargo.toml @@ -10,11 +10,14 @@ description = "Provides types and traits for creating and checking inherents." documentation = "https://docs.rs/sp-inherents" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = { version = "0.1.57", optional = true } +async-trait = { version = "0.1.74", optional = true } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } impl-trait-for-tuples = "0.2.2" diff --git a/substrate/primitives/io/Cargo.toml b/substrate/primitives/io/Cargo.toml index ecc9e92a540b..d530e1b0311b 100644 --- a/substrate/primitives/io/Cargo.toml +++ b/substrate/primitives/io/Cargo.toml @@ -11,6 +11,9 @@ documentation = "https://docs.rs/sp-io" readme = "README.md" build = "build.rs" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/keyring/Cargo.toml b/substrate/primitives/keyring/Cargo.toml index 2519b4d1da8b..b33877418414 100644 --- a/substrate/primitives/keyring/Cargo.toml +++ b/substrate/primitives/keyring/Cargo.toml @@ -10,11 +10,13 @@ description = "Keyring support code for the runtime. A set of test accounts." 
documentation = "https://docs.rs/sp-keyring" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -lazy_static = "1.4.0" strum = { version = "0.25", features = ["derive"], default-features = false } sp-core = { path = "../core" } sp-runtime = { path = "../runtime" } diff --git a/substrate/primitives/keyring/src/bandersnatch.rs b/substrate/primitives/keyring/src/bandersnatch.rs index 649cb9341b01..bbdf55fd5557 100644 --- a/substrate/primitives/keyring/src/bandersnatch.rs +++ b/substrate/primitives/keyring/src/bandersnatch.rs @@ -22,12 +22,9 @@ pub use sp_core::bandersnatch; use sp_core::{ bandersnatch::{Pair, Public, Signature}, crypto::UncheckedFrom, - ByteArray, Pair as PairT, + hex2array, ByteArray, Pair as PairT, }; -use lazy_static::lazy_static; -use std::{collections::HashMap, ops::Deref, sync::Mutex}; - /// Set of test accounts. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, strum::EnumIter)] pub enum Keyring { @@ -75,7 +72,7 @@ impl Keyring { } pub fn public(self) -> Public { - self.pair().public() + Public::from(self) } pub fn to_seed(self) -> String { @@ -130,20 +127,9 @@ impl std::str::FromStr for Keyring { } } -lazy_static! { - static ref PRIVATE_KEYS: Mutex> = - Mutex::new(Keyring::iter().map(|who| (who, who.pair())).collect()); - static ref PUBLIC_KEYS: HashMap = PRIVATE_KEYS - .lock() - .unwrap() - .iter() - .map(|(&who, pair)| (who, pair.public())) - .collect(); -} - impl From for Public { fn from(k: Keyring) -> Self { - *(*PUBLIC_KEYS).get(&k).unwrap() + Public::unchecked_from(<[u8; PUBLIC_RAW_LEN]>::from(k)) } } @@ -155,32 +141,24 @@ impl From for Pair { impl From for [u8; PUBLIC_RAW_LEN] { fn from(k: Keyring) -> Self { - *(*PUBLIC_KEYS).get(&k).unwrap().as_ref() - } -} - -impl From for &'static [u8; PUBLIC_RAW_LEN] { - fn from(k: Keyring) -> Self { - PUBLIC_KEYS.get(&k).unwrap().as_ref() - } -} - -impl AsRef<[u8; PUBLIC_RAW_LEN]> for Keyring { - fn as_ref(&self) -> &[u8; PUBLIC_RAW_LEN] { - PUBLIC_KEYS.get(self).unwrap().as_ref() - } -} - -impl AsRef for Keyring { - fn as_ref(&self) -> &Public { - PUBLIC_KEYS.get(self).unwrap() - } -} - -impl Deref for Keyring { - type Target = [u8; PUBLIC_RAW_LEN]; - fn deref(&self) -> &[u8; PUBLIC_RAW_LEN] { - PUBLIC_KEYS.get(self).unwrap().as_ref() + match k { + Keyring::Alice => + hex2array!("9c8af77d3a4e3f6f076853922985b9e6724fc9675329087f47aff1ceaaae772180"), + Keyring::Bob => + hex2array!("1abfbb76dc8374a1a6d93d59a5c81f07c18835f4681a6258aa0f514d363bff4780"), + Keyring::Charlie => + hex2array!("0f4a9990aca3d39a7cd8bf187e2e81a9ea6f9cedb2db405f2fffff384c5dd02680"), + Keyring::Dave => + hex2array!("bd7a87d4dfa89926a408b5acbed554ae3b053fa3532531053295cbabf07d337000"), + Keyring::Eve => + hex2array!("f992d5b8eac8fc004d521bee6edc1174cfa7fae3a1baec8262511ee351f9f85e00"), + Keyring::Ferdie => + hex2array!("1ce2613e89bc5c8e358aad884099cfb576a61176f2f9968cd0d486a04457245180"), + Keyring::One => + hex2array!("a29e03ac273e521274d8e501a6242abd2ab393d7e197221a9113bdf8e2e5b34d00"), + Keyring::Two => + hex2array!("f968d47e819ddb18a9d0f2ebd16501680b1a3f07ee375c6f81310e5f99a04f4d00"), + } } } @@ -207,4 +185,9 @@ mod tests { &Keyring::Bob.public(), )); } + #[test] + fn verify_static_public_keys() { + assert!(Keyring::iter() + .all(|k| { k.pair().public().as_ref() == <[u8; PUBLIC_RAW_LEN]>::from(k) })); + } } diff --git a/substrate/primitives/keyring/src/ed25519.rs b/substrate/primitives/keyring/src/ed25519.rs index c590275683f3..03d1726b41c4 100644 --- 
a/substrate/primitives/keyring/src/ed25519.rs +++ b/substrate/primitives/keyring/src/ed25519.rs @@ -18,14 +18,12 @@ //! Support code for the runtime. A set of test accounts. -use lazy_static::lazy_static; pub use sp_core::ed25519; use sp_core::{ ed25519::{Pair, Public, Signature}, - ByteArray, Pair as PairT, H256, + hex2array, ByteArray, Pair as PairT, H256, }; use sp_runtime::AccountId32; -use std::{collections::HashMap, ops::Deref}; /// Set of test accounts. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, strum::EnumIter)] @@ -94,7 +92,7 @@ impl Keyring { } pub fn public(self) -> Public { - self.pair().public() + Public::from(self) } pub fn to_seed(self) -> String { @@ -129,16 +127,9 @@ impl From for sp_runtime::MultiSigner { } } -lazy_static! { - static ref PRIVATE_KEYS: HashMap = - Keyring::iter().map(|i| (i, i.pair())).collect(); - static ref PUBLIC_KEYS: HashMap = - PRIVATE_KEYS.iter().map(|(&name, pair)| (name, pair.public())).collect(); -} - impl From for Public { fn from(k: Keyring) -> Self { - *(*PUBLIC_KEYS).get(&k).unwrap() + Public::from_raw(k.into()) } } @@ -156,38 +147,42 @@ impl From for Pair { impl From for [u8; 32] { fn from(k: Keyring) -> Self { - *(*PUBLIC_KEYS).get(&k).unwrap().as_array_ref() + match k { + Keyring::Alice => + hex2array!("88dc3417d5058ec4b4503e0c12ea1a0a89be200fe98922423d4334014fa6b0ee"), + Keyring::Bob => + hex2array!("d17c2d7823ebf260fd138f2d7e27d114c0145d968b5ff5006125f2414fadae69"), + Keyring::Charlie => + hex2array!("439660b36c6c03afafca027b910b4fecf99801834c62a5e6006f27d978de234f"), + Keyring::Dave => + hex2array!("5e639b43e0052c47447dac87d6fd2b6ec50bdd4d0f614e4299c665249bbd09d9"), + Keyring::Eve => + hex2array!("1dfe3e22cc0d45c70779c1095f7489a8ef3cf52d62fbd8c2fa38c9f1723502b5"), + Keyring::Ferdie => + hex2array!("568cb4a574c6d178feb39c27dfc8b3f789e5f5423e19c71633c748b9acf086b5"), + Keyring::AliceStash => + hex2array!("451781cd0c5504504f69ceec484cc66e4c22a2b6a9d20fb1a426d91ad074a2a8"), + Keyring::BobStash => + hex2array!("292684abbb28def63807c5f6e84e9e8689769eb37b1ab130d79dbfbf1b9a0d44"), + Keyring::CharlieStash => + hex2array!("dd6a6118b6c11c9c9e5a4f34ed3d545e2c74190f90365c60c230fa82e9423bb9"), + Keyring::DaveStash => + hex2array!("1d0432d75331ab299065bee79cdb1bdc2497c597a3087b4d955c67e3c000c1e2"), + Keyring::EveStash => + hex2array!("c833bdd2e1a7a18acc1c11f8596e2e697bb9b42d6b6051e474091a1d43a294d7"), + Keyring::FerdieStash => + hex2array!("199d749dbf4b8135cb1f3c8fd697a390fc0679881a8a110c1d06375b3b62cd09"), + Keyring::One => + hex2array!("16f97016bbea8f7b45ae6757b49efc1080accc175d8f018f9ba719b60b0815e4"), + Keyring::Two => + hex2array!("5079bcd20fd97d7d2f752c4607012600b401950260a91821f73e692071c82bf5"), + } } } impl From for H256 { fn from(k: Keyring) -> Self { - (*PUBLIC_KEYS).get(&k).unwrap().as_array_ref().into() - } -} - -impl From for &'static [u8; 32] { - fn from(k: Keyring) -> Self { - (*PUBLIC_KEYS).get(&k).unwrap().as_array_ref() - } -} - -impl AsRef<[u8; 32]> for Keyring { - fn as_ref(&self) -> &[u8; 32] { - (*PUBLIC_KEYS).get(self).unwrap().as_array_ref() - } -} - -impl AsRef for Keyring { - fn as_ref(&self) -> &Public { - (*PUBLIC_KEYS).get(self).unwrap() - } -} - -impl Deref for Keyring { - type Target = [u8; 32]; - fn deref(&self) -> &[u8; 32] { - (*PUBLIC_KEYS).get(self).unwrap().as_array_ref() + k.into() } } @@ -214,4 +209,9 @@ mod tests { &Keyring::Bob.public(), )); } + + #[test] + fn verify_static_public_keys() { + assert!(Keyring::iter().all(|k| { k.pair().public().as_ref() == <[u8; 32]>::from(k) })); + } } 
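For orientation between the keyring hunks above and below: a minimal, self-contained sketch of how the `hex2array!` macro introduced in `const_hex2array.rs` is used in place of the removed `lazy_static` maps. It assumes the `sp-core` crate from this patch as a dependency; the constant name and the standalone `main` are illustrative only, and the hex literal is Alice's ed25519 public key taken from the hunk above.

use sp_core::hex2array;

fn main() {
    // The array length is computed from the literal at compile time (`len() / 2`),
    // so an odd-length or non-hex literal fails the build here instead of
    // panicking at runtime.
    const ALICE_ED25519: [u8; 32] =
        hex2array!("88dc3417d5058ec4b4503e0c12ea1a0a89be200fe98922423d4334014fa6b0ee");

    // The result is a plain `[u8; 32]`, which is what the keyring now feeds into
    // `Public::from_raw` / `unchecked_from` instead of looking keys up in a map.
    assert_eq!(ALICE_ED25519[0], 0x88);
    assert_eq!(ALICE_ED25519.len(), 32);
}

Hard-coding the bytes this way drops the `lazy_static`/`Mutex` bookkeeping and the runtime `unwrap()` calls, and the new `verify_static_public_keys` tests in each keyring module check the constants against the seed-derived pairs.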
diff --git a/substrate/primitives/keyring/src/sr25519.rs b/substrate/primitives/keyring/src/sr25519.rs index 886506d78dd1..67067fa80b72 100644 --- a/substrate/primitives/keyring/src/sr25519.rs +++ b/substrate/primitives/keyring/src/sr25519.rs @@ -18,14 +18,13 @@ //! Support code for the runtime. A set of test accounts. -use lazy_static::lazy_static; pub use sp_core::sr25519; use sp_core::{ + hex2array, sr25519::{Pair, Public, Signature}, ByteArray, Pair as PairT, H256, }; use sp_runtime::AccountId32; -use std::{collections::HashMap, ops::Deref}; /// Set of test accounts. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, strum::EnumIter)] @@ -94,7 +93,7 @@ impl Keyring { } pub fn public(self) -> Public { - self.pair().public() + Public::from(self) } pub fn to_seed(self) -> String { @@ -166,13 +165,6 @@ impl std::str::FromStr for Keyring { } } -lazy_static! { - static ref PRIVATE_KEYS: HashMap = - Keyring::iter().map(|i| (i, i.pair())).collect(); - static ref PUBLIC_KEYS: HashMap = - PRIVATE_KEYS.iter().map(|(&name, pair)| (name, pair.public())).collect(); -} - impl From for AccountId32 { fn from(k: Keyring) -> Self { k.to_account_id() @@ -181,7 +173,7 @@ impl From for AccountId32 { impl From for Public { fn from(k: Keyring) -> Self { - *(*PUBLIC_KEYS).get(&k).unwrap() + Public::from_raw(k.into()) } } @@ -193,38 +185,42 @@ impl From for Pair { impl From for [u8; 32] { fn from(k: Keyring) -> Self { - *(*PUBLIC_KEYS).get(&k).unwrap().as_array_ref() + match k { + Keyring::Alice => + hex2array!("d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d"), + Keyring::Bob => + hex2array!("8eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a48"), + Keyring::Charlie => + hex2array!("90b5ab205c6974c9ea841be688864633dc9ca8a357843eeacf2314649965fe22"), + Keyring::Dave => + hex2array!("306721211d5404bd9da88e0204360a1a9ab8b87c66c1bc2fcdd37f3c2222cc20"), + Keyring::Eve => + hex2array!("e659a7a1628cdd93febc04a4e0646ea20e9f5f0ce097d9a05290d4a9e054df4e"), + Keyring::Ferdie => + hex2array!("1cbd2d43530a44705ad088af313e18f80b53ef16b36177cd4b77b846f2a5f07c"), + Keyring::AliceStash => + hex2array!("be5ddb1579b72e84524fc29e78609e3caf42e85aa118ebfe0b0ad404b5bdd25f"), + Keyring::BobStash => + hex2array!("fe65717dad0447d715f660a0a58411de509b42e6efb8375f562f58a554d5860e"), + Keyring::CharlieStash => + hex2array!("1e07379407fecc4b89eb7dbd287c2c781cfb1907a96947a3eb18e4f8e7198625"), + Keyring::DaveStash => + hex2array!("e860f1b1c7227f7c22602f53f15af80747814dffd839719731ee3bba6edc126c"), + Keyring::EveStash => + hex2array!("8ac59e11963af19174d0b94d5d78041c233f55d2e19324665bafdfb62925af2d"), + Keyring::FerdieStash => + hex2array!("101191192fc877c24d725b337120fa3edc63d227bbc92705db1e2cb65f56981a"), + Keyring::One => + hex2array!("ac859f8a216eeb1b320b4c76d118da3d7407fa523484d0a980126d3b4d0d220a"), + Keyring::Two => + hex2array!("1254f7017f0b8347ce7ab14f96d818802e7e9e0c0d1b7c9acb3c726b080e7a03"), + } } } impl From for H256 { fn from(k: Keyring) -> Self { - (*PUBLIC_KEYS).get(&k).unwrap().as_array_ref().into() - } -} - -impl From for &'static [u8; 32] { - fn from(k: Keyring) -> Self { - (*PUBLIC_KEYS).get(&k).unwrap().as_array_ref() - } -} - -impl AsRef<[u8; 32]> for Keyring { - fn as_ref(&self) -> &[u8; 32] { - (*PUBLIC_KEYS).get(self).unwrap().as_array_ref() - } -} - -impl AsRef for Keyring { - fn as_ref(&self) -> &Public { - (*PUBLIC_KEYS).get(self).unwrap() - } -} - -impl Deref for Keyring { - type Target = [u8; 32]; - fn deref(&self) -> &[u8; 32] { - 
(*PUBLIC_KEYS).get(self).unwrap().as_array_ref() + k.into() } } @@ -251,4 +247,8 @@ mod tests { &Keyring::Bob.public(), )); } + #[test] + fn verify_static_public_keys() { + assert!(Keyring::iter().all(|k| { k.pair().public().as_ref() == <[u8; 32]>::from(k) })); + } } diff --git a/substrate/primitives/keystore/Cargo.toml b/substrate/primitives/keystore/Cargo.toml index e9695161f46e..592174657443 100644 --- a/substrate/primitives/keystore/Cargo.toml +++ b/substrate/primitives/keystore/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Keystore primitives." documentation = "https://docs.rs/sp-core" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -20,7 +23,7 @@ sp-core = { path = "../core", default-features = false } sp-externalities = { path = "../externalities", default-features = false } [dev-dependencies] -rand = "0.8" +rand = "0.8.5" rand_chacha = "0.3" [features] diff --git a/substrate/primitives/maybe-compressed-blob/Cargo.toml b/substrate/primitives/maybe-compressed-blob/Cargo.toml index 580bcbda862f..051f212b74af 100644 --- a/substrate/primitives/maybe-compressed-blob/Cargo.toml +++ b/substrate/primitives/maybe-compressed-blob/Cargo.toml @@ -10,6 +10,9 @@ description = "Handling of blobs, usually Wasm code, which may be compresed" documentation = "https://docs.rs/sp-maybe-compressed-blob" readme = "README.md" +[lints] +workspace = true + [dependencies] thiserror = "1.0" zstd = { version = "0.13", default-features = false } diff --git a/substrate/primitives/metadata-ir/Cargo.toml b/substrate/primitives/metadata-ir/Cargo.toml index f73a1d7b3802..0dc496bab531 100644 --- a/substrate/primitives/metadata-ir/Cargo.toml +++ b/substrate/primitives/metadata-ir/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Intermediate representation of the runtime metadata." 
documentation = "https://docs.rs/sp-metadata-ir" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/mixnet/Cargo.toml b/substrate/primitives/mixnet/Cargo.toml index a03fdab8741a..6ea7a6cbe8c4 100644 --- a/substrate/primitives/mixnet/Cargo.toml +++ b/substrate/primitives/mixnet/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository = "https://github.com/paritytech/substrate/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/npos-elections/Cargo.toml b/substrate/primitives/npos-elections/Cargo.toml index 1ab6c2adf826..5d7e8704f4cb 100644 --- a/substrate/primitives/npos-elections/Cargo.toml +++ b/substrate/primitives/npos-elections/Cargo.toml @@ -9,13 +9,16 @@ repository.workspace = true description = "NPoS election algorithm primitives" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } sp-arithmetic = { path = "../arithmetic", default-features = false } sp-core = { path = "../core", default-features = false } sp-runtime = { path = "../runtime", default-features = false } diff --git a/substrate/primitives/npos-elections/fuzzer/Cargo.toml b/substrate/primitives/npos-elections/fuzzer/Cargo.toml index bd1fa856813b..e7cfb75e54b3 100644 --- a/substrate/primitives/npos-elections/fuzzer/Cargo.toml +++ b/substrate/primitives/npos-elections/fuzzer/Cargo.toml @@ -10,11 +10,14 @@ description = "Fuzzer for phragmén implementation." documentation = "https://docs.rs/sp-npos-elections-fuzzer" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } honggfuzz = "0.5" rand = { version = "0.8", features = ["small_rng", "std"] } sp-npos-elections = { path = ".." 
} diff --git a/substrate/primitives/offchain/Cargo.toml b/substrate/primitives/offchain/Cargo.toml index 201e75802cf7..19d66ae31e9f 100644 --- a/substrate/primitives/offchain/Cargo.toml +++ b/substrate/primitives/offchain/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/panic-handler/Cargo.toml b/substrate/primitives/panic-handler/Cargo.toml index 428062757c15..a0df527f56e0 100644 --- a/substrate/primitives/panic-handler/Cargo.toml +++ b/substrate/primitives/panic-handler/Cargo.toml @@ -10,6 +10,9 @@ description = "Custom panic hook with bug report link" documentation = "https://docs.rs/sp-panic-handler" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/rpc/Cargo.toml b/substrate/primitives/rpc/Cargo.toml index cf10af31977f..07bb3bf7293b 100644 --- a/substrate/primitives/rpc/Cargo.toml +++ b/substrate/primitives/rpc/Cargo.toml @@ -9,13 +9,16 @@ repository.workspace = true description = "Substrate RPC primitives and utilities." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] rustc-hash = "1.1.0" -serde = { version = "1.0.193", features = ["derive"] } +serde = { version = "1.0.195", features = ["derive"] } sp-core = { path = "../core" } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" diff --git a/substrate/primitives/runtime-interface/Cargo.toml b/substrate/primitives/runtime-interface/Cargo.toml index 80565420f6b2..0fa2d1f3276c 100644 --- a/substrate/primitives/runtime-interface/Cargo.toml +++ b/substrate/primitives/runtime-interface/Cargo.toml @@ -10,6 +10,9 @@ description = "Substrate runtime interface" documentation = "https://docs.rs/sp-runtime-interface/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -32,7 +35,7 @@ sp-state-machine = { path = "../state-machine" } sp-core = { path = "../core" } sp-io = { path = "../io" } rustversion = "1.0.6" -trybuild = "1.0.74" +trybuild = "1.0.88" [features] default = ["std"] diff --git a/substrate/primitives/runtime-interface/proc-macro/Cargo.toml b/substrate/primitives/runtime-interface/proc-macro/Cargo.toml index efabaee3aeb8..869cad06e56c 100644 --- a/substrate/primitives/runtime-interface/proc-macro/Cargo.toml +++ b/substrate/primitives/runtime-interface/proc-macro/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "This crate provides procedural macros for usage within the context of the Substrate runtime interface." 
documentation = "https://docs.rs/sp-runtime-interface-proc-macro" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,8 +20,8 @@ proc-macro = true [dependencies] Inflector = "0.11.4" -proc-macro-crate = "2.0.0" +proc-macro-crate = "3.0.0" proc-macro2 = "1.0.56" quote = "1.0.28" expander = "2.0.0" -syn = { version = "2.0.39", features = ["extra-traits", "fold", "full", "visit"] } +syn = { version = "2.0.48", features = ["extra-traits", "fold", "full", "visit"] } diff --git a/substrate/primitives/runtime-interface/proc-macro/src/utils.rs b/substrate/primitives/runtime-interface/proc-macro/src/utils.rs index 5a751916d702..09a02b04dd4a 100644 --- a/substrate/primitives/runtime-interface/proc-macro/src/utils.rs +++ b/substrate/primitives/runtime-interface/proc-macro/src/utils.rs @@ -90,7 +90,7 @@ struct RuntimeInterfaceFunctionSet { impl RuntimeInterfaceFunctionSet { fn new(version: VersionAttribute, trait_item: &TraitItemFn) -> Result { Ok(Self { - latest_version_to_call: version.is_callable().then(|| version.version), + latest_version_to_call: version.is_callable().then_some(version.version), versions: BTreeMap::from([( version.version, RuntimeInterfaceFunction::new(trait_item)?, diff --git a/substrate/primitives/runtime-interface/test-wasm-deprecated/Cargo.toml b/substrate/primitives/runtime-interface/test-wasm-deprecated/Cargo.toml index 07c820c06014..f663c6d47263 100644 --- a/substrate/primitives/runtime-interface/test-wasm-deprecated/Cargo.toml +++ b/substrate/primitives/runtime-interface/test-wasm-deprecated/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/runtime-interface/test-wasm/Cargo.toml b/substrate/primitives/runtime-interface/test-wasm/Cargo.toml index 79e79857341b..ecb3c7f8732d 100644 --- a/substrate/primitives/runtime-interface/test-wasm/Cargo.toml +++ b/substrate/primitives/runtime-interface/test-wasm/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/runtime-interface/test/Cargo.toml b/substrate/primitives/runtime-interface/test/Cargo.toml index 661af1fa3918..55d70960989e 100644 --- a/substrate/primitives/runtime-interface/test/Cargo.toml +++ b/substrate/primitives/runtime-interface/test/Cargo.toml @@ -8,6 +8,9 @@ publish = false homepage = "https://substrate.io" repository.workspace = true +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/runtime/Cargo.toml b/substrate/primitives/runtime/Cargo.toml index d42514c8ebfc..27534dea6a06 100644 --- a/substrate/primitives/runtime/Cargo.toml +++ b/substrate/primitives/runtime/Cargo.toml @@ -10,6 +10,9 @@ description = "Runtime Modules shared primitive types." 
documentation = "https://docs.rs/sp-runtime" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -22,7 +25,7 @@ log = { version = "0.4.17", default-features = false } paste = "1.0" rand = { version = "0.8.5", optional = true } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } sp-application-crypto = { path = "../application-crypto", default-features = false } sp-arithmetic = { path = "../arithmetic", default-features = false } sp-core = { path = "../core", default-features = false } @@ -31,11 +34,11 @@ sp-std = { path = "../std", default-features = false } sp-weights = { path = "../weights", default-features = false } docify = { version = "0.2.6" } -# simple-mermaid = { git = "https://github.com/kianenigma/simple-mermaid.git", branch = "main" } +# simple-mermaid = { git = "https://github.com/kianenigma/simple-mermaid.git", rev = "e48b187bcfd5cc75111acd9d241f1bd36604344b", optional = true } [dev-dependencies] rand = "0.8.5" -serde_json = "1.0.108" +serde_json = "1.0.111" zstd = { version = "0.13", default-features = false } sp-api = { path = "../api" } sp-state-machine = { path = "../state-machine" } @@ -54,6 +57,7 @@ std = [ "rand", "scale-info/std", "serde/std", + # "simple-mermaid", "sp-api/std", "sp-application-crypto/std", "sp-arithmetic/std", diff --git a/substrate/primitives/runtime/src/generic/header.rs b/substrate/primitives/runtime/src/generic/header.rs index 64f485aec056..d1ab070fc1db 100644 --- a/substrate/primitives/runtime/src/generic/header.rs +++ b/substrate/primitives/runtime/src/generic/header.rs @@ -22,16 +22,11 @@ use crate::{ codec::{Codec, Decode, Encode}, generic::Digest, scale_info::TypeInfo, - traits::{ - self, AtLeast32BitUnsigned, Hash as HashT, MaybeDisplay, MaybeFromStr, - MaybeSerializeDeserialize, Member, - }, + traits::{self, AtLeast32BitUnsigned, BlockNumber, Hash as HashT, MaybeDisplay, Member}, }; -use codec::{FullCodec, MaxEncodedLen}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; use sp_core::U256; -use sp_std::fmt::Debug; /// Abstraction over a block header for a substrate chain. #[derive(Encode, Decode, PartialEq, Eq, Clone, sp_core::RuntimeDebug, TypeInfo)] @@ -80,20 +75,7 @@ where impl traits::Header for Header where - Number: Member - + MaybeSerializeDeserialize - + MaybeFromStr - + Debug - + Default - + sp_std::hash::Hash - + MaybeDisplay - + AtLeast32BitUnsigned - + FullCodec - + Copy - + MaxEncodedLen - + Into - + TryFrom - + TypeInfo, + Number: BlockNumber, Hash: HashT, { type Number = Number; diff --git a/substrate/primitives/runtime/src/generic/unchecked_extrinsic.rs b/substrate/primitives/runtime/src/generic/unchecked_extrinsic.rs index 8d43ad21f53d..c1c9a6b75107 100644 --- a/substrate/primitives/runtime/src/generic/unchecked_extrinsic.rs +++ b/substrate/primitives/runtime/src/generic/unchecked_extrinsic.rs @@ -58,7 +58,8 @@ type UncheckedSignaturePayload = (Address, Signature, /// could in principle be any other interaction. Transactions are either signed or unsigned. A /// sensible transaction pool should ensure that only transactions that are worthwhile are /// considered for block-building. 
-// #[doc = simple_mermaid::mermaid!("../../../../../docs/mermaid/extrinsics.mmd")] +// #[cfg_attr(feature = "std", doc = +// simple_mermaid::mermaid!("../../../../../docs/mermaid/extrinsics.mmd"))] /// This type is by no means enforced within Substrate, but given its genericness, it is highly /// likely that for most use-cases it will suffice. Thus, the encoding of this type will dictate /// exactly what bytes should be sent to a runtime to transact with it. diff --git a/substrate/primitives/runtime/src/offchain/storage_lock.rs b/substrate/primitives/runtime/src/offchain/storage_lock.rs index fe42e3832d86..5477cfefc1e6 100644 --- a/substrate/primitives/runtime/src/offchain/storage_lock.rs +++ b/substrate/primitives/runtime/src/offchain/storage_lock.rs @@ -157,7 +157,7 @@ pub struct BlockAndTimeDeadline { impl Clone for BlockAndTimeDeadline { fn clone(&self) -> Self { - Self { block_number: self.block_number.clone(), timestamp: self.timestamp } + Self { block_number: self.block_number, timestamp: self.timestamp } } } diff --git a/substrate/primitives/runtime/src/traits.rs b/substrate/primitives/runtime/src/traits.rs index 946bcc0fcb54..16900fc3e157 100644 --- a/substrate/primitives/runtime/src/traits.rs +++ b/substrate/primitives/runtime/src/traits.rs @@ -39,7 +39,7 @@ pub use sp_arithmetic::traits::{ EnsureOp, EnsureOpAssign, EnsureSub, EnsureSubAssign, IntegerSquareRoot, One, SaturatedConversion, Saturating, UniqueSaturatedFrom, UniqueSaturatedInto, Zero, }; -use sp_core::{self, storage::StateVersion, Hasher, RuntimeDebug, TypeId}; +use sp_core::{self, storage::StateVersion, Hasher, RuntimeDebug, TypeId, U256}; #[doc(hidden)] pub use sp_core::{ parameter_types, ConstBool, ConstI128, ConstI16, ConstI32, ConstI64, ConstI8, ConstU128, @@ -1130,6 +1130,44 @@ pub trait IsMember { fn is_member(member_id: &MemberId) -> bool; } +/// Super trait with all the attributes for a block number. +pub trait BlockNumber: + Member + + MaybeSerializeDeserialize + + MaybeFromStr + + Debug + + sp_std::hash::Hash + + Copy + + MaybeDisplay + + AtLeast32BitUnsigned + + Into + + TryFrom + + Default + + TypeInfo + + MaxEncodedLen + + FullCodec +{ +} + +impl< + T: Member + + MaybeSerializeDeserialize + + MaybeFromStr + + Debug + + sp_std::hash::Hash + + Copy + + MaybeDisplay + + AtLeast32BitUnsigned + + Into + + TryFrom + + Default + + TypeInfo + + MaxEncodedLen + + FullCodec, + > BlockNumber for T +{ +} + /// Something which fulfills the abstract idea of a Substrate header. It has types for a `Number`, /// a `Hash` and a `Hashing`. It provides access to an `extrinsics_root`, `state_root` and /// `parent_hash`, as well as a `digest` and a block `number`. @@ -1139,18 +1177,7 @@ pub trait Header: Clone + Send + Sync + Codec + Eq + MaybeSerialize + Debug + TypeInfo + 'static { /// Header number. - type Number: Member - + MaybeSerializeDeserialize - + MaybeFromStr - + Debug - + sp_std::hash::Hash - + Copy - + MaybeDisplay - + AtLeast32BitUnsigned - + Default - + TypeInfo - + MaxEncodedLen - + FullCodec; + type Number: BlockNumber; /// Header hash type type Hash: HashOutput; /// Hashing algorithm @@ -1392,7 +1419,7 @@ pub trait Dispatchable { /// Every function call from your runtime has an origin, which specifies where the extrinsic was /// generated from. In the case of a signed extrinsic (transaction), the origin contains an /// identifier for the caller. The origin can be empty in the case of an inherent extrinsic. - type RuntimeOrigin; + type RuntimeOrigin: Debug; /// ... 
type Config; /// An opaque set of information attached to the transaction. This could be constructed anywhere @@ -2246,7 +2273,15 @@ pub trait BlockIdTo { /// Get current block number pub trait BlockNumberProvider { /// Type of `BlockNumber` to provide. - type BlockNumber: Codec + Clone + Ord + Eq + AtLeast32BitUnsigned; + type BlockNumber: Codec + + Clone + + Ord + + Eq + + AtLeast32BitUnsigned + + TypeInfo + + Debug + + MaxEncodedLen + + Copy; /// Returns the current block number. /// @@ -2274,6 +2309,13 @@ pub trait BlockNumberProvider { fn set_block_number(_block: Self::BlockNumber) {} } +impl BlockNumberProvider for () { + type BlockNumber = u32; + fn current_block_number() -> Self::BlockNumber { + 0 + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/substrate/primitives/session/Cargo.toml b/substrate/primitives/session/Cargo.toml index b7e43f973004..25700210feef 100644 --- a/substrate/primitives/session/Cargo.toml +++ b/substrate/primitives/session/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Primitives for sessions" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/staking/Cargo.toml b/substrate/primitives/staking/Cargo.toml index f52bf3316db2..5ffe6fbeaf5d 100644 --- a/substrate/primitives/staking/Cargo.toml +++ b/substrate/primitives/staking/Cargo.toml @@ -9,11 +9,14 @@ repository.workspace = true description = "A crate which contains primitives that are useful for implementation that uses staking approaches in general. Definitions related to sessions, slashing, etc go here." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } impl-trait-for-tuples = "0.2.2" diff --git a/substrate/primitives/state-machine/Cargo.toml b/substrate/primitives/state-machine/Cargo.toml index ab07d83af6a9..f891a74dbf4d 100644 --- a/substrate/primitives/state-machine/Cargo.toml +++ b/substrate/primitives/state-machine/Cargo.toml @@ -10,6 +10,9 @@ repository.workspace = true documentation = "https://docs.rs/sp-state-machine" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/state-machine/src/overlayed_changes/changeset.rs b/substrate/primitives/state-machine/src/overlayed_changes/changeset.rs index a8b590ca0b60..8dc299699968 100644 --- a/substrate/primitives/state-machine/src/overlayed_changes/changeset.rs +++ b/substrate/primitives/state-machine/src/overlayed_changes/changeset.rs @@ -299,7 +299,7 @@ impl OverlayedMap { /// Call this when control returns from the runtime. /// - /// This commits all dangling transaction left open by the runtime. + /// This rolls back all dangling transactions left open by the runtime. /// Calling this while already outside the runtime will return an error.
pub fn exit_runtime(&mut self) -> Result<(), NotInRuntime> { if let ExecutionMode::Client = self.execution_mode { diff --git a/substrate/primitives/state-machine/src/overlayed_changes/mod.rs b/substrate/primitives/state-machine/src/overlayed_changes/mod.rs index 75fd0a028fbe..6e7a5132cde9 100644 --- a/substrate/primitives/state-machine/src/overlayed_changes/mod.rs +++ b/substrate/primitives/state-machine/src/overlayed_changes/mod.rs @@ -349,7 +349,7 @@ impl OverlayedChanges { /// `None` can be used to delete a value specified by the given key. /// /// Can be rolled back or committed when called inside a transaction. - pub(crate) fn set_child_storage( + pub fn set_child_storage( &mut self, child_info: &ChildInfo, key: StorageKey, @@ -374,7 +374,7 @@ impl OverlayedChanges { /// Clear child storage of given storage key. /// /// Can be rolled back or committed when called inside a transaction. - pub(crate) fn clear_child_storage(&mut self, child_info: &ChildInfo) -> u32 { + pub fn clear_child_storage(&mut self, child_info: &ChildInfo) -> u32 { self.mark_dirty(); let extrinsic_index = self.extrinsic_index(); @@ -392,7 +392,7 @@ impl OverlayedChanges { /// Removes all key-value pairs which keys share the given prefix. /// /// Can be rolled back or committed when called inside a transaction. - pub(crate) fn clear_prefix(&mut self, prefix: &[u8]) -> u32 { + pub fn clear_prefix(&mut self, prefix: &[u8]) -> u32 { self.mark_dirty(); self.top.clear_where(|key, _| key.starts_with(prefix), self.extrinsic_index()) @@ -401,7 +401,7 @@ impl OverlayedChanges { /// Removes all key-value pairs which keys share the given prefix. /// /// Can be rolled back or committed when called inside a transaction - pub(crate) fn clear_child_prefix(&mut self, child_info: &ChildInfo, prefix: &[u8]) -> u32 { + pub fn clear_child_prefix(&mut self, child_info: &ChildInfo, prefix: &[u8]) -> u32 { self.mark_dirty(); let extrinsic_index = self.extrinsic_index(); @@ -499,7 +499,7 @@ impl OverlayedChanges { /// Call this when control returns from the runtime. /// - /// This commits all dangling transaction left open by the runtime. + /// This rolls back all dangling transactions left open by the runtime. /// Calling this while outside the runtime will return an error.
pub fn exit_runtime(&mut self) -> Result<(), NotInRuntime> { self.top.exit_runtime()?; diff --git a/substrate/primitives/std/Cargo.toml b/substrate/primitives/std/Cargo.toml index eae37c6dfe37..f349a7b11968 100644 --- a/substrate/primitives/std/Cargo.toml +++ b/substrate/primitives/std/Cargo.toml @@ -10,6 +10,9 @@ description = "Lowest-abstraction level for the Substrate runtime: just exports documentation = "https://docs.rs/sp-std" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/storage/Cargo.toml b/substrate/primitives/storage/Cargo.toml index b7ff48cdd635..32f59b04a12a 100644 --- a/substrate/primitives/storage/Cargo.toml +++ b/substrate/primitives/storage/Cargo.toml @@ -10,6 +10,9 @@ repository.workspace = true documentation = "https://docs.rs/sp-storage/" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -17,7 +20,7 @@ targets = ["x86_64-unknown-linux-gnu"] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } impl-serde = { version = "0.4.0", optional = true, default-features = false } ref-cast = "1.0.0" -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } sp-debug-derive = { path = "../debug-derive", default-features = false } sp-std = { path = "../std", default-features = false } diff --git a/substrate/primitives/storage/src/lib.rs b/substrate/primitives/storage/src/lib.rs index 256f5b77f053..e21804df0e58 100644 --- a/substrate/primitives/storage/src/lib.rs +++ b/substrate/primitives/storage/src/lib.rs @@ -415,12 +415,13 @@ impl ChildTrieParentKeyId { /// /// V0 and V1 uses a same trie implementation, but V1 will write external value node in the trie for /// value with size at least `TRIE_VALUE_NODE_THRESHOLD`. -#[derive(Debug, Clone, Copy, Eq, PartialEq)] +#[derive(Debug, Default, Clone, Copy, Eq, PartialEq)] #[cfg_attr(feature = "std", derive(Encode, Decode))] pub enum StateVersion { /// Old state version, no value nodes. V0 = 0, /// New state version can use value nodes. 
+ #[default] V1 = 1, } @@ -433,12 +434,6 @@ impl Display for StateVersion { } } -impl Default for StateVersion { - fn default() -> Self { - StateVersion::V1 - } -} - impl From for u8 { fn from(version: StateVersion) -> u8 { version as u8 diff --git a/substrate/primitives/test-primitives/Cargo.toml b/substrate/primitives/test-primitives/Cargo.toml index 0f2a399bffb4..3649217cf74e 100644 --- a/substrate/primitives/test-primitives/Cargo.toml +++ b/substrate/primitives/test-primitives/Cargo.toml @@ -8,13 +8,16 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["derive"], optional = true } sp-application-crypto = { path = "../application-crypto", default-features = false } sp-core = { path = "../core", default-features = false } sp-runtime = { path = "../runtime", default-features = false } diff --git a/substrate/primitives/timestamp/Cargo.toml b/substrate/primitives/timestamp/Cargo.toml index 41afab0dcc22..b61f36f2056b 100644 --- a/substrate/primitives/timestamp/Cargo.toml +++ b/substrate/primitives/timestamp/Cargo.toml @@ -9,11 +9,14 @@ repository.workspace = true description = "Substrate core types and inherents for timestamps." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -async-trait = { version = "0.1.57", optional = true } +async-trait = { version = "0.1.74", optional = true } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } thiserror = { version = "1.0.48", optional = true } sp-inherents = { path = "../inherents", default-features = false } diff --git a/substrate/primitives/tracing/Cargo.toml b/substrate/primitives/tracing/Cargo.toml index ed6d05c0e3b1..c4acebd17aa2 100644 --- a/substrate/primitives/tracing/Cargo.toml +++ b/substrate/primitives/tracing/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Instrumentation primitives and macros for Substrate." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] # let's default to wasm32 default-target = "wasm32-unknown-unknown" diff --git a/substrate/primitives/transaction-pool/Cargo.toml b/substrate/primitives/transaction-pool/Cargo.toml index 136d32002026..6e66910ac388 100644 --- a/substrate/primitives/transaction-pool/Cargo.toml +++ b/substrate/primitives/transaction-pool/Cargo.toml @@ -10,6 +10,9 @@ description = "Transaction pool runtime facing API." 
documentation = "https://docs.rs/sp-transaction-pool" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/trie/Cargo.toml b/substrate/primitives/trie/Cargo.toml index db87df37ddce..79ed5c200009 100644 --- a/substrate/primitives/trie/Cargo.toml +++ b/substrate/primitives/trie/Cargo.toml @@ -10,6 +10,9 @@ homepage = "https://substrate.io" documentation = "https://docs.rs/sp-trie" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -20,7 +23,6 @@ harness = false [dependencies] ahash = { version = "0.8.2", optional = true } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false } -hashbrown = { version = "0.14", optional = true } hash-db = { version = "0.16.0", default-features = false } lazy_static = { version = "1.4.0", optional = true } memory-db = { version = "0.32.0", default-features = false } @@ -50,7 +52,6 @@ std = [ "ahash", "codec/std", "hash-db/std", - "hashbrown", "lazy_static", "memory-db/std", "nohash-hasher", diff --git a/substrate/primitives/trie/src/cache/shared_cache.rs b/substrate/primitives/trie/src/cache/shared_cache.rs index 1868d22a71e2..53396e055ce3 100644 --- a/substrate/primitives/trie/src/cache/shared_cache.rs +++ b/substrate/primitives/trie/src/cache/shared_cache.rs @@ -20,11 +20,11 @@ ///! that combines both caches and is exported to the outside. use super::{CacheSize, NodeCached}; use hash_db::Hasher; -use hashbrown::{hash_set::Entry as SetEntry, HashSet}; use nohash_hasher::BuildNoHashHasher; use parking_lot::{Mutex, RwLock, RwLockWriteGuard}; use schnellru::LruMap; use std::{ + collections::{hash_map::Entry as SetEntry, HashMap}, hash::{BuildHasher, Hasher as _}, sync::Arc, }; @@ -149,7 +149,7 @@ pub struct SharedValueCacheLimiter { heap_size: usize, /// A set with all of the keys deduplicated to save on memory. - known_storage_keys: HashSet>, + known_storage_keys: HashMap, (), ahash::RandomState>, /// A counter with the number of elements that got evicted from the cache. /// @@ -190,10 +190,10 @@ where } self.heap_size += new_item_heap_size; - entry.insert(); + entry.insert(()); }, SetEntry::Occupied(entry) => { - key.storage_key = entry.get().clone(); + key.storage_key = entry.key().clone(); }, } @@ -492,7 +492,7 @@ impl> SharedValueCache { max_inline_size, max_heap_size, heap_size: 0, - known_storage_keys: Default::default(), + known_storage_keys: HashMap::with_hasher(RANDOM_STATE.clone()), items_evicted: 0, max_items_evicted: 0, // Will be set during `update`. }, @@ -779,7 +779,9 @@ mod tests { assert_eq!(1, cache.lru.limiter_mut().known_storage_keys.len()); assert_eq!( 3, // Two instances inside the cache + one extra in `known_storage_keys`. 
- Arc::strong_count(cache.lru.limiter_mut().known_storage_keys.get(&key[..]).unwrap()) + Arc::strong_count( + cache.lru.limiter_mut().known_storage_keys.get_key_value(&key[..]).unwrap().0 + ) ); assert_eq!(key.len(), cache.lru.limiter().heap_size); assert_eq!(cache.lru.len(), 2); @@ -793,7 +795,9 @@ mod tests { assert_eq!(1, cache.lru.limiter_mut().known_storage_keys.len()); assert_eq!( 3, - Arc::strong_count(cache.lru.limiter_mut().known_storage_keys.get(&key[..]).unwrap()) + Arc::strong_count( + cache.lru.limiter_mut().known_storage_keys.get_key_value(&key[..]).unwrap().0 + ) ); assert_eq!(key.len(), cache.lru.limiter().heap_size); assert_eq!(cache.lru.len(), 2); @@ -813,7 +817,9 @@ mod tests { assert_eq!(1, cache.lru.limiter_mut().known_storage_keys.len()); assert_eq!( 3, - Arc::strong_count(cache.lru.limiter_mut().known_storage_keys.get(&key[..]).unwrap()) + Arc::strong_count( + cache.lru.limiter_mut().known_storage_keys.get_key_value(&key[..]).unwrap().0 + ) ); assert_eq!(key.len(), cache.lru.limiter().heap_size); assert_eq!(cache.lru.len(), 2); @@ -834,7 +840,7 @@ mod tests { assert_eq!(cache.lru.limiter().items_evicted, 2); assert_eq!(10, cache.lru.len()); assert_eq!(10, cache.lru.limiter_mut().known_storage_keys.len()); - assert!(cache.lru.limiter_mut().known_storage_keys.get(&key[..]).is_none()); + assert!(cache.lru.limiter_mut().known_storage_keys.get_key_value(&key[..]).is_none()); assert_eq!(key.len() * 10, cache.lru.limiter().heap_size); assert_eq!(cache.lru.len(), 10); assert!(cache.lru.limiter().heap_size <= cache.lru.limiter().max_heap_size); @@ -855,6 +861,6 @@ mod tests { vec![], ); - assert!(cache.lru.limiter_mut().known_storage_keys.get(&key[..]).is_none()); + assert!(cache.lru.limiter_mut().known_storage_keys.get_key_value(&key[..]).is_none()); } } diff --git a/substrate/primitives/version/Cargo.toml b/substrate/primitives/version/Cargo.toml index 9860ef54c2db..1ceda4e700f8 100644 --- a/substrate/primitives/version/Cargo.toml +++ b/substrate/primitives/version/Cargo.toml @@ -10,6 +10,9 @@ description = "Version module for the Substrate runtime; Provides a function tha documentation = "https://docs.rs/sp-version" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -18,7 +21,7 @@ codec = { package = "parity-scale-codec", version = "3.6.1", default-features = impl-serde = { version = "0.4.0", default-features = false, optional = true } parity-wasm = { version = "0.45", optional = true } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, features = ["alloc", "derive"], optional = true } +serde = { version = "1.0.195", default-features = false, features = ["alloc", "derive"], optional = true } thiserror = { version = "1.0.48", optional = true } sp-core-hashing-proc-macro = { path = "../core/hashing/proc-macro" } sp-runtime = { path = "../runtime", default-features = false } diff --git a/substrate/primitives/version/proc-macro/Cargo.toml b/substrate/primitives/version/proc-macro/Cargo.toml index 715316b842dc..adf70dbd1661 100644 --- a/substrate/primitives/version/proc-macro/Cargo.toml +++ b/substrate/primitives/version/proc-macro/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Macro for defining a runtime version." 
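For context on the `shared_cache.rs` hunks above: the `hashbrown::HashSet` of deduplicated `Arc<[u8]>` storage keys is replaced by a std `HashMap` with `()` values (the PR pairs it with `ahash::RandomState`; the sketch below uses the std hasher to stay dependency-free). The entry API deduplicates keys, and `get_key_value` exposes the stored `Arc` so its reference count can be inspected, which is what the updated tests do. A simplified, hypothetical example:

```rust
use std::{
    collections::{hash_map::Entry, HashMap},
    sync::Arc,
};

/// Return the canonical, deduplicated `Arc` for `key`, inserting it on first sight.
fn dedup_key(known: &mut HashMap<Arc<[u8]>, ()>, key: Arc<[u8]>) -> Arc<[u8]> {
    match known.entry(key) {
        // First time we see this key: clone the Arc held by the entry, then store it.
        Entry::Vacant(entry) => {
            let canonical = entry.key().clone();
            entry.insert(());
            canonical
        },
        // Already known: reuse the Arc stored in the map instead of the incoming one.
        Entry::Occupied(entry) => entry.key().clone(),
    }
}

fn main() {
    let mut known: HashMap<Arc<[u8]>, ()> = HashMap::new();
    let a = dedup_key(&mut known, Arc::from(&b"storage-key"[..]));
    let b = dedup_key(&mut known, Arc::from(&b"storage-key"[..]));
    // Both callers share the single allocation stored in the map.
    assert!(Arc::ptr_eq(&a, &b));
    // `get_key_value` gives access to the stored key itself, e.g. to check its refcount.
    let stored = known.get_key_value(&b"storage-key"[..]).unwrap().0;
    assert_eq!(Arc::strong_count(stored), 3); // `a`, `b`, and the map's own copy.
}
```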
documentation = "https://docs.rs/sp-api-proc-macro" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -19,7 +22,7 @@ proc-macro = true codec = { package = "parity-scale-codec", version = "3.6.1", features = ["derive"] } proc-macro2 = "1.0.56" quote = "1.0.28" -syn = { version = "2.0.39", features = ["extra-traits", "fold", "full", "visit"] } +syn = { version = "2.0.48", features = ["extra-traits", "fold", "full", "visit"] } [dev-dependencies] sp-version = { path = ".." } diff --git a/substrate/primitives/wasm-interface/Cargo.toml b/substrate/primitives/wasm-interface/Cargo.toml index f7d89f4378f1..57c0ea046052 100644 --- a/substrate/primitives/wasm-interface/Cargo.toml +++ b/substrate/primitives/wasm-interface/Cargo.toml @@ -10,6 +10,9 @@ description = "Types and traits for interfacing between the host and the wasm ru documentation = "https://docs.rs/sp-wasm-interface" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/primitives/weights/Cargo.toml b/substrate/primitives/weights/Cargo.toml index 163b241276ce..d89182b6642c 100644 --- a/substrate/primitives/weights/Cargo.toml +++ b/substrate/primitives/weights/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Types and traits for interfacing between the host and the wasm runtime." documentation = "https://docs.rs/sp-wasm-interface" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -16,11 +19,12 @@ targets = ["x86_64-unknown-linux-gnu"] bounded-collections = { version = "0.1.4", default-features = false } codec = { package = "parity-scale-codec", version = "3.6.1", default-features = false, features = ["derive"] } scale-info = { version = "2.10.0", default-features = false, features = ["derive"] } -serde = { version = "1.0.193", default-features = false, optional = true, features = ["alloc", "derive"] } +serde = { version = "1.0.195", default-features = false, optional = true, features = ["alloc", "derive"] } smallvec = "1.11.0" sp-arithmetic = { path = "../arithmetic", default-features = false } sp-debug-derive = { path = "../debug-derive", default-features = false } sp-std = { path = "../std", default-features = false } +schemars = { version = "0.8.3", default-features = false, optional = true } [features] default = ["std"] @@ -44,3 +48,7 @@ serde = [ "scale-info/serde", "sp-arithmetic/serde", ] + +json-schema = [ + "dep:schemars", +] diff --git a/substrate/primitives/weights/src/weight_v2.rs b/substrate/primitives/weights/src/weight_v2.rs index eb461657b2de..c37a520dd88b 100644 --- a/substrate/primitives/weights/src/weight_v2.rs +++ b/substrate/primitives/weights/src/weight_v2.rs @@ -24,6 +24,7 @@ use super::*; #[derive(Encode, Decode, MaxEncodedLen, TypeInfo, Eq, PartialEq, Copy, Clone, Debug, Default)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] +#[cfg_attr(feature = "json-schema", derive(schemars::JsonSchema))] pub struct Weight { #[codec(compact)] /// The weight of computational time used based on some reference hardware. 
diff --git a/substrate/scripts/ci/node-template-release/Cargo.toml b/substrate/scripts/ci/node-template-release/Cargo.toml index 0085be859305..dfe851f5f5fe 100644 --- a/substrate/scripts/ci/node-template-release/Cargo.toml +++ b/substrate/scripts/ci/node-template-release/Cargo.toml @@ -7,11 +7,14 @@ license = "GPL-3.0 WITH Classpath-exception-2.0" homepage = "https://substrate.io" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } flate2 = "1.0" fs_extra = "1.3" glob = "0.3" diff --git a/substrate/test-utils/Cargo.toml b/substrate/test-utils/Cargo.toml index d8901f047b52..26c6e6c033f2 100644 --- a/substrate/test-utils/Cargo.toml +++ b/substrate/test-utils/Cargo.toml @@ -9,9 +9,12 @@ repository.workspace = true description = "Substrate test utilities" publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dev-dependencies] -trybuild = { version = "1.0.74", features = ["diff"] } +trybuild = { version = "1.0.88", features = ["diff"] } sc-service = { path = "../client/service" } diff --git a/substrate/test-utils/cli/Cargo.toml b/substrate/test-utils/cli/Cargo.toml index bd3fb53e7322..d8401c9363ec 100644 --- a/substrate/test-utils/cli/Cargo.toml +++ b/substrate/test-utils/cli/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/test-utils/client/Cargo.toml b/substrate/test-utils/client/Cargo.toml index b8222a015dca..8fad78381993 100644 --- a/substrate/test-utils/client/Cargo.toml +++ b/substrate/test-utils/client/Cargo.toml @@ -9,16 +9,19 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] array-bytes = "6.1" -async-trait = "0.1.57" +async-trait = "0.1.74" codec = { package = "parity-scale-codec", version = "3.6.1" } futures = "0.3.21" -serde = "1.0.193" -serde_json = "1.0.108" +serde = "1.0.195" +serde_json = "1.0.111" sc-client-api = { path = "../../client/api" } sc-client-db = { path = "../../client/db", default-features = false, features = [ "test-helpers", diff --git a/substrate/test-utils/runtime/Cargo.toml b/substrate/test-utils/runtime/Cargo.toml index 5fc4b506dc5e..6a9ea89bbcdd 100644 --- a/substrate/test-utils/runtime/Cargo.toml +++ b/substrate/test-utils/runtime/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -58,8 +61,8 @@ sp-consensus = { path = "../../primitives/consensus/common" } substrate-test-runtime-client = { path = "client" } sp-tracing = { path = "../../primitives/tracing" } json-patch = { version = "1.0.0", default-features = false } -serde = { version = "1.0.193", features = ["alloc", "derive"], default-features = false } -serde_json = { version = "1.0.108", default-features = false, features = ["alloc"] } +serde = { version = "1.0.195", features = ["alloc", "derive"], default-features = false } +serde_json = { version = "1.0.111", default-features = false, features = ["alloc"] } [build-dependencies] substrate-wasm-builder = { path = "../../utils/wasm-builder", optional = true } 
diff --git a/substrate/test-utils/runtime/client/Cargo.toml b/substrate/test-utils/runtime/client/Cargo.toml index 40cfa8ab1b70..cbb964f67852 100644 --- a/substrate/test-utils/runtime/client/Cargo.toml +++ b/substrate/test-utils/runtime/client/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/test-utils/runtime/src/lib.rs b/substrate/test-utils/runtime/src/lib.rs index 303b805b3110..998f0131b0c6 100644 --- a/substrate/test-utils/runtime/src/lib.rs +++ b/substrate/test-utils/runtime/src/lib.rs @@ -28,7 +28,7 @@ pub mod substrate_test_pallet; use codec::{Decode, Encode}; use frame_support::{ - construct_runtime, + construct_runtime, derive_impl, dispatch::DispatchClass, genesis_builder_helper::{build_config, create_default_config}, parameter_types, @@ -328,6 +328,7 @@ parameter_types! { .build_or_panic(); } +#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)] impl frame_system::pallet::Config for Runtime { type BaseCallFilter = frame_support::traits::Everything; type BlockWeights = RuntimeBlockWeights; diff --git a/substrate/test-utils/runtime/transaction-pool/Cargo.toml b/substrate/test-utils/runtime/transaction-pool/Cargo.toml index cb6ee6d79f44..b52a897438b6 100644 --- a/substrate/test-utils/runtime/transaction-pool/Cargo.toml +++ b/substrate/test-utils/runtime/transaction-pool/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true publish = false +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/utils/build-script-utils/Cargo.toml b/substrate/utils/build-script-utils/Cargo.toml index ab15d5552c29..464647ea723e 100644 --- a/substrate/utils/build-script-utils/Cargo.toml +++ b/substrate/utils/build-script-utils/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Crate with utility functions for `build.rs` scripts." readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/utils/build-script-utils/src/git.rs b/substrate/utils/build-script-utils/src/git.rs index b54ba15856e2..bbcbe0d45a8f 100644 --- a/substrate/utils/build-script-utils/src/git.rs +++ b/substrate/utils/build-script-utils/src/git.rs @@ -16,7 +16,13 @@ // See the License for the specific language governing permissions and // limitations under the License. -use std::{env, fs, fs::File, io, io::Read, path::PathBuf}; +use std::{ + env, fs, + fs::File, + io, + io::Read, + path::{Path, PathBuf}, +}; /// Make sure the calling `build.rs` script is rerun when `.git/HEAD` or the ref of `.git/HEAD` /// changed. 
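The `#[derive_impl(...)]` attribute added to the test runtime above pulls the remaining `frame_system::Config` associated types from `TestDefaultConfig`, so a runtime only has to spell out the types it actually overrides. A rough, hedged sketch of how a minimal mock runtime typically uses this attribute; the `Test` runtime and its items are illustrative, not part of this diff, and the exact prelude API can differ between SDK versions:

```rust
use frame_support::derive_impl;

// Hypothetical minimal mock runtime for tests.
type Block = frame_system::mocking::MockBlock<Test>;

frame_support::construct_runtime!(
    pub enum Test {
        System: frame_system,
    }
);

// `TestDefaultConfig` supplies defaults for most associated types; only the
// ones without a default (or deliberately overridden) need to be listed.
#[derive_impl(frame_system::config_preludes::TestDefaultConfig as frame_system::DefaultConfig)]
impl frame_system::Config for Test {
    type Block = Block;
}
```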
@@ -56,7 +62,7 @@ pub fn rerun_if_git_head_changed() { } // Code taken from https://github.com/rustyhorde/vergen/blob/8d522db8c8e16e26c0fc9ea8e6b0247cbf5cca84/src/output/envvar.rs -fn get_git_paths(path: &PathBuf) -> Result<Option<Vec<PathBuf>>, io::Error> { +fn get_git_paths(path: &Path) -> Result<Option<Vec<PathBuf>>, io::Error> { let git_dir_or_file = path.join(".git"); if let Ok(metadata) = fs::metadata(&git_dir_or_file) { diff --git a/substrate/utils/build-script-utils/src/version.rs b/substrate/utils/build-script-utils/src/version.rs index 5eeafe1189df..2e0db9acead5 100644 --- a/substrate/utils/build-script-utils/src/version.rs +++ b/substrate/utils/build-script-utils/src/version.rs @@ -26,7 +26,7 @@ pub fn generate_cargo_keys() { // We deliberately set the length here to `11` to ensure that // the emitted hash is always of the same length; otherwise // it can (and will!) vary between different build environments. - match Command::new("git").args(&["rev-parse", "--short=11", "HEAD"]).output() { + match Command::new("git").args(["rev-parse", "--short=11", "HEAD"]).output() { Ok(o) if o.status.success() => { let sha = String::from_utf8_lossy(&o.stdout).trim().to_owned(); Cow::from(sha) @@ -60,34 +60,3 @@ fn get_version(impl_commit: &str) -> String { impl_commit ) } - -/// Generate `SUBSTRATE_WASMTIME_VERSION` -pub fn generate_wasmtime_version() { - generate_dependency_version("wasmtime", "SUBSTRATE_WASMTIME_VERSION"); -} - -fn generate_dependency_version(dep: &str, env_var: &str) { - // we only care about the root - match std::process::Command::new("cargo") - .args(["tree", "--depth=0", "--locked", "--package", dep]) - .output() - { - Ok(output) if output.status.success() => { - let version = String::from_utf8_lossy(&output.stdout); - - // vX.X.X - if let Some(ver) = version.strip_prefix(&format!("{} v", dep)) { - println!("cargo:rustc-env={}={}", env_var, ver); - } else { - println!("cargo:warning=Unexpected result {}", version); - } - }, - - // command errors out when it could not find the given dependency - // or when having multiple versions of it - Ok(output) => - println!("cargo:warning=`cargo tree` {}", String::from_utf8_lossy(&output.stderr)), - - Err(err) => println!("cargo:warning=Could not run `cargo tree`: {}", err), - } -} diff --git a/substrate/utils/fork-tree/Cargo.toml b/substrate/utils/fork-tree/Cargo.toml index eea500641fe4..27bb908986f8 100644 --- a/substrate/utils/fork-tree/Cargo.toml +++ b/substrate/utils/fork-tree/Cargo.toml @@ -10,6 +10,9 @@ description = "Utility library for managing tree-like ordered data with logic fo documentation = "https://docs.rs/fork-tree" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/utils/frame/benchmarking-cli/Cargo.toml b/substrate/utils/frame/benchmarking-cli/Cargo.toml index 362e368dccaf..5b389202ccac 100644 --- a/substrate/utils/frame/benchmarking-cli/Cargo.toml +++ b/substrate/utils/frame/benchmarking-cli/Cargo.toml @@ -9,13 +9,16 @@ repository.workspace = true description = "CLI for benchmarking FRAME" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] array-bytes = "6.1" chrono = "0.4" -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } codec = { package = "parity-scale-codec", version = "3.6.1" } comfy-table = { version = "7.0.1", default-features = false } handlebars = "5" @@ -24,10 +27,10 @@ itertools = "0.12" lazy_static = "1.4.0"
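The `get_git_paths` signature change above follows the usual guideline of taking `&Path` instead of `&PathBuf`: the borrowed form is strictly more general (a `&PathBuf` derefs to `&Path`, but not the other way around) and does not force callers to own a `PathBuf`. A small illustrative sketch; the `has_git_dir` helper is made up for this example:

```rust
use std::path::{Path, PathBuf};

/// Hypothetical helper: accepts anything that can be viewed as a `Path`.
fn has_git_dir(path: &Path) -> bool {
    path.join(".git").exists()
}

fn main() {
    let owned: PathBuf = PathBuf::from(".");
    // Works with a borrowed `PathBuf` (deref coercion) ...
    let _ = has_git_dir(&owned);
    // ... and with a plain `&Path`, which a `&PathBuf`-taking function would reject.
    let _ = has_git_dir(Path::new("/tmp"));
}
```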
linked-hash-map = "0.5.4" log = "0.4.17" -rand = { version = "0.8.4", features = ["small_rng"] } +rand = { version = "0.8.5", features = ["small_rng"] } rand_pcg = "0.3.1" -serde = "1.0.193" -serde_json = "1.0.108" +serde = "1.0.195" +serde_json = "1.0.111" thiserror = "1.0.48" thousands = "0.2.0" frame-benchmarking = { path = "../../../frame/benchmarking" } diff --git a/substrate/utils/frame/frame-utilities-cli/Cargo.toml b/substrate/utils/frame/frame-utilities-cli/Cargo.toml index 6e33ed88e0a7..886e17280b1f 100644 --- a/substrate/utils/frame/frame-utilities-cli/Cargo.toml +++ b/substrate/utils/frame/frame-utilities-cli/Cargo.toml @@ -10,8 +10,11 @@ description = "cli interface for FRAME" documentation = "https://docs.rs/substrate-frame-cli" readme = "README.md" +[lints] +workspace = true + [dependencies] -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } frame-support = { path = "../../../frame/support" } frame-system = { path = "../../../frame/system" } sc-cli = { path = "../../../client/cli" } diff --git a/substrate/utils/frame/generate-bags/Cargo.toml b/substrate/utils/frame/generate-bags/Cargo.toml index ac22197c5ac4..4afb2a80b771 100644 --- a/substrate/utils/frame/generate-bags/Cargo.toml +++ b/substrate/utils/frame/generate-bags/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Bag threshold generation script for pallet-bag-list" +[lints] +workspace = true + [dependencies] # FRAME frame-support = { path = "../../../frame/support" } @@ -17,5 +20,5 @@ pallet-staking = { path = "../../../frame/staking" } sp-staking = { path = "../../../primitives/staking" } # third party -chrono = { version = "0.4.27" } +chrono = { version = "0.4.31" } num-format = "0.4.3" diff --git a/substrate/utils/frame/generate-bags/node-runtime/Cargo.toml b/substrate/utils/frame/generate-bags/node-runtime/Cargo.toml index a2ee3883786b..d2aaaff9a69b 100644 --- a/substrate/utils/frame/generate-bags/node-runtime/Cargo.toml +++ b/substrate/utils/frame/generate-bags/node-runtime/Cargo.toml @@ -9,9 +9,12 @@ repository.workspace = true description = "Bag threshold generation script for pallet-bag-list and kitchensink-runtime." publish = false +[lints] +workspace = true + [dependencies] kitchensink-runtime = { path = "../../../../bin/node/runtime" } generate-bags = { path = ".." 
} # third-party -clap = { version = "4.4.10", features = ["derive"] } +clap = { version = "4.4.14", features = ["derive"] } diff --git a/substrate/utils/frame/remote-externalities/Cargo.toml b/substrate/utils/frame/remote-externalities/Cargo.toml index 88071f7d634d..ba0e8e869ccc 100644 --- a/substrate/utils/frame/remote-externalities/Cargo.toml +++ b/substrate/utils/frame/remote-externalities/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "An externalities provided environment that can load itself from remote nodes or cached files" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -15,7 +18,7 @@ targets = ["x86_64-unknown-linux-gnu"] jsonrpsee = { version = "0.16.2", features = ["http-client"] } codec = { package = "parity-scale-codec", version = "3.6.1" } log = "0.4.17" -serde = "1.0.193" +serde = "1.0.195" sp-core = { path = "../../../primitives/core" } sp-state-machine = { path = "../../../primitives/state-machine" } sp-io = { path = "../../../primitives/io" } diff --git a/substrate/utils/frame/rpc/client/Cargo.toml b/substrate/utils/frame/rpc/client/Cargo.toml index d0f323c096ff..1e8a298726eb 100644 --- a/substrate/utils/frame/rpc/client/Cargo.toml +++ b/substrate/utils/frame/rpc/client/Cargo.toml @@ -8,13 +8,16 @@ homepage = "https://substrate.io" repository.workspace = true description = "Shared JSON-RPC client" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] [dependencies] jsonrpsee = { version = "0.16.2", features = ["ws-client"] } sc-rpc-api = { path = "../../../../client/rpc-api" } -async-trait = "0.1.57" +async-trait = "0.1.74" serde = "1" sp-runtime = { path = "../../../../primitives/runtime" } log = "0.4" diff --git a/substrate/utils/frame/rpc/state-trie-migration-rpc/Cargo.toml b/substrate/utils/frame/rpc/state-trie-migration-rpc/Cargo.toml index 6d3cb545efb6..6cd99e5a6fed 100644 --- a/substrate/utils/frame/rpc/state-trie-migration-rpc/Cargo.toml +++ b/substrate/utils/frame/rpc/state-trie-migration-rpc/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "Node-specific RPC methods for interaction with state trie migration." 
readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -29,4 +32,4 @@ sc-rpc-api = { path = "../../../../client/rpc-api" } sp-runtime = { path = "../../../../primitives/runtime" } [dev-dependencies] -serde_json = "1.0.108" +serde_json = "1.0.111" diff --git a/substrate/utils/frame/rpc/support/Cargo.toml b/substrate/utils/frame/rpc/support/Cargo.toml index da56297c82fb..1cc6d8e98b36 100644 --- a/substrate/utils/frame/rpc/support/Cargo.toml +++ b/substrate/utils/frame/rpc/support/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Substrate RPC for FRAME's support" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/utils/frame/rpc/support/src/lib.rs b/substrate/utils/frame/rpc/support/src/lib.rs index 6b307fbfc2fe..66d670f3d6b3 100644 --- a/substrate/utils/frame/rpc/support/src/lib.rs +++ b/substrate/utils/frame/rpc/support/src/lib.rs @@ -64,6 +64,7 @@ use sp_storage::{StorageData, StorageKey}; /// # type Lookup = IdentityLookup; /// # type Block = frame_system::mocking::MockBlock; /// # type RuntimeEvent = RuntimeEvent; +/// # type RuntimeTask = RuntimeTask; /// # type BlockHashCount = (); /// # type DbWeight = (); /// # type Version = (); diff --git a/substrate/utils/frame/rpc/system/Cargo.toml b/substrate/utils/frame/rpc/system/Cargo.toml index 636f2cd0485d..84c3265c93d3 100644 --- a/substrate/utils/frame/rpc/system/Cargo.toml +++ b/substrate/utils/frame/rpc/system/Cargo.toml @@ -9,6 +9,9 @@ repository.workspace = true description = "FRAME's system exposed over Substrate RPC" readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/utils/frame/try-runtime/cli/Cargo.toml b/substrate/utils/frame/try-runtime/cli/Cargo.toml index dbc0db3b99f6..853daed10368 100644 --- a/substrate/utils/frame/try-runtime/cli/Cargo.toml +++ b/substrate/utils/frame/try-runtime/cli/Cargo.toml @@ -8,6 +8,9 @@ homepage = "https://substrate.io" repository.workspace = true description = "Cli command runtime testing and dry-running" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -32,13 +35,13 @@ sp-weights = { path = "../../../../primitives/weights" } frame-try-runtime = { path = "../../../../frame/try-runtime", optional = true } substrate-rpc-client = { path = "../../rpc/client" } -async-trait = "0.1.57" -clap = { version = "4.4.10", features = ["derive"] } +async-trait = "0.1.74" +clap = { version = "4.4.14", features = ["derive"] } hex = { version = "0.4.3", default-features = false } log = "0.4.17" parity-scale-codec = "3.6.1" -serde = "1.0.193" -serde_json = "1.0.108" +serde = "1.0.195" +serde_json = "1.0.111" zstd = { version = "0.13", default-features = false } [dev-dependencies] diff --git a/substrate/utils/prometheus/Cargo.toml b/substrate/utils/prometheus/Cargo.toml index bf999a66111f..252998d94bd1 100644 --- a/substrate/utils/prometheus/Cargo.toml +++ b/substrate/utils/prometheus/Cargo.toml @@ -9,6 +9,9 @@ homepage = "https://substrate.io" repository.workspace = true readme = "README.md" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] diff --git a/substrate/utils/wasm-builder/Cargo.toml b/substrate/utils/wasm-builder/Cargo.toml index 081a1a91340a..d70afe59d5ff 100644 --- a/substrate/utils/wasm-builder/Cargo.toml +++ 
b/substrate/utils/wasm-builder/Cargo.toml @@ -8,6 +8,9 @@ repository.workspace = true license = "Apache-2.0" homepage = "https://substrate.io" +[lints] +workspace = true + [package.metadata.docs.rs] targets = ["x86_64-unknown-linux-gnu"] @@ -16,7 +19,7 @@ anstyle = "1" cargo_metadata = "0.18" strum = { version = "0.25", features = ["derive"] } tempfile = "3.1.0" -toml = "0.8" +toml = "0.8.2" walkdir = "2.3.2" sp-maybe-compressed-blob = { path = "../../primitives/maybe-compressed-blob" } wasm-opt = "0.116" diff --git a/substrate/utils/wasm-builder/src/wasm_project.rs b/substrate/utils/wasm-builder/src/wasm_project.rs index c5cf3a64697a..28ea2ef11e2d 100644 --- a/substrate/utils/wasm-builder/src/wasm_project.rs +++ b/substrate/utils/wasm-builder/src/wasm_project.rs @@ -939,7 +939,7 @@ fn generate_rerun_if_changed_instructions( while let Some(dependency) = dependencies.pop() { // Ignore all dev dependencies if dependency.kind == DependencyKind::Development { - continue; + continue } let path_or_git_dep =