From 79d983fd333f2f7597a69f32e29fa5a4e6531959 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 08:30:36 -0700 Subject: [PATCH 01/11] chore(deps): bump codecov/codecov-action from 3.1.4 to 3.1.5 (#3730) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build.yml | 2 +- .github/workflows/unit-tests.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index eb3f8e7c84..e6cf4d3b79 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -103,7 +103,7 @@ jobs: - name: Generate coverage report run: | go test ./... -coverprofile=coverage.out -covermode=atomic -timeout=20m - - uses: codecov/codecov-action@v3.1.4 + - uses: codecov/codecov-action@v3.1.5 with: files: ./coverage.out flags: unit-tests diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index c24efcc825..fd526ff41c 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -89,7 +89,7 @@ jobs: - name: Test - Race run: make test-using-race-detector - - uses: codecov/codecov-action@v3.1.4 + - uses: codecov/codecov-action@v3.1.5 with: if_ci_failed: success informational: true From 484aa761a71d296c3d9c9adc7936550cddf02114 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 08:46:57 -0700 Subject: [PATCH 02/11] chore(deps): bump styfle/cancel-workflow-action from 0.12.0 to 0.12.1 (#3729) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/checks.yml | 2 +- .github/workflows/copyright.yml | 2 +- .github/workflows/devnet.yml | 2 +- .github/workflows/docker-grandpa.yml | 2 +- .github/workflows/docker-js.yml | 2 +- .github/workflows/docker-rpc.yml | 2 +- .github/workflows/docker-stress.yml | 2 +- .github/workflows/fuzz.yml | 2 +- .github/workflows/integration-tests.yml | 2 +- .github/workflows/mocks.yml | 2 +- .github/workflows/unit-tests.yml | 2 +- 11 files changed, 11 insertions(+), 11 deletions(-) diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 0dbb6a9c28..ae2a166c2d 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -17,7 +17,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/copyright.yml b/.github/workflows/copyright.yml index d2d9d67cfd..021bed83ee 100644 --- a/.github/workflows/copyright.yml +++ b/.github/workflows/copyright.yml @@ -15,7 +15,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/devnet.yml b/.github/workflows/devnet.yml index 5f4ff7adce..5be40215f0 100644 --- a/.github/workflows/devnet.yml +++ b/.github/workflows/devnet.yml @@ -9,7 +9,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/docker-grandpa.yml b/.github/workflows/docker-grandpa.yml index cb9be47e5c..c43b86c1c6 
100644 --- a/.github/workflows/docker-grandpa.yml +++ b/.github/workflows/docker-grandpa.yml @@ -23,7 +23,7 @@ jobs: DOCKER_BUILDKIT: "1" steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/docker-js.yml b/.github/workflows/docker-js.yml index e5cf669fb8..d2cee44ef3 100644 --- a/.github/workflows/docker-js.yml +++ b/.github/workflows/docker-js.yml @@ -23,7 +23,7 @@ jobs: DOCKER_BUILDKIT: "1" steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/docker-rpc.yml b/.github/workflows/docker-rpc.yml index 0053f507da..93d1dff46f 100644 --- a/.github/workflows/docker-rpc.yml +++ b/.github/workflows/docker-rpc.yml @@ -23,7 +23,7 @@ jobs: DOCKER_BUILDKIT: "1" steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/docker-stress.yml b/.github/workflows/docker-stress.yml index 974a6c1709..84a3205d14 100644 --- a/.github/workflows/docker-stress.yml +++ b/.github/workflows/docker-stress.yml @@ -23,7 +23,7 @@ jobs: DOCKER_BUILDKIT: "1" steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index b818a3eedb..c3d4d6a3d6 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -8,7 +8,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 5300f79757..81f766de04 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -38,7 +38,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/mocks.yml b/.github/workflows/mocks.yml index 960e01841c..7e9519e52e 100644 --- a/.github/workflows/mocks.yml +++ b/.github/workflows/mocks.yml @@ -14,7 +14,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index fd526ff41c..b15d381ea4 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -23,7 +23,7 @@ jobs: runs-on: buildjet-4vcpu-ubuntu-2204 steps: - name: Cancel Previous Runs - uses: styfle/cancel-workflow-action@0.12.0 + uses: styfle/cancel-workflow-action@0.12.1 with: all_but_latest: true From a8241092d09afddeecb923d5fb19e58917b8edce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 09:15:33 -0700 Subject: [PATCH 03/11] chore(deps): bump github.com/klauspost/compress from 1.17.4 to 1.17.5 (#3728) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 
insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 10d0d1cd13..4fedcfb5a4 100644 --- a/go.mod +++ b/go.mod @@ -24,7 +24,7 @@ require ( github.com/gtank/merlin v0.1.1 github.com/ipfs/go-ds-badger2 v0.1.3 github.com/jpillora/ipfilter v1.2.9 - github.com/klauspost/compress v1.17.4 + github.com/klauspost/compress v1.17.5 github.com/libp2p/go-libp2p v0.31.0 github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/minio/sha256-simd v1.0.1 diff --git a/go.sum b/go.sum index dc31fb80a8..46fcc83819 100644 --- a/go.sum +++ b/go.sum @@ -388,8 +388,8 @@ github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23/go.mod h1:J+Gs4SYgM6 github.com/klauspost/compress v1.8.2/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.9.7/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A= github.com/klauspost/compress v1.12.3/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg= -github.com/klauspost/compress v1.17.4 h1:Ej5ixsIri7BrIjBkRZLTo6ghwrEtHFk7ijlczPW4fZ4= -github.com/klauspost/compress v1.17.4/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= +github.com/klauspost/compress v1.17.5 h1:d4vBd+7CHydUqpFBgUEKkSdtSugf9YFmSkvUYPquI5E= +github.com/klauspost/compress v1.17.5/go.mod h1:/dCuZOvVtNoHsyb+cuJD3itjs3NbnF6KH9zAO4BDxPM= github.com/klauspost/cpuid v1.2.1/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek= github.com/klauspost/cpuid/v2 v2.2.6 h1:ndNyv040zDGIDh8thGkXYjnFtiN02M1PVVF+JE/48xc= github.com/klauspost/cpuid/v2 v2.2.6/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws= From 4566b1468cb57a3ef8e75633e486635a942ac10a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 11:17:24 -0700 Subject: [PATCH 04/11] chore(deps): bump github.com/google/uuid from 1.5.0 to 1.6.0 (#3727) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 4fedcfb5a4..5b62f79ef5 100644 --- a/go.mod +++ b/go.mod @@ -17,7 +17,7 @@ require ( github.com/fatih/color v1.16.0 github.com/go-playground/validator/v10 v10.17.0 github.com/google/go-cmp v0.6.0 - github.com/google/uuid v1.5.0 + github.com/google/uuid v1.6.0 github.com/gorilla/mux v1.8.1 github.com/gorilla/rpc v1.2.1 github.com/gorilla/websocket v1.5.1 diff --git a/go.sum b/go.sum index 46fcc83819..87b7ca65fe 100644 --- a/go.sum +++ b/go.sum @@ -285,8 +285,8 @@ github.com/google/pprof v0.0.0-20230821062121-407c9e7a662f/go.mod h1:czg5+yv1E0Z github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/google/uuid v1.5.0 h1:1p67kYwdtXjb0gL0BPiP1Av9wiZPo5A8z2cWkTZ+eyU= -github.com/google/uuid v1.5.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go v2.0.0+incompatible/go.mod h1:SFVmujtThgffbyetf+mdk2eWhX2bMyUtNHzFKcPA9HY= github.com/googleapis/gax-go/v2 v2.0.3/go.mod h1:LLvjysVCY1JZeum8Z6l8qUty8fiNwE08qbEPm1M08qg= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= From 5ccea40bc20b2eece2fa00d41f312e9c5f2ffc5f 
Mon Sep 17 00:00:00 2001 From: JimboJ <40345116+jimjbrettj@users.noreply.github.com> Date: Mon, 29 Jan 2024 11:36:11 -0700 Subject: [PATCH 05/11] feat(tests/scripts): create script to retrieve trie state via rpc (#3714) --- scripts/trie_state_script.go | 138 ++++++++++++++++++++++++++++++ scripts/trie_state_script_test.go | 126 +++++++++++++++++++++++++++ 2 files changed, 264 insertions(+) create mode 100644 scripts/trie_state_script.go create mode 100644 scripts/trie_state_script_test.go diff --git a/scripts/trie_state_script.go b/scripts/trie_state_script.go new file mode 100644 index 0000000000..0348134c54 --- /dev/null +++ b/scripts/trie_state_script.go @@ -0,0 +1,138 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package main + +import ( + "context" + "encoding/json" + "fmt" + "os" + "time" + + "github.com/ChainSafe/gossamer/dot/rpc/modules" + "github.com/ChainSafe/gossamer/lib/common" + "github.com/ChainSafe/gossamer/lib/trie" + "github.com/ChainSafe/gossamer/pkg/scale" + "github.com/ChainSafe/gossamer/tests/utils/rpc" +) + +func fetchWithTimeout(ctx context.Context, + method, params string, target interface{}) { + + // Can adjust timeout as desired, default is very long + getResponseCtx, getResponseCancel := context.WithTimeout(ctx, 1000000*time.Second) + defer getResponseCancel() + err := getResponse(getResponseCtx, method, params, target) + if err != nil { + panic(fmt.Sprintf("error getting response %v", err)) + } +} + +func getResponse(ctx context.Context, method, params string, target interface{}) (err error) { + const rpcPort = "8545" + endpoint := rpc.NewEndpoint(rpcPort) + respBody, err := rpc.Post(ctx, endpoint, method, params) + if err != nil { + return fmt.Errorf("cannot RPC post: %w", err) + } + + err = rpc.Decode(respBody, &target) + if err != nil { + return fmt.Errorf("cannot decode RPC response: %w", err) + } + + return nil +} + +func writeTrieState(response modules.StateTrieResponse, destination string) { + encResponse, err := json.Marshal(response) + if err != nil { + panic(fmt.Sprintf("json marshalling response %v", err)) + } + + err = os.WriteFile(destination, encResponse, 0o600) + if err != nil { + panic(fmt.Sprintf("writing to file %v", err)) + } +} + +func fetchTrieState(ctx context.Context, blockHash common.Hash, destination string) modules.StateTrieResponse { + params := fmt.Sprintf(`["%s"]`, blockHash) + var response modules.StateTrieResponse + fetchWithTimeout(ctx, "state_trie", params, &response) + + writeTrieState(response, destination) + return response +} + +func compareStateRoots(response modules.StateTrieResponse, expectedStateRoot common.Hash, trieVersion trie.TrieLayout) { + entries := make(map[string]string, len(response)) + for _, encodedEntry := range response { + bytesEncodedEntry := common.MustHexToBytes(encodedEntry) + + entry := trie.Entry{} + err := scale.Unmarshal(bytesEncodedEntry, &entry) + if err != nil { + panic(fmt.Sprintf("error unmarshalling into trie entry %v", err)) + } + entries[common.BytesToHex(entry.Key)] = common.BytesToHex(entry.Value) + } + + newTrie, err := trie.LoadFromMap(entries) + if err != nil { + panic(fmt.Sprintf("loading trie from map %v", err)) + } + + trieHash := trieVersion.MustHash(newTrie) + if expectedStateRoot != trieHash { + panic("westendDevStateRoot does not match trieHash") + } +} + +/* +This is a script to query the trie state from a specific block height from a running node. + +Example commands to run a node: + + 1. 
./bin/gossamer init --chain westend-dev --key alice + + 2. ./bin/gossamer --chain westend-dev --key alice --rpc-external=true --unsafe-rpc=true + +Once the node has started and processed the block whose state you need, can execute the script like so: + 1. go run trieStateScript.go +*/ +func main() { + if len(os.Args) < 3 { + panic("expected more arguments, block hash and destination file required") + } + + blockHash, err := common.HexToHash(os.Args[1]) + if err != nil { + panic("block hash must be in hex format") + } + + destinationFile := os.Args[2] + expectedStateRoot := common.Hash{} + var trieVersion trie.TrieLayout + if len(os.Args) == 5 { + expectedStateRoot, err = common.HexToHash(os.Args[3]) + if err != nil { + panic("expected state root must be in hex format") + } + + trieVersion, err = trie.ParseVersion(os.Args[4]) + if err != nil { + panic("trie version must be an integer") + } + } else if len(os.Args) != 3 { + panic("invalid number of arguments") + } + + ctx, _ := context.WithCancel(context.Background()) //nolint + response := fetchTrieState(ctx, blockHash, destinationFile) + + if !expectedStateRoot.IsEmpty() { + compareStateRoots(response, expectedStateRoot, trieVersion) + } +} diff --git a/scripts/trie_state_script_test.go b/scripts/trie_state_script_test.go new file mode 100644 index 0000000000..06bbedf654 --- /dev/null +++ b/scripts/trie_state_script_test.go @@ -0,0 +1,126 @@ +// Copyright 2024 ChainSafe Systems (ON) +// SPDX-License-Identifier: LGPL-3.0-only + +package main + +import ( + "os" + "testing" + + "github.com/ChainSafe/gossamer/dot/rpc/modules" + "github.com/ChainSafe/gossamer/lib/common" + "github.com/ChainSafe/gossamer/lib/trie" + "github.com/stretchr/testify/require" +) + +// This is fake data used just for testing purposes +var testStateData = []string{"0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000", "0x801cb6f36e027abb2091cfb5110ab5087faacf00b9b41fda7a9268821c2a2b3e4ca404d43593c715fdd31c61141abd04a99fd6822c8558854ccde39a5684e7a56da27d0100000000000000"} //nolint + +func clean(t *testing.T, file string) { + t.Helper() + err := os.Remove(file) + require.NoError(t, err) +} + +func Test_writeTrieState(t *testing.T) { + writeTrieState(testStateData, "westendDevTestState.json") + _, err := os.Stat("./westendDevTestState.json") + require.NoError(t, err) + + clean(t, "westendDevTestState.json") +} + +func Test_compareStateRoots(t *testing.T) { + type args struct { + response modules.StateTrieResponse + expectedStateRoot common.Hash + 
trieVersion trie.TrieLayout + } + tests := []struct { + name string + args args + shouldPanic bool + }{ + { + name: "happy_path", + args: args{ + response: testStateData, + expectedStateRoot: common.MustHexToHash("0x3b1863ff981a31864be76037e4cf5c927b937dd8a8e1e25494128da7a95b5cdf"), + trieVersion: 0, + }, + }, + { + name: "invalid_trie_version", + args: args{ + response: testStateData, + expectedStateRoot: common.MustHexToHash("0x6120d3afde6c139305bd7c0dcf50bdff5b620203e00c7491b2c30f95dccacc32"), + trieVersion: 21, + }, + shouldPanic: true, + }, + { + name: "hashes_do_not_match", + args: args{ + response: testStateData, + expectedStateRoot: common.MustHexToHash("0x01"), + trieVersion: 21, + }, + shouldPanic: true, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if tt.shouldPanic { + require.Panics(t, + func() { + compareStateRoots(tt.args.response, tt.args.expectedStateRoot, tt.args.trieVersion) + }, + "The code did not panic") + } else { + compareStateRoots(tt.args.response, tt.args.expectedStateRoot, tt.args.trieVersion) + } + }) + } +} + +func Test_cli(t *testing.T) { + tests := []struct { + name string + args []string + }{ + { + name: "no_arguments", + }, + { + name: "to_few_arguments", + args: []string{"0x01"}, + }, + { + name: "invalid_formatting_for_block_hash", + args: []string{"hello", "output.json"}, + }, + { + name: "no_trie_version", + args: []string{"0x01", "output.json", "0x01"}, + }, + { + name: "invalid_formatting_for_root_hash", + args: []string{"0x01", "output.json", "hello", "1"}, + }, + { + name: "invalid_trie_version", + args: []string{"0x01", "output.json", "0x01", "hello"}, + }, + { + name: "to_many_arguments", + args: []string{"0x01", "output.json", "0x01", "1", "0x01"}, + }, + } + for _, tt := range tests { + tt := tt + t.Run(tt.name, func(t *testing.T) { + os.Args = tt.args + require.Panics(t, func() { main() }, "The code did not panic") + }) + } +} From 644b212ed3e4a133fbd9b069552b3f1d65e56012 Mon Sep 17 00:00:00 2001 From: Timothy Wu Date: Mon, 29 Jan 2024 21:56:16 -0500 Subject: [PATCH 06/11] refactor(pkg/scale): revise `VaryingDataType` to an interface (#3669) --- dot/core/messages_test.go | 2 +- dot/core/service.go | 4 +- dot/digest/block_import.go | 4 +- dot/digest/block_import_test.go | 14 +- dot/digest/digest_integration_test.go | 36 +- dot/digest/interfaces.go | 5 +- dot/digest/mock_epoch_state_test.go | 3 +- dot/digest/mock_grandpa_test.go | 3 +- dot/helpers_test.go | 3 +- dot/import_integration_test.go | 2 +- dot/import_test.go | 4 +- dot/network/block_announce.go | 2 +- dot/network/message_test.go | 8 +- dot/network/notifications_test.go | 2 +- dot/rpc/modules/chain.go | 2 +- dot/rpc/modules/chain_integration_test.go | 8 +- dot/rpc/modules/state_test.go | 4 +- .../websocket_integration_test.go | 4 +- dot/state/block_race_test.go | 2 +- dot/state/block_test.go | 18 +- dot/state/epoch.go | 4 +- dot/state/epoch_test.go | 10 +- dot/state/grandpa.go | 2 +- dot/state/service_integration_test.go | 2 +- dot/state/test_helpers.go | 4 +- dot/sync/chain_sync_test.go | 13 +- dot/sync/syncer_test.go | 5 +- dot/types/babe.go | 8 +- dot/types/babe_digest.go | 85 ++- dot/types/babe_digest_test.go | 6 +- dot/types/block_data_test.go | 24 +- dot/types/block_test.go | 2 +- dot/types/consensus_digest.go | 240 +++++-- dot/types/consensus_digest_test.go | 2 +- dot/types/digest.go | 127 +++- dot/types/digest_test.go | 49 +- dot/types/grandpa.go | 66 +- dot/types/grandpa_test.go | 2 +- dot/types/header.go | 31 +- 
lib/babe/build_integration_test.go | 2 +- lib/babe/errors.go | 309 ++++++--- lib/babe/inherents/parachain_inherents.go | 341 ++++++---- .../inherents/parachain_inherents_test.go | 57 +- lib/babe/verify.go | 14 +- lib/babe/verify_integration_test.go | 15 +- lib/babe/verify_test.go | 20 +- lib/blocktree/helpers_test.go | 4 +- lib/grandpa/message.go | 181 ++++-- .../message_handler_integration_test.go | 2 +- lib/grandpa/vote_message.go | 4 +- lib/grandpa/vote_message_test.go | 2 +- lib/runtime/invalid_transaction.go | 167 +++-- lib/runtime/test_helpers.go | 2 +- lib/runtime/transaction_validity.go | 64 +- lib/runtime/transaction_validity_test.go | 16 +- lib/runtime/unknown_transaction.go | 85 +-- lib/runtime/wazero/imports.go | 74 ++- lib/runtime/wazero/instance.go | 2 +- lib/runtime/wazero/instance_test.go | 10 +- pkg/scale/README.md | 320 ++-------- pkg/scale/decode.go | 84 +-- pkg/scale/decode_test.go | 63 +- pkg/scale/encode.go | 37 +- pkg/scale/encode_test.go | 5 + pkg/scale/errors.go | 2 +- pkg/scale/varying_data_type.go | 122 +--- pkg/scale/varying_data_type_example_test.go | 132 ++-- .../varying_data_type_nested_example_test.go | 220 ------- pkg/scale/varying_data_type_nested_test.go | 253 ++++---- pkg/scale/varying_data_type_test.go | 589 ++++++------------ tests/stress/stress_test.go | 2 +- tests/utils/rpc/request.go | 2 +- 72 files changed, 1986 insertions(+), 2028 deletions(-) delete mode 100644 pkg/scale/varying_data_type_nested_example_test.go diff --git a/dot/core/messages_test.go b/dot/core/messages_test.go index bd3342d0ca..08ce8c0fd2 100644 --- a/dot/core/messages_test.go +++ b/dot/core/messages_test.go @@ -122,7 +122,7 @@ func TestServiceHandleTransactionMessage(t *testing.T) { runtimeMock3 := NewMockInstance(ctrl) invalidTransaction := runtime.NewInvalidTransaction() - err := invalidTransaction.Set(runtime.Future{}) + err := invalidTransaction.SetValue(runtime.Future{}) require.NoError(t, err) type args struct { diff --git a/dot/core/service.go b/dot/core/service.go index a7dc445a3e..ca69713075 100644 --- a/dot/core/service.go +++ b/dot/core/service.go @@ -191,8 +191,8 @@ func (s *Service) HandleBlockProduced(block *types.Block, state *rtstorage.TrieS func createBlockAnnounce(block *types.Block, isBestBlock bool) ( blockAnnounce *network.BlockAnnounceMessage, err error) { digest := types.NewDigest() - for i := range block.Header.Digest.Types { - digestValue, err := block.Header.Digest.Types[i].Value() + for i := range block.Header.Digest { + digestValue, err := block.Header.Digest[i].Value() if err != nil { return nil, fmt.Errorf("getting value of digest type at index %d: %w", i, err) } diff --git a/dot/digest/block_import.go b/dot/digest/block_import.go index 60859ebfb1..4084c345d1 100644 --- a/dot/digest/block_import.go +++ b/dot/digest/block_import.go @@ -24,7 +24,7 @@ func NewBlockImportHandler(epochState EpochState, grandpaState GrandpaState) *Bl // HandleDigests handles consensus digests for an imported block func (h *BlockImportHandler) HandleDigests(header *types.Header) error { - consensusDigests := toConsensusDigests(header.Digest.Types) + consensusDigests := toConsensusDigests(header.Digest) consensusDigests, err := checkForGRANDPAForcedChanges(consensusDigests) if err != nil { return fmt.Errorf("failed while checking GRANDPA digests: %w", err) @@ -76,7 +76,7 @@ func (h *BlockImportHandler) handleConsensusDigest(d *types.ConsensusDigest, hea } // toConsensusDigests converts a slice of scale.VaryingDataType to a slice of types.ConsensusDigest. 
-func toConsensusDigests(scaleVaryingTypes []scale.VaryingDataType) []types.ConsensusDigest { +func toConsensusDigests(scaleVaryingTypes types.Digest) []types.ConsensusDigest { consensusDigests := make([]types.ConsensusDigest, 0, len(scaleVaryingTypes)) for _, d := range scaleVaryingTypes { diff --git a/dot/digest/block_import_test.go b/dot/digest/block_import_test.go index 7ec12d82d1..1efc9ef745 100644 --- a/dot/digest/block_import_test.go +++ b/dot/digest/block_import_test.go @@ -35,7 +35,7 @@ func TestBlockImportHandle(t *testing.T) { }) versionedNextConfigData := types.NewVersionedNextConfigData() - versionedNextConfigData.Set(types.NextConfigDataV1{ + versionedNextConfigData.SetValue(types.NextConfigDataV1{ C1: 9, C2: 10, SecondarySlots: 1, @@ -239,7 +239,7 @@ func TestBlockImportHandle(t *testing.T) { } versionedNextConfigData := types.NewVersionedNextConfigData() - versionedNextConfigData.Set(types.NextConfigDataV1{ + versionedNextConfigData.SetValue(types.NextConfigDataV1{ C1: 9, C2: 10, SecondarySlots: 1, @@ -288,11 +288,11 @@ func TestBlockImportHandle(t *testing.T) { } } -func createBABEConsensusDigest(t *testing.T, digestData scale.VaryingDataTypeValue) types.ConsensusDigest { +func createBABEConsensusDigest(t *testing.T, digestData any) types.ConsensusDigest { t.Helper() babeConsensusDigest := types.NewBabeConsensusDigest() - require.NoError(t, babeConsensusDigest.Set(digestData)) + require.NoError(t, babeConsensusDigest.SetValue(digestData)) marshaledData, err := scale.Marshal(babeConsensusDigest) require.NoError(t, err) @@ -303,11 +303,11 @@ func createBABEConsensusDigest(t *testing.T, digestData scale.VaryingDataTypeVal } } -func createGRANDPAConsensusDigest(t *testing.T, digestData scale.VaryingDataTypeValue) types.ConsensusDigest { +func createGRANDPAConsensusDigest(t *testing.T, digestData any) types.ConsensusDigest { t.Helper() grandpaConsensusDigest := types.NewGrandpaConsensusDigest() - require.NoError(t, grandpaConsensusDigest.Set(digestData)) + require.NoError(t, grandpaConsensusDigest.SetValue(digestData)) marshaledData, err := scale.Marshal(grandpaConsensusDigest) require.NoError(t, err) @@ -323,7 +323,7 @@ func createBlockWithDigests(t *testing.T, genesisHeader *types.Header, digestsTo t.Helper() digest := types.NewDigest() - digestAddArgs := make([]scale.VaryingDataTypeValue, len(digestsToApply)) + digestAddArgs := make([]any, len(digestsToApply)) for idx, consensusDigest := range digestsToApply { digestAddArgs[idx] = consensusDigest diff --git a/dot/digest/digest_integration_test.go b/dot/digest/digest_integration_test.go index 9133f4af9a..cc173a4584 100644 --- a/dot/digest/digest_integration_test.go +++ b/dot/digest/digest_integration_test.go @@ -77,7 +77,7 @@ func TestHandler_GrandpaScheduledChange(t *testing.T) { } var digest = types.NewGrandpaConsensusDigest() - err = digest.Set(sc) + err = digest.SetValue(sc) require.NoError(t, err) data, err := scale.Marshal(digest) @@ -110,48 +110,48 @@ func TestHandler_GrandpaScheduledChange(t *testing.T) { func TestMultipleGRANDPADigests_ShouldIncludeJustForcedChanges(t *testing.T) { tests := map[string]struct { - digestsTypes []scale.VaryingDataTypeValue - expectedHandled []scale.VaryingDataTypeValue + digestsTypes []any + expectedHandled []any }{ "forced_and_scheduled_changes_same_block": { - digestsTypes: []scale.VaryingDataTypeValue{ + digestsTypes: []any{ types.GrandpaForcedChange{}, types.GrandpaScheduledChange{}, }, - expectedHandled: []scale.VaryingDataTypeValue{ + expectedHandled: []any{ 
types.GrandpaForcedChange{}, }, }, "only_scheduled_change_in_block": { - digestsTypes: []scale.VaryingDataTypeValue{ + digestsTypes: []any{ types.GrandpaScheduledChange{}, }, - expectedHandled: []scale.VaryingDataTypeValue{ + expectedHandled: []any{ types.GrandpaScheduledChange{}, }, }, "more_than_one_forced_changes_in_block": { - digestsTypes: []scale.VaryingDataTypeValue{ + digestsTypes: []any{ types.GrandpaForcedChange{}, types.GrandpaForcedChange{}, types.GrandpaForcedChange{}, types.GrandpaScheduledChange{}, }, - expectedHandled: []scale.VaryingDataTypeValue{ + expectedHandled: []any{ types.GrandpaForcedChange{}, types.GrandpaForcedChange{}, types.GrandpaForcedChange{}, }, }, "multiple_consensus_digests_in_block": { - digestsTypes: []scale.VaryingDataTypeValue{ + digestsTypes: []any{ types.GrandpaOnDisabled{}, types.GrandpaPause{}, types.GrandpaResume{}, types.GrandpaForcedChange{}, types.GrandpaScheduledChange{}, }, - expectedHandled: []scale.VaryingDataTypeValue{ + expectedHandled: []any{ types.GrandpaOnDisabled{}, types.GrandpaPause{}, types.GrandpaResume{}, @@ -167,7 +167,7 @@ func TestMultipleGRANDPADigests_ShouldIncludeJustForcedChanges(t *testing.T) { for _, item := range tt.digestsTypes { var digest = types.NewGrandpaConsensusDigest() - require.NoError(t, digest.Set(item)) + require.NoError(t, digest.SetValue(item)) data, err := scale.Marshal(digest) require.NoError(t, err) @@ -190,7 +190,7 @@ func TestMultipleGRANDPADigests_ShouldIncludeJustForcedChanges(t *testing.T) { for _, item := range tt.expectedHandled { var digest = types.NewGrandpaConsensusDigest() - require.NoError(t, digest.Set(item)) + require.NoError(t, digest.SetValue(item)) data, err := scale.Marshal(digest) require.NoError(t, err) @@ -215,7 +215,7 @@ func TestHandler_HandleBABEOnDisabled(t *testing.T) { } var digest = types.NewBabeConsensusDigest() - err := digest.Set(types.BABEOnDisabled{ + err := digest.SetValue(types.BABEOnDisabled{ ID: 7, }) require.NoError(t, err) @@ -236,7 +236,7 @@ func createHeaderWithPreDigest(t *testing.T, slotNumber uint64) *types.Header { t.Helper() babeHeader := types.NewBabeDigest() - err := babeHeader.Set(*types.NewBabePrimaryPreDigest(0, slotNumber, [32]byte{}, [64]byte{})) + err := babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, slotNumber, [32]byte{}, [64]byte{})) require.NoError(t, err) enc, err := scale.Marshal(babeHeader) @@ -276,7 +276,7 @@ func TestHandler_HandleNextEpochData(t *testing.T) { } digest := types.NewBabeConsensusDigest() - err = digest.Set(nextEpochData) + err = digest.SetValue(nextEpochData) require.NoError(t, err) data, err := scale.Marshal(digest) @@ -339,9 +339,9 @@ func TestHandler_HandleNextConfigData(t *testing.T) { } versionedNextConfigData := types.NewVersionedNextConfigData() - versionedNextConfigData.Set(nextConfigData) + versionedNextConfigData.SetValue(nextConfigData) - err := digest.Set(versionedNextConfigData) + err := digest.SetValue(versionedNextConfigData) require.NoError(t, err) data, err := scale.Marshal(digest) diff --git a/dot/digest/interfaces.go b/dot/digest/interfaces.go index a32df96c10..311ca9eeaf 100644 --- a/dot/digest/interfaces.go +++ b/dot/digest/interfaces.go @@ -7,7 +7,6 @@ import ( "encoding/json" "github.com/ChainSafe/gossamer/dot/types" - "github.com/ChainSafe/gossamer/pkg/scale" ) // BlockState interface for block state methods @@ -21,14 +20,14 @@ type BlockState interface { // EpochState is the interface for state.EpochState type EpochState interface { GetEpochForBlock(header *types.Header) (uint64, error) - 
HandleBABEDigest(header *types.Header, digest scale.VaryingDataType) error + HandleBABEDigest(header *types.Header, digest types.BabeConsensusDigest) error FinalizeBABENextEpochData(finalizedHeader *types.Header) error FinalizeBABENextConfigData(finalizedHeader *types.Header) error } // GrandpaState is the interface for the state.GrandpaState type GrandpaState interface { - HandleGRANDPADigest(header *types.Header, digest scale.VaryingDataType) error + HandleGRANDPADigest(header *types.Header, digest types.GrandpaConsensusDigest) error ApplyScheduledChanges(finalizedHeader *types.Header) error } diff --git a/dot/digest/mock_epoch_state_test.go b/dot/digest/mock_epoch_state_test.go index 917414bf86..7faec9f1f2 100644 --- a/dot/digest/mock_epoch_state_test.go +++ b/dot/digest/mock_epoch_state_test.go @@ -13,7 +13,6 @@ import ( reflect "reflect" types "github.com/ChainSafe/gossamer/dot/types" - scale "github.com/ChainSafe/gossamer/pkg/scale" gomock "go.uber.org/mock/gomock" ) @@ -84,7 +83,7 @@ func (mr *MockEpochStateMockRecorder) GetEpochForBlock(arg0 any) *gomock.Call { } // HandleBABEDigest mocks base method. -func (m *MockEpochState) HandleBABEDigest(arg0 *types.Header, arg1 scale.VaryingDataType) error { +func (m *MockEpochState) HandleBABEDigest(arg0 *types.Header, arg1 types.BabeConsensusDigest) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "HandleBABEDigest", arg0, arg1) ret0, _ := ret[0].(error) diff --git a/dot/digest/mock_grandpa_test.go b/dot/digest/mock_grandpa_test.go index 9040e1316c..81ebb77863 100644 --- a/dot/digest/mock_grandpa_test.go +++ b/dot/digest/mock_grandpa_test.go @@ -13,7 +13,6 @@ import ( reflect "reflect" types "github.com/ChainSafe/gossamer/dot/types" - scale "github.com/ChainSafe/gossamer/pkg/scale" gomock "go.uber.org/mock/gomock" ) @@ -55,7 +54,7 @@ func (mr *MockGrandpaStateMockRecorder) ApplyScheduledChanges(arg0 any) *gomock. } // HandleGRANDPADigest mocks base method. 
-func (m *MockGrandpaState) HandleGRANDPADigest(arg0 *types.Header, arg1 scale.VaryingDataType) error { +func (m *MockGrandpaState) HandleGRANDPADigest(arg0 *types.Header, arg1 types.GrandpaConsensusDigest) error { m.ctrl.T.Helper() ret := m.ctrl.Call(m, "HandleGRANDPADigest", arg0, arg1) ret0, _ := ret[0].(error) diff --git a/dot/helpers_test.go b/dot/helpers_test.go index a4539a9299..089c9817ba 100644 --- a/dot/helpers_test.go +++ b/dot/helpers_test.go @@ -48,9 +48,8 @@ func newWestendDevGenesisWithTrieAndHeader(t *testing.T) ( extrinsicRoot := trie.EmptyHash const number = 0 - digest := types.NewDigest() genesisHeader = *types.NewHeader(parentHash, - stateRoot, extrinsicRoot, number, digest) + stateRoot, extrinsicRoot, number, nil) return gen, genesisTrie, genesisHeader } diff --git a/dot/import_integration_test.go b/dot/import_integration_test.go index 436e4c2204..649775b523 100644 --- a/dot/import_integration_test.go +++ b/dot/import_integration_test.go @@ -76,7 +76,7 @@ func TestNewHeaderFromFile(t *testing.T) { digest := types.NewDigest() err = scale.Unmarshal(digestBytes, &digest) require.NoError(t, err) - require.Len(t, digest.Types, 2) + require.Len(t, digest, 2) expected := &types.Header{ ParentHash: common.MustHexToHash("0x3b45c9c22dcece75a30acc9c2968cb311e6b0557350f83b430f47559db786975"), diff --git a/dot/import_test.go b/dot/import_test.go index 38b54f8db7..0462bd9e41 100644 --- a/dot/import_test.go +++ b/dot/import_test.go @@ -79,7 +79,7 @@ func Test_newHeaderFromFile(t *testing.T) { } preRuntimeDigestItem := types.NewDigestItem() - err := preRuntimeDigestItem.Set(preRuntimeDigest) + err := preRuntimeDigestItem.SetValue(preRuntimeDigest) require.NoError(t, err) preRuntimeDigestItemValue, err := preRuntimeDigestItem.Value() require.NoError(t, err) @@ -94,7 +94,7 @@ func Test_newHeaderFromFile(t *testing.T) { } sealDigestItem := types.NewDigestItem() - err = sealDigestItem.Set(sealDigest) + err = sealDigestItem.SetValue(sealDigest) require.NoError(t, err) sealDigestItemValue, err := sealDigestItem.Value() require.NoError(t, err) diff --git a/dot/network/block_announce.go b/dot/network/block_announce.go index 8739cfb3df..9fb37c3ac5 100644 --- a/dot/network/block_announce.go +++ b/dot/network/block_announce.go @@ -27,7 +27,7 @@ type BlockAnnounceMessage struct { Number uint StateRoot common.Hash ExtrinsicsRoot common.Hash - Digest scale.VaryingDataTypeSlice + Digest types.Digest BestBlock bool } diff --git a/dot/network/message_test.go b/dot/network/message_test.go index 8498ee4426..e0d713b9a6 100644 --- a/dot/network/message_test.go +++ b/dot/network/message_test.go @@ -165,7 +165,7 @@ func TestEncodeBlockResponseMessage_WithBody(t *testing.T) { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf}) - header := types.NewHeader(testHash, testHash, testHash, 1, types.NewDigest()) + header := types.NewHeader(testHash, testHash, testHash, 1, nil) exts := [][]byte{{1, 3, 5, 7}, {9, 1, 2}, {3, 4, 5}} body := types.NewBody(types.BytesArrayToExtrinsics(exts)) @@ -216,7 +216,7 @@ func TestEncodeBlockResponseMessage_WithAll(t *testing.T) { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf}) - header := types.NewHeader(testHash, testHash, testHash, 1, types.NewDigest()) + header := types.NewHeader(testHash, testHash, testHash, 1, nil) exts := [][]byte{{1, 3, 5, 7}, {9, 1, 2}, {3, 4, 5}} body := types.NewBody(types.BytesArrayToExtrinsics(exts)) @@ -297,7 +297,7 @@ func TestDecode_BlockAnnounceMessage(t *testing.T) { bhm := BlockAnnounceMessage{ Number: 0, - Digest: 
types.NewDigest(), + Digest: nil, } err := bhm.Decode(announceMessage) @@ -314,7 +314,7 @@ func TestDecode_BlockAnnounceMessage(t *testing.T) { Number: 1, StateRoot: stateRoot, ExtrinsicsRoot: extrinsicsRoot, - Digest: types.NewDigest(), + Digest: nil, } require.Equal(t, expected, bhm) diff --git a/dot/network/notifications_test.go b/dot/network/notifications_test.go index 440980f160..0a7d9d1044 100644 --- a/dot/network/notifications_test.go +++ b/dot/network/notifications_test.go @@ -65,7 +65,7 @@ func TestCreateDecoder_BlockAnnounce(t *testing.T) { Number: 77, StateRoot: common.Hash{2}, ExtrinsicsRoot: common.Hash{3}, - Digest: types.NewDigest(), + Digest: nil, } enc, err = testBlockAnnounce.Encode() diff --git a/dot/rpc/modules/chain.go b/dot/rpc/modules/chain.go index a487dcdafa..b5579ed2d8 100644 --- a/dot/rpc/modules/chain.go +++ b/dot/rpc/modules/chain.go @@ -246,7 +246,7 @@ func HeaderToJSON(header types.Header) (ChainBlockHeaderResponse, error) { res.Number = common.UintToHex(header.Number) } - for _, item := range header.Digest.Types { + for _, item := range header.Digest { enc, err := scale.Marshal(item) if err != nil { return ChainBlockHeaderResponse{}, err diff --git a/dot/rpc/modules/chain_integration_test.go b/dot/rpc/modules/chain_integration_test.go index de51c20c52..9c78f331b9 100644 --- a/dot/rpc/modules/chain_integration_test.go +++ b/dot/rpc/modules/chain_integration_test.go @@ -41,7 +41,7 @@ func TestChainGetHeader_Genesis(t *testing.T) { di := types.NewDigestItem() prd, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest() require.NoError(t, err) - err = di.Set(*prd) + err = di.SetValue(*prd) require.NoError(t, err) d, err := scale.Marshal(di) @@ -77,7 +77,7 @@ func TestChainGetHeader_Latest(t *testing.T) { di := types.NewDigestItem() prd, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest() require.NoError(t, err) - err = di.Set(*prd) + err = di.SetValue(*prd) require.NoError(t, err) d, err := scale.Marshal(di) @@ -125,7 +125,7 @@ func TestChainGetBlock_Genesis(t *testing.T) { di := types.NewDigestItem() prd, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest() require.NoError(t, err) - err = di.Set(*prd) + err = di.SetValue(*prd) require.NoError(t, err) d, err := scale.Marshal(di) @@ -169,7 +169,7 @@ func TestChainGetBlock_Latest(t *testing.T) { di := types.NewDigestItem() prd, err := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest() require.NoError(t, err) - err = di.Set(*prd) + err = di.SetValue(*prd) require.NoError(t, err) d, err := scale.Marshal(di) diff --git a/dot/rpc/modules/state_test.go b/dot/rpc/modules/state_test.go index 491922804d..6e40bf99e2 100644 --- a/dot/rpc/modules/state_test.go +++ b/dot/rpc/modules/state_test.go @@ -340,7 +340,7 @@ func TestStateTrie(t *testing.T) { fakeStateRoot := common.Hash([32]byte{5, 5, 5, 5, 5}) fakeBlockHeader := types.NewHeader(common.EmptyHash, fakeStateRoot, - common.EmptyHash, 1, scale.VaryingDataTypeSlice{}) + common.EmptyHash, 1, nil) blockAPIMock.EXPECT().GetHeader(bestBlockHash).Return(fakeBlockHeader, nil) @@ -365,7 +365,7 @@ func TestStateTrie(t *testing.T) { fakeStateRoot := common.Hash([32]byte{5, 5, 5, 5, 5}) fakeBlockHeader := types.NewHeader(common.EmptyHash, fakeStateRoot, - common.EmptyHash, 1, scale.VaryingDataTypeSlice{}) + common.EmptyHash, 1, nil) blockAPIMock.EXPECT().GetHeader(expecificBlockHash). 
Return(fakeBlockHeader, nil) diff --git a/dot/rpc/subscription/websocket_integration_test.go b/dot/rpc/subscription/websocket_integration_test.go index 775d650cda..ff062ff216 100644 --- a/dot/rpc/subscription/websocket_integration_test.go +++ b/dot/rpc/subscription/websocket_integration_test.go @@ -528,7 +528,9 @@ func TestWSConn_InitExtrinsicWatch(t *testing.T) { // test initExtrinsicWatch with invalid transaction invalidTransaction := runtime.NewInvalidTransaction() - err := invalidTransaction.Set(runtime.Future{}) + err := invalidTransaction.SetValue(runtime.Future{}) + require.NoError(t, err) + require.NoError(t, err) coreAPI := mocks.NewMockCoreAPI(ctrl) wsconn.CoreAPI = coreAPI diff --git a/dot/state/block_race_test.go b/dot/state/block_race_test.go index d840d1d426..716b94ebca 100644 --- a/dot/state/block_race_test.go +++ b/dot/state/block_race_test.go @@ -41,7 +41,7 @@ func TestConcurrencySetHeader(t *testing.T) { header := &types.Header{ Number: 1, StateRoot: trie.EmptyHash, - Digest: types.NewDigest(), + Digest: nil, } err = bs.SetHeader(header) diff --git a/dot/state/block_test.go b/dot/state/block_test.go index 2a44d62c5d..18ef1233f7 100644 --- a/dot/state/block_test.go +++ b/dot/state/block_test.go @@ -54,7 +54,7 @@ func TestSetAndGetHeader(t *testing.T) { header := &types.Header{ Number: 0, StateRoot: trie.EmptyHash, - Digest: types.NewDigest(), + Digest: nil, } err := bs.SetHeader(header) @@ -71,7 +71,7 @@ func TestHasHeader(t *testing.T) { header := &types.Header{ Number: 0, StateRoot: trie.EmptyHash, - Digest: types.NewDigest(), + Digest: nil, } err := bs.SetHeader(header) @@ -172,7 +172,7 @@ func TestGetSlotForBlock(t *testing.T) { expectedSlot := uint64(77) babeHeader := types.NewBabeDigest() - err := babeHeader.Set(*types.NewBabePrimaryPreDigest(0, expectedSlot, [32]byte{}, [64]byte{})) + err := babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, expectedSlot, [32]byte{}, [64]byte{})) require.NoError(t, err) data, err := scale.Marshal(babeHeader) require.NoError(t, err) @@ -207,7 +207,7 @@ func TestGetHashesByNumber(t *testing.T) { slot := uint64(77) babeHeader := types.NewBabeDigest() - err := babeHeader.Set(*types.NewBabePrimaryPreDigest(0, slot, [32]byte{}, [64]byte{})) + err := babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, slot, [32]byte{}, [64]byte{})) require.NoError(t, err) data, err := scale.Marshal(babeHeader) require.NoError(t, err) @@ -229,7 +229,7 @@ func TestGetHashesByNumber(t *testing.T) { require.NoError(t, err) babeHeader2 := types.NewBabeDigest() - err = babeHeader2.Set(*types.NewBabePrimaryPreDigest(1, slot+1, [32]byte{}, [64]byte{})) + err = babeHeader2.SetValue(*types.NewBabePrimaryPreDigest(1, slot+1, [32]byte{}, [64]byte{})) require.NoError(t, err) data2, err := scale.Marshal(babeHeader2) require.NoError(t, err) @@ -261,7 +261,7 @@ func TestGetAllDescendants(t *testing.T) { slot := uint64(77) babeHeader := types.NewBabeDigest() - err := babeHeader.Set(*types.NewBabePrimaryPreDigest(0, slot, [32]byte{}, [64]byte{})) + err := babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, slot, [32]byte{}, [64]byte{})) require.NoError(t, err) data, err := scale.Marshal(babeHeader) require.NoError(t, err) @@ -283,7 +283,7 @@ func TestGetAllDescendants(t *testing.T) { require.NoError(t, err) babeHeader2 := types.NewBabeDigest() - err = babeHeader2.Set(*types.NewBabePrimaryPreDigest(1, slot+1, [32]byte{}, [64]byte{})) + err = babeHeader2.SetValue(*types.NewBabePrimaryPreDigest(1, slot+1, [32]byte{}, [64]byte{})) require.NoError(t, err) data2, err := 
scale.Marshal(babeHeader2) require.NoError(t, err) @@ -326,7 +326,7 @@ func TestGetBlockHashesBySlot(t *testing.T) { slot := uint64(77) babeHeader := types.NewBabeDigest() - err := babeHeader.Set(*types.NewBabePrimaryPreDigest(0, slot, [32]byte{}, [64]byte{})) + err := babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, slot, [32]byte{}, [64]byte{})) require.NoError(t, err) data, err := scale.Marshal(babeHeader) require.NoError(t, err) @@ -348,7 +348,7 @@ func TestGetBlockHashesBySlot(t *testing.T) { require.NoError(t, err) babeHeader2 := types.NewBabeDigest() - err = babeHeader2.Set(*types.NewBabePrimaryPreDigest(1, slot, [32]byte{}, [64]byte{})) + err = babeHeader2.SetValue(*types.NewBabePrimaryPreDigest(1, slot, [32]byte{}, [64]byte{})) require.NoError(t, err) data2, err := scale.Marshal(babeHeader2) require.NoError(t, err) diff --git a/dot/state/epoch.go b/dot/state/epoch.go index 98f124eaf6..6d1d0880d5 100644 --- a/dot/state/epoch.go +++ b/dot/state/epoch.go @@ -197,7 +197,7 @@ func (s *EpochState) GetEpochForBlock(header *types.Header) (uint64, error) { return 0, err } - for _, d := range header.Digest.Types { + for _, d := range header.Digest { digestValue, err := d.Value() if err != nil { continue @@ -373,7 +373,7 @@ func (s *EpochState) getConfigDataFromDatabase(epoch uint64) (*types.ConfigData, return info, nil } -func (s *EpochState) HandleBABEDigest(header *types.Header, digest scale.VaryingDataType) error { +func (s *EpochState) HandleBABEDigest(header *types.Header, digest types.BabeConsensusDigest) error { headerHash := header.Hash() digestValue, err := digest.Value() diff --git a/dot/state/epoch_test.go b/dot/state/epoch_test.go index 7a8c92c8d4..3dde7de29b 100644 --- a/dot/state/epoch_test.go +++ b/dot/state/epoch_test.go @@ -134,7 +134,7 @@ func TestEpochState_GetEpochForBlock(t *testing.T) { s := newEpochStateFromGenesis(t) babeHeader := types.NewBabeDigest() - err := babeHeader.Set(*types.NewBabePrimaryPreDigest(0, s.epochLength+2, [32]byte{}, [64]byte{})) + err := babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, s.epochLength+2, [32]byte{}, [64]byte{})) require.NoError(t, err) enc, err := scale.Marshal(babeHeader) require.NoError(t, err) @@ -151,7 +151,7 @@ func TestEpochState_GetEpochForBlock(t *testing.T) { require.Equal(t, uint64(1), epoch) babeHeader = types.NewBabeDigest() - err = babeHeader.Set(*types.NewBabePrimaryPreDigest(0, s.epochLength*2+3, [32]byte{}, [64]byte{})) + err = babeHeader.SetValue(*types.NewBabePrimaryPreDigest(0, s.epochLength*2+3, [32]byte{}, [64]byte{})) require.NoError(t, err) enc, err = scale.Marshal(babeHeader) require.NoError(t, err) @@ -254,8 +254,7 @@ func TestStoreAndFinalizeBabeNextEpochData(t *testing.T) { preRuntimeDigest, err := babePrimaryPreDigest.ToPreRuntimeDigest() require.NoError(t, err) - digest := scale.NewVaryingDataTypeSlice(scale.MustNewVaryingDataType( - types.PreRuntimeDigest{})) + digest := types.NewDigest() require.NoError(t, digest.Add(*preRuntimeDigest)) @@ -424,8 +423,7 @@ func TestStoreAndFinalizeBabeNextConfigData(t *testing.T) { preRuntimeDigest, err := babePrimaryPreDigest.ToPreRuntimeDigest() require.NoError(t, err) - digest := scale.NewVaryingDataTypeSlice(scale.MustNewVaryingDataType( - types.PreRuntimeDigest{})) + digest := types.NewDigest() require.NoError(t, digest.Add(*preRuntimeDigest)) diff --git a/dot/state/grandpa.go b/dot/state/grandpa.go index 9e5ca23430..e5b1ad1ac1 100644 --- a/dot/state/grandpa.go +++ b/dot/state/grandpa.go @@ -88,7 +88,7 @@ func NewGrandpaState(db database.Database, bs 
*BlockState, telemetry Telemetry) } // HandleGRANDPADigest receives a decoded GRANDPA digest and calls the right function to handles the digest -func (s *GrandpaState) HandleGRANDPADigest(header *types.Header, digest scale.VaryingDataType) error { +func (s *GrandpaState) HandleGRANDPADigest(header *types.Header, digest types.GrandpaConsensusDigest) error { digestValue, err := digest.Value() if err != nil { return fmt.Errorf("getting digest value: %w", err) diff --git a/dot/state/service_integration_test.go b/dot/state/service_integration_test.go index d733b90934..3f19e8650a 100644 --- a/dot/state/service_integration_test.go +++ b/dot/state/service_integration_test.go @@ -90,7 +90,7 @@ func TestService_Initialise(t *testing.T) { require.NoError(t, err) genesisHeaderPtr := types.NewHeader(common.NewHash([]byte{77}), - genTrie.MustHash(trie.NoMaxInlineValueSize), trie.EmptyHash, 0, types.NewDigest()) + genTrie.MustHash(trie.NoMaxInlineValueSize), trie.EmptyHash, 0, nil) err = state.Initialise(&genData, genesisHeaderPtr, genTrieCopy) require.NoError(t, err) diff --git a/dot/state/test_helpers.go b/dot/state/test_helpers.go index 3109fede3d..6409aeb61a 100644 --- a/dot/state/test_helpers.go +++ b/dot/state/test_helpers.go @@ -33,9 +33,9 @@ func NewInMemoryDB(t *testing.T) database.Database { return db } -func createPrimaryBABEDigest(t testing.TB) scale.VaryingDataTypeSlice { +func createPrimaryBABEDigest(t testing.TB) types.Digest { babeDigest := types.NewBabeDigest() - err := babeDigest.Set(types.BabePrimaryPreDigest{AuthorityIndex: 0}) + err := babeDigest.SetValue(types.BabePrimaryPreDigest{AuthorityIndex: 0}) require.NoError(t, err) bdEnc, err := scale.Marshal(babeDigest) diff --git a/dot/sync/chain_sync_test.go b/dot/sync/chain_sync_test.go index d99afe8db6..26458ef617 100644 --- a/dot/sync/chain_sync_test.go +++ b/dot/sync/chain_sync_test.go @@ -18,7 +18,6 @@ import ( "github.com/ChainSafe/gossamer/lib/common/variadic" "github.com/ChainSafe/gossamer/lib/runtime/storage" "github.com/ChainSafe/gossamer/lib/trie" - "github.com/ChainSafe/gossamer/pkg/scale" "github.com/libp2p/go-libp2p/core/peer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -66,10 +65,10 @@ func Test_chainSync_onBlockAnnounce(t *testing.T) { errTest := errors.New("test error") emptyTrieState := storage.NewTrieState(trie.NewEmptyTrie()) block1AnnounceHeader := types.NewHeader(common.Hash{}, emptyTrieState.MustRoot(trie.NoMaxInlineValueSize), - common.Hash{}, 1, scale.VaryingDataTypeSlice{}) + common.Hash{}, 1, nil) block2AnnounceHeader := types.NewHeader(block1AnnounceHeader.Hash(), emptyTrieState.MustRoot(trie.NoMaxInlineValueSize), - common.Hash{}, 2, scale.VaryingDataTypeSlice{}) + common.Hash{}, 2, nil) testCases := map[string]struct { waitBootstrapSync bool @@ -248,10 +247,10 @@ func Test_chainSync_onBlockAnnounceHandshake_tipModeNeedToCatchup(t *testing.T) emptyTrieState := storage.NewTrieState(trie.NewEmptyTrie()) block1AnnounceHeader := types.NewHeader(common.Hash{}, emptyTrieState.MustRoot(trie.NoMaxInlineValueSize), - common.Hash{}, 1, scale.VaryingDataTypeSlice{}) + common.Hash{}, 1, nil) block2AnnounceHeader := types.NewHeader(block1AnnounceHeader.Hash(), emptyTrieState.MustRoot(trie.NoMaxInlineValueSize), - common.Hash{}, 130, scale.VaryingDataTypeSlice{}) + common.Hash{}, 130, nil) blockStateMock := NewMockBlockState(ctrl) blockStateMock.EXPECT(). 
@@ -1251,7 +1250,7 @@ func createSuccesfullBlockResponse(t *testing.T, parentHeader common.Hash, tsRoot := emptyTrieState.MustRoot(trie.NoMaxInlineValueSize) firstHeader := types.NewHeader(parentHeader, tsRoot, common.Hash{}, - uint(startingAt), scale.VaryingDataTypeSlice{}) + uint(startingAt), nil) response.BlockData[0] = &types.BlockData{ Hash: firstHeader.Hash(), Header: firstHeader, @@ -1263,7 +1262,7 @@ func createSuccesfullBlockResponse(t *testing.T, parentHeader common.Hash, for idx := 1; idx < numBlocks; idx++ { blockNumber := idx + startingAt header := types.NewHeader(parentHash, tsRoot, common.Hash{}, - uint(blockNumber), scale.VaryingDataTypeSlice{}) + uint(blockNumber), nil) response.BlockData[idx] = &types.BlockData{ Hash: header.Hash(), Header: header, diff --git a/dot/sync/syncer_test.go b/dot/sync/syncer_test.go index 943d8baa2b..2f192ab6a3 100644 --- a/dot/sync/syncer_test.go +++ b/dot/sync/syncer_test.go @@ -12,7 +12,6 @@ import ( "github.com/ChainSafe/gossamer/dot/peerset" "github.com/ChainSafe/gossamer/dot/types" "github.com/ChainSafe/gossamer/lib/common" - "github.com/ChainSafe/gossamer/pkg/scale" "github.com/libp2p/go-libp2p/core/peer" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -69,9 +68,9 @@ func TestService_HandleBlockAnnounce(t *testing.T) { const somePeer = peer.ID("abc") block1AnnounceHeader := types.NewHeader(common.Hash{}, common.Hash{}, - common.Hash{}, 1, scale.VaryingDataTypeSlice{}) + common.Hash{}, 1, nil) block2AnnounceHeader := types.NewHeader(common.Hash{}, common.Hash{}, - common.Hash{}, 2, scale.VaryingDataTypeSlice{}) + common.Hash{}, 2, nil) testCases := map[string]struct { serviceBuilder func(ctrl *gomock.Controller) *Service diff --git a/dot/types/babe.go b/dot/types/babe.go index 3f37735e2c..ac388112f7 100644 --- a/dot/types/babe.go +++ b/dot/types/babe.go @@ -112,11 +112,11 @@ func GetSlotFromHeader(header *Header) (uint64, error) { return 0, ErrGenesisHeader } - if len(header.Digest.Types) == 0 { + if len(header.Digest) == 0 { return 0, ErrChainHeadMissingDigest } - digestValue, err := header.Digest.Types[0].Value() + digestValue, err := header.Digest[0].Value() if err != nil { return 0, fmt.Errorf("getting first digest type value: %w", err) } @@ -149,11 +149,11 @@ func IsPrimary(header *Header) (bool, error) { return false, fmt.Errorf("cannot have nil header") } - if len(header.Digest.Types) == 0 { + if len(header.Digest) == 0 { return false, ErrChainHeadMissingDigest } - digestValue, err := header.Digest.Types[0].Value() + digestValue, err := header.Digest[0].Value() if err != nil { return false, fmt.Errorf("getting first digest type value: %w", err) } diff --git a/dot/types/babe_digest.go b/dot/types/babe_digest.go index c81417379f..212722c5e0 100644 --- a/dot/types/babe_digest.go +++ b/dot/types/babe_digest.go @@ -11,13 +11,79 @@ import ( "github.com/ChainSafe/gossamer/pkg/scale" ) +type BabeDigestValues interface { + BabePrimaryPreDigest | BabeSecondaryPlainPreDigest | BabeSecondaryVRFPreDigest +} + +type BabeDigest struct { + inner any +} + +func setBabeDigest[Value BabeDigestValues](mvdt *BabeDigest, value Value) { + mvdt.inner = value +} + +func (mvdt *BabeDigest) SetValue(value any) (err error) { + switch value := value.(type) { + case BabePrimaryPreDigest: + setBabeDigest(mvdt, value) + return + + case BabeSecondaryPlainPreDigest: + setBabeDigest(mvdt, value) + return + + case BabeSecondaryVRFPreDigest: + setBabeDigest(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") + } +} 
+ +func (mvdt BabeDigest) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case BabePrimaryPreDigest: + return 1, mvdt.inner, nil + + case BabeSecondaryPlainPreDigest: + return 2, mvdt.inner, nil + + case BabeSecondaryVRFPreDigest: + return 3, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt BabeDigest) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt BabeDigest) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return *new(BabePrimaryPreDigest), nil + + case 2: + return *new(BabeSecondaryPlainPreDigest), nil + + case 3: + return *new(BabeSecondaryVRFPreDigest), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + // NewBabeDigest returns a new VaryingDataType to represent a BabeDigest -func NewBabeDigest() scale.VaryingDataType { - return scale.MustNewVaryingDataType(BabePrimaryPreDigest{}, BabeSecondaryPlainPreDigest{}, BabeSecondaryVRFPreDigest{}) +func NewBabeDigest() BabeDigest { + return BabeDigest{} } // DecodeBabePreDigest decodes the input into a BabePreRuntimeDigest -func DecodeBabePreDigest(in []byte) (scale.VaryingDataTypeValue, error) { +func DecodeBabePreDigest(in []byte) (any, error) { babeDigest := NewBabeDigest() err := scale.Unmarshal(in, &babeDigest) if err != nil { @@ -61,9 +127,6 @@ func (d BabePrimaryPreDigest) ToPreRuntimeDigest() (*PreRuntimeDigest, error) { return toPreRuntimeDigest(d) } -// Index returns VDT index -func (BabePrimaryPreDigest) Index() uint { return 1 } - func (d BabePrimaryPreDigest) String() string { return fmt.Sprintf("BabePrimaryPreDigest{AuthorityIndex=%d, SlotNumber=%d, "+ "VRFOutput=0x%x, VRFProof=0x%x}", @@ -89,9 +152,6 @@ func (d BabeSecondaryPlainPreDigest) ToPreRuntimeDigest() (*PreRuntimeDigest, er return toPreRuntimeDigest(d) } -// Index returns VDT index -func (BabeSecondaryPlainPreDigest) Index() uint { return 2 } - func (d BabeSecondaryPlainPreDigest) String() string { return fmt.Sprintf("BabeSecondaryPlainPreDigest{AuthorityIndex=%d, SlotNumber: %d}", d.AuthorityIndex, d.SlotNumber) @@ -122,9 +182,6 @@ func (d BabeSecondaryVRFPreDigest) ToPreRuntimeDigest() (*PreRuntimeDigest, erro return toPreRuntimeDigest(d) } -// Index returns VDT index -func (BabeSecondaryVRFPreDigest) Index() uint { return 3 } - func (d BabeSecondaryVRFPreDigest) String() string { return fmt.Sprintf("BabeSecondaryVRFPreDigest{AuthorityIndex=%d, SlotNumber=%d, "+ "VrfOutput=0x%x, VrfProof=0x%x", @@ -132,9 +189,9 @@ func (d BabeSecondaryVRFPreDigest) String() string { } // toPreRuntimeDigest returns the VaryingDataTypeValue as a PreRuntimeDigest -func toPreRuntimeDigest(value scale.VaryingDataTypeValue) (*PreRuntimeDigest, error) { +func toPreRuntimeDigest(value any) (*PreRuntimeDigest, error) { digest := NewBabeDigest() - err := digest.Set(value) + err := digest.SetValue(value) if err != nil { return nil, fmt.Errorf("cannot set varying data type value to babe digest: %w", err) } diff --git a/dot/types/babe_digest_test.go b/dot/types/babe_digest_test.go index a239eaea59..06a6fbf197 100644 --- a/dot/types/babe_digest_test.go +++ b/dot/types/babe_digest_test.go @@ -14,7 +14,7 @@ import ( func TestBabePrimaryPreDigest_EncodeAndDecode(t *testing.T) { bh := NewBabeDigest() - err := bh.Set(BabePrimaryPreDigest{ + err := bh.SetValue(BabePrimaryPreDigest{ VRFOutput: [sr25519.VRFOutputLength]byte{ 0, 91, 50, 25, 214, 94, 119, 36, 71, 216, 33, 152, 85, 184, 34, 120, 61, 161, 164, 223, 76, @@ -42,7 +42,7 @@ func 
TestBabePrimaryPreDigest_EncodeAndDecode(t *testing.T) { func TestBabeSecondaryPlainPreDigest_EncodeAndDecode(t *testing.T) { bh := NewBabeDigest() - err := bh.Set(BabeSecondaryPlainPreDigest{ + err := bh.SetValue(BabeSecondaryPlainPreDigest{ AuthorityIndex: 17, SlotNumber: 420, }) @@ -59,7 +59,7 @@ func TestBabeSecondaryPlainPreDigest_EncodeAndDecode(t *testing.T) { func TestBabeSecondaryVRFPreDigest_EncodeAndDecode(t *testing.T) { bh := NewBabeDigest() - err := bh.Set(BabeSecondaryVRFPreDigest{ + err := bh.SetValue(BabeSecondaryVRFPreDigest{ VrfOutput: [sr25519.VRFOutputLength]byte{ 0, 91, 50, 25, 214, 94, 119, 36, 71, 216, 33, 152, 85, 184, 34, 120, 61, 161, 164, 223, 76, 53, 40, 246, 76, 38, 235, diff --git a/dot/types/block_data_test.go b/dot/types/block_data_test.go index e8e877e883..6cc2e5e882 100644 --- a/dot/types/block_data_test.go +++ b/dot/types/block_data_test.go @@ -12,20 +12,16 @@ import ( ) var ( - digestItem = scale.MustNewVaryingDataType(PreRuntimeDigest{}, ConsensusDigest{}, - SealDigest{}, RuntimeEnvironmentUpdated{}) - digest = scale.NewVaryingDataTypeSlice(digestItem) - testDigest = digest -) -var _ = testDigest.Add( - PreRuntimeDigest{ - ConsensusEngineID: BabeEngineID, - Data: []byte{1, 2, 3}, - }, - SealDigest{ - ConsensusEngineID: BabeEngineID, - Data: []byte{4, 5, 6, 7}, - }, + testDigest = []DigestItem{ + newDigestItem(PreRuntimeDigest{ + ConsensusEngineID: BabeEngineID, + Data: []byte{1, 2, 3}, + }), + newDigestItem(SealDigest{ + ConsensusEngineID: BabeEngineID, + Data: []byte{4, 5, 6, 7}, + }), + } ) func TestNumber(t *testing.T) { diff --git a/dot/types/block_test.go b/dot/types/block_test.go index 60a440d0df..aa2e4173eb 100644 --- a/dot/types/block_test.go +++ b/dot/types/block_test.go @@ -56,7 +56,7 @@ func TestEncodeAndDecodeBlock(t *testing.T) { extrinsicsRoot, err := common.HexToHash("0x03170a2e7597b7b7e3d84c05391d139a62b157e78786d8c082f29dcf4c111314") require.NoError(t, err) - header := NewHeader(parentHash, stateRoot, extrinsicsRoot, 1, NewDigest()) + header := NewHeader(parentHash, stateRoot, extrinsicsRoot, 1, nil) block := NewBlock(*header, *NewBody([]Extrinsic{[]byte{4, 1}})) diff --git a/dot/types/consensus_digest.go b/dot/types/consensus_digest.go index d6719c3b91..d4fdbf47c0 100644 --- a/dot/types/consensus_digest.go +++ b/dot/types/consensus_digest.go @@ -9,15 +9,164 @@ import ( "github.com/ChainSafe/gossamer/pkg/scale" ) +type BabeConsensusDigestValues interface { + NextEpochData | BABEOnDisabled | VersionedNextConfigData +} + +type BabeConsensusDigest struct { + inner any +} + +func setBabeConsensusDigest[Value BabeConsensusDigestValues](mvdt *BabeConsensusDigest, value Value) { + mvdt.inner = value +} + +func (mvdt *BabeConsensusDigest) SetValue(value any) (err error) { + switch value := value.(type) { + case NextEpochData: + setBabeConsensusDigest(mvdt, value) + return + + case BABEOnDisabled: + setBabeConsensusDigest(mvdt, value) + return + + case VersionedNextConfigData: + setBabeConsensusDigest(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt BabeConsensusDigest) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case NextEpochData: + return 1, mvdt.inner, nil + + case BABEOnDisabled: + return 2, mvdt.inner, nil + + case VersionedNextConfigData: + return 3, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt BabeConsensusDigest) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} +func 
(mvdt BabeConsensusDigest) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return *new(NextEpochData), nil + + case 2: + return *new(BABEOnDisabled), nil + + case 3: + return *new(VersionedNextConfigData), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + // NewBabeConsensusDigest constructs a vdt representing a babe consensus digest -func NewBabeConsensusDigest() scale.VaryingDataType { - return scale.MustNewVaryingDataType(NextEpochData{}, BABEOnDisabled{}, NewVersionedNextConfigData()) +func NewBabeConsensusDigest() BabeConsensusDigest { + return BabeConsensusDigest{} +} + +type GrandpaConsensusDigestValues interface { + GrandpaScheduledChange | GrandpaForcedChange | GrandpaOnDisabled | GrandpaPause | GrandpaResume +} + +type GrandpaConsensusDigest struct { + inner any +} + +func setGrandpaConsensusDigest[Value GrandpaConsensusDigestValues](mvdt *GrandpaConsensusDigest, value Value) { + mvdt.inner = value +} + +func (mvdt *GrandpaConsensusDigest) SetValue(value any) (err error) { + switch value := value.(type) { + case GrandpaScheduledChange: + setGrandpaConsensusDigest(mvdt, value) + return + + case GrandpaForcedChange: + setGrandpaConsensusDigest(mvdt, value) + return + + case GrandpaOnDisabled: + setGrandpaConsensusDigest(mvdt, value) + return + + case GrandpaPause: + setGrandpaConsensusDigest(mvdt, value) + return + + case GrandpaResume: + setGrandpaConsensusDigest(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt GrandpaConsensusDigest) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case GrandpaScheduledChange: + return 1, mvdt.inner, nil + + case GrandpaForcedChange: + return 2, mvdt.inner, nil + + case GrandpaOnDisabled: + return 3, mvdt.inner, nil + + case GrandpaPause: + return 4, mvdt.inner, nil + + case GrandpaResume: + return 5, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt GrandpaConsensusDigest) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} +func (mvdt GrandpaConsensusDigest) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return *new(GrandpaScheduledChange), nil + + case 2: + return *new(GrandpaForcedChange), nil + + case 3: + return *new(GrandpaOnDisabled), nil + + case 4: + return *new(GrandpaPause), nil + + case 5: + return *new(GrandpaResume), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue } // NewGrandpaConsensusDigest constructs a vdt representing a grandpa consensus digest -func NewGrandpaConsensusDigest() scale.VaryingDataType { - return scale.MustNewVaryingDataType(GrandpaScheduledChange{}, GrandpaForcedChange{}, - GrandpaOnDisabled{}, GrandpaPause{}, GrandpaResume{}) +func NewGrandpaConsensusDigest() GrandpaConsensusDigest { + return GrandpaConsensusDigest{} } // GrandpaScheduledChange represents a GRANDPA scheduled authority change @@ -26,9 +175,6 @@ type GrandpaScheduledChange struct { Delay uint32 } -// Index returns VDT index -func (GrandpaScheduledChange) Index() uint { return 1 } - func (g GrandpaScheduledChange) String() string { return fmt.Sprintf("GrandpaScheduledChange{Auths=%v, Delay=%d", g.Auths, g.Delay) } @@ -43,9 +189,6 @@ type GrandpaForcedChange struct { Delay uint32 } -// Index returns VDT index -func (GrandpaForcedChange) Index() uint { return 2 } - func (g GrandpaForcedChange) String() string { return fmt.Sprintf("GrandpaForcedChange{BestFinalizedBlock=%d, Auths=%v, Delay=%d", g.BestFinalizedBlock, 
g.Auths, g.Delay) @@ -56,9 +199,6 @@ type GrandpaOnDisabled struct { ID uint64 } -// Index returns VDT index -func (GrandpaOnDisabled) Index() uint { return 3 } - func (g GrandpaOnDisabled) String() string { return fmt.Sprintf("GrandpaOnDisabled{ID=%d}", g.ID) } @@ -68,9 +208,6 @@ type GrandpaPause struct { Delay uint32 } -// Index returns VDT index -func (GrandpaPause) Index() uint { return 4 } - func (g GrandpaPause) String() string { return fmt.Sprintf("GrandpaPause{Delay=%d}", g.Delay) } @@ -80,9 +217,6 @@ type GrandpaResume struct { Delay uint32 } -// Index returns VDT index -func (GrandpaResume) Index() uint { return 5 } - func (g GrandpaResume) String() string { return fmt.Sprintf("GrandpaResume{Delay=%d}", g.Delay) } @@ -94,9 +228,6 @@ type NextEpochData struct { Randomness [RandomnessLength]byte } -// Index returns VDT index -func (NextEpochData) Index() uint { return 1 } //skipcq: GO-W1029 - func (d NextEpochData) String() string { //skipcq: GO-W1029 return fmt.Sprintf("NextEpochData Authorities=%v Randomness=%v", d.Authorities, d.Randomness) } @@ -114,9 +245,6 @@ type BABEOnDisabled struct { ID uint32 } -// Index returns VDT index -func (BABEOnDisabled) Index() uint { return 2 } - func (b BABEOnDisabled) String() string { return fmt.Sprintf("BABEOnDisabled{ID=%d}", b.ID) } @@ -129,9 +257,6 @@ type NextConfigDataV1 struct { SecondarySlots byte } -// Index returns VDT index -func (NextConfigDataV1) Index() uint { return 1 } //skipcq: GO-W1029 - func (d NextConfigDataV1) String() string { //skipcq: GO-W1029 return fmt.Sprintf("NextConfigData{C1=%d, C2=%d, SecondarySlots=%d}", d.C1, d.C2, d.SecondarySlots) @@ -147,26 +272,49 @@ func (d *NextConfigDataV1) ToConfigData() *ConfigData { //skipcq: GO-W1029 } // VersionedNextConfigData represents the enum of next config data consensus digest messages -type VersionedNextConfigData scale.VaryingDataType +type VersionedNextConfigDataValues interface { + NextConfigDataV1 +} -// Index returns VDT index -func (VersionedNextConfigData) Index() uint { return 3 } +type VersionedNextConfigData struct { + inner any +} -// Value returns the current VDT value -func (vncd *VersionedNextConfigData) Value() (val scale.VaryingDataTypeValue, err error) { - vdt := scale.VaryingDataType(*vncd) - return vdt.Value() +func setVersionedNextConfigData[Value VersionedNextConfigDataValues](mvdt *VersionedNextConfigData, value Value) { + mvdt.inner = value } -// Set updates the current VDT value to be `val` -func (vncd *VersionedNextConfigData) Set(val scale.VaryingDataTypeValue) (err error) { - vdt := scale.VaryingDataType(*vncd) - err = vdt.Set(val) - if err != nil { - return fmt.Errorf("setting varying data type value: %w", err) +func (mvdt *VersionedNextConfigData) SetValue(value any) (err error) { + switch value := value.(type) { + case NextConfigDataV1: + setVersionedNextConfigData(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") } - *vncd = VersionedNextConfigData(vdt) - return nil +} + +func (mvdt VersionedNextConfigData) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case NextConfigDataV1: + return 1, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt VersionedNextConfigData) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} +func (mvdt VersionedNextConfigData) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return *new(NextConfigDataV1), nil + + } + return nil, 
scale.ErrUnknownVaryingDataTypeValue } // String returns the string representation for the current VDT value @@ -181,7 +329,5 @@ func (vncd VersionedNextConfigData) String() string { // NewVersionedNextConfigData creates a new VersionedNextConfigData instance func NewVersionedNextConfigData() VersionedNextConfigData { - vdt := scale.MustNewVaryingDataType(NextConfigDataV1{}) - - return VersionedNextConfigData(vdt) + return VersionedNextConfigData{} } diff --git a/dot/types/consensus_digest_test.go b/dot/types/consensus_digest_test.go index ecd9e5f2c8..204eb67c9c 100644 --- a/dot/types/consensus_digest_test.go +++ b/dot/types/consensus_digest_test.go @@ -31,7 +31,7 @@ func TestBabeEncodeAndDecode(t *testing.T) { } var d = NewBabeConsensusDigest() - err = d.Set(NextEpochData{ + err = d.SetValue(NextEpochData{ Authorities: []AuthorityRaw{authA, authB}, Randomness: [32]byte{77, 88, 99}, }) diff --git a/dot/types/digest.go b/dot/types/digest.go index 3f3a4f921c..d18753b701 100644 --- a/dot/types/digest.go +++ b/dot/types/digest.go @@ -5,24 +5,123 @@ package types import ( "fmt" + "strings" "github.com/ChainSafe/gossamer/pkg/scale" ) // DigestItem is a varying date type that holds type identifier and a scaled encoded message payload. -type DigestItem struct { +type digestItem struct { ConsensusEngineID ConsensusEngineID Data []byte } +type DigestItem struct { + inner any +} + +type DigestItemValues interface { + PreRuntimeDigest | ConsensusDigest | SealDigest | RuntimeEnvironmentUpdated +} + +func newDigestItem[Value DigestItemValues](value Value) DigestItem { + item := DigestItem{} + setDigestItem[Value](&item, value) + return item +} + +func setDigestItem[Value DigestItemValues](mvdt *DigestItem, value Value) { + mvdt.inner = value +} + +func (mvdt *DigestItem) SetValue(value any) (err error) { + switch value := value.(type) { + case PreRuntimeDigest: + setDigestItem(mvdt, value) + return + case ConsensusDigest: + setDigestItem(mvdt, value) + return + case SealDigest: + setDigestItem(mvdt, value) + return + case RuntimeEnvironmentUpdated: + setDigestItem(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt DigestItem) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case PreRuntimeDigest: + return 6, mvdt.inner, nil + case ConsensusDigest: + return 4, mvdt.inner, nil + case SealDigest: + return 5, mvdt.inner, nil + case RuntimeEnvironmentUpdated: + return 8, mvdt.inner, nil + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt DigestItem) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt DigestItem) ValueAt(index uint) (value any, err error) { + switch index { + case 6: + return PreRuntimeDigest{}, nil + case 4: + return ConsensusDigest{}, nil + case 5: + return SealDigest{}, nil + case 8: + return RuntimeEnvironmentUpdated{}, nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + +func (mvdt DigestItem) String() string { + return fmt.Sprintf("%s", mvdt.inner) +} + // NewDigestItem returns a new VaryingDataType to represent a DigestItem -func NewDigestItem() scale.VaryingDataType { - return scale.MustNewVaryingDataType(PreRuntimeDigest{}, ConsensusDigest{}, SealDigest{}, RuntimeEnvironmentUpdated{}) +func NewDigestItem() DigestItem { + return DigestItem{} +} + +// Digest is slice of DigestItem +type Digest []DigestItem + +func (d *Digest) Add(values ...any) (err error) { + for _, value := range values { + item := DigestItem{} + err := 
item.SetValue(value) + if err != nil { + return err + } + appended := append(*d, item) + *d = appended + } + return nil +} + +func (d *Digest) String() string { + stringTypes := make([]string, len(*d)) + for i, vdt := range *d { + stringTypes[i] = vdt.String() + } + return "[" + strings.Join(stringTypes, ", ") + "]" } // NewDigest returns a new Digest as a varying data type slice. -func NewDigest() scale.VaryingDataTypeSlice { - return scale.NewVaryingDataTypeSlice(NewDigestItem()) +func NewDigest() Digest { + return []DigestItem{} } // ConsensusEngineID is a 4-character identifier of the consensus engine that produced the digest. @@ -45,10 +144,7 @@ var BabeEngineID = ConsensusEngineID{'B', 'A', 'B', 'E'} var GrandpaEngineID = ConsensusEngineID{'F', 'R', 'N', 'K'} // PreRuntimeDigest contains messages from the consensus engine to the runtime. -type PreRuntimeDigest DigestItem - -// Index returns VDT index -func (PreRuntimeDigest) Index() uint { return 6 } +type PreRuntimeDigest digestItem // NewBABEPreRuntimeDigest returns a PreRuntimeDigest with the BABE consensus ID func NewBABEPreRuntimeDigest(data []byte) *PreRuntimeDigest { @@ -64,10 +160,7 @@ func (d PreRuntimeDigest) String() string { } // ConsensusDigest contains messages from the runtime to the consensus engine. -type ConsensusDigest DigestItem - -// Index returns VDT index -func (ConsensusDigest) Index() uint { return 4 } +type ConsensusDigest digestItem // String returns the digest as a string func (d ConsensusDigest) String() string { @@ -75,10 +168,7 @@ func (d ConsensusDigest) String() string { } // SealDigest contains the seal or signature. This is only used by native code. -type SealDigest DigestItem - -// Index returns VDT index -func (SealDigest) Index() uint { return 5 } +type SealDigest digestItem // String returns the digest as a string func (d SealDigest) String() string { @@ -88,9 +178,6 @@ func (d SealDigest) String() string { // RuntimeEnvironmentUpdated contains is an indicator for the light clients that the runtime environment is updated type RuntimeEnvironmentUpdated struct{} -// Index returns VDT index -func (RuntimeEnvironmentUpdated) Index() uint { return 8 } - // String returns the digest as a string func (RuntimeEnvironmentUpdated) String() string { return "RuntimeEnvironmentUpdated" diff --git a/dot/types/digest_test.go b/dot/types/digest_test.go index 19f444ce1f..bdf59fb597 100644 --- a/dot/types/digest_test.go +++ b/dot/types/digest_test.go @@ -16,31 +16,32 @@ func Test_Digest_String(t *testing.T) { t.Parallel() testCases := map[string]struct { - digestBuilder func() scale.VaryingDataTypeSlice + digestBuilder func() Digest s string }{ "empty": { - digestBuilder: func() scale.VaryingDataTypeSlice { - return scale.VaryingDataTypeSlice{} + digestBuilder: func() Digest { + return Digest{} }, s: "[]", }, "all_digests": { - digestBuilder: func() scale.VaryingDataTypeSlice { - digest := NewDigest() - digest.Add(PreRuntimeDigest{ - ConsensusEngineID: ConsensusEngineID{'a', 'b', 'c', 'd'}, - Data: []byte{1, 2, 3, 4}, - }) - digest.Add(ConsensusDigest{ - ConsensusEngineID: ConsensusEngineID{'f', 'f', 'g', 'g'}, - Data: []byte{5, 6}, - }) - digest.Add(SealDigest{ - ConsensusEngineID: ConsensusEngineID{'x', 'y', 'w', 'z'}, - Data: []byte{7, 8}, - }) - digest.Add(RuntimeEnvironmentUpdated{}) + digestBuilder: func() Digest { + digest := Digest{ + newDigestItem(PreRuntimeDigest{ + ConsensusEngineID: ConsensusEngineID{'a', 'b', 'c', 'd'}, + Data: []byte{1, 2, 3, 4}, + }), + newDigestItem(ConsensusDigest{ + 
ConsensusEngineID: ConsensusEngineID{'f', 'f', 'g', 'g'}, + Data: []byte{5, 6}, + }), + newDigestItem(SealDigest{ + ConsensusEngineID: ConsensusEngineID{'x', 'y', 'w', 'z'}, + Data: []byte{7, 8}, + }), + newDigestItem(RuntimeEnvironmentUpdated{}), + } return digest }, s: "[" + @@ -107,7 +108,7 @@ func TestDecodeSingleDigest(t *testing.T) { } di := NewDigestItem() - err := di.Set(d) + err := di.SetValue(d) require.NoError(t, err) enc, err := scale.Marshal(di) @@ -132,7 +133,7 @@ func TestDecodeDigest(t *testing.T) { v := NewDigest() err := scale.Unmarshal(d, &v) require.NoError(t, err) - require.Equal(t, 3, len(v.Types)) + require.Equal(t, 3, len(v)) enc, err := scale.Marshal(v) require.NoError(t, err) @@ -147,7 +148,7 @@ func TestPreRuntimeDigest(t *testing.T) { } di := NewDigestItem() - err := di.Set(d) + err := di.SetValue(d) require.NoError(t, err) enc, err := scale.Marshal(di) @@ -174,7 +175,7 @@ func TestConsensusDigest(t *testing.T) { } di := NewDigestItem() - err := di.Set(d) + err := di.SetValue(d) require.NoError(t, err) enc, err := scale.Marshal(di) @@ -198,7 +199,7 @@ func TestRuntimeEnvironmentUpdatedDigest(t *testing.T) { d := RuntimeEnvironmentUpdated{} di := NewDigestItem() - err := di.Set(d) + err := di.SetValue(d) require.NoError(t, err) enc, err := scale.Marshal(di) @@ -225,7 +226,7 @@ func TestSealDigest(t *testing.T) { } di := NewDigestItem() - err := di.Set(d) + err := di.SetValue(d) require.NoError(t, err) enc, err := scale.Marshal(di) diff --git a/dot/types/grandpa.go b/dot/types/grandpa.go index 93e93f72db..677d7abfdf 100644 --- a/dot/types/grandpa.go +++ b/dot/types/grandpa.go @@ -211,44 +211,66 @@ type GrandpaEquivocation struct { // GrandpaEquivocationEnum is a wrapper object for GRANDPA equivocation proofs, useful for unifying prevote // and precommit equivocations under a common type. 
// https://github.com/paritytech/substrate/blob/fb22096d2ec6bf38e67ce811ad2c31415237a9a5/primitives/finality-grandpa/src/lib.rs#L272 //nolint:lll -type GrandpaEquivocationEnum scale.VaryingDataType +type GrandpaEquivocationEnum struct { + inner any +} +type GrandpaEquivocationEnumValues interface { + PreVote | PreCommit +} -// Set sets a VaryingDataTypeValue using the underlying VaryingDataType -func (ge *GrandpaEquivocationEnum) Set(value scale.VaryingDataTypeValue) (err error) { - vdt := scale.VaryingDataType(*ge) - err = vdt.Set(value) - if err != nil { - return err +func setGrandpaEquivocationEnum[Value GrandpaEquivocationEnumValues](mvdt *GrandpaEquivocationEnum, value Value) { + mvdt.inner = value +} + +func (mvdt *GrandpaEquivocationEnum) SetValue(value any) (err error) { + switch value := value.(type) { + case PreVote: + setGrandpaEquivocationEnum(mvdt, value) + return + case PreCommit: + setGrandpaEquivocationEnum(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") } - *ge = GrandpaEquivocationEnum(vdt) - return nil } -// Value will return the value from the underlying VaryingDataType -func (ge *GrandpaEquivocationEnum) Value() (value scale.VaryingDataTypeValue, err error) { - vdt := scale.VaryingDataType(*ge) - return vdt.Value() +func (mvdt GrandpaEquivocationEnum) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case PreVote: + return 0, mvdt.inner, nil + case PreCommit: + return 1, mvdt.inner, nil + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt GrandpaEquivocationEnum) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt GrandpaEquivocationEnum) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return PreVote{}, nil + case 1: + return PreCommit{}, nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue } // NewGrandpaEquivocation returns a new VaryingDataType to represent a grandpa Equivocation func NewGrandpaEquivocation() *GrandpaEquivocationEnum { - vdt := scale.MustNewVaryingDataType(PreVote{}, PreCommit{}) - ge := GrandpaEquivocationEnum(vdt) - return &ge + return &GrandpaEquivocationEnum{} } // PreVote equivocation type for a prevote type PreVote GrandpaEquivocation -// Index returns VDT index -func (PreVote) Index() uint { return 0 } - // PreCommit equivocation type for a precommit type PreCommit GrandpaEquivocation -// Index returns VDT index -func (PreCommit) Index() uint { return 1 } - // GrandpaOpaqueKeyOwnershipProof contains a key ownership proof for reporting equivocations // https://github.com/paritytech/substrate/blob/fb22096d2ec6bf38e67ce811ad2c31415237a9a5/primitives/finality-grandpa/src/lib.rs#L533 //nolint:lll type GrandpaOpaqueKeyOwnershipProof []byte diff --git a/dot/types/grandpa_test.go b/dot/types/grandpa_test.go index 6b42382397..e4357174bf 100644 --- a/dot/types/grandpa_test.go +++ b/dot/types/grandpa_test.go @@ -71,7 +71,7 @@ func TestInstance_GrandpaSubmitReportEquivocationUnsignedExtrinsicEncoding(t *te preVoteEquivocation := PreVote(grandpaEquivocation) equivocationEnum := NewGrandpaEquivocation() - err := equivocationEnum.Set(preVoteEquivocation) + err := equivocationEnum.SetValue(preVoteEquivocation) require.NoError(t, err) equivocationProof := GrandpaEquivocationProof{ diff --git a/dot/types/header.go b/dot/types/header.go index f1d8003d70..59344641e5 100644 --- a/dot/types/header.go +++ b/dot/types/header.go @@ -12,17 +12,17 @@ import ( // Header is a state block header type Header struct { - 
ParentHash common.Hash `json:"parentHash"` - Number uint `json:"number"` - StateRoot common.Hash `json:"stateRoot"` - ExtrinsicsRoot common.Hash `json:"extrinsicsRoot"` - Digest scale.VaryingDataTypeSlice `json:"digest"` + ParentHash common.Hash `json:"parentHash"` + Number uint `json:"number"` + StateRoot common.Hash `json:"stateRoot"` + ExtrinsicsRoot common.Hash `json:"extrinsicsRoot"` + Digest Digest `json:"digest"` hash common.Hash } // NewHeader creates a new block header and sets its hash field func NewHeader(parentHash, stateRoot, extrinsicsRoot common.Hash, - number uint, digest scale.VaryingDataTypeSlice) (blockHeader *Header) { + number uint, digest Digest) (blockHeader *Header) { blockHeader = &Header{ ParentHash: parentHash, Number: number, @@ -36,9 +36,7 @@ func NewHeader(parentHash, stateRoot, extrinsicsRoot common.Hash, // NewEmptyHeader returns a new header with all zero values func NewEmptyHeader() *Header { - return &Header{ - Digest: NewDigest(), - } + return &Header{} } // Exists returns a boolean indicating if the header exists @@ -52,7 +50,7 @@ func (bh *Header) Empty() bool { if !bh.StateRoot.IsEmpty() || !bh.ExtrinsicsRoot.IsEmpty() || !bh.ParentHash.IsEmpty() { return false } - return bh.Number == 0 && len(bh.Digest.Types) == 0 + return bh.Number == 0 && len(bh.Digest) == 0 } // DeepCopy returns a deep copy of the header to prevent side effects down the road @@ -64,18 +62,9 @@ func (bh *Header) DeepCopy() (*Header, error) { cp.Number = bh.Number - if len(bh.Digest.Types) > 0 { + if len(bh.Digest) > 0 { cp.Digest = NewDigest() - for _, d := range bh.Digest.Types { - digestValue, err := d.Value() - if err != nil { - return nil, fmt.Errorf("getting digest type value: %w", err) - } - err = cp.Digest.Add(digestValue) - if err != nil { - return nil, err - } - } + cp.Digest = append(cp.Digest, bh.Digest...) 
} return cp, nil diff --git a/lib/babe/build_integration_test.go b/lib/babe/build_integration_test.go index 36566ba7ac..bfe2ff4a26 100644 --- a/lib/babe/build_integration_test.go +++ b/lib/babe/build_integration_test.go @@ -82,7 +82,7 @@ func TestBuildBlock_ok(t *testing.T) { require.Equal(t, expectedBlockHeader.Number, block.Header.Number) require.NotEqual(t, block.Header.StateRoot, emptyHash) require.NotEqual(t, block.Header.ExtrinsicsRoot, emptyHash) - require.Equal(t, 3, len(block.Header.Digest.Types)) + require.Equal(t, 3, len(block.Header.Digest)) // confirm block body is correct extsBytes := types.ExtrinsicsArrayToBytesArray(block.Body) diff --git a/lib/babe/errors.go b/lib/babe/errors.go index 46ea569a9d..26b35a49a6 100644 --- a/lib/babe/errors.go +++ b/lib/babe/errors.go @@ -74,10 +74,6 @@ var ( errLastDigestItemNotSeal = errors.New("last digest item is not seal") errLaggingSlot = errors.New("current slot is smaller than slot of best block") errNoDigest = errors.New("no digest provided") - - other Other - invalidCustom InvalidCustom - unknownCustom UnknownCustom ) // A DispatchOutcomeError is outcome of dispatching the extrinsic @@ -113,7 +109,7 @@ var ( errBadSigner = errors.New("invalid signing address") ) -func newUnknownError(data scale.VaryingDataTypeValue) error { +func newUnknownError(data any) error { return fmt.Errorf("unknown error: %d", data) } @@ -129,25 +125,16 @@ func (e UnmarshalError) Error() string { // Other Some error occurred type Other string -// Index returns VDT index -func (Other) Index() uint { return 0 } - func (o Other) String() string { return string(o) } // CannotLookup Failed to lookup some data type CannotLookup struct{} -// Index returns VDT index -func (CannotLookup) Index() uint { return 1 } - func (CannotLookup) String() string { return "cannot lookup" } // BadOrigin A bad origin type BadOrigin struct{} -// Index returns VDT index -func (BadOrigin) Index() uint { return 2 } - func (BadOrigin) String() string { return "bad origin" } // Module A custom error in a module @@ -157,9 +144,6 @@ type Module struct { Message *string } -// Index returns VDT index -func (Module) Index() uint { return 3 } - func (err Module) String() string { message := "nil" if err.Message != nil { @@ -171,116 +155,74 @@ func (err Module) String() string { // ValidityCannotLookup Could not lookup some information that is required to validate the transaction type ValidityCannotLookup struct{} -// Index returns VDT index -func (ValidityCannotLookup) Index() uint { return 0 } - func (ValidityCannotLookup) String() string { return "validity cannot lookup" } // NoUnsignedValidator No validator found for the given unsigned transaction type NoUnsignedValidator struct{} -// Index returns VDT index -func (NoUnsignedValidator) Index() uint { return 1 } - func (NoUnsignedValidator) String() string { return "no unsigned validator" } // UnknownCustom Any other custom unknown validity that is not covered type UnknownCustom uint8 -// Index returns VDT index -func (UnknownCustom) Index() uint { return 2 } - func (uc UnknownCustom) String() string { return fmt.Sprintf("UnknownCustom(%d)", uc) } // Call The call of the transaction is not expected type Call struct{} -// Index returns VDT index -func (Call) Index() uint { return 0 } - func (Call) String() string { return "call" } // Payment General error to do with the inability to pay some fees (e.g. 
account balance too low) type Payment struct{} -// Index returns VDT index -func (Payment) Index() uint { return 1 } - func (Payment) String() string { return "payment" } // Future General error to do with the transaction not yet being valid (e.g. nonce too high) type Future struct{} -// Index returns VDT index -func (Future) Index() uint { return 2 } - func (Future) String() string { return "future" } // Stale General error to do with the transaction being outdated (e.g. nonce too low) type Stale struct{} -// Index returns VDT index -func (Stale) Index() uint { return 3 } - func (Stale) String() string { return "stale" } // BadProof General error to do with the transaction’s proofs (e.g. signature) type BadProof struct{} -// Index returns VDT index -func (BadProof) Index() uint { return 4 } - func (BadProof) String() string { return "bad proof" } // AncientBirthBlock The transaction birth block is ancient type AncientBirthBlock struct{} -// Index returns VDT index -func (AncientBirthBlock) Index() uint { return 5 } - func (AncientBirthBlock) String() string { return "ancient birth block" } // ExhaustsResources The transaction would exhaust the resources of current block type ExhaustsResources struct{} -// Index returns VDT index -func (ExhaustsResources) Index() uint { return 6 } - func (ExhaustsResources) String() string { return "exhausts resources" } // InvalidCustom Any other custom invalid validity that is not covered type InvalidCustom uint8 -// Index returns VDT index -func (InvalidCustom) Index() uint { return 7 } - func (ic InvalidCustom) String() string { return fmt.Sprintf("InvalidCustom(%d)", ic) } // BadMandatory An extrinsic with a Mandatory dispatch resulted in Error type BadMandatory struct{} -// Index returns VDT index -func (BadMandatory) Index() uint { return 8 } - func (BadMandatory) String() string { return "bad mandatory" } // MandatoryDispatch A transaction with a mandatory dispatch type MandatoryDispatch struct{} -// Index returns VDT index -func (MandatoryDispatch) Index() uint { return 9 } - func (MandatoryDispatch) String() string { return "mandatory dispatch" } // BadSigner A transaction with a mandatory dispatch type BadSigner struct{} -// Index returns VDT index -func (BadSigner) Index() uint { return 10 } - func (BadSigner) String() string { return "invalid signing address" } -func determineErrType(vdt scale.VaryingDataType) (err error) { +func determineErrType(vdt scale.EncodeVaryingDataType) (err error) { vdtVal, err := vdt.Value() if err != nil { return fmt.Errorf("getting vdt value: %w", err) @@ -330,14 +272,241 @@ func determineErrType(vdt scale.VaryingDataType) (err error) { return err } -func determineErr(res []byte) error { - dispatchError := scale.MustNewVaryingDataType(other, CannotLookup{}, BadOrigin{}, Module{}) - invalid := scale.MustNewVaryingDataType(Call{}, Payment{}, Future{}, Stale{}, BadProof{}, AncientBirthBlock{}, - ExhaustsResources{}, invalidCustom, BadMandatory{}, MandatoryDispatch{}, BadSigner{}) - unknown := scale.MustNewVaryingDataType(ValidityCannotLookup{}, NoUnsignedValidator{}, unknownCustom) +type dispatchErrorValues interface { + Other | CannotLookup | BadOrigin | Module +} +type dispatchError struct { + inner any +} + +func setdispatchError[Value dispatchErrorValues](mvdt *dispatchError, value Value) { + mvdt.inner = value +} + +func (mvdt *dispatchError) SetValue(value any) (err error) { + switch value := value.(type) { + case Other: + setdispatchError(mvdt, value) + return + case CannotLookup: + setdispatchError(mvdt, 
value) + return + case BadOrigin: + setdispatchError(mvdt, value) + return + case Module: + setdispatchError(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt dispatchError) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case Other: + return 0, mvdt.inner, nil + case CannotLookup: + return 1, mvdt.inner, nil + case BadOrigin: + return 2, mvdt.inner, nil + case Module: + return 3, mvdt.inner, nil + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt dispatchError) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} - okRes := scale.NewResult(nil, dispatchError) - errRes := scale.NewResult(invalid, unknown) +func (mvdt dispatchError) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(Other), nil + case 1: + return *new(CannotLookup), nil + case 2: + return *new(BadOrigin), nil + case 3: + return *new(Module), nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + +type invalidValues interface { + Call | Payment | Future | Stale | BadProof | AncientBirthBlock | + ExhaustsResources | InvalidCustom | BadMandatory | MandatoryDispatch | BadSigner +} +type invalid struct { + inner any +} + +func setinvalid[Value invalidValues](mvdt *invalid, value Value) { + mvdt.inner = value +} + +func (mvdt *invalid) SetValue(value any) (err error) { + switch value := value.(type) { + case Call: + setinvalid(mvdt, value) + return + case Payment: + setinvalid(mvdt, value) + return + case Future: + setinvalid(mvdt, value) + return + case Stale: + setinvalid(mvdt, value) + return + case BadProof: + setinvalid(mvdt, value) + return + case AncientBirthBlock: + setinvalid(mvdt, value) + return + case ExhaustsResources: + setinvalid(mvdt, value) + return + case InvalidCustom: + setinvalid(mvdt, value) + return + case BadMandatory: + setinvalid(mvdt, value) + return + case MandatoryDispatch: + setinvalid(mvdt, value) + return + case BadSigner: + setinvalid(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt invalid) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case Call: + return 0, mvdt.inner, nil + case Payment: + return 1, mvdt.inner, nil + case Future: + return 2, mvdt.inner, nil + case Stale: + return 3, mvdt.inner, nil + case BadProof: + return 4, mvdt.inner, nil + case AncientBirthBlock: + return 5, mvdt.inner, nil + case ExhaustsResources: + return 6, mvdt.inner, nil + case InvalidCustom: + return 7, mvdt.inner, nil + case BadMandatory: + return 8, mvdt.inner, nil + case MandatoryDispatch: + return 9, mvdt.inner, nil + case BadSigner: + return 10, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt invalid) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} +func (mvdt invalid) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(Call), nil + case 1: + return *new(Payment), nil + case 2: + return *new(Future), nil + case 3: + return *new(Stale), nil + case 4: + return *new(BadProof), nil + case 5: + return *new(AncientBirthBlock), nil + case 6: + return *new(ExhaustsResources), nil + case 7: + return *new(InvalidCustom), nil + case 8: + return *new(BadMandatory), nil + case 9: + return *new(MandatoryDispatch), nil + case 10: + return *new(BadSigner), nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + +type unknownValues interface { + 
ValidityCannotLookup | NoUnsignedValidator | UnknownCustom +} +type unknown struct { + inner any +} + +func setunknown[Value unknownValues](mvdt *unknown, value Value) { + mvdt.inner = value +} + +func (mvdt *unknown) SetValue(value any) (err error) { + switch value := value.(type) { + case ValidityCannotLookup: + setunknown(mvdt, value) + return + case NoUnsignedValidator: + setunknown(mvdt, value) + return + case UnknownCustom: + setunknown(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt unknown) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case ValidityCannotLookup: + return 0, mvdt.inner, nil + case NoUnsignedValidator: + return 1, mvdt.inner, nil + case UnknownCustom: + return 2, mvdt.inner, nil + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt unknown) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt unknown) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(ValidityCannotLookup), nil + case 1: + return *new(NoUnsignedValidator), nil + case 2: + return *new(UnknownCustom), nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + +func determineErr(res []byte) error { + okRes := scale.NewResult(nil, dispatchError{}) + errRes := scale.NewResult(invalid{}, unknown{}) result := scale.NewResult(okRes, errRes) err := scale.Unmarshal(res, &result) @@ -354,12 +523,12 @@ func determineErr(res []byte) error { if err != nil { switch err := err.(type) { case scale.WrappedErr: - return determineErrType(err.Err.(scale.VaryingDataType)) + return determineErrType(err.Err.(scale.EncodeVaryingDataType)) default: return errInvalidResult } } else { - return determineErrType(ok.(scale.VaryingDataType)) + return determineErrType(ok.(scale.EncodeVaryingDataType)) } default: return errInvalidResult @@ -371,7 +540,7 @@ func determineErr(res []byte) error { if err != nil { switch err := err.(type) { case scale.WrappedErr: - return determineErrType(err.Err.(scale.VaryingDataType)) + return determineErrType(err.Err.(scale.EncodeVaryingDataType)) default: return errInvalidResult } diff --git a/lib/babe/inherents/parachain_inherents.go b/lib/babe/inherents/parachain_inherents.go index b1731abc91..b130128446 100644 --- a/lib/babe/inherents/parachain_inherents.go +++ b/lib/babe/inherents/parachain_inherents.go @@ -18,35 +18,65 @@ func (s signature) String() string { return fmt.Sprintf("0x%x", s[:]) } // validityAttestation is an implicit or explicit attestation to the validity of a parachain // candidate. 
-type validityAttestation scale.VaryingDataType - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (va *validityAttestation) Set(val scale.VaryingDataTypeValue) (err error) { - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*va) - err = vdt.Set(val) - if err != nil { - return fmt.Errorf("setting value to varying data type: %w", err) +type validityAttestationValues interface { + implicit | explicit +} + +type validityAttestation struct { + inner any +} + +func setvalidityAttestation[Value validityAttestationValues](mvdt *validityAttestation, value Value) { + mvdt.inner = value +} + +func (mvdt *validityAttestation) SetValue(value any) (err error) { + switch value := value.(type) { + case implicit: + setvalidityAttestation(mvdt, value) + return + + case explicit: + setvalidityAttestation(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") } - // store original ParentVDT with VaryingDataType that has been set - *va = validityAttestation(vdt) - return nil } -// Value returns the value from the underlying VaryingDataType -func (va *validityAttestation) Value() (scale.VaryingDataTypeValue, error) { - vdt := scale.VaryingDataType(*va) - return vdt.Value() +func (mvdt validityAttestation) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case implicit: + return 1, mvdt.inner, nil + + case explicit: + return 2, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue } -// implicit is for implicit attestation. -type implicit validatorSignature //skipcq +func (mvdt validityAttestation) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt validityAttestation) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return *new(implicit), nil -// Index returns VDT index -func (implicit) Index() uint { //skipcq - return 1 + case 2: + return *new(explicit), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue } +// implicit is for implicit attestation. +type implicit validatorSignature //skipcq + func (i implicit) String() string { //skipcq:SCC-U1000 return fmt.Sprintf("implicit(%s)", validatorSignature(i)) } @@ -54,88 +84,153 @@ func (i implicit) String() string { //skipcq:SCC-U1000 // explicit is for explicit attestation. type explicit validatorSignature //skipcq -// Index returns VDT index -func (explicit) Index() uint { //skipcq - return 2 -} - func (e explicit) String() string { //skipcq:SCC-U1000 return fmt.Sprintf("explicit(%s)", validatorSignature(e)) } // newValidityAttestation creates a ValidityAttestation varying data type. func newValidityAttestation() validityAttestation { //skipcq - vdt, err := scale.NewVaryingDataType(implicit{}, explicit{}) - if err != nil { - panic(err) - } - - return validityAttestation(vdt) + return validityAttestation{} } // disputeStatement is a statement about a candidate, to be used within the dispute // resolution process. Statements are either in favour of the candidate's validity // or against it. 
-type disputeStatement scale.VaryingDataType - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (d *disputeStatement) Set(val scale.VaryingDataTypeValue) (err error) { - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*d) - err = vdt.Set(val) - if err != nil { - return fmt.Errorf("setting value to varying data type: %w", err) +type disputeStatementValues interface { + validDisputeStatementKind | invalidDisputeStatementKind +} + +type disputeStatement struct { + inner any +} + +func setdisputeStatement[Value disputeStatementValues](mvdt *disputeStatement, value Value) { + mvdt.inner = value +} + +func (mvdt *disputeStatement) SetValue(value any) (err error) { + switch value := value.(type) { + case validDisputeStatementKind: + setdisputeStatement(mvdt, value) + return + + case invalidDisputeStatementKind: + setdisputeStatement(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") } - // store original ParentVDT with VaryingDataType that has been set - *d = disputeStatement(vdt) - return nil } -// Value will return value from underying VaryingDataType -func (d *disputeStatement) Value() (scale.VaryingDataTypeValue, error) { - vdt := scale.VaryingDataType(*d) - return vdt.Value() +func (mvdt disputeStatement) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case validDisputeStatementKind: + return 0, mvdt.inner, nil + + case invalidDisputeStatementKind: + return 1, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt disputeStatement) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt disputeStatement) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(validDisputeStatementKind), nil + + case 1: + return *new(invalidDisputeStatementKind), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue } // validDisputeStatementKind is a kind of statements of validity on a candidate. 
-type validDisputeStatementKind scale.VaryingDataType //skipcq +type validDisputeStatementKind struct { + inner any +} +type validDisputeStatementKindValues interface { + explicitValidDisputeStatementKind | backingSeconded | backingValid | approvalChecking +} -// Index returns VDT index -func (validDisputeStatementKind) Index() uint { //skipcq - return 0 +func setvalidDisputeStatementKind[Value validDisputeStatementKindValues](mvdt *validDisputeStatementKind, value Value) { + mvdt.inner = value } -func (validDisputeStatementKind) String() string { //skipcq - return "valid dispute statement kind" +func (mvdt *validDisputeStatementKind) SetValue(value any) (err error) { + switch value := value.(type) { + case explicitValidDisputeStatementKind: + setvalidDisputeStatementKind(mvdt, value) + return + + case backingSeconded: + setvalidDisputeStatementKind(mvdt, value) + return + + case backingValid: + setvalidDisputeStatementKind(mvdt, value) + return + + case approvalChecking: + setvalidDisputeStatementKind(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") + } } -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (v *validDisputeStatementKind) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*v) - err = vdt.Set(val) - if err != nil { - return fmt.Errorf("setting value to varying data type: %w", err) +func (mvdt validDisputeStatementKind) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case explicitValidDisputeStatementKind: + return 0, mvdt.inner, nil + + case backingSeconded: + return 1, mvdt.inner, nil + + case backingValid: + return 2, mvdt.inner, nil + + case approvalChecking: + return 3, mvdt.inner, nil + } - // store original ParentVDT with VaryingDataType that has been set - *v = validDisputeStatementKind(vdt) - return nil + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue } -// Value will return value from underying VaryingDataType -func (v *validDisputeStatementKind) Value() (scale.VaryingDataTypeValue, error) { //skipcq - vdt := scale.VaryingDataType(*v) - return vdt.Value() +func (mvdt validDisputeStatementKind) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return } -// ExplicitValidDisputeStatementKind is an explicit statement issued as part of a dispute. -type explicitValidDisputeStatementKind struct{} //skipcq +func (mvdt validDisputeStatementKind) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(explicitValidDisputeStatementKind), nil + + case 1: + return *new(backingSeconded), nil + + case 2: + return *new(backingValid), nil + + case 3: + return *new(approvalChecking), nil -// Index returns VDT index -func (explicitValidDisputeStatementKind) Index() uint { //skipcq - return 0 + } + return nil, scale.ErrUnknownVaryingDataTypeValue } +// ExplicitValidDisputeStatementKind is an explicit statement issued as part of a dispute. +type explicitValidDisputeStatementKind struct{} //skipcq + func (explicitValidDisputeStatementKind) String() string { //skipcq:SCC-U1000 return "explicit valid dispute statement kind" } @@ -143,11 +238,6 @@ func (explicitValidDisputeStatementKind) String() string { //skipcq:SCC-U1000 // backingSeconded is a seconded statement on a candidate from the backing phase. 
type backingSeconded common.Hash //skipcq -// Index returns VDT index -func (backingSeconded) Index() uint { //skipcq - return 1 -} - func (b backingSeconded) String() string { //skipcq:SCC-U1000 return fmt.Sprintf("backingSeconded(%s)", common.Hash(b)) } @@ -155,11 +245,6 @@ func (b backingSeconded) String() string { //skipcq:SCC-U1000 // backingValid is a valid statement on a candidate from the backing phase. type backingValid common.Hash //skipcq -// Index returns VDT index -func (backingValid) Index() uint { //skipcq - return 2 -} - func (b backingValid) String() string { //skipcq:SCC-U1000 return fmt.Sprintf("backingValid(%s)", common.Hash(b)) } @@ -167,76 +252,70 @@ func (b backingValid) String() string { //skipcq:SCC-U1000 // approvalChecking is an approval vote from the approval checking phase. type approvalChecking struct{} //skipcq -// Index returns VDT index -func (approvalChecking) Index() uint { //skipcq - return 3 -} - func (approvalChecking) String() string { return "approval checking" } // invalidDisputeStatementKind is a kind of statements of invalidity on a candidate. -type invalidDisputeStatementKind scale.VaryingDataType //skipcq +type invalidDisputeStatementKindValues interface { + explicitInvalidDisputeStatementKind +} -// Index returns VDT index -func (invalidDisputeStatementKind) Index() uint { //skipcq - return 1 +type invalidDisputeStatementKind struct { + inner any } -func (invalidDisputeStatementKind) String() string { //skipcq - return "invalid dispute statement kind" +func setinvalidDisputeStatementKind[Value invalidDisputeStatementKindValues]( + mvdt *invalidDisputeStatementKind, value Value, +) { + mvdt.inner = value } -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (in *invalidDisputeStatementKind) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*in) - err = vdt.Set(val) - if err != nil { - return fmt.Errorf("setting value to varying data type: %w", err) +func (mvdt *invalidDisputeStatementKind) SetValue(value any) (err error) { + switch value := value.(type) { + case explicitInvalidDisputeStatementKind: + setinvalidDisputeStatementKind(mvdt, value) + return + + default: + return fmt.Errorf("unsupported type") } - // store original ParentVDT with VaryingDataType that has been set - *in = invalidDisputeStatementKind(vdt) - return nil } -// Value will return value from underying VaryingDataType -func (in *invalidDisputeStatementKind) Value() (scale.VaryingDataTypeValue, error) { //skipcq - vdt := scale.VaryingDataType(*in) - return vdt.Value() +func (mvdt invalidDisputeStatementKind) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case explicitInvalidDisputeStatementKind: + return 0, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue } -// explicitInvalidDisputeStatementKind is an explicit statement issued as part of a dispute. 
-type explicitInvalidDisputeStatementKind struct{} //skipcq +func (mvdt invalidDisputeStatementKind) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} +func (mvdt invalidDisputeStatementKind) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(explicitInvalidDisputeStatementKind), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} -// Index returns VDT index -func (explicitInvalidDisputeStatementKind) Index() uint { //skipcq - return 0 +func (invalidDisputeStatementKind) String() string { //skipcq + return "invalid dispute statement kind" } +// explicitInvalidDisputeStatementKind is an explicit statement issued as part of a dispute. +type explicitInvalidDisputeStatementKind struct{} //skipcq + func (explicitInvalidDisputeStatementKind) String() string { //skipcq:SCC-U1000 return "explicit invalid dispute statement kind" } // newDisputeStatement create a new DisputeStatement varying data type. func newDisputeStatement() disputeStatement { //skipcq - idsKind, err := scale.NewVaryingDataType(explicitInvalidDisputeStatementKind{}) - if err != nil { - panic(err) - } - - vdsKind, err := scale.NewVaryingDataType( - explicitValidDisputeStatementKind{}, backingSeconded{}, backingValid{}, approvalChecking{}) - if err != nil { - panic(err) - } - - vdt, err := scale.NewVaryingDataType( - validDisputeStatementKind(vdsKind), invalidDisputeStatementKind(idsKind)) - if err != nil { - panic(err) - } - - return disputeStatement(vdt) + return disputeStatement{} } // collatorID is the collator's relay-chain account ID diff --git a/lib/babe/inherents/parachain_inherents_test.go b/lib/babe/inherents/parachain_inherents_test.go index 820949d5b8..9e4cecc039 100644 --- a/lib/babe/inherents/parachain_inherents_test.go +++ b/lib/babe/inherents/parachain_inherents_test.go @@ -18,7 +18,7 @@ func TestValidDisputeStatementKind(t *testing.T) { testCases := []struct { name string - enumValue scale.VaryingDataTypeValue + enumValue any encodingValue []byte }{ { @@ -49,11 +49,9 @@ func TestValidDisputeStatementKind(t *testing.T) { t.Run(c.name, func(t *testing.T) { t.Parallel() - vdsKind, err := scale.NewVaryingDataType( - explicitValidDisputeStatementKind{}, backingSeconded{}, backingValid{}, approvalChecking{}) - require.NoError(t, err) + vdsKind := validDisputeStatementKind{} - err = vdsKind.Set(c.enumValue) + err := vdsKind.SetValue(c.enumValue) require.NoError(t, err) bytes, err := scale.Marshal(vdsKind) @@ -69,7 +67,7 @@ func TestInvalidDisputeStatementKind(t *testing.T) { testCases := []struct { name string - enumValue scale.VaryingDataTypeValue + enumValue any encodingValue []byte }{ { @@ -84,14 +82,12 @@ func TestInvalidDisputeStatementKind(t *testing.T) { t.Run(c.name, func(t *testing.T) { t.Parallel() - invalidDisputeStatementKind, err := scale.NewVaryingDataType( - explicitInvalidDisputeStatementKind{}) - require.NoError(t, err) + idsKind := invalidDisputeStatementKind{} - err = invalidDisputeStatementKind.Set(c.enumValue) + err := idsKind.SetValue(c.enumValue) require.NoError(t, err) - bytes, err := scale.Marshal(invalidDisputeStatementKind) + bytes, err := scale.Marshal(idsKind) require.NoError(t, err) require.Equal(t, c.encodingValue, bytes) @@ -110,15 +106,13 @@ func TestDisputeStatement(t *testing.T) { { name: "Valid_Explicit", vdtBuilder: func(t *testing.T) disputeStatement { - vdsKind, err := scale.NewVaryingDataType( - explicitValidDisputeStatementKind{}, backingSeconded{}, backingValid{}, approvalChecking{}) - 
require.NoError(t, err) + vdsKind := validDisputeStatementKind{} - err = vdsKind.Set(explicitValidDisputeStatementKind{}) + err := vdsKind.SetValue(explicitValidDisputeStatementKind{}) require.NoError(t, err) ds := newDisputeStatement() - err = ds.Set(validDisputeStatementKind(vdsKind)) + err = ds.SetValue(vdsKind) require.NoError(t, err) return ds @@ -129,16 +123,13 @@ func TestDisputeStatement(t *testing.T) { { name: "Valid_ApprovalChecking", vdtBuilder: func(t *testing.T) disputeStatement { - vdsKind, err := scale.NewVaryingDataType( - explicitValidDisputeStatementKind{}, backingSeconded{}, backingValid{}, approvalChecking{}, - ) - require.NoError(t, err) + vdsKind := validDisputeStatementKind{} - err = vdsKind.Set(approvalChecking{}) + err := vdsKind.SetValue(approvalChecking{}) require.NoError(t, err) ds := newDisputeStatement() - err = ds.Set(validDisputeStatementKind(vdsKind)) + err = ds.SetValue(vdsKind) require.NoError(t, err) return ds @@ -148,16 +139,13 @@ func TestDisputeStatement(t *testing.T) { { name: "Valid_BackingSeconded", vdtBuilder: func(t *testing.T) disputeStatement { - vdsKind, err := scale.NewVaryingDataType( - explicitValidDisputeStatementKind{}, backingSeconded{}, backingValid{}, approvalChecking{}, - ) - require.NoError(t, err) + vdsKind := validDisputeStatementKind{} - err = vdsKind.Set(backingSeconded(common.Hash{})) + err := vdsKind.SetValue(backingSeconded(common.Hash{})) require.NoError(t, err) ds := newDisputeStatement() - err = ds.Set(validDisputeStatementKind(vdsKind)) + err = ds.SetValue(vdsKind) require.NoError(t, err) return ds @@ -168,16 +156,13 @@ func TestDisputeStatement(t *testing.T) { { name: "Invalid_Explicit", vdtBuilder: func(t *testing.T) disputeStatement { - idsKind, err := scale.NewVaryingDataType( - explicitInvalidDisputeStatementKind{}, - ) - require.NoError(t, err) + idsKind := invalidDisputeStatementKind{} - err = idsKind.Set(explicitInvalidDisputeStatementKind{}) + err := idsKind.SetValue(explicitInvalidDisputeStatementKind{}) require.NoError(t, err) disputeStatement := newDisputeStatement() - err = disputeStatement.Set(invalidDisputeStatementKind(idsKind)) + err = disputeStatement.SetValue(idsKind) require.NoError(t, err) return disputeStatement @@ -212,7 +197,7 @@ func TestValidityAttestation(t *testing.T) { testCases := []struct { name string - enumValue scale.VaryingDataTypeValue + enumValue any encodingValue []byte }{ { @@ -233,7 +218,7 @@ func TestValidityAttestation(t *testing.T) { t.Parallel() validityAttestation := newValidityAttestation() - err := validityAttestation.Set(c.enumValue) + err := validityAttestation.SetValue(c.enumValue) require.NoError(t, err) bytes, err := scale.Marshal(validityAttestation) diff --git a/lib/babe/verify.go b/lib/babe/verify.go index 8f4841eb04..36d796465f 100644 --- a/lib/babe/verify.go +++ b/lib/babe/verify.go @@ -249,15 +249,15 @@ func newVerifier(blockState BlockState, slotState SlotState, func (b *verifier) verifyAuthorshipRight(header *types.Header) error { // header should have 2 digest items (possibly more in the future) // first item should be pre-digest, second should be seal - if len(header.Digest.Types) < 2 { + if len(header.Digest) < 2 { return errMissingDigestItems } logger.Tracef("beginning BABE authorship right verification for block %s", header.Hash()) // check for valid seal by verifying signature - preDigestItem := header.Digest.Types[0] - sealItem := header.Digest.Types[len(header.Digest.Types)-1] + preDigestItem := header.Digest[0] + sealItem := 
header.Digest[len(header.Digest)-1] preDigestItemValue, err := preDigestItem.Value() if err != nil { @@ -302,7 +302,7 @@ func (b *verifier) verifyAuthorshipRight(header *types.Header) error { // remove seal before verifying signature h := types.NewDigest() - for _, val := range header.Digest.Types[:len(header.Digest.Types)-1] { + for _, val := range header.Digest[:len(header.Digest)-1] { digestValue, err := val.Value() if err != nil { return fmt.Errorf("getting digest type value: %w", err) @@ -419,7 +419,7 @@ func (b *verifier) verifyBlockEquivocation(header *types.Header) (bool, error) { return true, nil } -func (b *verifier) verifyPreRuntimeDigest(digest *types.PreRuntimeDigest) (scale.VaryingDataTypeValue, error) { +func (b *verifier) verifyPreRuntimeDigest(digest *types.PreRuntimeDigest) (any, error) { babePreDigest, err := types.DecodeBabePreDigest(digest.Data) if err != nil { return nil, err @@ -524,11 +524,11 @@ func (b *verifier) verifyPrimarySlotWinner(authorityIndex uint32, } func getAuthorityIndexAndSlot(header *types.Header) (authIdx uint32, slot uint64, err error) { - if len(header.Digest.Types) == 0 { + if len(header.Digest) == 0 { return 0, 0, fmt.Errorf("for block hash %s: %w", header.Hash(), errNoDigest) } - digestValue, err := header.Digest.Types[0].Value() + digestValue, err := header.Digest[0].Value() if err != nil { return 0, 0, fmt.Errorf("getting first digest type value: %w", err) } diff --git a/lib/babe/verify_integration_test.go b/lib/babe/verify_integration_test.go index 480731817d..8e0c49b692 100644 --- a/lib/babe/verify_integration_test.go +++ b/lib/babe/verify_integration_test.go @@ -145,7 +145,9 @@ func TestVerificationManager_VerifyBlock_Secondary(t *testing.T) { secondaryDigest := createSecondaryVRFPreDigest(t, keyring.Alice().(*sr25519.Keypair), 0, uint64(0), uint64(0), Randomness{}) babeDigest := types.NewBabeDigest() - err = babeDigest.Set(secondaryDigest) + // NOTE: I think this was get encoded incorrectly before the VDT interface change. 
+ // *types.BabeSecondaryVRFPreDigest was being passed in and encoded later + err = babeDigest.SetValue(*secondaryDigest) require.NoError(t, err) encodedBabeDigest, err := scale.Marshal(babeDigest) @@ -179,7 +181,7 @@ func TestVerificationManager_VerifyBlock_Secondary(t *testing.T) { Body: nil, } err = vm.VerifyBlock(&block.Header) - require.EqualError(t, err, "failed to verify pre-runtime digest: block producer is not in authority set") + require.EqualError(t, err, "invalid signature length") } func TestVerificationManager_VerifyBlock_CurrentEpoch(t *testing.T) { @@ -737,14 +739,14 @@ func issueConsensusDigestsBlockFromGenesis(t *testing.T, genesisHeader *types.He require.NoError(t, err) babeConsensusDigestNextEpoch := types.NewBabeConsensusDigest() - require.NoError(t, babeConsensusDigestNextEpoch.Set(nextEpoch)) + require.NoError(t, babeConsensusDigestNextEpoch.SetValue(nextEpoch)) babeConsensusDigestNextConfigData := types.NewBabeConsensusDigest() versionedNextConfigData := types.NewVersionedNextConfigData() - versionedNextConfigData.Set(nextConfig) + versionedNextConfigData.SetValue(nextConfig) - require.NoError(t, babeConsensusDigestNextConfigData.Set(versionedNextConfigData)) + require.NoError(t, babeConsensusDigestNextConfigData.SetValue(versionedNextConfigData)) nextEpochData, err := scale.Marshal(babeConsensusDigestNextEpoch) require.NoError(t, err) @@ -800,8 +802,7 @@ func issueNewBlockFrom(t *testing.T, parentHeader *types.Header, preRuntimeDigest, err := babePrimaryPreDigest.ToPreRuntimeDigest() require.NoError(t, err) - digest := scale.NewVaryingDataTypeSlice(scale.MustNewVaryingDataType( - types.PreRuntimeDigest{})) + digest := types.NewDigest() require.NoError(t, digest.Add(*preRuntimeDigest)) diff --git a/lib/babe/verify_test.go b/lib/babe/verify_test.go index a70879fe3d..a6e2ee4e80 100644 --- a/lib/babe/verify_test.go +++ b/lib/babe/verify_test.go @@ -26,7 +26,7 @@ const testSlotDuration = time.Second // https://github.com/paritytech/substrate/blob/ded44948e2d5a398abcb4e342b0513cb690961bb/frame/grandpa/src/benchmarking.rs#L85 var testKeyOwnershipProof types.OpaqueKeyOwnershipProof = types.OpaqueKeyOwnershipProof([]byte{64, 138, 252, 29, 127, 102, 189, 129, 207, 47, 157, 60, 17, 138, 194, 121, 139, 92, 176, 175, 224, 16, 185, 93, 175, 251, 224, 81, 209, 61, 0, 71}) //nolint:lll -func newTestHeader(t *testing.T, digest ...scale.VaryingDataTypeValue) *types.Header { +func newTestHeader(t *testing.T, digest ...any) *types.Header { t.Helper() header := types.NewEmptyHeader() header.Number = 1 @@ -50,10 +50,10 @@ func signAndAddSeal(t *testing.T, kp *sr25519.Keypair, header *types.Header, dat assert.NoError(t, err) } -func newEncodedBabeDigest(t *testing.T, value scale.VaryingDataTypeValue) []byte { +func newEncodedBabeDigest(t *testing.T, value any) []byte { t.Helper() babeDigest := types.NewBabeDigest() - err := babeDigest.Set(value) + err := babeDigest.SetValue(value) require.NoError(t, err) enc, err := scale.Marshal(babeDigest) @@ -103,7 +103,7 @@ func Test_getAuthorityIndex(t *testing.T) { // BabePrimaryPreDigest Case babeDigest := types.NewBabeDigest() - err = babeDigest.Set(types.BabePrimaryPreDigest{AuthorityIndex: 21, SlotNumber: 1}) + err = babeDigest.SetValue(types.BabePrimaryPreDigest{AuthorityIndex: 21, SlotNumber: 1}) assert.NoError(t, err) bdEnc, err := scale.Marshal(babeDigest) @@ -120,7 +120,7 @@ func Test_getAuthorityIndex(t *testing.T) { //BabeSecondaryVRFPreDigest Case babeDigest2 := types.NewBabeDigest() - err = 
babeDigest2.Set(types.BabeSecondaryVRFPreDigest{AuthorityIndex: 21, SlotNumber: 10}) + err = babeDigest2.SetValue(types.BabeSecondaryVRFPreDigest{AuthorityIndex: 21, SlotNumber: 10}) assert.NoError(t, err) bdEnc2, err := scale.Marshal(babeDigest2) @@ -137,7 +137,7 @@ func Test_getAuthorityIndex(t *testing.T) { //BabeSecondaryPlainPreDigest case babeDigest3 := types.NewBabeDigest() - err = babeDigest3.Set(types.BabeSecondaryPlainPreDigest{AuthorityIndex: 21, SlotNumber: 100}) + err = babeDigest3.SetValue(types.BabeSecondaryPlainPreDigest{AuthorityIndex: 21, SlotNumber: 100}) assert.NoError(t, err) bdEnc3, err := scale.Marshal(babeDigest3) @@ -342,7 +342,7 @@ func Test_verifier_verifyPreRuntimeDigest(t *testing.T) { } digestSecondaryVRF := types.NewBabeDigest() - err = digestSecondaryVRF.Set(secVRFDigest) + err = digestSecondaryVRF.SetValue(secVRFDigest) assert.NoError(t, err) bdEnc, err := scale.Marshal(digestSecondaryVRF) @@ -386,7 +386,7 @@ func Test_verifier_verifyPreRuntimeDigest(t *testing.T) { name string verifier verifier args args - exp scale.VaryingDataTypeValue + exp any expErr error }{ { @@ -394,7 +394,7 @@ func Test_verifier_verifyPreRuntimeDigest(t *testing.T) { verifier: verifier{}, args: args{&types.PreRuntimeDigest{Data: []byte{0}}}, expErr: errors.New( - "unable to find VaryingDataTypeValue with index: for key 0"), + "unable to find VaryingDataTypeValue with index: for key 0 unable to find VaryingDataTypeValue with index"), }, { name: "Invalid BlockProducer Index", @@ -714,7 +714,7 @@ func Test_verifyBlockEquivocation(t *testing.T) { "cannot_get_slot_from_header": { header: func() *types.Header { wrongDigest := types.NewGrandpaConsensusDigest() - require.NoError(t, wrongDigest.Set(types.GrandpaForcedChange{})) + require.NoError(t, wrongDigest.SetValue(types.GrandpaForcedChange{})) data, err := scale.Marshal(wrongDigest) require.NoError(t, err) diff --git a/lib/blocktree/helpers_test.go b/lib/blocktree/helpers_test.go index 1edbfade0d..9fa1ef205d 100644 --- a/lib/blocktree/helpers_test.go +++ b/lib/blocktree/helpers_test.go @@ -27,9 +27,9 @@ type testBranch struct { arrivalTime int64 } -func createPrimaryBABEDigest(t testing.TB) scale.VaryingDataTypeSlice { +func createPrimaryBABEDigest(t testing.TB) types.Digest { babeDigest := types.NewBabeDigest() - err := babeDigest.Set(types.BabePrimaryPreDigest{AuthorityIndex: 0}) + err := babeDigest.SetValue(types.BabePrimaryPreDigest{AuthorityIndex: 0}) require.NoError(t, err) bdEnc, err := scale.Marshal(babeDigest) diff --git a/lib/grandpa/message.go b/lib/grandpa/message.go index fe9327eac3..f89bae2d76 100644 --- a/lib/grandpa/message.go +++ b/lib/grandpa/message.go @@ -18,11 +18,97 @@ type GrandpaMessage interface { ToConsensusMessage() (*network.ConsensusMessage, error) } +type grandpaMessages interface { + VoteMessage | CommitMessage | VersionedNeighbourPacket | CatchUpRequest | CatchUpResponse +} + +type grandpaMessage struct { + inner any +} + +func setgrandpaMessage[Value grandpaMessages](mvdt *grandpaMessage, value Value) { + mvdt.inner = value +} + +func (mvdt *grandpaMessage) SetValue(value any) (err error) { + switch value := value.(type) { + case VoteMessage: + setgrandpaMessage(mvdt, value) + return + + case CommitMessage: + setgrandpaMessage(mvdt, value) + return + + case VersionedNeighbourPacket: + setgrandpaMessage(mvdt, value) + return + + case CatchUpRequest: + setgrandpaMessage(mvdt, value) + return + + case CatchUpResponse: + setgrandpaMessage(mvdt, value) + return + + default: + return fmt.Errorf("unsupported 
type") + } +} + +func (mvdt grandpaMessage) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case VoteMessage: + return 0, mvdt.inner, nil + + case CommitMessage: + return 1, mvdt.inner, nil + + case VersionedNeighbourPacket: + return 2, mvdt.inner, nil + + case CatchUpRequest: + return 3, mvdt.inner, nil + + case CatchUpResponse: + return 4, mvdt.inner, nil + + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt grandpaMessage) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} +func (mvdt grandpaMessage) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return *new(VoteMessage), nil + + case 1: + return *new(CommitMessage), nil + + case 2: + return *new(VersionedNeighbourPacket), nil + + case 3: + return *new(CatchUpRequest), nil + + case 4: + return *new(CatchUpResponse), nil + + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + // NewGrandpaMessage returns a new VaryingDataType to represent a GrandpaMessage -func newGrandpaMessage() scale.VaryingDataType { - return scale.MustNewVaryingDataType( - VoteMessage{}, CommitMessage{}, newVersionedNeighbourPacket(), - CatchUpRequest{}, CatchUpResponse{}) +func newGrandpaMessage() grandpaMessage { + // return scale.MustNewVaryingDataType( + // VoteMessage{}, CommitMessage{}, newVersionedNeighbourPacket(), + // CatchUpRequest{}, CatchUpResponse{}) + return grandpaMessage{} } // FullVote represents a vote with additional information about the state @@ -60,13 +146,10 @@ func (v VoteMessage) String() string { return fmt.Sprintf("round=%d, setID=%d, message={%s}", v.Round, v.SetID, v.Message) } -// Index returns VDT index -func (VoteMessage) Index() uint { return 0 } - // ToConsensusMessage converts the VoteMessage into a network-level consensus message func (v *VoteMessage) ToConsensusMessage() (*ConsensusMessage, error) { msg := newGrandpaMessage() - err := msg.Set(*v) + err := msg.SetValue(*v) if err != nil { return nil, err } @@ -82,41 +165,47 @@ func (v *VoteMessage) ToConsensusMessage() (*ConsensusMessage, error) { } // VersionedNeighbourPacket represents the enum of neighbour messages -type VersionedNeighbourPacket scale.VaryingDataType - -// Index returns VDT index -func (VersionedNeighbourPacket) Index() uint { return 2 } - -func (vnp VersionedNeighbourPacket) String() string { - val, err := vnp.Value() - if err != nil { - return "VersionedNeighbourPacket()" - } +type VersionedNeighbourPacketValues interface { + NeighbourPacketV1 +} - return fmt.Sprintf("VersionedNeighbourPacket(%s)", val) +type VersionedNeighbourPacket struct { + inner any } -func newVersionedNeighbourPacket() VersionedNeighbourPacket { - vdt := scale.MustNewVaryingDataType(NeighbourPacketV1{}) +func setVersionedNeighbourPacket[Value VersionedNeighbourPacketValues](mvdt *VersionedNeighbourPacket, value Value) { + mvdt.inner = value +} - return VersionedNeighbourPacket(vdt) +func (mvdt *VersionedNeighbourPacket) SetValue(value any) (err error) { + switch value := value.(type) { + case NeighbourPacketV1: + setVersionedNeighbourPacket(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } } -// Set updates the current VDT value to be `val` -func (vnp *VersionedNeighbourPacket) Set(val scale.VaryingDataTypeValue) (err error) { - vdt := scale.VaryingDataType(*vnp) - err = vdt.Set(val) - if err != nil { - return fmt.Errorf("setting varying data type value: %w", err) +func (mvdt VersionedNeighbourPacket) IndexValue() (index uint, value any, 
err error) { + switch mvdt.inner.(type) { + case NeighbourPacketV1: + return 1, mvdt.inner, nil } - *vnp = VersionedNeighbourPacket(vdt) - return nil + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue } -// Value returns the current VDT value -func (vnp *VersionedNeighbourPacket) Value() (val scale.VaryingDataTypeValue, err error) { - vdt := scale.VaryingDataType(*vnp) - return vdt.Value() +func (mvdt VersionedNeighbourPacket) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt VersionedNeighbourPacket) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return *new(NeighbourPacketV1), nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue } // NeighbourPacketV1 represents a network-level neighbour message @@ -128,23 +217,20 @@ type NeighbourPacketV1 struct { Number uint32 } -// Index returns VDT index -func (NeighbourPacketV1) Index() uint { return 1 } - func (m NeighbourPacketV1) String() string { return fmt.Sprintf("NeighbourPacketV1{Round=%d, SetID=%d, Number=%d}", m.Round, m.SetID, m.Number) } // ToConsensusMessage converts the NeighbourMessage into a network-level consensus message func (m *NeighbourPacketV1) ToConsensusMessage() (*network.ConsensusMessage, error) { - versionedNeighbourPacket := newVersionedNeighbourPacket() - err := versionedNeighbourPacket.Set(*m) + versionedNeighbourPacket := VersionedNeighbourPacket{} + err := versionedNeighbourPacket.SetValue(*m) if err != nil { return nil, fmt.Errorf("setting neighbour packet v1: %w", err) } msg := newGrandpaMessage() - err = msg.Set(versionedNeighbourPacket) + err = msg.SetValue(versionedNeighbourPacket) if err != nil { return nil, err } @@ -193,9 +279,6 @@ func (s *Service) newCommitMessage(header *types.Header, round, setID uint64) (* }, nil } -// Index returns VDT index -func (CommitMessage) Index() uint { return 1 } - func (m CommitMessage) String() string { return fmt.Sprintf("CommitMessage{Round=%d, SetID=%d, Vote={%s}, Precommits=%v, AuthData=%v}", m.Round, m.SetID, m.Vote, m.Precommits, m.AuthData) @@ -204,7 +287,7 @@ func (m CommitMessage) String() string { // ToConsensusMessage converts the CommitMessage into a network-level consensus message func (m *CommitMessage) ToConsensusMessage() (*ConsensusMessage, error) { msg := newGrandpaMessage() - err := msg.Set(*m) + err := msg.SetValue(*m) if err != nil { return nil, err } @@ -264,9 +347,6 @@ func newCatchUpRequest(round, setID uint64) *CatchUpRequest { } } -// Index returns VDT index -func (CatchUpRequest) Index() uint { return 3 } - func (r CatchUpRequest) String() string { return fmt.Sprintf("CatchUpRequest{Round=%d, SetID=%d}", r.Round, r.SetID) } @@ -274,7 +354,7 @@ func (r CatchUpRequest) String() string { // ToConsensusMessage converts the catchUpRequest into a network-level consensus message func (r *CatchUpRequest) ToConsensusMessage() (*ConsensusMessage, error) { msg := newGrandpaMessage() - err := msg.Set(*r) + err := msg.SetValue(*r) if err != nil { return nil, err } @@ -325,9 +405,6 @@ func (s *Service) newCatchUpResponse(round, setID uint64) (*CatchUpResponse, err }, nil } -// Index returns VDT index -func (CatchUpResponse) Index() uint { return 4 } - func (r CatchUpResponse) String() string { return fmt.Sprintf("CatchUpResponse{SetID=%d, Round=%d, PreVoteJustification=%v, "+ "PreCommitJustification=%v, Hash=%s, Number=%d}", @@ -337,7 +414,7 @@ func (r CatchUpResponse) String() string { // ToConsensusMessage converts the catchUpResponse into a network-level consensus message func 
(r *CatchUpResponse) ToConsensusMessage() (*ConsensusMessage, error) { msg := newGrandpaMessage() - err := msg.Set(*r) + err := msg.SetValue(*r) if err != nil { return nil, err } diff --git a/lib/grandpa/message_handler_integration_test.go b/lib/grandpa/message_handler_integration_test.go index d1321b0a01..89a43c184b 100644 --- a/lib/grandpa/message_handler_integration_test.go +++ b/lib/grandpa/message_handler_integration_test.go @@ -30,7 +30,7 @@ var testHeader = &types.Header{ var testHash = testHeader.Hash() -func newTestDigest() scale.VaryingDataTypeSlice { +func newTestDigest() types.Digest { digest := types.NewDigest() prd, _ := types.NewBabeSecondaryPlainPreDigest(0, 1).ToPreRuntimeDigest() digest.Add(*prd) diff --git a/lib/grandpa/vote_message.go b/lib/grandpa/vote_message.go index d8efeb7ce7..682e09b5fc 100644 --- a/lib/grandpa/vote_message.go +++ b/lib/grandpa/vote_message.go @@ -303,12 +303,12 @@ func (s *Service) reportEquivocation(stage Subround, existingVote *SignedVote, c equivocationVote := types.NewGrandpaEquivocation() switch stage { case prevote: - err = equivocationVote.Set(types.PreVote(grandpaEquivocation)) + err = equivocationVote.SetValue(types.PreVote(grandpaEquivocation)) if err != nil { return fmt.Errorf("setting grandpa equivocation VDT as prevote equivocation: %w", err) } case precommit: - err = equivocationVote.Set(types.PreCommit(grandpaEquivocation)) + err = equivocationVote.SetValue(types.PreCommit(grandpaEquivocation)) if err != nil { return fmt.Errorf("setting grandpa equivocation VDT as precommit equivocation: %w", err) } diff --git a/lib/grandpa/vote_message_test.go b/lib/grandpa/vote_message_test.go index 7c5539fa63..edda27c065 100644 --- a/lib/grandpa/vote_message_test.go +++ b/lib/grandpa/vote_message_test.go @@ -44,7 +44,7 @@ func TestService_reportEquivocation(t *testing.T) { } equivocationVote := types.NewGrandpaEquivocation() - err := equivocationVote.Set(types.PreVote(grandpaEquivocation)) + err := equivocationVote.SetValue(types.PreVote(grandpaEquivocation)) require.NoError(t, err) equivocationProof := types.GrandpaEquivocationProof{ diff --git a/lib/runtime/invalid_transaction.go b/lib/runtime/invalid_transaction.go index ff3dd81784..ef544ed8a7 100644 --- a/lib/runtime/invalid_transaction.go +++ b/lib/runtime/invalid_transaction.go @@ -10,30 +10,118 @@ import ( ) // InvalidTransaction is a child VDT of TransactionValidityError -type InvalidTransaction scale.VaryingDataType - -// Index returns the VDT index -func (InvalidTransaction) Index() uint { //skipcq: GO-W1029 - return 0 -} - -func (i InvalidTransaction) String() string { return i.Error() } //skipcq: GO-W1029 - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (i *InvalidTransaction) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*i) - err = vdt.Set(val) - if err != nil { - return err +type InvalidTransaction struct { + inner any +} + +type InvalidTransactionValues interface { + Call | Payment | Future | Stale | BadProof | AncientBirthBlock | + ExhaustsResources | InvalidCustom | BadMandatory | MandatoryDispatch | BadSigner +} + +func setInvalidTransaction[Value InvalidTransactionValues](it *InvalidTransaction, value Value) { + it.inner = value +} + +func (it *InvalidTransaction) SetValue(value any) (err error) { + switch value := value.(type) { + case Call: + setInvalidTransaction(it, value) + return + case Payment: + setInvalidTransaction(it, value) + return + case Future: + setInvalidTransaction(it, 
value) + return + case Stale: + setInvalidTransaction(it, value) + return + case BadProof: + setInvalidTransaction(it, value) + return + case AncientBirthBlock: + setInvalidTransaction(it, value) + return + case ExhaustsResources: + setInvalidTransaction(it, value) + return + case InvalidCustom: + setInvalidTransaction(it, value) + return + case BadMandatory: + setInvalidTransaction(it, value) + return + case MandatoryDispatch: + setInvalidTransaction(it, value) + return + case BadSigner: + setInvalidTransaction(it, value) + return + default: + return fmt.Errorf("unsupported type") } - *i = InvalidTransaction(vdt) - return nil } -// Value will return the value from the underying VaryingDataType -func (i *InvalidTransaction) Value() (val scale.VaryingDataTypeValue, err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*i) - return vdt.Value() +func (it InvalidTransaction) IndexValue() (index uint, value any, err error) { + switch it.inner.(type) { + case Call: + return 0, it.inner, nil + case Payment: + return 1, it.inner, nil + case Future: + return 2, it.inner, nil + case Stale: + return 3, it.inner, nil + case BadProof: + return 4, it.inner, nil + case AncientBirthBlock: + return 5, it.inner, nil + case ExhaustsResources: + return 6, it.inner, nil + case InvalidCustom: + return 7, it.inner, nil + case BadMandatory: + return 8, it.inner, nil + case MandatoryDispatch: + return 9, it.inner, nil + case BadSigner: + return 10, it.inner, nil + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (it InvalidTransaction) Value() (value any, err error) { + _, value, err = it.IndexValue() + return +} + +func (it InvalidTransaction) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return Call{}, nil + case 1: + return Payment{}, nil + case 2: + return Future{}, nil + case 3: + return Stale{}, nil + case 4: + return BadProof{}, nil + case 5: + return AncientBirthBlock{}, nil + case 6: + return ExhaustsResources{}, nil + case 7: + return InvalidCustom(0), nil + case 8: + return BadMandatory{}, nil + case 9: + return MandatoryDispatch{}, nil + case 10: + return BadSigner{}, nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue } // Error returns the error message associated with the InvalidTransaction @@ -51,17 +139,12 @@ func (i InvalidTransaction) Error() string { //skipcq: GO-W1029 // NewInvalidTransaction is constructor for InvalidTransaction func NewInvalidTransaction() InvalidTransaction { - vdt := scale.MustNewVaryingDataType(Call{}, Payment{}, Future{}, Stale{}, BadProof{}, AncientBirthBlock{}, - ExhaustsResources{}, InvalidCustom(0), BadMandatory{}, MandatoryDispatch{}, BadSigner{}) - return InvalidTransaction(vdt) + return InvalidTransaction{} } // Call The call of the transaction is not expected type Call struct{} -// Index returns the VDT index -func (Call) Index() uint { return 0 } - func (c Call) String() string { return c.Error() } // Error returns the error message associated with the Call @@ -72,9 +155,6 @@ func (Call) Error() string { // Payment General error to do with the inability to pay some fees (e.g. account balance too low) type Payment struct{} -// Index returns the VDT index -func (Payment) Index() uint { return 1 } - func (p Payment) String() string { return p.Error() } // Error returns the error message associated with the Payment @@ -85,9 +165,6 @@ func (Payment) Error() string { // Future General error to do with the transaction not yet being valid (e.g. 
nonce too high) type Future struct{} -// Index returns the VDT index -func (Future) Index() uint { return 2 } - func (f Future) String() string { return f.Error() } // Error returns the error message associated with the Future @@ -98,9 +175,6 @@ func (Future) Error() string { // Stale General error to do with the transaction being outdated (e.g. nonce too low) type Stale struct{} -// Index returns the VDT index -func (Stale) Index() uint { return 3 } - func (s Stale) String() string { return s.Error() } // Error returns the error message associated with the Stale @@ -111,9 +185,6 @@ func (Stale) Error() string { // BadProof General error to do with the transaction’s proofs (e.g. signature) type BadProof struct{} -// Index returns the VDT index -func (BadProof) Index() uint { return 4 } - func (b BadProof) String() string { return b.Error() } // Error returns the error message associated with the BadProof @@ -124,9 +195,6 @@ func (BadProof) Error() string { // AncientBirthBlock The transaction birth block is ancient type AncientBirthBlock struct{} -// Index returns the VDT index -func (AncientBirthBlock) Index() uint { return 5 } - func (a AncientBirthBlock) String() string { return a.Error() } // Error returns the error message associated with the AncientBirthBlock @@ -137,9 +205,6 @@ func (AncientBirthBlock) Error() string { // ExhaustsResources The transaction would exhaust the resources of current block type ExhaustsResources struct{} -// Index returns the VDT index -func (ExhaustsResources) Index() uint { return 6 } - func (e ExhaustsResources) String() string { return e.Error() } // Error returns the error message associated with the ExhaustsResources @@ -150,9 +215,6 @@ func (ExhaustsResources) Error() string { // InvalidCustom Any other custom invalid validity that is not covered type InvalidCustom uint8 -// Index returns the VDT index -func (InvalidCustom) Index() uint { return 7 } - func (i InvalidCustom) String() string { return i.Error() } // Error returns the error message associated with the InvalidCustom @@ -163,9 +225,6 @@ func (i InvalidCustom) Error() string { // BadMandatory An extrinsic with a Mandatory dispatch resulted in Error type BadMandatory struct{} -// Index returns the VDT index -func (BadMandatory) Index() uint { return 8 } - func (b BadMandatory) String() string { return b.Error() } // Error returns the error message associated with the BadMandatory @@ -176,9 +235,6 @@ func (BadMandatory) Error() string { // MandatoryDispatch A transaction with a mandatory dispatch type MandatoryDispatch struct{} -// Index returns the VDT index -func (MandatoryDispatch) Index() uint { return 9 } - func (m MandatoryDispatch) String() string { return m.Error() } // Error returns the error message associated with the MandatoryDispatch @@ -189,9 +245,6 @@ func (MandatoryDispatch) Error() string { // BadSigner A transaction with a mandatory dispatch type BadSigner struct{} -// Index returns VDT index -func (BadSigner) Index() uint { return 10 } - func (b BadSigner) String() string { return b.Error() } // Error returns the error message associated with the MandatoryDispatch diff --git a/lib/runtime/test_helpers.go b/lib/runtime/test_helpers.go index bc54385a55..46eccc5575 100644 --- a/lib/runtime/test_helpers.go +++ b/lib/runtime/test_helpers.go @@ -264,7 +264,7 @@ func InitializeRuntimeToTest(t *testing.T, instance Instance, parentHeader *type currentSlot := timestamp / slotDuration babeDigest := types.NewBabeDigest() - err = babeDigest.Set(*types.NewBabePrimaryPreDigest(0, 
currentSlot, [32]byte{}, [64]byte{})) + err = babeDigest.SetValue(*types.NewBabePrimaryPreDigest(0, currentSlot, [32]byte{}, [64]byte{})) require.NoError(t, err) encodedBabeDigest, err := scale.Marshal(babeDigest) diff --git a/lib/runtime/transaction_validity.go b/lib/runtime/transaction_validity.go index 9e27c5a767..41570a3dea 100644 --- a/lib/runtime/transaction_validity.go +++ b/lib/runtime/transaction_validity.go @@ -16,23 +16,54 @@ var errInvalidTypeCast = errors.New("invalid type cast") // TransactionValidityError Information on a transaction's validity and, if valid, // on how it relates to other transactions. It is a result of the form: // Result -type TransactionValidityError scale.VaryingDataType +type TransactionValidityError struct { + inner any +} -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (tve *TransactionValidityError) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*tve) - err = vdt.Set(val) - if err != nil { - return err +type TransactionValidityErrorValues interface { + InvalidTransaction | UnknownTransaction +} + +func setMyVaryingDataType[Value TransactionValidityErrorValues](mvdt *TransactionValidityError, value Value) { + mvdt.inner = value +} + +func (mvdt *TransactionValidityError) SetValue(value any) (err error) { + switch value := value.(type) { + case InvalidTransaction: + setMyVaryingDataType(mvdt, value) + return + case UnknownTransaction: + setMyVaryingDataType(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt TransactionValidityError) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case InvalidTransaction: + return 0, mvdt.inner, nil + case UnknownTransaction: + return 1, mvdt.inner, nil } - *tve = TransactionValidityError(vdt) - return nil + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue } -// Value will return the value from the underlying VaryingDataType -func (tve *TransactionValidityError) Value() (val scale.VaryingDataTypeValue, err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*tve) - return vdt.Value() +func (mvdt TransactionValidityError) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt TransactionValidityError) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return InvalidTransaction{}, nil + case 1: + return UnknownTransaction{}, nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue } // Error will return the error underlying TransactionValidityError @@ -50,12 +81,7 @@ func (tve TransactionValidityError) Error() string { //skipcq: GO-W1029 // NewTransactionValidityError is constructor for TransactionValidityError func NewTransactionValidityError() *TransactionValidityError { - vdt, err := scale.NewVaryingDataType(NewInvalidTransaction(), NewUnknownTransaction()) - if err != nil { - panic(err) - } - tve := TransactionValidityError(vdt) - return &tve + return &TransactionValidityError{} } // UnmarshalTransactionValidity takes the result of the validateTransaction runtime call and unmarshalls it diff --git a/lib/runtime/transaction_validity_test.go b/lib/runtime/transaction_validity_test.go index 267772ad4f..434c5fd8cc 100644 --- a/lib/runtime/transaction_validity_test.go +++ b/lib/runtime/transaction_validity_test.go @@ -73,9 +73,9 @@ func Test_UnmarshalTransactionValidity(t *testing.T) { func Test_InvalidTransactionValidity(t *testing.T) { transactionValidityErr := 
NewTransactionValidityError() invalidTransaction := NewInvalidTransaction() - err := invalidTransaction.Set(Future{}) + err := invalidTransaction.SetValue(Future{}) require.NoError(t, err) - err = transactionValidityErr.Set(invalidTransaction) + err = transactionValidityErr.SetValue(invalidTransaction) require.NoError(t, err) expErrMsg := "invalid transaction" @@ -98,9 +98,9 @@ func Test_InvalidTransactionValidity(t *testing.T) { func Test_UnknownTransactionValidity(t *testing.T) { transactionValidityErr := NewTransactionValidityError() unknownTransaction := NewUnknownTransaction() - err := unknownTransaction.Set(NoUnsignedValidator{}) + err := unknownTransaction.SetValue(NoUnsignedValidator{}) require.NoError(t, err) - err = transactionValidityErr.Set(unknownTransaction) + err = transactionValidityErr.SetValue(unknownTransaction) require.NoError(t, err) expErrMsg := "validator not found" @@ -123,16 +123,16 @@ func Test_UnknownTransactionValidity(t *testing.T) { func Test_UnknownTransactionValidity_EncodingAndDecoding(t *testing.T) { transactionValidityErr := NewTransactionValidityError() unknownTransaction := NewUnknownTransaction() - err := unknownTransaction.Set(NoUnsignedValidator{}) + err := unknownTransaction.SetValue(NoUnsignedValidator{}) require.NoError(t, err) - err = transactionValidityErr.Set(unknownTransaction) + err = transactionValidityErr.SetValue(unknownTransaction) require.NoError(t, err) - enc, err := scale.Marshal(transactionValidityErr) + enc, err := scale.Marshal(*transactionValidityErr) require.NoError(t, err) decodedTransactionValidityErr := NewTransactionValidityError() - err = scale.Unmarshal(enc, &decodedTransactionValidityErr) + err = scale.Unmarshal(enc, decodedTransactionValidityErr) require.NoError(t, err) require.Equal(t, transactionValidityErr, decodedTransactionValidityErr) diff --git a/lib/runtime/unknown_transaction.go b/lib/runtime/unknown_transaction.go index 89e9ae8301..40fca6f2de 100644 --- a/lib/runtime/unknown_transaction.go +++ b/lib/runtime/unknown_transaction.go @@ -10,30 +10,60 @@ import ( ) // UnknownTransaction is the child VDT of TransactionValidityError -type UnknownTransaction scale.VaryingDataType +type UnknownTransaction struct { + inner any +} +type UnknownTransactionValues interface { + ValidityCannotLookup | NoUnsignedValidator | UnknownCustom +} -// Index returns the VDT index -func (UnknownTransaction) Index() uint { //skipcq: GO-W1029 - return 1 +func setUnknownTransaction[Value UnknownTransactionValues](mvdt *UnknownTransaction, value Value) { + mvdt.inner = value } -func (u UnknownTransaction) String() string { return u.Error() } //skipcq: GO-W1029 +func (mvdt *UnknownTransaction) SetValue(value any) (err error) { + switch value := value.(type) { + case ValidityCannotLookup: + setUnknownTransaction(mvdt, value) + return + case NoUnsignedValidator: + setUnknownTransaction(mvdt, value) + return + case UnknownCustom: + setUnknownTransaction(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (u *UnknownTransaction) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*u) - err = vdt.Set(val) - if err != nil { - return err +func (mvdt UnknownTransaction) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case ValidityCannotLookup: + return 0, mvdt.inner, nil + case NoUnsignedValidator: + return 1, mvdt.inner, nil + case UnknownCustom: + return 2, 
mvdt.inner, nil } - *u = UnknownTransaction(vdt) - return nil + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt UnknownTransaction) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return } -// Value will return value from the underying VaryingDataType -func (u *UnknownTransaction) Value() (val scale.VaryingDataTypeValue, err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*u) - return vdt.Value() +func (mvdt UnknownTransaction) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return ValidityCannotLookup{}, nil + case 1: + return NoUnsignedValidator{}, nil + case 2: + return UnknownCustom(0), nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue } func (u UnknownTransaction) Error() string { //skipcq: GO-W1029 @@ -48,21 +78,14 @@ func (u UnknownTransaction) Error() string { //skipcq: GO-W1029 return err.Error() } -// NewUnknownTransaction is constructor for Unknown +// NewUnknownTransaction is constructor for UnknownTransaction func NewUnknownTransaction() UnknownTransaction { - vdt, err := scale.NewVaryingDataType(ValidityCannotLookup{}, NoUnsignedValidator{}, UnknownCustom(0)) - if err != nil { - panic(err) - } - return UnknownTransaction(vdt) + return UnknownTransaction{} } // ValidityCannotLookup Could not look up some information that is required to validate the transaction type ValidityCannotLookup struct{} -// Index returns the VDT index -func (ValidityCannotLookup) Index() uint { return 0 } - func (v ValidityCannotLookup) String() string { return v.Error() } // Error returns the error message associated with the ValidityCannotLookup @@ -73,9 +96,6 @@ func (ValidityCannotLookup) Error() string { // NoUnsignedValidator No validator found for the given unsigned transaction type NoUnsignedValidator struct{} -// Index returns the VDT index -func (NoUnsignedValidator) Index() uint { return 1 } - func (n NoUnsignedValidator) String() string { return n.Error() } // Error returns the error message associated with the NoUnsignedValidator @@ -86,16 +106,11 @@ func (NoUnsignedValidator) Error() string { // UnknownCustom Any other custom unknown validity that is not covered type UnknownCustom uint8 -// Index returns the VDT index -func (UnknownCustom) Index() uint { return 2 } - -func (m UnknownCustom) String() string { return m.Error() } - // Error returns the error message associated with the UnknownCustom func (m UnknownCustom) Error() string { return newUnknownError(m).Error() } -func newUnknownError(data scale.VaryingDataTypeValue) error { +func newUnknownError(data any) error { return fmt.Errorf("unknown error: %v", data) } diff --git a/lib/runtime/wazero/imports.go b/lib/runtime/wazero/imports.go index 1fb3ae59fe..d0bb9c0e55 100644 --- a/lib/runtime/wazero/imports.go +++ b/lib/runtime/wazero/imports.go @@ -1099,19 +1099,19 @@ func ext_default_child_storage_clear_prefix_version_1( // NewDigestItem returns a new VaryingDataType to represent a DigestItem func NewKillStorageResult(deleted uint32, allDeleted bool) scale.VaryingDataType { - killStorageResult := scale.MustNewVaryingDataType(new(noneRemain), new(someRemain)) + killStorageResult := killStorageResult{} var err error if allDeleted { - err = killStorageResult.Set(noneRemain(deleted)) + err = killStorageResult.SetValue(noneRemain(deleted)) } else { - err = killStorageResult.Set(someRemain(deleted)) + err = killStorageResult.SetValue(someRemain(deleted)) } - if err != nil { panic(err) } - return killStorageResult + + return &killStorageResult } 
//export ext_default_child_storage_clear_prefix_version_2 @@ -1335,15 +1335,58 @@ func ext_default_child_storage_storage_kill_version_2( return 0 } -type noneRemain uint32 +type killStorageResult struct { + inner any +} +type killStorageResultValues interface { + noneRemain | someRemain +} -func (noneRemain) Index() uint { return 0 } -func (nr noneRemain) String() string { return fmt.Sprintf("noneRemain(%d)", nr) } +func setkillStorageResult[Value killStorageResultValues](mvdt *killStorageResult, value Value) { + mvdt.inner = value +} -type someRemain uint32 +func (mvdt *killStorageResult) SetValue(value any) (err error) { + switch value := value.(type) { + case noneRemain: + setkillStorageResult(mvdt, value) + return + case someRemain: + setkillStorageResult(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} -func (someRemain) Index() uint { return 1 } -func (sr someRemain) String() string { return fmt.Sprintf("someRemain(%d)", sr) } +func (mvdt killStorageResult) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case noneRemain: + return 0, mvdt.inner, nil + case someRemain: + return 1, mvdt.inner, nil + } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} + +func (mvdt killStorageResult) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt killStorageResult) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return noneRemain(0), nil + case 1: + return someRemain(0), nil + } + return nil, scale.ErrUnknownVaryingDataTypeValue +} + +type noneRemain uint32 + +type someRemain uint32 func ext_default_child_storage_storage_kill_version_3( ctx context.Context, m api.Module, childStorageKeySpan, lim uint64) (pointerSize uint64) { @@ -1378,15 +1421,12 @@ func ext_default_child_storage_storage_kill_version_3( return ret } - vdt, err := scale.NewVaryingDataType(noneRemain(0), someRemain(0)) - if err != nil { - logger.Warnf("cannot create new varying data type: %s", err) - } + vdt := killStorageResult{} if all { - err = vdt.Set(noneRemain(deleted)) + err = vdt.SetValue(noneRemain(deleted)) } else { - err = vdt.Set(someRemain(deleted)) + err = vdt.SetValue(someRemain(deleted)) } if err != nil { logger.Warnf("cannot set varying data type: %s", err) diff --git a/lib/runtime/wazero/instance.go b/lib/runtime/wazero/instance.go index 8a793691a9..f5e963c2a2 100644 --- a/lib/runtime/wazero/instance.go +++ b/lib/runtime/wazero/instance.go @@ -667,7 +667,7 @@ func (in *Instance) ExecuteBlock(block *types.Block) ([]byte, error) { b.Header.Digest = types.NewDigest() // remove seal digest only - for _, d := range block.Header.Digest.Types { + for _, d := range block.Header.Digest { digestValue, err := d.Value() if err != nil { return nil, fmt.Errorf("getting digest type value: %w", err) diff --git a/lib/runtime/wazero/instance_test.go b/lib/runtime/wazero/instance_test.go index bc42235f01..ddf50db845 100644 --- a/lib/runtime/wazero/instance_test.go +++ b/lib/runtime/wazero/instance_test.go @@ -842,7 +842,7 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock901442(t *testing.T) { digest := types.NewDigest() err = scale.Unmarshal(digestBytes, &digest) require.NoError(t, err) - require.Equal(t, 2, len(digest.Types)) + require.Equal(t, 2, len(digest)) // kusama block 901442, from polkadot.js block := &types.Block{ @@ -888,7 +888,7 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock1377831(t *testing.T) { digest := types.NewDigest() err = scale.Unmarshal(digestBytes, 
&digest) require.NoError(t, err) - require.Equal(t, 2, len(digest.Types)) + require.Equal(t, 2, len(digest)) // kusama block 1377831, from polkadot.js block := &types.Block{ @@ -935,7 +935,7 @@ func TestInstance_ExecuteBlock_KusamaRuntime_KusamaBlock1482003(t *testing.T) { err = scale.Unmarshal(digestBytes, &digest) require.NoError(t, err) - require.Equal(t, 4, len(digest.Types)) + require.Equal(t, 4, len(digest)) // kusama block 1482003, from polkadot.js block := &types.Block{ @@ -981,7 +981,7 @@ func TestInstance_ExecuteBlock_PolkadotBlock1089328(t *testing.T) { digest := types.NewDigest() err = scale.Unmarshal(digestBytes, &digest) require.NoError(t, err) - require.Equal(t, 2, len(digest.Types)) + require.Equal(t, 2, len(digest)) block := &types.Block{ Header: types.Header{ @@ -1266,7 +1266,7 @@ func TestInstance_GrandpaSubmitReportEquivocationUnsignedExtrinsic(t *testing.T) } preVoteEquivocation := types.PreVote(grandpaEquivocation) equivocationVote := types.NewGrandpaEquivocation() - err = equivocationVote.Set(preVoteEquivocation) + err = equivocationVote.SetValue(preVoteEquivocation) require.NoError(t, err) equivocationProof := types.GrandpaEquivocationProof{ diff --git a/pkg/scale/README.md b/pkg/scale/README.md index edb7f9db67..5ebc06f5db 100644 --- a/pkg/scale/README.md +++ b/pkg/scale/README.md @@ -210,12 +210,26 @@ func ExampleResult() { ### Varying Data Type -A `VaryingDataType` is analogous to a Rust enum. A `VaryingDataType` needs to be constructed using the `NewVaryingDataType` constructor. `VaryingDataTypeValue` is an -interface with one `Index() uint` method that needs to be implemented. The returned `uint` index should be unique per type and needs to be the same index as defined in the Rust enum to ensure interopability. To set the value of the `VaryingDataType`, the `VaryingDataType.Set()` function should be called with an associated `VaryingDataTypeValue`. +A `VaryingDataType` is analogous to a Rust enum. `VaryingDataType` is an interface that needs to be implemented. From the Polkadot spec, a `VaryingDataType` has a set of associated values, each analogous to a Rust enum variant. Each value has an associated integer index, which go-scale uses to determine which value type to decode into. The following interface needs to be implemented for go-scale to be able to marshal and unmarshal the type.
+```go +type EncodeVaryingDataType interface { + IndexValue() (index uint, value any, err error) + Value() (value any, err error) + ValueAt(index uint) (value any, err error) +} + +type VaryingDataType interface { + EncodeVaryingDataType + SetValue(value any) (err error) +} +``` +Example implementation of `VaryingDataType`: ```go import ( "fmt" + "reflect" + "github.com/ChainSafe/gossamer/pkg/scale" ) @@ -225,298 +239,96 @@ type MyStruct struct { Foo []byte } -func (ms MyStruct) Index() uint { - return 1 -} - type MyOtherStruct struct { Foo string Bar uint64 Baz uint } -func (mos MyOtherStruct) Index() uint { - return 2 -} - type MyInt16 int16 -func (mi16 MyInt16) Index() uint { - return 3 +type MyVaryingDataType struct { + inner any } -func ExampleVaryingDataType() { - vdt, err := scale.NewVaryingDataType(MyStruct{}, MyOtherStruct{}, MyInt16(0)) - if err != nil { - panic(err) - } - - err = vdt.Set(MyStruct{ - Baz: true, - Bar: 999, - Foo: []byte{1, 2}, - }) - if err != nil { - panic(err) - } - - bytes, err := scale.Marshal(vdt) - if err != nil { - panic(err) - } - - vdt1, err := scale.NewVaryingDataType(MyStruct{}, MyOtherStruct{}, MyInt16(0)) - if err != nil { - panic(err) - } - - err = scale.Unmarshal(bytes, &vdt1) - if err != nil { - panic(err) - } - - if !reflect.DeepEqual(vdt, vdt1) { - panic(fmt.Errorf("uh oh: %+v %+v", vdt, vdt1)) - } +type MyVaryingDataTypeValues interface { + MyStruct | MyOtherStruct | MyInt16 } -``` - -A `VaryingDataTypeSlice` is a slice containing multiple `VaryingDataType` elements. Each `VaryingDataTypeValue` must be of a supported type of the `VaryingDataType` passed into the `NewVaryingDataTypeSlice` constructor. The method to call to add `VaryingDataTypeValue` instances is `VaryingDataTypeSlice.Add()`. - -``` -func ExampleVaryingDataTypeSlice() { - vdt, err := scale.NewVaryingDataType(MyStruct{}, MyOtherStruct{}, MyInt16(0)) - if err != nil { - panic(err) - } - - vdts := scale.NewVaryingDataTypeSlice(vdt) - - err = vdts.Add( - MyStruct{ - Baz: true, - Bar: 999, - Foo: []byte{1, 2}, - }, - MyInt16(1), - ) - if err != nil { - panic(err) - } - - bytes, err := scale.Marshal(vdts) - if err != nil { - panic(err) - } - - vdts1 := scale.NewVaryingDataTypeSlice(vdt) - if err != nil { - panic(err) - } - - err = scale.Unmarshal(bytes, &vdts1) - if err != nil { - panic(err) - } - if !reflect.DeepEqual(vdts, vdts1) { - panic(fmt.Errorf("uh oh: %+v %+v", vdts, vdts1)) - } +func setMyVaryingDataType[Value MyVaryingDataTypeValues](mvdt *MyVaryingDataType, value Value) { + mvdt.inner = value } -``` -#### Nested VaryingDataType - -See `varying_data_type_nested_example.go` for a working example of a custom `VaryingDataType` with another custom `VaryingDataType` as a value of the parent `VaryingDataType`. In the case of nested `VaryingDataTypes`, a custom type needs to be created for the child `VaryingDataType` because it needs to fulfill the `VaryingDataTypeValue` interface. - -```go -import ( - "fmt" - "reflect" - - "github.com/ChainSafe/gossamer/pkg/scale" -) - -// ParentVDT is a VaryingDataType that consists of multiple nested VaryingDataType -// instances (aka. 
a rust enum containing multiple enum options) -type ParentVDT scale.VaryingDataType - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (pvdt *ParentVDT) Set(val scale.VaryingDataTypeValue) (err error) { - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*pvdt) - err = vdt.Set(val) - if err != nil { +func (mvdt *MyVaryingDataType) SetValue(value any) (err error) { + switch value := value.(type) { + case MyStruct: + setMyVaryingDataType(mvdt, value) return - } - // store original ParentVDT with VaryingDataType that has been set - *pvdt = ParentVDT(vdt) - return -} - -// Value will return value from underying VaryingDataType -func (pvdt *ParentVDT) Value() (val scale.VaryingDataTypeValue, err error) { - vdt := scale.VaryingDataType(*pvdt) - return vdt.Value() -} - -// NewParentVDT is constructor for ParentVDT -func NewParentVDT() ParentVDT { - // use standard VaryingDataType constructor to construct a VaryingDataType - vdt, err := scale.NewVaryingDataType(NewChildVDT(), NewOtherChildVDT()) - if err != nil { - panic(err) - } - // cast to ParentVDT - return ParentVDT(vdt) -} - -// ChildVDT type is used as a VaryingDataTypeValue for ParentVDT -type ChildVDT scale.VaryingDataType - -// Index fulfills the VaryingDataTypeValue interface. T -func (cvdt ChildVDT) Index() uint { - return 1 -} - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (cvdt *ChildVDT) Set(val scale.VaryingDataTypeValue) (err error) { - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*cvdt) - err = vdt.Set(val) - if err != nil { + case MyOtherStruct: + setMyVaryingDataType(mvdt, value) + return + case MyInt16: + setMyVaryingDataType(mvdt, value) return + default: + return fmt.Errorf("unsupported type") } - // store original ParentVDT with VaryingDataType that has been set - *cvdt = ChildVDT(vdt) - return -} - -// Value will return value from underying VaryingDataType -func (cvdt *ChildVDT) Value() (val scale.VaryingDataTypeValue, err error) { - vdt := scale.VaryingDataType(*cvdt) - return vdt.Value() } -// NewChildVDT is constructor for ChildVDT -func NewChildVDT() ChildVDT { - // use standard VaryingDataType constructor to construct a VaryingDataType - // constarined to types ChildInt16, ChildStruct, and ChildString - vdt, err := scale.NewVaryingDataType(ChildInt16(0), ChildStruct{}, ChildString("")) - if err != nil { - panic(err) +func (mvdt MyVaryingDataType) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case MyStruct: + return 1, mvdt.inner, nil + case MyOtherStruct: + return 2, mvdt.inner, nil + case MyInt16: + return 3, mvdt.inner, nil } - // cast to ParentVDT - return ChildVDT(vdt) -} - -// OtherChildVDT type is used as a VaryingDataTypeValue for ParentVDT -type OtherChildVDT scale.VaryingDataType - -// Index fulfills the VaryingDataTypeValue interface. 
-func (ocvdt OtherChildVDT) Index() uint { - return 2 + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue } -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (cvdt *OtherChildVDT) Set(val scale.VaryingDataTypeValue) (err error) { - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*cvdt) - err = vdt.Set(val) - if err != nil { - return - } - // store original ParentVDT with VaryingDataType that has been set - *cvdt = OtherChildVDT(vdt) +func (mvdt MyVaryingDataType) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() return } -// NewOtherChildVDT is constructor for OtherChildVDT -func NewOtherChildVDT() OtherChildVDT { - // use standard VaryingDataType constructor to construct a VaryingDataType - // constarined to types ChildInt16 and ChildStruct - vdt, err := scale.NewVaryingDataType(ChildInt16(0), ChildStruct{}, ChildString("")) - if err != nil { - panic(err) +func (mvdt MyVaryingDataType) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return MyStruct{}, nil + case 2: + return MyOtherStruct{}, nil + case 3: + return MyInt16(0), nil } - // cast to ParentVDT - return OtherChildVDT(vdt) -} - -// ChildInt16 is used as a VaryingDataTypeValue for ChildVDT and OtherChildVDT -type ChildInt16 int16 - -// Index fulfills the VaryingDataTypeValue interface. The ChildVDT type is used as a -// VaryingDataTypeValue for ParentVDT -func (ci ChildInt16) Index() uint { - return 1 -} - -// ChildStruct is used as a VaryingDataTypeValue for ChildVDT and OtherChildVDT -type ChildStruct struct { - A string - B bool -} - -// Index fulfills the VaryingDataTypeValue interface -func (cs ChildStruct) Index() uint { - return 2 -} - -// ChildString is used as a VaryingDataTypeValue for ChildVDT and OtherChildVDT -type ChildString string - -// Index fulfills the VaryingDataTypeValue interface -func (cs ChildString) Index() uint { - return 3 + return nil, scale.ErrUnknownVaryingDataTypeValue } -func ExampleNestedVaryingDataType() { - parent := NewParentVDT() +func ExampleVaryingDataType() { + vdt := MyVaryingDataType{} - // populate parent with ChildVDT - child := NewChildVDT() - child.Set(ChildInt16(888)) - err := parent.Set(child) + err := vdt.SetValue(MyStruct{ + Baz: true, + Bar: 999, + Foo: []byte{1, 2}, + }) if err != nil { panic(err) } - // validate ParentVDT.Value() - parentValue, err := parent.Value() - if err != nil { - panic(err) - } - fmt.Printf("parent.Value(): %+v\n", parentValue) - // should cast to ChildVDT, since that was set earlier - valChildVDT := parentValue.(ChildVDT) - // validate ChildVDT.Value() as ChildInt16(888) - valChildVDTValue, err := valChildVDT.Value() + bytes, err := scale.Marshal(vdt) if err != nil { panic(err) } - fmt.Printf("child.Value(): %+v\n", valChildVDTValue) - // marshal into scale encoded bytes - bytes, err := scale.Marshal(parent) - if err != nil { - panic(err) - } - fmt.Printf("bytes: % x\n", bytes) + dst := MyVaryingDataType{} - // unmarshal into another ParentVDT - dstParent := NewParentVDT() - err = scale.Unmarshal(bytes, &dstParent) + err = scale.Unmarshal(bytes, &dst) if err != nil { panic(err) } - // assert both ParentVDT instances are the same - fmt.Println(reflect.DeepEqual(parent, dstParent)) - - // Output: - // parent.Value(): {value:888 cache:map[1:0 2:{A: B:false} 3:]} - // child.Value(): 888 - // bytes: 01 01 78 03 - // true + + fmt.Println(reflect.DeepEqual(vdt, dst)) + // Output: true } ``` \ No newline at end of file diff --git 
a/pkg/scale/decode.go b/pkg/scale/decode.go index bcd99c8d1c..15b8af8eb6 100644 --- a/pkg/scale/decode.go +++ b/pkg/scale/decode.go @@ -125,6 +125,15 @@ func (ds *decodeState) unmarshal(dstv reflect.Value) (err error) { return } + if dstv.CanAddr() { + addr := dstv.Addr() + vdt, ok := addr.Interface().(VaryingDataType) + if ok { + err = ds.decodeVaryingDataType(vdt) + return + } + } + in := dstv.Interface() switch in.(type) { case *big.Int: @@ -143,10 +152,6 @@ func (ds *decodeState) unmarshal(dstv reflect.Value) (err error) { err = ds.decodeBool(dstv) case Result: err = ds.decodeResult(dstv) - case VaryingDataType: - err = ds.decodeVaryingDataType(dstv) - case VaryingDataTypeSlice: - err = ds.decodeVaryingDataTypeSlice(dstv) default: t := reflect.TypeOf(in) switch t.Kind() { @@ -157,12 +162,7 @@ func (ds *decodeState) unmarshal(dstv reflect.Value) (err error) { case reflect.Ptr: err = ds.decodePointer(dstv) case reflect.Struct: - ok := reflect.ValueOf(in).CanConvert(reflect.TypeOf(VaryingDataType{})) - if ok { - err = ds.decodeCustomVaryingDataType(dstv) - } else { - err = ds.decodeStruct(dstv) - } + err = ds.decodeStruct(dstv) case reflect.Array: err = ds.decodeArray(dstv) case reflect.Slice: @@ -339,66 +339,16 @@ func (ds *decodeState) decodePointer(dstv reflect.Value) (err error) { return } -func (ds *decodeState) decodeVaryingDataTypeSlice(dstv reflect.Value) (err error) { - vdts := dstv.Interface().(VaryingDataTypeSlice) - l, err := ds.decodeLength() - if err != nil { - return - } - for i := uint(0); i < l; i++ { - vdt := vdts.VaryingDataType - vdtv := reflect.New(reflect.TypeOf(vdt)) - vdtv.Elem().Set(reflect.ValueOf(vdt)) - err = ds.unmarshal(vdtv.Elem()) - if err != nil { - return - } - vdts.Types = append(vdts.Types, vdtv.Elem().Interface().(VaryingDataType)) - } - dstv.Set(reflect.ValueOf(vdts)) - return -} - -func (ds *decodeState) decodeCustomVaryingDataType(dstv reflect.Value) (err error) { - initialType := dstv.Type() - - methodVal := dstv.MethodByName("New") - if methodVal.IsValid() && !methodVal.IsZero() { - if methodVal.Type().Out(0).String() != dstv.Type().String() { - return fmt.Errorf("%s.New() returns %s instead of %s", dstv.Type(), methodVal.Type().Out(0), dstv.Type()) - } - - values := methodVal.Call(nil) - if len(values) > 1 { - return fmt.Errorf("%s.New() returns too many values", dstv.Type()) - } else if len(values) == 0 { - return fmt.Errorf("%s.New() does not return a value", dstv.Type()) - } - dstv.Set(values[0]) - } - - converted := dstv.Convert(reflect.TypeOf(VaryingDataType{})) - tempVal := reflect.New(converted.Type()) - tempVal.Elem().Set(converted) - err = ds.decodeVaryingDataType(tempVal.Elem()) - if err != nil { - return - } - dstv.Set(tempVal.Elem().Convert(initialType)) - return -} - -func (ds *decodeState) decodeVaryingDataType(dstv reflect.Value) (err error) { +func (ds *decodeState) decodeVaryingDataType(vdt VaryingDataType) (err error) { var b byte b, err = ds.ReadByte() if err != nil { return } - vdt := dstv.Interface().(VaryingDataType) - val, ok := vdt.cache[uint(b)] - if !ok { - err = fmt.Errorf("%w: for key %d", errUnknownVaryingDataTypeValue, uint(b)) + val, err := vdt.ValueAt(uint(b)) + if err != nil { + err = fmt.Errorf("%w: for key %d %v", ErrUnknownVaryingDataTypeValue, uint(b), err) return } @@ -408,11 +358,7 @@ func (ds *decodeState) decodeVaryingDataType(dstv reflect.Value) (err error) { if err != nil { return } - err = vdt.Set(tempVal.Elem().Interface().(VaryingDataTypeValue)) - if err != nil { - return - } - 
dstv.Set(reflect.ValueOf(vdt)) + err = vdt.SetValue(tempVal.Elem().Interface()) return } diff --git a/pkg/scale/decode_test.go b/pkg/scale/decode_test.go index 713f3a7bce..0a75f25f0d 100644 --- a/pkg/scale/decode_test.go +++ b/pkg/scale/decode_test.go @@ -270,57 +270,20 @@ func Test_unmarshal_optionality(t *testing.T) { } for _, tt := range ptrTests { t.Run(tt.name, func(t *testing.T) { - switch in := tt.in.(type) { - case VaryingDataType: - // copy the inputted vdt cause we need the cached values - cp := in - vdt := cp - vdt.value = nil - var dst interface{} = &vdt - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("decodeState.unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return - } - diff := cmp.Diff( - vdt.value, - tt.in.(VaryingDataType).value, - cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{}, MyStructWithPrivate{})) - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) - } - default: - var dst interface{} - - if reflect.TypeOf(tt.in).Kind().String() == "map" { - dst = &(map[int8][]byte{}) - } else { - dst = reflect.New(reflect.TypeOf(tt.in)).Interface() - } - - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("decodeState.unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return - } - - // assert response only if we aren't expecting an error - if !tt.wantErr { - var diff string - if tt.out != nil { - diff = cmp.Diff( - reflect.ValueOf(dst).Elem().Interface(), - reflect.ValueOf(tt.out).Interface(), - cmpopts.IgnoreUnexported(tt.in)) - } else { - diff = cmp.Diff( - reflect.ValueOf(dst).Elem().Interface(), - reflect.ValueOf(tt.in).Interface(), - cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{}, MyStructWithPrivate{})) - } - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) - } - } + dst := reflect.New(reflect.TypeOf(tt.in)).Interface() + err := Unmarshal(tt.want, &dst) + if tt.wantErr { + assert.Error(t, err) + return + } + assert.NoError(t, err) + var expected any + if tt.out != nil { + expected = reflect.ValueOf(tt.out).Interface() + } else { + expected = reflect.ValueOf(tt.in).Interface() } + assert.Equal(t, expected, reflect.ValueOf(dst).Elem().Interface()) }) } } diff --git a/pkg/scale/encode.go b/pkg/scale/encode.go index c9830aef9d..9429fb0bff 100644 --- a/pkg/scale/encode.go +++ b/pkg/scale/encode.go @@ -78,6 +78,12 @@ func (es *encodeState) marshal(in interface{}) (err error) { return } + vdt, ok := in.(EncodeVaryingDataType) + if ok { + err = es.encodeVaryingDataType(vdt) + return + } + switch in := in.(type) { case int: err = es.encodeUint(uint(in)) @@ -97,10 +103,6 @@ func (es *encodeState) marshal(in interface{}) (err error) { err = es.encodeBool(in) case Result: err = es.encodeResult(in) - case VaryingDataType: - err = es.encodeVaryingDataType(in) - case VaryingDataTypeSlice: - err = es.encodeVaryingDataTypeSlice(in) default: switch reflect.TypeOf(in).Kind() { case reflect.Bool, reflect.Int, reflect.Int8, reflect.Int16, @@ -121,12 +123,7 @@ func (es *encodeState) marshal(in interface{}) (err error) { err = es.marshal(elem.Interface()) } case reflect.Struct: - ok := reflect.ValueOf(in).CanConvert(reflect.TypeOf(VaryingDataType{})) - if ok { - err = es.encodeCustomVaryingDataType(in) - } else { - err = es.encodeStruct(in) - } + err = es.encodeStruct(in) case reflect.Array: err = es.encodeArray(in) case reflect.Slice: @@ -203,22 +200,16 @@ func (es *encodeState) encodeResult(res Result) (err error) { return } -func (es *encodeState) 
encodeCustomVaryingDataType(in interface{}) (err error) { - vdt := reflect.ValueOf(in).Convert(reflect.TypeOf(VaryingDataType{})).Interface().(VaryingDataType) - return es.encodeVaryingDataType(vdt) -} - -func (es *encodeState) encodeVaryingDataType(vdt VaryingDataType) (err error) { - _, err = es.Write([]byte{byte(vdt.value.Index())}) +func (es *encodeState) encodeVaryingDataType(vdt EncodeVaryingDataType) (err error) { + index, value, err := vdt.IndexValue() if err != nil { return } - err = es.marshal(vdt.value) - return -} - -func (es *encodeState) encodeVaryingDataTypeSlice(vdts VaryingDataTypeSlice) (err error) { - err = es.marshal(vdts.Types) + _, err = es.Write([]byte{byte(index)}) + if err != nil { + return + } + err = es.marshal(value) return } diff --git a/pkg/scale/encode_test.go b/pkg/scale/encode_test.go index 8f5d9a60ca..8984725dcf 100644 --- a/pkg/scale/encode_test.go +++ b/pkg/scale/encode_test.go @@ -686,6 +686,11 @@ var ( priv1: []byte{0x00}, }, want: []byte{0x04, 0x01, 0x02, 0, 0, 0, 0x01}, + out: MyStructWithPrivate{ + Foo: []byte{0x01}, + Bar: 2, + Baz: true, + }, }, { name: "struct_{[]byte,_int32,_bool}_with_ignored_attributes", diff --git a/pkg/scale/errors.go b/pkg/scale/errors.go index 5b317979dd..b0a2030f78 100644 --- a/pkg/scale/errors.go +++ b/pkg/scale/errors.go @@ -11,7 +11,7 @@ var ( ErrUnsupportedType = errors.New("unsupported type") ErrUnsupportedResult = errors.New("unsupported result") errUnsupportedOption = errors.New("unsupported option") - errUnknownVaryingDataTypeValue = errors.New("unable to find VaryingDataTypeValue with index") + ErrUnknownVaryingDataTypeValue = errors.New("unable to find VaryingDataTypeValue with index") errUint128IsNil = errors.New("uint128 in nil") ErrResultNotSet = errors.New("result not set") ErrResultAlreadySet = errors.New("result already has an assigned value") diff --git a/pkg/scale/varying_data_type.go b/pkg/scale/varying_data_type.go index c5b9be40bb..a829a78c73 100644 --- a/pkg/scale/varying_data_type.go +++ b/pkg/scale/varying_data_type.go @@ -3,120 +3,16 @@ package scale -import ( - "fmt" - "strings" -) - -// VaryingDataTypeValue is used to represent scale encodable types of an associated VaryingDataType -type VaryingDataTypeValue interface { - Index() uint -} - -// VaryingDataTypeSlice is used to represent []VaryingDataType. 
SCALE requires knowledge -// of the underlying data, so it is required to have the VaryingDataType required for decoding -type VaryingDataTypeSlice struct { - VaryingDataType - Types []VaryingDataType -} - -// Add takes variadic parameter values to add VaryingDataTypeValue(s) -func (vdts *VaryingDataTypeSlice) Add(values ...VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - for _, val := range values { - copied := vdts.VaryingDataType - err = copied.Set(val) - if err != nil { - err = fmt.Errorf("setting VaryingDataTypeValue: %w", err) - return - } - vdts.Types = append(vdts.Types, copied) - } - return -} - -func (vdts VaryingDataTypeSlice) String() string { //skipcq: GO-W1029 - stringTypes := make([]string, len(vdts.Types)) - for i, vdt := range vdts.Types { - stringTypes[i] = vdt.String() - } - return "[" + strings.Join(stringTypes, ", ") + "]" -} - -// NewVaryingDataTypeSlice is constructor for VaryingDataTypeSlice -func NewVaryingDataTypeSlice(vdt VaryingDataType) (vdts VaryingDataTypeSlice) { - vdts.VaryingDataType = vdt - vdts.Types = make([]VaryingDataType, 0) - return -} - -func mustNewVaryingDataTypeSliceAndSet(vdt VaryingDataType, - values ...VaryingDataTypeValue) (vdts VaryingDataTypeSlice) { - vdts = NewVaryingDataTypeSlice(vdt) - if err := vdts.Add(values...); err != nil { - panic(fmt.Sprintf("adding varying data type value: %s", err)) - } - return +// EncodeVaryingDataType is used in VaryingDataType. It contains the methods required +// for encoding. +type EncodeVaryingDataType interface { + IndexValue() (index uint, value any, err error) + Value() (value any, err error) + ValueAt(index uint) (value any, err error) } // VaryingDataType is analogous to a rust enum. Name is taken from polkadot spec. -type VaryingDataType struct { - value VaryingDataTypeValue - cache map[uint]VaryingDataTypeValue -} - -// Set will set the VaryingDataType value -func (vdt *VaryingDataType) Set(value VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - _, ok := vdt.cache[value.Index()] - if !ok { - err = fmt.Errorf("%w: %v (%T)", ErrUnsupportedVaryingDataTypeValue, value, value) - return - } - vdt.value = value - return -} - -// Value returns value stored in vdt -func (vdt *VaryingDataType) Value() (VaryingDataTypeValue, error) { //skipcq: GO-W1029 - if vdt.value == nil { - return nil, ErrVaryingDataTypeNotSet - } - return vdt.value, nil -} - -func (vdt *VaryingDataType) String() string { //skipcq: GO-W1029 - if vdt.value == nil { - return "VaryingDataType(nil)" - } - stringer, ok := vdt.value.(fmt.Stringer) - if !ok { - return fmt.Sprintf("VaryingDataType(%v)", vdt.value) - } - return stringer.String() -} - -// NewVaryingDataType is constructor for VaryingDataType -func NewVaryingDataType(values ...VaryingDataTypeValue) (vdt VaryingDataType, err error) { - if len(values) == 0 { - err = fmt.Errorf("%w", ErrMustProvideVaryingDataTypeValue) - return - } - vdt.cache = make(map[uint]VaryingDataTypeValue) - for _, value := range values { - _, ok := vdt.cache[value.Index()] - if ok { - err = fmt.Errorf("duplicate index with VaryingDataType: %T with index: %d", value, value.Index()) - return - } - vdt.cache[value.Index()] = value - } - return -} - -// MustNewVaryingDataType is constructor for VaryingDataType -func MustNewVaryingDataType(values ...VaryingDataTypeValue) (vdt VaryingDataType) { - vdt, err := NewVaryingDataType(values...) 
- if err != nil { - panic(err) - } - return +type VaryingDataType interface { + EncodeVaryingDataType + SetValue(value any) (err error) } diff --git a/pkg/scale/varying_data_type_example_test.go b/pkg/scale/varying_data_type_example_test.go index 417a77376a..56030eba4f 100644 --- a/pkg/scale/varying_data_type_example_test.go +++ b/pkg/scale/varying_data_type_example_test.go @@ -6,7 +6,6 @@ package scale_test import ( "fmt" "reflect" - "testing" "github.com/ChainSafe/gossamer/pkg/scale" ) @@ -17,112 +16,95 @@ type MyStruct struct { Foo []byte } -func (MyStruct) Index() uint { - return 1 -} - -func (m MyStruct) String() string { - return fmt.Sprintf("MyStruct{Baz: %t, Bar: %d, Foo: %x}", m.Baz, m.Bar, m.Foo) -} - type MyOtherStruct struct { Foo string Bar uint64 Baz uint } -func (MyOtherStruct) Index() uint { - return 2 -} - -func (m MyOtherStruct) String() string { - return fmt.Sprintf("MyOtherStruct{Foo: %s, Bar: %d, Baz: %d}", m.Foo, m.Bar, m.Baz) -} - type MyInt16 int16 -func (MyInt16) Index() uint { - return 3 +type MyVaryingDataType struct { + inner any } -func (m MyInt16) String() string { return fmt.Sprintf("MyInt16(%d)", m) } - -func ExampleVaryingDataType() { - vdt, err := scale.NewVaryingDataType(MyStruct{}, MyOtherStruct{}, MyInt16(0)) - if err != nil { - panic(err) - } - - err = vdt.Set(MyStruct{ - Baz: true, - Bar: 999, - Foo: []byte{1, 2}, - }) - if err != nil { - panic(err) - } +type MyVaryingDataTypeValues interface { + MyStruct | MyOtherStruct | MyInt16 +} - bytes, err := scale.Marshal(vdt) - if err != nil { - panic(err) - } +func setMyVaryingDataType[Value MyVaryingDataTypeValues](mvdt *MyVaryingDataType, value Value) { + mvdt.inner = value +} - vdt1, err := scale.NewVaryingDataType(MyStruct{}, MyOtherStruct{}, MyInt16(0)) - if err != nil { - panic(err) +func (mvdt *MyVaryingDataType) SetValue(value any) (err error) { + switch value := value.(type) { + case MyStruct: + setMyVaryingDataType(mvdt, value) + return + case MyOtherStruct: + setMyVaryingDataType(mvdt, value) + return + case MyInt16: + setMyVaryingDataType(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") } +} - err = scale.Unmarshal(bytes, &vdt1) - if err != nil { - panic(err) +func (mvdt MyVaryingDataType) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case MyStruct: + return 0, mvdt.inner, nil + case MyOtherStruct: + return 1, mvdt.inner, nil + case MyInt16: + return 2, mvdt.inner, nil } + return 0, nil, scale.ErrUnsupportedVaryingDataTypeValue +} - if !reflect.DeepEqual(vdt, vdt1) { - panic(fmt.Errorf("uh oh: %+v %+v", vdt, vdt1)) - } +func (mvdt MyVaryingDataType) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return } -func ExampleVaryingDataTypeSlice() { - vdt, err := scale.NewVaryingDataType(MyStruct{}, MyOtherStruct{}, MyInt16(0)) - if err != nil { - panic(err) +func (mvdt MyVaryingDataType) ValueAt(index uint) (value any, err error) { + switch index { + case 0: + return MyStruct{}, nil + case 1: + return MyOtherStruct{}, nil + case 2: + return MyInt16(0), nil } + return nil, scale.ErrUnknownVaryingDataTypeValue +} - vdts := scale.NewVaryingDataTypeSlice(vdt) +func ExampleVaryingDataType() { + vdt := MyVaryingDataType{} - err = vdts.Add( - MyStruct{ - Baz: true, - Bar: 999, - Foo: []byte{1, 2}, - }, - MyInt16(1), - ) + err := vdt.SetValue(MyStruct{ + Baz: true, + Bar: 999, + Foo: []byte{1, 2}, + }) if err != nil { panic(err) } - bytes, err := scale.Marshal(vdts) + bytes, err := scale.Marshal(vdt) if err != nil { panic(err) } 
- vdts1 := scale.NewVaryingDataTypeSlice(vdt) - if err != nil { - panic(err) - } + dst := MyVaryingDataType{} - err = scale.Unmarshal(bytes, &vdts1) + err = scale.Unmarshal(bytes, &dst) if err != nil { panic(err) } - if !reflect.DeepEqual(vdts, vdts1) { - panic(fmt.Errorf("uh oh: %+v %+v", vdts, vdts1)) - } -} - -func TestExamples(_ *testing.T) { - ExampleVaryingDataType() - ExampleVaryingDataTypeSlice() + fmt.Println(reflect.DeepEqual(vdt, dst)) + // Output: true } diff --git a/pkg/scale/varying_data_type_nested_example_test.go b/pkg/scale/varying_data_type_nested_example_test.go deleted file mode 100644 index 107802b67c..0000000000 --- a/pkg/scale/varying_data_type_nested_example_test.go +++ /dev/null @@ -1,220 +0,0 @@ -// Copyright 2021 ChainSafe Systems (ON) -// SPDX-License-Identifier: LGPL-3.0-only - -package scale_test - -import ( - "fmt" - "reflect" - - "github.com/ChainSafe/gossamer/pkg/scale" -) - -// ParentVDT is a VaryingDataType that consists of multiple nested VaryingDataType -// instances (aka. a rust enum containing multiple enum options) -type ParentVDT scale.VaryingDataType - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (pvdt *ParentVDT) Set(val scale.VaryingDataTypeValue) (err error) { - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*pvdt) - err = vdt.Set(val) - if err != nil { - return - } - // store original ParentVDT with VaryingDataType that has been set - *pvdt = ParentVDT(vdt) - return -} - -// Value will return value from underying VaryingDataType -func (pvdt *ParentVDT) Value() (val scale.VaryingDataTypeValue, err error) { - vdt := scale.VaryingDataType(*pvdt) - return vdt.Value() -} - -// NewParentVDT is constructor for ParentVDT -func NewParentVDT() ParentVDT { - // use standard VaryingDataType constructor to construct a VaryingDataType - vdt, err := scale.NewVaryingDataType(NewChildVDT(), NewOtherChildVDT()) - if err != nil { - panic(err) - } - // cast to ParentVDT - return ParentVDT(vdt) -} - -// ChildVDT type is used as a VaryingDataTypeValue for ParentVDT -type ChildVDT scale.VaryingDataType - -// Index fulfils the VaryingDataTypeValue interface. 
T -func (ChildVDT) Index() uint { //skipcq: GO-W1029 - return 1 -} - -func (cvdt ChildVDT) String() string { //skipcq: GO-W1029 - value, err := cvdt.Value() - if err != nil { - return "ChildVDT()" - } - return fmt.Sprintf("ChildVDT(%s)", value) -} - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (cvdt *ChildVDT) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*cvdt) - err = vdt.Set(val) - if err != nil { - return - } - // store original ParentVDT with VaryingDataType that has been set - *cvdt = ChildVDT(vdt) - return -} - -// Value will return value from underying VaryingDataType -func (cvdt *ChildVDT) Value() (val scale.VaryingDataTypeValue, err error) { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(*cvdt) - return vdt.Value() -} - -// NewChildVDT is constructor for ChildVDT -func NewChildVDT() ChildVDT { - // use standard VaryingDataType constructor to construct a VaryingDataType - // constarined to types ChildInt16, ChildStruct, and ChildString - vdt, err := scale.NewVaryingDataType(ChildInt16(0), ChildStruct{}, ChildString("")) - if err != nil { - panic(err) - } - // cast to ParentVDT - return ChildVDT(vdt) -} - -// OtherChildVDT type is used as a VaryingDataTypeValue for ParentVDT -type OtherChildVDT scale.VaryingDataType - -// Index fulfils the VaryingDataTypeValue interface. -func (OtherChildVDT) Index() uint { //skipcq: GO-W1029 - return 2 -} - -func (cvdt OtherChildVDT) String() string { //skipcq: GO-W1029 - vdt := scale.VaryingDataType(cvdt) - vdtPtr := &vdt - value, err := vdtPtr.Value() - if err != nil { - return "OtherChildVDT()" - } - return fmt.Sprintf("OtherChildVDT(%s)", value) -} - -// Set will set a VaryingDataTypeValue using the underlying VaryingDataType -func (cvdt *OtherChildVDT) Set(val scale.VaryingDataTypeValue) (err error) { //skipcq: GO-W1029 - // cast to VaryingDataType to use VaryingDataType.Set method - vdt := scale.VaryingDataType(*cvdt) - err = vdt.Set(val) - if err != nil { - return - } - // store original ParentVDT with VaryingDataType that has been set - *cvdt = OtherChildVDT(vdt) - return -} - -// NewOtherChildVDT is constructor for OtherChildVDT -func NewOtherChildVDT() OtherChildVDT { - // use standard VaryingDataType constructor to construct a VaryingDataType - // constarined to types ChildInt16 and ChildStruct - vdt, err := scale.NewVaryingDataType(ChildInt16(0), ChildStruct{}, ChildString("")) - if err != nil { - panic(err) - } - // cast to ParentVDT - return OtherChildVDT(vdt) -} - -// ChildInt16 is used as a VaryingDataTypeValue for ChildVDT and OtherChildVDT -type ChildInt16 int16 - -// Index fulfils the VaryingDataTypeValue interface. 
The ChildVDT type is used as a -// VaryingDataTypeValue for ParentVDT -func (ChildInt16) Index() uint { - return 1 -} - -func (c ChildInt16) String() string { return fmt.Sprintf("ChildInt16(%d)", c) } - -// ChildStruct is used as a VaryingDataTypeValue for ChildVDT and OtherChildVDT -type ChildStruct struct { - A string - B bool -} - -// Index fulfils the VaryingDataTypeValue interface -func (ChildStruct) Index() uint { - return 2 -} - -func (c ChildStruct) String() string { - return fmt.Sprintf("ChildStruct{A=%s, B=%t}", c.A, c.B) -} - -// ChildString is used as a VaryingDataTypeValue for ChildVDT and OtherChildVDT -type ChildString string - -// Index fulfils the VaryingDataTypeValue interface -func (ChildString) Index() uint { - return 3 -} - -func (c ChildString) String() string { return fmt.Sprintf("ChildString(%s)", string(c)) } - -func Example() { - parent := NewParentVDT() - - // populate parent with ChildVDT - child := NewChildVDT() - child.Set(ChildInt16(888)) - err := parent.Set(child) - if err != nil { - panic(err) - } - - // validate ParentVDT.Value() - parentVal, err := parent.Value() - if err != nil { - panic(err) - } - fmt.Printf("parent.Value(): %+v\n", parentVal) - // should cast to ChildVDT, since that was set earlier - valChildVDT := parentVal.(ChildVDT) - // validate ChildVDT.Value() as ChildInt16(888) - childVdtValue, err := valChildVDT.Value() - if err != nil { - panic(err) - } - fmt.Printf("child.Value(): %+v\n", childVdtValue) - - // marshal into scale encoded bytes - bytes, err := scale.Marshal(parent) - if err != nil { - panic(err) - } - fmt.Printf("bytes: % x\n", bytes) - - // unmarshal into another ParentVDT - dstParent := NewParentVDT() - err = scale.Unmarshal(bytes, &dstParent) - if err != nil { - panic(err) - } - // assert both ParentVDT instances are the same - fmt.Println(reflect.DeepEqual(parent, dstParent)) - - // Output: - // parent.Value(): ChildVDT(ChildInt16(888)) - // child.Value(): ChildInt16(888) - // bytes: 01 01 78 03 - // true -} diff --git a/pkg/scale/varying_data_type_nested_test.go b/pkg/scale/varying_data_type_nested_test.go index e6bb510049..bdca318256 100644 --- a/pkg/scale/varying_data_type_nested_test.go +++ b/pkg/scale/varying_data_type_nested_test.go @@ -8,113 +8,153 @@ import ( "math/big" "testing" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" + "github.com/stretchr/testify/assert" ) -type parentVDT VaryingDataType +type ParentVDT struct { + inner any +} -func (pvdt *parentVDT) Set(val VaryingDataTypeValue) (err error) { - vdt := VaryingDataType(*pvdt) - err = vdt.Set(val) - if err != nil { - return - } - *pvdt = parentVDT(vdt) - return +type ParentVDTValues interface { + ChildVDT | ChildVDT1 } -func mustNewParentVDT() parentVDT { - vdt, err := NewVaryingDataType(mustNewChildVDT(), mustNewChildVDT1()) - if err != nil { - panic(err) +func NewParentVDT[Value ParentVDTValues](value ...Value) *ParentVDT { + if len(value) == 0 { + return &ParentVDT{} + } + return &ParentVDT{ + inner: value[0], } - return parentVDT(vdt) } -type childVDT VaryingDataType +func setParentVDT[Value ParentVDTValues](mvdt *ParentVDT, value Value) { + mvdt.inner = value +} -func (childVDT) Index() uint { - return 1 +func (mvdt *ParentVDT) SetValue(value any) (err error) { + switch value := value.(type) { + case ChildVDT: + setParentVDT[ChildVDT](mvdt, value) + return + case ChildVDT1: + setParentVDT[ChildVDT1](mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } } -func (c childVDT) String() string { - if c.value 
== nil { - return "childVDT(nil)" +func (mvdt ParentVDT) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case ChildVDT: + return 1, mvdt.inner, nil + case ChildVDT1: + return 2, mvdt.inner, nil } - return fmt.Sprintf("childVDT(%s)", c.value) + return 0, nil, ErrUnsupportedVaryingDataTypeValue } -func mustNewChildVDT() childVDT { - vdt, err := NewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0)) - if err != nil { - panic(err) +func (mvdt ParentVDT) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return +} + +func (mvdt ParentVDT) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return ChildVDT{}, nil + case 2: + return ChildVDT1{}, nil } - return childVDT(vdt) + return nil, ErrUnknownVaryingDataTypeValue +} + +type ChildVDT struct { + MyVaryingDataType } -func mustNewChildVDTAndSet(vdtv VaryingDataTypeValue) childVDT { - vdt, err := NewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0)) - if err != nil { - panic(err) +type ChildVDTValues interface { + VDTValue | VDTValue1 | VDTValue2 | VDTValue3 +} + +func NewChildVDT[Value ChildVDTValues](value ...Value) *ChildVDT { + if len(value) == 0 { + return &ChildVDT{} } - err = vdt.Set(vdtv) - if err != nil { - panic(err) + return &ChildVDT{ + MyVaryingDataType: *NewMyVaringDataType[Value](value...), } - return childVDT(vdt) } -type childVDT1 VaryingDataType +func (cvdt *ChildVDT) SetValue(value any) (err error) { + return cvdt.MyVaryingDataType.SetValue(value) +} -func (childVDT1) Index() uint { - return 2 +func (cvdt ChildVDT) IndexValue() (index uint, value any, err error) { + return cvdt.MyVaryingDataType.IndexValue() } -func (c childVDT1) String() string { - if c.value == nil { - return "childVDT1(nil)" - } - return fmt.Sprintf("childVDT1(%s)", c.value) +func (cvdt ChildVDT) Value() (value any, err error) { + return cvdt.MyVaryingDataType.Value() } -func mustNewChildVDT1() childVDT1 { - vdt, err := NewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}) - if err != nil { - panic(err) - } - return childVDT1(vdt) +func (cvdt ChildVDT) ValueAt(index uint) (value any, err error) { + return cvdt.MyVaryingDataType.ValueAt(index) +} + +type ChildVDT1 struct { + MyVaryingDataType +} + +type ChildVDT1Values interface { + VDTValue | VDTValue1 | VDTValue2 | VDTValue3 } -func mustNewChildVDT1AndSet(vdtv VaryingDataTypeValue) childVDT1 { - vdt, err := NewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}) - if err != nil { - panic(err) +func NewChildVDT1[Value ChildVDT1Values](value ...Value) *ChildVDT1 { + if len(value) == 0 { + return &ChildVDT1{} } - err = vdt.Set(vdtv) - if err != nil { - panic(err) + return &ChildVDT1{ + MyVaryingDataType: *NewMyVaringDataType[Value](value...), } - return childVDT1(vdt) } +func (cvdt *ChildVDT1) SetValue(value any) (err error) { + return cvdt.MyVaryingDataType.SetValue(value) +} + +func (cvdt ChildVDT1) IndexValue() (index uint, value any, err error) { + return cvdt.MyVaryingDataType.IndexValue() +} + +func (cvdt ChildVDT1) Value() (value any, err error) { + return cvdt.MyVaryingDataType.Value() +} + +func (cvdt ChildVDT1) ValueAt(index uint) (value any, err error) { + return cvdt.MyVaryingDataType.ValueAt(index) +} + +var ( + _ = VaryingDataType(&ParentVDT{}) + _ = VaryingDataType(&ChildVDT{}) + _ = VaryingDataType(&ChildVDT1{}) +) + type constructorTest struct { - name string - newIn func(t *testing.T) interface{} - want []byte - wantErr bool + name string + newIn func(t *testing.T) interface{} + 
want []byte } var nestedVaryingDataTypeTests = []constructorTest{ { name: "ParentVDT_with_ChildVDT", newIn: func(t *testing.T) interface{} { - pvdt := mustNewParentVDT() - err := pvdt.Set(mustNewChildVDTAndSet(VDTValue3(16383))) - if err != nil { - t.Fatalf("%v", err) - } - return pvdt + child := NewChildVDT(VDTValue3(16383)) + parent := NewParentVDT(*child) + return parent }, want: newWant( // index of childVDT @@ -128,29 +168,24 @@ var nestedVaryingDataTypeTests = []constructorTest{ { name: "ParentVDT_with_ChildVDT1", newIn: func(t *testing.T) interface{} { - pvdt := mustNewParentVDT() - err := pvdt.Set(mustNewChildVDT1AndSet( - VDTValue{ - A: big.NewInt(1073741823), - B: int(1073741823), - C: uint(1073741823), - D: int8(1), - E: uint8(1), - F: int16(16383), - G: uint16(16383), - H: int32(1073741823), - I: uint32(1073741823), - J: int64(9223372036854775807), - K: uint64(9223372036854775807), - L: byteArray(64), - M: testStrings[1], - N: true, - }, - )) - if err != nil { - t.Fatalf("%v", err) - } - return pvdt + child1 := NewChildVDT1(VDTValue{ + A: big.NewInt(1073741823), + B: int(1073741823), + C: uint(1073741823), + D: int8(1), + E: uint8(1), + F: int16(16383), + G: uint16(16383), + H: int32(1073741823), + I: uint32(1073741823), + J: int64(9223372036854775807), + K: uint64(9223372036854775807), + L: byteArray(64), + M: testStrings[1], + N: true, + }) + parent := NewParentVDT(*child1) + return parent }, want: newWant( // index of childVDT1 @@ -178,37 +213,27 @@ var nestedVaryingDataTypeTests = []constructorTest{ }, } -func Test_encodeState_encodeCustomVaryingDataType_nested(t *testing.T) { +func TestVaryingDataType_EncodeNested(t *testing.T) { for _, tt := range nestedVaryingDataTypeTests { t.Run(tt.name, func(t *testing.T) { - b, err := Marshal(tt.newIn(t)) - if (err != nil) != tt.wantErr { - t.Errorf("Marshal() error = %v, wantErr %v", err, tt.wantErr) - } - if diff := cmp.Diff(b, tt.want); diff != "" { - t.Errorf("Marshal() diff: %s", diff) - } + vdt := tt.newIn(t).(*ParentVDT) + b, err := Marshal(*vdt) + assert.NoError(t, err) + assert.Equal(t, tt.want, b) }) } } -func Test_decodeState_decodeCustomVaryingDataType_nested(t *testing.T) { +func TestVaryingDataType_DecodeNested(t *testing.T) { for _, tt := range nestedVaryingDataTypeTests { t.Run(tt.name, func(t *testing.T) { - dst := mustNewParentVDT() - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("decodeState.unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return - } - expected := tt.newIn(t) - - diff := cmp.Diff(dst, expected, - cmp.AllowUnexported(parentVDT{}, childVDT{}, childVDT1{}), - cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{}), - ) - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) - } + dst := NewParentVDT[ChildVDT]() + err := Unmarshal(tt.want, dst) + assert.NoError(t, err) + + expected := tt.newIn(t).(*ParentVDT) + assert.Equal(t, expected.inner, dst.inner) + }) } } diff --git a/pkg/scale/varying_data_type_test.go b/pkg/scale/varying_data_type_test.go index 0fb9c755b4..f70b5a5a62 100644 --- a/pkg/scale/varying_data_type_test.go +++ b/pkg/scale/varying_data_type_test.go @@ -4,40 +4,13 @@ package scale import ( - "bytes" + "fmt" "math/big" - "reflect" "testing" - "github.com/google/go-cmp/cmp" - "github.com/google/go-cmp/cmp/cmpopts" + "github.com/stretchr/testify/assert" ) -func mustNewVaryingDataType(values ...VaryingDataTypeValue) (vdt VaryingDataType) { - vdt, err := NewVaryingDataType(values...) 
- if err != nil { - panic(err) - } - return -} - -func mustNewVaryingDataTypeAndSet(value VaryingDataTypeValue, values ...VaryingDataTypeValue) (vdt VaryingDataType) { - vdt = mustNewVaryingDataType(values...) - err := vdt.Set(value) - if err != nil { - panic(err) - } - return -} - -type customVDT VaryingDataType - -type customVDTWithNew VaryingDataType - -func (cvwn customVDTWithNew) New() customVDTWithNew { - return customVDTWithNew(mustNewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0))) -} - type VDTValue struct { A *big.Int B int @@ -55,12 +28,6 @@ type VDTValue struct { N bool } -func (VDTValue) Index() uint { - return 1 -} - -func (VDTValue) String() string { return "" } - type VDTValue1 struct { O **big.Int P *int @@ -78,12 +45,6 @@ type VDTValue1 struct { AB *bool } -func (VDTValue1) Index() uint { - return 2 -} - -func (VDTValue1) String() string { return "" } - type VDTValue2 struct { A MyStruct B MyStructWithIgnore @@ -105,26 +66,93 @@ type VDTValue2 struct { P [2][2]byte } -func (VDTValue2) Index() uint { - return 3 +type VDTValue3 int16 + +type MyVaryingDataType struct { + inner any } -func (VDTValue2) String() string { return "" } +type CustomAny any -type VDTValue3 int16 +type MyVaryingDataTypeValues interface { + VDTValue | VDTValue1 | VDTValue2 | VDTValue3 | CustomAny +} + +func NewMyVaringDataType[Value MyVaryingDataTypeValues](value ...Value) *MyVaryingDataType { + if len(value) == 0 { + return &MyVaryingDataType{} + } + return &MyVaryingDataType{ + inner: value[0], + } +} + +func setMyVaryingDataType[Value MyVaryingDataTypeValues](mvdt *MyVaryingDataType, value Value) { + mvdt.inner = value +} + +func (mvdt *MyVaryingDataType) SetValue(value any) (err error) { + switch value := value.(type) { + case VDTValue: + setMyVaryingDataType[VDTValue](mvdt, value) + return + case VDTValue1: + setMyVaryingDataType[VDTValue1](mvdt, value) + return + case VDTValue2: + setMyVaryingDataType[VDTValue2](mvdt, value) + return + case VDTValue3: + setMyVaryingDataType[VDTValue3](mvdt, value) + return + case CustomAny: + setMyVaryingDataType(mvdt, value) + return + default: + return fmt.Errorf("unsupported type") + } +} + +func (mvdt MyVaryingDataType) IndexValue() (index uint, value any, err error) { + switch mvdt.inner.(type) { + case VDTValue: + return 1, mvdt.inner, nil + case VDTValue1: + return 2, mvdt.inner, nil + case VDTValue2: + return 3, mvdt.inner, nil + case VDTValue3: + return 4, mvdt.inner, nil + case CustomAny: + return 5, mvdt.inner, nil + } + return 0, nil, ErrUnsupportedVaryingDataTypeValue +} -func (VDTValue3) Index() uint { - return 4 +func (mvdt MyVaryingDataType) Value() (value any, err error) { + _, value, err = mvdt.IndexValue() + return } -func (VDTValue3) String() string { return "" } +func (mvdt MyVaryingDataType) ValueAt(index uint) (value any, err error) { + switch index { + case 1: + return VDTValue{}, nil + case 2: + return VDTValue1{}, nil + case 3: + return VDTValue2{}, nil + case 4: + return VDTValue3(0), nil + case 5: + return CustomAny(int16(0)), nil + } + return nil, ErrUnknownVaryingDataTypeValue +} var varyingDataTypeTests = tests{ - { - in: mustNewVaryingDataTypeAndSet( - VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))}, - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), - ), + test{ + in: NewMyVaringDataType(VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))}), want: []byte{ 2, 0x01, 0xfe, 0xff, 0xff, 0xff, @@ -144,7 +172,7 @@ var varyingDataTypeTests = tests{ }, }, { - in: mustNewVaryingDataTypeAndSet( + in: 
NewMyVaringDataType( VDTValue{ A: big.NewInt(1073741823), B: int(1073741823), @@ -161,7 +189,6 @@ var varyingDataTypeTests = tests{ M: testStrings[1], N: true, }, - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), ), want: newWant( // index of VDTValue @@ -186,7 +213,7 @@ var varyingDataTypeTests = tests{ ), }, { - in: mustNewVaryingDataTypeAndSet( + in: NewMyVaringDataType( VDTValue1{ O: newBigIntPtr(big.NewInt(1073741823)), P: newIntPtr(int(1073741823)), @@ -203,7 +230,6 @@ var varyingDataTypeTests = tests{ AA: newStringPtr(testStrings[1]), AB: newBoolPtr(true), }, - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), ), want: newWant( // index of VDTValue1 @@ -228,7 +254,7 @@ var varyingDataTypeTests = tests{ ), }, { - in: mustNewVaryingDataTypeAndSet( + in: NewMyVaringDataType( VDTValue2{ A: MyStruct{ Foo: []byte{0x01}, @@ -265,7 +291,6 @@ var varyingDataTypeTests = tests{ O: [2][]byte{{0x00, 0x01}, {0x01, 0x00}}, P: [2][2]byte{{0x00, 0x01}, {0x01, 0x00}}, }, - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), ), want: newWant( // index of VDTValue2 @@ -292,9 +317,8 @@ var varyingDataTypeTests = tests{ ), }, { - in: mustNewVaryingDataTypeAndSet( + in: NewMyVaringDataType( VDTValue3(16383), - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), ), want: newWant( // index of VDTValue2 @@ -303,318 +327,54 @@ var varyingDataTypeTests = tests{ []byte{0xff, 0x3f}, ), }, + { + name: "CustomAny", + in: NewMyVaringDataType( + CustomAny(int16(16383)), + ), + want: newWant( + // index of VDTValue2 + []byte{5}, + // encoding of int16 + []byte{0xff, 0x3f}, + ), + }, } -func Test_encodeState_encodeVaryingDataType(t *testing.T) { - for _, tt := range varyingDataTypeTests { - t.Run(tt.name, func(t *testing.T) { - buffer := bytes.NewBuffer(nil) - es := &encodeState{ - Writer: buffer, - fieldScaleIndicesCache: cache, - } - vdt := tt.in.(VaryingDataType) - if err := es.marshal(vdt); (err != nil) != tt.wantErr { - t.Errorf("encodeState.marshal() error = %v, wantErr %v", err, tt.wantErr) - } - if !reflect.DeepEqual(buffer.Bytes(), tt.want) { - t.Errorf("encodeState.marshal() = %v, want %v", buffer.Bytes(), tt.want) - } - }) - } -} - -func Test_decodeState_decodeVaryingDataType(t *testing.T) { - for _, tt := range varyingDataTypeTests { - t.Run(tt.name, func(t *testing.T) { - dst, err := NewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0)) - if err != nil { - t.Errorf("%v", err) - return - } - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("decodeState.unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return - } - vdt := tt.in.(VaryingDataType) - dstVal, err := dst.Value() - if err != nil { - t.Errorf("%v", err) - return - } - vdtVal, err := vdt.Value() - if err != nil { - t.Errorf("%v", err) - return - } - diff := cmp.Diff(dstVal, vdtVal, cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{})) - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) - } - }) - } -} - -func Test_encodeState_encodeCustomVaryingDataType(t *testing.T) { - for _, tt := range varyingDataTypeTests { - t.Run(tt.name, func(t *testing.T) { - buffer := bytes.NewBuffer(nil) - es := &encodeState{ - Writer: buffer, - fieldScaleIndicesCache: cache, - } - vdt := tt.in.(VaryingDataType) - cvdt := customVDT(vdt) - if err := es.marshal(cvdt); (err != nil) != tt.wantErr { - t.Errorf("encodeState.encodeStruct() error = %v, wantErr %v", err, tt.wantErr) - } - if !reflect.DeepEqual(buffer.Bytes(), tt.want) { - t.Errorf("encodeState.encodeStruct() = %v, want %v", 
buffer.Bytes(), tt.want) - } - }) - } -} -func Test_decodeState_decodeCustomVaryingDataType(t *testing.T) { +func TestVaryingDataType_Encode(t *testing.T) { for _, tt := range varyingDataTypeTests { t.Run(tt.name, func(t *testing.T) { - vdt, err := NewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0)) - if err != nil { - t.Errorf("%v", err) - return - } - dst := customVDT(vdt) - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("decodeState.unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return - } - - dstVDT := reflect.ValueOf(tt.in).Convert(reflect.TypeOf(VaryingDataType{})).Interface().(VaryingDataType) - inVDT := reflect.ValueOf(tt.in).Convert(reflect.TypeOf(VaryingDataType{})).Interface().(VaryingDataType) - dstVDTVal, err := dstVDT.Value() - if err != nil { - t.Errorf("%v", err) - return - } - inVDTVal, err := inVDT.Value() - if err != nil { - t.Errorf("%v", err) - return - } - diff := cmp.Diff(dstVDTVal, inVDTVal, - cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{})) - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) - } - if reflect.TypeOf(dst) != reflect.TypeOf(customVDT{}) { - t.Errorf("types mismatch dst: %v expected: %v", reflect.TypeOf(dst), reflect.TypeOf(customVDT{})) - } + vdt := tt.in.(EncodeVaryingDataType) + bytes, err := Marshal(vdt) + assert.NoError(t, err) + assert.Equal(t, tt.want, bytes) }) } } -func Test_decodeState_decodeCustomVaryingDataTypeWithNew(t *testing.T) { +func TestVaryingDataType_Decode(t *testing.T) { for _, tt := range varyingDataTypeTests { t.Run(tt.name, func(t *testing.T) { - dst := customVDTWithNew{} - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("decodeState.unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return - } + dst := NewMyVaringDataType[VDTValue3]() + err := Unmarshal(tt.want, dst) + assert.NoError(t, err) - dstVDT := reflect.ValueOf(tt.in).Convert(reflect.TypeOf(VaryingDataType{})).Interface().(VaryingDataType) - inVDT := reflect.ValueOf(tt.in).Convert(reflect.TypeOf(VaryingDataType{})).Interface().(VaryingDataType) - dstVDTVal, err := dstVDT.Value() - if err != nil { - t.Errorf("%v", err) - return - } - inVDTVal, err := inVDT.Value() - if err != nil { - t.Errorf("%v", err) - return - } - diff := cmp.Diff(dstVDTVal, inVDTVal, - cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{})) - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) - } - if reflect.TypeOf(dst) != reflect.TypeOf(customVDTWithNew{}) { - t.Errorf("types mismatch dst: %v expected: %v", reflect.TypeOf(dst), reflect.TypeOf(customVDT{})) - } - }) - } -} + dstVal, err := tt.in.(VaryingDataType).Value() + assert.NoError(t, err) -func TestNewVaryingDataType(t *testing.T) { - type args struct { - values []VaryingDataTypeValue - } - tests := []struct { - name string - args args - wantVdt VaryingDataType - wantErr bool - }{ - { - args: args{ - values: []VaryingDataTypeValue{}, - }, - wantErr: true, - }, - { - args: args{ - values: []VaryingDataTypeValue{ - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), - }, - }, - wantVdt: VaryingDataType{ - cache: map[uint]VaryingDataTypeValue{ - VDTValue{}.Index(): VDTValue{}, - VDTValue1{}.Index(): VDTValue1{}, - VDTValue2{}.Index(): VDTValue2{}, - VDTValue3(0).Index(): VDTValue3(0), - }, - }, - }, - { - args: args{ - values: []VaryingDataTypeValue{ - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), VDTValue{}, - }, - }, - wantVdt: VaryingDataType{ - cache: 
map[uint]VaryingDataTypeValue{ - VDTValue{}.Index(): VDTValue{}, - VDTValue1{}.Index(): VDTValue1{}, - VDTValue2{}.Index(): VDTValue2{}, - VDTValue3(0).Index(): VDTValue3(0), - }, - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - gotVdt, err := NewVaryingDataType(tt.args.values...) - if (err != nil) != tt.wantErr { - t.Errorf("NewVaryingDataType() error = %v, wantErr %v", err, tt.wantErr) - return - } - if !reflect.DeepEqual(gotVdt, tt.wantVdt) { - t.Errorf("NewVaryingDataType() = %v, want %v", gotVdt, tt.wantVdt) - } - }) - } -} + vdtVal, err := dst.Value() + assert.NoError(t, err) -func TestVaryingDataType_Set(t *testing.T) { - type args struct { - value VaryingDataTypeValue - } - tests := []struct { - name string - vdt VaryingDataType - args args - wantErr bool - }{ - { - vdt: mustNewVaryingDataType(VDTValue1{}), - args: args{ - value: VDTValue1{}, - }, - }, - { - vdt: mustNewVaryingDataType(VDTValue1{}, VDTValue2{}), - args: args{ - value: VDTValue1{}, - }, - }, - { - vdt: mustNewVaryingDataType(VDTValue1{}, VDTValue2{}), - args: args{ - value: VDTValue2{}, - }, - }, - { - vdt: mustNewVaryingDataType(VDTValue1{}, VDTValue2{}), - args: args{ - value: VDTValue3(0), - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - vdt := tt.vdt - if err := vdt.Set(tt.args.value); (err != nil) != tt.wantErr { - t.Errorf("VaryingDataType.SetValue() error = %v, wantErr %v", err, tt.wantErr) - } - }) - } -} - -func TestVaryingDataTypeSlice_Add(t *testing.T) { - type args struct { - values []VaryingDataTypeValue - } - tests := []struct { - name string - vdts VaryingDataTypeSlice - args args - wantErr bool - wantValues []VaryingDataType - }{ - { - name: "happy_path", - vdts: NewVaryingDataTypeSlice(MustNewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0))), - args: args{ - values: []VaryingDataTypeValue{ - VDTValue{ - B: 1, - }, - }, - }, - wantValues: []VaryingDataType{ - mustNewVaryingDataTypeAndSet( - VDTValue{ - B: 1, - }, - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), - ), - }, - }, - { - name: "invalid_value_error_case", - vdts: NewVaryingDataTypeSlice(MustNewVaryingDataType(VDTValue{}, VDTValue1{}, VDTValue2{})), - args: args{ - values: []VaryingDataTypeValue{ - VDTValue3(0), - }, - }, - wantValues: []VaryingDataType{}, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - if err := tt.vdts.Add(tt.args.values...); (err != nil) != tt.wantErr { - t.Errorf("VaryingDataTypeSlice.Add() error = %v, wantErr %v", err, tt.wantErr) - } - if !reflect.DeepEqual(tt.vdts.Types, tt.wantValues) { - t.Errorf("NewVaryingDataType() = %v, want %v", tt.vdts.Types, tt.wantValues) - } + assert.Equal(t, vdtVal, dstVal) }) } } var varyingDataTypeSliceTests = tests{ { - in: mustNewVaryingDataTypeSliceAndSet( - mustNewVaryingDataType( - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), - ), - VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))}, - ), + in: []VaryingDataType{ + NewMyVaringDataType(VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))}), + }, want: newWant( []byte{ // length @@ -640,12 +400,9 @@ var varyingDataTypeSliceTests = tests{ ), }, { - in: mustNewVaryingDataTypeSliceAndSet( - mustNewVaryingDataType( - VDTValue{}, VDTValue1{}, VDTValue2{}, VDTValue3(0), - ), - VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))}, - VDTValue{ + in: []VaryingDataType{ + NewMyVaringDataType(VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))}), + 
NewMyVaringDataType(VDTValue{ A: big.NewInt(1073741823), B: int(1073741823), C: uint(1073741823), @@ -660,8 +417,8 @@ var varyingDataTypeSliceTests = tests{ L: byteArray(64), M: testStrings[1], N: true, - }, - ), + }), + }, want: newWant( []byte{ // length @@ -709,39 +466,103 @@ var varyingDataTypeSliceTests = tests{ }, } -func Test_encodeState_encodeVaryingDataTypeSlice(t *testing.T) { +func TestVaryingDataType_EncodeSlice(t *testing.T) { for _, tt := range varyingDataTypeSliceTests { t.Run(tt.name, func(t *testing.T) { - vdt := tt.in.(VaryingDataTypeSlice) + vdt := tt.in.([]VaryingDataType) b, err := Marshal(vdt) - if (err != nil) != tt.wantErr { - t.Errorf("Marshal() error = %v, wantErr %v", err, tt.wantErr) - } - if !reflect.DeepEqual(b, tt.want) { - t.Errorf("Marshal() = %v, want %v", b, tt.want) - } + assert.NoError(t, err) + assert.Equal(t, tt.want, b) }) } } -func Test_decodeState_decodeVaryingDataTypeSlice(t *testing.T) { - opt := cmp.Comparer(func(x, y VaryingDataType) bool { - return reflect.DeepEqual(x.value, y.value) && reflect.DeepEqual(x.cache, y.cache) - }) - +func TestVaryingDataType_DecodeSlice(t *testing.T) { for _, tt := range varyingDataTypeSliceTests { t.Run(tt.name, func(t *testing.T) { - dst := tt.in.(VaryingDataTypeSlice) - dst.Types = make([]VaryingDataType, 0) - if err := Unmarshal(tt.want, &dst); (err != nil) != tt.wantErr { - t.Errorf("Unmarshal() error = %v, wantErr %v", err, tt.wantErr) - return + var dst []MyVaryingDataType + err := Unmarshal(tt.want, &dst) + assert.NoError(t, err) + + dstValues := make([]any, len(dst)) + for i, vdt := range dst { + value, err := vdt.Value() + assert.NoError(t, err) + dstValues[i] = value } - vdts := tt.in.(VaryingDataTypeSlice) - diff := cmp.Diff(dst, vdts, cmpopts.IgnoreUnexported(big.Int{}, VDTValue2{}, MyStructWithIgnore{}), opt) - if diff != "" { - t.Errorf("decodeState.unmarshal() = %s", diff) + + expectedValues := make([]any, len(tt.in.([]VaryingDataType))) + for i, vdt := range tt.in.([]VaryingDataType) { + value, err := vdt.Value() + assert.NoError(t, err) + expectedValues[i] = value } + + assert.Equal(t, expectedValues, dstValues) }) } } + +func TestVaryingDataType_EncodeArray(t *testing.T) { + vdtval1 := VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))} + mvdt := NewMyVaringDataType[VDTValue1](vdtval1) + _ = VaryingDataType(mvdt) + mvdtArray := [1]VaryingDataType{ + mvdt, + } + expected := []byte{ + 2, + 0x01, 0xfe, 0xff, 0xff, 0xff, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + } + + bytes, err := Marshal(mvdtArray) + if err != nil { + t.Errorf("wtf %v", err) + } + assert.NoError(t, err) + assert.Equal(t, expected, bytes) +} + +func TestVaryingDataType_DecodeArray(t *testing.T) { + vdtval1 := VDTValue1{O: newBigIntPtr(big.NewInt(1073741823))} + mvdt := NewMyVaringDataType[VDTValue1](vdtval1) + _ = VaryingDataType(mvdt) + expected := [1]MyVaryingDataType{ + *mvdt, + } + var mvdtArr [1]MyVaryingDataType + + bytes := []byte{ + 2, + 0x01, 0xfe, 0xff, 0xff, 0xff, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + 0x00, + } + err := Unmarshal(bytes, &mvdtArr) + assert.NoError(t, err) + assert.Equal(t, expected, mvdtArr) +} diff --git a/tests/stress/stress_test.go b/tests/stress/stress_test.go index 24bbef8d3e..45e7d09282 100644 --- a/tests/stress/stress_test.go +++ b/tests/stress/stress_test.go @@ -821,7 +821,7 @@ func TestStress_SecondarySlotProduction(t *testing.T) { header := block.Header - preDigestItem := 
header.Digest.Types[0] + preDigestItem := header.Digest[0] preDigestItemValue, err := preDigestItem.Value() require.NoError(t, err) preDigest, ok := preDigestItemValue.(gosstypes.PreRuntimeDigest) diff --git a/tests/utils/rpc/request.go b/tests/utils/rpc/request.go index c79a0d0bc8..3121da6e0e 100644 --- a/tests/utils/rpc/request.go +++ b/tests/utils/rpc/request.go @@ -121,7 +121,7 @@ func NewEndpoint(port string) string { return "http://localhost:" + port } -func rpcLogsToDigest(logs []string) (digest scale.VaryingDataTypeSlice, err error) { +func rpcLogsToDigest(logs []string) (digest types.Digest, err error) { digest = types.NewDigest() for _, l := range logs { From c37b6b65e484d933578c726d6e4c3936a130bb12 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 22:57:34 -0500 Subject: [PATCH 07/11] chore(deps): bump github.com/ethereum/go-ethereum from 1.13.10 to 1.13.11 (#3726) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 4 ++-- go.sum | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/go.mod b/go.mod index 5b62f79ef5..15593f4d62 100644 --- a/go.mod +++ b/go.mod @@ -13,7 +13,7 @@ require ( github.com/dgraph-io/badger/v2 v2.2007.4 github.com/dgraph-io/ristretto v0.1.1 github.com/disiqueira/gotree v1.0.0 - github.com/ethereum/go-ethereum v1.13.10 + github.com/ethereum/go-ethereum v1.13.11 github.com/fatih/color v1.16.0 github.com/go-playground/validator/v10 v10.17.0 github.com/google/go-cmp v0.6.0 @@ -76,7 +76,7 @@ require ( github.com/getsentry/sentry-go v0.18.0 // indirect github.com/go-logr/logr v1.2.4 // indirect github.com/go-logr/stdr v1.2.2 // indirect - github.com/go-ole/go-ole v1.2.5 // indirect + github.com/go-ole/go-ole v1.3.0 // indirect github.com/go-playground/locales v0.14.1 // indirect github.com/go-playground/universal-translator v0.18.1 // indirect github.com/go-task/slim-sprig v0.0.0-20230315185526-52ccab3ef572 // indirect diff --git a/go.sum b/go.sum index 87b7ca65fe..a13d29246a 100644 --- a/go.sum +++ b/go.sum @@ -162,8 +162,8 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/etcd-io/bbolt v1.3.3/go.mod h1:ZF2nL25h33cCyBtcyWeZ2/I3HQOfTP+0PIEvHjkjCrw= -github.com/ethereum/go-ethereum v1.13.10 h1:Ppdil79nN+Vc+mXfge0AuUgmKWuVv4eMqzoIVSdqZek= -github.com/ethereum/go-ethereum v1.13.10/go.mod h1:sc48XYQxCzH3fG9BcrXCOOgQk2JfZzNAmIKnceogzsA= +github.com/ethereum/go-ethereum v1.13.11 h1:b51Dsm+rEg7anFRUMGB8hODXHvNfcRKzz9vcj8wSdUs= +github.com/ethereum/go-ethereum v1.13.11/go.mod h1:gFtlVORuUcT+UUIcJ/veCNjkuOSujCi338uSHJrYAew= github.com/fasthttp-contrib/websocket v0.0.0-20160511215533-1f3b11f56072/go.mod h1:duJ4Jxv5lDcvg4QuQr0oowTf7dz4/CR8NtyCooz9HL8= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= @@ -199,8 +199,9 @@ github.com/go-logr/logr v1.2.4/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbV github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-martini/martini 
v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8= -github.com/go-ole/go-ole v1.2.5 h1:t4MGB5xEDZvXI+0rMjjsfBsD7yAgp/s9ZDkL1JndXwY= github.com/go-ole/go-ole v1.2.5/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0= +github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE= +github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78= github.com/go-playground/assert/v2 v2.2.0 h1:JvknZsQTYeFEAhQwI4qEt9cyV5ONwRHC+lYKSsYSR8s= github.com/go-playground/assert/v2 v2.2.0/go.mod h1:VDjEfimB/XKnb+ZQfWdccd7VUvScMdVu0Titje2rxJ4= github.com/go-playground/locales v0.14.1 h1:EWaQ/wswjilfKLTECiXz7Rh+3BjFhfDFKv/oXslEjJA= @@ -877,6 +878,7 @@ golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220209214540-3681064d5158/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20221010170243-090e33056c14/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= From b9d00bf51ed28fea3c0b1d7b1428fe66d80b0135 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 29 Jan 2024 23:29:14 -0500 Subject: [PATCH 08/11] chore(deps): bump github.com/multiformats/go-multiaddr from 0.12.1 to 0.12.2 (#3725) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 15593f4d62..ba317ded90 100644 --- a/go.mod +++ b/go.mod @@ -28,7 +28,7 @@ require ( github.com/libp2p/go-libp2p v0.31.0 github.com/libp2p/go-libp2p-kad-dht v0.25.2 github.com/minio/sha256-simd v1.0.1 - github.com/multiformats/go-multiaddr v0.12.1 + github.com/multiformats/go-multiaddr v0.12.2 github.com/nanobox-io/golang-scribble v0.0.0-20190309225732-aa3e7c118975 github.com/prometheus/client_golang v1.18.0 github.com/prometheus/client_model v0.5.0 diff --git a/go.sum b/go.sum index a13d29246a..8652b73046 100644 --- a/go.sum +++ b/go.sum @@ -502,8 +502,8 @@ github.com/multiformats/go-base36 v0.2.0 h1:lFsAbNOGeKtuKozrtBsAkSVhv1p9D0/qedU9 github.com/multiformats/go-base36 v0.2.0/go.mod h1:qvnKE++v+2MWCfePClUEjE78Z7P2a1UV0xHgWc0hkp4= github.com/multiformats/go-multiaddr v0.1.1/go.mod h1:aMKBKNEYmzmDmxfX88/vz+J5IU55txyt0p4aiWVohjo= github.com/multiformats/go-multiaddr v0.2.0/go.mod h1:0nO36NvPpyV4QzvTLi/lafl2y95ncPj0vFwVF6k6wJ4= -github.com/multiformats/go-multiaddr v0.12.1 h1:vm+BA/WZA8QZDp1pF1FWhi5CT3g1tbi5GJmqpb6wnlk= -github.com/multiformats/go-multiaddr v0.12.1/go.mod h1:7mPkiBMmLeFipt+nNSq9pHZUeJSt8lHBgH6yhj0YQzE= +github.com/multiformats/go-multiaddr v0.12.2 h1:9G9sTY/wCYajKa9lyfWPmpZAwe6oV+Wb1zcmMS1HG24= +github.com/multiformats/go-multiaddr v0.12.2/go.mod h1:GKyaTYjZRdcUhyOetrxTk9z0cW+jA/YrnqTOvKgi44M= github.com/multiformats/go-multiaddr-dns v0.3.1 h1:QgQgR+LQVt3NPTjbrLLpsaT2ufAA2y0Mkk+QRVJbW3A= github.com/multiformats/go-multiaddr-dns v0.3.1/go.mod h1:G/245BRQ6FJGmryJCrOuTdB37AMA5AMOVuO6NY3JwTk= github.com/multiformats/go-multiaddr-fmt v0.1.0 
h1:WLEFClPycPkp4fnIzoFoV9FVd49/eQsuaL3/CWe167E= From 8d4633373f9153e4f1ba8d837e00c0420e824453 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ecl=C3=A9sio=20Junior?= Date: Wed, 31 Jan 2024 10:01:09 -0400 Subject: [PATCH 09/11] chore: improve sync logs (#3708) Co-authored-by: Kirill --- dot/state/block_finalisation.go | 3 + dot/sync/chain_sync.go | 97 +++++++++++++++++---------------- dot/sync/chain_sync_test.go | 54 +++++++++++++++--- dot/sync/syncer.go | 5 +- lib/grandpa/message_handler.go | 3 - 5 files changed, 103 insertions(+), 59 deletions(-) diff --git a/dot/state/block_finalisation.go b/dot/state/block_finalisation.go index 58428e7529..bf35bdae0b 100644 --- a/dot/state/block_finalisation.go +++ b/dot/state/block_finalisation.go @@ -182,6 +182,9 @@ func (bs *BlockState) SetFinalisedHash(hash common.Hash, round, setID uint64) er } bs.lastFinalised = hash + + logger.Infof( + "🔨 finalised block #%d (%s), round %d, set id %d", header.Number, hash, round, setID) return nil } diff --git a/dot/sync/chain_sync.go b/dot/sync/chain_sync.go index 17a6a587ad..dc15370223 100644 --- a/dot/sync/chain_sync.go +++ b/dot/sync/chain_sync.go @@ -238,17 +238,17 @@ func (cs *chainSync) stop() error { } } -func (cs *chainSync) currentSyncInformations() (bestBlockHeader *types.Header, syncTarget uint, +func (cs *chainSync) currentSyncInformations() (bestBlockHeader *types.Header, isBootstrap bool, err error) { bestBlockHeader, err = cs.blockState.BestBlockHeader() if err != nil { - return nil, syncTarget, false, fmt.Errorf("getting best block header: %w", err) + return nil, false, fmt.Errorf("getting best block header: %w", err) } - syncTarget = cs.peerViewSet.getTarget() + syncTarget := cs.peerViewSet.getTarget() bestBlockNumber := bestBlockHeader.Number isBootstrap = bestBlockNumber+network.MaxBlocksInResponse < syncTarget - return bestBlockHeader, syncTarget, isBootstrap, nil + return bestBlockHeader, isBootstrap, nil } func (cs *chainSync) bootstrapSync() { @@ -262,27 +262,12 @@ func (cs *chainSync) bootstrapSync() { default: } - bestBlockHeader, syncTarget, isBootstrap, err := cs.currentSyncInformations() + bestBlockHeader, isBootstrap, err := cs.currentSyncInformations() if err != nil { logger.Criticalf("ending bootstrap sync, getting current sync info: %s", err) return } - finalisedHeader, err := cs.blockState.GetHighestFinalisedHeader() - if err != nil { - logger.Criticalf("getting highest finalized header: %w", err) - return - } - - logger.Infof( - "🚣 currently syncing, %d peers connected, "+ - "%d available workers, "+ - "target block number %d, "+ - "finalised block number %d with hash %s", - len(cs.network.Peers()), - cs.workerPool.totalWorkers(), - syncTarget, finalisedHeader.Number, finalisedHeader.Hash()) - if isBootstrap { cs.workerPool.useConnectedPeers() err = cs.requestMaxBlocksFrom(bestBlockHeader, networkInitialSync) @@ -293,7 +278,7 @@ func (cs *chainSync) bootstrapSync() { // we are less than 128 blocks behind the target we can use tip sync cs.syncMode.Store(tip) isSyncedGauge.Set(1) - logger.Debugf("switched sync mode to %s", tip.String()) + logger.Infof("🔁 switched sync mode to %s", tip.String()) return } } @@ -312,7 +297,7 @@ func (cs *chainSync) onBlockAnnounceHandshake(who peer.ID, bestHash common.Hash, return nil } - _, _, isBootstrap, err := cs.currentSyncInformations() + _, isBootstrap, err := cs.currentSyncInformations() if err != nil { return fmt.Errorf("getting current sync info: %w", err) } @@ -324,7 +309,7 @@ func (cs *chainSync) onBlockAnnounceHandshake(who peer.ID, bestHash 
common.Hash,
 	// we are more than 128 blocks behind the head, switch to bootstrap
 	cs.syncMode.Store(bootstrap)
 	isSyncedGauge.Set(0)
-	logger.Debugf("switched sync mode to %s", bootstrap.String())
+	logger.Infof("🔁 switched sync mode to %s", bootstrap.String())
 
 	cs.wg.Add(1)
 	go cs.bootstrapSync()
@@ -335,8 +320,8 @@ func (cs *chainSync) onBlockAnnounce(announced announcedBlock) error {
 	// TODO: https://github.com/ChainSafe/gossamer/issues/3432
 	cs.workerPool.fromBlockAnnounce(announced.who)
 	if cs.pendingBlocks.hasBlock(announced.header.Hash()) {
-		return fmt.Errorf("%w: block %s (#%d)",
-			errAlreadyInDisjointSet, announced.header.Hash(), announced.header.Number)
+		return fmt.Errorf("%w: block #%d (%s)",
+			errAlreadyInDisjointSet, announced.header.Number, announced.header.Hash())
 	}
 
 	err := cs.pendingBlocks.addHeader(announced.header)
@@ -348,7 +333,7 @@ func (cs *chainSync) onBlockAnnounce(announced announcedBlock) error {
 		return nil
 	}
 
-	bestBlockHeader, _, isFarFromTarget, err := cs.currentSyncInformations()
+	bestBlockHeader, isFarFromTarget, err := cs.currentSyncInformations()
 	if err != nil {
 		return fmt.Errorf("getting current sync info: %w", err)
 	}
@@ -421,12 +406,12 @@ func (cs *chainSync) requestChainBlocks(announcedHeader, bestBlockHeader *types.
 		startAtBlock = announcedHeader.Number - uint(*request.Max) + 1
 		totalBlocks = *request.Max
 
-		logger.Debugf("received a block announce from %s, requesting %d blocks, descending request from %s (#%d)",
-			peerWhoAnnounced, gapLength, announcedHeader.Hash(), announcedHeader.Number)
+		logger.Infof("requesting %d blocks, descending request from #%d (%s)",
+			gapLength, announcedHeader.Number, announcedHeader.Hash().Short())
 	} else {
 		request = network.NewBlockRequest(startingBlock, 1, network.BootstrapRequestData, network.Descending)
-		logger.Debugf("received a block announce from %s, requesting a single block %s (#%d)",
-			peerWhoAnnounced, announcedHeader.Hash(), announcedHeader.Number)
+		logger.Infof("requesting a single block #%d (%s)",
+			announcedHeader.Number, announcedHeader.Hash().Short())
 	}
 	resultsQueue := make(chan *syncTaskResult)
 	cs.workerPool.submitRequest(request, &peerWhoAnnounced, resultsQueue)
@@ -441,8 +426,9 @@ 
func (cs *chainSync) requestForkBlocks(bestBlockHeader, highestFinalizedHeader, announcedHeader *types.Header, peerWhoAnnounced peer.ID) error { - logger.Debugf("block announce lower than best block %s (#%d) and greater highest finalized %s (#%d)", - bestBlockHeader.Hash(), bestBlockHeader.Number, highestFinalizedHeader.Hash(), highestFinalizedHeader.Number) + logger.Infof("block announce lower than best block #%d (%s) and greater highest finalized #%d (%s)", + bestBlockHeader.Number, bestBlockHeader.Hash().Short(), + highestFinalizedHeader.Number, highestFinalizedHeader.Hash().Short()) parentExists, err := cs.blockState.HasHeader(announcedHeader.ParentHash) if err != nil && !errors.Is(err, database.ErrNotFound) { @@ -463,8 +449,8 @@ func (cs *chainSync) requestForkBlocks(bestBlockHeader, highestFinalizedHeader, request = network.NewBlockRequest(startingBlock, gapLength, network.BootstrapRequestData, network.Descending) } - logger.Debugf("requesting %d fork blocks, starting at %s (#%d)", - peerWhoAnnounced, gapLength, announcedHash, announcedHeader.Number) + logger.Infof("requesting %d fork blocks, starting at #%d (%s)", + peerWhoAnnounced, gapLength, announcedHeader.Number, announcedHash.Short()) resultsQueue := make(chan *syncTaskResult) cs.workerPool.submitRequest(request, &peerWhoAnnounced, resultsQueue) @@ -563,6 +549,34 @@ func (cs *chainSync) requestMaxBlocksFrom(bestBlockHeader *types.Header, origin return nil } +func (cs *chainSync) showSyncStats(syncBegin time.Time, syncedBlocks int) { + finalisedHeader, err := cs.blockState.GetHighestFinalisedHeader() + if err != nil { + logger.Criticalf("getting highest finalized header: %w", err) + return + } + + totalSyncAndImportSeconds := time.Since(syncBegin).Seconds() + bps := float64(syncedBlocks) / totalSyncAndImportSeconds + logger.Infof("⛓️ synced %d blocks, "+ + "took: %.2f seconds, bps: %.2f blocks/second", + syncedBlocks, totalSyncAndImportSeconds, bps) + + logger.Infof( + "🚣 currently syncing, %d peers connected, "+ + "%d available workers, "+ + "target block number %d, "+ + "finalised #%d (%s) "+ + "sync mode: %s", + len(cs.network.Peers()), + cs.workerPool.totalWorkers(), + cs.peerViewSet.getTarget(), + finalisedHeader.Number, + finalisedHeader.Hash().Short(), + cs.getSyncMode().String(), + ) +} + // handleWorkersResults, every time we submit requests to workers they results should be computed here // and every cicle we should endup with a complete chain, whenever we identify // any error from a worker we should evaluate the error and re-insert the request @@ -570,15 +584,7 @@ func (cs *chainSync) requestMaxBlocksFrom(bestBlockHeader *types.Header, origin // TODO: handle only justification requests func (cs *chainSync) handleWorkersResults( workersResults chan *syncTaskResult, origin blockOrigin, startAtBlock uint, expectedSyncedBlocks uint32) error { - startTime := time.Now() - defer func() { - totalSyncAndImportSeconds := time.Since(startTime).Seconds() - bps := float64(expectedSyncedBlocks) / totalSyncAndImportSeconds - logger.Debugf("⛓️ synced %d blocks, "+ - "took: %.2f seconds, bps: %.2f blocks/second", - expectedSyncedBlocks, totalSyncAndImportSeconds, bps) - }() syncingChain := make([]*types.BlockData, expectedSyncedBlocks) // the total numbers of blocks is missing in the syncing chain @@ -713,7 +719,7 @@ taskResultLoop: } retreiveBlocksSeconds := time.Since(startTime).Seconds() - logger.Debugf("🔽 retrieved %d blocks, took: %.2f seconds, starting process...", + logger.Infof("🔽 retrieved %d blocks, took: %.2f seconds, 
starting process...", expectedSyncedBlocks, retreiveBlocksSeconds) // response was validated! place into ready block queue @@ -724,6 +730,7 @@ taskResultLoop: } } + cs.showSyncStats(startTime, len(syncingChain)) return nil } @@ -787,7 +794,6 @@ func (cs *chainSync) processBlockData(blockData types.BlockData, origin blockOri } if blockData.Justification != nil && len(*blockData.Justification) > 0 { - logger.Infof("handling justification for block %s (#%d)", blockData.Hash.Short(), blockData.Number()) err := cs.handleJustification(blockData.Header, *blockData.Justification) if err != nil { return fmt.Errorf("handling justification: %w", err) @@ -840,8 +846,6 @@ func (cs *chainSync) handleBody(body *types.Body) { } func (cs *chainSync) handleJustification(header *types.Header, justification []byte) (err error) { - logger.Debugf("handling justification for block %d...", header.Number) - headerHash := header.Hash() err = cs.finalityGadget.VerifyBlockJustification(headerHash, justification) if err != nil { @@ -853,7 +857,6 @@ func (cs *chainSync) handleJustification(header *types.Header, justification []b return fmt.Errorf("setting justification for block number %d: %w", header.Number, err) } - logger.Infof("🔨 finalised block number %d with hash %s", header.Number, headerHash) return nil } diff --git a/dot/sync/chain_sync_test.go b/dot/sync/chain_sync_test.go index 26458ef617..1dbc5c67e1 100644 --- a/dot/sync/chain_sync_test.go +++ b/dot/sync/chain_sync_test.go @@ -93,8 +93,8 @@ func Test_chainSync_onBlockAnnounce(t *testing.T) { peerID: somePeer, blockAnnounceHeader: block2AnnounceHeader, errWrapped: errAlreadyInDisjointSet, - errMessage: fmt.Sprintf("already in disjoint set: block %s (#%d)", - block2AnnounceHeader.Hash(), block2AnnounceHeader.Number), + errMessage: fmt.Sprintf("already in disjoint set: block #%d (%s)", + block2AnnounceHeader.Number, block2AnnounceHeader.Hash()), }, "failed_to_add_announced_block_in_disjoint_set": { chainSyncBuilder: func(ctrl *gomock.Controller) *chainSync { @@ -140,7 +140,7 @@ func Test_chainSync_onBlockAnnounce(t *testing.T) { pendingBlocksMock.EXPECT().hasBlock(block2AnnounceHeader.Hash()).Return(false) pendingBlocksMock.EXPECT().addHeader(block2AnnounceHeader).Return(nil) pendingBlocksMock.EXPECT().removeBlock(block2AnnounceHeader.Hash()) - pendingBlocksMock.EXPECT().size().Return(int(0)) + pendingBlocksMock.EXPECT().size().Return(0) blockStateMock := NewMockBlockState(ctrl) blockStateMock.EXPECT(). @@ -153,7 +153,8 @@ func Test_chainSync_onBlockAnnounce(t *testing.T) { blockStateMock.EXPECT(). GetHighestFinalisedHeader(). - Return(block2AnnounceHeader, nil) + Return(block2AnnounceHeader, nil). + Times(2) expectedRequest := network.NewBlockRequest(*variadic.MustNewUint32OrHash(block2AnnounceHeader.Hash()), 1, network.BootstrapRequestData, network.Descending) @@ -170,6 +171,8 @@ func Test_chainSync_onBlockAnnounce(t *testing.T) { } networkMock := NewMockNetwork(ctrl) + networkMock.EXPECT().Peers().Return([]common.PeerInfo{}) + requestMaker := NewMockRequestMaker(ctrl) requestMaker.EXPECT(). Do(somePeer, expectedRequest, &network.BlockResponseMessage{}). @@ -266,7 +269,7 @@ func Test_chainSync_onBlockAnnounceHandshake_tipModeNeedToCatchup(t *testing.T) blockStateMock.EXPECT(). GetHighestFinalisedHeader(). Return(block1AnnounceHeader, nil). 
- Times(2) + Times(1) expectedRequest := network.NewAscendingBlockRequests( block1AnnounceHeader.Number+1, @@ -274,7 +277,7 @@ func Test_chainSync_onBlockAnnounceHandshake_tipModeNeedToCatchup(t *testing.T) networkMock := NewMockNetwork(ctrl) networkMock.EXPECT().Peers().Return([]common.PeerInfo{}). - Times(2) + Times(1) networkMock.EXPECT().AllConnectedPeersIDs().Return([]peer.ID{}) firstMockedResponse := createSuccesfullBlockResponse(t, block1AnnounceHeader.Hash(), 2, 128) @@ -306,7 +309,6 @@ func Test_chainSync_onBlockAnnounceHandshake_tipModeNeedToCatchup(t *testing.T) telemetryMock := NewMockTelemetry(ctrl) const announceBlock = false - ensureSuccessfulBlockImportFlow(t, block1AnnounceHeader, firstMockedResponse.BlockData, blockStateMock, babeVerifierMock, storageStateMock, importHandlerMock, telemetryMock, networkInitialSync, announceBlock) @@ -535,6 +537,9 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithOneWorker(t *testing.T) { mockImportHandler := NewMockBlockImportHandler(ctrl) mockTelemetry := NewMockTelemetry(ctrl) + mockedBlockState.EXPECT().GetHighestFinalisedHeader().Return(types.NewEmptyHeader(), nil).Times(1) + mockedNetwork.EXPECT().Peers().Return([]common.PeerInfo{}).Times(1) + const announceBlock = false // setup mocks for new synced blocks that doesn't exists in our local database ensureSuccessfulBlockImportFlow(t, mockedGenesisHeader, totalBlockResponse.BlockData, mockedBlockState, @@ -581,6 +586,9 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithTwoWorkers(t *testing.T) { mockImportHandler := NewMockBlockImportHandler(ctrl) mockTelemetry := NewMockTelemetry(ctrl) + mockBlockState.EXPECT().GetHighestFinalisedHeader().Return(types.NewEmptyHeader(), nil).Times(1) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}).Times(1) + // this test expects two workers responding each request with 128 blocks which means // we should import 256 blocks in total blockResponse := createSuccesfullBlockResponse(t, mockedGenesisHeader.Hash(), 1, 256) @@ -666,6 +674,9 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithOneWorkerFailing(t *testing. mockImportHandler := NewMockBlockImportHandler(ctrl) mockTelemetry := NewMockTelemetry(ctrl) + mockBlockState.EXPECT().GetHighestFinalisedHeader().Return(types.NewEmptyHeader(), nil).Times(1) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}).Times(1) + // this test expects two workers responding each request with 128 blocks which means // we should import 256 blocks in total blockResponse := createSuccesfullBlockResponse(t, mockedGenesisHeader.Hash(), 1, 256) @@ -748,10 +759,15 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithProtocolNotSupported(t *test ctrl := gomock.NewController(t) mockBlockState := NewMockBlockState(ctrl) mockBlockState.EXPECT().GetFinalisedNotifierChannel().Return(make(chan *types.FinalisationInfo)) + mockBlockState.EXPECT(). + GetHighestFinalisedHeader(). + Return(types.NewEmptyHeader(), nil). 
+ Times(1) mockedGenesisHeader := types.NewHeader(common.NewHash([]byte{0}), trie.EmptyHash, trie.EmptyHash, 0, types.NewDigest()) mockNetwork := NewMockNetwork(ctrl) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}) mockRequestMaker := NewMockRequestMaker(ctrl) mockBabeVerifier := NewMockBabeVerifier(ctrl) @@ -847,10 +863,15 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithNilHeaderInResponse(t *testi ctrl := gomock.NewController(t) mockBlockState := NewMockBlockState(ctrl) mockBlockState.EXPECT().GetFinalisedNotifierChannel().Return(make(chan *types.FinalisationInfo)) + mockBlockState.EXPECT(). + GetHighestFinalisedHeader(). + Return(types.NewEmptyHeader(), nil). + Times(1) mockedGenesisHeader := types.NewHeader(common.NewHash([]byte{0}), trie.EmptyHash, trie.EmptyHash, 0, types.NewDigest()) mockNetwork := NewMockNetwork(ctrl) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}) mockRequestMaker := NewMockRequestMaker(ctrl) mockBabeVerifier := NewMockBabeVerifier(ctrl) @@ -948,10 +969,15 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithResponseIsNotAChain(t *testi ctrl := gomock.NewController(t) mockBlockState := NewMockBlockState(ctrl) mockBlockState.EXPECT().GetFinalisedNotifierChannel().Return(make(chan *types.FinalisationInfo)) + mockBlockState.EXPECT(). + GetHighestFinalisedHeader(). + Return(types.NewEmptyHeader(), nil). + Times(1) mockedGenesisHeader := types.NewHeader(common.NewHash([]byte{0}), trie.EmptyHash, trie.EmptyHash, 0, types.NewDigest()) mockNetwork := NewMockNetwork(ctrl) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}) mockRequestMaker := NewMockRequestMaker(ctrl) mockBabeVerifier := NewMockBabeVerifier(ctrl) @@ -1046,10 +1072,16 @@ func TestChainSync_BootstrapSync_SuccessfulSync_WithReceivedBadBlock(t *testing. ctrl := gomock.NewController(t) mockBlockState := NewMockBlockState(ctrl) mockBlockState.EXPECT().GetFinalisedNotifierChannel().Return(make(chan *types.FinalisationInfo)) + mockBlockState.EXPECT(). + GetHighestFinalisedHeader(). + Return(types.NewEmptyHeader(), nil). + Times(1) + mockedGenesisHeader := types.NewHeader(common.NewHash([]byte{0}), trie.EmptyHash, trie.EmptyHash, 0, types.NewDigest()) mockNetwork := NewMockNetwork(ctrl) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}) mockRequestMaker := NewMockRequestMaker(ctrl) mockBabeVerifier := NewMockBabeVerifier(ctrl) @@ -1164,10 +1196,17 @@ func TestChainSync_BootstrapSync_SucessfulSync_ReceivedPartialBlockData(t *testi ctrl := gomock.NewController(t) mockBlockState := NewMockBlockState(ctrl) mockBlockState.EXPECT().GetFinalisedNotifierChannel().Return(make(chan *types.FinalisationInfo)) + mockBlockState.EXPECT(). + GetHighestFinalisedHeader(). + Return(types.NewEmptyHeader(), nil). 
+ Times(1) + mockedGenesisHeader := types.NewHeader(common.NewHash([]byte{0}), trie.EmptyHash, trie.EmptyHash, 0, types.NewDigest()) mockNetwork := NewMockNetwork(ctrl) + mockNetwork.EXPECT().Peers().Return([]common.PeerInfo{}) + mockRequestMaker := NewMockRequestMaker(ctrl) mockBabeVerifier := NewMockBabeVerifier(ctrl) @@ -1327,7 +1366,6 @@ func ensureSuccessfulBlockImportFlow(t *testing.T, parentHeader *types.Header, blockData.Header.Number, "NetworkInitialSync") mockTelemetry.EXPECT().SendMessage(expectedTelemetryMessage) - mockBlockState.EXPECT().CompareAndSetBlockData(blockData).Return(nil) } } diff --git a/dot/sync/syncer.go b/dot/sync/syncer.go index c511f2644c..ac74579163 100644 --- a/dot/sync/syncer.go +++ b/dot/sync/syncer.go @@ -89,14 +89,17 @@ func (s *Service) Stop() error { // HandleBlockAnnounceHandshake notifies the `chainSync` module that // we have received a BlockAnnounceHandshake from the given peer. func (s *Service) HandleBlockAnnounceHandshake(from peer.ID, msg *network.BlockAnnounceHandshake) error { + logger.Infof("received block announce handshake from: %s, #%d (%s)", + from, msg.BestBlockNumber, msg.BestBlockHash.Short()) return s.chainSync.onBlockAnnounceHandshake(from, msg.BestBlockHash, uint(msg.BestBlockNumber)) } // HandleBlockAnnounce notifies the `chainSync` module that we have received a block announcement from the given peer. func (s *Service) HandleBlockAnnounce(from peer.ID, msg *network.BlockAnnounceMessage) error { - logger.Debug("received BlockAnnounceMessage") blockAnnounceHeader := types.NewHeader(msg.ParentHash, msg.StateRoot, msg.ExtrinsicsRoot, msg.Number, msg.Digest) blockAnnounceHeaderHash := blockAnnounceHeader.Hash() + logger.Infof("received block announce from: %s, #%d (%s)", from, + blockAnnounceHeader.Number, blockAnnounceHeaderHash.Short()) // if the peer reports a lower or equal best block number than us, // check if they are on a fork or not diff --git a/lib/grandpa/message_handler.go b/lib/grandpa/message_handler.go index cb5105324f..b76b81d7ab 100644 --- a/lib/grandpa/message_handler.go +++ b/lib/grandpa/message_handler.go @@ -541,9 +541,6 @@ func (s *Service) VerifyBlockJustification(hash common.Hash, justification []byt return fmt.Errorf("setting finalised hash: %w", err) } - logger.Debugf( - "set finalised block with hash %s, round %d and set id %d", - hash, fj.Round, setID) return nil } From 12234de2308644da1a326965a012106dc7c98f64 Mon Sep 17 00:00:00 2001 From: Kishan Sagathiya Date: Fri, 2 Feb 2024 18:58:25 +0530 Subject: [PATCH 10/11] fix: don't panic if we fail to convert hex to bytes (#3734) --- dot/state/storage_notify.go | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/dot/state/storage_notify.go b/dot/state/storage_notify.go index 45cf810f06..e7449e4ab9 100644 --- a/dot/state/storage_notify.go +++ b/dot/state/storage_notify.go @@ -111,7 +111,11 @@ func (s *StorageState) notifyObserver(root common.Hash, o Observer) error { } else { // filter result to include only interested keys for k, cachedValue := range o.GetFilter() { - value := t.Get(common.MustHexToBytes(k)) + bytes, err := common.HexToBytes(k) + if err != nil { + return fmt.Errorf("failed to convert hex to bytes: %s", err) + } + value := t.Get(bytes) if !reflect.DeepEqual(cachedValue, value) { kv := &KeyValue{ Key: common.MustHexToBytes(k), From d1ca7aa6a013ba3b8190d0a953789c01d3620c36 Mon Sep 17 00:00:00 2001 From: JimboJ <40345116+jimjbrettj@users.noreply.github.com> Date: Fri, 2 Feb 2024 14:21:28 -0700 Subject: [PATCH 11/11] fix: segfault 
on node restart (#3736) --- internal/database/pebble.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/internal/database/pebble.go b/internal/database/pebble.go index ac6059f7fb..8b3aaac313 100644 --- a/internal/database/pebble.go +++ b/internal/database/pebble.go @@ -61,12 +61,13 @@ func (p *PebbleDB) Get(key []byte) (value []byte, err error) { return nil, err } + valueCpy := make([]byte, len(value)) + copy(valueCpy, value) + if err := closer.Close(); err != nil { return nil, fmt.Errorf("closing after get: %w", err) } - valueCpy := make([]byte, len(value)) - copy(valueCpy, value) return valueCpy, err }
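
Note on the pebble fix above: pebble's Get returns a byte slice that is only guaranteed to stay valid until the accompanying closer is closed, so copying the value after closer.Close() (as the old code did) can read memory the engine has already reclaimed, which is the likely cause of the segfault on restart. The following is a minimal, self-contained sketch of the same copy-before-close pattern against the upstream cockroachdb/pebble API; the safeGet helper and the "demo-db" path are illustrative only and are not part of the gossamer patch.

```go
package main

import (
	"fmt"
	"log"

	"github.com/cockroachdb/pebble"
)

// safeGet copies the value returned by pebble before releasing the closer,
// because the returned slice is only valid until Close is called.
func safeGet(db *pebble.DB, key []byte) ([]byte, error) {
	value, closer, err := db.Get(key)
	if err != nil {
		return nil, err
	}

	// Copy first: after closer.Close() the backing memory may be reused.
	valueCpy := make([]byte, len(value))
	copy(valueCpy, value)

	if err := closer.Close(); err != nil {
		return nil, fmt.Errorf("closing after get: %w", err)
	}
	return valueCpy, nil
}

func main() {
	db, err := pebble.Open("demo-db", &pebble.Options{})
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	if err := db.Set([]byte("key"), []byte("value"), pebble.Sync); err != nil {
		log.Fatal(err)
	}

	v, err := safeGet(db, []byte("key"))
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("got: %s\n", v)
}
```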