From 4a1f24e043d5b1f53d65256761cff42133cc977f Mon Sep 17 00:00:00 2001
From: kylezs
Date: Fri, 16 Feb 2024 11:04:23 +0100
Subject: [PATCH 01/10] chore: add start and end for lp api tests

---
 bouncer/shared/lp_api_test.ts | 12 ++++++++++++
 1 file changed, 12 insertions(+)

diff --git a/bouncer/shared/lp_api_test.ts b/bouncer/shared/lp_api_test.ts
index 5610cd30a7..5131a25b33 100644
--- a/bouncer/shared/lp_api_test.ts
+++ b/bouncer/shared/lp_api_test.ts
@@ -119,6 +119,7 @@ async function testLiquidityDeposit() {
 }
 
 async function testWithdrawAsset() {
+  console.log('=== Starting testWithdrawAsset ===');
   const oldBalance = await getBalance(testAsset, testAddress);
 
   const result = await lpApiRpc(`lp_withdraw_asset`, [
@@ -133,9 +134,11 @@ async function testWithdrawAsset() {
   assert(egressId > 0, `Unexpected egressId: ${egressId}`);
 
   await observeBalanceIncrease(testAsset, testAddress, oldBalance);
+  console.log('=== testWithdrawAsset complete ===');
 }
 
 async function testRegisterWithExistingLpAccount() {
+  console.log('=== Starting testRegisterWithExistingLpAccount ===');
   try {
     await lpApiRpc(`lp_register_account`, []);
     throw new Error(`Unexpected lp_register_account result`);
@@ -147,11 +150,13 @@ async function testRegisterWithExistingLpAccount() {
       throw new Error(`Unexpected lp_register_account error: ${error}`);
     }
   }
+  console.log('=== testRegisterWithExistingLpAccount complete ===');
 }
 
 /// Test lp_set_range_order and lp_update_range_order by minting, updating, and burning a range order.
 async function testRangeOrder() {
+  console.log('=== Starting testRangeOrder ===')
   const range = { start: 1, end: 2 };
   const orderId = 74398; // Arbitrary order id so it does not interfere with other tests
   const zeroAssetAmounts = {
@@ -245,19 +250,24 @@ async function testRangeOrder() {
     }
   });
   assert.strictEqual(matchBurn, true, `Expected burn of range order to decrease liquidity to 0`);
+
+  console.log('=== testRangeOrder complete ===');
 }
 
 async function testGetOpenSwapChannels() {
+  console.log('=== Starting testGetOpenSwapChannels ===')
   // TODO: Test with some SwapChannelInfo data
   const openSwapChannels = await lpApiRpc(`lp_get_open_swap_channels`, []);
   assert(openSwapChannels.ethereum, `Missing ethereum swap channel info`);
   assert(openSwapChannels.polkadot, `Missing polkadot swap channel info`);
   assert(openSwapChannels.bitcoin, `Missing bitcoin swap channel info`);
+  console.log('=== testGetOpenSwapChannels complete ===')
 }
 
 /// Test lp_set_limit_order and lp_update_limit_order by minting, updating, and burning a limit order.
async function testLimitOrder() { + console.log('=== Starting testLimitOrder ==='); const orderId = 98432; // Arbitrary order id so it does not interfere with other tests const tick = 2; @@ -336,6 +346,8 @@ async function testLimitOrder() { } }); assert.strictEqual(matchBurn, true, `Expected burn of limit order to decrease liquidity to 0`); + + console.log('=== testLimitOrder complete ==='); } /// Runs all of the LP commands via the LP API Json RPC Server that is running and checks that the returned data is as expected From dfea387402148e89bffbb02895c25e2f28dd8493 Mon Sep 17 00:00:00 2001 From: kylezs Date: Fri, 16 Feb 2024 11:04:50 +0100 Subject: [PATCH 02/10] fix: await on liquidity provision --- bouncer/shared/provide_liquidity.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bouncer/shared/provide_liquidity.ts b/bouncer/shared/provide_liquidity.ts index 07bb770342..916c553bda 100644 --- a/bouncer/shared/provide_liquidity.ts +++ b/bouncer/shared/provide_liquidity.ts @@ -73,7 +73,7 @@ export async function provideLiquidity(ccy: Asset, amount: number, waitForFinali undefined, waitForFinalization, ); - send(ccy, ingressAddress, String(amount)); + await send(ccy, ingressAddress, String(amount)); await eventHandle; } From 222bcbd3331fce0b999ca477c175536ed3f2a8ae Mon Sep 17 00:00:00 2001 From: kylezs Date: Fri, 16 Feb 2024 11:06:23 +0100 Subject: [PATCH 03/10] fix: don't hang --- bouncer/shared/submit_runtime_upgrade.ts | 1 + 1 file changed, 1 insertion(+) diff --git a/bouncer/shared/submit_runtime_upgrade.ts b/bouncer/shared/submit_runtime_upgrade.ts index ac59c0eec4..2b011a0712 100755 --- a/bouncer/shared/submit_runtime_upgrade.ts +++ b/bouncer/shared/submit_runtime_upgrade.ts @@ -54,6 +54,7 @@ export async function submitRuntimeUpgradeWithRestrictions( } console.log('Runtime upgrade completed.'); + chainflip.disconnect(); } export async function submitRuntimeUpgradeWasmPath(wasmPath: string) { From c50bde3891701fb71ec4ad4b67b659b98c4986cf Mon Sep 17 00:00:00 2001 From: kylezs Date: Fri, 16 Feb 2024 11:08:15 +0100 Subject: [PATCH 04/10] chore: shorten timeout duration --- bouncer/tests/all_concurrent_tests.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/bouncer/tests/all_concurrent_tests.ts b/bouncer/tests/all_concurrent_tests.ts index 9cd9d5322d..d3ca9f852e 100755 --- a/bouncer/tests/all_concurrent_tests.ts +++ b/bouncer/tests/all_concurrent_tests.ts @@ -43,7 +43,7 @@ async function runAllConcurrentTests() { await Promise.all([broadcastAborted, feeDeficitRefused]); } -runWithTimeout(runAllConcurrentTests(), 1800000) +runWithTimeout(runAllConcurrentTests(), 1000000) .then(() => { // There are some dangling resources that prevent the process from exiting process.exit(0); From 6af45ad668e87c03b68b5721f9091fb8450851a0 Mon Sep 17 00:00:00 2001 From: kylezs Date: Fri, 16 Feb 2024 11:08:36 +0100 Subject: [PATCH 05/10] fix: restart chainflip-node on upgrade --- .github/workflows/upgrade-test.yml | 68 ++++++++++++++++++++-- bouncer/shared/upgrade_network.ts | 53 ++++++++++++++--- localnet/init/scripts/start-all-engines.sh | 10 +++- localnet/init/scripts/start-all-nodes.sh | 17 ++++++ localnet/init/scripts/start-node.sh | 16 ++--- localnet/manage.sh | 35 ++++++----- 6 files changed, 163 insertions(+), 36 deletions(-) create mode 100755 localnet/init/scripts/start-all-nodes.sh diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml index 0d29ee77c5..7dfd267e03 100644 --- a/.github/workflows/upgrade-test.yml +++ 
b/.github/workflows/upgrade-test.yml @@ -44,10 +44,33 @@ jobs: # conservatively 1.5 hours. 2 bouncer runs need to occur. timeout-minutes: 90 steps: - - name: Checkout chainflip-backend uses: actions/checkout@v3 + - name: Get upgrade-to-commit hash + uses: actions/github-script@v5 + id: get-upgrade-to-commit + with: + github-token: ${{secrets.GITHUB_TOKEN}} + script: | + if (${{ inputs.upgrade-to-commit != '' }}) { + return ${{ inputs.upgrade-to-commit }}; + } + const workflow_id = "${{ inputs.upgrade-to-workflow-name }}"; + const owner = context.repo.owner; + const repo = context.repo.repo; + // Use octokit to access the GitHub API + const runs = await github.rest.actions.listWorkflowRuns({ + owner, + repo, + workflow_id, + status: 'completed', + event: 'push', + }); + const run = runs.data.workflow_runs[0]; // Assuming you want the most recent run; adjust as necessary + return run.head_sha; // This is the commit SHA of the workflow run + + - name: Login to Github Container Registry 🔑 uses: docker/login-action@v2 with: @@ -87,6 +110,26 @@ jobs: rename-to: try-runtime chmod: 0755 + - name: Get upgrade-from run commit SHA + id: get-upgrade-from-commit + uses: actions/github-script@v5 + with: + github-token: ${{secrets.GITHUB_TOKEN}} + script: | + const workflow_id = 'release-${{ inputs.upgrade-from-release }}.yml'; + const owner = context.repo.owner; + const repo = context.repo.repo; + // Use octokit to access the GitHub API + const runs = await github.rest.actions.listWorkflowRuns({ + owner, + repo, + workflow_id, + status: 'completed', + event: 'push', + }); + const run = runs.data.workflow_runs[0]; // Assuming you want the most recent run; adjust as necessary + return run.head_sha; // This is the commit SHA of the workflow run + - name: Download latest release binaries uses: dawidd6/action-download-artifact@v2 with: @@ -139,27 +182,34 @@ jobs: - name: Start a localnet from current release env: BINARY_ROOT_PATH: ./latest-release-bins + DEBUG_OUTPUT_DESTINATION: /tmp/chainflip/debug.log run: | set -x mkdir -p /tmp/chainflip/bashful mkdir -p /tmp/chainflip/doc mkdir -p /tmp/chainflip/dopey + touch /tmp/chainflip/debug.log + chmod +x ${{ env.BINARY_ROOT_PATH }}/chainflip-* touch ./localnet/.setup_complete ./localnet/manage.sh - name: Run bouncer on latest release id: pre-upgrade-bouncer - working-directory: bouncer run: | - ./run.sh + git fetch --all + git checkout ${{ steps.get-upgrade-from-commit.outputs.result }} + cd bouncer + ./setup_for_test.sh # we need to be sure that when this fails, we catch the error, any panics etc. that occur # TODO: Run swaps simultaneously to the upgrade - we could do that inside the `upgrade_network` command itself. 
- name: Upgrade network shell: bash id: upgrade-network - working-directory: bouncer run: | + git checkout ${{ github.sha }} + git rev-parse HEAD + cd bouncer ./commands/upgrade_network.ts prebuilt \ --runtime ./../main-runtime/state_chain_runtime.compact.compressed.wasm \ --bins ./../upgrade-to-bins \ @@ -168,8 +218,11 @@ jobs: - name: Run bouncer after upgrade id: post-upgrade-bouncer - working-directory: bouncer + # Use git checkout ${ steps.get-upgrade-to-commit.outputs.result } instead run: | + git checkout ${{ github.sha }} + git rev-parse HEAD + cd bouncer ./tests/all_concurrent_tests.ts - name: Print old chainflip-engine logs @@ -197,6 +250,11 @@ jobs: run: | cat /tmp/chainflip/chainflip-lp-api.log + - name: Print localnet init debug logs 🕵️‍♂️ + if: always() + run: | + cat /tmp/chainflip/debug.log + - name: Upload Localnet Logs 💾 if: always() continue-on-error: true diff --git a/bouncer/shared/upgrade_network.ts b/bouncer/shared/upgrade_network.ts index c2b8a250d9..d5a8212a3a 100755 --- a/bouncer/shared/upgrade_network.ts +++ b/bouncer/shared/upgrade_network.ts @@ -1,4 +1,4 @@ -import { execSync } from 'child_process'; +import { exec, execSync } from 'child_process'; import fs from 'fs/promises'; import * as toml from 'toml'; import path from 'path'; @@ -62,7 +62,7 @@ async function incompatibleUpgradeNoBuild( const nodeCount = numberOfNodes + '-node'; execSync( - `LOG_SUFFIX="-upgrade" NODE_COUNT=${nodeCount} SELECTED_NODES="${selectedNodes.join( + `INIT_RUN=false LOG_SUFFIX="-upgrade" NODE_COUNT=${nodeCount} SELECTED_NODES="${selectedNodes.join( ' ', )}" LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-engines.sh`, ); @@ -77,17 +77,56 @@ async function incompatibleUpgradeNoBuild( 'Check that the old engine has now shut down, and that the new engine is now running.', ); - execSync(`kill $(lsof -t -i:10997)`); - execSync(`kill $(lsof -t -i:10589)`); + // Wait for the old broker and lp-api to shut down, and ensure the runtime upgrade is finalised. + await sleep(20000); + + console.log('Killing the old node.'); + execSync(`kill $(ps aux | grep chainflip-node | grep -v grep | awk '{print $2}')`); + + console.log("Killed old node"); + + // let them shutdown + await sleep(2000); + console.log('Stopped old broker and lp-api. Starting the new ones.'); - // Wait for the old broker and lp-api to shut down, and ensure the runtime upgrade is finalised. 
- await sleep(22000); + console.log("Starting the new node"); const KEYS_DIR = `${localnetInitPath}/keys`; + + const selectedNodesSep = `"${selectedNodes.join(' ')}"`; + + try { + const buffer = execSync(`INIT_RPC_PORT=9944 KEYS_DIR=${KEYS_DIR} NODE_COUNT=${nodeCount} SELECTED_NODES=${selectedNodesSep} LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-nodes.sh`); + console.log("start node success: " + buffer.toString()); + } catch (e) { + console.error("start node error: "); + console.log(e); + } + + await sleep(7000); + + const output = execSync("ps aux | grep chainflip-node | grep -v grep | awk '{print $2}'"); + console.log("New node PID: " + output.toString()); + + // Restart the engines + execSync( + `INIT_RUN=false LOG_SUFFIX="-upgrade" NODE_COUNT=${nodeCount} SELECTED_NODES=${selectedNodesSep} LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-engines.sh`, + ); + + console.log('Starting new broker and lp-api.'); + execSync(`KEYS_DIR=${KEYS_DIR} ${localnetInitPath}/scripts/start-broker-api.sh ${binaryPath}`); execSync(`KEYS_DIR=${KEYS_DIR} ${localnetInitPath}/scripts/start-lp-api.sh ${binaryPath}`); - await sleep(6000); + + await sleep(20000); + + const brokerPID = execSync("lsof -t -i:10997"); + console.log("New broker PID: " + brokerPID.toString()); + const lpApiPID = execSync("lsof -t -i:10589"); + console.log("New LP API PID: " + lpApiPID.toString()); + + console.log('Started new broker and lp-api.'); } diff --git a/localnet/init/scripts/start-all-engines.sh b/localnet/init/scripts/start-all-engines.sh index e214424a18..14dcee2f62 100755 --- a/localnet/init/scripts/start-all-engines.sh +++ b/localnet/init/scripts/start-all-engines.sh @@ -5,11 +5,17 @@ # These need to match what's in the manage.py script. SC_RPC_PORT=9944 HEALTH_PORT=5555 - +# if INIT_RUN is not set then set it to true +INIT_RUN=${INIT_RUN:-true} ENGINE_P2P_PORT=3100 LOG_PORT=30687 for NODE in $SELECTED_NODES; do - cp -R $LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/$NODE.db /tmp/chainflip/$NODE + if $INIT_RUN; then + echo "Copying db to tmp for $NODE" + cp -R "$LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/$NODE.db" "/tmp/chainflip/$NODE" + else + echo "Not copying db to tmp for $NODE" + fi BINARY_ROOT_PATH=$BINARY_ROOT_PATH NODE_NAME=$NODE P2P_PORT=$ENGINE_P2P_PORT SC_RPC_PORT=$SC_RPC_PORT LOG_PORT=$LOG_PORT HEALTH_PORT=$HEALTH_PORT LOG_SUFFIX=$LOG_SUFFIX $LOCALNET_INIT_DIR/scripts/start-engine.sh echo "🚗 Starting chainflip-engine of $NODE ..." ((SC_RPC_PORT++)) diff --git a/localnet/init/scripts/start-all-nodes.sh b/localnet/init/scripts/start-all-nodes.sh new file mode 100755 index 0000000000..8c56c7637c --- /dev/null +++ b/localnet/init/scripts/start-all-nodes.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -e + +echo "🚧 Starting chainflip-node(s) ..." + +echo "start-all-nodes INIT_RPC_PORT: $INIT_RPC_PORT" + +P2P_PORT=30333 +RPC_PORT=$INIT_RPC_PORT +for NODE in $SELECTED_NODES; do + echo "🚧 Starting chainflip-node of $NODE ..." 
+ + KEYS_DIR=$KEYS_DIR LOCALNET_INIT_DIR=$LOCALNET_INIT_DIR $LOCALNET_INIT_DIR/scripts/start-node.sh $BINARY_ROOT_PATH $NODE $P2P_PORT $RPC_PORT $NODE_COUNT + ((P2P_PORT++)) + ((RPC_PORT++)) +done + diff --git a/localnet/init/scripts/start-node.sh b/localnet/init/scripts/start-node.sh index fec887a8c1..ce2328c9fc 100755 --- a/localnet/init/scripts/start-node.sh +++ b/localnet/init/scripts/start-node.sh @@ -11,16 +11,16 @@ if [ $NODE_COUNT == "3-node" ]; then CHAIN="dev-3" fi -source ./localnet/init/env/eth.env -source ./localnet/init/env/arb.env -source ./localnet/init/env/node.env -export ETH_INIT_AGG_KEY=$(jq -r '.eth_agg_key' ./localnet/init/keyshare/$NODE_COUNT/agg_keys.json) -export DOT_INIT_AGG_KEY=$(jq -r '.dot_agg_key' ./localnet/init/keyshare/$NODE_COUNT/agg_keys.json) -$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat ./localnet/init/keys/$NODE_NAME/signing_key_file) --key-type=aura --scheme=sr25519 -$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat ./localnet/init/keys/$NODE_NAME/signing_key_file) --key-type=gran --scheme=ed25519 +source $LOCALNET_INIT_DIR/env/eth.env +source $LOCALNET_INIT_DIR/env/arb.env +source $LOCALNET_INIT_DIR/env/node.env +export ETH_INIT_AGG_KEY=$(jq -r '.eth_agg_key' $LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/agg_keys.json) +export DOT_INIT_AGG_KEY=$(jq -r '.dot_agg_key' $LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/agg_keys.json) +$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat $KEYS_DIR/$NODE_NAME/signing_key_file) --key-type=aura --scheme=sr25519 +$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat $KEYS_DIR/$NODE_NAME/signing_key_file) --key-type=gran --scheme=ed25519 $BINARY_ROOT_PATH/chainflip-node --chain=$CHAIN \ --base-path=/tmp/chainflip/$NODE_NAME/chaindata \ - --node-key-file=./localnet/init/keys/$NODE_NAME/node_key_file \ + --node-key-file=$KEYS_DIR/$NODE_NAME/node_key_file \ --validator \ --force-authoring \ --rpc-cors=all \ diff --git a/localnet/manage.sh b/localnet/manage.sh index b7d94a787f..4e84188193 100755 --- a/localnet/manage.sh +++ b/localnet/manage.sh @@ -141,20 +141,26 @@ build-localnet() { INIT_RPC_PORT=9944 - P2P_PORT=30333 - RPC_PORT=$INIT_RPC_PORT - for NODE in "${SELECTED_NODES[@]}"; do - echo "🚧 Starting chainflip-node of $NODE ..." - DOT_GENESIS_HASH=${DOT_GENESIS_HASH:2} ./$LOCALNET_INIT_DIR/scripts/start-node.sh $BINARY_ROOT_PATH $NODE $P2P_PORT $RPC_PORT $NODE_COUNT - ((P2P_PORT++)) - ((RPC_PORT++)) - done + # This is unset on `destroy()` + export DOT_GENESIS_HASH=${DOT_GENESIS_HASH:2} + + KEYS_DIR=./$LOCALNET_INIT_DIR/keys + + BINARY_ROOT_PATH=$BINARY_ROOT_PATH \ + SELECTED_NODES=${SELECTED_NODES[@]} \ + NODE_COUNT=$NODE_COUNT \ + INIT_RPC_PORT=$INIT_RPC_PORT \ + LOCALNET_INIT_DIR=$LOCALNET_INIT_DIR \ + KEYS_DIR=$KEYS_DIR \ + ./$LOCALNET_INIT_DIR/scripts/start-all-nodes.sh + + echo "🚧 Checking health ..." RPC_PORT=$INIT_RPC_PORT - for NODE in "${SELECTED_NODES[@]}"; do - check_endpoint_health -s -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "chain_getBlock"}' "http://localhost:$RPC_PORT" >>$DEBUG_OUTPUT_DESTINATION - echo "💚 $NODE's chainflip-node is running!" 
-    ((RPC_PORT++))
+  for NODE in $SELECTED_NODES; do
+    check_endpoint_health -s -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "chain_getBlock"}' "http://localhost:$RPC_PORT" >>$DEBUG_OUTPUT_DESTINATION
+    echo "💚 $NODE's chainflip-node is running!"
+    ((RPC_PORT++))
   done
 
   NODE_COUNT=$NODE_COUNT \
@@ -179,8 +185,6 @@ build-localnet() {
 
   wait
 
-  KEYS_DIR=./$LOCALNET_INIT_DIR/keys
-
   echo "🕺 Starting Broker API ..."
   KEYS_DIR=$KEYS_DIR ./$LOCALNET_INIT_DIR/scripts/start-broker-api.sh $BINARY_ROOT_PATH
 
@@ -202,6 +206,9 @@ destroy() {
   for pid in $(ps -ef | grep solana | grep -v grep | awk '{print $2}'); do kill -9 $pid; done
   rm -rf /tmp/chainflip
   rm -rf /tmp/solana/
+
+  unset DOT_GENESIS_HASH
+
   echo "done"
 }

From 127644a8ba258ff4defa327ab86f71d7b565e8a3 Mon Sep 17 00:00:00 2001
From: kylezs
Date: Fri, 16 Feb 2024 11:47:41 +0100
Subject: [PATCH 06/10] chore: run bouncer

---
 .github/workflows/upgrade-test.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml
index 7dfd267e03..cf884078bc 100644
--- a/.github/workflows/upgrade-test.yml
+++ b/.github/workflows/upgrade-test.yml
@@ -199,7 +199,7 @@ jobs:
           git fetch --all
           git checkout ${{ steps.get-upgrade-from-commit.outputs.result }}
           cd bouncer
-          ./setup_for_test.sh
+          ./run.sh
 
       # we need to be sure that when this fails, we catch the error, any panics etc. that occur
       # TODO: Run swaps simultaneously to the upgrade - we could do that inside the `upgrade_network` command itself.

From fe99789849ef0c326f3e22db65449ffec9bf8c7f Mon Sep 17 00:00:00 2001
From: kylezs
Date: Fri, 16 Feb 2024 15:13:40 +0100
Subject: [PATCH 07/10] refactor: factor out get workflow latest commit SHA

---
 .../actions/get-workflow-commit/action.yml | 34 ++++++++++
 .github/workflows/upgrade-test.yml         | 62 +++++++------------
 2 files changed, 56 insertions(+), 40 deletions(-)
 create mode 100644 .github/actions/get-workflow-commit/action.yml

diff --git a/.github/actions/get-workflow-commit/action.yml b/.github/actions/get-workflow-commit/action.yml
new file mode 100644
index 0000000000..18d9459c56
--- /dev/null
+++ b/.github/actions/get-workflow-commit/action.yml
@@ -0,0 +1,34 @@
+name: "Get Workflow Latest Commit SHA"
+description: "Fetches the commit SHA of the last successful workflow run"
+inputs:
+  workflow-name:
+    description: "Workflow name to fetch the most recent run commit SHA from"
+    required: true
+  github-token:
+    description: "GitHub token for authentication"
+    required: true
+outputs:
+  commit-sha:
+    description: "The latest commit of the selected workflow"
+    value: ${{ steps.get-workflow-commit.outputs.result }}
+runs:
+  using: "composite"
+  steps:
+    - name: Get workflow commit SHA
+      id: get-workflow-commit
+      uses: actions/github-script@v5
+      with:
+        github-token: ${{ inputs.github-token }}
+        script: |
+          const workflow_id = "${{ inputs.workflow-name }}";
+          const owner = context.repo.owner;
+          const repo = context.repo.repo;
+          const runs = await github.rest.actions.listWorkflowRuns({
+            owner,
+            repo,
+            workflow_id,
+            status: 'completed',
+            event: 'push',
+          });
+          const run = runs.data.workflow_runs[0];
+          return run.head_sha;

diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml
index cf884078bc..afd910a200 100644
--- a/.github/workflows/upgrade-test.yml
+++ b/.github/workflows/upgrade-test.yml
@@ -47,29 +47,20 @@ jobs:
       - name: Checkout chainflip-backend
         uses: actions/checkout@v3
 
-      - name: Get upgrade-to-commit hash
-        uses: 
actions/github-script@v5 + - name: Get upgrade-to-commit SHA + uses: ./.github/actions/get-workflow-commit id: get-upgrade-to-commit with: + workflow-name: ${{ inputs.upgrade-to-workflow-name }} github-token: ${{secrets.GITHUB_TOKEN}} - script: | - if (${{ inputs.upgrade-to-commit != '' }}) { - return ${{ inputs.upgrade-to-commit }}; - } - const workflow_id = "${{ inputs.upgrade-to-workflow-name }}"; - const owner = context.repo.owner; - const repo = context.repo.repo; - // Use octokit to access the GitHub API - const runs = await github.rest.actions.listWorkflowRuns({ - owner, - repo, - workflow_id, - status: 'completed', - event: 'push', - }); - const run = runs.data.workflow_runs[0]; // Assuming you want the most recent run; adjust as necessary - return run.head_sha; // This is the commit SHA of the workflow run + - name: Set upgrade-to-commit + run: | + if [ -z "${{ inputs.upgrade-to-commit }}" ]; then + echo "UPGRADE_TO_COMMIT=${{ steps.get-upgrade-to-commit.outputs.commit-sha }}" >> $GITHUB_ENV + else + echo "UPGRADE_TO_COMMIT=${{ inputs.upgrade-to-commit }}" >> $GITHUB_ENV + fi - name: Login to Github Container Registry 🔑 uses: docker/login-action@v2 @@ -110,25 +101,16 @@ jobs: rename-to: try-runtime chmod: 0755 - - name: Get upgrade-from run commit SHA + - name: Get upgrade-from-commit + uses: ./.github/actions/get-workflow-commit id: get-upgrade-from-commit - uses: actions/github-script@v5 with: + workflow-name: release-${{ inputs.upgrade-from-release }}.yml github-token: ${{secrets.GITHUB_TOKEN}} - script: | - const workflow_id = 'release-${{ inputs.upgrade-from-release }}.yml'; - const owner = context.repo.owner; - const repo = context.repo.repo; - // Use octokit to access the GitHub API - const runs = await github.rest.actions.listWorkflowRuns({ - owner, - repo, - workflow_id, - status: 'completed', - event: 'push', - }); - const run = runs.data.workflow_runs[0]; // Assuming you want the most recent run; adjust as necessary - return run.head_sha; // This is the commit SHA of the workflow run + + - name: echo upgrade from commit + run: | + echo "The upgrade-from-commit is: ${{ steps.get-upgrade-from-commit.outputs.commit-sha }}" - name: Download latest release binaries uses: dawidd6/action-download-artifact@v2 @@ -155,7 +137,7 @@ jobs: workflow: ${{ inputs.upgrade-to-workflow-name }} name: chainflip-backend-bin-try-runtime-ubuntu-22.04 path: upgrade-to-bins - commit: ${{ inputs.upgrade-to-commit }} + commit: ${{ inputs.UPGRADE_TO_COMMIT }} - name: Download latest main runtime uses: dawidd6/action-download-artifact@v2 @@ -163,7 +145,7 @@ jobs: workflow: ${{ inputs.upgrade-to-workflow-name }} name: chainflip-node-runtime-try-runtime path: main-runtime - commit: ${{ inputs.upgrade-to-commit }} + commit: ${{ env.UPGRADE_TO_COMMIT }} - name: Permissions for latest binaries run: | @@ -197,7 +179,8 @@ jobs: id: pre-upgrade-bouncer run: | git fetch --all - git checkout ${{ steps.get-upgrade-from-commit.outputs.result }} + git checkout ${{ steps.get-upgrade-from-commit.outputs.commit-sha }} + git rev-parse HEAD cd bouncer ./run.sh @@ -218,9 +201,8 @@ jobs: - name: Run bouncer after upgrade id: post-upgrade-bouncer - # Use git checkout ${ steps.get-upgrade-to-commit.outputs.result } instead run: | - git checkout ${{ github.sha }} + git checkout ${{ env.UPGRADE_TO_COMMIT }} git rev-parse HEAD cd bouncer ./tests/all_concurrent_tests.ts From f7e3533f9bb845e7753df40f4236d5411e27d385 Mon Sep 17 00:00:00 2001 From: kylezs Date: Fri, 16 Feb 2024 15:20:53 +0100 Subject: [PATCH 08/10] chore: lint 
--- bouncer/shared/lp_api_test.ts | 6 +++--- bouncer/shared/upgrade_network.ts | 25 +++++++++++++------------ 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/bouncer/shared/lp_api_test.ts b/bouncer/shared/lp_api_test.ts index 5131a25b33..4cb74ce0c6 100644 --- a/bouncer/shared/lp_api_test.ts +++ b/bouncer/shared/lp_api_test.ts @@ -156,7 +156,7 @@ async function testRegisterWithExistingLpAccount() { /// Test lp_set_range_order and lp_update_range_order by minting, updating, and burning a range order. async function testRangeOrder() { - console.log('=== Starting testRangeOrder ===') + console.log('=== Starting testRangeOrder ==='); const range = { start: 1, end: 2 }; const orderId = 74398; // Arbitrary order id so it does not interfere with other tests const zeroAssetAmounts = { @@ -255,13 +255,13 @@ async function testRangeOrder() { } async function testGetOpenSwapChannels() { - console.log('=== Starting testGetOpenSwapChannels ===') + console.log('=== Starting testGetOpenSwapChannels ==='); // TODO: Test with some SwapChannelInfo data const openSwapChannels = await lpApiRpc(`lp_get_open_swap_channels`, []); assert(openSwapChannels.ethereum, `Missing ethereum swap channel info`); assert(openSwapChannels.polkadot, `Missing polkadot swap channel info`); assert(openSwapChannels.bitcoin, `Missing bitcoin swap channel info`); - console.log('=== testGetOpenSwapChannels complete ===') + console.log('=== testGetOpenSwapChannels complete ==='); } /// Test lp_set_limit_order and lp_update_limit_order by minting, updating, and burning a limit order. diff --git a/bouncer/shared/upgrade_network.ts b/bouncer/shared/upgrade_network.ts index d5a8212a3a..686254b6d4 100755 --- a/bouncer/shared/upgrade_network.ts +++ b/bouncer/shared/upgrade_network.ts @@ -1,4 +1,4 @@ -import { exec, execSync } from 'child_process'; +import { execSync } from 'child_process'; import fs from 'fs/promises'; import * as toml from 'toml'; import path from 'path'; @@ -83,31 +83,33 @@ async function incompatibleUpgradeNoBuild( console.log('Killing the old node.'); execSync(`kill $(ps aux | grep chainflip-node | grep -v grep | awk '{print $2}')`); - console.log("Killed old node"); + console.log('Killed old node'); // let them shutdown await sleep(2000); console.log('Stopped old broker and lp-api. 
Starting the new ones.'); - console.log("Starting the new node"); + console.log('Starting the new node'); const KEYS_DIR = `${localnetInitPath}/keys`; const selectedNodesSep = `"${selectedNodes.join(' ')}"`; try { - const buffer = execSync(`INIT_RPC_PORT=9944 KEYS_DIR=${KEYS_DIR} NODE_COUNT=${nodeCount} SELECTED_NODES=${selectedNodesSep} LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-nodes.sh`); - console.log("start node success: " + buffer.toString()); + const buffer = execSync( + `INIT_RPC_PORT=9944 KEYS_DIR=${KEYS_DIR} NODE_COUNT=${nodeCount} SELECTED_NODES=${selectedNodesSep} LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-nodes.sh`, + ); + console.log('start node success: ' + buffer.toString()); } catch (e) { - console.error("start node error: "); + console.error('start node error: '); console.log(e); } await sleep(7000); const output = execSync("ps aux | grep chainflip-node | grep -v grep | awk '{print $2}'"); - console.log("New node PID: " + output.toString()); + console.log('New node PID: ' + output.toString()); // Restart the engines execSync( @@ -121,11 +123,10 @@ async function incompatibleUpgradeNoBuild( await sleep(20000); - const brokerPID = execSync("lsof -t -i:10997"); - console.log("New broker PID: " + brokerPID.toString()); - const lpApiPID = execSync("lsof -t -i:10589"); - console.log("New LP API PID: " + lpApiPID.toString()); - + const brokerPID = execSync('lsof -t -i:10997'); + console.log('New broker PID: ' + brokerPID.toString()); + const lpApiPID = execSync('lsof -t -i:10589'); + console.log('New LP API PID: ' + lpApiPID.toString()); console.log('Started new broker and lp-api.'); } From 966556b2856bf303b8e2af19302fe10a0ef72144 Mon Sep 17 00:00:00 2001 From: kylezs Date: Fri, 16 Feb 2024 15:23:51 +0100 Subject: [PATCH 09/10] fix: use env --- .github/workflows/upgrade-test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml index afd910a200..9704298236 100644 --- a/.github/workflows/upgrade-test.yml +++ b/.github/workflows/upgrade-test.yml @@ -137,7 +137,7 @@ jobs: workflow: ${{ inputs.upgrade-to-workflow-name }} name: chainflip-backend-bin-try-runtime-ubuntu-22.04 path: upgrade-to-bins - commit: ${{ inputs.UPGRADE_TO_COMMIT }} + commit: ${{ env.UPGRADE_TO_COMMIT }} - name: Download latest main runtime uses: dawidd6/action-download-artifact@v2 From 8920292947df3b74b9ec4577c0ffbb1ed511ecaa Mon Sep 17 00:00:00 2001 From: kylezs Date: Mon, 19 Feb 2024 09:16:02 +0100 Subject: [PATCH 10/10] chore: spaces --- .github/workflows/upgrade-test.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml index 9704298236..9eed67b94b 100644 --- a/.github/workflows/upgrade-test.yml +++ b/.github/workflows/upgrade-test.yml @@ -52,7 +52,7 @@ jobs: id: get-upgrade-to-commit with: workflow-name: ${{ inputs.upgrade-to-workflow-name }} - github-token: ${{secrets.GITHUB_TOKEN}} + github-token: ${{ secrets.GITHUB_TOKEN }} - name: Set upgrade-to-commit run: | @@ -106,7 +106,7 @@ jobs: id: get-upgrade-from-commit with: workflow-name: release-${{ inputs.upgrade-from-release }}.yml - github-token: ${{secrets.GITHUB_TOKEN}} + github-token: ${{ secrets.GITHUB_TOKEN }} - name: echo upgrade from commit run: |