diff --git a/.github/actions/get-workflow-commit/action.yml b/.github/actions/get-workflow-commit/action.yml new file mode 100644 index 0000000000..18d9459c56 --- /dev/null +++ b/.github/actions/get-workflow-commit/action.yml @@ -0,0 +1,34 @@ +name: "Get Workflow Latest Commit SHA" +description: "Fetches the commit SHA of the last successful workflow run" +inputs: + workflow-name: + description: "Workflow name to fetch the most recent run commit SHA from" + required: true + github-token: + description: "GitHub token for authentication" + required: true +outputs: + commit-sha: + description: "The latest commit of the selected workflow" + value: ${{ steps.get-workflow-commit.outputs.result }} +runs: + using: "composite" + steps: + - name: Get workflow commit SHA + id: get-workflow-commit + uses: actions/github-script@v5 + with: + github-token: ${{ inputs.github-token }} + script: | + const workflow_id = "${{ inputs.workflow-name }}"; + const owner = context.repo.owner; + const repo = context.repo.repo; + const runs = await github.rest.actions.listWorkflowRuns({ + owner, + repo, + workflow_id, + status: 'success', + event: 'push', + }); + const run = runs.data.workflow_runs[0]; + return run.head_sha; diff --git a/.github/workflows/upgrade-test.yml b/.github/workflows/upgrade-test.yml index 0d29ee77c5..9eed67b94b 100644 --- a/.github/workflows/upgrade-test.yml +++ b/.github/workflows/upgrade-test.yml @@ -44,10 +44,24 @@ jobs: # conservatively 1.5 hours. 2 bouncer runs need to occur. 
timeout-minutes: 90 steps: - - name: Checkout chainflip-backend uses: actions/checkout@v3 + - name: Get upgrade-to-commit SHA + uses: ./.github/actions/get-workflow-commit + id: get-upgrade-to-commit + with: + workflow-name: ${{ inputs.upgrade-to-workflow-name }} + github-token: ${{ secrets.GITHUB_TOKEN }} + + - name: Set upgrade-to-commit + run: | + if [ -z "${{ inputs.upgrade-to-commit }}" ]; then + echo "UPGRADE_TO_COMMIT=${{ steps.get-upgrade-to-commit.outputs.commit-sha }}" >> $GITHUB_ENV + else + echo "UPGRADE_TO_COMMIT=${{ inputs.upgrade-to-commit }}" >> $GITHUB_ENV + fi + - name: Login to Github Container Registry 🔑 uses: docker/login-action@v2 with: @@ -87,6 +101,17 @@ jobs: rename-to: try-runtime chmod: 0755 + - name: Get upgrade-from-commit + uses: ./.github/actions/get-workflow-commit + id: get-upgrade-from-commit + with: + workflow-name: release-${{ inputs.upgrade-from-release }}.yml + github-token: ${{ secrets.GITHUB_TOKEN }} + + - name: echo upgrade from commit + run: | + echo "The upgrade-from-commit is: ${{ steps.get-upgrade-from-commit.outputs.commit-sha }}" + - name: Download latest release binaries uses: dawidd6/action-download-artifact@v2 with: @@ -112,7 +137,7 @@ jobs: workflow: ${{ inputs.upgrade-to-workflow-name }} name: chainflip-backend-bin-try-runtime-ubuntu-22.04 path: upgrade-to-bins - commit: ${{ inputs.upgrade-to-commit }} + commit: ${{ env.UPGRADE_TO_COMMIT }} - name: Download latest main runtime uses: dawidd6/action-download-artifact@v2 @@ -120,7 +145,7 @@ jobs: workflow: ${{ inputs.upgrade-to-workflow-name }} name: chainflip-node-runtime-try-runtime path: main-runtime - commit: ${{ inputs.upgrade-to-commit }} + commit: ${{ env.UPGRADE_TO_COMMIT }} - name: Permissions for latest binaries run: | @@ -139,18 +164,24 @@ jobs: - name: Start a localnet from current release env: BINARY_ROOT_PATH: ./latest-release-bins + DEBUG_OUTPUT_DESTINATION: /tmp/chainflip/debug.log run: | set -x mkdir -p /tmp/chainflip/bashful mkdir -p 
/tmp/chainflip/doc mkdir -p /tmp/chainflip/dopey + touch /tmp/chainflip/debug.log + chmod +x ${{ env.BINARY_ROOT_PATH }}/chainflip-* touch ./localnet/.setup_complete ./localnet/manage.sh - name: Run bouncer on latest release id: pre-upgrade-bouncer - working-directory: bouncer run: | + git fetch --all + git checkout ${{ steps.get-upgrade-from-commit.outputs.commit-sha }} + git rev-parse HEAD + cd bouncer ./run.sh # we need to be sure that when this fails, we catch the error, any panics etc. that occur @@ -158,8 +189,10 @@ jobs: - name: Upgrade network shell: bash id: upgrade-network - working-directory: bouncer run: | + git checkout ${{ github.sha }} + git rev-parse HEAD + cd bouncer ./commands/upgrade_network.ts prebuilt \ --runtime ./../main-runtime/state_chain_runtime.compact.compressed.wasm \ --bins ./../upgrade-to-bins \ @@ -168,8 +201,10 @@ jobs: - name: Run bouncer after upgrade id: post-upgrade-bouncer - working-directory: bouncer run: | + git checkout ${{ env.UPGRADE_TO_COMMIT }} + git rev-parse HEAD + cd bouncer ./tests/all_concurrent_tests.ts - name: Print old chainflip-engine logs @@ -197,6 +232,11 @@ jobs: run: | cat /tmp/chainflip/chainflip-lp-api.log + - name: Print localnet init debug logs 🕵️‍♂️ + if: always() + run: | + cat /tmp/chainflip/debug.log + - name: Upload Localnet Logs 💾 if: always() continue-on-error: true diff --git a/bouncer/shared/lp_api_test.ts b/bouncer/shared/lp_api_test.ts index 5610cd30a7..4cb74ce0c6 100644 --- a/bouncer/shared/lp_api_test.ts +++ b/bouncer/shared/lp_api_test.ts @@ -119,6 +119,7 @@ async function testLiquidityDeposit() { } async function testWithdrawAsset() { + console.log('=== Starting testWithdrawAsset ==='); const oldBalance = await getBalance(testAsset, testAddress); const result = await lpApiRpc(`lp_withdraw_asset`, [ @@ -133,9 +134,11 @@ async function testWithdrawAsset() { assert(egressId > 0, `Unexpected egressId: ${egressId}`); await observeBalanceIncrease(testAsset, testAddress, oldBalance); + 
console.log('=== testWithdrawAsset complete ==='); } async function testRegisterWithExistingLpAccount() { + console.log('=== Starting testRegisterWithExistingLpAccount ==='); try { await lpApiRpc(`lp_register_account`, []); throw new Error(`Unexpected lp_register_account result`); @@ -147,11 +150,13 @@ async function testRegisterWithExistingLpAccount() { throw new Error(`Unexpected lp_register_account error: ${error}`); } } + console.log('=== testRegisterWithExistingLpAccount complete ==='); } /// Test lp_set_range_order and lp_update_range_order by minting, updating, and burning a range order. async function testRangeOrder() { + console.log('=== Starting testRangeOrder ==='); const range = { start: 1, end: 2 }; const orderId = 74398; // Arbitrary order id so it does not interfere with other tests const zeroAssetAmounts = { @@ -245,19 +250,24 @@ async function testRangeOrder() { } }); assert.strictEqual(matchBurn, true, `Expected burn of range order to decrease liquidity to 0`); + + console.log('=== testRangeOrder complete ==='); } async function testGetOpenSwapChannels() { + console.log('=== Starting testGetOpenSwapChannels ==='); // TODO: Test with some SwapChannelInfo data const openSwapChannels = await lpApiRpc(`lp_get_open_swap_channels`, []); assert(openSwapChannels.ethereum, `Missing ethereum swap channel info`); assert(openSwapChannels.polkadot, `Missing polkadot swap channel info`); assert(openSwapChannels.bitcoin, `Missing bitcoin swap channel info`); + console.log('=== testGetOpenSwapChannels complete ==='); } /// Test lp_set_limit_order and lp_update_limit_order by minting, updating, and burning a limit order. 
async function testLimitOrder() { + console.log('=== Starting testLimitOrder ==='); const orderId = 98432; // Arbitrary order id so it does not interfere with other tests const tick = 2; @@ -336,6 +346,8 @@ async function testLimitOrder() { } }); assert.strictEqual(matchBurn, true, `Expected burn of limit order to decrease liquidity to 0`); + + console.log('=== testLimitOrder complete ==='); } /// Runs all of the LP commands via the LP API Json RPC Server that is running and checks that the returned data is as expected diff --git a/bouncer/shared/provide_liquidity.ts b/bouncer/shared/provide_liquidity.ts index 07bb770342..916c553bda 100644 --- a/bouncer/shared/provide_liquidity.ts +++ b/bouncer/shared/provide_liquidity.ts @@ -73,7 +73,7 @@ export async function provideLiquidity(ccy: Asset, amount: number, waitForFinali undefined, waitForFinalization, ); - send(ccy, ingressAddress, String(amount)); + await send(ccy, ingressAddress, String(amount)); await eventHandle; } diff --git a/bouncer/shared/submit_runtime_upgrade.ts b/bouncer/shared/submit_runtime_upgrade.ts index ac59c0eec4..2b011a0712 100755 --- a/bouncer/shared/submit_runtime_upgrade.ts +++ b/bouncer/shared/submit_runtime_upgrade.ts @@ -54,6 +54,7 @@ export async function submitRuntimeUpgradeWithRestrictions( } console.log('Runtime upgrade completed.'); + chainflip.disconnect(); } export async function submitRuntimeUpgradeWasmPath(wasmPath: string) { diff --git a/bouncer/shared/upgrade_network.ts b/bouncer/shared/upgrade_network.ts index c2b8a250d9..686254b6d4 100755 --- a/bouncer/shared/upgrade_network.ts +++ b/bouncer/shared/upgrade_network.ts @@ -62,7 +62,7 @@ async function incompatibleUpgradeNoBuild( const nodeCount = numberOfNodes + '-node'; execSync( - `LOG_SUFFIX="-upgrade" NODE_COUNT=${nodeCount} SELECTED_NODES="${selectedNodes.join( + `INIT_RUN=false LOG_SUFFIX="-upgrade" NODE_COUNT=${nodeCount} SELECTED_NODES="${selectedNodes.join( ' ', )}" LOCALNET_INIT_DIR=${localnetInitPath} 
BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-engines.sh`, ); @@ -77,17 +77,57 @@ async function incompatibleUpgradeNoBuild( 'Check that the old engine has now shut down, and that the new engine is now running.', ); - execSync(`kill $(lsof -t -i:10997)`); - execSync(`kill $(lsof -t -i:10589)`); + // Wait for the old broker and lp-api to shut down, and ensure the runtime upgrade is finalised. + await sleep(20000); + + console.log('Killing the old node.'); + execSync(`kill $(ps aux | grep chainflip-node | grep -v grep | awk '{print $2}')`); + + console.log('Killed old node'); + + // let them shutdown + await sleep(2000); + console.log('Stopped old broker and lp-api. Starting the new ones.'); - // Wait for the old broker and lp-api to shut down, and ensure the runtime upgrade is finalised. - await sleep(22000); + console.log('Starting the new node'); const KEYS_DIR = `${localnetInitPath}/keys`; + + const selectedNodesSep = `"${selectedNodes.join(' ')}"`; + + try { + const buffer = execSync( + `INIT_RPC_PORT=9944 KEYS_DIR=${KEYS_DIR} NODE_COUNT=${nodeCount} SELECTED_NODES=${selectedNodesSep} LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-nodes.sh`, + ); + console.log('start node success: ' + buffer.toString()); + } catch (e) { + console.error('start node error: '); + console.log(e); + } + + await sleep(7000); + + const output = execSync("ps aux | grep chainflip-node | grep -v grep | awk '{print $2}'"); + console.log('New node PID: ' + output.toString()); + + // Restart the engines + execSync( + `INIT_RUN=false LOG_SUFFIX="-upgrade" NODE_COUNT=${nodeCount} SELECTED_NODES=${selectedNodesSep} LOCALNET_INIT_DIR=${localnetInitPath} BINARY_ROOT_PATH=${binaryPath} ${localnetInitPath}/scripts/start-all-engines.sh`, + ); + + console.log('Starting new broker and lp-api.'); + execSync(`KEYS_DIR=${KEYS_DIR} ${localnetInitPath}/scripts/start-broker-api.sh ${binaryPath}`); 
execSync(`KEYS_DIR=${KEYS_DIR} ${localnetInitPath}/scripts/start-lp-api.sh ${binaryPath}`); - await sleep(6000); + + await sleep(20000); + + const brokerPID = execSync('lsof -t -i:10997'); + console.log('New broker PID: ' + brokerPID.toString()); + const lpApiPID = execSync('lsof -t -i:10589'); + console.log('New LP API PID: ' + lpApiPID.toString()); + console.log('Started new broker and lp-api.'); } diff --git a/bouncer/tests/all_concurrent_tests.ts b/bouncer/tests/all_concurrent_tests.ts index 9cd9d5322d..d3ca9f852e 100755 --- a/bouncer/tests/all_concurrent_tests.ts +++ b/bouncer/tests/all_concurrent_tests.ts @@ -43,7 +43,7 @@ async function runAllConcurrentTests() { await Promise.all([broadcastAborted, feeDeficitRefused]); } -runWithTimeout(runAllConcurrentTests(), 1800000) +runWithTimeout(runAllConcurrentTests(), 1000000) .then(() => { // There are some dangling resources that prevent the process from exiting process.exit(0); diff --git a/localnet/init/scripts/start-all-engines.sh b/localnet/init/scripts/start-all-engines.sh index e214424a18..14dcee2f62 100755 --- a/localnet/init/scripts/start-all-engines.sh +++ b/localnet/init/scripts/start-all-engines.sh @@ -5,11 +5,17 @@ # These need to match what's in the manage.py script. SC_RPC_PORT=9944 HEALTH_PORT=5555 - +# if INIT_RUN is not set then set it to true +INIT_RUN=${INIT_RUN:-true} ENGINE_P2P_PORT=3100 LOG_PORT=30687 for NODE in $SELECTED_NODES; do - cp -R $LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/$NODE.db /tmp/chainflip/$NODE + if $INIT_RUN; then + echo "Copying db to tmp for $NODE" + cp -R "$LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/$NODE.db" "/tmp/chainflip/$NODE" + else + echo "Not copying db to tmp for $NODE" + fi BINARY_ROOT_PATH=$BINARY_ROOT_PATH NODE_NAME=$NODE P2P_PORT=$ENGINE_P2P_PORT SC_RPC_PORT=$SC_RPC_PORT LOG_PORT=$LOG_PORT HEALTH_PORT=$HEALTH_PORT LOG_SUFFIX=$LOG_SUFFIX $LOCALNET_INIT_DIR/scripts/start-engine.sh echo "🚗 Starting chainflip-engine of $NODE ..." 
((SC_RPC_PORT++)) diff --git a/localnet/init/scripts/start-all-nodes.sh b/localnet/init/scripts/start-all-nodes.sh new file mode 100755 index 0000000000..8c56c7637c --- /dev/null +++ b/localnet/init/scripts/start-all-nodes.sh @@ -0,0 +1,17 @@ +#!/bin/bash +set -e + +echo "🚧 Starting chainflip-node(s) ..." + +echo "start-all-nodes INIT_RPC_PORT: $INIT_RPC_PORT" + +P2P_PORT=30333 +RPC_PORT=$INIT_RPC_PORT +for NODE in $SELECTED_NODES; do + echo "🚧 Starting chainflip-node of $NODE ..." + + KEYS_DIR=$KEYS_DIR LOCALNET_INIT_DIR=$LOCALNET_INIT_DIR $LOCALNET_INIT_DIR/scripts/start-node.sh $BINARY_ROOT_PATH $NODE $P2P_PORT $RPC_PORT $NODE_COUNT + ((P2P_PORT++)) + ((RPC_PORT++)) +done + diff --git a/localnet/init/scripts/start-node.sh b/localnet/init/scripts/start-node.sh index fec887a8c1..ce2328c9fc 100755 --- a/localnet/init/scripts/start-node.sh +++ b/localnet/init/scripts/start-node.sh @@ -11,16 +11,16 @@ if [ $NODE_COUNT == "3-node" ]; then CHAIN="dev-3" fi -source ./localnet/init/env/eth.env -source ./localnet/init/env/arb.env -source ./localnet/init/env/node.env -export ETH_INIT_AGG_KEY=$(jq -r '.eth_agg_key' ./localnet/init/keyshare/$NODE_COUNT/agg_keys.json) -export DOT_INIT_AGG_KEY=$(jq -r '.dot_agg_key' ./localnet/init/keyshare/$NODE_COUNT/agg_keys.json) -$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat ./localnet/init/keys/$NODE_NAME/signing_key_file) --key-type=aura --scheme=sr25519 -$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat ./localnet/init/keys/$NODE_NAME/signing_key_file) --key-type=gran --scheme=ed25519 +source $LOCALNET_INIT_DIR/env/eth.env +source $LOCALNET_INIT_DIR/env/arb.env +source $LOCALNET_INIT_DIR/env/node.env +export ETH_INIT_AGG_KEY=$(jq -r '.eth_agg_key' $LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/agg_keys.json) +export DOT_INIT_AGG_KEY=$(jq -r '.dot_agg_key' 
$LOCALNET_INIT_DIR/keyshare/$NODE_COUNT/agg_keys.json) +$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat $KEYS_DIR/$NODE_NAME/signing_key_file) --key-type=aura --scheme=sr25519 +$BINARY_ROOT_PATH/chainflip-node key insert --chain=$CHAIN --base-path=/tmp/chainflip/$NODE_NAME/chaindata --suri=0x$(cat $KEYS_DIR/$NODE_NAME/signing_key_file) --key-type=gran --scheme=ed25519 $BINARY_ROOT_PATH/chainflip-node --chain=$CHAIN \ --base-path=/tmp/chainflip/$NODE_NAME/chaindata \ - --node-key-file=./localnet/init/keys/$NODE_NAME/node_key_file \ + --node-key-file=$KEYS_DIR/$NODE_NAME/node_key_file \ --validator \ --force-authoring \ --rpc-cors=all \ diff --git a/localnet/manage.sh b/localnet/manage.sh index b7d94a787f..4e84188193 100755 --- a/localnet/manage.sh +++ b/localnet/manage.sh @@ -141,20 +141,26 @@ build-localnet() { INIT_RPC_PORT=9944 - P2P_PORT=30333 - RPC_PORT=$INIT_RPC_PORT - for NODE in "${SELECTED_NODES[@]}"; do - echo "🚧 Starting chainflip-node of $NODE ..." - DOT_GENESIS_HASH=${DOT_GENESIS_HASH:2} ./$LOCALNET_INIT_DIR/scripts/start-node.sh $BINARY_ROOT_PATH $NODE $P2P_PORT $RPC_PORT $NODE_COUNT - ((P2P_PORT++)) - ((RPC_PORT++)) - done + # This is unset on `destroy()` + export DOT_GENESIS_HASH=${DOT_GENESIS_HASH:2} + + KEYS_DIR=./$LOCALNET_INIT_DIR/keys + + BINARY_ROOT_PATH=$BINARY_ROOT_PATH \ + SELECTED_NODES=${SELECTED_NODES[@]} \ + NODE_COUNT=$NODE_COUNT \ + INIT_RPC_PORT=$INIT_RPC_PORT \ + LOCALNET_INIT_DIR=$LOCALNET_INIT_DIR \ + KEYS_DIR=$KEYS_DIR \ + ./$LOCALNET_INIT_DIR/scripts/start-all-nodes.sh + + echo "🚧 Checking health ..." RPC_PORT=$INIT_RPC_PORT - for NODE in "${SELECTED_NODES[@]}"; do - check_endpoint_health -s -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "chain_getBlock"}' "http://localhost:$RPC_PORT" >>$DEBUG_OUTPUT_DESTINATION - echo "💚 $NODE's chainflip-node is running!" 
- ((RPC_PORT++)) + for NODE in $SELECTED_NODES; do + check_endpoint_health -s -H "Content-Type: application/json" -d '{"id":1, "jsonrpc":"2.0", "method": "chain_getBlock"}' "http://localhost:$RPC_PORT" >>$DEBUG_OUTPUT_DESTINATION + echo "💚 $NODE's chainflip-node is running!" + ((RPC_PORT++)) done NODE_COUNT=$NODE_COUNT \ @@ -179,8 +185,6 @@ build-localnet() { wait - KEYS_DIR=./$LOCALNET_INIT_DIR/keys - echo "🕺 Starting Broker API ..." KEYS_DIR=$KEYS_DIR ./$LOCALNET_INIT_DIR/scripts/start-broker-api.sh $BINARY_ROOT_PATH @@ -202,6 +206,9 @@ destroy() { for pid in $(ps -ef | grep solana | grep -v grep | awk '{print $2}'); do kill -9 $pid; done rm -rf /tmp/chainflip rm -rf /tmp/solana/ + + unset DOT_GENESIS_HASH + echo "done" }