Testnet Prod Sync #4277

Merged · 23 commits · May 19, 2023

Commits
d0b9a7c
feat: implement cache
wanglonghong May 17, 2023
95aef7c
refactor: spokeSMT and hubSMT creation
wanglonghong May 17, 2023
fdc7ead
chore: add try/catch for database calls.
sanchaymittal May 18, 2023
65e161f
chore: add try/catch for database for error status
sanchaymittal May 18, 2023
c29c7dd
chore: add try/catch for database for saveTransfers
sanchaymittal May 18, 2023
272506b
chore: add try/catch for database for simulation
sanchaymittal May 18, 2023
038f910
feat: batchProverSize per domain
wanglonghong May 18, 2023
43b701a
chore: remove console logs
wanglonghong May 18, 2023
c179c40
feat: pick up the available aggregate roots
wanglonghong May 18, 2023
1ae4ae7
fix: compare
wanglonghong May 18, 2023
755f4c2
ci: batchsize
wanglonghong May 18, 2023
56f2dc9
ci(test): coverage
wanglonghong May 18, 2023
1c67299
ci: remove older dupe publish action
just-a-node May 18, 2023
2207318
ci: subpackages can differ in version tags
just-a-node May 19, 2023
d3b2292
ci(test): fix
wanglonghong May 19, 2023
fd957a8
ci: typo
just-a-node May 19, 2023
7a0c420
ci(test): coverage
wanglonghong May 19, 2023
00b0b16
Merge pull request #4269 from connext/fix-build-workflow
just-a-node May 19, 2023
0ed025e
fix: remove local setup
wanglonghong May 19, 2023
c5ecec8
Merge pull request #4263 from connext/improve-message-verification
wanglonghong May 19, 2023
2ec6b5f
Merge pull request #4267 from connext/4244-handle-database
sanchaymittal May 19, 2023
c16ff29
Cache completed subtrees
preethamr May 19, 2023
fef9a92
Merge pull request #4276 from connext/subtree_cache
LayneHaber May 19, 2023
Changes from all commits
84 changes: 21 additions & 63 deletions .github/workflows/build-test-deploy.yml
@@ -90,14 +90,6 @@ jobs:
env:
YARN_NPM_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
- VERSION=$(cat packages/agents/sdk/package.json | jq -r '.version')
-
- if [[ "$VERSION" == *"-alpha"* ]]; then
- tag="alpha"
- elif [[ "$VERSION" == *"-beta"* ]]; then
- tag="beta"
- fi
-
workspaces=(
"packages/utils:@connext/nxtp-utils"
"packages/deployments/contracts:@connext/smart-contracts"
@@ -113,6 +105,13 @@ jobs:
workspace="${split_entry[1]}"
subpackage_version=$(cat $directory/package.json | jq -r '.version')

tag=""
if [[ "$subpackage_version" == *"-alpha"* ]]; then
tag="alpha"
elif [[ "$subpackage_version" == *"-beta"* ]]; then
tag="beta"
fi

echo "Checking $workspace for existing version..."
npm_package_info=$(npm view $workspace versions --json)

@@ -143,14 +142,6 @@ jobs:
env:
YARN_NPM_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
- VERSION=$(cat packages/deployments/contracts/package.json | jq -r '.version')
-
- if [[ "$VERSION" == *"-alpha"* ]]; then
- tag="alpha"
- elif [[ "$VERSION" == *"-beta"* ]]; then
- tag="beta"
- fi
-
workspaces=(
"packages/utils:@connext/nxtp-utils"
"packages/deployments/contracts:@connext/smart-contracts"
@@ -162,6 +153,13 @@ jobs:
workspace="${split_entry[1]}"
subpackage_version=$(cat $directory/package.json | jq -r '.version')

tag=""
if [[ "$subpackage_version" == *"-alpha"* ]]; then
tag="alpha"
elif [[ "$subpackage_version" == *"-beta"* ]]; then
tag="beta"
fi

echo "Checking $workspace for existing version..."
npm_package_info=$(npm view $workspace versions --json)

@@ -192,14 +190,6 @@ jobs:
env:
YARN_NPM_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
run: |
- VERSION=$(cat packages/agents/chain-abstraction/package.json | jq -r '.version')
-
- if [[ "$VERSION" == *"-alpha"* ]]; then
- tag="alpha"
- elif [[ "$VERSION" == *"-beta"* ]]; then
- tag="beta"
- fi
-
workspaces=(
"packages/utils:@connext/nxtp-utils"
"packages/agents/chain-abstraction:@connext/chain-abstraction"
@@ -211,6 +201,13 @@ jobs:
workspace="${split_entry[1]}"
subpackage_version=$(cat $directory/package.json | jq -r '.version')

tag=""
if [[ "$subpackage_version" == *"-alpha"* ]]; then
tag="alpha"
elif [[ "$subpackage_version" == *"-beta"* ]]; then
tag="beta"
fi

echo "Checking $workspace for existing version..."
npm_package_info=$(npm view $workspace versions --json)

@@ -236,45 +233,6 @@ jobs:
fi
done

- - name: Extract version, determine tag, and publish chain-abstraction
- if: ${{ startsWith(github.ref, 'refs/tags/chain-abstraction-v') }}
- env:
- YARN_NPM_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
- run: |
- VERSION=$(cat packages/agents/chain-abstraction/package.json | jq -r '.version')
-
- if [[ "$VERSION" == *"-alpha"* ]]; then
- tag="alpha"
- elif [[ "$VERSION" == *"-beta"* ]]; then
- tag="beta"
- fi
-
- workspaces=(
- "packages/utils:@connext/nxtp-utils"
- "packages/agents/chain-abstraction:@connext/chain-abstraction"
- )
-
- for entry in "${workspaces[@]}"; do
- IFS=":"; read -ra split_entry <<< "$entry"
- directory="${split_entry[0]}"
- workspace="${split_entry[1]}"
- subpackage_version=$(cat $directory/package.json | jq -r '.version')
-
- echo "Checking $workspace for existing version..."
- published_version=$(npm view $workspace version)
-
- if [[ "$published_version" != "$subpackage_version" ]]; then
- echo "Publishing $workspace with version $subpackage_version"
- if [[ "$tag" ]]; then
- yarn workspace $workspace npm publish --access public --tag $tag
- else
- yarn workspace $workspace npm publish --access public
- fi
- else
- echo "Skipping $workspace as version $subpackage_version already exists"
- fi
- done
-
build-and-push-router-publisher-image:
if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/staging' || github.ref == 'refs/heads/testnet-prod' || github.ref == 'refs/heads/prod'
env:
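The workflow change above moves pre-release tag detection inside the per-workspace loop, so each subpackage's own version string decides its npm dist-tag instead of one VERSION read from a single package.json. A minimal TypeScript sketch of that rule, purely illustrative (distTagFor is not part of the repo; it only mirrors the bash above):

// Hypothetical helper mirroring the per-subpackage tag logic in the workflow.
function distTagFor(version: string): "alpha" | "beta" | undefined {
  if (version.includes("-alpha")) return "alpha"; // e.g. "2.0.1-alpha.3" publishes under --tag alpha
  if (version.includes("-beta")) return "beta";   // e.g. "2.0.1-beta.0" publishes under --tag beta
  return undefined;                               // stable versions publish with no explicit dist-tag
}

Because the tag is recomputed for each entry in workspaces, a stable @connext/nxtp-utils and an alpha @connext/smart-contracts in the same run no longer share a single tag, which is what the "subpackages can differ in version tags" commit addresses.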
4 changes: 3 additions & 1 deletion ops/testnet/prod/core/config.tf
@@ -292,7 +292,9 @@ locals {
sendOutboundRoot = "https://betteruptime.com/api/v1/heartbeat/${var.lighthouse_send_outbound_root_heartbeat}"
}
hubDomain = "1735353714"
- proverBatchSize = 1
+ proverBatchSize = {
+ "1668247156" = 10
+ }
})

local_relayer_config = jsonencode({
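proverBatchSize in the testnet-prod lighthouse config is now a map keyed by domain id rather than a single integer. A TypeScript sketch of the shape the rendered config carries (the value comes from the diff; behavior for unlisted domains is an assumption, see the config.ts change below):

// Shape produced by the config.tf change: batch size per domain id.
const proverBatchSize: Record<string, number> = {
  "1668247156": 10, // from the diff; domains without an entry presumably fall back to DEFAULT_PROVER_BATCH_SIZE
};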
13 changes: 7 additions & 6 deletions packages/adapters/database/src/client.ts
@@ -798,20 +798,21 @@ export const getLatestMessageRoot = async (
return root.length > 0 ? convertFromDbRootMessage(root[0]) : undefined;
};

- export const getLatestAggregateRoot = async (
+ export const getLatestAggregateRoots = async (
domain: string,
+ limit = 1,
orderDirection: "ASC" | "DESC" = "DESC",
_pool?: Pool | db.TxnClientForRepeatableRead,
- ): Promise<ReceivedAggregateRoot | undefined> => {
+ ): Promise<ReceivedAggregateRoot[]> => {
const poolToUse = _pool ?? pool;
- const root = await db
- .selectOne(
+ const roots = await db
+ .select(
"received_aggregate_roots",
{ domain: domain },
- { limit: 1, order: { by: "block_number", direction: orderDirection } },
+ { limit, order: { by: "block_number", direction: orderDirection } },
)
.run(poolToUse);
- return root ? convertFromDbReceivedAggregateRoot(root) : undefined;
+ return roots.map(convertFromDbReceivedAggregateRoot);
};

export const getAggregateRootByRootAndDomain = async (
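getLatestAggregateRoot becomes getLatestAggregateRoots: it gains a limit parameter, switches from db.selectOne to db.select, and returns an array instead of a single optional record. A minimal usage sketch against the new signature (the domain value is illustrative):

// Fetch up to 3 of the most recently received aggregate roots for a domain, newest block first.
const roots = await getLatestAggregateRoots("1668247156", 3);
if (roots.length === 0) {
  // nothing received for this domain yet; the old API returned undefined here
} else {
  const newest = roots[0]; // ReceivedAggregateRoot with the highest block_number
}

Callers that previously checked for undefined now check for an empty array, as the updated expectation in client.spec.ts shows.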
9 changes: 5 additions & 4 deletions packages/adapters/database/src/index.ts
@@ -46,7 +46,7 @@ import {
getAggregateRootCount,
getMessageRootIndex,
getLatestMessageRoot,
- getLatestAggregateRoot,
+ getLatestAggregateRoots,
getMessageRootAggregatedFromIndex,
getMessageRootsFromIndex,
getMessageRootCount,
@@ -175,11 +175,12 @@ export type Database = {
aggregate_root: string,
_pool?: Pool | TxnClientForRepeatableRead,
) => Promise<RootMessage | undefined>;
- getLatestAggregateRoot: (
+ getLatestAggregateRoots: (
domain: string,
+ limit: number,
orderDirection?: "ASC" | "DESC",
_pool?: Pool | TxnClientForRepeatableRead,
- ) => Promise<ReceivedAggregateRoot | undefined>;
+ ) => Promise<ReceivedAggregateRoot[]>;
getAggregateRootByRootAndDomain: (
domain: string,
aggregatedRoot: string,
@@ -302,7 +303,7 @@ export const getDatabase = async (databaseUrl: string, logger: Logger): Promise<
getAggregateRootCount,
getMessageRootIndex,
getLatestMessageRoot,
- getLatestAggregateRoot,
+ getLatestAggregateRoots,
getMessageRootAggregatedFromIndex,
getMessageRootsFromIndex,
getMessageRootCount,
21 changes: 17 additions & 4 deletions packages/adapters/database/test/client.spec.ts
@@ -49,7 +49,7 @@ import {
getRoot,
getMessageRootIndex,
getLatestMessageRoot,
- getLatestAggregateRoot,
+ getLatestAggregateRoots,
getAggregateRootCount,
getUnProcessedMessages,
getUnProcessedMessagesByIndex,
@@ -69,6 +69,7 @@ import {
getAssets,
saveAssetPrice,
getPendingTransfersByDomains,
+ updateExecuteSimulationData,
} from "../src/client";

describe("Database client", () => {
@@ -959,7 +960,7 @@ describe("Database client", () => {
expect(await getAggregateRootCount("", pool)).to.eq(undefined);
expect(await getAggregateRoot("", pool)).to.eq(undefined);
expect(await getLatestMessageRoot("", "", pool)).to.eq(undefined);
- expect(await getLatestAggregateRoot("", "DESC", pool)).to.eq(undefined);
+ expect(await getLatestAggregateRoots("", 1, "DESC", pool)).to.be.deep.eq([]);
});

it("should throw errors", async () => {
@@ -1076,8 +1077,8 @@ describe("Database client", () => {
}
await saveReceivedAggregateRoot(roots, pool);

- const latest = await getLatestAggregateRoot(roots[0].domain, "DESC", pool);
- expect(latest).to.deep.eq(roots[batchSize - 1]);
+ const latest = await getLatestAggregateRoots(roots[0].domain, 1, "DESC", pool);
+ expect(latest[0]).to.deep.eq(roots[batchSize - 1]);
});

it("should update error status", async () => {
@@ -1240,4 +1241,16 @@
expect(transfers).includes(xTransfer1.transferId);
expect(transfers).includes(xTransfer2.transferId);
});

it("should update execution simulation data", async () => {
const originDomain = "1337";
const destinationDomain = "1338";
const xTransfer: XTransfer = mock.entity.xtransfer({
transferId: getRandomBytes32(),
originDomain,
destinationDomain,
status: XTransferStatus.XCalled,
});
await updateExecuteSimulationData(xTransfer.transferId, "0x", "0x", "0x", "0x", pool);
});
});
2 changes: 1 addition & 1 deletion packages/adapters/database/test/mock.ts
@@ -28,7 +28,7 @@ export const mockDatabase = (): Database => {
getMessageRootAggregatedFromIndex: stub().resolves(),
getMessageRootCount: stub().resolves(),
getLatestMessageRoot: stub().resolves(),
- getLatestAggregateRoot: stub().resolves(),
+ getLatestAggregateRoots: stub().resolves(),
getSpokeNode: stub().resolves(),
getSpokeNodes: stub().resolves([]),
getHubNode: stub().resolves(),
10 changes: 3 additions & 7 deletions packages/agents/lighthouse/src/config.ts
@@ -10,7 +10,7 @@ import { existsSync, readFileSync } from "./mockable";
// Polling mins and defaults.
const MIN_CARTOGRAPHER_POLL_INTERVAL = 30_000;
const DEFAULT_CARTOGRAPHER_POLL_INTERVAL = 60_000;
- const DEFAULT_PROVER_BATCH_SIZE = 10;
+ export const DEFAULT_PROVER_BATCH_SIZE = 1;
export const DEFAULT_RELAYER_WAIT_TIME = 60_000 * 3600; // 1 hour

dotenvConfig();
@@ -60,7 +60,7 @@ export const NxtpLighthouseConfigSchema = Type.Object({
sendOutboundRoot: Type.String({ format: "uri" }),
}),
),
- proverBatchSize: Type.Integer({ minimum: 1, maximum: 1000 }),
+ proverBatchSize: Type.Record(Type.String(), Type.Integer({ minimum: 1, maximum: 100 })),
relayerWaitTime: Type.Integer({ minimum: 0 }),
service: Type.Union([
Type.Literal("prover"),
@@ -154,11 +154,7 @@ export const getEnvConfig = (
subgraphPrefix: process.env.NXTP_SUBGRAPH_PREFIX || configJson.subgraphPrefix || configFile.subgraphPrefix,
healthUrls: process.env.NXTP_HEALTH_URLS || configJson.healthUrls || configFile.healthUrls || {},
service: process.env.LIGHTHOUSE_SERVICE || configJson.service || configFile.service,
- proverBatchSize:
- process.env.NXTP_PROVER_BATCH_SIZE ||
- configJson.proverBatchSize ||
- configFile.proverBatchSize ||
- DEFAULT_PROVER_BATCH_SIZE,
+ proverBatchSize: configJson.proverBatchSize || configFile.proverBatchSize || {},
relayerWaitTime:
process.env.NXTP_RELAYER_WAIT_TIME ||
configJson.relayerWaitTime ||
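With proverBatchSize now schema-typed as a Record of domain id to integer, and DEFAULT_PROVER_BATCH_SIZE exported as 1, a consumer is expected to look up the per-domain value and fall back to the default. The prover-side lookup is not part of this diff, so the helper below is only a sketch of the assumed pattern:

import { DEFAULT_PROVER_BATCH_SIZE } from "./config"; // exported by the change above; import path illustrative

// Hypothetical helper: resolve the batch size for one domain from the map-typed config.
const batchSizeFor = (proverBatchSize: Record<string, number>, domain: string): number =>
  proverBatchSize[domain] ?? DEFAULT_PROVER_BATCH_SIZE;

// With the testnet-prod config above: batchSizeFor(config.proverBatchSize, "1668247156") === 10,
// while any unlisted domain resolves to the default of 1.

Note that the NXTP_PROVER_BATCH_SIZE environment override is dropped from getEnvConfig; the value now comes only from configJson or configFile.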
@@ -2,22 +2,44 @@ import { Database } from "@connext/nxtp-adapters-database";
import { DBHelper } from "@connext/nxtp-utils";

export class SpokeDBHelper implements DBHelper {
+ private cachedNode: Record<string, string> = {};
+ private cachedNodes: Record<string, string[]> = {};
+ private cachedRoot: Record<string, string> = {};
constructor(private domain: string, private count: number, private db: Database) {}

public async getCount(): Promise<number> {
return this.count;
}

public async getNode(index: number): Promise<string | undefined> {
- return await this.db.getSpokeNode(this.domain, index, this.count);
+ let node: string | undefined = this.cachedNode[`${index}`];
+ if (!node) {
+ node = await this.db.getSpokeNode(this.domain, index, this.count);
+ if (node) {
+ this.cachedNode[`${index}`] = node;
+ }
+ }
+ return node;
}

public async getNodes(start: number, end: number): Promise<string[]> {
- return await this.db.getSpokeNodes(this.domain, start, end, this.count);
+ let nodes: string[] = this.cachedNodes[`${start}-${end}`];
+ if (!nodes || nodes.length == 0) {
+ nodes = await this.db.getSpokeNodes(this.domain, start, end, this.count);
+ this.cachedNodes[`${start}-${end}`] = nodes;
+ }
+ return nodes;
}

public async getRoot(path: string): Promise<string | undefined> {
- return await this.db.getRoot(this.domain, path);
+ let root: string | undefined = this.cachedRoot[path];
+ if (!root) {
+ root = await this.db.getRoot(this.domain, path);
+ if (root) {
+ this.cachedRoot[path] = root;
+ }
+ }
+ return root;
}

public async putRoot(path: string, hash: string): Promise<void> {
@@ -26,22 +48,44 @@ export class SpokeDBHelper implements DBHelper {
}

export class HubDBHelper implements DBHelper {
+ private cachedNode: Record<string, string> = {};
+ private cachedNodes: Record<string, string[]> = {};
+ private cachedRoot: Record<string, string> = {};
constructor(private domain: string, private count: number, private db: Database) {}

public async getCount(): Promise<number> {
return this.count;
}

public async getNode(index: number): Promise<string | undefined> {
- return await this.db.getHubNode(index, this.count);
+ let node: string | undefined = this.cachedNode[`${index}`];
+ if (!node) {
+ node = await this.db.getHubNode(index, this.count);
+ if (node) {
+ this.cachedNode[`${index}`] = node;
+ }
+ }
+ return node;
}

public async getNodes(start: number, end: number): Promise<string[]> {
- return await this.db.getHubNodes(start, end, this.count);
+ let nodes: string[] = this.cachedNodes[`${start}-${end}`];
+ if (!nodes || nodes.length == 0) {
+ nodes = await this.db.getHubNodes(start, end, this.count);
+ this.cachedNodes[`${start}-${end}`] = nodes;
+ }
+ return nodes;
}

public async getRoot(path: string): Promise<string | undefined> {
- return await this.db.getRoot(this.domain, path);
+ let root: string | undefined = this.cachedRoot[path];
+ if (!root) {
+ root = await this.db.getRoot(this.domain, path);
+ if (root) {
+ this.cachedRoot[path] = root;
+ }
+ }
+ return root;
}

public async putRoot(path: string, hash: string): Promise<void> {
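SpokeDBHelper and HubDBHelper gain per-instance, read-through caches for single nodes, node ranges, and roots (the "implement cache" and "Cache completed subtrees" commits). Each helper is constructed with a fixed domain and count, so the index, "start-end", and path keys stay valid for the life of the instance, and only found values are cached, meaning a missing node or root is re-queried on the next call rather than negatively cached. A usage sketch, assuming db is an initialized Database adapter from @connext/nxtp-adapters-database and the domain and count values are illustrative:

// Illustrative only: demonstrates the read-through caching added above.
const helper = new SpokeDBHelper("1668247156", 128, db);

const a = await helper.getNode(5);          // miss: falls through to db.getSpokeNode(domain, 5, count)
const b = await helper.getNode(5);          // hit: served from cachedNode, no second database query
const range = await helper.getNodes(0, 31); // cached under the "0-31" key after the first call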