Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Testnet Prod Sync #4334

Merged
merged 34 commits into from
May 25, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
e9b90ab
feat: separate router and sequencer admin keys (#4316)
May 23, 2023
bb4f0e6
feat: add nonce to stableswap staging events
liu-zhipeng May 23, 2023
4a5d41a
feat: add nonce to v0 stableswap events
liu-zhipeng May 23, 2023
04550e9
fix: event id
liu-zhipeng May 23, 2023
2993cee
feat: add nonce field to types
liu-zhipeng May 23, 2023
f386c5c
feat: add nonce to stableswap db migration
liu-zhipeng May 23, 2023
33dbede
fix: database adapter for stableswap nonce
liu-zhipeng May 23, 2023
c8b74bf
chore: add commitlint local setup
rhlsthrm May 23, 2023
e1456b8
ci: update ci workflow for commitlint
rhlsthrm May 23, 2023
3c32ef6
feat: update subgraph & poller logic with nonce
liu-zhipeng May 23, 2023
29d401e
feat: rebuild graph-client
liu-zhipeng May 23, 2023
37e999c
fix: query with bigInt params
liu-zhipeng May 23, 2023
4e8abb4
fix: carto poller unit tests
liu-zhipeng May 23, 2023
28eb9ec
fix: remove unused checkpoint for asset updates
liu-zhipeng May 24, 2023
c222238
fix(test): unit tests for poller
liu-zhipeng May 24, 2023
efa30c4
Merge pull request #4321 from connext/4313-stableswap-lp-poller-500-tx
liu-zhipeng May 24, 2023
b91e4cb
chore: add code owners (#4318)
May 24, 2023
f754122
Merge pull request #4320 from connext/4319-add-commitlint-verificatio…
sanchaymittal May 24, 2023
8a94afd
ci: fix syntax
rhlsthrm May 24, 2023
f852100
ci: try different install
rhlsthrm May 24, 2023
d6b61e5
ci: fix commitlint
rhlsthrm May 24, 2023
043509d
ci: validate commit
rhlsthrm May 24, 2023
c204fb4
feat: graph-client build
liu-zhipeng May 24, 2023
ed4e269
Merge pull request #4332 from connext/rebuild-graph-client
liu-zhipeng May 24, 2023
57768c4
Merge branch 'testnet-prod'
rhlsthrm May 24, 2023
9817a68
Don't commitlint testnet-prod and prod
rhlsthrm May 24, 2023
af4c3ef
Hot Fix: Update Linea goerli RPC
preethamr May 24, 2023
7ca74b5
Merge branch 'testnet-prod' into main
preethamr May 24, 2023
1c24c81
Fix: Disable commitlint in testnet-prod
preethamr May 25, 2023
3626c78
feat: getSupportedAssets and tests added (#4324)
prathmeshkhandelwal1 May 25, 2023
998d0db
Fix aggregate root no exist (#4335)
wanglonghong May 25, 2023
6bc1259
feat: should ack and reject (#4336)
wanglonghong May 25, 2023
37cbd57
Merge branch 'testnet-prod'
rhlsthrm May 25, 2023
c2e21f3
chore: change comparison (#4330)
wanglonghong May 25, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/build-test-deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ jobs:
node-version: "18"
cache: "yarn"

- name: Validate commits using commitlint
- name: Validate using commitlint
if: github.ref != 'refs/heads/testnet-prod' && github.ref != 'refs/heads/prod'
uses: wagoid/commitlint-github-action@v5

Expand Down
4 changes: 2 additions & 2 deletions ops/testnet/prod/core/config.tf
Original file line number Diff line number Diff line change
Expand Up @@ -262,7 +262,7 @@ locals {
providers = ["https://arb-goerli.g.alchemy.com/v2/${var.arbgoerli_alchemy_key_0}", "https://goerli-rollup.arbitrum.io/rpc"]
}
"1668247156" = {
providers = ["https://consensys-zkevm-goerli-prealpha.infura.io/v3/${var.infura_key}", "https://rpc.goerli.linea.build", "${var.linea_node}"]
providers = ["https://linea-goerli.infura.io/v3/${var.infura_key}", "https://consensys-zkevm-goerli-prealpha.infura.io/v3/${var.infura_key}", "https://rpc.goerli.linea.build", "${var.linea_node}"]
}
"2053862260" = {
providers = ["https://testnet.era.zksync.dev"]
Expand Down Expand Up @@ -291,7 +291,7 @@ locals {
propagate = "https://betteruptime.com/api/v1/heartbeat/${var.lighthouse_propagate_heartbeat}"
sendOutboundRoot = "https://betteruptime.com/api/v1/heartbeat/${var.lighthouse_send_outbound_root_heartbeat}"
}
hubDomain = "1735353714"
hubDomain = "1735353714"
proverBatchSize = {
"1668247156" = 10
}
Expand Down
4 changes: 4 additions & 0 deletions packages/agents/chain-abstraction/src/helpers/api.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
// IPFS gateway URL for the Uniswap default token list; used by
// `getSupportedAssets` for every chain except Gnosis.
export const UNISWAP_GATEWAY = "https://gateway.ipfs.io/ipns/tokens.uniswap.org";

// FOR GNOSIS
// Honeyswap token-list endpoint; used by `getSupportedAssets` for chain ID 100.
export const HONEYSWAP_TOKENS = "https://tokens.honeyswap.org/";
66 changes: 66 additions & 0 deletions packages/agents/chain-abstraction/src/helpers/asset.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import { jsonifyError } from "@connext/nxtp-utils";

import { axiosGet } from "../mockable";
import { UniswapToken, Asset, HoneyswapToken } from "../types";
import { HONEYSWAP_TOKENS, UNISWAP_GATEWAY } from "../helpers/api";

/**
 * Returns the supported assets for a given chain ID, as an array for
 * client-side use.
 *
 * Gnosis (chain ID 100) is resolved against the Honeyswap token list; every
 * other chain is resolved against the Uniswap default token list, including
 * tokens that are only reachable on `chainID` through a bridge (via the
 * token-list `extensions.bridgeInfo` field).
 *
 * @param chainID - The chain ID to look up supported assets for.
 * @returns The matching assets, or `null` when nothing matches.
 * @throws Error when the upstream token-list request fails.
 */
export const getSupportedAssets = async (chainID: number): Promise<Asset[] | null> => {
  try {
    if (chainID === 100) {
      // get token from honeyswap for gnosis
      const res = await axiosGet(HONEYSWAP_TOKENS);
      if (!res) {
        throw new Error("HONEY_SWAP API failing");
      }
      const { tokens } = res.data;
      // NOTE(review): was typed `[Asset]` (a 1-tuple); it is a plain array.
      const supportedAssetForChain: Asset[] = tokens
        .filter((token: HoneyswapToken) => token.chainId === chainID)
        .map((token: HoneyswapToken) => ({
          name: token.name,
          chainId: chainID,
          symbol: token.symbol,
          logoURI: token.logoURI,
          address: token.address,
        }));
      return supportedAssetForChain.length ? supportedAssetForChain : null;
    } else {
      const res = await axiosGet(UNISWAP_GATEWAY);
      if (!res) {
        throw new Error("UNISWAP Gateway failing");
      }
      const { tokens } = res.data;
      // Collect matches directly instead of mapping to `Asset | false` and
      // filtering afterwards (the original `else false; return false;` branch
      // was dead code and polluted the declared `Asset[]` with booleans).
      const filteredAssets: Asset[] = [];
      for (const token of tokens as UniswapToken[]) {
        if (token.chainId === chainID) {
          // Token is listed natively on the requested chain.
          filteredAssets.push({
            name: token.name,
            symbol: token.symbol,
            logoURI: token.logoURI,
            address: token.address,
            chainId: chainID,
          });
        } else {
          // Token may still be supported on `chainID` through a bridge.
          // Optional chaining guards against entries where `extensions` or
          // `bridgeInfo` is absent (the original dereference could throw).
          const bridged = token.extensions?.bridgeInfo?.[chainID.toString()];
          if (bridged) {
            filteredAssets.push({
              name: token.name,
              symbol: token.symbol,
              logoURI: token.logoURI,
              address: bridged.tokenAddress,
              chainId: chainID,
            });
          }
        }
      }
      return filteredAssets.length ? filteredAssets : null;
    }
  } catch (error: unknown) {
    throw new Error(`Getting supportedAsset from SDK failed, e: ${jsonifyError(error as Error).message}`);
  }
};
34 changes: 34 additions & 0 deletions packages/agents/chain-abstraction/src/types/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -62,3 +62,37 @@ export const SwapQuoteParamsSchema = Type.Object({
fee: Type.Optional(TIntegerString),
});
export type SwapQuoteParams = Static<typeof SwapQuoteParamsSchema>;

/**
 * Runtime schema for one entry of the Honeyswap token list, used to type the
 * Gnosis-chain (chain ID 100) branch of `getSupportedAssets`.
 */
export const HoneySwapTokenSchema = Type.Object({
  name: Type.String(),
  address: TAddress,
  symbol: Type.String(),
  chainId: Type.Number(),
  logoURI: Type.String(),
});

// NOTE(review): casing is inconsistent with the schema ("Honeyswap" vs
// "HoneySwap"); consider aligning the two names in a follow-up.
export type HoneyswapToken = Static<typeof HoneySwapTokenSchema>;

/**
 * Shape of one entry of the Uniswap default token list.
 * `extensions.bridgeInfo` maps a destination chain ID (stringified, e.g. "137")
 * to the token's bridged address on that chain; `getSupportedAssets` uses it
 * to resolve assets for chains the token is not natively listed on.
 */
export type UniswapToken = {
  chainId: number;
  address: string;
  name: string;
  symbol: string;
  decimals: number;
  logoURI: string;
  extensions: {
    bridgeInfo: {
      [key: string]: {
        tokenAddress: string;
      };
    };
  };
};

/**
 * Normalized asset entry returned by `getSupportedAssets`: the token-list
 * fields reduced to what the client needs, with `address`/`chainId` already
 * resolved to the requested chain.
 */
export type Asset = {
  name: string;
  chainId: number;
  symbol: string;
  logoURI: string;
  address: string;
};
106 changes: 106 additions & 0 deletions packages/agents/chain-abstraction/test/helpers/asset.spec.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
import { expect, mkAddress } from "@connext/nxtp-utils";
import { stub, SinonStub, reset, restore } from "sinon";
import { getSupportedAssets } from "../../src/helpers/asset";
import * as MockableFns from "../../src/mockable";
// Fix: the exported type is `Asset` (the lowercase `asset` does not exist in
// ../../src/types, so the original file failed to type-check).
import { Asset } from "../../src/types";

// Two Uniswap-token-list entries: mainnet-native tokens with `bridgeInfo`
// extensions so the bridged-chain lookup paths (56/137/42161/10) are covered.
const mockUniswapResponse = [
  {
    chainId: 1,
    address: "0x111111111117dC0aa78b770fA6A738034120C302",
    name: "1inch",
    symbol: "1INCH",
    decimals: 18,
    logoURI: "https://assets.coingecko.com/coins/images/13469/thumb/1inch-token.png?1608803028",
    extensions: {
      bridgeInfo: {
        "56": {
          tokenAddress: "0x111111111117dC0aa78b770fA6A738034120C302",
        },
        "137": {
          tokenAddress: "0x9c2C5fd7b07E95EE044DDeba0E97a665F142394f",
        },
        "42161": {
          tokenAddress: "0x6314C31A7a1652cE482cffe247E9CB7c3f4BB9aF",
        },
      },
    },
  },
  {
    chainId: 1,
    address: "0x7Fc66500c84A76Ad7e9c93437bFc5Ac33E2DDaE9",
    name: "Aave",
    symbol: "AAVE",
    decimals: 18,
    logoURI: "https://assets.coingecko.com/coins/images/12645/thumb/AAVE.png?1601374110",
    extensions: {
      bridgeInfo: {
        "10": {
          tokenAddress: "0x76FB31fb4af56892A25e32cFC43De717950c9278",
        },
        "56": {
          tokenAddress: "0xfb6115445Bff7b52FeB98650C87f44907E58f802",
        },
        "137": {
          tokenAddress: "0xD6DF932A45C0f255f85145f286eA0b292B21C90B",
        },
        "42161": {
          tokenAddress: "0xba5DdD1f9d7F570dc94a51479a000E3BCE967196",
        },
      },
    },
  },
];

// One Honeyswap (Gnosis, chainId 100) token-list entry.
const mockHoneySwap = [
  {
    name: "0xMonero from Ethereum",
    address: "0x8c88ea1fd60462ef7004b9e288afcb4680a3c50c",
    symbol: "0xMR",
    chainId: 100,
    decimals: 18,
    logoURI: "https://s2.coinmarketcap.com/static/img/coins/64x64/5668.png",
  },
];

describe("Helpers:asset", () => {
  describe("#getSupportedAssets", () => {
    let axiosGetStub: SinonStub;
    beforeEach(() => {
      // Stub the HTTP layer so no real token-list request is made.
      axiosGetStub = stub(MockableFns, "axiosGet");
    });
    afterEach(() => {
      restore();
      reset();
    });
    it("Should work with getting supported asset", async () => {
      const mockChainID = 1;
      axiosGetStub.resolves({ data: { tokens: mockUniswapResponse } });
      const supportedAsset = (await getSupportedAssets(mockChainID)) as Asset[];
      expect(supportedAsset[0].symbol).to.be.eq("1INCH");
    });
    it("Should work with Polygon asset", async () => {
      const mockChainID = 137;
      axiosGetStub.resolves({ data: { tokens: mockUniswapResponse } });
      const supportedAsset = (await getSupportedAssets(mockChainID)) as Asset[];
      expect(supportedAsset[0].symbol).to.be.eq("1INCH");
    });
    it("Should get null with different chain id", async () => {
      const mockChainID = 45;
      axiosGetStub.resolves({ data: { tokens: mockUniswapResponse } });
      const supportedAsset = (await getSupportedAssets(mockChainID)) as Asset[];
      expect(supportedAsset).to.be.null;
    });
    it("Should work for gnosis with honeyswap api", async () => {
      const mockChainID = 100;
      axiosGetStub.resolves({ data: { tokens: mockHoneySwap } });
      const supportedAsset = (await getSupportedAssets(mockChainID)) as Asset[];
      expect(supportedAsset[0].symbol).to.be.eq("0xMR");
    });
    it("should throw if axioGet fails", async () => {
      const mockChainID = 56;
      axiosGetStub.throws();
      await expect(getSupportedAssets(mockChainID)).to.eventually.be.rejectedWith(Error);
    });
  });
});
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@ import {

import {
NoDestinationDomainForProof,
NoAggregatedRoot,
NoAggregateRootCount,
NoMessageRootIndex,
NoMessageRootCount,
Expand Down Expand Up @@ -65,6 +64,7 @@ export const proveAndProcess = async () => {
.map(async (originDomain) => {
try {
let latestMessageRoot: RootMessage | undefined = undefined;
const targetAggregateRoot: ReceivedAggregateRoot = curDestAggRoots[0];
for (const destAggregateRoot of curDestAggRoots) {
latestMessageRoot = await database.getLatestMessageRoot(originDomain, destAggregateRoot.root);
if (latestMessageRoot) break;
Expand All @@ -85,17 +85,10 @@ export const proveAndProcess = async () => {
throw new NoMessageRootIndex(originDomain, targetMessageRoot);
}

// Get the currentAggregateRoot from on-chain state (or pending, if the validation period
// has elapsed!) to determine which tree snapshot we should be generating the proof from.
const targetAggregateRoot = await database.getAggregateRoot(targetMessageRoot);
if (!targetAggregateRoot) {
throw new NoAggregatedRoot();
}

// Count of leafs in aggregate tree at targetAggregateRoot.
const aggregateRootCount = await database.getAggregateRootCount(targetAggregateRoot);
const aggregateRootCount = await database.getAggregateRootCount(targetAggregateRoot.root);
if (!aggregateRootCount) {
throw new NoAggregateRootCount(targetAggregateRoot);
throw new NoAggregateRootCount(targetAggregateRoot.root);
}
// TODO: Move to per domain storage adapters in context
const spokeStore = new SpokeDBHelper(originDomain, messageRootCount + 1, database);
Expand Down Expand Up @@ -150,7 +143,7 @@ export const proveAndProcess = async () => {
destinationDomain,
targetMessageRoot,
messageRootIndex,
targetAggregateRoot,
targetAggregateRoot.root,
spokeSMT,
hubSMT,
subContext,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,18 +21,18 @@ export const bindSubscriber = async (queueName: string, channel: Broker.Channel)
if (numberOfChild < maxChildCount) {
logger.debug("Trying to pull data from the queue", requestContext, methodContext);
try {
const messages: Message[] = [];
const messages: Broker.GetMessage[] = [];
for (let i = 0; i < batchSize; i++) {
const message = await channel.get(queueName, { noAck: true });
if (message === false) {
const message = await channel.get(queueName, { noAck: false });
if (!message) {
break;
} else {
messages.push(JSON.parse(message.content.toString()) as Message);
messages.push(message);
}
}

if (messages.length > 0) {
await batchExecute(messages);
await batchExecute(messages, channel);
}
} catch (e: unknown) {
logger.error("Error while binding subscriber", requestContext, methodContext, jsonifyError(e as Error));
Expand All @@ -46,14 +46,18 @@ export const bindSubscriber = async (queueName: string, channel: Broker.Channel)
}, waitPeriod);
};

const batchExecute = async (messages: Message[]) => {
const batchExecute = async (brokerMessages: Broker.GetMessage[], channel: Broker.Channel) => {
const {
logger,
config,
adapters: { cache },
} = getContext();
const { requestContext, methodContext } = createLoggingContext(batchExecute.name, undefined, "");
const termSignals: NodeJS.Signals[] = ["SIGTERM", "SIGINT"];
const messages: Message[] = brokerMessages.map(
(brokerMessage) => JSON.parse(brokerMessage.content.toString()) as Message,
);
// messages: Message[]
/// Mark - Executer
// if message.transferId, then call executer with it's type either Fast or Slow
logger.debug("Spawning executer for transfers", requestContext, methodContext, {
Expand Down Expand Up @@ -100,7 +104,8 @@ const batchExecute = async (messages: Message[]) => {
// ACK on success
// Validate transfer is sent to relayer before ACK
await Promise.all(
messages.map(async (message) => {
brokerMessages.map(async (brokerMessage) => {
const message = JSON.parse(brokerMessage.content.toString()) as Message;
const dataCache = message.type === MessageType.ExecuteFast ? cache.auctions : cache.executors;
const status = await dataCache.getExecStatus(message.transferId);
const task = await dataCache.getMetaTxTask(message.transferId);
Expand All @@ -109,11 +114,13 @@ const batchExecute = async (messages: Message[]) => {
transferId: message.transferId,
status,
});
channel.ack(brokerMessage);
} else {
logger.info("Transfer Rejected", requestContext, methodContext, {
transferId: message.transferId,
status,
});
channel.reject(brokerMessage);
}
if (message.type === MessageType.ExecuteFast) {
await cache.auctions.pruneAuctionData(message.transferId);
Expand All @@ -125,10 +132,12 @@ const batchExecute = async (messages: Message[]) => {
);
} else {
await Promise.all(
messages.map(async (message) => {
brokerMessages.map(async (brokerMessage) => {
const message = JSON.parse(brokerMessage.content.toString()) as Message;
if (message.type === MessageType.ExecuteFast) {
await cache.auctions.setExecStatus(message.transferId, ExecStatus.None);
}
channel.reject(brokerMessage);
logger.info("Error executing transfer. Message dropped", requestContext, methodContext, {
transferId: message.transferId,
});
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ export const storeFastPathData = async (bid: Bid, _requestContext: RequestContex

// Enqueue only once to dedup, when the first bid for the transfer is stored.
const execStatus = await cache.auctions.getExecStatusWithTime(transferId);
if (execStatus && execStatus.status === ExecStatus.Sent) {
if (execStatus && execStatus.status !== ExecStatus.None) {
const startTime = Number(execStatus.timestamp);
const elapsed = (getNtpTimeSeconds() - startTime) * 1000;
if (elapsed > config.executionWaitTime) {
Expand All @@ -100,7 +100,7 @@ export const storeFastPathData = async (bid: Bid, _requestContext: RequestContex
elapsed,
waitTime: config.executionWaitTime,
});
status = execStatus.status;
status = execStatus.status as ExecStatus;
}
}
if (status === ExecStatus.None) {
Expand Down