diff --git a/bin/config.ts b/bin/config.ts index 58804d96..b70c8edc 100644 --- a/bin/config.ts +++ b/bin/config.ts @@ -4,5 +4,6 @@ import { TableCapacityConfig } from './stacks/cron-stack'; export const PROD_TABLE_CAPACITY: TableCapacityConfig = { fadeRate: { billingMode: BillingMode.PROVISIONED, readCapacity: 50, writeCapacity: 5 }, + timestamps: { billingMode: BillingMode.PROVISIONED, readCapacity: 50, writeCapacity: 5 }, synthSwitch: { billingMode: BillingMode.PROVISIONED, readCapacity: 2000, writeCapacity: 5 }, }; diff --git a/bin/stacks/cron-stack.ts b/bin/stacks/cron-stack.ts index e59e5933..d703c3f1 100644 --- a/bin/stacks/cron-stack.ts +++ b/bin/stacks/cron-stack.ts @@ -31,6 +31,7 @@ type TableCapacityOptions = { export type TableCapacityConfig = { fadeRate: TableCapacityOptions; synthSwitch: TableCapacityOptions; + timestamps: TableCapacityOptions; }; export interface CronStackProps extends cdk.NestedStackProps { @@ -177,6 +178,19 @@ export class CronStack extends cdk.NestedStack { ...PROD_TABLE_CAPACITY.synthSwitch, }); this.alarmsPerTable(synthSwitchTable, DYNAMO_TABLE_NAME.SYNTHETIC_SWITCH_TABLE, chatbotTopic); + + const fillerCBTimestampsTable = new aws_dynamo.Table(this, `${SERVICE_NAME}FillerCBTimestampsTable`, { + tableName: DYNAMO_TABLE_NAME.FILLER_CB_TIMESTAMPS, + partitionKey: { + name: 'hash', + type: aws_dynamo.AttributeType.STRING, + }, + deletionProtection: true, + pointInTimeRecovery: true, + contributorInsightsEnabled: true, + ...PROD_TABLE_CAPACITY.timestamps, + }); + this.alarmsPerTable(fillerCBTimestampsTable, DYNAMO_TABLE_NAME.FILLER_CB_TIMESTAMPS, chatbotTopic); } private alarmsPerTable(table: aws_dynamo.Table, name: string, chatbotSNSTopic?: ITopic): void { diff --git a/jest-dynamodb-config.js b/jest-dynamodb-config.js index 532872f4..59d08dde 100644 --- a/jest-dynamodb-config.js +++ b/jest-dynamodb-config.js @@ -10,6 +10,16 @@ module.exports = { ], ProvisionedThroughput: { ReadCapacityUnits: 10, WriteCapacityUnits: 10 }, }, + { + 
TableName: 'FillerCBTimestamps', + KeySchema: [ + { AttributeName: 'hash', KeyType: 'HASH' }, + ], + AttributeDefinitions: [ + { AttributeName: 'hash', AttributeType: 'S' }, + ], + ProvisionedThroughput: { ReadCapacityUnits: 10, WriteCapacityUnits: 10 }, + } ], port: 8000, }; diff --git a/lib/constants.ts b/lib/constants.ts index 57bcf5eb..4f146d5d 100644 --- a/lib/constants.ts +++ b/lib/constants.ts @@ -12,6 +12,7 @@ export const BETA_COMPLIANCE_S3_KEY = 'beta.json'; export const DYNAMO_TABLE_NAME = { FADES: 'Fades', SYNTHETIC_SWITCH_TABLE: 'SyntheticSwitchTable', + FILLER_CB_TIMESTAMPS: 'FillerCBTimestamps', }; export const DYNAMO_TABLE_KEY = { @@ -23,4 +24,7 @@ export const DYNAMO_TABLE_KEY = { TRADE_TYPE: 'type', LOWER: 'lower', ENABLED: 'enabled', + BLOCK_UNTIL_TIMESTAMP: 'blockUntilTimestamp', + LAST_POST_TIMESTAMP: 'lastPostTimestamp', + FADED: 'faded', }; diff --git a/lib/cron/fade-rate.ts b/lib/cron/fade-rate.ts index 1ceddbd0..5b8f09c8 100644 --- a/lib/cron/fade-rate.ts +++ b/lib/cron/fade-rate.ts @@ -1,22 +1,23 @@ +import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; +import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb'; import { metricScope, MetricsLogger } from 'aws-embedded-metrics'; import { ScheduledHandler } from 'aws-lambda/trigger/cloudwatch-events'; import { EventBridgeEvent } from 'aws-lambda/trigger/eventbridge'; import Logger from 'bunyan'; -import { - BETA_S3_KEY, - FADE_RATE_BUCKET, - FADE_RATE_S3_KEY, - PRODUCTION_S3_KEY, - WEBHOOK_CONFIG_BUCKET, -} from '../constants'; +import { BETA_S3_KEY, PRODUCTION_S3_KEY, WEBHOOK_CONFIG_BUCKET } from '../constants'; import { CircuitBreakerMetricDimension } from '../entities'; import { checkDefined } from '../preconditions/preconditions'; import { S3WebhookConfigurationProvider } from '../providers'; -import { S3CircuitBreakerConfigurationProvider } from '../providers/circuit-breaker/s3'; -import { FadesRepository, FadesRowType, SharedConfigs } from '../repositories'; +import { 
FadesRepository, FadesRowType, SharedConfigs, TimestampRepoRow } from '../repositories'; +import { TimestampRepository } from '../repositories/timestamp-repository'; import { STAGE } from '../util/stage'; +export type FillerFades = Record<string, number>; +export type FillerTimestamps = Map<string, Omit<TimestampRepoRow, 'hash'>>; + +export const BLOCK_PER_FADE_SECS = 60 * 5; // 5 minutes + export const handler: ScheduledHandler = metricScope((metrics) => async (_event: EventBridgeEvent<string, void>) => { await main(metrics); }); @@ -32,6 +33,16 @@ async function main(metrics: MetricsLogger) { const stage = process.env['stage']; const s3Key = stage === STAGE.BETA ? BETA_S3_KEY : PRODUCTION_S3_KEY; const webhookProvider = new S3WebhookConfigurationProvider(log, `${WEBHOOK_CONFIG_BUCKET}-${stage}-1`, s3Key); + await webhookProvider.fetchEndpoints(); + const documentClient = DynamoDBDocumentClient.from(new DynamoDBClient({}), { + marshallOptions: { + convertEmptyValues: true, + }, + unmarshallOptions: { + wrapNumbers: true, + }, + }); + const timestampDB = TimestampRepository.create(documentClient); const sharedConfig: SharedConfigs = { Database: checkDefined(process.env.REDSHIFT_DATABASE), @@ -40,49 +51,92 @@ async function main(metrics: MetricsLogger) { }; const fadesRepository = FadesRepository.create(sharedConfig); await fadesRepository.createFadesView(); + /* + query redshift for recent orders + | fillerAddress | faded | postTimestamp | + |---- 0x1 ------|---- 0 ----|---- 12222222 ---| + |---- 0x2 ------|---- 1 ----|---- 12345679 --| + |---- 0x1 ------|---- 0 ----|---- 12345678 ---| + */ const result = await fadesRepository.getFades(); if (result) { + const fillerHashes = webhookProvider.fillers(); + const fillerTimestamps = await timestampDB.getFillerTimestampsMap(fillerHashes); const addressToFillerHash = await webhookProvider.addressToFillerHash(); - const fillerFadeRate = calculateFillerFadeRates(result, addressToFillerHash, log); - log.info({ fadeRates: [...fillerFadeRate.entries()] }, 'filler fade rate'); - const 
configProvider = new S3CircuitBreakerConfigurationProvider( - log, - `${FADE_RATE_BUCKET}-prod-1`, - FADE_RATE_S3_KEY + // aggregated # of fades by filler entity (not address) + // | hash | fades | + // |-- foo --|--- 3 ---|` + // |-- bar --|--- 1 ---| + const fillersNewFades = getFillersNewFades(result, addressToFillerHash, fillerTimestamps, log); + + // | hash |lastPostTimestamp|blockUntilTimestamp| + // |---- foo ----|---- 1300000 ----|---- now + fades * block_per_fade ----| + // |---- bar ----|---- 1300000 ----|---- 13500000 ----| + const updatedTimestamps = calculateNewTimestamps( + fillerTimestamps, + fillersNewFades, + Math.floor(Date.now() / 1000), + log ); - await configProvider.putConfigurations(fillerFadeRate, metrics); + log.info({ updatedTimestamps }, 'filler for which to update timestamp'); + await timestampDB.updateTimestampsBatch(updatedTimestamps); } } -// aggregates potentially multiple filler addresses into filler name -// and calculates fade rate for each -export function calculateFillerFadeRates( +/* compute blockUntil timestamp for each filler + blockedUntilTimestamp > current timestamp: skip + lastPostTimestamp < blockedUntilTimestamp < current timestamp: block for # * unit block time from now +*/ +export function calculateNewTimestamps( + fillerTimestamps: FillerTimestamps, + fillersNewFades: FillerFades, + newPostTimestamp: number, + log?: Logger +): [string, number, number][] { + const updatedTimestamps: [string, number, number][] = []; + Object.entries(fillersNewFades).forEach((row) => { + const hash = row[0]; + const fades = row[1]; + if (fillerTimestamps.has(hash) && fillerTimestamps.get(hash)!.blockUntilTimestamp > newPostTimestamp) { + return; + } + if (fades) { + const blockUntilTimestamp = newPostTimestamp + fades * BLOCK_PER_FADE_SECS; + updatedTimestamps.push([hash, newPostTimestamp, blockUntilTimestamp]); + } + }); + log?.info({ updatedTimestamps }, 'updated timestamps'); + return updatedTimestamps; +} + +/* find the number of 
new fades, for each filler entity, from + the last time this cron is run +*/ +export function getFillersNewFades( rows: FadesRowType[], addressToFillerHash: Map<string, string>, + fillerTimestamps: FillerTimestamps, log?: Logger -): Map<string, number> { - const fadeRateMap = new Map<string, number>(); - const fillerToQuotesMap = new Map<string, [number, number]>(); +): FillerFades { + const newFadesMap: FillerFades = {}; // filler hash -> # of new fades rows.forEach((row) => { const fillerAddr = row.fillerAddress.toLowerCase(); const fillerHash = addressToFillerHash.get(fillerAddr); if (!fillerHash) { - log?.info({ addressToFillerHash, fillerAddress: fillerAddr }, 'filler address not found in webhook config'); - } else { - if (!fillerToQuotesMap.has(fillerHash)) { - fillerToQuotesMap.set(fillerHash, [row.fadedQuotes, row.totalQuotes]); + log?.info({ fillerAddr }, 'filler address not found in webhook config'); + } else if ( + (fillerTimestamps.has(fillerHash) && row.postTimestamp > fillerTimestamps.get(fillerHash)!.lastPostTimestamp) || + !fillerTimestamps.has(fillerHash) + ) { + if (!newFadesMap[fillerHash]) { + newFadesMap[fillerHash] = row.faded; } else { - const [fadedQuotes, totalQuotes] = fillerToQuotesMap.get(fillerHash) as [number, number]; - fillerToQuotesMap.set(fillerHash, [fadedQuotes + row.fadedQuotes, totalQuotes + row.totalQuotes]); + newFadesMap[fillerHash] += row.faded; } } }); - - fillerToQuotesMap.forEach((value, key) => { - const fadeRate = value[0] / value[1]; - fadeRateMap.set(key, fadeRate); - }); - return fadeRateMap; + log?.info({ newFadesMap }, '# of new fades by filler'); + return newFadesMap; } diff --git a/lib/providers/circuit-breaker/s3.ts b/lib/providers/circuit-breaker/s3.ts index 00d56385..0fb0179e 100644 --- a/lib/providers/circuit-breaker/s3.ts +++ b/lib/providers/circuit-breaker/s3.ts @@ -3,18 +3,16 @@ import { NodeHttpHandler } from '@smithy/node-http-handler'; import { MetricsLogger, Unit } from 'aws-embedded-metrics'; import Logger from 'bunyan'; +import { CircuitBreakerConfiguration, 
CircuitBreakerConfigurationProvider } from '.'; import { Metric } from '../../entities'; import { checkDefined } from '../../preconditions/preconditions'; -import { CircuitBreakerConfiguration, CircuitBreakerConfigurationProvider } from '.'; export class S3CircuitBreakerConfigurationProvider implements CircuitBreakerConfigurationProvider { private log: Logger; private fillers: CircuitBreakerConfiguration[]; private lastUpdatedTimestamp: number; private client: S3Client; - - // try to refetch endpoints every 5 mins - private static UPDATE_PERIOD_MS = 5 * 60000; + private static UPDATE_PERIOD_MS = 1 * 60000; private static FILL_RATE_THRESHOLD = 0.75; constructor(_log: Logger, private bucket: string, private key: string) { diff --git a/lib/quoters/WebhookQuoter.ts b/lib/quoters/WebhookQuoter.ts index b7c77180..f8f9bb15 100644 --- a/lib/quoters/WebhookQuoter.ts +++ b/lib/quoters/WebhookQuoter.ts @@ -4,6 +4,7 @@ import axios, { AxiosError, AxiosResponse } from 'axios'; import Logger from 'bunyan'; import { v4 as uuidv4 } from 'uuid'; +import { Quoter, QuoterType } from '.'; import { AnalyticsEvent, AnalyticsEventType, @@ -18,7 +19,6 @@ import { FirehoseLogger } from '../providers/analytics'; import { CircuitBreakerConfigurationProvider } from '../providers/circuit-breaker'; import { FillerComplianceConfigurationProvider } from '../providers/compliance'; import { timestampInMstoISOString } from '../util/time'; -import { Quoter, QuoterType } from '.'; // TODO: shorten, maybe take from env config const WEBHOOK_TIMEOUT_MS = 500; diff --git a/lib/repositories/base.ts b/lib/repositories/base.ts index 277246ab..e63714c4 100644 --- a/lib/repositories/base.ts +++ b/lib/repositories/base.ts @@ -28,6 +28,17 @@ export enum TimestampThreshold { TWO_MONTHS = "'2 MONTHS'", } +export type TimestampRepoRow = { + hash: string; + lastPostTimestamp: number; + blockUntilTimestamp: number; +}; + +export type DynamoTimestampRepoRow = Omit<TimestampRepoRow, 'lastPostTimestamp' | 'blockUntilTimestamp'> & { + lastPostTimestamp: string; + 
blockUntilTimestamp: string; +}; + export abstract class BaseRedshiftRepository { constructor(readonly client: RedshiftDataClient, private readonly configs: SharedConfigs) {} @@ -65,3 +76,10 @@ export interface BaseSwitchRepository { putSynthSwitch(trade: SynthSwitchTrade, lower: string, enabled: boolean): Promise<void>; syntheticQuoteForTradeEnabled(trade: SynthSwitchQueryParams): Promise<boolean>; } + +export interface BaseTimestampRepository { + updateTimestampsBatch(toUpdate: [string, number, number?][]): Promise<void>; + getFillerTimestamps(hash: string): Promise<TimestampRepoRow>; + getFillerTimestampsMap(hashes: string[]): Promise<Map<string, Omit<TimestampRepoRow, 'hash'>>>; + getTimestampsBatch(hashes: string[]): Promise<TimestampRepoRow[]>; +} diff --git a/lib/repositories/fades-repository.ts b/lib/repositories/fades-repository.ts index 84987b64..b0ee00d4 100644 --- a/lib/repositories/fades-repository.ts +++ b/lib/repositories/fades-repository.ts @@ -5,8 +5,8 @@ import { BaseRedshiftRepository, SharedConfigs } from './base'; export type FadesRowType = { fillerAddress: string; - totalQuotes: number; - fadedQuotes: number; + faded: number; + postTimestamp: number; }; export class FadesRepository extends BaseRedshiftRepository { @@ -29,13 +29,15 @@ export class FadesRepository extends BaseRedshiftRepository { await this.executeStatement(CREATE_VIEW_SQL, FadesRepository.log, { waitTimeMs: 2_000 }); } + //get latest 20 orders for each filler address, and whether they are faded or not async getFades(): Promise<FadesRowType[] | undefined> { const stmtId = await this.executeStatement(FADE_RATE_SQL, FadesRepository.log, { waitTimeMs: 2_000 }); const response = await this.client.send(new GetStatementResultCommand({ Id: stmtId })); /* result should be in the following format - | rfqFiller | fade_rate | - |---- foo ------|---- 0.05 ------| - |---- bar ------|---- 0.01 ------| + | rfqFiller | faded | postTimestamp | + |---- bar ------|---- 0 ----|---- 12222222 ----| + |---- foo ------|---- 1 ----|---- 12345679 ----| + |---- foo ------|---- 0 ----|---- 12345678 ----| */ const result = 
response.Records; if (!result) { @@ -45,8 +47,8 @@ export class FadesRepository extends BaseRedshiftRepository { const formattedResult = result.map((row) => { const formattedRow: FadesRowType = { fillerAddress: row[0].stringValue as string, - totalQuotes: Number(row[1].longValue as number), - fadedQuotes: Number(row[2].longValue as number), + faded: Number(row[1].longValue as number), + postTimestamp: Number(row[2].longValue as number), }; return formattedRow; }); @@ -56,14 +58,15 @@ export class FadesRepository extends BaseRedshiftRepository { } const CREATE_VIEW_SQL = ` -CREATE OR REPLACE VIEW rfqOrdersTimestamp +CREATE OR REPLACE VIEW latestRfqs AS ( WITH latestOrders AS ( SELECT * FROM ( SELECT *, ROW_NUMBER() OVER (PARTITION BY filler ORDER BY createdat DESC) AS row_num FROM postedorders ) - WHERE row_num <= 30 + WHERE row_num <= 20 AND deadline < EXTRACT(EPOCH FROM GETDATE()) -- exclude orders that can still be filled + LIMIT 1000 ) SELECT latestOrders.chainid as chainId, latestOrders.filler as rfqFiller, latestOrders.startTime as decayStartTime, latestOrders.quoteid, archivedorders.filler as actualFiller, latestOrders.createdat as postTimestamp, archivedorders.txhash as txHash, archivedOrders.fillTimestamp as fillTimestamp, @@ -80,23 +83,17 @@ AND rfqFiller != '0x0000000000000000000000000000000000000000' AND chainId NOT IN (5,8001,420,421613) -- exclude mainnet goerli, polygon goerli, optimism goerli and arbitrum goerli testnets AND postTimestamp >= extract(epoch from (GETDATE() - INTERVAL '168 HOURS')) -- 7 days rolling window -); +) +ORDER BY rfqFiller, postTimestamp DESC +LIMIT 1000 `; const FADE_RATE_SQL = ` -WITH ORDERS_CTE AS ( - SELECT - rfqFiller, - COUNT(*) AS totalQuotes, - SUM(CASE WHEN (decayStartTime < fillTimestamp) THEN 1 ELSE 0 END) AS fadedQuotes - FROM rfqOrdersTimestamp - GROUP BY rfqFiller -) SELECT rfqFiller, - totalQuotes, - fadedQuotes, - fadedQuotes / totalQuotes as fadeRate -FROM ORDERS_CTE -WHERE totalQuotes >= 10; + postTimestamp, 
+ CASE WHEN (decayStartTime < fillTimestamp) THEN 1 ELSE 0 END AS faded +FROM latestRfqs +ORDER BY rfqFiller, postTimestamp DESC +LIMIT 1000 `; diff --git a/lib/repositories/switch-repository.ts b/lib/repositories/switch-repository.ts index 2e9de20f..4b1ecd5e 100644 --- a/lib/repositories/switch-repository.ts +++ b/lib/repositories/switch-repository.ts @@ -61,7 +61,6 @@ export class SwitchRepository implements BaseSwitchRepository { { execute: true, consistent: true } ); - SwitchRepository.log.info({ res: result.Item }, 'get result'); if (result.Item && BigNumber.from(result.Item.lower).lte(amount)) { return result.Item.enabled; } else { @@ -71,7 +70,6 @@ export class SwitchRepository implements BaseSwitchRepository { } public async putSynthSwitch(trade: SynthSwitchTrade, lower: string, enabled: boolean): Promise { - SwitchRepository.log.info({ pk: `${SwitchRepository.getKey(trade)}` }, 'put pk'); await this.switchEntity.put( { [PARTITION_KEY]: `${SwitchRepository.getKey(trade)}`, diff --git a/lib/repositories/timestamp-repository.ts b/lib/repositories/timestamp-repository.ts new file mode 100644 index 00000000..cc6dceb8 --- /dev/null +++ b/lib/repositories/timestamp-repository.ts @@ -0,0 +1,111 @@ +import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb'; +import Logger from 'bunyan'; +import { Entity, Table } from 'dynamodb-toolbox'; + +import { DYNAMO_TABLE_KEY, DYNAMO_TABLE_NAME } from '../constants'; +import { BaseTimestampRepository, DynamoTimestampRepoRow, TimestampRepoRow } from './base'; + +export type BatchGetResponse = { + tableName: string; +}; + +export class TimestampRepository implements BaseTimestampRepository { + static log: Logger; + static PARTITION_KEY = 'hash'; + + static create(documentClient: DynamoDBDocumentClient): BaseTimestampRepository { + this.log = Logger.createLogger({ + name: 'DynamoTimestampRepository', + serializers: Logger.stdSerializers, + }); + delete this.log.fields.pid; + delete this.log.fields.hostname; + + const table 
= new Table({ + name: DYNAMO_TABLE_NAME.FILLER_CB_TIMESTAMPS, + partitionKey: TimestampRepository.PARTITION_KEY, + DocumentClient: documentClient, + }); + + const entity = new Entity({ + name: 'FillerTimestampEntity', + attributes: { + [TimestampRepository.PARTITION_KEY]: { partitionKey: true, type: 'string' }, + [`${DYNAMO_TABLE_KEY.LAST_POST_TIMESTAMP}`]: { type: 'string' }, + [`${DYNAMO_TABLE_KEY.BLOCK_UNTIL_TIMESTAMP}`]: { type: 'string' }, + }, + table: table, + autoExecute: true, + } as const); + + return new TimestampRepository(table, entity); + } + + private constructor( + // eslint-disable-next-line + private readonly table: Table<'Timestamp', 'hash', null>, + private readonly entity: Entity + ) {} + + public async updateTimestampsBatch(updatedTimestamps: [string, number, number?][]): Promise<void> { + await this.table.batchWrite( + updatedTimestamps.map(([hash, lastTs, postTs]) => { + return this.entity.putBatch({ + [TimestampRepository.PARTITION_KEY]: hash, + [`${DYNAMO_TABLE_KEY.LAST_POST_TIMESTAMP}`]: lastTs, + [`${DYNAMO_TABLE_KEY.BLOCK_UNTIL_TIMESTAMP}`]: postTs, + }); + }), + { + execute: true, + } + ); + } + + public async getFillerTimestamps(hash: string): Promise<TimestampRepoRow> { + const { Item } = await this.entity.get( + { hash: hash }, + { + execute: true, + } + ); + return { + hash: Item?.hash, + lastPostTimestamp: parseInt(Item?.lastPostTimestamp), + blockUntilTimestamp: parseInt(Item?.blockUntilTimestamp), + }; + } + + public async getTimestampsBatch(hashes: string[]): Promise<TimestampRepoRow[]> { + const { Responses: items } = await this.table.batchGet( + hashes.map((hash) => { + return this.entity.getBatch({ + [TimestampRepository.PARTITION_KEY]: hash, + }); + }), + { + execute: true, + parse: true, + } + ); + return items[DYNAMO_TABLE_NAME.FILLER_CB_TIMESTAMPS].map((row: DynamoTimestampRepoRow) => { + return { + hash: row.hash, + lastPostTimestamp: parseInt(row.lastPostTimestamp), + blockUntilTimestamp: parseInt(row.blockUntilTimestamp), + }; + }); + } + + public async 
getFillerTimestampsMap(hashes: string[]): Promise<Map<string, Omit<TimestampRepoRow, 'hash'>>> { + const rows = await this.getTimestampsBatch(hashes); + const res = new Map<string, Omit<TimestampRepoRow, 'hash'>>(); + rows.forEach((row) => { + res.set(row.hash, { + lastPostTimestamp: row.lastPostTimestamp, + blockUntilTimestamp: row.blockUntilTimestamp, + }); + }); + return res; + } +} diff --git a/test/crons/fade-rate.test.ts b/test/crons/fade-rate.test.ts index 730fd135..6198a973 100644 --- a/test/crons/fade-rate.test.ts +++ b/test/crons/fade-rate.test.ts @@ -1,18 +1,51 @@ import Logger from 'bunyan'; -import { calculateFillerFadeRates } from '../../lib/cron/fade-rate'; +import { + BLOCK_PER_FADE_SECS, + calculateNewTimestamps, + FillerFades, + FillerTimestamps, + getFillersNewFades, +} from '../../lib/cron/fade-rate'; import { FadesRowType } from '../../lib/repositories'; +const now = Math.floor(Date.now() / 1000); + const FADES_ROWS: FadesRowType[] = [ - { fillerAddress: '0x1', totalQuotes: 50, fadedQuotes: 10 }, - { fillerAddress: '0x2', totalQuotes: 50, fadedQuotes: 20 }, - { fillerAddress: '0x3', totalQuotes: 100, fadedQuotes: 5 }, + // filler1 + { fillerAddress: '0x1', faded: 1, postTimestamp: now - 100 }, + { fillerAddress: '0x1', faded: 0, postTimestamp: now - 90 }, + { fillerAddress: '0x1', faded: 1, postTimestamp: now - 80 }, + { fillerAddress: '0x2', faded: 1, postTimestamp: now - 80 }, + // filler2 + { fillerAddress: '0x3', faded: 1, postTimestamp: now - 70 }, + { fillerAddress: '0x3', faded: 1, postTimestamp: now - 100 }, + // filler3 + { fillerAddress: '0x4', faded: 1, postTimestamp: now - 100 }, + // filler4 + { fillerAddress: '0x5', faded: 0, postTimestamp: now - 100 }, + // filler5 + { fillerAddress: '0x6', faded: 0, postTimestamp: now - 100 }, + // filler6 + { fillerAddress: '0x7', faded: 1, postTimestamp: now - 100 }, ]; const ADDRESS_TO_FILLER = new Map([ ['0x1', 'filler1'], ['0x2', 'filler1'], ['0x3', 'filler2'], + ['0x4', 'filler3'], + ['0x5', 'filler4'], + ['0x6', 'filler5'], + ['0x7', 'filler6'], +]); + +const 
FILLER_TIMESTAMPS: FillerTimestamps = new Map([ + ['filler1', { lastPostTimestamp: now - 150, blockUntilTimestamp: NaN }], + ['filler2', { lastPostTimestamp: now - 75, blockUntilTimestamp: now - 50 }], + ['filler3', { lastPostTimestamp: now - 101, blockUntilTimestamp: now + 1000 }], + ['filler4', { lastPostTimestamp: now - 150, blockUntilTimestamp: NaN }], + ['filler5', { lastPostTimestamp: now - 150, blockUntilTimestamp: now + 100 }], ]); // silent logger in tests @@ -20,14 +53,51 @@ const logger = Logger.createLogger({ name: 'test' }); logger.level(Logger.FATAL); describe('FadeRateCron test', () => { - describe('calculateFillerFadeRates', () => { + let newFades: FillerFades; + beforeAll(() => { + newFades = getFillersNewFades(FADES_ROWS, ADDRESS_TO_FILLER, FILLER_TIMESTAMPS, logger); + }); + + describe('getFillersNewFades', () => { it('takes into account multiple filler addresses of the same filler', () => { - expect(calculateFillerFadeRates(FADES_ROWS, ADDRESS_TO_FILLER, logger)).toEqual( - new Map([ - ['filler1', 0.3], - ['filler2', 0.05], + expect(newFades).toEqual({ + filler1: 3, // count all fades in FADES_ROWS + filler2: 1, // only count postTimestamp == now - 70 + filler3: 1, + filler4: 0, + filler5: 0, + filler6: 1, + }); + }); + }); + + describe('calculateNewTimestamps', () => { + let newTimestamps: [string, number, number][]; + + beforeAll(() => { + newTimestamps = calculateNewTimestamps(FILLER_TIMESTAMPS, newFades, now, logger); + }); + + it('calculates blockUntilTimestamp for each filler', () => { + expect(newTimestamps).toEqual( + expect.arrayContaining([ + ['filler1', now, now + BLOCK_PER_FADE_SECS * 3], + ['filler2', now, now + BLOCK_PER_FADE_SECS * 1], ]) ); }); + + it('notices new fillers not already in fillerTimestamps', () => { + expect(newTimestamps).toEqual(expect.arrayContaining([['filler6', now, now + BLOCK_PER_FADE_SECS * 1]])); + }); + + it('keep old blockUntilTimestamp if no new fades', () => { + 
expect(newTimestamps).not.toContain([['filler5', expect.anything(), expect.anything()]]); + expect(newTimestamps).not.toContain([['filler4', expect.anything(), expect.anything()]]); + }); + + it('ignores fillers with blockUntilTimestamp > current timestamp', () => { + expect(newTimestamps).not.toContain([['filler3', expect.anything(), expect.anything()]]); + }); }); }); diff --git a/test/repositories/shared.ts b/test/repositories/shared.ts new file mode 100644 index 00000000..3d736119 --- /dev/null +++ b/test/repositories/shared.ts @@ -0,0 +1,10 @@ +import { DynamoDBClientConfig } from '@aws-sdk/client-dynamodb'; + +export const DYNAMO_CONFIG: DynamoDBClientConfig = { + endpoint: 'http://localhost:8000', + region: 'local', + credentials: { + accessKeyId: 'fakeMyKeyId', + secretAccessKey: 'fakeSecretAccessKey', + }, +}; diff --git a/test/repositories/switch-repository.test.ts b/test/repositories/switch-repository.test.ts index b0882a6f..b53034aa 100644 --- a/test/repositories/switch-repository.test.ts +++ b/test/repositories/switch-repository.test.ts @@ -1,19 +1,11 @@ /* eslint-disable @typescript-eslint/ban-ts-comment */ -import { DynamoDBClient, DynamoDBClientConfig } from '@aws-sdk/client-dynamodb'; +import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb'; import { SynthSwitchQueryParams } from '../../lib/handlers/synth-switch'; import { SwitchRepository } from '../../lib/repositories/switch-repository'; - -const dynamoConfig: DynamoDBClientConfig = { - endpoint: 'http://localhost:8000', - region: 'local', - credentials: { - accessKeyId: 'fakeMyKeyId', - secretAccessKey: 'fakeSecretAccessKey', - }, -}; +import { DYNAMO_CONFIG } from './shared'; const SWITCH: SynthSwitchQueryParams = { tokenIn: 'USDC', @@ -33,7 +25,7 @@ const NONEXISTENT_SWITCH: SynthSwitchQueryParams = { type: 'EXACT_OUTPUT', }; -const documentClient = DynamoDBDocumentClient.from(new DynamoDBClient(dynamoConfig), { +const 
documentClient = DynamoDBDocumentClient.from(new DynamoDBClient(DYNAMO_CONFIG), { marshallOptions: { convertEmptyValues: true, }, @@ -62,7 +54,7 @@ describe('put switch tests', () => { const enabled = await switchRepository.syntheticQuoteForTradeEnabled(SWITCH); expect(enabled).toBe(false); - }) + }); it('should return false for non-existent switch', async () => { await expect(switchRepository.syntheticQuoteForTradeEnabled(NONEXISTENT_SWITCH)).resolves.toBe(false); @@ -72,12 +64,12 @@ describe('put switch tests', () => { describe('static helper function tests', () => { it('correctly serializes key from trade', () => { expect(SwitchRepository.getKey(SWITCH)).toBe('usdc#1#uni#1#EXACT_INPUT'); - }) + }); it('should throw error for invalid key on parse', () => { expect(() => { // missing type SwitchRepository.parseKey('token0#1#token1#1'); }).toThrowError('Invalid key: token0#1#token1#1'); - }) -}) + }); +}); diff --git a/test/repositories/timestamp-repository.test.ts b/test/repositories/timestamp-repository.test.ts new file mode 100644 index 00000000..ef4fe06a --- /dev/null +++ b/test/repositories/timestamp-repository.test.ts @@ -0,0 +1,65 @@ +import { DynamoDBClient } from '@aws-sdk/client-dynamodb'; +import { DynamoDBDocumentClient } from '@aws-sdk/lib-dynamodb'; + +import { TimestampRepository } from '../../lib/repositories/timestamp-repository'; +import { DYNAMO_CONFIG } from './shared'; + +const documentClient = DynamoDBDocumentClient.from(new DynamoDBClient(DYNAMO_CONFIG), { + marshallOptions: { + convertEmptyValues: true, + }, + unmarshallOptions: { + wrapNumbers: true, + }, +}); + +const repo = TimestampRepository.create(documentClient); + +describe('Dynamo TimestampRepo tests', () => { + it('should batch put timestamps', async () => { + const toUpdate: [string, number, number?][] = [ + ['0x1', 1], + ['0x2', 2, 5], + ['0x3', 3, 6], + ]; + await expect(repo.updateTimestampsBatch(toUpdate)).resolves.not.toThrow(); + let row = await 
repo.getFillerTimestamps('0x1'); + expect(row).toBeDefined(); + expect(row?.lastPostTimestamp).toBe(1); + expect(row?.blockUntilTimestamp).toBe(NaN); + + row = await repo.getFillerTimestamps('0x2'); + expect(row).toBeDefined(); + expect(row?.lastPostTimestamp).toBe(2); + expect(row?.blockUntilTimestamp).toBe(5); + + row = await repo.getFillerTimestamps('0x3'); + expect(row).toBeDefined(); + expect(row?.lastPostTimestamp).toBe(3); + expect(row?.blockUntilTimestamp).toBe(6); + }); + + it('should batch get timestamps', async () => { + const res = await repo.getTimestampsBatch(['0x1', '0x2', '0x3']); + expect(res.length).toBe(3); + expect(res).toEqual( + expect.arrayContaining([ + { + hash: '0x1', + lastPostTimestamp: 1, + blockUntilTimestamp: NaN, + }, + { + hash: '0x2', + lastPostTimestamp: 2, + blockUntilTimestamp: 5, + }, + { + hash: '0x3', + lastPostTimestamp: 3, + blockUntilTimestamp: 6, + }, + ]) + ); + }); +});