From 41bf4904df6cc58c156d2bfe86774b42bedf6adf Mon Sep 17 00:00:00 2001 From: Leszek Wiesner Date: Thu, 7 Nov 2024 09:20:42 +0100 Subject: [PATCH] Fix CRT queries (#348) * Fix CRT queries * GitHub workflows docker compose fix * More docker compose CI fixes --- .github/workflows/checks.yml | 4 +- .github/workflows/tests.yml | 4 +- CHANGELOG.md | 10 ++ Makefile | 8 +- ...313757-Views.js => 1730895049782-Views.js} | 4 +- db/viewDefinitions.js | 148 +++++++----------- docker-compose.yml | 15 +- package.json | 7 +- src/auth-server/tests/run.sh | 6 +- src/mail-scheduler/tests/run-tests.sh | 8 +- .../resolvers/CreatorToken/index.ts | 74 +++------ .../resolvers/CreatorToken/types.ts | 7 + .../resolvers/CreatorToken/utils.ts | 107 +++++++++++++ src/tests/integration/run.sh | 6 +- 14 files changed, 234 insertions(+), 174 deletions(-) rename db/migrations/{1721141313757-Views.js => 1730895049782-Views.js} (94%) create mode 100644 src/server-extension/resolvers/CreatorToken/utils.ts diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index 8fe254df6..2f0a8bdc6 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -51,10 +51,10 @@ jobs: - name: create joystream_default network run: docker network create joystream_default - name: Start db - run: docker-compose up -d orion_db + run: docker compose up -d orion_db - name: Wait for db run: | - while ! docker-compose exec -T orion_db pg_isready -U postgres -p 23798; do + while ! docker compose exec -T orion_db pg_isready -U postgres -p 23798; do sleep 1 done - name: Run migrations diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index f9fbb2250..5eb51ddb6 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -16,10 +16,10 @@ jobs: - name: Prepare workspace run: make prepare - name: Start db - run: docker-compose up -d orion_db + run: docker compose up -d orion_db - name: Wait for db run: | - while ! docker-compose exec -T orion_db pg_isready -U postgres -p 23798; do + while ! docker compose exec -T orion_db pg_isready -U postgres -p 23798; do sleep 1 done - name: Run migrations diff --git a/CHANGELOG.md b/CHANGELOG.md index 65c831704..766756393 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,13 @@ +# 4.1.0 + +## Misc +- `tokensWithPriceChange`, `marketplaceTokens` and `topSellingToken` queries now only take the currently active AMM sale into account when calculating values such as volume / liquidity / price change. +- added `minVolume` argument to `tokensWithPriceChange` query to allow filtering out tokens w/ negligible volume +- updated docker setup to support the latest docker version + +## Bug Fixes: +- `tokensWithPriceChange` and `marketplaceTokens` queries returned incorrect price changes when no transactions were made in the given period. Now at least one transaction in the provided period is required to calculate price change. 
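A minimal sketch (not part of this patch) of how a client might use the new `minVolume` argument described above; the query shape follows the schema additions in this PR, but the exact field names, endpoint and example values are assumptions:

    // Hypothetical client-side query sketch. `minVolume` is a BigInteger
    // argument, so it is passed as a string here; adjust to the client in use.
    const tokensWithPriceChangeQuery = `
      query {
        tokensWithPriceChange(periodDays: 30, minVolume: "1000000") {
          pricePercentageChange
          creatorToken { id symbol }
        }
      }
    `

Tokens whose AMM volume in the requested period falls below `minVolume` are simply excluded from the result set rather than returned with a zero price change.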
+ # 4.0.6 ## Bug Fixes: diff --git a/Makefile b/Makefile index 79a230505..34382be07 100644 --- a/Makefile +++ b/Makefile @@ -59,19 +59,19 @@ prepare: install typegen codegen build up-squid: @docker network create joystream_default || true - @docker-compose up -d + @docker compose up -d up-archive: @docker network create joystream_default || true - @docker-compose -f archive/docker-compose.yml up -d + @docker compose -f archive/docker-compose.yml up -d up: up-archive up-squid down-squid: - @docker-compose down -v + @docker compose down -v down-archive: - @docker-compose -f archive/docker-compose.yml down -v + @docker compose -f archive/docker-compose.yml down -v down: down-squid down-archive diff --git a/db/migrations/1721141313757-Views.js b/db/migrations/1730895049782-Views.js similarity index 94% rename from db/migrations/1721141313757-Views.js rename to db/migrations/1730895049782-Views.js index dac2d967e..11251134d 100644 --- a/db/migrations/1721141313757-Views.js +++ b/db/migrations/1730895049782-Views.js @@ -1,8 +1,8 @@ const { getViewDefinitions } = require('../viewDefinitions') -module.exports = class Views1721141313757 { - name = 'Views1721141313757' +module.exports = class Views1730895049782 { + name = 'Views1730895049782' async up(db) { // these two queries will be invoked and the cleaned up by the squid itself diff --git a/db/viewDefinitions.js b/db/viewDefinitions.js index ae80eca44..8dfd4e7aa 100644 --- a/db/viewDefinitions.js +++ b/db/viewDefinitions.js @@ -1,3 +1,5 @@ +const { withPriceChange } = require('../lib/server-extension/resolvers/CreatorToken/utils') + const noCategoryVideosSupportedByDefault = process.env.SUPPORT_NO_CATEGORY_VIDEOS === 'true' || process.env.SUPPORT_NO_CATEGORY_VIDEOS === '1' @@ -90,101 +92,57 @@ function getViewDefinitions(db) { // TODO (notifications v2): make this part of the admin schema with appropriate resolver for queries // notification: ['FALSE'], marketplace_token: ` - WITH trading_volumes AS - (SELECT ac.token_id, - SUM(tr.price_paid) as amm_volume - FROM amm_transaction tr - JOIN amm_curve ac ON ac.id = tr.amm_id - GROUP BY token_id), - - base_price_transaction AS ( - WITH oldest_transactions AS ( - SELECT DISTINCT ON (ac.token_id) - tr.amm_id, - ac.token_id, - tr.price_per_unit AS oldest_price_paid, - tr.created_in - FROM amm_transaction tr - JOIN amm_curve ac ON tr.amm_id = ac.id - WHERE tr.created_in < (SELECT height FROM squid_processor.status) - ${ - BLOCKS_PER_DAY * 30 - } - ORDER BY ac.token_id, tr.created_in DESC - ), - fallback_transactions AS ( - SELECT DISTINCT ON (ac.token_id) - tr.amm_id, - ac.token_id, - tr.price_per_unit AS oldest_price_paid, - tr.created_in - FROM amm_transaction tr - JOIN amm_curve ac ON tr.amm_id = ac.id - WHERE tr.created_in > (SELECT height FROM squid_processor.status) - ${ - BLOCKS_PER_DAY * 30 - } - ORDER BY ac.token_id, tr.created_in ASC - ) - SELECT * FROM oldest_transactions - UNION ALL - SELECT * FROM fallback_transactions - WHERE NOT EXISTS (SELECT 1 FROM oldest_transactions) - ) - - SELECT - COALESCE(ac.total_liq, 0) as liquidity, - COALESCE((ct.last_price * ct.total_supply), 0) as market_cap, - c.cumulative_revenue, - c.id as channel_id, - COALESCE(tv.amm_volume, 0) as amm_volume, - CASE - WHEN ldt_o.oldest_price_paid = 0 - OR ldt_o.oldest_price_paid IS NULL THEN 0 - ELSE ((ct.last_price - ldt_o.oldest_price_paid) * 100.0 / ldt_o.oldest_price_paid) - END AS price_change, - CASE - WHEN liq_until.quantity IS NULL THEN 0 - ELSE ((ac.total_liq - liq_until.quantity) * 100 / 
GREATEST(liq_until.quantity, 1)) - END as liquidity_change, - ct.* - FROM creator_token ct - LEFT JOIN token_channel tc ON tc.token_id = ct.id - LEFT JOIN channel c ON c.id = tc.channel_id - LEFT JOIN base_price_transaction ldt_o ON ldt_o.token_id = ct.id - LEFT JOIN - - (SELECT token_id, - SUM(CASE - WHEN transaction_type = 'BUY' THEN quantity - ELSE quantity * -1 - END) AS total_liq - FROM - (SELECT ac.token_id, - tr.transaction_type, - tr.quantity - FROM amm_transaction tr - JOIN amm_curve ac ON tr.amm_id = ac.id) as tr - GROUP BY token_id) as ac ON ac.token_id = ct.id - - LEFT JOIN - - (SELECT token_id, - SUM(CASE - WHEN transaction_type = 'BUY' THEN quantity - ELSE quantity * -1 - END) AS quantity - FROM - (SELECT ac.token_id, - tr.transaction_type, - tr.quantity - FROM amm_transaction tr - JOIN amm_curve ac ON tr.amm_id = ac.id - WHERE tr.created_in < - (SELECT height - FROM squid_processor.status) - ${BLOCKS_PER_DAY * 30}) as tr - GROUP BY token_id) as liq_until ON liq_until.token_id = ct.id - - LEFT JOIN trading_volumes tv ON tv.token_id = ct.id - `, + WITH + last_block AS ( + SELECT height FROM squid_processor.status + ), + tokens_with_stats AS ( + SELECT + ac.token_id, + SUM(CASE + WHEN ( + transaction_type = 'BUY' + AND tr.created_in < last_block.height - ${BLOCKS_PER_DAY * 30} + ) THEN quantity + WHEN ( + transaction_type = 'SELL' + AND tr.created_in < last_block.height - ${BLOCKS_PER_DAY * 30} + ) THEN quantity * -1 + ELSE 0 + END) AS total_liquidity_30d_ago, + SUM (CASE + WHEN transaction_type = 'BUY' THEN quantity + ELSE quantity * -1 + END) AS total_liquidity, + SUM(tr.price_paid) as amm_volume + FROM amm_transaction tr + JOIN amm_curve ac ON ac.id = tr.amm_id + JOIN creator_token ct ON ct.current_amm_sale_id = ac.id + JOIN last_block ON 1=1 + GROUP BY token_id + ), + ${withPriceChange({ periodDays: 30, currentBlock: 'last_block' })} + SELECT + COALESCE(tws.total_liquidity, 0) as liquidity, + (ct.last_price * ct.total_supply) as market_cap, + c.cumulative_revenue, + c.id as channel_id, + COALESCE(tws.amm_volume, 0) as amm_volume, + COALESCE(twpc.percentage_change, 0) price_change, + CASE + WHEN (tws.total_liquidity_30d_ago IS NULL OR tws.total_liquidity_30d_ago = 0) THEN 0 + ELSE ( + (tws.total_liquidity - tws.total_liquidity_30d_ago) + * 100 + / tws.total_liquidity_30d_ago + ) + END as liquidity_change, + ct.* + FROM creator_token ct + LEFT JOIN token_channel tc ON tc.token_id = ct.id + LEFT JOIN channel c ON c.id = tc.channel_id + LEFT JOIN tokens_with_price_change twpc ON twpc.token_id = ct.id + LEFT JOIN tokens_with_stats tws ON tws.token_id = ct.id`, } } diff --git a/docker-compose.yml b/docker-compose.yml index 64d2d4e2a..ac0dd15eb 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,5 +1,3 @@ -version: '3' - services: orion_db: container_name: orion_db @@ -9,6 +7,8 @@ services: environment: POSTGRES_DB: squid POSTGRES_PASSWORD: squid + networks: + - joystream_default ports: - '127.0.0.1:${DB_PORT}:${DB_PORT}' - '[::1]:${DB_PORT}:${DB_PORT}' @@ -23,6 +23,8 @@ services: hostname: orion_processor image: node:18 restart: unless-stopped + networks: + - joystream_default env_file: - .env - docker.env @@ -43,6 +45,8 @@ services: hostname: orion_graphql-server image: node:18 restart: unless-stopped + networks: + - joystream_default env_file: - .env - docker.env @@ -65,6 +69,8 @@ services: hostname: orion_auth-api image: node:18 restart: unless-stopped + networks: + - joystream_default env_file: - .env - docker.env @@ -86,6 +92,5 @@ volumes: orion_db_data: networks: 
- default: - external: - name: joystream_default + joystream_default: + external: true diff --git a/package.json b/package.json index b70db4004..7d453b353 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "orion", - "version": "4.0.6", + "version": "4.1.0", "engines": { "node": ">=16" }, @@ -142,5 +142,8 @@ }, "workspaces": [ "network-tests" - ] + ], + "volta": { + "node": "16.20.2" + } } diff --git a/src/auth-server/tests/run.sh b/src/auth-server/tests/run.sh index 8bdb3fbf8..1543fc4bf 100755 --- a/src/auth-server/tests/run.sh +++ b/src/auth-server/tests/run.sh @@ -6,9 +6,9 @@ cd $SCRIPT_PATH/../../.. if ! [[ "$(docker container inspect -f '{{.State.Running}}' orion_db)" = "true" ]]; then docker network create joystream_default || true - docker-compose up -d orion_db - until docker-compose logs orion_db | grep "database system is ready to accept connections"; do - docker-compose logs --tail 10 orion_db + docker compose up -d orion_db + until docker compose logs orion_db | grep "database system is ready to accept connections"; do + docker compose logs --tail 10 orion_db echo "Waiting for the db to be ready..." sleep 1 done diff --git a/src/mail-scheduler/tests/run-tests.sh b/src/mail-scheduler/tests/run-tests.sh index 9172c7601..737e176f7 100755 --- a/src/mail-scheduler/tests/run-tests.sh +++ b/src/mail-scheduler/tests/run-tests.sh @@ -6,9 +6,9 @@ cd $SCRIPT_PATH/../../.. if ! [[ "$(docker container inspect -f '{{.State.Running}}' orion_db)" = "true" ]]; then docker network create joystream_default || true - docker-compose up -d orion_db - docker-compose up -d orion_processor # We need the processor to run the migrations - until docker-compose logs orion_db | grep "database system is ready to accept connections" >/dev/null; do + docker compose up -d orion_db + docker compose up -d orion_processor # We need the processor to run the migrations + until docker compose logs orion_db | grep "database system is ready to accept connections" >/dev/null; do sleep 1 done fi @@ -16,7 +16,7 @@ fi sleep 10 cleanup() { - docker-compose down -v + docker compose down -v } # Run the tests diff --git a/src/server-extension/resolvers/CreatorToken/index.ts b/src/server-extension/resolvers/CreatorToken/index.ts index 43eed2c62..eb2efffb2 100644 --- a/src/server-extension/resolvers/CreatorToken/index.ts +++ b/src/server-extension/resolvers/CreatorToken/index.ts @@ -25,6 +25,7 @@ import { MarketplaceTokensReturnType, TopSellingTokensReturnType, } from './types' +import { withPriceChange } from './utils' export const BLOCKS_PER_DAY = 10 * 60 * 24 // 10 blocs per minute, 60 mins * 24 hours @@ -80,17 +81,19 @@ export class TokenResolver { const topTokensCtes = ` WITH tokens_volumes AS ( - SELECT - ac.token_id, - SUM(tr.price_paid) as ammVolume - FROM - amm_transaction tr - JOIN - amm_curve ac ON ac.id = tr.amm_id - WHERE - tr.created_in >= ${lastProcessedBlock - args.periodDays * BLOCKS_PER_DAY} - GROUP BY - token_id + SELECT + ac.token_id, + SUM(tr.price_paid) as ammVolume + FROM + amm_transaction tr + JOIN + amm_curve ac ON ac.id = tr.amm_id + JOIN + creator_token cr ON cr.current_amm_sale_id = ac.id + WHERE + tr.created_in >= ${lastProcessedBlock - args.periodDays * BLOCKS_PER_DAY} + GROUP BY + token_id ) ` @@ -169,44 +172,11 @@ WITH tokens_volumes AS ( const tokenSubTree = tree.fieldsByTypeName.MarketplaceTokensReturnType.creatorToken const tokenFields = parseAnyTree(model, 'CreatorToken', info.schema, tokenSubTree) - const topTokensCtes = ` -WITH oldest_transactions_before AS - (SELECT 
DISTINCT ON (ac.token_id) tr.amm_id, - ac.token_id, - tr.price_per_unit as oldest_price_paid, - tr.created_in - FROM amm_transaction tr - JOIN amm_curve ac ON tr.amm_id = ac.id - WHERE tr.created_in < ${lastProcessedBlock - args.periodDays * BLOCKS_PER_DAY} - ORDER BY ac.token_id, - tr.created_in DESC), - - oldest_transactions_after AS - (SELECT DISTINCT ON (ac.token_id) tr.amm_id, - ac.token_id, - tr.price_per_unit as oldest_price_paid, - tr.created_in - FROM amm_transaction tr - JOIN amm_curve ac ON tr.amm_id = ac.id - WHERE tr.created_in > ${lastProcessedBlock - args.periodDays * BLOCKS_PER_DAY} - ORDER BY ac.token_id, - tr.created_in ASC), - - - price_changes AS - (SELECT ct.id, - ot.oldest_price_paid, - ct.symbol, - ct.last_price, -CASE - WHEN ot.oldest_price_paid = 0 AND ota.oldest_price_paid = 0 THEN 0 - WHEN ot.oldest_price_paid = 0 THEN ((ct.last_price - ota.oldest_price_paid) * 100.0 / ota.oldest_price_paid) - ELSE ((ct.last_price - ot.oldest_price_paid) * 100.0 / ot.oldest_price_paid) -END AS percentage_change - FROM creator_token ct - LEFT JOIN oldest_transactions_before as ot ON ot.token_id = ct.id - LEFT JOIN oldest_transactions_after as ota ON ota.token_id = ct.id) -` + const topTokensCtes = `WITH ${withPriceChange({ + currentBlock: lastProcessedBlock, + periodDays: args.periodDays, + minVolume: args.minVolume, + })}` const listQuery = new ListQuery( model, @@ -221,13 +191,13 @@ END AS percentage_change listQuerySql = extendClause( listQuerySql, 'SELECT', - `COALESCE(pc.percentage_change, 0) as pricePercentageChange` + `COALESCE(twpc.percentage_change, 0) as pricePercentageChange` ) listQuerySql = extendClause( listQuerySql, 'FROM', - 'LEFT JOIN price_changes pc ON creator_token.id = pc.id', + 'JOIN tokens_with_price_change twpc ON creator_token.id = twpc.token_id', '' ) @@ -235,7 +205,7 @@ END AS percentage_change listQuerySql = extendClause( listQuerySql, 'ORDER BY', - `COALESCE(pc.percentage_change, 0) ${args.orderByPriceDesc ? 'DESC' : 'ASC'}`, + `COALESCE(twpc.percentage_change, 0) ${args.orderByPriceDesc ? 'DESC' : 'ASC'}`, '' ) } diff --git a/src/server-extension/resolvers/CreatorToken/types.ts b/src/server-extension/resolvers/CreatorToken/types.ts index 990f79c10..fdf867b2d 100644 --- a/src/server-extension/resolvers/CreatorToken/types.ts +++ b/src/server-extension/resolvers/CreatorToken/types.ts @@ -5,6 +5,7 @@ import { MarketplaceTokenWhereInput, TokenWhereInput, } from '../baseTypes' +import { BigInteger } from '@subsquid/graphql-server' @ArgsType() export class GetShareDividensArgs { @@ -74,6 +75,12 @@ export class MarketplaceTokensArgs { }) periodDays: number + @Field(() => BigInteger, { + nullable: true, + description: 'Minimum required transaction volume in the period', + }) + minVolume: string | null + @Field(() => Int, { nullable: true, }) diff --git a/src/server-extension/resolvers/CreatorToken/utils.ts b/src/server-extension/resolvers/CreatorToken/utils.ts new file mode 100644 index 000000000..fc76ae307 --- /dev/null +++ b/src/server-extension/resolvers/CreatorToken/utils.ts @@ -0,0 +1,107 @@ +import { BLOCKS_PER_DAY } from '.' 
+ +type WithPriceChangeParams = { + periodDays: number + currentBlock: number | string + minVolume?: string | null +} +/** + * Provides a list of `WITH` statements for a query that needs to access + * CRT price change in the last {periodDays} days + * + * Definitions: + * current_tx - latest tx made DURING the provided period in the currently active amm sale + * starting_tx - latest tx made BEFORE the start of the provided period in the currently active amm sale + * fallback_tx - oldest tx made in the currently active amm sale + * price_change = 100 * (current_tx ? (current_tx - starting_tx || fallback_tx) / (starting_tx || fallback_tx) : 0) + * + * @param periodDays How many days in the past the period of interest begins + * @param currentBlock Blocknumber to treat as current block. + * If string, it is treated as JOIN table with `height` column. + * @param minVolume If provided - ignore tokens with volume lower than {minVolume} in the period of interest. + * + * @returns List of WITH statements to add to a query + */ +export const withPriceChange = ({ periodDays, currentBlock, minVolume }: WithPriceChangeParams) => { + const currentBlockJoinStmt = + typeof currentBlock === 'string' ? `JOIN ${currentBlock} AS current_block ON 1=1` : '' + if (currentBlockJoinStmt) { + currentBlock = 'current_block.height' + } + return ` +toknes_with_current_tx AS ( + SELECT DISTINCT ON (amm.token_id) + amm.token_id, + tx.amm_id, + tx.price_per_unit AS current_price + FROM amm_transaction tx + JOIN amm_curve amm ON tx.amm_id = amm.id + JOIN creator_token ct ON amm.id = ct.current_amm_sale_id + ${currentBlockJoinStmt} + WHERE + tx.created_in > (${currentBlock} - ${periodDays * BLOCKS_PER_DAY}) + ORDER BY amm.token_id, tx.created_in DESC +), +tokens_with_fallback_tx AS ( + SELECT DISTINCT ON (amm.token_id) + amm.token_id, + tx.id, + tx.price_per_unit AS fallback_tx_price + FROM amm_transaction tx + JOIN amm_curve amm ON tx.amm_id = amm.id + JOIN creator_token ct ON amm.id = ct.current_amm_sale_id + ORDER BY amm.token_id, tx.created_in ASC +), +tokens_with_starting_tx AS ( + SELECT DISTINCT ON (amm.token_id) + amm.token_id, + tx.price_per_unit AS starting_price + FROM amm_transaction tx + JOIN amm_curve amm ON tx.amm_id = amm.id + JOIN creator_token ct ON amm.id = ct.current_amm_sale_id + ${currentBlockJoinStmt} + WHERE + tx.created_in <= (${currentBlock} - ${periodDays * BLOCKS_PER_DAY}) + ORDER BY amm.token_id, tx.created_in DESC +), +${ + minVolume + ? ` +tokens_with_period_volumes AS ( + SELECT + amm.token_id, + SUM(tx.price_paid) as period_volume + FROM amm_transaction tx + JOIN amm_curve amm ON tx.amm_id = amm.id + JOIN creator_token ct ON amm.id = ct.current_amm_sale_id + ${currentBlockJoinStmt} + WHERE tx.created_in >= (${currentBlock} - ${periodDays * BLOCKS_PER_DAY}) + GROUP BY amm.token_id + ORDER BY amm.token_id +),` + : '' +} +tokens_with_price_change AS ( + SELECT + t.id AS token_id, + CASE + WHEN twct.current_price IS NULL THEN 0 + ELSE ( + (twct.current_price - COALESCE(twst.starting_price, twft.fallback_tx_price)) + * 100.0 + / COALESCE(twst.starting_price, twft.fallback_tx_price) + ) + END as percentage_change + FROM creator_token t + LEFT JOIN toknes_with_current_tx twct ON twct.token_id = t.id + LEFT JOIN tokens_with_starting_tx twst ON twst.token_id = t.id + JOIN tokens_with_fallback_tx twft ON twft.token_id = t.id + ${ + minVolume + ? 
` + JOIN tokens_with_period_volumes twpv ON twpv.token_id = t.id + WHERE twpv.period_volume >= ${minVolume}` + : '' + } +)` +} diff --git a/src/tests/integration/run.sh b/src/tests/integration/run.sh index e4f949aa9..986b6aa5a 100755 --- a/src/tests/integration/run.sh +++ b/src/tests/integration/run.sh @@ -6,9 +6,9 @@ cd $SCRIPT_PATH/../../.. if ! [[ "$(docker container inspect -f '{{.State.Running}}' orion_db)" = "true" ]]; then docker network create joystream_default || true - docker-compose up -d orion_db & + docker compose up -d orion_db & 2>/dev/null - until docker-compose logs orion_db | grep "database system is ready to accept connections" >/dev/null; do + until docker compose logs orion_db | grep "database system is ready to accept connections" >/dev/null; do echo "Waiting for the db to be ready..." sleep 1 done @@ -16,7 +16,7 @@ if ! [[ "$(docker container inspect -f '{{.State.Running}}' orion_db)" = "true" fi cleanup() { - docker-compose down -v + docker compose down -v } # Run the tests
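A minimal sketch (not part of this patch) of how the new `withPriceChange` helper composes into a raw query, following the `WITH ${'${withPriceChange(...)}'}` pattern used by the resolver above; the surrounding function, the EntityManager wiring and the import path are assumptions for illustration only:

    import { EntityManager } from 'typeorm'
    import { withPriceChange } from './utils'

    // Hypothetical consumer of the helper. A 30-day window translates to
    // 30 * BLOCKS_PER_DAY = 30 * 14,400 = 432,000 blocks.
    async function tokenPriceChanges(em: EntityManager, lastProcessedBlock: number) {
      // With a numeric `currentBlock` the block height is interpolated directly;
      // passing a table name instead (as the marketplace_token view does with
      // `last_block`) makes the generated CTEs join on that table's `height` column.
      const sql = `
        WITH ${withPriceChange({ periodDays: 30, currentBlock: lastProcessedBlock })}
        SELECT token_id, percentage_change
        FROM tokens_with_price_change
      `
      // percentage_change is 100 * (current - base) / base, where base is the last
      // price before the period started (starting_tx) or, failing that, the oldest
      // price in the current AMM sale (fallback_tx); e.g. base 4, current 5 => +25.
      return em.query(sql)
    }

Because every CTE joins through `creator_token.current_amm_sale_id`, only transactions from the currently active AMM sale contribute to the result, which is the behaviour change described in the 4.1.0 changelog entry.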