From 3b288f0593c606eaa1d0d2dc5e7d2a97ebaaf90f Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Wed, 11 Dec 2024 15:45:51 +0700
Subject: [PATCH 1/3] chore: bal addresses needs updated bal tools

---
 tools/python/requirements.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tools/python/requirements.txt b/tools/python/requirements.txt
index eb23cae94..51ede4b8e 100644
--- a/tools/python/requirements.txt
+++ b/tools/python/requirements.txt
@@ -5,7 +5,7 @@ pandas
 tabulate
 requests
 web3
-git+https://github.com/BalancerMaxis/bal_addresses@main
+git+https://github.com/BalancerMaxis/bal_addresses@dev/use-bal_tools-dev-branch
 dune-client
 pytest
 dataclasses-json

From 9bb84530b2a1a9249da5c1583aa2b77365fc8e3e Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Wed, 11 Dec 2024 15:46:23 +0700
Subject: [PATCH 2/3] feat: query v3 subgraph for user balances

---
 tools/python/gen_morpho_airdrop.py | 43 ++++++++++++++++++++++++++++++
 1 file changed, 43 insertions(+)
 create mode 100644 tools/python/gen_morpho_airdrop.py

diff --git a/tools/python/gen_morpho_airdrop.py b/tools/python/gen_morpho_airdrop.py
new file mode 100644
index 000000000..a58d212e0
--- /dev/null
+++ b/tools/python/gen_morpho_airdrop.py
@@ -0,0 +1,43 @@
+from bal_tools import Subgraph
+
+
+SUBGRAPH = Subgraph()
+
+
+def get_user_shares(pool, block):
+    query = """query PoolShares($where: PoolShare_filter, $block: Block_height) {
+        poolShares(where: $where, block: $block) {
+            user {
+                id
+            }
+            balance
+        }
+    }"""
+    params = {
+        "where": {
+            "balance_gt": 0.001,
+            "pool": pool,
+        },
+        "block": {"number": block},
+    }
+    raw = SUBGRAPH.fetch_graphql_data(
+        "subgraphs-v3",
+        query,
+        params,
+        url="https://api.studio.thegraph.com/query/75376/balancer-v3/version/latest",
+    )
+    return dict([(x["user"]["id"], x["balance"]) for x in raw["poolShares"]])
+
+
+def build_airdrop():
+    # https://docs.merkl.xyz/merkl-mechanisms/types-of-campaign/airdrop
+    pass
+
+
+if __name__ == "__main__":
+    # https://etherscan.io/token/0x89bb794097234e5e930446c0cec0ea66b35d7570#balances
+    print(
+        get_user_shares(
+            pool="0x89bb794097234e5e930446c0cec0ea66b35d7570", block=21378029
+        )
+    )

From fb6e70a099804fe99e301f820eec78e4ae02a87d Mon Sep 17 00:00:00 2001
From: Gosuto Inzasheru
Date: Tue, 17 Dec 2024 09:47:04 +0100
Subject: [PATCH 3/3] feat: consolidate multiple snapshots into an airdrop file

---
 tools/python/gen_morpho_airdrop.py | 70 +++++++++++++++++++++++++++---
 1 file changed, 63 insertions(+), 7 deletions(-)

diff --git a/tools/python/gen_morpho_airdrop.py b/tools/python/gen_morpho_airdrop.py
index a58d212e0..53e3e8f83 100644
--- a/tools/python/gen_morpho_airdrop.py
+++ b/tools/python/gen_morpho_airdrop.py
@@ -1,7 +1,13 @@
+import json
+
+import numpy as np
+import pandas as pd
+
 from bal_tools import Subgraph
 
 
 SUBGRAPH = Subgraph()
+MORPHO = "0x58D97B57BB95320F9a05dC918Aef65434969c2B2"
 
 
 def get_user_shares(pool, block):
@@ -29,15 +35,65 @@
     return dict([(x["user"]["id"], x["balance"]) for x in raw["poolShares"]])
 
 
-def build_airdrop():
+def get_block_from_timestamp(ts):
+    query = """query GetBlockFromTimestamp($where: Block_filter) {
+        blocks(orderBy: "number", orderDirection: "desc", where: $where) {
+            number
+            timestamp
+        }
+    }"""
+    params = {"where": {"timestamp_lte": ts}}
+    raw = SUBGRAPH.fetch_graphql_data(
+        "blocks",
+        query,
+        params,
+        url="https://api.studio.thegraph.com/query/48427/ethereum-blocks/version/latest",
+    )
+    return int(raw["blocks"][0]["number"])
+
+
+def build_snapshot_df(
+    pool,  # pool address
+    end,  # timestamp of the last snapshot
+    n=7,  # amount of snapshots
+    step_size=60 * 60 * 24,  # amount of seconds between snapshots
+):
+    shares = {}
+    for _ in range(n):
+        block = get_block_from_timestamp(end)
+        shares[block] = get_user_shares(pool=pool, block=block)
+        end -= step_size
+    return pd.DataFrame(shares, dtype=float).fillna(0)
+
+
+def consolidate_shares(df):
+    consolidated = pd.DataFrame()
+    for block in df.columns:
+        # calculate the percentage of the pool each user owns
+        consolidated[block] = df[block] / df[block].sum()
+        # weigh it by the total pool size of that block
+        consolidated[block] *= df.sum()[block]
+    # sum the weighted percentages per user
+    consolidated["total"] = consolidated.sum(axis=1)
+    # divide the weighted percentages by the sum of all weights
+    consolidated["total"] = consolidated["total"] / df.sum().sum()
+    return consolidated
+
+
+def build_airdrop(reward_token, reward_total_wei, df):
     # https://docs.merkl.xyz/merkl-mechanisms/types-of-campaign/airdrop
-    pass
+    df["wei"] = df["total"] * reward_total_wei
+    df["wei"] = df["wei"].apply(np.floor).astype(int).astype(str)
+    return {"rewardToken": reward_token, "rewards": df[["wei"]].to_dict(orient="index")}
 
 
 if __name__ == "__main__":
-    # https://etherscan.io/token/0x89bb794097234e5e930446c0cec0ea66b35d7570#balances
-    print(
-        get_user_shares(
-            pool="0x89bb794097234e5e930446c0cec0ea66b35d7570", block=21378029
-        )
+    # get bpt balances for a pool at different timestamps
+    df = build_snapshot_df(
+        pool="0x89bb794097234e5e930446c0cec0ea66b35d7570", end=1734393600
     )
+    # consolidate user pool shares
+    df = consolidate_shares(df)
+    # build airdrop object and dump to json file
+    airdrop = build_airdrop(reward_token=MORPHO, reward_total_wei=1e18, df=df)
+    json.dump(airdrop, open("airdrop.json", "w"), indent=2)
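
For reference, a minimal sanity check on the generated file (a sketch, not part of the patches above; it only assumes the airdrop.json layout produced by build_airdrop() and the 1e18 wei total passed in __main__):

    import json

    # same total as passed to build_airdrop() in __main__ above
    REWARD_TOTAL_WEI = 1e18

    with open("airdrop.json") as f:
        airdrop = json.load(f)

    # per-user amounts are floored, so their sum must not exceed the intended total
    distributed = sum(int(entry["wei"]) for entry in airdrop["rewards"].values())
    assert distributed <= REWARD_TOTAL_WEI, "distributed wei exceeds the reward total"
    print(airdrop["rewardToken"], distributed, len(airdrop["rewards"]))

Because of the flooring, the distributed sum will typically land a few wei short of the total rather than match it exactly.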