From af9ea714e7860b4591747f188bcfe1c8d366498a Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Thu, 12 Oct 2023 13:06:51 -0700 Subject: [PATCH 01/13] feat: data requirements thoughts --- docs/REQUIREMENTS.md | 76 +++++ docs/deprecated/README.md | 26 ++ docs/deprecated/dune/OUSD Apy.md | 90 ++++++ .../dune/OUSD Curve LP Gauge Deposit.md | 76 +++++ docs/deprecated/dune/OUSD Total Supply.md | 71 ++++ ...e & median (only accounts over 10 OUSD).md | 173 ++++++++++ docs/deprecated/dune/OUSD average & median.md | 173 ++++++++++ docs/deprecated/dune/OUSD buybacks.md | 99 ++++++ docs/deprecated/dune/OUSD holder breakdown.md | 126 ++++++++ ...eries - breakdown by balance - detailed.md | 163 ++++++++++ ...rs - time series - breakdown by balance.md | 152 +++++++++ ...- breakdown by wallet number - detailed.md | 158 +++++++++ ...ime series - breakdown by wallet number.md | 157 +++++++++ .../deprecated/dune/Total supply breakdown.md | 153 +++++++++ graphql.config.yml | 16 +- package.json | 1 + schema-base.graphql | 17 + schema-oeth.graphql | 162 ++++++++++ schema-ogv.graphql | 35 ++ schema-ousd.graphql | 188 +++++++++++ schema.graphql | 303 +++++++++++++++--- .../{_historyType.ts => _oethHistoryType.ts} | 2 +- ...basingOption.ts => _oethRebasingOption.ts} | 2 +- src/model/generated/_ousdHistoryType.ts | 6 + src/model/generated/_ousdRebasingOption.ts | 4 + .../generated/convexLusdPlus3Crv.model.ts | 29 ++ src/model/generated/exchangeRate.model.ts | 5 + src/model/generated/index.ts | 47 ++- src/model/generated/makerDsrStrategy.model.ts | 23 ++ src/model/generated/oeth.model.ts | 3 - ...{address.model.ts => oethAddress.model.ts} | 26 +- ... => oethBalancerMetaPoolStrategy.model.ts} | 4 +- ...{curveLp.model.ts => oethCurveLp.model.ts} | 4 +- ...king.model.ts => oethFraxStaking.model.ts} | 4 +- ...{history.model.ts => oethHistory.model.ts} | 14 +- ...oAave.model.ts => oethMorphoAave.model.ts} | 4 +- .../{rebase.model.ts => oethRebase.model.ts} | 10 +- ...ion.model.ts => oethRebaseOption.model.ts} | 14 +- .../{vault.model.ts => oethVault.model.ts} | 4 +- .../{apy.model.ts => oethapy.model.ts} | 4 +- src/model/generated/ogv.model.ts | 26 ++ src/model/generated/ogvGovernance.model.ts | 28 ++ src/model/generated/ousd.model.ts | 32 ++ src/model/generated/ousdAaveStrategy.model.ts | 29 ++ src/model/generated/ousdAddress.model.ts | 38 +++ .../generated/ousdCompoundStrategy.model.ts | 29 ++ .../generated/ousdConvexStrategy.model.ts | 29 ++ src/model/generated/ousdFluxStrategy.model.ts | 29 ++ src/model/generated/ousdHistory.model.ts | 41 +++ src/model/generated/ousdMetaStrategy.model.ts | 29 ++ src/model/generated/ousdMorphoAave.model.ts | 29 ++ .../generated/ousdMorphoCompound.model.ts | 29 ++ src/model/generated/ousdRebase.model.ts | 47 +++ src/model/generated/ousdRebaseOption.model.ts | 35 ++ src/model/generated/ousdVault.model.ts | 32 ++ src/model/generated/ousdapy.model.ts | 45 +++ src/model/generated/stakedOgv.model.ts | 26 ++ 57 files changed, 3059 insertions(+), 118 deletions(-) create mode 100644 docs/REQUIREMENTS.md create mode 100644 docs/deprecated/README.md create mode 100644 docs/deprecated/dune/OUSD Apy.md create mode 100644 docs/deprecated/dune/OUSD Curve LP Gauge Deposit.md create mode 100644 docs/deprecated/dune/OUSD Total Supply.md create mode 100644 docs/deprecated/dune/OUSD average & median (only accounts over 10 OUSD).md create mode 100644 docs/deprecated/dune/OUSD average & median.md create mode 100644 docs/deprecated/dune/OUSD buybacks.md create mode 100644 docs/deprecated/dune/OUSD holder 
breakdown.md create mode 100644 docs/deprecated/dune/Token holders - time series - breakdown by balance - detailed.md create mode 100644 docs/deprecated/dune/Token holders - time series - breakdown by balance.md create mode 100644 docs/deprecated/dune/Token holders - time series - breakdown by wallet number - detailed.md create mode 100644 docs/deprecated/dune/Token holders - time series - breakdown by wallet number.md create mode 100644 docs/deprecated/dune/Total supply breakdown.md create mode 100644 schema-base.graphql create mode 100644 schema-oeth.graphql create mode 100644 schema-ogv.graphql create mode 100644 schema-ousd.graphql rename src/model/generated/{_historyType.ts => _oethHistoryType.ts} (74%) rename src/model/generated/{_rebasingOption.ts => _oethRebasingOption.ts} (58%) create mode 100644 src/model/generated/_ousdHistoryType.ts create mode 100644 src/model/generated/_ousdRebasingOption.ts create mode 100644 src/model/generated/convexLusdPlus3Crv.model.ts create mode 100644 src/model/generated/makerDsrStrategy.model.ts rename src/model/generated/{address.model.ts => oethAddress.model.ts} (60%) rename src/model/generated/{balancerMetaPoolStrategy.model.ts => oethBalancerMetaPoolStrategy.model.ts} (86%) rename src/model/generated/{curveLp.model.ts => oethCurveLp.model.ts} (93%) rename src/model/generated/{fraxStaking.model.ts => oethFraxStaking.model.ts} (88%) rename src/model/generated/{history.model.ts => oethHistory.model.ts} (75%) rename src/model/generated/{morphoAave.model.ts => oethMorphoAave.model.ts} (86%) rename src/model/generated/{rebase.model.ts => oethRebase.model.ts} (86%) rename src/model/generated/{rebaseOption.model.ts => oethRebaseOption.model.ts} (67%) rename src/model/generated/{vault.model.ts => oethVault.model.ts} (92%) rename src/model/generated/{apy.model.ts => oethapy.model.ts} (94%) create mode 100644 src/model/generated/ogv.model.ts create mode 100644 src/model/generated/ogvGovernance.model.ts create mode 100644 src/model/generated/ousd.model.ts create mode 100644 src/model/generated/ousdAaveStrategy.model.ts create mode 100644 src/model/generated/ousdAddress.model.ts create mode 100644 src/model/generated/ousdCompoundStrategy.model.ts create mode 100644 src/model/generated/ousdConvexStrategy.model.ts create mode 100644 src/model/generated/ousdFluxStrategy.model.ts create mode 100644 src/model/generated/ousdHistory.model.ts create mode 100644 src/model/generated/ousdMetaStrategy.model.ts create mode 100644 src/model/generated/ousdMorphoAave.model.ts create mode 100644 src/model/generated/ousdMorphoCompound.model.ts create mode 100644 src/model/generated/ousdRebase.model.ts create mode 100644 src/model/generated/ousdRebaseOption.model.ts create mode 100644 src/model/generated/ousdVault.model.ts create mode 100644 src/model/generated/ousdapy.model.ts create mode 100644 src/model/generated/stakedOgv.model.ts diff --git a/docs/REQUIREMENTS.md b/docs/REQUIREMENTS.md new file mode 100644 index 00000000..cffa25b4 --- /dev/null +++ b/docs/REQUIREMENTS.md @@ -0,0 +1,76 @@ +# Requirements + +Catalog of data requirements. + +## oeth.com + +### ousd.com/ogv-dashboard + +[schema-ogv.graphql](..%2Fschema-ogv.graphql) + +- OGV Price (5m?) +- OGV Market Cap (5m?) 
+- OGV Circulating Supply
+- OGV Total Supply
+- OGV Staking APY
+- OGV Amount Staked & Percentage
+- OGV Registered Voters
+- OGV Open-source contributors
+- OGV Improvement proposals
+
+## oeth.com/analytics
+
+[schema-oeth.graphql](..%2Fschema-oeth.graphql)
+
+- APY (as 7/14/30 day MA)
+- Total Supply
+  - OETH
+  - Protocol Owned
+  - Circulating Supply
+- Daily Protocol Revenue (as 7/14/30 day MA)
+  - ETH Value
+  - 7-day trailing average (ETH)
+  - Yield Distributed (ETH)
+  - Fees Collected (ETH)
+- Current Collateral
+  - Frax ETH (frxETH)
+  - ETH
+  - Rocket Pool ETH (rETH)
+  - Lido Staked ETH (stETH)
+  - Wrapped Ether (WETH)
+- ETH_OETH Rate
+- Exchange Rates
+- Collateral Distribution
+  - Convex OETH
+  - FraxETH
+  - Origin Vault
+  - Aura OETH
+  - Morpho AAVE
+- Revenue Sums
+  - 24H
+  - 7D
+  - All-time
+- Balance Sheet
+  - Vault (WETH, stETH, rETH, frxETH)
+  - Convex (ETH, OETH)
+  - Frax Staking (sfrxETH)
+  - Morpho Aave (WETH)
+  - Aura (rETH, WETH)
+  - Dripper (WETH)
+  - Total Supply (OETH)
+- Strategies
+  - Convex Finance
+    - Convex ETH+OETH
+  - Frax
+    - Staked Frax Ether (sfrxETH)
+  - Rocket Pool
+    - Rocket Pool Ether (rETH)
+  - Lido
+    - Lido Staked Ether (stETH)
+  - Unallocated
+    - Shows WETH and frxETH as of this writing.
+- Dripper
+  - Funds held by dripper
+  - Available for collection
+  - Drip rate (1d, 1h, 1m)
+
diff --git a/docs/deprecated/README.md b/docs/deprecated/README.md
new file mode 100644
index 00000000..3375f89b
--- /dev/null
+++ b/docs/deprecated/README.md
@@ -0,0 +1,26 @@
+# Deprecated Queries (?)
+
+Started cataloguing these before I realized we don't link to them anymore.
+
+## Dune Queries
+
+Sources: [dune_analytics.html](https://github.com/OriginProtocol/ousd-analytics/blob/7173bd2a1e179a3826851c8fdaeb866a48f5f5b2/eagleproject/core/templates/dune_analytics.html)
+
+### General Stats
+
+- [OUSD Apy.md](dune%2FOUSD%20Apy.md)
+- [Total supply breakdown.md](dune%2FTotal%20supply%20breakdown.md)
+- [OUSD Total Supply.md](dune%2FOUSD%20Total%20Supply.md)
+- [OUSD buybacks.md](dune%2FOUSD%20buybacks.md)
+
+### OUSD Holders
+
+- [OUSD average & median.md](dune%2FOUSD%20average%20%26%20median.md)
+- [OUSD average & median (only accounts over 10 OUSD).md](dune%2FOUSD%20average%20%26%20median%20%28only%20accounts%20over%2010%20OUSD%29.md)
+- [OUSD Curve LP Gauge Deposit.md](dune%2FOUSD%20Curve%20LP%20Gauge%20Deposit.md)
+- [OUSD holder breakdown.md](dune%2FOUSD%20holder%20breakdown.md)
+- [Token holders - time series - breakdown by balance.md](dune%2FToken%20holders%20-%20time%20series%20-%20breakdown%20by%20balance.md)
+- [Token holders - time series - breakdown by balance - detailed.md](dune%2FToken%20holders%20-%20time%20series%20-%20breakdown%20by%20balance%20-%20detailed.md)
+- [Token holders - time series - breakdown by wallet number.md](dune%2FToken%20holders%20-%20time%20series%20-%20breakdown%20by%20wallet%20number.md)
+- [Token holders - time series - breakdown by wallet number - detailed.md](dune%2FToken%20holders%20-%20time%20series%20-%20breakdown%20by%20wallet%20number%20-%20detailed.md)
+
diff --git a/docs/deprecated/dune/OUSD Apy.md b/docs/deprecated/dune/OUSD Apy.md
new file mode 100644
index 00000000..aab42b2e
--- /dev/null
+++ b/docs/deprecated/dune/OUSD Apy.md
@@ -0,0 +1,90 @@
+# OUSD Apy
+
+https://dune.xyz/embeds/271634/519050/34dc346b-e90f-4c4b-86ab-1107fe8d6a4b
+https://dune.com/queries/271634/519050
+
+```sql
+DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT);
+CREATE OR REPLACE FUNCTION
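+-- Helper redeclared at the top of most queries in this folder: it pulls the `topic`-th 32-byte word
+-- out of a log's raw `data` payload and divides by 10^decimals to turn it into a plain number.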
dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +-- BLOCKS_PER_DAY = 6400 +WITH rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + WHERE block_time > now() - interval '100 days' + GROUP BY 1 + ORDER BY block_number DESC +) + +SELECT +block_date, +apy, +AVG(apy) OVER (ORDER BY block_date ASC ROWS BETWEEN 29 PRECEDING AND CURRENT ROW) as _30_day_apy, +AVG(apy) OVER (ORDER BY block_date ASC ROWS BETWEEN 13 PRECEDING AND CURRENT ROW) as _14_day_apy, +AVG(apy) OVER (ORDER BY block_date ASC ROWS BETWEEN 6 PRECEDING AND CURRENT ROW) as _7_day_apy +FROM ( + SELECT + block_date, + -- Convert APR TO APY + ((1 + apr / 100 / 365.25) ^ (365.25 - 1)) * 100 - 100 as apy + FROM ( + SELECT + -- Calculate APR + (rebasing_credits_per_token_ratio - 1) * 100 * 6400 * 365.25 / block_number_diff as apr, + * + FROM + ( + -- This part deviates from the way analytics.ousd.com gets its daily APR. What we do here is pick the last TotalSuppluUpdatedHighres + -- event of each day and calculate block number difference and rebasing_credits_per_token_ratio between 2 neighbouring events. 
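+          -- Rough worked example with made-up numbers: if rebasing_credits_per_token fell from
+          -- 0.8400 to 0.8395 across block_number_diff = 6400 blocks (~1 day at BLOCKS_PER_DAY = 6400),
+          -- the apr above is (0.8400 / 0.8395 - 1) * 100 * 6400 * 365.25 / 6400 ≈ 21.8%,
+          -- before the APR-to-APY conversion step.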
+ -- + -- A better way would be to do it as we do it in analytics.ousd.com where end of each day block number is used and the rebasingCreditsPerToken + -- at that block time + SELECT + block_number - lag(block_number) over (order by block_number) as block_number_diff, + lag(rebasing_credits_per_token) over (order by block_number) / rebasing_credits_per_token as rebasing_credits_per_token_ratio, + * + FROM rebase_logs as t + JOIN ( + SELECT + date_trunc('day', block_time) as block_date, + MAX(block_number) as max_block_number + FROM rebase_logs + GROUP BY 1 + ORDER BY 1 DESC + ) t1 ON t.block_number = t1.max_block_number + ORDER BY block_number DESC + ) AS at + ) as at2 +) DATA; +``` \ No newline at end of file diff --git a/docs/deprecated/dune/OUSD Curve LP Gauge Deposit.md b/docs/deprecated/dune/OUSD Curve LP Gauge Deposit.md new file mode 100644 index 00000000..8869d4b7 --- /dev/null +++ b/docs/deprecated/dune/OUSD Curve LP Gauge Deposit.md @@ -0,0 +1,76 @@ +# OUSD Curve LP Gauge Deposit + +https://dune.xyz/embeds/278734/528075/c2b8b10f-9a32-495b-a5e1-bccbae2a860a +https://dune.com/queries/285485/539422 + +```sql +WITH transfers AS ( + SELECT evt_block_number as block_number, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +curve_lp_transfers AS ( + SELECT evt_block_time, + "to" AS address, + ROUND(value / 1e18) as amount + FROM erc20."ERC20_evt_Transfer" tr + WHERE contract_address='\x25f0cE4E2F8dbA112D9b115710AC297F816087CD' + AND "to" != '\x0000000000000000000000000000000000000000' + + UNION ALL + + SELECT evt_block_time, + "from" AS address, + ROUND(-value / 1e18) as amount + FROM erc20."ERC20_evt_Transfer" tr + WHERE contract_address='\x25f0cE4E2F8dbA112D9b115710AC297F816087CD' + AND "from" != '\x0000000000000000000000000000000000000000' +), + +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('2021-11-15'::date, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + ORDER BY day desc +), + +token_balances as ( + SELECT d.day, + t.address as wallet_address, + SUM(amount) as balance + FROM curve_lp_transfers t + INNER JOIN day_hops_3 d + ON t.evt_block_time < d.day + GROUP BY 1, 2 + ORDER BY 1 desc +) + + +select day, +ROUND(sum(balance) filter (WHERE wallet_address = '\x989aeb4d175e16225e39e87d0d97a3360524ad80')) AS "Convex", +ROUND(sum(balance) filter (WHERE wallet_address != '\x989aeb4d175e16225e39e87d0d97a3360524ad80')) AS "Other", +ROUND(sum(balance)) as amount, +count(distinct wallet_address) as holders +FROM token_balances +GROUP BY 1; +``` \ No newline at end of file diff --git a/docs/deprecated/dune/OUSD Total Supply.md b/docs/deprecated/dune/OUSD Total Supply.md new file mode 100644 index 00000000..19416138 --- /dev/null +++ b/docs/deprecated/dune/OUSD Total Supply.md @@ -0,0 +1,71 @@ +# OUSD Total Supply + +https://dune.com/embeds/278146/524806/476159a6-b7bb-4cf7-96e2-e74f99170b4a +https://dune.com/queries/278146/524806 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION 
dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + WHERE block_time > now() - interval '100 days' + GROUP BY 1 + ORDER BY block_number DESC +) + +SELECT +block_date, +total_supply, +ROUND(AVG(total_supply) OVER (ORDER BY block_date ASC ROWS BETWEEN 29 PRECEDING AND CURRENT ROW)) as _30_day_total_supply, +ROUND(AVG(total_supply) OVER (ORDER BY block_date ASC ROWS BETWEEN 13 PRECEDING AND CURRENT ROW)) as _14_day_total_supply, +ROUND(AVG(total_supply) OVER (ORDER BY block_date ASC ROWS BETWEEN 6 PRECEDING AND CURRENT ROW)) as _7_day_total_supply +FROM ( + SELECT + block_date, + total_supply + FROM rebase_logs as t + JOIN ( + SELECT + date_trunc('day', block_time) as block_date, + MAX(block_number) as max_block_number + FROM rebase_logs + GROUP BY 1 + ORDER BY 1 DESC + ) t1 ON t.block_number = t1.max_block_number + ORDER BY block_number DESC +) DATA +ORDER BY block_date DESC +``` \ No newline at end of file diff --git a/docs/deprecated/dune/OUSD average & median (only accounts over 10 OUSD).md b/docs/deprecated/dune/OUSD average & median (only accounts over 10 OUSD).md new file mode 100644 index 00000000..502ddf23 --- /dev/null +++ b/docs/deprecated/dune/OUSD average & median (only accounts over 10 OUSD).md @@ -0,0 +1,173 @@ +# OUSD average & median (only accounts over 10 OUSD) + +https://dune.xyz/embeds/283166/534275/1d88ccea-eec7-4e8f-9a86-3bc47b605053 +https://dune.com/queries/283166/534275 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE 
evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a lot of transactions) +-- SELECT SUM(amount / 1e18), SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + ORDER BY day desc +), + +latest_rebases as ( + SELECT + max_d.day as day, + r.* + FROM ( + SELECT d.day, MAX(r.block_time) as max_block_time + FROM day_hops_3 d + LEFT JOIN rebase_logs r + ON r.block_time < d.day + GROUP BY 1 + ) max_d INNER JOIN rebase_logs r + ON max_d.max_block_time = r.block_time +), + +token_balances as ( + SELECT + b.day, + wallet_address, + credits_sum / rebasing_credits_per_token as balance + FROM latest_rebases r + INNER JOIN ( + SELECT d.day, t.address as wallet_address, 
SUM(t.credits / 1e18) as credits_sum + FROM transfer_with_credits_per_token t + INNER JOIN day_hops_3 d + ON t.block_time < d.day + WHERE t.address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' -- Curve Metapool + AND t.address != '\x129360c964e2e13910d603043f6287e5e9383374' -- Uniswap + GROUP BY 1, 2 + HAVING SUM(t.credits / 1e18) > 0 + ) b + ON r.day = b.day + ORDER BY 1 desc +), + +token_balances_over_10 AS ( + SELECT * + FROM token_balances + WHERE balance > 10 +) + +SELECT + s.day, + s.average, + s.median +-- s1.average_over_100, +-- s1.median_over_100 +FROM +( + select day, + ROUND(avg(balance) filter (WHERE balance > 0)) as average, + ROUND(PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY balance)) as median + FROM token_balances_over_10 + GROUP BY 1 +) s +-- INNER JOIN +-- ( +-- select day, +-- ROUND(avg(balance) filter (WHERE balance > 0)) as average_over_100, +-- ROUND(PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY balance)) as median_over_100 +-- FROM token_balances_over_100 +-- GROUP BY 1 +-- ) s1 +-- ON s.day = s1.day +ORDER BY day DESC +``` \ No newline at end of file diff --git a/docs/deprecated/dune/OUSD average & median.md b/docs/deprecated/dune/OUSD average & median.md new file mode 100644 index 00000000..0ef050ac --- /dev/null +++ b/docs/deprecated/dune/OUSD average & median.md @@ -0,0 +1,173 @@ +# OUSD average & median + +https://dune.xyz/embeds/282755/533459/735067b4-9f41-4f4b-9ef1-6b80a1fd9f12 +https://dune.com/queries/282755/533459 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND 
topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a lot of transactions) +-- SELECT SUM(amount / 1e18), SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + ORDER BY day desc +), + +latest_rebases as ( + SELECT + max_d.day as day, + r.* + FROM ( + SELECT d.day, MAX(r.block_time) as max_block_time + FROM day_hops_3 d + LEFT JOIN rebase_logs r + ON r.block_time < d.day + GROUP BY 1 + ) max_d INNER JOIN rebase_logs r + ON max_d.max_block_time = r.block_time +), + +token_balances as ( + SELECT + b.day, + wallet_address, + credits_sum / rebasing_credits_per_token as balance + FROM latest_rebases r + INNER JOIN ( + SELECT d.day, t.address as wallet_address, SUM(t.credits / 1e18) as credits_sum + FROM transfer_with_credits_per_token t + INNER JOIN day_hops_3 d + ON t.block_time < d.day + WHERE t.address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' -- Curve Metapool + AND t.address != '\x129360c964e2e13910d603043f6287e5e9383374' -- Uniswap + GROUP BY 1, 2 + HAVING SUM(t.credits / 1e18) > 0 + ) b + ON r.day = b.day + ORDER BY 1 desc +), + +token_balances_over_100 AS ( + SELECT * + FROM token_balances + WHERE balance > 100 +) + +SELECT + s.day, + s.average, + s.median +-- s1.average_over_100, +-- s1.median_over_100 +FROM +( + select day, + ROUND(avg(balance) filter (WHERE balance > 0)) as average, + ROUND(PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY balance)) as median + FROM token_balances + GROUP BY 1 +) s +-- INNER JOIN +-- ( +-- select day, +-- ROUND(avg(balance) filter (WHERE balance > 0)) as average_over_100, +-- ROUND(PERCENTILE_CONT(0.5) WITHIN GROUP(ORDER BY balance)) as median_over_100 +-- FROM token_balances_over_100 +-- GROUP BY 1 +-- ) s1 +-- ON s.day = s1.day +ORDER BY day DESC +``` \ No newline at end of file diff --git a/docs/deprecated/dune/OUSD buybacks.md b/docs/deprecated/dune/OUSD buybacks.md new file mode 100644 index 00000000..dcd69573 --- /dev/null +++ b/docs/deprecated/dune/OUSD buybacks.md @@ -0,0 +1,99 @@ +# OUSD buybacks + +https://dune.com/embeds/284111/536003/6b5abcab-4856-4590-b03f-47da6959202e +https://dune.com/queries/284111/536003 + 
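+The query below turns daily buyback inflows (OUSD minted to the buyback address, plus OGN and OGV
+transferred into the buyback addresses) into running totals and stitches the three series together
+by day with FULL OUTER JOINs. A minimal, self-contained sketch of that cumulative-sum pattern, with
+a made-up `daily_amounts` relation standing in for the real transfer CTEs:
+
+```sql
+WITH daily_amounts(day, amount) AS (
+  VALUES (DATE '2023-10-01', 100), (DATE '2023-10-02', 40), (DATE '2023-10-03', 0)
+)
+SELECT day,
+       -- running total of everything up to and including this day
+       SUM(amount) OVER (ORDER BY day ASC
+                         ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS cumulative_amount
+FROM daily_amounts
+ORDER BY day DESC;
+```
+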
+```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + ROUND(value / 1e18) AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + AND "to" = '\x77314eb392b2be47c014cde0706908b3307ad6a9' + AND "from" = '\x0000000000000000000000000000000000000000' + + -- UNION ALL + + -- SELECT evt_block_number, + -- "to" AS address, + -- evt_block_time, + -- ROUND(value / 1e18) AS amount + -- FROM ousd."OusdImplementation_evt_Transfer" + -- WHERE evt_block_number > 11596940 -- OUSD V2 block number + -- AND "to" = '\x7d82E86CF1496f9485a8ea04012afeb3C7489397' + -- AND "from" = '\x0000000000000000000000000000000000000000' + ) t +), + +ogn_transfers AS ( + SELECT evt_block_time, + ROUND(value / 1e18) as value + FROM erc20."ERC20_evt_Transfer" tr + WHERE tr."to" = '\x77314eb392b2be47c014cde0706908b3307ad6a9' + and contract_address='\x8207c1ffc5b6804f6024322ccf34f29c3541ae26' +), + +ogv_transfers AS ( + SELECT evt_block_time, + ROUND(value / 1e18) as value + FROM erc20."ERC20_evt_Transfer" tr + WHERE tr."to" = '\x7d82E86CF1496f9485a8ea04012afeb3C7489397' + and contract_address='\x9c354503C38481a7A7a51629142963F98eCC12D0' +) + +SELECT +COALESCE(OUSD_DATA.day, OGN_DATA.day, OGV_DATA.day) as day, +OUSD_DATA.amount as ousd_amount, +OGN_DATA.amount as ogn_amount, +OGV_DATA.amount as ogv_amount +FROM ( + SELECT day, + SUM(amount) OVER (ORDER BY day ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as amount + FROM ( + SELECT + date_trunc('day', block_time) as day, + SUM(amount) as amount + FROM transfers + GROUP BY 1 + ) OUSD_DATA +) OUSD_DATA FULL OUTER JOIN ( + SELECT day, + SUM(amount) OVER (ORDER BY day ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as amount + FROM ( + SELECT + date_trunc('day', evt_block_time) as day, + SUM(value) as amount + FROM ogn_transfers + GROUP BY 1 + ) OGN_DATA +) OGN_DATA +ON OUSD_DATA.day = OGN_DATA.day +FULL OUTER JOIN ( + SELECT day, + SUM(amount) OVER (ORDER BY day ASC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) as amount + FROM ( + SELECT + date_trunc('day', evt_block_time) as day, + SUM(value) as amount + FROM ogv_transfers + GROUP BY 1 + ) OGV_DATA +) OGV_DATA +ON OUSD_DATA.day = OGV_DATA.day +ORDER BY 1 desc +``` \ No newline at end of file diff --git a/docs/deprecated/dune/OUSD holder breakdown.md b/docs/deprecated/dune/OUSD holder breakdown.md new file mode 100644 index 00000000..7c4ea771 --- /dev/null +++ b/docs/deprecated/dune/OUSD holder breakdown.md @@ -0,0 +1,126 @@ +# OUSD holder breakdown + +https://dune.xyz/embeds/285485/539422/ea76c2ad-232d-4001-9c78-ecf5231b9700 +https://dune.com/queries/278734/528075 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as 
block_number, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), + +latest_rebase as ( + SELECT * from rebase_logs + ORDER BY block_number DESC + LIMIT 1 +), + +token_balances as ( + SELECT address as wallet_address, SUM(credits / 1e18) / MAX(latest_rebase.rebasing_credits_per_token) as amount FROM + transfer_with_credits_per_token, + latest_rebase + GROUP BY 1 +) + +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a lot of transactions) +-- SELECT SUM(amount / 1e18), SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +Select + CASE + when wallet_address = '\x87650D7bbfC3A9F10587d7778206671719d9910D' then 'Curve Metapool' + when wallet_address = '\x129360c964e2e13910d603043f6287e5e9383374' then 'Uniswap V3: OUSD - USDT' + -- when labels.get(wallet_address, 'project', 'contract_name')::text is not NULL then labels.get(wallet_address, 'project', 'contract_name', 'ens name')::text + when amount between 0 and 100 then 'mini 0-100 OUSD' + when amount 
between 100 and 1000 then 'light 100-1k OUSD' + when amount between 1000 and 10000 then 'heavy 1k-10k' + when amount between 10000 and 100000 then 'full 10k-100k' + when amount > 100000 then 'enormous >100k' + end as holder_type, + sum(amount) as amount, + count(distinct wallet_address) as holders +from token_balances +where wallet_address != '\x0000000000000000000000000000000000000000' and +amount > 0 +group by 1 +``` \ No newline at end of file diff --git a/docs/deprecated/dune/Token holders - time series - breakdown by balance - detailed.md b/docs/deprecated/dune/Token holders - time series - breakdown by balance - detailed.md new file mode 100644 index 00000000..729a767a --- /dev/null +++ b/docs/deprecated/dune/Token holders - time series - breakdown by balance - detailed.md @@ -0,0 +1,163 @@ +# Token holders - time series - breakdown by balance - detailed + +https://dune.com/embeds/284273/536199/c65ae6a8-90cf-4d06-8d86-84d0080aa39d +https://dune.com/queries/284273/536199 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + 
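+  -- Prices each transfer in rebase-independent "credits": the raw amount is multiplied by the
+  -- rebasing_credits_per_token in effect at the nearest rebase at or before the transfer's block.
+  -- Summing a wallet's credits and dividing by a later rebasing_credits_per_token then yields its
+  -- rebased OUSD balance at that later point (see token_balances below).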
SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + ORDER BY day desc +), + +latest_rebases as ( + SELECT + max_d.day as day, + r.* + FROM ( + SELECT d.day, MAX(r.block_time) as max_block_time + FROM day_hops_3 d + LEFT JOIN rebase_logs r + ON r.block_time < d.day + GROUP BY 1 + ) max_d INNER JOIN rebase_logs r + ON max_d.max_block_time = r.block_time +), + +token_balances as ( + SELECT + b.day, + wallet_address, + credits_sum / rebasing_credits_per_token as balance + FROM latest_rebases r + INNER JOIN ( + SELECT d.day, t.address as wallet_address, SUM(t.credits / 1e18) as credits_sum + FROM transfer_with_credits_per_token t + INNER JOIN day_hops_3 d + ON t.block_time < d.day + WHERE t.address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' -- Curve Metapool + AND t.address != '\x129360c964e2e13910d603043f6287e5e9383374' -- Uniswap + GROUP BY 1, 2 + ) b + ON r.day = b.day + ORDER BY 1 desc +) + +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a lot of transactions) +-- SELECT SUM(amount / 1e18), SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +select day, +ROUND(sum(balance) filter (WHERE balance > 0 AND balance < 100)) AS "0-100 OUSD", +ROUND(sum(balance) filter (WHERE balance > 100 AND balance < 1000)) AS "100-1k OUSD", +ROUND(sum(balance) filter (WHERE balance > 1000 AND balance < 3000)) AS "1k-3k OUSD", +ROUND(sum(balance) filter (WHERE balance > 3000 AND balance < 9000)) AS "3k-9k OUSD", +ROUND(sum(balance) filter (WHERE balance > 9000 AND balance < 15000)) AS "9k-15k OUSD", +ROUND(sum(balance) filter (WHERE balance > 15000 AND balance < 25000)) AS "15k-25k OUSD", +ROUND(sum(balance) filter (WHERE balance > 25000 AND balance < 40000)) AS "25k-40k OUSD", +ROUND(sum(balance) filter (WHERE balance > 40000 AND balance < 70000)) AS "40k-70k OUSD", +ROUND(sum(balance) filter (WHERE balance > 70000 AND balance < 110000)) AS "70k-110k OUSD", +ROUND(sum(balance) filter (WHERE balance > 110000 AND balance < 150000)) AS "110k-150k OUSD", +ROUND(sum(balance) filter (WHERE balance > 150000 AND balance < 250000)) AS "150k-250k OUSD", +ROUND(sum(balance) filter (WHERE balance > 250000 AND balance < 500000)) AS "250k-500k OUSD", +ROUND(sum(balance) filter (WHERE balance > 500000 AND balance < 750000)) AS "500k-750k OUSD", +ROUND(sum(balance) filter (WHERE balance > 750000 AND balance < 2000000)) AS "750k-2m OUSD", +ROUND(sum(balance) filter (WHERE balance > 2000000)) AS ">2m OUSD", +sum(balance) as amount, +count(distinct wallet_address) as holders +FROM token_balances +GROUP BY 1; +``` \ No newline at end of file diff --git a/docs/deprecated/dune/Token holders - time series - breakdown by balance.md 
b/docs/deprecated/dune/Token holders - time series - breakdown by balance.md new file mode 100644 index 00000000..0e1cf2a0 --- /dev/null +++ b/docs/deprecated/dune/Token holders - time series - breakdown by balance.md @@ -0,0 +1,152 @@ +# Token holders - time series - breakdown by balance + +https://dune.com/embeds/280055/532120/dac10a6c-e731-44bf-a394-cbfe4c2903ae +https://dune.com/queries/280055/532120 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a lot of transactions) +-- SELECT SUM(amount / 1e18), 
SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + ORDER BY day desc +), + +latest_rebases as ( + SELECT + max_d.day as day, + r.* + FROM ( + SELECT d.day, MAX(r.block_time) as max_block_time + FROM day_hops_3 d + LEFT JOIN rebase_logs r + ON r.block_time < d.day + GROUP BY 1 + ) max_d INNER JOIN rebase_logs r + ON max_d.max_block_time = r.block_time +), + +token_balances as ( + SELECT + b.day, + wallet_address, + credits_sum / rebasing_credits_per_token as balance + FROM latest_rebases r + INNER JOIN ( + SELECT d.day, t.address as wallet_address, SUM(t.credits / 1e18) as credits_sum + FROM transfer_with_credits_per_token t + INNER JOIN day_hops_3 d + ON t.block_time < d.day + WHERE t.address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' -- Curve Metapool + AND t.address != '\x129360c964e2e13910d603043f6287e5e9383374' -- Uniswap + GROUP BY 1, 2 + ) b + ON r.day = b.day + ORDER BY 1 desc +) + +select day, +sum(balance) filter (WHERE balance > 0 AND balance < 100) AS "mini 0-100 OUSD", +sum(balance) filter (WHERE balance > 100 AND balance < 1000) AS "light 100-1k OUSD", +sum(balance) filter (WHERE balance > 1000 AND balance < 10000) AS "heavy 1k-10k", +sum(balance) filter (WHERE balance > 10000 AND balance < 100000) AS "full 10k-100k", +sum(balance) filter (WHERE balance > 100000) AS "enormous >100k", +sum(balance) as amount, +count(distinct wallet_address) as holders +FROM token_balances +GROUP BY 1; +``` \ No newline at end of file diff --git a/docs/deprecated/dune/Token holders - time series - breakdown by wallet number - detailed.md b/docs/deprecated/dune/Token holders - time series - breakdown by wallet number - detailed.md new file mode 100644 index 00000000..01af2fc9 --- /dev/null +++ b/docs/deprecated/dune/Token holders - time series - breakdown by wallet number - detailed.md @@ -0,0 +1,158 @@ +# Token holders - time series - breakdown by wallet number - detailed + +## DEPRECATED DATASET ACCORDING TO DUNE + +https://dune.com/embeds/284704/537456/a450c5db-957f-4832-8e5b-b91dd8c5db55 +https://dune.com/queries/284704/537456 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT 
block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + ORDER BY day desc +), + +latest_rebases as ( + SELECT + max_d.day as day, + r.* + FROM ( + SELECT d.day, MAX(r.block_time) as max_block_time + FROM day_hops_3 d + LEFT JOIN rebase_logs r + ON r.block_time < d.day + GROUP BY 1 + ) max_d INNER JOIN rebase_logs r + ON max_d.max_block_time = r.block_time +), + +token_balances as ( + SELECT + b.day, + wallet_address, + credits_sum / rebasing_credits_per_token as balance + FROM latest_rebases r + INNER JOIN ( + SELECT d.day, t.address as wallet_address, SUM(t.credits / 1e18) as credits_sum + FROM transfer_with_credits_per_token t + INNER JOIN day_hops_3 d + ON t.block_time < d.day + WHERE t.address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' -- Curve Metapool + AND t.address != '\x129360c964e2e13910d603043f6287e5e9383374' -- Uniswap + GROUP BY 1, 2 + ) b + ON r.day = b.day + ORDER BY 1 desc +) + +select day, +ROUND(count(1) filter (WHERE balance > 0 AND balance < 100)) AS "0-100 OUSD", +ROUND(count(1) filter (WHERE balance > 100 AND balance < 1000)) AS "100-1k OUSD", +ROUND(count(1) filter (WHERE balance > 1000 AND balance < 3000)) AS "1k-3k OUSD", +ROUND(count(1) filter (WHERE balance > 3000 AND balance < 9000)) AS "3k-9k OUSD", +ROUND(count(1) filter (WHERE balance > 9000 AND balance < 15000)) AS "9k-15k OUSD", 
+ROUND(count(1) filter (WHERE balance > 15000 AND balance < 25000)) AS "15k-25k OUSD", +ROUND(count(1) filter (WHERE balance > 25000 AND balance < 40000)) AS "25k-40k OUSD", +ROUND(count(1) filter (WHERE balance > 40000 AND balance < 70000)) AS "40k-70k OUSD", +ROUND(count(1) filter (WHERE balance > 70000 AND balance < 110000)) AS "70k-110k OUSD", +ROUND(count(1) filter (WHERE balance > 110000 AND balance < 150000)) AS "110k-150k OUSD", +ROUND(count(1) filter (WHERE balance > 150000 AND balance < 250000)) AS "150k-250k OUSD", +ROUND(count(1) filter (WHERE balance > 250000 AND balance < 500000)) AS "250k-500k OUSD", +ROUND(count(1) filter (WHERE balance > 500000 AND balance < 750000)) AS "500k-750k OUSD", +ROUND(count(1) filter (WHERE balance > 750000 AND balance < 2000000)) AS "750k-2m OUSD", +ROUND(count(1) filter (WHERE balance > 2000000)) AS ">2m OUSD", +sum(balance) as amount, +count(distinct wallet_address) as holders +FROM token_balances +GROUP BY 1; +``` \ No newline at end of file diff --git a/docs/deprecated/dune/Token holders - time series - breakdown by wallet number.md b/docs/deprecated/dune/Token holders - time series - breakdown by wallet number.md new file mode 100644 index 00000000..81352efc --- /dev/null +++ b/docs/deprecated/dune/Token holders - time series - breakdown by wallet number.md @@ -0,0 +1,157 @@ +# Token holders - time series - breakdown by wallet number + +## DEPRECATED DATASET ACCORDING TO DUNE + +https://dune.com/embeds/282332/532634/55ca01e1-77f3-4394-86f3-139765492a27 +https://dune.com/queries/282332/532634 + +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( +SELECT evt_block_number as block_number, +evt_block_time as block_time, +address, +amount +FROM +( +SELECT evt_block_number, +"to" AS address, +evt_block_time, +value AS amount +FROM ousd."OusdImplementation_evt_Transfer" +WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t + +), + +rebase_logs AS ( +-- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported +SELECT block_number, +MAX(block_time) as block_time, +MAX(total_supply) as total_supply, +MAX(rebasing_credits) as rebasing_credits, +MIN(rebasing_credits_per_token) as rebasing_credits_per_token +FROM( +SELECT +dune_user_generated.data2bignumber(data, 0, 18) as total_supply, +dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, +dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + +* + +FROM ethereum.logs +WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD +AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs +AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE 
contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC + +), + +nearest_rebase_log_to_transaction_log AS ( +SELECT t.block_number AS transaction_block_number, +MAX(r.block_number) AS rebase_block_number +FROM transfers AS t +LEFT JOIN rebase_logs AS r +ON t.block_number >= r.block_number +GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( +SELECT t.*, r.rebasing_credits_per_token, +t.amount * r.rebasing_credits_per_token as credits, +r.block_number as rebase_block_number +FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb +ON t.block_number = nrb.transaction_block_number +INNER JOIN rebase_logs r +ON r.block_number = nrb.rebase_block_number +), +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a +lot of transactions) +-- SELECT SUM(amount / 1e18), SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( +SELECT date_trunc('day', day) as day +FROM( +SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day +UNION ALL +SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day +) DAYS +ORDER BY day desc +), + +latest_rebases as ( +SELECT +max_d.day as day, +r.* +FROM ( +SELECT d.day, MAX(r.block_time) as max_block_time +FROM day_hops_3 d +LEFT JOIN rebase_logs r +ON r.block_time < d.day +GROUP BY 1 +) max_d INNER JOIN rebase_logs r +ON max_d.max_block_time = r.block_time +), + +token_balances as ( +SELECT +b.day, +wallet_address, +credits_sum / rebasing_credits_per_token as balance +FROM latest_rebases r +INNER JOIN ( +SELECT d.day, t.address as wallet_address, SUM(t.credits / 1e18) as credits_sum +FROM transfer_with_credits_per_token t +INNER JOIN day_hops_3 d +ON t.block_time < d.day +WHERE t.address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' -- Curve Metapool +AND t.address != '\x129360c964e2e13910d603043f6287e5e9383374' -- Uniswap +GROUP BY 1, 2 +) b +ON r.day = b.day +ORDER BY 1 desc +) + +select day, +count(1) filter (WHERE balance > 0 AND balance < 100) AS "mini 0-100 OUSD", +count(1) filter (WHERE balance > 100 AND balance < 1000) AS "light 100-1k OUSD", +count(1) filter (WHERE balance > 1000 AND balance < 10000) AS "heavy 1k-10k", +count(1) filter (WHERE balance > 10000 AND balance < 100000) AS "full 10k-100k", +count(1) filter (WHERE balance > 100000) AS "enormous >100k", +count(1) as amount, +count(distinct wallet_address) as holders +FROM token_balances +GROUP BY 1; \ No newline at end of file diff --git a/docs/deprecated/dune/Total supply breakdown.md b/docs/deprecated/dune/Total supply breakdown.md new file mode 100644 index 00000000..06ae90ff --- /dev/null +++ b/docs/deprecated/dune/Total supply breakdown.md @@ -0,0 +1,153 @@ +# Total supply breakdown + +https://dune.com/embeds/284182/536054/8efedf92-132d-43f9-974c-d6aaa2ad53b +https://dune.com/queries/284182/536054 + +```sql +DROP FUNCTION IF EXISTS dune_user_generated.data2bignumber(BYTEA, INT, INT); +CREATE OR REPLACE FUNCTION 
dune_user_generated.data2bignumber(data BYTEA, topic INT, decimals INT) RETURNS FLOAT AS $$ +BEGIN +RETURN bytea2numeric(decode(SUBSTRING(ENCODE("data",'hex'),(1+(64*"topic")),64),'hex'))/POWER(10, "decimals"); +END; $$ +LANGUAGE PLPGSQL; + +WITH transfers AS ( + SELECT evt_block_number as block_number, + evt_block_time as block_time, + address, + amount + FROM + ( + SELECT evt_block_number, + "to" AS address, + evt_block_time, + value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT evt_block_number, + "from" AS address, + evt_block_time, + -value AS amount + FROM ousd."OusdImplementation_evt_Transfer" + WHERE evt_block_number > 11596940 -- OUSD V2 block number + ) t +), + +rebase_logs AS ( + -- this is kind of hackish, but for some reason "DISTINCT ON" expression is not supported + SELECT block_number, + MAX(block_time) as block_time, + MAX(total_supply) as total_supply, + MAX(rebasing_credits) as rebasing_credits, + MIN(rebasing_credits_per_token) as rebasing_credits_per_token + FROM( + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 18) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 18) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x99e56f783b536ffacf422d59183ea321dd80dcd6d23daa13023e8afea38c3df1' -- old rebase logs + AND block_number > 11596940 -- OUSD V2 block number + + UNION ALL + + SELECT + dune_user_generated.data2bignumber(data, 0, 18) as total_supply, + dune_user_generated.data2bignumber(data, 1, 27) as rebasing_credits, + dune_user_generated.data2bignumber(data, 2, 27) as rebasing_credits_per_token, + * + FROM ethereum.logs + WHERE contract_address='\x2a8e1e676ec238d8a992307b495b45b3feaa5e86' -- OUSD + AND topic1='\x41645eb819d3011b13f97696a8109d14bfcddfaca7d063ec0564d62a3e257235' -- new rebase logs + AND block_number > 11596940 -- OUSD V2 block number + ) DATA + GROUP BY 1 + ORDER BY block_number DESC +), + +nearest_rebase_log_to_transaction_log AS ( + SELECT t.block_number AS transaction_block_number, + MAX(r.block_number) AS rebase_block_number + FROM transfers AS t + LEFT JOIN rebase_logs AS r + ON t.block_number >= r.block_number + GROUP BY t.block_number +), + +transfer_with_credits_per_token as ( + SELECT t.*, r.rebasing_credits_per_token, + t.amount * r.rebasing_credits_per_token as credits, + r.block_number as rebase_block_number + FROM transfers t INNER JOIN nearest_rebase_log_to_transaction_log nrb + ON t.block_number = nrb.transaction_block_number + INNER JOIN rebase_logs r + ON r.block_number = nrb.rebase_block_number +), +-- TODO INSPECT THIS +-- last rebasing credits per token: 0.8369875568128926 +-- TODO some token holders have balances way off like this account: \x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019 (has a lot of transactions) +-- SELECT SUM(amount / 1e18), SUM(credits / 1e18) FROM transfer_with_credits_per_token +-- WHERE address = '\x96feb7b6f808dd2bbd09c9e5ccde77cabd58d019' +-- GROUP BY address + +-- 2 months in 3 day hops, except last 7 days that have no hops +day_hops_3 AS ( + SELECT date_trunc('day', day) as day + FROM( + SELECT generate_series('now'::timestamp - '2 month'::interval, 'now'::timestamp - '1 week'::interval, '3 day') AS day + UNION ALL + SELECT generate_series('now'::timestamp - '6 days'::interval, date_trunc('day', NOW()), '1 day') AS day + ) DAYS + 
ORDER BY day desc +), + +latest_rebases as ( + SELECT + max_d.day as day, + r.* + FROM ( + SELECT d.day, MAX(r.block_time) as max_block_time + FROM day_hops_3 d + LEFT JOIN rebase_logs r + ON r.block_time < d.day + GROUP BY 1 + ) max_d INNER JOIN rebase_logs r + ON max_d.max_block_time = r.block_time +), + +token_balances as ( + SELECT + b.day, + wallet_address, + credits_sum / rebasing_credits_per_token as balance + FROM latest_rebases r + INNER JOIN ( + SELECT d.day, t.address as wallet_address, SUM(t.credits / 1e18) as credits_sum + FROM transfer_with_credits_per_token t + INNER JOIN day_hops_3 d + ON t.block_time < d.day + GROUP BY 1, 2 + HAVING SUM(t.credits / 1e18) > 0 + ) b + ON r.day = b.day + ORDER BY 1 desc +) + +SELECT * +FROM +( + select day, + ROUND(SUM (balance) filter (WHERE wallet_address = '\x87650d7bbfc3a9f10587d7778206671719d9910d')) AS curve, + ROUND(SUM (balance) filter (WHERE wallet_address = '\x129360c964e2e13910d603043f6287e5e9383374')) AS uniswap, + ROUND(SUM (balance) filter (WHERE wallet_address = '\xcecad69d7d4ed6d52efcfa028af8732f27e08f70')) AS flipper, + ROUND(SUM (balance) filter (WHERE wallet_address != '\xcecad69d7d4ed6d52efcfa028af8732f27e08f70' AND wallet_address != '\x87650d7bbfc3a9f10587d7778206671719d9910d' AND wallet_address != '\x129360c964e2e13910d603043f6287e5e9383374')) AS other + FROM token_balances + GROUP BY 1 +) s +ORDER BY day DESC +``` \ No newline at end of file diff --git a/graphql.config.yml b/graphql.config.yml index c4401a08..bccf0b3a 100644 --- a/graphql.config.yml +++ b/graphql.config.yml @@ -1,4 +1,14 @@ -schema: schema.graphql -documents: '**/*.graphql' +projects: + combined: + schema: schema.graphql + base: + schema: schema-base.graphql + oeth: + schema: schema-oeth.graphql + ousd: + schema: schema-ousd.graphql + ogv: + schema: schema-ogv.graphql include: - - types.graphql \ No newline at end of file + - types.graphql + - types-otoken.graphql \ No newline at end of file diff --git a/package.json b/package.json index ad6c7152..3d2ae2fa 100644 --- a/package.json +++ b/package.json @@ -2,6 +2,7 @@ "name": "origin-squid", "private": true, "scripts": { + "codegen": "echo '# GENERATED, DO NOT MODIFY\n' > schema.graphql && cat schema-*.graphql >> schema.graphql && sqd codegen && git add src/model/generated/*", "build": "rm -rf lib && tsc", "prettier-check": "prettier --check **/*.ts **/*.json", "prettier-fix": "prettier --write **/*.ts **/*.json" diff --git a/schema-base.graphql b/schema-base.graphql new file mode 100644 index 00000000..7bfed0d6 --- /dev/null +++ b/schema-base.graphql @@ -0,0 +1,17 @@ +""" +Any entity which has a price associated with it should have that price go in here. +Prices can change very frequently and we don't want those changes on the same track +as values which change less frequently. +""" +type ExchangeRate @entity { + """ + Format: 'blockNumber:pair' ex '123456789:ETH_USD' + """ + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + pair: String! + base: String! + quote: String! + rate: BigInt! +} diff --git a/schema-oeth.graphql b/schema-oeth.graphql new file mode 100644 index 00000000..b390ebd7 --- /dev/null +++ b/schema-oeth.graphql @@ -0,0 +1,162 @@ +""" +The OETH entity tracks the change in total supply of OETH over time. +""" +type OETH @entity { + id: ID! @index + timestamp: DateTime! @index + blockNumber: Int! @index + totalSupply: BigInt! + rebasingSupply: BigInt! + nonRebasingSupply: BigInt! 
+} + +enum OETHRebasingOption { + OptIn + OptOut +} + +""" +The OETH balance, history and other information for a given address. +""" +type OETHAddress @entity { + id: ID! @index + isContract: Boolean! + rebasingOption: OETHRebasingOption! + balance: BigInt! + earned: BigInt! + credits: BigInt! + lastUpdated: DateTime! + history: [OETHHistory!]! @derivedFrom(field: "address") +} + +enum OETHHistoryType { + Swap + Sent + Received + Yield +} + +""" +The History entity tracks events that change the balance of OETH for an address. +""" +type OETHHistory @entity { + id: ID! + address: OETHAddress! @index + value: BigInt! + balance: BigInt! + timestamp: DateTime! + blockNumber: Int! @index + txHash: String! @index + type: OETHHistoryType! +} + +""" +The Rebase entity tracks historical rebase events on the OETH contract. +""" +type OETHRebase @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + totalSupply: BigInt! + rebasingCredits: BigInt! + rebasingCreditsPerToken: BigInt! + apy: OETHAPY! + fee: BigInt! + yield: BigInt! +} + +""" +The RebaseOption entity tracks historical rebase option changes by address. +""" +type OETHRebaseOption @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + address: OETHAddress! + status: OETHRebasingOption! +} + +""" +The APY entity tracks historical APY values by day. +""" +type OETHAPY @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + apr: Float! + apy: Float! + apy7DayAvg: Float! + apy14DayAvg: Float! + apy30DayAvg: Float! + rebasingCreditsPerToken: BigInt! +} + +# OETH Vault: 0x39254033945AA2E4809Cc2977E7087BEE48bd7Ab +""" +The Vault entity tracks the OETH vault balance over time. +""" +type OETHVault @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + weth: BigInt! + stETH: BigInt! + rETH: BigInt! + frxETH: BigInt! +} + +# OETH Convex: 0x1827F9eA98E0bf96550b2FC20F7233277FcD7E63 +# Curve LP Token: 0x94B17476A93b3262d87B9a326965D1E91f9c13E7 +# Convex Deposit: 0xF403C135812408BFbE8713b5A23a04b3D48AAE31 +type OETHCurveLP @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + totalSupply: BigInt! + eth: BigInt! + oeth: BigInt! + totalSupplyOwned: BigInt! + ethOwned: BigInt! + oethOwned: BigInt! +} + +# OETH Frax Staking: 0x3fF8654D633D4Ea0faE24c52Aec73B4A20D0d0e5 +type OETHFraxStaking @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + """ + - sfrxETH is what's actually stored here, slightly confusing and may want to change. + - used by balance sheet + """ + frxETH: BigInt! +} + +# OETH Morpho Aave: 0xc1fc9E5eC3058921eA5025D703CBE31764756319 +type OETHMorphoAave @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + weth: BigInt! +} + +# OETH Dripper: 0xc0F42F73b8f01849a2DD99753524d4ba14317EB3 +type Dripper @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + weth: BigInt! +} + +# OETHBalancerMetaPoolStrategy: 0x49109629ac1deb03f2e9b2fe2ac4a623e0e7dfdc +type OETHBalancerMetaPoolStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + total: BigInt! + rETH: BigInt! + weth: BigInt! +} diff --git a/schema-ogv.graphql b/schema-ogv.graphql new file mode 100644 index 00000000..14ab77b5 --- /dev/null +++ b/schema-ogv.graphql @@ -0,0 +1,35 @@ +# OGV Price (5m?) +# OGV Market Cap (5m?) +# OGV Circulating Supply +# OGV Total Supply +type OGV @entity { + id: ID! 
+ timestamp: DateTime! @index + blockNumber: Int! @index + circulating: BigInt! + total: BigInt! +} + +# OGV Amount Staked & Percentage +# OGV Staking APY +type StakedOGV @entity { # Representing `veOGV` + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + total: BigInt! + apy: BigInt! +} + +# OGV Registered Voters +# OGV Open-source contributors +# OGV Improvement proposals +type OGVGovernance @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + registeredVoters: Int! + openSourceContributors: Int! + improvementProposals: Int! +} + +# TODO: More diff --git a/schema-ousd.graphql b/schema-ousd.graphql new file mode 100644 index 00000000..a703b50b --- /dev/null +++ b/schema-ousd.graphql @@ -0,0 +1,188 @@ +""" +The OUSD entity tracks the change in total supply of OUSD over time. +""" +type OUSD @entity { + id: ID! @index + timestamp: DateTime! @index + blockNumber: Int! @index + totalSupply: BigInt! + rebasingSupply: BigInt! + nonRebasingSupply: BigInt! +} + +enum OUSDRebasingOption { + OptIn + OptOut +} + +""" +The OUSD balance, history and other information for a given address. +""" +type OUSDAddress @entity { + id: ID! @index + isContract: Boolean! + rebasingOption: OUSDRebasingOption! + balance: BigInt! + earned: BigInt! + credits: BigInt! + lastUpdated: DateTime! + history: [OUSDHistory!]! @derivedFrom(field: "address") +} + +enum OUSDHistoryType { + Swap + Sent + Received + Yield +} + +""" +The History entity tracks events that change the balance of OUSD for an address. +""" +type OUSDHistory @entity { + id: ID! + address: OUSDAddress! @index + value: BigInt! + balance: BigInt! + timestamp: DateTime! + blockNumber: Int! @index + txHash: String! @index + type: OUSDHistoryType! +} + +""" +The Rebase entity tracks historical rebase events on the OUSD contract. +""" +type OUSDRebase @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + totalSupply: BigInt! + rebasingCredits: BigInt! + rebasingCreditsPerToken: BigInt! + apy: OUSDAPY! + fee: BigInt! + yield: BigInt! +} + +""" +The RebaseOption entity tracks historical rebase option changes by address. +""" +type OUSDRebaseOption @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + address: OUSDAddress! + status: OUSDRebasingOption! +} + +""" +The APY entity tracks historical APY values by day. +""" +type OUSDAPY @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + apr: Float! + apy: Float! + apy7DayAvg: Float! + apy14DayAvg: Float! + apy30DayAvg: Float! + rebasingCreditsPerToken: BigInt! +} + +""" +The Vault entity tracks the OUSD vault balance over time. +""" +type OUSDVault @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +# OUSD Strategies! + +type OUSDMorphoAave @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDMorphoCompound @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type MakerDSRStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! +} + +type OUSDFluxStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDCompoundStrategy @entity { + id: ID! 
+ timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDConvexStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDAaveStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDMetaStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type ConvexLUSDPlus3Crv @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} \ No newline at end of file diff --git a/schema.graphql b/schema.graphql index 2c3967f0..37e0d073 100644 --- a/schema.graphql +++ b/schema.graphql @@ -1,3 +1,22 @@ +# GENERATED, DO NOT MODIFY + +""" +Any entity which has a price associated with it should have that price go in here. +Prices can change very frequently and we don't want those changes on the same track +as values which change less frequently. +""" +type ExchangeRate @entity { + """ + Format: 'blockNumber:pair' ex '123456789:ETH_USD' + """ + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + pair: String! + base: String! + quote: String! + rate: BigInt! +} """ The OETH entity tracks the change in total supply of OETH over time. """ @@ -5,16 +24,12 @@ type OETH @entity { id: ID! @index timestamp: DateTime! @index blockNumber: Int! @index - - """ - The total supply of OETH tokens at the corresponding block. - """ totalSupply: BigInt! rebasingSupply: BigInt! nonRebasingSupply: BigInt! } -enum RebasingOption { +enum OETHRebasingOption { OptIn OptOut } @@ -22,34 +37,18 @@ enum RebasingOption { """ The OETH balance, history and other information for a given address. """ -type Address @entity { +type OETHAddress @entity { id: ID! @index isContract: Boolean! - - """ - Is the address opted in our out of yield. - """ - rebasingOption: RebasingOption! - - """ - The current balance of OETH held by the address. - """ + rebasingOption: OETHRebasingOption! balance: BigInt! - - """ - The total amount of OETH earned by the address. - """ earned: BigInt! credits: BigInt! - - """ - The last time the address information was updated. - """ lastUpdated: DateTime! - history: [History!]! @derivedFrom(field: "address") + history: [OETHHistory!]! @derivedFrom(field: "address") } -enum HistoryType { +enum OETHHistoryType { Swap Sent Received @@ -59,21 +58,21 @@ enum HistoryType { """ The History entity tracks events that change the balance of OETH for an address. """ -type History @entity { +type OETHHistory @entity { id: ID! - address: Address! @index + address: OETHAddress! @index value: BigInt! balance: BigInt! timestamp: DateTime! blockNumber: Int! @index txHash: String! @index - type: HistoryType! + type: OETHHistoryType! } """ The Rebase entity tracks historical rebase events on the OETH contract. """ -type Rebase @entity { +type OETHRebase @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -81,7 +80,7 @@ type Rebase @entity { totalSupply: BigInt! rebasingCredits: BigInt! rebasingCreditsPerToken: BigInt! - apy: APY! + apy: OETHAPY! fee: BigInt! yield: BigInt! } @@ -89,19 +88,19 @@ type Rebase @entity { """ The RebaseOption entity tracks historical rebase option changes by address. """ -type RebaseOption @entity { +type OETHRebaseOption @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! 
@index txHash: String! @index - address: Address! - status: RebasingOption! + address: OETHAddress! + status: OETHRebasingOption! } """ The APY entity tracks historical APY values by day. """ -type APY @entity { +type OETHAPY @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -118,7 +117,7 @@ type APY @entity { """ The Vault entity tracks the OETH vault balance over time. """ -type Vault @entity { +type OETHVault @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -131,7 +130,7 @@ type Vault @entity { # OETH Convex: 0x1827F9eA98E0bf96550b2FC20F7233277FcD7E63 # Curve LP Token: 0x94B17476A93b3262d87B9a326965D1E91f9c13E7 # Convex Deposit: 0xF403C135812408BFbE8713b5A23a04b3D48AAE31 -type CurveLP @entity { +type OETHCurveLP @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -144,7 +143,7 @@ type CurveLP @entity { } # OETH Frax Staking: 0x3fF8654D633D4Ea0faE24c52Aec73B4A20D0d0e5 -type FraxStaking @entity { +type OETHFraxStaking @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -156,7 +155,7 @@ type FraxStaking @entity { } # OETH Morpho Aave: 0xc1fc9E5eC3058921eA5025D703CBE31764756319 -type MorphoAave @entity { +type OETHMorphoAave @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -172,7 +171,7 @@ type Dripper @entity { } # OETHBalancerMetaPoolStrategy: 0x49109629ac1deb03f2e9b2fe2ac4a623e0e7dfdc -type BalancerMetaPoolStrategy @entity { +type OETHBalancerMetaPoolStrategy @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! @index @@ -180,16 +179,226 @@ type BalancerMetaPoolStrategy @entity { rETH: BigInt! weth: BigInt! } +# OGV Price (5m?) +# OGV Market Cap (5m?) +# OGV Circulating Supply +# OGV Total Supply +type OGV @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + circulating: BigInt! + total: BigInt! +} -type ExchangeRate @entity { - """ - Format: 'blockNumber:pair' ex '123456789:ETH_USD' - """ +# OGV Amount Staked & Percentage +# OGV Staking APY +type StakedOGV @entity { # Representing `veOGV` id: ID! timestamp: DateTime! @index blockNumber: Int! @index - pair: String! - base: String! - quote: String! - rate: BigInt! + total: BigInt! + apy: BigInt! +} + +# OGV Registered Voters +# OGV Open-source contributors +# OGV Improvement proposals +type OGVGovernance @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + registeredVoters: Int! + openSourceContributors: Int! + improvementProposals: Int! +} + +# TODO: More +""" +The OUSD entity tracks the change in total supply of OUSD over time. +""" +type OUSD @entity { + id: ID! @index + timestamp: DateTime! @index + blockNumber: Int! @index + totalSupply: BigInt! + rebasingSupply: BigInt! + nonRebasingSupply: BigInt! +} + +enum OUSDRebasingOption { + OptIn + OptOut +} + +""" +The OUSD balance, history and other information for a given address. +""" +type OUSDAddress @entity { + id: ID! @index + isContract: Boolean! + rebasingOption: OUSDRebasingOption! + balance: BigInt! + earned: BigInt! + credits: BigInt! + lastUpdated: DateTime! + history: [OUSDHistory!]! @derivedFrom(field: "address") +} + +enum OUSDHistoryType { + Swap + Sent + Received + Yield +} + +""" +The History entity tracks events that change the balance of OUSD for an address. +""" +type OUSDHistory @entity { + id: ID! + address: OUSDAddress! @index + value: BigInt! + balance: BigInt! + timestamp: DateTime! + blockNumber: Int! @index + txHash: String! @index + type: OUSDHistoryType! 
} + +""" +The Rebase entity tracks historical rebase events on the OUSD contract. +""" +type OUSDRebase @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + totalSupply: BigInt! + rebasingCredits: BigInt! + rebasingCreditsPerToken: BigInt! + apy: OUSDAPY! + fee: BigInt! + yield: BigInt! +} + +""" +The RebaseOption entity tracks historical rebase option changes by address. +""" +type OUSDRebaseOption @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + address: OUSDAddress! + status: OUSDRebasingOption! +} + +""" +The APY entity tracks historical APY values by day. +""" +type OUSDAPY @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + txHash: String! @index + apr: Float! + apy: Float! + apy7DayAvg: Float! + apy14DayAvg: Float! + apy30DayAvg: Float! + rebasingCreditsPerToken: BigInt! +} + +""" +The Vault entity tracks the OUSD vault balance over time. +""" +type OUSDVault @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +# OUSD Strategies! + +type OUSDMorphoAave @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDMorphoCompound @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type MakerDSRStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! +} + +type OUSDFluxStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDCompoundStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDConvexStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDAaveStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type OUSDMetaStrategy @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! +} + +type ConvexLUSDPlus3Crv @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + dai: BigInt! + usdt: BigInt! + usdc: BigInt! 
+} \ No newline at end of file diff --git a/src/model/generated/_historyType.ts b/src/model/generated/_oethHistoryType.ts similarity index 74% rename from src/model/generated/_historyType.ts rename to src/model/generated/_oethHistoryType.ts index 813557ec..717d0c2a 100644 --- a/src/model/generated/_historyType.ts +++ b/src/model/generated/_oethHistoryType.ts @@ -1,4 +1,4 @@ -export enum HistoryType { +export enum OETHHistoryType { Swap = "Swap", Sent = "Sent", Received = "Received", diff --git a/src/model/generated/_rebasingOption.ts b/src/model/generated/_oethRebasingOption.ts similarity index 58% rename from src/model/generated/_rebasingOption.ts rename to src/model/generated/_oethRebasingOption.ts index ea77587b..63013e44 100644 --- a/src/model/generated/_rebasingOption.ts +++ b/src/model/generated/_oethRebasingOption.ts @@ -1,4 +1,4 @@ -export enum RebasingOption { +export enum OETHRebasingOption { OptIn = "OptIn", OptOut = "OptOut", } diff --git a/src/model/generated/_ousdHistoryType.ts b/src/model/generated/_ousdHistoryType.ts new file mode 100644 index 00000000..f62e10f3 --- /dev/null +++ b/src/model/generated/_ousdHistoryType.ts @@ -0,0 +1,6 @@ +export enum OUSDHistoryType { + Swap = "Swap", + Sent = "Sent", + Received = "Received", + Yield = "Yield", +} diff --git a/src/model/generated/_ousdRebasingOption.ts b/src/model/generated/_ousdRebasingOption.ts new file mode 100644 index 00000000..4813d5a0 --- /dev/null +++ b/src/model/generated/_ousdRebasingOption.ts @@ -0,0 +1,4 @@ +export enum OUSDRebasingOption { + OptIn = "OptIn", + OptOut = "OptOut", +} diff --git a/src/model/generated/convexLusdPlus3Crv.model.ts b/src/model/generated/convexLusdPlus3Crv.model.ts new file mode 100644 index 00000000..0ab468ef --- /dev/null +++ b/src/model/generated/convexLusdPlus3Crv.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class ConvexLUSDPlus3Crv { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/exchangeRate.model.ts b/src/model/generated/exchangeRate.model.ts index aea649a0..dd870021 100644 --- a/src/model/generated/exchangeRate.model.ts +++ b/src/model/generated/exchangeRate.model.ts @@ -1,6 +1,11 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" import * as marshal from "./marshal" +/** + * Any entity which has a price associated with it should have that price go in here. + * Prices can change very frequently and we don't want those changes on the same track + * as values which change less frequently. 
+ */ @Entity_() export class ExchangeRate { constructor(props?: Partial) { diff --git a/src/model/generated/index.ts b/src/model/generated/index.ts index 970c08cf..421100fd 100644 --- a/src/model/generated/index.ts +++ b/src/model/generated/index.ts @@ -1,15 +1,36 @@ +export * from "./exchangeRate.model" export * from "./oeth.model" -export * from "./address.model" -export * from "./_rebasingOption" -export * from "./history.model" -export * from "./_historyType" -export * from "./rebase.model" -export * from "./rebaseOption.model" -export * from "./apy.model" -export * from "./vault.model" -export * from "./curveLp.model" -export * from "./fraxStaking.model" -export * from "./morphoAave.model" +export * from "./oethAddress.model" +export * from "./_oethRebasingOption" +export * from "./oethHistory.model" +export * from "./_oethHistoryType" +export * from "./oethRebase.model" +export * from "./oethRebaseOption.model" +export * from "./oethapy.model" +export * from "./oethVault.model" +export * from "./oethCurveLp.model" +export * from "./oethFraxStaking.model" +export * from "./oethMorphoAave.model" export * from "./dripper.model" -export * from "./balancerMetaPoolStrategy.model" -export * from "./exchangeRate.model" +export * from "./oethBalancerMetaPoolStrategy.model" +export * from "./ogv.model" +export * from "./stakedOgv.model" +export * from "./ogvGovernance.model" +export * from "./ousd.model" +export * from "./ousdAddress.model" +export * from "./_ousdRebasingOption" +export * from "./ousdHistory.model" +export * from "./_ousdHistoryType" +export * from "./ousdRebase.model" +export * from "./ousdRebaseOption.model" +export * from "./ousdapy.model" +export * from "./ousdVault.model" +export * from "./ousdMorphoAave.model" +export * from "./ousdMorphoCompound.model" +export * from "./makerDsrStrategy.model" +export * from "./ousdFluxStrategy.model" +export * from "./ousdCompoundStrategy.model" +export * from "./ousdConvexStrategy.model" +export * from "./ousdAaveStrategy.model" +export * from "./ousdMetaStrategy.model" +export * from "./convexLusdPlus3Crv.model" diff --git a/src/model/generated/makerDsrStrategy.model.ts b/src/model/generated/makerDsrStrategy.model.ts new file mode 100644 index 00000000..7c2cbda6 --- /dev/null +++ b/src/model/generated/makerDsrStrategy.model.ts @@ -0,0 +1,23 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class MakerDSRStrategy { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint +} diff --git a/src/model/generated/oeth.model.ts b/src/model/generated/oeth.model.ts index b9bd8a4e..a9976c0c 100644 --- a/src/model/generated/oeth.model.ts +++ b/src/model/generated/oeth.model.ts @@ -21,9 +21,6 @@ export class OETH { @Column_("int4", {nullable: false}) blockNumber!: number - /** - * The total supply of OETH tokens at the corresponding block. 
- */ @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) totalSupply!: bigint diff --git a/src/model/generated/address.model.ts b/src/model/generated/oethAddress.model.ts similarity index 60% rename from src/model/generated/address.model.ts rename to src/model/generated/oethAddress.model.ts index 162bb34d..bb88eb47 100644 --- a/src/model/generated/address.model.ts +++ b/src/model/generated/oethAddress.model.ts @@ -1,14 +1,14 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, OneToMany as OneToMany_} from "typeorm" import * as marshal from "./marshal" -import {RebasingOption} from "./_rebasingOption" -import {History} from "./history.model" +import {OETHRebasingOption} from "./_oethRebasingOption" +import {OETHHistory} from "./oethHistory.model" /** * The OETH balance, history and other information for a given address. */ @Entity_() -export class Address { - constructor(props?: Partial
) { +export class OETHAddress { + constructor(props?: Partial) { Object.assign(this, props) } @@ -18,33 +18,21 @@ export class Address { @Column_("bool", {nullable: false}) isContract!: boolean - /** - * Is the address opted in our out of yield. - */ @Column_("varchar", {length: 6, nullable: false}) - rebasingOption!: RebasingOption + rebasingOption!: OETHRebasingOption - /** - * The current balance of OETH held by the address. - */ @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) balance!: bigint - /** - * The total amount of OETH earned by the address. - */ @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) earned!: bigint @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) credits!: bigint - /** - * The last time the address information was updated. - */ @Column_("timestamp with time zone", {nullable: false}) lastUpdated!: Date - @OneToMany_(() => History, e => e.address) - history!: History[] + @OneToMany_(() => OETHHistory, e => e.address) + history!: OETHHistory[] } diff --git a/src/model/generated/balancerMetaPoolStrategy.model.ts b/src/model/generated/oethBalancerMetaPoolStrategy.model.ts similarity index 86% rename from src/model/generated/balancerMetaPoolStrategy.model.ts rename to src/model/generated/oethBalancerMetaPoolStrategy.model.ts index 0d6d3ada..f16bb0ad 100644 --- a/src/model/generated/balancerMetaPoolStrategy.model.ts +++ b/src/model/generated/oethBalancerMetaPoolStrategy.model.ts @@ -2,8 +2,8 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, I import * as marshal from "./marshal" @Entity_() -export class BalancerMetaPoolStrategy { - constructor(props?: Partial) { +export class OETHBalancerMetaPoolStrategy { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/curveLp.model.ts b/src/model/generated/oethCurveLp.model.ts similarity index 93% rename from src/model/generated/curveLp.model.ts rename to src/model/generated/oethCurveLp.model.ts index 37aef4fe..18f5d689 100644 --- a/src/model/generated/curveLp.model.ts +++ b/src/model/generated/oethCurveLp.model.ts @@ -2,8 +2,8 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, I import * as marshal from "./marshal" @Entity_() -export class CurveLP { - constructor(props?: Partial) { +export class OETHCurveLP { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/fraxStaking.model.ts b/src/model/generated/oethFraxStaking.model.ts similarity index 88% rename from src/model/generated/fraxStaking.model.ts rename to src/model/generated/oethFraxStaking.model.ts index f52790e7..7e4d574a 100644 --- a/src/model/generated/fraxStaking.model.ts +++ b/src/model/generated/oethFraxStaking.model.ts @@ -2,8 +2,8 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, I import * as marshal from "./marshal" @Entity_() -export class FraxStaking { - constructor(props?: Partial) { +export class OETHFraxStaking { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/history.model.ts b/src/model/generated/oethHistory.model.ts similarity index 75% rename from src/model/generated/history.model.ts rename to src/model/generated/oethHistory.model.ts index ec9345e4..6b25b972 100644 --- a/src/model/generated/history.model.ts +++ b/src/model/generated/oethHistory.model.ts @@ -1,14 +1,14 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as 
PrimaryColumn_, ManyToOne as ManyToOne_, Index as Index_} from "typeorm" import * as marshal from "./marshal" -import {Address} from "./address.model" -import {HistoryType} from "./_historyType" +import {OETHAddress} from "./oethAddress.model" +import {OETHHistoryType} from "./_oethHistoryType" /** * The History entity tracks events that change the balance of OETH for an address. */ @Entity_() -export class History { - constructor(props?: Partial) { +export class OETHHistory { + constructor(props?: Partial) { Object.assign(this, props) } @@ -16,8 +16,8 @@ export class History { id!: string @Index_() - @ManyToOne_(() => Address, {nullable: true}) - address!: Address + @ManyToOne_(() => OETHAddress, {nullable: true}) + address!: OETHAddress @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) value!: bigint @@ -37,5 +37,5 @@ export class History { txHash!: string @Column_("varchar", {length: 8, nullable: false}) - type!: HistoryType + type!: OETHHistoryType } diff --git a/src/model/generated/morphoAave.model.ts b/src/model/generated/oethMorphoAave.model.ts similarity index 86% rename from src/model/generated/morphoAave.model.ts rename to src/model/generated/oethMorphoAave.model.ts index 7361cae4..23394c8c 100644 --- a/src/model/generated/morphoAave.model.ts +++ b/src/model/generated/oethMorphoAave.model.ts @@ -2,8 +2,8 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, I import * as marshal from "./marshal" @Entity_() -export class MorphoAave { - constructor(props?: Partial) { +export class OETHMorphoAave { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/rebase.model.ts b/src/model/generated/oethRebase.model.ts similarity index 86% rename from src/model/generated/rebase.model.ts rename to src/model/generated/oethRebase.model.ts index d4561fdc..126e8ad5 100644 --- a/src/model/generated/rebase.model.ts +++ b/src/model/generated/oethRebase.model.ts @@ -1,13 +1,13 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_, ManyToOne as ManyToOne_} from "typeorm" import * as marshal from "./marshal" -import {APY} from "./apy.model" +import {OETHAPY} from "./oethapy.model" /** * The Rebase entity tracks historical rebase events on the OETH contract. 
*/ @Entity_() -export class Rebase { - constructor(props?: Partial) { +export class OETHRebase { + constructor(props?: Partial) { Object.assign(this, props) } @@ -36,8 +36,8 @@ export class Rebase { rebasingCreditsPerToken!: bigint @Index_() - @ManyToOne_(() => APY, {nullable: true}) - apy!: APY + @ManyToOne_(() => OETHAPY, {nullable: true}) + apy!: OETHAPY @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) fee!: bigint diff --git a/src/model/generated/rebaseOption.model.ts b/src/model/generated/oethRebaseOption.model.ts similarity index 67% rename from src/model/generated/rebaseOption.model.ts rename to src/model/generated/oethRebaseOption.model.ts index 68946e18..b454a93a 100644 --- a/src/model/generated/rebaseOption.model.ts +++ b/src/model/generated/oethRebaseOption.model.ts @@ -1,13 +1,13 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_, ManyToOne as ManyToOne_} from "typeorm" -import {Address} from "./address.model" -import {RebasingOption} from "./_rebasingOption" +import {OETHAddress} from "./oethAddress.model" +import {OETHRebasingOption} from "./_oethRebasingOption" /** * The RebaseOption entity tracks historical rebase option changes by address. */ @Entity_() -export class RebaseOption { - constructor(props?: Partial) { +export class OETHRebaseOption { + constructor(props?: Partial) { Object.assign(this, props) } @@ -27,9 +27,9 @@ export class RebaseOption { txHash!: string @Index_() - @ManyToOne_(() => Address, {nullable: true}) - address!: Address + @ManyToOne_(() => OETHAddress, {nullable: true}) + address!: OETHAddress @Column_("varchar", {length: 6, nullable: false}) - status!: RebasingOption + status!: OETHRebasingOption } diff --git a/src/model/generated/vault.model.ts b/src/model/generated/oethVault.model.ts similarity index 92% rename from src/model/generated/vault.model.ts rename to src/model/generated/oethVault.model.ts index 34b103e2..c2853319 100644 --- a/src/model/generated/vault.model.ts +++ b/src/model/generated/oethVault.model.ts @@ -5,8 +5,8 @@ import * as marshal from "./marshal" * The Vault entity tracks the OETH vault balance over time. */ @Entity_() -export class Vault { - constructor(props?: Partial) { +export class OETHVault { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/apy.model.ts b/src/model/generated/oethapy.model.ts similarity index 94% rename from src/model/generated/apy.model.ts rename to src/model/generated/oethapy.model.ts index e937b5d0..0bc0e4ae 100644 --- a/src/model/generated/apy.model.ts +++ b/src/model/generated/oethapy.model.ts @@ -5,8 +5,8 @@ import * as marshal from "./marshal" * The APY entity tracks historical APY values by day. 
*/ @Entity_() -export class APY { - constructor(props?: Partial) { +export class OETHAPY { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/ogv.model.ts b/src/model/generated/ogv.model.ts new file mode 100644 index 00000000..719425ee --- /dev/null +++ b/src/model/generated/ogv.model.ts @@ -0,0 +1,26 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OGV { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + circulating!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + total!: bigint +} diff --git a/src/model/generated/ogvGovernance.model.ts b/src/model/generated/ogvGovernance.model.ts new file mode 100644 index 00000000..e3e14b3d --- /dev/null +++ b/src/model/generated/ogvGovernance.model.ts @@ -0,0 +1,28 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" + +@Entity_() +export class OGVGovernance { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("int4", {nullable: false}) + registeredVoters!: number + + @Column_("int4", {nullable: false}) + openSourceContributors!: number + + @Column_("int4", {nullable: false}) + improvementProposals!: number +} diff --git a/src/model/generated/ousd.model.ts b/src/model/generated/ousd.model.ts new file mode 100644 index 00000000..a9a3cd10 --- /dev/null +++ b/src/model/generated/ousd.model.ts @@ -0,0 +1,32 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +/** + * The OUSD entity tracks the change in total supply of OUSD over time. 
+ */ +@Entity_() +export class OUSD { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + totalSupply!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + rebasingSupply!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + nonRebasingSupply!: bigint +} diff --git a/src/model/generated/ousdAaveStrategy.model.ts b/src/model/generated/ousdAaveStrategy.model.ts new file mode 100644 index 00000000..31caa64c --- /dev/null +++ b/src/model/generated/ousdAaveStrategy.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDAaveStrategy { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdAddress.model.ts b/src/model/generated/ousdAddress.model.ts new file mode 100644 index 00000000..65fcc98a --- /dev/null +++ b/src/model/generated/ousdAddress.model.ts @@ -0,0 +1,38 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, OneToMany as OneToMany_} from "typeorm" +import * as marshal from "./marshal" +import {OUSDRebasingOption} from "./_ousdRebasingOption" +import {OUSDHistory} from "./ousdHistory.model" + +/** + * The OUSD balance, history and other information for a given address. 
+ */ +@Entity_() +export class OUSDAddress { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Column_("bool", {nullable: false}) + isContract!: boolean + + @Column_("varchar", {length: 6, nullable: false}) + rebasingOption!: OUSDRebasingOption + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + balance!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + earned!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + credits!: bigint + + @Column_("timestamp with time zone", {nullable: false}) + lastUpdated!: Date + + @OneToMany_(() => OUSDHistory, e => e.address) + history!: OUSDHistory[] +} diff --git a/src/model/generated/ousdCompoundStrategy.model.ts b/src/model/generated/ousdCompoundStrategy.model.ts new file mode 100644 index 00000000..f1ea185e --- /dev/null +++ b/src/model/generated/ousdCompoundStrategy.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDCompoundStrategy { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdConvexStrategy.model.ts b/src/model/generated/ousdConvexStrategy.model.ts new file mode 100644 index 00000000..534a3108 --- /dev/null +++ b/src/model/generated/ousdConvexStrategy.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDConvexStrategy { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdFluxStrategy.model.ts b/src/model/generated/ousdFluxStrategy.model.ts new file mode 100644 index 00000000..8049aeb9 --- /dev/null +++ b/src/model/generated/ousdFluxStrategy.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDFluxStrategy { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: 
marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdHistory.model.ts b/src/model/generated/ousdHistory.model.ts new file mode 100644 index 00000000..8278c429 --- /dev/null +++ b/src/model/generated/ousdHistory.model.ts @@ -0,0 +1,41 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, ManyToOne as ManyToOne_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" +import {OUSDAddress} from "./ousdAddress.model" +import {OUSDHistoryType} from "./_ousdHistoryType" + +/** + * The History entity tracks events that change the balance of OUSD for an address. + */ +@Entity_() +export class OUSDHistory { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @ManyToOne_(() => OUSDAddress, {nullable: true}) + address!: OUSDAddress + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + value!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + balance!: bigint + + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Index_() + @Column_("text", {nullable: false}) + txHash!: string + + @Column_("varchar", {length: 8, nullable: false}) + type!: OUSDHistoryType +} diff --git a/src/model/generated/ousdMetaStrategy.model.ts b/src/model/generated/ousdMetaStrategy.model.ts new file mode 100644 index 00000000..f239e953 --- /dev/null +++ b/src/model/generated/ousdMetaStrategy.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDMetaStrategy { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdMorphoAave.model.ts b/src/model/generated/ousdMorphoAave.model.ts new file mode 100644 index 00000000..eff939e2 --- /dev/null +++ b/src/model/generated/ousdMorphoAave.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDMorphoAave { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, 
nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdMorphoCompound.model.ts b/src/model/generated/ousdMorphoCompound.model.ts new file mode 100644 index 00000000..6b6bae91 --- /dev/null +++ b/src/model/generated/ousdMorphoCompound.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class OUSDMorphoCompound { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdRebase.model.ts b/src/model/generated/ousdRebase.model.ts new file mode 100644 index 00000000..524deea1 --- /dev/null +++ b/src/model/generated/ousdRebase.model.ts @@ -0,0 +1,47 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_, ManyToOne as ManyToOne_} from "typeorm" +import * as marshal from "./marshal" +import {OUSDAPY} from "./ousdapy.model" + +/** + * The Rebase entity tracks historical rebase events on the OUSD contract. + */ +@Entity_() +export class OUSDRebase { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Index_() + @Column_("text", {nullable: false}) + txHash!: string + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + totalSupply!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + rebasingCredits!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + rebasingCreditsPerToken!: bigint + + @Index_() + @ManyToOne_(() => OUSDAPY, {nullable: true}) + apy!: OUSDAPY + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + fee!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + yield!: bigint +} diff --git a/src/model/generated/ousdRebaseOption.model.ts b/src/model/generated/ousdRebaseOption.model.ts new file mode 100644 index 00000000..1aa6e022 --- /dev/null +++ b/src/model/generated/ousdRebaseOption.model.ts @@ -0,0 +1,35 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_, ManyToOne as ManyToOne_} from "typeorm" +import {OUSDAddress} from "./ousdAddress.model" +import {OUSDRebasingOption} from "./_ousdRebasingOption" + +/** + * The RebaseOption entity tracks historical rebase option changes by address. 
+ */ +@Entity_() +export class OUSDRebaseOption { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Index_() + @Column_("text", {nullable: false}) + txHash!: string + + @Index_() + @ManyToOne_(() => OUSDAddress, {nullable: true}) + address!: OUSDAddress + + @Column_("varchar", {length: 6, nullable: false}) + status!: OUSDRebasingOption +} diff --git a/src/model/generated/ousdVault.model.ts b/src/model/generated/ousdVault.model.ts new file mode 100644 index 00000000..21d5378d --- /dev/null +++ b/src/model/generated/ousdVault.model.ts @@ -0,0 +1,32 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +/** + * The Vault entity tracks the OUSD vault balance over time. + */ +@Entity_() +export class OUSDVault { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + dai!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdt!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + usdc!: bigint +} diff --git a/src/model/generated/ousdapy.model.ts b/src/model/generated/ousdapy.model.ts new file mode 100644 index 00000000..807b63a6 --- /dev/null +++ b/src/model/generated/ousdapy.model.ts @@ -0,0 +1,45 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +/** + * The APY entity tracks historical APY values by day. 
+ */ +@Entity_() +export class OUSDAPY { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Index_() + @Column_("text", {nullable: false}) + txHash!: string + + @Column_("numeric", {transformer: marshal.floatTransformer, nullable: false}) + apr!: number + + @Column_("numeric", {transformer: marshal.floatTransformer, nullable: false}) + apy!: number + + @Column_("numeric", {transformer: marshal.floatTransformer, nullable: false}) + apy7DayAvg!: number + + @Column_("numeric", {transformer: marshal.floatTransformer, nullable: false}) + apy14DayAvg!: number + + @Column_("numeric", {transformer: marshal.floatTransformer, nullable: false}) + apy30DayAvg!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + rebasingCreditsPerToken!: bigint +} diff --git a/src/model/generated/stakedOgv.model.ts b/src/model/generated/stakedOgv.model.ts new file mode 100644 index 00000000..91f16a3f --- /dev/null +++ b/src/model/generated/stakedOgv.model.ts @@ -0,0 +1,26 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class StakedOGV { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + total!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + apy!: bigint +} From 719b1dfd7332328764119001828ddd1411084d62 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Thu, 12 Oct 2023 13:14:18 -0700 Subject: [PATCH 02/13] feat: data requirements thoughts --- graphql.config.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/graphql.config.yml b/graphql.config.yml index bccf0b3a..376860dd 100644 --- a/graphql.config.yml +++ b/graphql.config.yml @@ -11,4 +11,3 @@ projects: schema: schema-ogv.graphql include: - types.graphql - - types-otoken.graphql \ No newline at end of file From 0a8182fbc1e5b5082f6b0f43c11864bc3acaf332 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Thu, 12 Oct 2023 13:22:29 -0700 Subject: [PATCH 03/13] feat: data requirements thoughts --- schema-ousd.graphql | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/schema-ousd.graphql b/schema-ousd.graphql index a703b50b..3d41eaf9 100644 --- a/schema-ousd.graphql +++ b/schema-ousd.graphql @@ -178,7 +178,7 @@ type OUSDMetaStrategy @entity { usdc: BigInt! } -type ConvexLUSDPlus3Crv @entity { +type OUSDConvexLUSDPlus3Crv @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! 
@index From 579290875905978abdbee5fe1c72c35a35bbf418 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Sat, 14 Oct 2023 10:20:11 -0700 Subject: [PATCH 04/13] feat: data requirements thoughts test creation of OToken processing logic template for reuse in OETH and OUSD processors --- abi/otoken-1.json | 199 +++++++++ abi/{oeth-vault.json => otoken-vault.json} | 0 abi/{oeth.json => otoken.json} | 0 db/migrations/1696961254406-Data.js | 101 ----- db/migrations/1697301515198-Data.js | 229 +++++++++++ docs/REQUIREMENTS.md | 160 ++++++++ graphql.config.yml | 11 + schema-base.graphql | 12 + schema-oeth.graphql | 18 +- schema-ousd.graphql | 18 +- schema.graphql | 51 +-- src/abi/otoken-1.abi.ts | 205 +++++++++ src/abi/otoken-1.ts | 94 +++++ ...{oeth-vault.abi.ts => otoken-vault.abi.ts} | 0 src/abi/{oeth-vault.ts => otoken-vault.ts} | 2 +- src/abi/{oeth.abi.ts => otoken.abi.ts} | 0 src/abi/{oeth.ts => otoken.ts} | 2 +- src/main.ts | 18 +- .../{_ousdHistoryType.ts => _historyType.ts} | 2 +- src/model/generated/_oethHistoryType.ts | 6 - src/model/generated/_ousdRebasingOption.ts | 4 - ...thRebasingOption.ts => _rebasingOption.ts} | 2 +- src/model/generated/index.ts | 8 +- src/model/generated/oethAddress.model.ts | 4 +- src/model/generated/oethHistory.model.ts | 4 +- src/model/generated/oethRebaseOption.model.ts | 4 +- src/model/generated/ousdAddress.model.ts | 4 +- ...del.ts => ousdConvexLusdPlus3Crv.model.ts} | 4 +- src/model/generated/ousdHistory.model.ts | 4 +- src/model/generated/ousdRebaseOption.model.ts | 4 +- src/processor-templates/otoken/index.ts | 1 + src/processor-templates/otoken/otoken.ts | 388 ++++++++++++++++++ .../otoken}/utils.ts | 45 +- src/processor.ts | 14 + src/processors/curve-lp/curve-lp.ts | 8 +- src/processors/frax-staking/frax-staking.ts | 8 +- src/processors/morpho-aave/morpho-aave.ts | 8 +- src/processors/oeth/oeth.ts | 362 ++-------------- src/processors/ousd/index.ts | 1 + src/processors/ousd/ousd.ts | 56 +++ .../strategies/balancer-meta-pool.ts | 8 +- src/processors/vault/vault.ts | 14 +- src/utils/addresses.ts | 3 + types.graphql | 5 +- 44 files changed, 1509 insertions(+), 582 deletions(-) create mode 100644 abi/otoken-1.json rename abi/{oeth-vault.json => otoken-vault.json} (100%) rename abi/{oeth.json => otoken.json} (100%) delete mode 100644 db/migrations/1696961254406-Data.js create mode 100644 db/migrations/1697301515198-Data.js create mode 100644 src/abi/otoken-1.abi.ts create mode 100644 src/abi/otoken-1.ts rename src/abi/{oeth-vault.abi.ts => otoken-vault.abi.ts} (100%) rename src/abi/{oeth-vault.ts => otoken-vault.ts} (99%) rename src/abi/{oeth.abi.ts => otoken.abi.ts} (100%) rename src/abi/{oeth.ts => otoken.ts} (99%) rename src/model/generated/{_ousdHistoryType.ts => _historyType.ts} (74%) delete mode 100644 src/model/generated/_oethHistoryType.ts delete mode 100644 src/model/generated/_ousdRebasingOption.ts rename src/model/generated/{_oethRebasingOption.ts => _rebasingOption.ts} (58%) rename src/model/generated/{convexLusdPlus3Crv.model.ts => ousdConvexLusdPlus3Crv.model.ts} (88%) create mode 100644 src/processor-templates/otoken/index.ts create mode 100644 src/processor-templates/otoken/otoken.ts rename src/{processors/oeth => processor-templates/otoken}/utils.ts (81%) create mode 100644 src/processors/ousd/index.ts create mode 100644 src/processors/ousd/ousd.ts diff --git a/abi/otoken-1.json b/abi/otoken-1.json new file mode 100644 index 00000000..3a46600b --- /dev/null +++ b/abi/otoken-1.json @@ -0,0 +1,199 @@ +[ + { + "inputs": [], + "name": 
"_totalSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "_account", + "type": "address" + } + ], + "name": "creditsBalanceOfHighres", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + }, + { + "internalType": "uint256", + "name": "", + "type": "uint256" + }, + { + "internalType": "bool", + "name": "", + "type": "bool" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "isUpgraded", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "nonRebasingCreditsPerToken", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "nonRebasingSupply", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "name": "rebaseState", + "outputs": [ + { + "internalType": "enum OUSDResolutionUpgrade.RebaseOptions", + "name": "", + "type": "uint8" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "rebasingCredits", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "rebasingCreditsHighres", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "rebasingCreditsPerToken", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [], + "name": "rebasingCreditsPerTokenHighres", + "outputs": [ + { + "internalType": "uint256", + "name": "", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address[]", + "name": "accounts", + "type": "address[]" + } + ], + "name": "upgradeAccounts", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "upgradeGlobals", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [], + "name": "vaultAddress", + "outputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + } + ], + "stateMutability": "view", + "type": "function" + } +] \ No newline at end of file diff --git a/abi/oeth-vault.json b/abi/otoken-vault.json similarity index 100% rename from abi/oeth-vault.json rename to abi/otoken-vault.json diff --git a/abi/oeth.json b/abi/otoken.json similarity index 100% rename from abi/oeth.json rename to abi/otoken.json diff --git a/db/migrations/1696961254406-Data.js b/db/migrations/1696961254406-Data.js deleted file mode 100644 index 9e7da8af..00000000 --- a/db/migrations/1696961254406-Data.js +++ /dev/null @@ -1,101 +0,0 @@ -module.exports = class Data1696961254406 { - name = 'Data1696961254406' - - async 
up(db) { - await db.query(`CREATE TABLE "oeth" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total_supply" numeric NOT NULL, "rebasing_supply" numeric NOT NULL, "non_rebasing_supply" numeric NOT NULL, CONSTRAINT "PK_de1d885501070dbd1ab6f8577ba" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_5b81a67229bac2d68e0dc92cc4" ON "oeth" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_408e5f79f83093aa5cf2b0ea32" ON "oeth" ("block_number") `) - await db.query(`CREATE TABLE "history" ("id" character varying NOT NULL, "value" numeric NOT NULL, "balance" numeric NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "type" character varying(8) NOT NULL, "address_id" character varying, CONSTRAINT "PK_9384942edf4804b38ca0ee51416" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_59a55adcc59ddb69c297da693e" ON "history" ("address_id") `) - await db.query(`CREATE INDEX "IDX_7a259431108a22e8ca2f375fc7" ON "history" ("block_number") `) - await db.query(`CREATE INDEX "IDX_1b82c15d87635d95eaa4dd42ec" ON "history" ("tx_hash") `) - await db.query(`CREATE TABLE "address" ("id" character varying NOT NULL, "is_contract" boolean NOT NULL, "rebasing_option" character varying(6) NOT NULL, "balance" numeric NOT NULL, "earned" numeric NOT NULL, "credits" numeric NOT NULL, "last_updated" TIMESTAMP WITH TIME ZONE NOT NULL, CONSTRAINT "PK_d92de1f82754668b5f5f5dd4fd5" PRIMARY KEY ("id"))`) - await db.query(`CREATE TABLE "apy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "apr" numeric NOT NULL, "apy" numeric NOT NULL, "apy7_day_avg" numeric NOT NULL, "apy14_day_avg" numeric NOT NULL, "apy30_day_avg" numeric NOT NULL, "rebasing_credits_per_token" numeric NOT NULL, CONSTRAINT "PK_7826924ff9c029af7533753f6af" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_1f069a908b679be0b5fbc0b2e6" ON "apy" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_7fb752652a983d6629a722ae7a" ON "apy" ("block_number") `) - await db.query(`CREATE INDEX "IDX_d1165411d71160d1230073d0fa" ON "apy" ("tx_hash") `) - await db.query(`CREATE TABLE "rebase" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "total_supply" numeric NOT NULL, "rebasing_credits" numeric NOT NULL, "rebasing_credits_per_token" numeric NOT NULL, "fee" numeric NOT NULL, "yield" numeric NOT NULL, "apy_id" character varying, CONSTRAINT "PK_cadd381a400a7e41b538c788d13" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_c308a9ecd3d05b0c45e7c60d10" ON "rebase" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_a5955dbd9ac031314697cbd54f" ON "rebase" ("block_number") `) - await db.query(`CREATE INDEX "IDX_7cd793b6c4bc15b9082e0eb97a" ON "rebase" ("tx_hash") `) - await db.query(`CREATE INDEX "IDX_02d02f9022ef86e60f1a84b9dc" ON "rebase" ("apy_id") `) - await db.query(`CREATE TABLE "rebase_option" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "status" character varying(6) NOT NULL, "address_id" character varying, CONSTRAINT "PK_426a38c91faad05465d687740ea" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_751e6b6352737ddf4f0da0d6d8" ON "rebase_option" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_e59c9a534fcf23a97a8ac92afa" ON 
"rebase_option" ("block_number") `) - await db.query(`CREATE INDEX "IDX_6b6c08ec25dacd1a5bd6170152" ON "rebase_option" ("tx_hash") `) - await db.query(`CREATE INDEX "IDX_66c04aee6855c74debae4add8f" ON "rebase_option" ("address_id") `) - await db.query(`CREATE TABLE "vault" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "weth" numeric NOT NULL, "st_eth" numeric NOT NULL, "r_eth" numeric NOT NULL, "frx_eth" numeric NOT NULL, CONSTRAINT "PK_dd0898234c77f9d97585171ac59" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_0f1a5b7e346813a4ec3a03010b" ON "vault" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_a9b314451a9001a7b0a222f68a" ON "vault" ("block_number") `) - await db.query(`CREATE TABLE "curve_lp" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total_supply" numeric NOT NULL, "eth" numeric NOT NULL, "oeth" numeric NOT NULL, "total_supply_owned" numeric NOT NULL, "eth_owned" numeric NOT NULL, "oeth_owned" numeric NOT NULL, CONSTRAINT "PK_78cd36e42a49ac6ca38349e93ca" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_330db36ce24d451bd225362497" ON "curve_lp" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_21dec6975ac5df4cb0cf36a117" ON "curve_lp" ("block_number") `) - await db.query(`CREATE TABLE "frax_staking" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "frx_eth" numeric NOT NULL, CONSTRAINT "PK_8e4f242a30dc9aa67ce89dd9011" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_c40e57574ecb23502fa6755b03" ON "frax_staking" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_ac105b3fae6f14114535b8d0e2" ON "frax_staking" ("block_number") `) - await db.query(`CREATE TABLE "morpho_aave" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "weth" numeric NOT NULL, CONSTRAINT "PK_8b9569518db5529db65205aaafe" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_3570ea91a91129f64a38665d39" ON "morpho_aave" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_1263cc804aa44983b8f146c2c4" ON "morpho_aave" ("block_number") `) - await db.query(`CREATE TABLE "dripper" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "weth" numeric NOT NULL, CONSTRAINT "PK_74fd102c8d1c60f4b1650a61ffc" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_88c58f8948c3294c2a9e2791dc" ON "dripper" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_06822c0a260797711acc9023d5" ON "dripper" ("block_number") `) - await db.query(`CREATE TABLE "balancer_meta_pool_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total" numeric NOT NULL, "r_eth" numeric NOT NULL, "weth" numeric NOT NULL, CONSTRAINT "PK_45e940df650a615eee3b7d93551" PRIMARY KEY ("id"))`) - await db.query(`CREATE INDEX "IDX_7e32ebb0d5d950103ce8f14cf9" ON "balancer_meta_pool_strategy" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_0bc3af65ef5d204a326388b7ad" ON "balancer_meta_pool_strategy" ("block_number") `) - await db.query(`CREATE TABLE "exchange_rate" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "pair" text NOT NULL, "base" text NOT NULL, "quote" text NOT NULL, "rate" numeric NOT NULL, CONSTRAINT "PK_5c5d27d2b900ef6cdeef0398472" PRIMARY KEY ("id"))`) - 
await db.query(`CREATE INDEX "IDX_9e23a3f1bf3634820c873a0fe8" ON "exchange_rate" ("timestamp") `) - await db.query(`CREATE INDEX "IDX_c61a93768eed9e58ce399bbe01" ON "exchange_rate" ("block_number") `) - await db.query(`ALTER TABLE "history" ADD CONSTRAINT "FK_59a55adcc59ddb69c297da693e5" FOREIGN KEY ("address_id") REFERENCES "address"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) - await db.query(`ALTER TABLE "rebase" ADD CONSTRAINT "FK_02d02f9022ef86e60f1a84b9dc2" FOREIGN KEY ("apy_id") REFERENCES "apy"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) - await db.query(`ALTER TABLE "rebase_option" ADD CONSTRAINT "FK_66c04aee6855c74debae4add8fe" FOREIGN KEY ("address_id") REFERENCES "address"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) - } - - async down(db) { - await db.query(`DROP TABLE "oeth"`) - await db.query(`DROP INDEX "public"."IDX_5b81a67229bac2d68e0dc92cc4"`) - await db.query(`DROP INDEX "public"."IDX_408e5f79f83093aa5cf2b0ea32"`) - await db.query(`DROP TABLE "history"`) - await db.query(`DROP INDEX "public"."IDX_59a55adcc59ddb69c297da693e"`) - await db.query(`DROP INDEX "public"."IDX_7a259431108a22e8ca2f375fc7"`) - await db.query(`DROP INDEX "public"."IDX_1b82c15d87635d95eaa4dd42ec"`) - await db.query(`DROP TABLE "address"`) - await db.query(`DROP TABLE "apy"`) - await db.query(`DROP INDEX "public"."IDX_1f069a908b679be0b5fbc0b2e6"`) - await db.query(`DROP INDEX "public"."IDX_7fb752652a983d6629a722ae7a"`) - await db.query(`DROP INDEX "public"."IDX_d1165411d71160d1230073d0fa"`) - await db.query(`DROP TABLE "rebase"`) - await db.query(`DROP INDEX "public"."IDX_c308a9ecd3d05b0c45e7c60d10"`) - await db.query(`DROP INDEX "public"."IDX_a5955dbd9ac031314697cbd54f"`) - await db.query(`DROP INDEX "public"."IDX_7cd793b6c4bc15b9082e0eb97a"`) - await db.query(`DROP INDEX "public"."IDX_02d02f9022ef86e60f1a84b9dc"`) - await db.query(`DROP TABLE "rebase_option"`) - await db.query(`DROP INDEX "public"."IDX_751e6b6352737ddf4f0da0d6d8"`) - await db.query(`DROP INDEX "public"."IDX_e59c9a534fcf23a97a8ac92afa"`) - await db.query(`DROP INDEX "public"."IDX_6b6c08ec25dacd1a5bd6170152"`) - await db.query(`DROP INDEX "public"."IDX_66c04aee6855c74debae4add8f"`) - await db.query(`DROP TABLE "vault"`) - await db.query(`DROP INDEX "public"."IDX_0f1a5b7e346813a4ec3a03010b"`) - await db.query(`DROP INDEX "public"."IDX_a9b314451a9001a7b0a222f68a"`) - await db.query(`DROP TABLE "curve_lp"`) - await db.query(`DROP INDEX "public"."IDX_330db36ce24d451bd225362497"`) - await db.query(`DROP INDEX "public"."IDX_21dec6975ac5df4cb0cf36a117"`) - await db.query(`DROP TABLE "frax_staking"`) - await db.query(`DROP INDEX "public"."IDX_c40e57574ecb23502fa6755b03"`) - await db.query(`DROP INDEX "public"."IDX_ac105b3fae6f14114535b8d0e2"`) - await db.query(`DROP TABLE "morpho_aave"`) - await db.query(`DROP INDEX "public"."IDX_3570ea91a91129f64a38665d39"`) - await db.query(`DROP INDEX "public"."IDX_1263cc804aa44983b8f146c2c4"`) - await db.query(`DROP TABLE "dripper"`) - await db.query(`DROP INDEX "public"."IDX_88c58f8948c3294c2a9e2791dc"`) - await db.query(`DROP INDEX "public"."IDX_06822c0a260797711acc9023d5"`) - await db.query(`DROP TABLE "balancer_meta_pool_strategy"`) - await db.query(`DROP INDEX "public"."IDX_7e32ebb0d5d950103ce8f14cf9"`) - await db.query(`DROP INDEX "public"."IDX_0bc3af65ef5d204a326388b7ad"`) - await db.query(`DROP TABLE "exchange_rate"`) - await db.query(`DROP INDEX "public"."IDX_9e23a3f1bf3634820c873a0fe8"`) - await db.query(`DROP INDEX "public"."IDX_c61a93768eed9e58ce399bbe01"`) - await db.query(`ALTER 
TABLE "history" DROP CONSTRAINT "FK_59a55adcc59ddb69c297da693e5"`) - await db.query(`ALTER TABLE "rebase" DROP CONSTRAINT "FK_02d02f9022ef86e60f1a84b9dc2"`) - await db.query(`ALTER TABLE "rebase_option" DROP CONSTRAINT "FK_66c04aee6855c74debae4add8fe"`) - } -} diff --git a/db/migrations/1697301515198-Data.js b/db/migrations/1697301515198-Data.js new file mode 100644 index 00000000..09cba074 --- /dev/null +++ b/db/migrations/1697301515198-Data.js @@ -0,0 +1,229 @@ +module.exports = class Data1697301515198 { + name = 'Data1697301515198' + + async up(db) { + await db.query(`CREATE TABLE "exchange_rate" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "pair" text NOT NULL, "base" text NOT NULL, "quote" text NOT NULL, "rate" numeric NOT NULL, CONSTRAINT "PK_5c5d27d2b900ef6cdeef0398472" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_9e23a3f1bf3634820c873a0fe8" ON "exchange_rate" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_c61a93768eed9e58ce399bbe01" ON "exchange_rate" ("block_number") `) + await db.query(`CREATE TABLE "oeth" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total_supply" numeric NOT NULL, "rebasing_supply" numeric NOT NULL, "non_rebasing_supply" numeric NOT NULL, CONSTRAINT "PK_de1d885501070dbd1ab6f8577ba" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_5b81a67229bac2d68e0dc92cc4" ON "oeth" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_408e5f79f83093aa5cf2b0ea32" ON "oeth" ("block_number") `) + await db.query(`CREATE TABLE "oeth_history" ("id" character varying NOT NULL, "value" numeric NOT NULL, "balance" numeric NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "type" character varying(8) NOT NULL, "address_id" character varying, CONSTRAINT "PK_2c7e7571cd9ea02b07a27a303f3" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_94e47c4c49128c78f60b185b46" ON "oeth_history" ("address_id") `) + await db.query(`CREATE INDEX "IDX_96956b1c8d29eb7066a97d5ea7" ON "oeth_history" ("block_number") `) + await db.query(`CREATE INDEX "IDX_b14170bdb7fbc0775bf55df15d" ON "oeth_history" ("tx_hash") `) + await db.query(`CREATE TABLE "oeth_address" ("id" character varying NOT NULL, "is_contract" boolean NOT NULL, "rebasing_option" character varying(6) NOT NULL, "balance" numeric NOT NULL, "earned" numeric NOT NULL, "credits" numeric NOT NULL, "last_updated" TIMESTAMP WITH TIME ZONE NOT NULL, CONSTRAINT "PK_92a966afe47d584af73ce77a1cd" PRIMARY KEY ("id"))`) + await db.query(`CREATE TABLE "oethapy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "apr" numeric NOT NULL, "apy" numeric NOT NULL, "apy7_day_avg" numeric NOT NULL, "apy14_day_avg" numeric NOT NULL, "apy30_day_avg" numeric NOT NULL, "rebasing_credits_per_token" numeric NOT NULL, CONSTRAINT "PK_8dbb4d04591848361200f18f62a" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_b1a448045d1ed9d655b679a371" ON "oethapy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_6b8a7a706a0701e659a7d81508" ON "oethapy" ("block_number") `) + await db.query(`CREATE INDEX "IDX_c0c03168bb0139e3cffda4f00e" ON "oethapy" ("tx_hash") `) + await db.query(`CREATE TABLE "oeth_rebase" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "total_supply" 
numeric NOT NULL, "rebasing_credits" numeric NOT NULL, "rebasing_credits_per_token" numeric NOT NULL, "fee" numeric NOT NULL, "yield" numeric NOT NULL, "apy_id" character varying, CONSTRAINT "PK_5f8f4dd071caf685b4ac2d54de3" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_fbb7b3f2fff9896eb683b86de7" ON "oeth_rebase" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_d3255d02d9407bba89380d01fa" ON "oeth_rebase" ("block_number") `) + await db.query(`CREATE INDEX "IDX_8b6bb0243472af88612fe6a01f" ON "oeth_rebase" ("tx_hash") `) + await db.query(`CREATE INDEX "IDX_3331819842173de7c27c046547" ON "oeth_rebase" ("apy_id") `) + await db.query(`CREATE TABLE "oeth_rebase_option" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "status" character varying(6) NOT NULL, "address_id" character varying, CONSTRAINT "PK_32971725d5523200b4b3b7c07e5" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_1fc6bbd88037bfbf4361776909" ON "oeth_rebase_option" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_cbb7ceb49ef7c45432d0171296" ON "oeth_rebase_option" ("block_number") `) + await db.query(`CREATE INDEX "IDX_355826dadaacc5ae2d63c82f28" ON "oeth_rebase_option" ("tx_hash") `) + await db.query(`CREATE INDEX "IDX_034428879698039839b4ba6ffe" ON "oeth_rebase_option" ("address_id") `) + await db.query(`CREATE TABLE "oeth_vault" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "weth" numeric NOT NULL, "st_eth" numeric NOT NULL, "r_eth" numeric NOT NULL, "frx_eth" numeric NOT NULL, CONSTRAINT "PK_9debaa84944fe2be9dc4219ba8f" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_d6298a294864b4eaf793cf35a4" ON "oeth_vault" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_e20cb507a673817b2c68720415" ON "oeth_vault" ("block_number") `) + await db.query(`CREATE TABLE "oeth_curve_lp" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total_supply" numeric NOT NULL, "eth" numeric NOT NULL, "oeth" numeric NOT NULL, "total_supply_owned" numeric NOT NULL, "eth_owned" numeric NOT NULL, "oeth_owned" numeric NOT NULL, CONSTRAINT "PK_2b055044664e80f44d6172fdf54" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_d9bbd20e888fa1b4b2c5d2f039" ON "oeth_curve_lp" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_7617d593c36dce1b1565a8d74a" ON "oeth_curve_lp" ("block_number") `) + await db.query(`CREATE TABLE "oeth_frax_staking" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "frx_eth" numeric NOT NULL, CONSTRAINT "PK_694f53c8600ae88c7bdcf7305dd" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_ce6c2c65e90967dfeaac97025b" ON "oeth_frax_staking" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_1a7f7d650390e2f9c212651e05" ON "oeth_frax_staking" ("block_number") `) + await db.query(`CREATE TABLE "oeth_morpho_aave" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "weth" numeric NOT NULL, CONSTRAINT "PK_86de8f846e9335c92b8ad7df3a1" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_25e239b985844f1d33fac79981" ON "oeth_morpho_aave" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_a6662224e95eb6921bb14cb5f9" ON "oeth_morpho_aave" ("block_number") `) + await db.query(`CREATE TABLE "dripper" ("id" character varying NOT NULL, "timestamp" 
TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "weth" numeric NOT NULL, CONSTRAINT "PK_74fd102c8d1c60f4b1650a61ffc" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_88c58f8948c3294c2a9e2791dc" ON "dripper" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_06822c0a260797711acc9023d5" ON "dripper" ("block_number") `) + await db.query(`CREATE TABLE "oeth_balancer_meta_pool_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total" numeric NOT NULL, "r_eth" numeric NOT NULL, "weth" numeric NOT NULL, CONSTRAINT "PK_6ddf5b8ba878e6d706e59bd6de0" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_5e7ef383756fa18cb602f50089" ON "oeth_balancer_meta_pool_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_11d344b3e0e03cdb6697dd61f7" ON "oeth_balancer_meta_pool_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ogv" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "circulating" numeric NOT NULL, "total" numeric NOT NULL, CONSTRAINT "PK_f16038abf451ce82bd03ea54ee7" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_2418a8b8b92b2f5977be761cf9" ON "ogv" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_b8f20bcf48e4aa77e0f48d77db" ON "ogv" ("block_number") `) + await db.query(`CREATE TABLE "staked_ogv" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total" numeric NOT NULL, "apy" numeric NOT NULL, CONSTRAINT "PK_b135611d9aab36c7889982c3be8" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_533195c60cfaef9e118789dee9" ON "staked_ogv" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_d601233411a33212b9d616aab0" ON "staked_ogv" ("block_number") `) + await db.query(`CREATE TABLE "ogv_governance" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "registered_voters" integer NOT NULL, "open_source_contributors" integer NOT NULL, "improvement_proposals" integer NOT NULL, CONSTRAINT "PK_b22758cd4ee8ff92c1b7ee0cf20" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_a0329e7109d5959b9aa3d9d374" ON "ogv_governance" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_63cd1ca46771965c68f6b85898" ON "ogv_governance" ("block_number") `) + await db.query(`CREATE TABLE "ousd" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total_supply" numeric NOT NULL, "rebasing_supply" numeric NOT NULL, "non_rebasing_supply" numeric NOT NULL, CONSTRAINT "PK_acecae4a20bc14b22d9f6738d8d" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_c8d1e285213b445b088805ac7c" ON "ousd" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_806949dd853b7e8acab5d03b81" ON "ousd" ("block_number") `) + await db.query(`CREATE TABLE "ousd_history" ("id" character varying NOT NULL, "value" numeric NOT NULL, "balance" numeric NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "type" character varying(8) NOT NULL, "address_id" character varying, CONSTRAINT "PK_dcbe3223b67f92d9ad4cffe8a7c" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_70291ea600c0c4d67d9bfe6a6b" ON "ousd_history" ("address_id") `) + await db.query(`CREATE INDEX "IDX_4d00d283e1ce3209dc43a0313c" ON "ousd_history" ("block_number") `) + await db.query(`CREATE INDEX "IDX_0c25caa59aa053a688a723d160" 
ON "ousd_history" ("tx_hash") `) + await db.query(`CREATE TABLE "ousd_address" ("id" character varying NOT NULL, "is_contract" boolean NOT NULL, "rebasing_option" character varying(6) NOT NULL, "balance" numeric NOT NULL, "earned" numeric NOT NULL, "credits" numeric NOT NULL, "last_updated" TIMESTAMP WITH TIME ZONE NOT NULL, CONSTRAINT "PK_bb061344757ede566d62854af6a" PRIMARY KEY ("id"))`) + await db.query(`CREATE TABLE "ousdapy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "apr" numeric NOT NULL, "apy" numeric NOT NULL, "apy7_day_avg" numeric NOT NULL, "apy14_day_avg" numeric NOT NULL, "apy30_day_avg" numeric NOT NULL, "rebasing_credits_per_token" numeric NOT NULL, CONSTRAINT "PK_d9889b7153efc82dbe88f9a7a33" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_c514963f42908ce84d65a84a77" ON "ousdapy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_4f606414b3b5ce1a366bd0fbf6" ON "ousdapy" ("block_number") `) + await db.query(`CREATE INDEX "IDX_0e84a81a109b66fe6f01f77c74" ON "ousdapy" ("tx_hash") `) + await db.query(`CREATE TABLE "ousd_rebase" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "total_supply" numeric NOT NULL, "rebasing_credits" numeric NOT NULL, "rebasing_credits_per_token" numeric NOT NULL, "fee" numeric NOT NULL, "yield" numeric NOT NULL, "apy_id" character varying, CONSTRAINT "PK_04cf0de72399a99798dde61b237" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_f8eb4a16ce58a146b3227ee21a" ON "ousd_rebase" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_3fb03b1a410e64c7367226d0b6" ON "ousd_rebase" ("block_number") `) + await db.query(`CREATE INDEX "IDX_1a76c478199672aaeec340f619" ON "ousd_rebase" ("tx_hash") `) + await db.query(`CREATE INDEX "IDX_427468c97f9838b804efd6c8e5" ON "ousd_rebase" ("apy_id") `) + await db.query(`CREATE TABLE "ousd_rebase_option" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "tx_hash" text NOT NULL, "status" character varying(6) NOT NULL, "address_id" character varying, CONSTRAINT "PK_d684f90866027104f3c929dfe10" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_64bd23947dc4c67e3b6a3f9352" ON "ousd_rebase_option" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_9b774e46b8b1cf7f828133809a" ON "ousd_rebase_option" ("block_number") `) + await db.query(`CREATE INDEX "IDX_4e95bf069de04533d83a9a97fd" ON "ousd_rebase_option" ("tx_hash") `) + await db.query(`CREATE INDEX "IDX_b04173f9349ddd991a3b60e914" ON "ousd_rebase_option" ("address_id") `) + await db.query(`CREATE TABLE "ousd_vault" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_343f5538c71a1cd78f1659ef9d3" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_6860186ea2f56e2c7d54c22107" ON "ousd_vault" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_0d0a7113a505cf7f7adea9ca81" ON "ousd_vault" ("block_number") `) + await db.query(`CREATE TABLE "ousd_morpho_aave" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_60676cde905a822ba73ff3a5c85" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX 
"IDX_78e0701c2e9a28242db37bd8f8" ON "ousd_morpho_aave" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_46ccf673b376d654052fbd53e6" ON "ousd_morpho_aave" ("block_number") `) + await db.query(`CREATE TABLE "ousd_morpho_compound" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_5f715d53ef8fc0fad595cacf4fa" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_9e7bd0d8ae23b877d5979ef80c" ON "ousd_morpho_compound" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_0bb3a0ad84071f1d80f6d4e90f" ON "ousd_morpho_compound" ("block_number") `) + await db.query(`CREATE TABLE "maker_dsr_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, CONSTRAINT "PK_196da2d6910009ae04e3542fe22" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_a35308a3c5dbaab2d321eb1525" ON "maker_dsr_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_844b219d8faf9b1d24ab2dba9a" ON "maker_dsr_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ousd_flux_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_ac977221429e50e4de1ce253a8b" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_5b165b5d30b13e363d33a66e14" ON "ousd_flux_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_80f3392968fde7b99cccb805ac" ON "ousd_flux_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ousd_compound_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_9030e82bf3479d03c04e0d1919c" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_6920b1db5dc577295ac4d1379d" ON "ousd_compound_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_89c6d7d3104bd36dc88a37add4" ON "ousd_compound_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ousd_convex_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_2b8f6e749e15e49d8816f1ac949" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_2deac473cd0b2dd7082e7da148" ON "ousd_convex_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_157bf74171817dc5c60ee37036" ON "ousd_convex_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ousd_aave_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_b4b7ac6e395aa722df500f93623" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_ca34b5a0a33bc9abdd8213c2fa" ON "ousd_aave_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_dacd7c98223d7bc8be074d71e4" ON "ousd_aave_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ousd_meta_strategy" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_d99170af73d86fe74460bbfacc4" 
PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_7e998dcf775263bc5df76ef987" ON "ousd_meta_strategy" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_55ce185680512d6a5b9fb0af89" ON "ousd_meta_strategy" ("block_number") `) + await db.query(`CREATE TABLE "ousd_convex_lusd_plus3_crv" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "dai" numeric NOT NULL, "usdt" numeric NOT NULL, "usdc" numeric NOT NULL, CONSTRAINT "PK_47290aa5dfa3cc5595f468e2f39" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_0783af95efb35fb3f13cde1656" ON "ousd_convex_lusd_plus3_crv" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_74ae01fb596a4f2733087ba454" ON "ousd_convex_lusd_plus3_crv" ("block_number") `) + await db.query(`ALTER TABLE "oeth_history" ADD CONSTRAINT "FK_94e47c4c49128c78f60b185b46b" FOREIGN KEY ("address_id") REFERENCES "oeth_address"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) + await db.query(`ALTER TABLE "oeth_rebase" ADD CONSTRAINT "FK_3331819842173de7c27c046547a" FOREIGN KEY ("apy_id") REFERENCES "oethapy"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) + await db.query(`ALTER TABLE "oeth_rebase_option" ADD CONSTRAINT "FK_034428879698039839b4ba6ffe8" FOREIGN KEY ("address_id") REFERENCES "oeth_address"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) + await db.query(`ALTER TABLE "ousd_history" ADD CONSTRAINT "FK_70291ea600c0c4d67d9bfe6a6bf" FOREIGN KEY ("address_id") REFERENCES "ousd_address"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) + await db.query(`ALTER TABLE "ousd_rebase" ADD CONSTRAINT "FK_427468c97f9838b804efd6c8e55" FOREIGN KEY ("apy_id") REFERENCES "ousdapy"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) + await db.query(`ALTER TABLE "ousd_rebase_option" ADD CONSTRAINT "FK_b04173f9349ddd991a3b60e914a" FOREIGN KEY ("address_id") REFERENCES "ousd_address"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`) + } + + async down(db) { + await db.query(`DROP TABLE "exchange_rate"`) + await db.query(`DROP INDEX "public"."IDX_9e23a3f1bf3634820c873a0fe8"`) + await db.query(`DROP INDEX "public"."IDX_c61a93768eed9e58ce399bbe01"`) + await db.query(`DROP TABLE "oeth"`) + await db.query(`DROP INDEX "public"."IDX_5b81a67229bac2d68e0dc92cc4"`) + await db.query(`DROP INDEX "public"."IDX_408e5f79f83093aa5cf2b0ea32"`) + await db.query(`DROP TABLE "oeth_history"`) + await db.query(`DROP INDEX "public"."IDX_94e47c4c49128c78f60b185b46"`) + await db.query(`DROP INDEX "public"."IDX_96956b1c8d29eb7066a97d5ea7"`) + await db.query(`DROP INDEX "public"."IDX_b14170bdb7fbc0775bf55df15d"`) + await db.query(`DROP TABLE "oeth_address"`) + await db.query(`DROP TABLE "oethapy"`) + await db.query(`DROP INDEX "public"."IDX_b1a448045d1ed9d655b679a371"`) + await db.query(`DROP INDEX "public"."IDX_6b8a7a706a0701e659a7d81508"`) + await db.query(`DROP INDEX "public"."IDX_c0c03168bb0139e3cffda4f00e"`) + await db.query(`DROP TABLE "oeth_rebase"`) + await db.query(`DROP INDEX "public"."IDX_fbb7b3f2fff9896eb683b86de7"`) + await db.query(`DROP INDEX "public"."IDX_d3255d02d9407bba89380d01fa"`) + await db.query(`DROP INDEX "public"."IDX_8b6bb0243472af88612fe6a01f"`) + await db.query(`DROP INDEX "public"."IDX_3331819842173de7c27c046547"`) + await db.query(`DROP TABLE "oeth_rebase_option"`) + await db.query(`DROP INDEX "public"."IDX_1fc6bbd88037bfbf4361776909"`) + await db.query(`DROP INDEX "public"."IDX_cbb7ceb49ef7c45432d0171296"`) + await db.query(`DROP INDEX "public"."IDX_355826dadaacc5ae2d63c82f28"`) + await db.query(`DROP INDEX 
"public"."IDX_034428879698039839b4ba6ffe"`) + await db.query(`DROP TABLE "oeth_vault"`) + await db.query(`DROP INDEX "public"."IDX_d6298a294864b4eaf793cf35a4"`) + await db.query(`DROP INDEX "public"."IDX_e20cb507a673817b2c68720415"`) + await db.query(`DROP TABLE "oeth_curve_lp"`) + await db.query(`DROP INDEX "public"."IDX_d9bbd20e888fa1b4b2c5d2f039"`) + await db.query(`DROP INDEX "public"."IDX_7617d593c36dce1b1565a8d74a"`) + await db.query(`DROP TABLE "oeth_frax_staking"`) + await db.query(`DROP INDEX "public"."IDX_ce6c2c65e90967dfeaac97025b"`) + await db.query(`DROP INDEX "public"."IDX_1a7f7d650390e2f9c212651e05"`) + await db.query(`DROP TABLE "oeth_morpho_aave"`) + await db.query(`DROP INDEX "public"."IDX_25e239b985844f1d33fac79981"`) + await db.query(`DROP INDEX "public"."IDX_a6662224e95eb6921bb14cb5f9"`) + await db.query(`DROP TABLE "dripper"`) + await db.query(`DROP INDEX "public"."IDX_88c58f8948c3294c2a9e2791dc"`) + await db.query(`DROP INDEX "public"."IDX_06822c0a260797711acc9023d5"`) + await db.query(`DROP TABLE "oeth_balancer_meta_pool_strategy"`) + await db.query(`DROP INDEX "public"."IDX_5e7ef383756fa18cb602f50089"`) + await db.query(`DROP INDEX "public"."IDX_11d344b3e0e03cdb6697dd61f7"`) + await db.query(`DROP TABLE "ogv"`) + await db.query(`DROP INDEX "public"."IDX_2418a8b8b92b2f5977be761cf9"`) + await db.query(`DROP INDEX "public"."IDX_b8f20bcf48e4aa77e0f48d77db"`) + await db.query(`DROP TABLE "staked_ogv"`) + await db.query(`DROP INDEX "public"."IDX_533195c60cfaef9e118789dee9"`) + await db.query(`DROP INDEX "public"."IDX_d601233411a33212b9d616aab0"`) + await db.query(`DROP TABLE "ogv_governance"`) + await db.query(`DROP INDEX "public"."IDX_a0329e7109d5959b9aa3d9d374"`) + await db.query(`DROP INDEX "public"."IDX_63cd1ca46771965c68f6b85898"`) + await db.query(`DROP TABLE "ousd"`) + await db.query(`DROP INDEX "public"."IDX_c8d1e285213b445b088805ac7c"`) + await db.query(`DROP INDEX "public"."IDX_806949dd853b7e8acab5d03b81"`) + await db.query(`DROP TABLE "ousd_history"`) + await db.query(`DROP INDEX "public"."IDX_70291ea600c0c4d67d9bfe6a6b"`) + await db.query(`DROP INDEX "public"."IDX_4d00d283e1ce3209dc43a0313c"`) + await db.query(`DROP INDEX "public"."IDX_0c25caa59aa053a688a723d160"`) + await db.query(`DROP TABLE "ousd_address"`) + await db.query(`DROP TABLE "ousdapy"`) + await db.query(`DROP INDEX "public"."IDX_c514963f42908ce84d65a84a77"`) + await db.query(`DROP INDEX "public"."IDX_4f606414b3b5ce1a366bd0fbf6"`) + await db.query(`DROP INDEX "public"."IDX_0e84a81a109b66fe6f01f77c74"`) + await db.query(`DROP TABLE "ousd_rebase"`) + await db.query(`DROP INDEX "public"."IDX_f8eb4a16ce58a146b3227ee21a"`) + await db.query(`DROP INDEX "public"."IDX_3fb03b1a410e64c7367226d0b6"`) + await db.query(`DROP INDEX "public"."IDX_1a76c478199672aaeec340f619"`) + await db.query(`DROP INDEX "public"."IDX_427468c97f9838b804efd6c8e5"`) + await db.query(`DROP TABLE "ousd_rebase_option"`) + await db.query(`DROP INDEX "public"."IDX_64bd23947dc4c67e3b6a3f9352"`) + await db.query(`DROP INDEX "public"."IDX_9b774e46b8b1cf7f828133809a"`) + await db.query(`DROP INDEX "public"."IDX_4e95bf069de04533d83a9a97fd"`) + await db.query(`DROP INDEX "public"."IDX_b04173f9349ddd991a3b60e914"`) + await db.query(`DROP TABLE "ousd_vault"`) + await db.query(`DROP INDEX "public"."IDX_6860186ea2f56e2c7d54c22107"`) + await db.query(`DROP INDEX "public"."IDX_0d0a7113a505cf7f7adea9ca81"`) + await db.query(`DROP TABLE "ousd_morpho_aave"`) + await db.query(`DROP INDEX "public"."IDX_78e0701c2e9a28242db37bd8f8"`) + await 
db.query(`DROP INDEX "public"."IDX_46ccf673b376d654052fbd53e6"`) + await db.query(`DROP TABLE "ousd_morpho_compound"`) + await db.query(`DROP INDEX "public"."IDX_9e7bd0d8ae23b877d5979ef80c"`) + await db.query(`DROP INDEX "public"."IDX_0bb3a0ad84071f1d80f6d4e90f"`) + await db.query(`DROP TABLE "maker_dsr_strategy"`) + await db.query(`DROP INDEX "public"."IDX_a35308a3c5dbaab2d321eb1525"`) + await db.query(`DROP INDEX "public"."IDX_844b219d8faf9b1d24ab2dba9a"`) + await db.query(`DROP TABLE "ousd_flux_strategy"`) + await db.query(`DROP INDEX "public"."IDX_5b165b5d30b13e363d33a66e14"`) + await db.query(`DROP INDEX "public"."IDX_80f3392968fde7b99cccb805ac"`) + await db.query(`DROP TABLE "ousd_compound_strategy"`) + await db.query(`DROP INDEX "public"."IDX_6920b1db5dc577295ac4d1379d"`) + await db.query(`DROP INDEX "public"."IDX_89c6d7d3104bd36dc88a37add4"`) + await db.query(`DROP TABLE "ousd_convex_strategy"`) + await db.query(`DROP INDEX "public"."IDX_2deac473cd0b2dd7082e7da148"`) + await db.query(`DROP INDEX "public"."IDX_157bf74171817dc5c60ee37036"`) + await db.query(`DROP TABLE "ousd_aave_strategy"`) + await db.query(`DROP INDEX "public"."IDX_ca34b5a0a33bc9abdd8213c2fa"`) + await db.query(`DROP INDEX "public"."IDX_dacd7c98223d7bc8be074d71e4"`) + await db.query(`DROP TABLE "ousd_meta_strategy"`) + await db.query(`DROP INDEX "public"."IDX_7e998dcf775263bc5df76ef987"`) + await db.query(`DROP INDEX "public"."IDX_55ce185680512d6a5b9fb0af89"`) + await db.query(`DROP TABLE "ousd_convex_lusd_plus3_crv"`) + await db.query(`DROP INDEX "public"."IDX_0783af95efb35fb3f13cde1656"`) + await db.query(`DROP INDEX "public"."IDX_74ae01fb596a4f2733087ba454"`) + await db.query(`ALTER TABLE "oeth_history" DROP CONSTRAINT "FK_94e47c4c49128c78f60b185b46b"`) + await db.query(`ALTER TABLE "oeth_rebase" DROP CONSTRAINT "FK_3331819842173de7c27c046547a"`) + await db.query(`ALTER TABLE "oeth_rebase_option" DROP CONSTRAINT "FK_034428879698039839b4ba6ffe8"`) + await db.query(`ALTER TABLE "ousd_history" DROP CONSTRAINT "FK_70291ea600c0c4d67d9bfe6a6bf"`) + await db.query(`ALTER TABLE "ousd_rebase" DROP CONSTRAINT "FK_427468c97f9838b804efd6c8e55"`) + await db.query(`ALTER TABLE "ousd_rebase_option" DROP CONSTRAINT "FK_b04173f9349ddd991a3b60e914a"`) + } +} diff --git a/docs/REQUIREMENTS.md b/docs/REQUIREMENTS.md index cffa25b4..77079fde 100644 --- a/docs/REQUIREMENTS.md +++ b/docs/REQUIREMENTS.md @@ -74,3 +74,163 @@ Catalog of data requirements. - Available for collection - Drip rate (1d, 1h, 1m) +## [prometheus-monitoring](https://github.com/oplabs/prometheus-monitoring) + +### [Metrics](https://github.com/oplabs/prometheus-monitoring/blob/2ef3f67ccd88a965c67553457a265b9853c57b33/lambda-scrapers/exporters/src/utils/prometheus.js) + +#### total_supply + +Track total supply for **OUSD**, **OGV**, and **OETH**. 
+ +#### vault + +Vault price metrics for: `["USDC", "USDT", "DAI"]` + +- `vault.priceUnitMint(assetAddress)` +- `vault.priceUnitRedeem(assetAddress)` + +Vault holdings (`balanceOf`) for: + +- OUSD: `["USDC", "USDT", "DAI"]` +- OETH: `["WETH", "stETH", "rETH", "frxETH"]` + +#### strategies + +OUSD Holdings: `["USDC", "USDT", "DAI"]` + +- Compound +- Aave +- Convex +- MorphoCompound +- MorphoAave +- OUSDMeta +- LUSDMeta + +OETH Holdings: `["frxETH", "rETH", "stETH", "WETH"]` + +- FraxETH +- CurveAMO + +#### threePool + +Assets: `["USDC", "USDT", "DAI"]` + +```javascript +const threepoolCoinIndexMap = { + DAI: 0, + USDC: 1, + USDT: 2, +}; + +contracts.ThreePool + +contract.balances(threepoolCoinIndexMap[asset]) +``` + +#### metapools + +- `curveMetapoolBalanceMetric` + - addresses.OUSDMetapool: 0xed279fdd11ca84beef15af5d39bb4d4bee23f0ca + - OUSD: `main_coin_balance = await poolContract.balances(0)` + - ThreePoolLP: `three_crv_balance = await poolContract.balances(1)` + - addresses.LUSDMetapool: 0xed279fdd11ca84beef15af5d39bb4d4bee23f0ca + - LUSD: `main_coin_balance = await poolContract.balances(0)` + - ThreePoolLP: `three_crv_balance = await poolContract.balances(1)` +- `balancerPoolMetric` + - Here we save the **rate** and **balance** for each pool. + - If an asset doesn't have a rate provider (zero address), default to 1e18 as suggested by + Balancer: https://docs.balancer.fi/reference/contracts/rate-providers.html + - rETH_sfrxETH_wstETH + - balancerPoolId: 0x42ed016f826165c2e5976fe5bc3df540c5ad0af700000000000000000000058b + - poolAddress: 0x42ED016F826165C2e5976fe5bC3df540C5aD0Af7 + - rETH_WETH + - balancerPoolId: 0x1e19cf2d73a72ef1332c882f20534b6519be0276000200000000000000000112 + - poolAddress: 0x1E19CF2D73a72Ef1332C882F20534B6519Be0276 + +```javascript +const [tokens, balances] = await balancerVault.getPoolTokens(poolId); +const rateProviders = await getBalancerPoolRateProviders(poolAddress); +rateProvider.getRate() +const balancerMetaStablePoolABI = [ + "function getRateProviders() external view returns (address[])", +]; +``` + +#### oeth + +- curvePoolBalanceMetric: `poolContract.balances(0) or poolContract.balances(1)` + - addresses.EthFrxEthPool: ETH frxETH + - addresses.EthStEthPool: ETH stETH + - addresses.REthEthPool: rETH ETH + - addresses.WEthStEthPool: WETH stETH + - addresses.OEthEthPool: OETH ETH + +#### aave_comp_platforms + +- AaveCompoundBorrowableMetric: `"USDC", "USDT", "DAI"` + - For each of the below: + +```javascript +balance = await assetContract.balanceOf(address) +``` + +`assetContract` is the USDC, USDT, or DAI token contract, +and `balanceOf` is queried for the `address` given in the map below. + +```javascript +const aaveAssetToPlatformMap = { + USDT: { + token: "aUSDT", + address: addresses.aUSDT, // 0x3Ed3B47Dd13EC9a98b44e6204A523E766B225811 + }, + USDC: { + token: "aUSDC", + address: addresses.aUSDC, // 0xBcca60bB61934080951369a648Fb03DF4F96263C + }, + DAI: { + token: "aDAI", + address: addresses.aDAI, // 0x028171bca77440897b824ca71d1c56cac55b68a3 + }, +}; +``` + +#### rebasing_credits + +- ✅ OETH + - This is done for OETH in Subsquid. +- OUSD + - A virtually identical implementation should work for OUSD.
+ + +- `rebasingCreditsPerTokenMetric`: OETH, OUSD + - `event TotalSupplyUpdatedHighres(uint256 totalSupply, uint256 rebasingCredits, uint256 rebasingCreditsPerToken)` +- `rebasingCreditsMetric`: OETH, OUSD + - `function rebasingCredits() external view returns (uint256)` +- `nonRebasingSupplyMetric`: OETH, OUSD + - `function nonRebasingSupply() external view returns (uint256)` + +##### Ramblings + +- AaveCompoundBorrowableMetric + - TRACK ERC20 BALANCES + - Is this as simple as tracking transfers or do some of these receive balance in other ways? (magical rebasing, + etc...) + - cUSDT: "0xf650c3d88d12db855b8bf7d11be6c55a4e07dcc9" + - cUSDC: "0x39aa39c021dfbae8fac545936693ac917d5e7563" + - cDAI: "0x5d3a536e4d6dbd6114cc1ead35777bab948e3643" + - aUSDT: "0x3Ed3B47Dd13EC9a98b44e6204A523E766B225811" + - aUSDC: "0xBcca60bB61934080951369a648Fb03DF4F96263C" + - aDAI: "0x028171bca77440897b824ca71d1c56cac55b68a3" +- balancerPoolMetric + - balancer vault `getPoolTokens(poolId)` data + - +- curveMetapoolBalanceMetric +- curvePoolBalanceMetric +- nonRebasingSupplyMetric +- rebasingCreditsMetric +- rebasingCreditsPerTokenMetric +- strategyHoldingMetric +- threepoolBalanceMetric +- totalSupplyMetric +- vaultHoldingMetric +- vaultUSDPriceMetric \ No newline at end of file diff --git a/graphql.config.yml b/graphql.config.yml index 376860dd..5b40586e 100644 --- a/graphql.config.yml +++ b/graphql.config.yml @@ -3,11 +3,22 @@ projects: schema: schema.graphql base: schema: schema-base.graphql + include: + - types.graphql oeth: schema: schema-oeth.graphql + include: + - schema-base.graphql + - types.graphql ousd: schema: schema-ousd.graphql + include: + - schema-base.graphql + - types.graphql ogv: schema: schema-ogv.graphql + include: + - schema-base.graphql + - types.graphql include: - types.graphql diff --git a/schema-base.graphql b/schema-base.graphql index 7bfed0d6..53550978 100644 --- a/schema-base.graphql +++ b/schema-base.graphql @@ -15,3 +15,15 @@ type ExchangeRate @entity { quote: String! rate: BigInt! } + +enum RebasingOption { + OptIn + OptOut +} + +enum HistoryType { + Swap + Sent + Received + Yield +} \ No newline at end of file diff --git a/schema-oeth.graphql b/schema-oeth.graphql index b390ebd7..94363edc 100644 --- a/schema-oeth.graphql +++ b/schema-oeth.graphql @@ -10,18 +10,13 @@ type OETH @entity { nonRebasingSupply: BigInt! } -enum OETHRebasingOption { - OptIn - OptOut -} - """ The OETH balance, history and other information for a given address. """ type OETHAddress @entity { id: ID! @index isContract: Boolean! - rebasingOption: OETHRebasingOption! + rebasingOption: RebasingOption! balance: BigInt! earned: BigInt! credits: BigInt! @@ -29,13 +24,6 @@ type OETHAddress @entity { history: [OETHHistory!]! @derivedFrom(field: "address") } -enum OETHHistoryType { - Swap - Sent - Received - Yield -} - """ The History entity tracks events that change the balance of OETH for an address. """ @@ -47,7 +35,7 @@ type OETHHistory @entity { timestamp: DateTime! blockNumber: Int! @index txHash: String! @index - type: OETHHistoryType! + type: HistoryType! } """ @@ -75,7 +63,7 @@ type OETHRebaseOption @entity { blockNumber: Int! @index txHash: String! @index address: OETHAddress! - status: OETHRebasingOption! + status: RebasingOption! } """ diff --git a/schema-ousd.graphql b/schema-ousd.graphql index 3d41eaf9..c364a3db 100644 --- a/schema-ousd.graphql +++ b/schema-ousd.graphql @@ -10,18 +10,13 @@ type OUSD @entity { nonRebasingSupply: BigInt! 
} -enum OUSDRebasingOption { - OptIn - OptOut -} - """ The OUSD balance, history and other information for a given address. """ type OUSDAddress @entity { id: ID! @index isContract: Boolean! - rebasingOption: OUSDRebasingOption! + rebasingOption: RebasingOption! balance: BigInt! earned: BigInt! credits: BigInt! @@ -29,13 +24,6 @@ type OUSDAddress @entity { history: [OUSDHistory!]! @derivedFrom(field: "address") } -enum OUSDHistoryType { - Swap - Sent - Received - Yield -} - """ The History entity tracks events that change the balance of OUSD for an address. """ @@ -47,7 +35,7 @@ type OUSDHistory @entity { timestamp: DateTime! blockNumber: Int! @index txHash: String! @index - type: OUSDHistoryType! + type: HistoryType! } """ @@ -75,7 +63,7 @@ type OUSDRebaseOption @entity { blockNumber: Int! @index txHash: String! @index address: OUSDAddress! - status: OUSDRebasingOption! + status: RebasingOption! } """ diff --git a/schema.graphql b/schema.graphql index 37e0d073..f1f34e47 100644 --- a/schema.graphql +++ b/schema.graphql @@ -17,7 +17,18 @@ type ExchangeRate @entity { quote: String! rate: BigInt! } -""" + +enum RebasingOption { + OptIn + OptOut +} + +enum HistoryType { + Swap + Sent + Received + Yield +}""" The OETH entity tracks the change in total supply of OETH over time. """ type OETH @entity { @@ -29,18 +40,13 @@ type OETH @entity { nonRebasingSupply: BigInt! } -enum OETHRebasingOption { - OptIn - OptOut -} - """ The OETH balance, history and other information for a given address. """ type OETHAddress @entity { id: ID! @index isContract: Boolean! - rebasingOption: OETHRebasingOption! + rebasingOption: RebasingOption! balance: BigInt! earned: BigInt! credits: BigInt! @@ -48,13 +54,6 @@ type OETHAddress @entity { history: [OETHHistory!]! @derivedFrom(field: "address") } -enum OETHHistoryType { - Swap - Sent - Received - Yield -} - """ The History entity tracks events that change the balance of OETH for an address. """ @@ -66,7 +65,7 @@ type OETHHistory @entity { timestamp: DateTime! blockNumber: Int! @index txHash: String! @index - type: OETHHistoryType! + type: HistoryType! } """ @@ -94,7 +93,7 @@ type OETHRebaseOption @entity { blockNumber: Int! @index txHash: String! @index address: OETHAddress! - status: OETHRebasingOption! + status: RebasingOption! } """ @@ -226,18 +225,13 @@ type OUSD @entity { nonRebasingSupply: BigInt! } -enum OUSDRebasingOption { - OptIn - OptOut -} - """ The OUSD balance, history and other information for a given address. """ type OUSDAddress @entity { id: ID! @index isContract: Boolean! - rebasingOption: OUSDRebasingOption! + rebasingOption: RebasingOption! balance: BigInt! earned: BigInt! credits: BigInt! @@ -245,13 +239,6 @@ type OUSDAddress @entity { history: [OUSDHistory!]! @derivedFrom(field: "address") } -enum OUSDHistoryType { - Swap - Sent - Received - Yield -} - """ The History entity tracks events that change the balance of OUSD for an address. """ @@ -263,7 +250,7 @@ type OUSDHistory @entity { timestamp: DateTime! blockNumber: Int! @index txHash: String! @index - type: OUSDHistoryType! + type: HistoryType! } """ @@ -291,7 +278,7 @@ type OUSDRebaseOption @entity { blockNumber: Int! @index txHash: String! @index address: OUSDAddress! - status: OUSDRebasingOption! + status: RebasingOption! } """ @@ -394,7 +381,7 @@ type OUSDMetaStrategy @entity { usdc: BigInt! } -type ConvexLUSDPlus3Crv @entity { +type OUSDConvexLUSDPlus3Crv @entity { id: ID! timestamp: DateTime! @index blockNumber: Int! 
@index diff --git a/src/abi/otoken-1.abi.ts b/src/abi/otoken-1.abi.ts new file mode 100644 index 00000000..7c4741f2 --- /dev/null +++ b/src/abi/otoken-1.abi.ts @@ -0,0 +1,205 @@ +export const ABI_JSON = [ + { + "type": "function", + "name": "_totalSupply", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "creditsBalanceOfHighres", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [ + { + "type": "address", + "name": "_account" + } + ], + "outputs": [ + { + "type": "uint256", + "name": "" + }, + { + "type": "uint256", + "name": "" + }, + { + "type": "bool", + "name": "" + } + ] + }, + { + "type": "function", + "name": "isUpgraded", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [ + { + "type": "address", + "name": "" + } + ], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "nonRebasingCreditsPerToken", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [ + { + "type": "address", + "name": "" + } + ], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "nonRebasingSupply", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "rebaseState", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [ + { + "type": "address", + "name": "" + } + ], + "outputs": [ + { + "type": "uint8", + "name": "" + } + ] + }, + { + "type": "function", + "name": "rebasingCredits", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "rebasingCreditsHighres", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "rebasingCreditsPerToken", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "rebasingCreditsPerTokenHighres", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "uint256", + "name": "" + } + ] + }, + { + "type": "function", + "name": "upgradeAccounts", + "constant": false, + "payable": false, + "inputs": [ + { + "type": "address[]", + "name": "accounts" + } + ], + "outputs": [] + }, + { + "type": "function", + "name": "upgradeGlobals", + "constant": false, + "payable": false, + "inputs": [], + "outputs": [] + }, + { + "type": "function", + "name": "vaultAddress", + "constant": true, + "stateMutability": "view", + "payable": false, + "inputs": [], + "outputs": [ + { + "type": "address", + "name": "" + } + ] + } +] diff --git a/src/abi/otoken-1.ts b/src/abi/otoken-1.ts new file mode 100644 index 00000000..77c5e356 --- /dev/null +++ b/src/abi/otoken-1.ts @@ -0,0 +1,94 @@ +import * as ethers from 'ethers' +import {LogEvent, Func, ContractBase} from './abi.support' +import {ABI_JSON} from './otoken-1.abi' + +export const abi = new ethers.Interface(ABI_JSON); + +export const functions = { + _totalSupply: new Func<[], {}, bigint>( + abi, '0x3eaaf86b' + ), + 
creditsBalanceOfHighres: new Func<[_account: string], {_account: string}, [_: bigint, _: bigint, _: boolean]>( + abi, '0xe5c4fffe' + ), + isUpgraded: new Func<[_: string], {}, bigint>( + abi, '0x95ef84b9' + ), + nonRebasingCreditsPerToken: new Func<[_: string], {}, bigint>( + abi, '0x609350cd' + ), + nonRebasingSupply: new Func<[], {}, bigint>( + abi, '0xe696393a' + ), + rebaseState: new Func<[_: string], {}, number>( + abi, '0x456ee286' + ), + rebasingCredits: new Func<[], {}, bigint>( + abi, '0x077f22b7' + ), + rebasingCreditsHighres: new Func<[], {}, bigint>( + abi, '0x7d0d66ff' + ), + rebasingCreditsPerToken: new Func<[], {}, bigint>( + abi, '0x6691cb3d' + ), + rebasingCreditsPerTokenHighres: new Func<[], {}, bigint>( + abi, '0x7a46a9c5' + ), + upgradeAccounts: new Func<[accounts: Array], {accounts: Array}, []>( + abi, '0xeec037f6' + ), + upgradeGlobals: new Func<[], {}, []>( + abi, '0x51cfd6fe' + ), + vaultAddress: new Func<[], {}, string>( + abi, '0x430bf08a' + ), +} + +export class Contract extends ContractBase { + + _totalSupply(): Promise { + return this.eth_call(functions._totalSupply, []) + } + + creditsBalanceOfHighres(_account: string): Promise<[_: bigint, _: bigint, _: boolean]> { + return this.eth_call(functions.creditsBalanceOfHighres, [_account]) + } + + isUpgraded(arg0: string): Promise { + return this.eth_call(functions.isUpgraded, [arg0]) + } + + nonRebasingCreditsPerToken(arg0: string): Promise { + return this.eth_call(functions.nonRebasingCreditsPerToken, [arg0]) + } + + nonRebasingSupply(): Promise { + return this.eth_call(functions.nonRebasingSupply, []) + } + + rebaseState(arg0: string): Promise { + return this.eth_call(functions.rebaseState, [arg0]) + } + + rebasingCredits(): Promise { + return this.eth_call(functions.rebasingCredits, []) + } + + rebasingCreditsHighres(): Promise { + return this.eth_call(functions.rebasingCreditsHighres, []) + } + + rebasingCreditsPerToken(): Promise { + return this.eth_call(functions.rebasingCreditsPerToken, []) + } + + rebasingCreditsPerTokenHighres(): Promise { + return this.eth_call(functions.rebasingCreditsPerTokenHighres, []) + } + + vaultAddress(): Promise { + return this.eth_call(functions.vaultAddress, []) + } +} diff --git a/src/abi/oeth-vault.abi.ts b/src/abi/otoken-vault.abi.ts similarity index 100% rename from src/abi/oeth-vault.abi.ts rename to src/abi/otoken-vault.abi.ts diff --git a/src/abi/oeth-vault.ts b/src/abi/otoken-vault.ts similarity index 99% rename from src/abi/oeth-vault.ts rename to src/abi/otoken-vault.ts index abeca014..0b2f2232 100644 --- a/src/abi/oeth-vault.ts +++ b/src/abi/otoken-vault.ts @@ -1,6 +1,6 @@ import * as ethers from 'ethers' import {LogEvent, Func, ContractBase} from './abi.support' -import {ABI_JSON} from './oeth-vault.abi' +import {ABI_JSON} from './otoken-vault.abi' export const abi = new ethers.Interface(ABI_JSON); diff --git a/src/abi/oeth.abi.ts b/src/abi/otoken.abi.ts similarity index 100% rename from src/abi/oeth.abi.ts rename to src/abi/otoken.abi.ts diff --git a/src/abi/oeth.ts b/src/abi/otoken.ts similarity index 99% rename from src/abi/oeth.ts rename to src/abi/otoken.ts index 880141c1..8c0204a4 100644 --- a/src/abi/oeth.ts +++ b/src/abi/otoken.ts @@ -1,6 +1,6 @@ import * as ethers from 'ethers' import {LogEvent, Func, ContractBase} from './abi.support' -import {ABI_JSON} from './oeth.abi' +import {ABI_JSON} from './otoken.abi' export const abi = new ethers.Interface(ABI_JSON); diff --git a/src/main.ts b/src/main.ts index 660c10df..246b8318 100644 --- a/src/main.ts +++ 
b/src/main.ts @@ -5,6 +5,7 @@ import * as dripper from './processors/dripper' import * as fraxStaking from './processors/frax-staking' import * as morphoAave from './processors/morpho-aave' import * as oeth from './processors/oeth' +import * as ousd from './processors/ousd' import * as balancerMetaPoolStrategy from './processors/strategies/balancer-meta-pool' import * as vault from './processors/vault' @@ -12,13 +13,16 @@ run({ // The idea is that these processors have zero dependencies on one another and can be processed asynchronously. processors: [ oeth, - vault, - fraxStaking, - morphoAave, - dripper, - curveLp, - balancerMetaPoolStrategy, + ousd, + // vault, + // fraxStaking, + // morphoAave, + // dripper, + // curveLp, + // balancerMetaPoolStrategy, ], // For processors which depend on results from other processors, post processors run after all processors have finished. - postProcessors: [exchangeRates], + postProcessors: [ + // exchangeRates + ], }) diff --git a/src/model/generated/_ousdHistoryType.ts b/src/model/generated/_historyType.ts similarity index 74% rename from src/model/generated/_ousdHistoryType.ts rename to src/model/generated/_historyType.ts index f62e10f3..813557ec 100644 --- a/src/model/generated/_ousdHistoryType.ts +++ b/src/model/generated/_historyType.ts @@ -1,4 +1,4 @@ -export enum OUSDHistoryType { +export enum HistoryType { Swap = "Swap", Sent = "Sent", Received = "Received", diff --git a/src/model/generated/_oethHistoryType.ts b/src/model/generated/_oethHistoryType.ts deleted file mode 100644 index 717d0c2a..00000000 --- a/src/model/generated/_oethHistoryType.ts +++ /dev/null @@ -1,6 +0,0 @@ -export enum OETHHistoryType { - Swap = "Swap", - Sent = "Sent", - Received = "Received", - Yield = "Yield", -} diff --git a/src/model/generated/_ousdRebasingOption.ts b/src/model/generated/_ousdRebasingOption.ts deleted file mode 100644 index 4813d5a0..00000000 --- a/src/model/generated/_ousdRebasingOption.ts +++ /dev/null @@ -1,4 +0,0 @@ -export enum OUSDRebasingOption { - OptIn = "OptIn", - OptOut = "OptOut", -} diff --git a/src/model/generated/_oethRebasingOption.ts b/src/model/generated/_rebasingOption.ts similarity index 58% rename from src/model/generated/_oethRebasingOption.ts rename to src/model/generated/_rebasingOption.ts index 63013e44..ea77587b 100644 --- a/src/model/generated/_oethRebasingOption.ts +++ b/src/model/generated/_rebasingOption.ts @@ -1,4 +1,4 @@ -export enum OETHRebasingOption { +export enum RebasingOption { OptIn = "OptIn", OptOut = "OptOut", } diff --git a/src/model/generated/index.ts b/src/model/generated/index.ts index 421100fd..9964427f 100644 --- a/src/model/generated/index.ts +++ b/src/model/generated/index.ts @@ -1,9 +1,9 @@ export * from "./exchangeRate.model" export * from "./oeth.model" export * from "./oethAddress.model" -export * from "./_oethRebasingOption" +export * from "./_rebasingOption" export * from "./oethHistory.model" -export * from "./_oethHistoryType" +export * from "./_historyType" export * from "./oethRebase.model" export * from "./oethRebaseOption.model" export * from "./oethapy.model" @@ -18,9 +18,7 @@ export * from "./stakedOgv.model" export * from "./ogvGovernance.model" export * from "./ousd.model" export * from "./ousdAddress.model" -export * from "./_ousdRebasingOption" export * from "./ousdHistory.model" -export * from "./_ousdHistoryType" export * from "./ousdRebase.model" export * from "./ousdRebaseOption.model" export * from "./ousdapy.model" @@ -33,4 +31,4 @@ export * from "./ousdCompoundStrategy.model" 
export * from "./ousdConvexStrategy.model" export * from "./ousdAaveStrategy.model" export * from "./ousdMetaStrategy.model" -export * from "./convexLusdPlus3Crv.model" +export * from "./ousdConvexLusdPlus3Crv.model" diff --git a/src/model/generated/oethAddress.model.ts b/src/model/generated/oethAddress.model.ts index bb88eb47..5b7bc2f4 100644 --- a/src/model/generated/oethAddress.model.ts +++ b/src/model/generated/oethAddress.model.ts @@ -1,6 +1,6 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, OneToMany as OneToMany_} from "typeorm" import * as marshal from "./marshal" -import {OETHRebasingOption} from "./_oethRebasingOption" +import {RebasingOption} from "./_rebasingOption" import {OETHHistory} from "./oethHistory.model" /** @@ -19,7 +19,7 @@ export class OETHAddress { isContract!: boolean @Column_("varchar", {length: 6, nullable: false}) - rebasingOption!: OETHRebasingOption + rebasingOption!: RebasingOption @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) balance!: bigint diff --git a/src/model/generated/oethHistory.model.ts b/src/model/generated/oethHistory.model.ts index 6b25b972..2f3eaa27 100644 --- a/src/model/generated/oethHistory.model.ts +++ b/src/model/generated/oethHistory.model.ts @@ -1,7 +1,7 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, ManyToOne as ManyToOne_, Index as Index_} from "typeorm" import * as marshal from "./marshal" import {OETHAddress} from "./oethAddress.model" -import {OETHHistoryType} from "./_oethHistoryType" +import {HistoryType} from "./_historyType" /** * The History entity tracks events that change the balance of OETH for an address. @@ -37,5 +37,5 @@ export class OETHHistory { txHash!: string @Column_("varchar", {length: 8, nullable: false}) - type!: OETHHistoryType + type!: HistoryType } diff --git a/src/model/generated/oethRebaseOption.model.ts b/src/model/generated/oethRebaseOption.model.ts index b454a93a..fa6dde00 100644 --- a/src/model/generated/oethRebaseOption.model.ts +++ b/src/model/generated/oethRebaseOption.model.ts @@ -1,6 +1,6 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_, ManyToOne as ManyToOne_} from "typeorm" import {OETHAddress} from "./oethAddress.model" -import {OETHRebasingOption} from "./_oethRebasingOption" +import {RebasingOption} from "./_rebasingOption" /** * The RebaseOption entity tracks historical rebase option changes by address. 
@@ -31,5 +31,5 @@ export class OETHRebaseOption { address!: OETHAddress @Column_("varchar", {length: 6, nullable: false}) - status!: OETHRebasingOption + status!: RebasingOption } diff --git a/src/model/generated/ousdAddress.model.ts b/src/model/generated/ousdAddress.model.ts index 65fcc98a..685a5903 100644 --- a/src/model/generated/ousdAddress.model.ts +++ b/src/model/generated/ousdAddress.model.ts @@ -1,6 +1,6 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, OneToMany as OneToMany_} from "typeorm" import * as marshal from "./marshal" -import {OUSDRebasingOption} from "./_ousdRebasingOption" +import {RebasingOption} from "./_rebasingOption" import {OUSDHistory} from "./ousdHistory.model" /** @@ -19,7 +19,7 @@ export class OUSDAddress { isContract!: boolean @Column_("varchar", {length: 6, nullable: false}) - rebasingOption!: OUSDRebasingOption + rebasingOption!: RebasingOption @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) balance!: bigint diff --git a/src/model/generated/convexLusdPlus3Crv.model.ts b/src/model/generated/ousdConvexLusdPlus3Crv.model.ts similarity index 88% rename from src/model/generated/convexLusdPlus3Crv.model.ts rename to src/model/generated/ousdConvexLusdPlus3Crv.model.ts index 0ab468ef..3b87e546 100644 --- a/src/model/generated/convexLusdPlus3Crv.model.ts +++ b/src/model/generated/ousdConvexLusdPlus3Crv.model.ts @@ -2,8 +2,8 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, I import * as marshal from "./marshal" @Entity_() -export class ConvexLUSDPlus3Crv { - constructor(props?: Partial) { +export class OUSDConvexLUSDPlus3Crv { + constructor(props?: Partial) { Object.assign(this, props) } diff --git a/src/model/generated/ousdHistory.model.ts b/src/model/generated/ousdHistory.model.ts index 8278c429..b68dadb6 100644 --- a/src/model/generated/ousdHistory.model.ts +++ b/src/model/generated/ousdHistory.model.ts @@ -1,7 +1,7 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, ManyToOne as ManyToOne_, Index as Index_} from "typeorm" import * as marshal from "./marshal" import {OUSDAddress} from "./ousdAddress.model" -import {OUSDHistoryType} from "./_ousdHistoryType" +import {HistoryType} from "./_historyType" /** * The History entity tracks events that change the balance of OUSD for an address. @@ -37,5 +37,5 @@ export class OUSDHistory { txHash!: string @Column_("varchar", {length: 8, nullable: false}) - type!: OUSDHistoryType + type!: HistoryType } diff --git a/src/model/generated/ousdRebaseOption.model.ts b/src/model/generated/ousdRebaseOption.model.ts index 1aa6e022..7f8c4fca 100644 --- a/src/model/generated/ousdRebaseOption.model.ts +++ b/src/model/generated/ousdRebaseOption.model.ts @@ -1,6 +1,6 @@ import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_, ManyToOne as ManyToOne_} from "typeorm" import {OUSDAddress} from "./ousdAddress.model" -import {OUSDRebasingOption} from "./_ousdRebasingOption" +import {RebasingOption} from "./_rebasingOption" /** * The RebaseOption entity tracks historical rebase option changes by address. 
@@ -31,5 +31,5 @@ export class OUSDRebaseOption { address!: OUSDAddress @Column_("varchar", {length: 6, nullable: false}) - status!: OUSDRebasingOption + status!: RebasingOption } diff --git a/src/processor-templates/otoken/index.ts b/src/processor-templates/otoken/index.ts new file mode 100644 index 00000000..40e0dfc9 --- /dev/null +++ b/src/processor-templates/otoken/index.ts @@ -0,0 +1 @@ +export * from './otoken' diff --git a/src/processor-templates/otoken/otoken.ts b/src/processor-templates/otoken/otoken.ts new file mode 100644 index 00000000..ec9504ed --- /dev/null +++ b/src/processor-templates/otoken/otoken.ts @@ -0,0 +1,388 @@ +import { v4 as uuidv4 } from 'uuid' + +import * as otoken from '../../abi/otoken' +import * as otokenVault from '../../abi/otoken-vault' +import { + HistoryType, + OETH, + OETHAPY, + OETHAddress, + OETHHistory, + OETHRebase, + OETHRebaseOption, + OUSD, + OUSDAPY, + OUSDAddress, + OUSDHistory, + OUSDRebase, + OUSDRebaseOption, + RebasingOption, +} from '../../model' +import { Context } from '../../processor' +import { getLatestEntity } from '../../processors/utils' +import { ADDRESS_ZERO } from '../../utils/addresses' +import { DECIMALS_18 } from '../../utils/constants' +import { createAddress, createRebaseAPY } from './utils' + +export interface EntityClass { + new (partial: Partial): T +} + +type InstanceTypeOfConstructor = + T extends { + new (...args: any[]): infer R + } + ? R + : any + +type OToken = EntityClass | EntityClass +type OTokenAPY = EntityClass | EntityClass +type OTokenAddress = EntityClass | EntityClass +type OTokenHistory = EntityClass | EntityClass +type OTokenRebase = EntityClass | EntityClass +type OTokenRebaseOption = + | EntityClass + | EntityClass + +export const createOTokenProcessor = (params: { + OTOKEN_ADDRESS: string + OTOKEN_VAULT_ADDRESS: string + OToken: OToken + OTokenAPY: OTokenAPY + OTokenAddress: OTokenAddress + OTokenHistory: OTokenHistory + OTokenRebase: OTokenRebase + OTokenRebaseOption: OTokenRebaseOption +}) => { + interface ProcessResult { + initialized: boolean + initialize: () => Promise + otokens: InstanceTypeOfConstructor[] + history: InstanceTypeOfConstructor[] + rebases: InstanceTypeOfConstructor[] + rebaseOptions: InstanceTypeOfConstructor[] + apies: InstanceTypeOfConstructor[] + owners: Map> + lastYieldDistributionEvent?: { + fee: bigint + yield: bigint + } + } + + const process = async (ctx: Context) => { + const result: ProcessResult = { + initialized: false, + // Saves ~5ms init time if we have no filter matches. + initialize: async () => { + if (result.initialized) return + result.initialized = true + result.owners = await ctx.store + .find>( + params.OTokenAddress as any, + ) + .then((q) => new Map(q.map((i) => [i.id, i]))) + }, + otokens: [], + history: [], + rebases: [], + rebaseOptions: [], + apies: [], + // get all addresses from the database. + // we need this because we increase their balance based on rebase events + owners: undefined as unknown as Map< + string, + InstanceTypeOfConstructor + >, // We want to error if someone forgets to initialize. 
+ } + + for (const block of ctx.blocks) { + for (const trace of block.traces) { + await processRebaseOpt(ctx, result, block, trace) + } + for (const log of block.logs) { + await processTransfer(ctx, result, block, log) + await processYieldDistribution(ctx, result, block, log) + await processTotalSupplyUpdatedHighres(ctx, result, block, log) + } + } + + if (result.owners) { + await ctx.store.upsert([...result.owners.values()]) + } + await ctx.store.upsert(result.apies) + await ctx.store.insert(result.otokens) + await ctx.store.insert(result.history) + await ctx.store.insert(result.rebases) + await ctx.store.insert(result.rebaseOptions) + } + + const processTransfer = async ( + ctx: Context, + result: ProcessResult, + block: Context['blocks']['0'], + log: Context['blocks']['0']['logs']['0'], + ) => { + if (log.address !== params.OTOKEN_ADDRESS) return + if (log.topics[0] === otoken.events.Transfer.topic) { + await result.initialize() + const dataRaw = otoken.events.Transfer.decode(log) + const data = { + from: dataRaw.from.toLowerCase(), + to: dataRaw.to.toLowerCase(), + value: dataRaw.value, + } + + const otokenObject = await getLatestOTokenObject(ctx, result, block) + if (data.from === ADDRESS_ZERO) { + otokenObject.totalSupply += data.value + } else if (data.to === ADDRESS_ZERO) { + otokenObject.totalSupply -= data.value + } + + // Bind the token contract to the block number + const token = new otoken.Contract( + ctx, + block.header, + params.OTOKEN_ADDRESS, + ) + // Transfer events + let addressSub = result.owners.get(data.from) + let addressAdd = result.owners.get(data.to) + + if (addressSub == null) { + addressSub = await createAddress(params.OTokenAddress, ctx, data.from) + result.owners.set(addressSub.id, addressSub) + } + if (addressAdd == null) { + addressAdd = await createAddress(params.OTokenAddress, ctx, data.to) + result.owners.set(addressAdd.id, addressAdd) + } + + addressSub.lastUpdated = new Date(block.header.timestamp) + addressAdd.lastUpdated = new Date(block.header.timestamp) + + const isSwap = [data.from, data.to].includes(ADDRESS_ZERO) + + // update the address balance + await Promise.all( + [addressSub, addressAdd].map(async (address) => { + const credits = await token.creditsBalanceOfHighres(address.id) + const newBalance = (credits[0] * DECIMALS_18) / credits[1] + const change = newBalance - address.balance + result.history.push( + new params.OTokenHistory({ + // we can't use {t.id} because it's not unique + id: uuidv4(), + address: address, + value: change, + balance: newBalance, + timestamp: new Date(block.header.timestamp), + blockNumber: block.header.height, + txHash: log.transactionHash, + type: isSwap + ? HistoryType.Swap + : addressSub === address + ? 
HistoryType.Sent + : HistoryType.Received, + }), + ) + address.credits = BigInt(credits[0]) // token credits + address.balance = newBalance // token balance + }), + ) + + if ( + addressAdd.rebasingOption === RebasingOption.OptOut && + data.from === ADDRESS_ZERO + ) { + // If it's a mint and minter has opted out of rebasing, + // add to non-rebasing supply + otokenObject.nonRebasingSupply += data.value + } else if ( + data.to === ADDRESS_ZERO && + addressSub.rebasingOption === RebasingOption.OptOut + ) { + // If it's a redeem and redeemer has opted out of rebasing, + // subtract non-rebasing supply + otokenObject.nonRebasingSupply -= data.value + } else if ( + addressAdd.rebasingOption === RebasingOption.OptOut && + addressSub.rebasingOption === RebasingOption.OptIn + ) { + // If receiver has opted out but sender hasn't, + // Add to non-rebasing supply + otokenObject.nonRebasingSupply += data.value + } else if ( + addressAdd.rebasingOption === RebasingOption.OptIn && + addressSub.rebasingOption === RebasingOption.OptOut + ) { + // If sender has opted out but receiver hasn't, + // Subtract non-rebasing supply + otokenObject.nonRebasingSupply -= data.value + } + + // Update rebasing supply in all cases + otokenObject.rebasingSupply = + otokenObject.totalSupply - otokenObject.nonRebasingSupply + } + } + + const processTotalSupplyUpdatedHighres = async ( + ctx: Context, + result: ProcessResult, + block: Context['blocks']['0'], + log: Context['blocks']['0']['logs']['0'], + ) => { + if (log.address !== params.OTOKEN_ADDRESS) return + if (log.topics[0] !== otoken.events.TotalSupplyUpdatedHighres.topic) return + + await result.initialize() + const data = otoken.events.TotalSupplyUpdatedHighres.decode(log) + + // OToken Object + const oethObject = await getLatestOTokenObject(ctx, result, block) + oethObject.totalSupply = data.totalSupply + oethObject.rebasingSupply = + oethObject.totalSupply - oethObject.nonRebasingSupply + + if (!result.lastYieldDistributionEvent) { + throw new Error('lastYieldDistributionEvent is not set') + } + + // Rebase events + let rebase = createRebaseAPY( + ctx, + result.apies, + block, + log, + data, + result.lastYieldDistributionEvent, + ) + for (const address of result.owners.values()) { + if (address.rebasingOption === RebasingOption.OptOut) { + continue + } + const newBalance = + (address.credits * DECIMALS_18) / data.rebasingCreditsPerToken + const earned = newBalance - address.balance + + result.history.push( + new params.OTokenHistory({ + id: uuidv4(), + // we can't use {t.id} because it's not unique + address: address, + value: earned, + balance: newBalance, + timestamp: new Date(block.header.timestamp), + blockNumber: block.header.height, + txHash: log.transactionHash, + type: HistoryType.Yield, + }), + ) + + address.balance = newBalance + address.earned += earned + } + const entity = await rebase + result.rebases.push(entity) + } + + const processYieldDistribution = async ( + ctx: Context, + result: ProcessResult, + block: Context['blocks']['0'], + log: Context['blocks']['0']['logs']['0'], + ) => { + if (log.address !== params.OTOKEN_VAULT_ADDRESS) return + if (log.topics[0] !== otokenVault.events.YieldDistribution.topic) return + + await result.initialize() + const { _yield, _fee } = otokenVault.events.YieldDistribution.decode(log) + result.lastYieldDistributionEvent = { yield: _yield, fee: _fee } + } + + const processRebaseOpt = async ( + ctx: Context, + result: ProcessResult, + block: Context['blocks']['0'], + trace: Context['blocks']['0']['traces']['0'], + ) 
=> { + if ( + trace.type === 'call' && + params.OTOKEN_ADDRESS === trace.action.to && + (trace.action.sighash === otoken.functions.rebaseOptIn.sighash || + trace.action.sighash === otoken.functions.rebaseOptOut.sighash) + ) { + await result.initialize() + const timestamp = new Date(block.header.timestamp) + const blockNumber = block.header.height + const address = trace.action.from.toLowerCase() + const otokenObject = await getLatestOTokenObject(ctx, result, block) + let owner = result.owners.get(address) + if (!owner) { + owner = await createAddress( + params.OTokenAddress, + ctx, + address, + timestamp, + ) + result.owners.set(address, owner) + } + + let rebaseOption = new params.OTokenRebaseOption({ + id: uuidv4(), + timestamp, + blockNumber, + txHash: trace.transaction?.hash, + address: owner, + status: owner.rebasingOption, + }) + result.rebaseOptions.push(rebaseOption) + if (trace.action.sighash === otoken.functions.rebaseOptIn.sighash) { + owner.rebasingOption = RebasingOption.OptIn + rebaseOption.status = RebasingOption.OptIn + otokenObject.nonRebasingSupply -= owner.balance + otokenObject.rebasingSupply = + otokenObject.totalSupply - otokenObject.nonRebasingSupply + } + if (trace.action.sighash === otoken.functions.rebaseOptOut.sighash) { + owner.rebasingOption = RebasingOption.OptOut + rebaseOption.status = RebasingOption.OptOut + otokenObject.nonRebasingSupply += owner.balance + otokenObject.rebasingSupply = + otokenObject.totalSupply - otokenObject.nonRebasingSupply + } + } + } + + const getLatestOTokenObject = async ( + ctx: Context, + result: ProcessResult, + block: Context['blocks']['0'], + ) => { + const timestampId = new Date(block.header.timestamp).toISOString() + const { latest, current } = await getLatestEntity( + ctx, + params.OToken as any, + result.otokens, + timestampId, + ) + + let otokenObject = current + if (!otokenObject) { + otokenObject = new params.OToken({ + id: timestampId, + timestamp: new Date(block.header.timestamp), + blockNumber: block.header.height, + totalSupply: latest?.totalSupply ?? 0n, + rebasingSupply: latest?.rebasingSupply ?? 0n, + nonRebasingSupply: latest?.nonRebasingSupply ?? 0n, + }) + result.otokens.push(otokenObject) + } + + return otokenObject + } + + return process +} diff --git a/src/processors/oeth/utils.ts b/src/processor-templates/otoken/utils.ts similarity index 81% rename from src/processors/oeth/utils.ts rename to src/processor-templates/otoken/utils.ts index 972f166d..49b27acd 100644 --- a/src/processors/oeth/utils.ts +++ b/src/processor-templates/otoken/utils.ts @@ -1,18 +1,24 @@ import dayjs from 'dayjs' import { LessThan, MoreThanOrEqual } from 'typeorm' -import * as oeth from '../../abi/oeth' -import { APY, Address, Rebase, RebasingOption } from '../../model' +import * as otoken from '../../abi/otoken' +import { + OETHAPY, + OETHAddress, + OETHRebase, + OUSDAddress, + RebasingOption, +} from '../../model' import { Context } from '../../processor' +export type Newable = { new (partial: Partial): T } + /** * Create a new Address entity */ -export async function createAddress( - ctx: Context, - addr: string, - lastUpdated?: Date, -): Promise
{ +export async function createAddress< + T extends typeof OETHAddress | typeof OUSDAddress, +>(entity: T, ctx: Context, addr: string, lastUpdated?: Date) { let isContract: boolean = false if (addr !== '0x0000000000000000000000000000000000000000') { isContract = @@ -20,7 +26,7 @@ export async function createAddress( } // ctx.log.info(`New address ${rawAddress}`); - return new Address({ + return new entity({ id: addr, balance: 0n, earned: 0n, @@ -36,16 +42,18 @@ export async function createAddress( */ export async function createRebaseAPY( ctx: Context, - apies: APY[], + apies: OETHAPY[], block: Context['blocks']['0'], log: Context['blocks']['0']['logs']['0'], - rebaseEvent: ReturnType, + rebaseEvent: ReturnType< + typeof otoken.events.TotalSupplyUpdatedHighres.decode + >, lastYieldDistributionEvent: { fee: bigint yield: bigint }, -): Promise { - const rebase = new Rebase({ +): Promise { + const rebase = new OETHRebase({ id: log.id, blockNumber: block.header.height, timestamp: new Date(block.header.timestamp), @@ -64,7 +72,7 @@ export async function createRebaseAPY( // get last APY to compare with current one let lastApy = apies.slice(apies.length - 2).find((apy) => apy.id < dateId) ?? - (await ctx.store.findOne(APY, { + (await ctx.store.findOne(OETHAPY, { where: { id: LessThan(dateId) }, order: { id: 'DESC' }, })) @@ -72,11 +80,11 @@ export async function createRebaseAPY( // check if there is already an APY for the current date let apy = apies.slice(apies.length - 1).find((apy) => apy.id === dateId) ?? - (await ctx.store.findOne(APY, { where: { id: dateId } })) + (await ctx.store.findOne(OETHAPY, { where: { id: dateId } })) // ctx.log.info(`APY: ${dateId} ${apy}, ${lastDateId} ${lastApy}`); // create a new APY if it doesn't exist if (!apy) { - apy = new APY({ + apy = new OETHAPY({ id: dateId, blockNumber: block.header.height, timestamp: new Date(block.header.timestamp), @@ -134,11 +142,14 @@ export async function createRebaseAPY( // calculate average APY for the last 7, 14 and 30 days await Promise.all( [last7daysDateId, last14daysDateId, last30daysDateId].map(async (i) => { - const pastAPYs = await ctx.store.findBy(APY, { + const pastAPYs = await ctx.store.findBy(OETHAPY, { id: MoreThanOrEqual(i.value), }) apy![i.key] = - pastAPYs.reduce((acc: number, cur: APY) => acc + cur.apy, apy!.apy) / + pastAPYs.reduce( + (acc: number, cur: OETHAPY) => acc + cur.apy, + apy!.apy, + ) / (pastAPYs.length + 1) }), ) diff --git a/src/processor.ts b/src/processor.ts index 361a92bb..df56b903 100644 --- a/src/processor.ts +++ b/src/processor.ts @@ -102,6 +102,20 @@ export const run = ({ ), ) postTimes.forEach((t) => t()) + + // ctx.log.info({ + // blocks: ctx.blocks.length, + // logs: ctx.blocks.reduce((sum, block) => sum + block.logs.length, 0), + // traces: ctx.blocks.reduce((sum, block) => sum + block.traces.length, 0), + // transactions: ctx.blocks.reduce( + // (sum, block) => sum + block.transactions.length, + // 0, + // ), + // logArray: ctx.blocks.reduce( + // (logs, block) => [...logs, ...block.logs], + // [] as Log[], + // ), + // }) }, ) } diff --git a/src/processors/curve-lp/curve-lp.ts b/src/processors/curve-lp/curve-lp.ts index a4553f69..22c6c8c0 100644 --- a/src/processors/curve-lp/curve-lp.ts +++ b/src/processors/curve-lp/curve-lp.ts @@ -4,7 +4,7 @@ import { pad } from 'viem' import * as baseRewardPool from '../../abi/base-reward-pool' import * as curveLpToken from '../../abi/curve-lp-token' import * as erc20 from '../../abi/erc20' -import { CurveLP } from '../../model' +import { OETHCurveLP 
} from '../../model' import { Context } from '../../processor' import { OETH_ADDRESS, @@ -16,7 +16,7 @@ import { getEthBalance } from '../../utils/getEthBalance' import { getLatestEntity, trackAddressBalances } from '../utils' interface ProcessResult { - curveLPs: CurveLP[] + curveLPs: OETHCurveLP[] } export const from = Math.min( @@ -215,7 +215,7 @@ const getLatestCurveLP = async ( const timestampId = new Date(block.header.timestamp).toISOString() const { latest, current } = await getLatestEntity( ctx, - CurveLP, + OETHCurveLP, result.curveLPs, timestampId, ) @@ -223,7 +223,7 @@ const getLatestCurveLP = async ( let isNew = false let curveLP = current if (!curveLP) { - curveLP = new CurveLP({ + curveLP = new OETHCurveLP({ id: timestampId, timestamp: new Date(block.header.timestamp), blockNumber: block.header.height, diff --git a/src/processors/frax-staking/frax-staking.ts b/src/processors/frax-staking/frax-staking.ts index e030902e..ba409a57 100644 --- a/src/processors/frax-staking/frax-staking.ts +++ b/src/processors/frax-staking/frax-staking.ts @@ -2,7 +2,7 @@ import { EvmBatchProcessor } from '@subsquid/evm-processor' import { pad } from 'viem' import * as erc20 from '../../abi/erc20' -import { FraxStaking } from '../../model' +import { OETHFraxStaking } from '../../model' import { ensureExchangeRate } from '../../post-processors/exchange-rates' import { Context } from '../../processor' import { @@ -12,7 +12,7 @@ import { import { getLatestEntity, trackAddressBalances } from '../utils' interface ProcessResult { - fraxStakings: FraxStaking[] + fraxStakings: OETHFraxStaking[] promises: Promise[] } @@ -62,7 +62,7 @@ const processTransfer = async ( const timestampId = new Date(block.header.timestamp).toISOString() const { latest, current } = await getLatestEntity( ctx, - FraxStaking, + OETHFraxStaking, result.fraxStakings, timestampId, ) @@ -70,7 +70,7 @@ const processTransfer = async ( let fraxStaking = current if (!fraxStaking) { result.promises.push(ensureExchangeRate(ctx, block, 'ETH', 'sfrxETH')) - fraxStaking = new FraxStaking({ + fraxStaking = new OETHFraxStaking({ id: timestampId, timestamp: new Date(block.header.timestamp), blockNumber: block.header.height, diff --git a/src/processors/morpho-aave/morpho-aave.ts b/src/processors/morpho-aave/morpho-aave.ts index 6564828c..b020b99e 100644 --- a/src/processors/morpho-aave/morpho-aave.ts +++ b/src/processors/morpho-aave/morpho-aave.ts @@ -2,14 +2,14 @@ import { EvmBatchProcessor } from '@subsquid/evm-processor' import { pad } from 'viem' import * as erc20 from '../../abi/erc20' -import { MorphoAave } from '../../model' +import { OETHMorphoAave } from '../../model' import { ensureExchangeRate } from '../../post-processors/exchange-rates' import { Context } from '../../processor' import { OETH_MORPHO_AAVE_ADDRESS, WETH_ADDRESS } from '../../utils/addresses' import { getLatestEntity, trackAddressBalances } from '../utils' interface ProcessResult { - morphoAaves: MorphoAave[] + morphoAaves: OETHMorphoAave[] } export const from = 17367102 // https://etherscan.io/tx/0x15294349d566059bb37e200b2dba45428e237d6050de11862aa57c7875476526 @@ -56,7 +56,7 @@ const processTransfer = async ( const timestampId = new Date(block.header.timestamp).toISOString() const { latest, current } = await getLatestEntity( ctx, - MorphoAave, + OETHMorphoAave, result.morphoAaves, timestampId, ) @@ -64,7 +64,7 @@ const processTransfer = async ( let morphoAave = current if (!morphoAave) { await ensureExchangeRate(ctx, block, 'ETH', 'WETH') // No async since WETH. 
- morphoAave = new MorphoAave({ + morphoAave = new OETHMorphoAave({ id: timestampId, timestamp: new Date(block.header.timestamp), blockNumber: block.header.height, diff --git a/src/processors/oeth/oeth.ts b/src/processors/oeth/oeth.ts index 65014643..c0d32634 100644 --- a/src/processors/oeth/oeth.ts +++ b/src/processors/oeth/oeth.ts @@ -1,27 +1,18 @@ import { EvmBatchProcessor } from '@subsquid/evm-processor' -import { v4 as uuidv4 } from 'uuid' -import * as oeth from '../../abi/oeth' -import * as oethVault from '../../abi/oeth-vault' +import * as otoken from '../../abi/otoken' +import * as otokenVault from '../../abi/otoken-vault' import { - APY, - Address, - History, HistoryType, OETH, - Rebase, - RebaseOption, - RebasingOption, + OETHAPY, + OETHAddress, + OETHHistory, + OETHRebase, + OETHRebaseOption, } from '../../model' -import { Context } from '../../processor' -import { - ADDRESS_ZERO, - OETH_ADDRESS, - OETH_VAULT_ADDRESS, -} from '../../utils/addresses' -import { DECIMALS_18 } from '../../utils/constants' -import { getLatestEntity } from '../utils' -import { createAddress, createRebaseAPY } from './utils' +import { createOTokenProcessor } from '../../processor-templates/otoken' +import { OETH_ADDRESS, OETH_VAULT_ADDRESS } from '../../utils/addresses' export const from = 16933090 // https://etherscan.io/tx/0x3b4ece4f5fef04bf7ceaec4f6c6edf700540d7597589f8da0e3a8c94264a3b50 @@ -29,337 +20,32 @@ export const setup = (processor: EvmBatchProcessor) => { processor.addTrace({ type: ['call'], callSighash: [ - oeth.functions.rebaseOptOut.sighash, - oeth.functions.rebaseOptIn.sighash, + otoken.functions.rebaseOptOut.sighash, + otoken.functions.rebaseOptIn.sighash, ], transaction: true, }) processor.addLog({ address: [OETH_ADDRESS], topic0: [ - oeth.events.Transfer.topic, - oeth.events.TotalSupplyUpdatedHighres.topic, + otoken.events.Transfer.topic, + otoken.events.TotalSupplyUpdatedHighres.topic, ], transaction: true, }) processor.addLog({ address: [OETH_VAULT_ADDRESS], - topic0: [oethVault.events.YieldDistribution.topic], + topic0: [otokenVault.events.YieldDistribution.topic], }) } -interface ProcessResult { - initialized: boolean - initialize: () => Promise - oeths: OETH[] - history: History[] - rebases: Rebase[] - rebaseOptions: RebaseOption[] - apies: APY[] - owners: Map - lastYieldDistributionEvent?: { - fee: bigint - yield: bigint - } -} - -export const process = async (ctx: Context) => { - const result: ProcessResult = { - initialized: false, - // Saves ~5ms init time if we have no filter matches. - initialize: async () => { - if (result.initialized) return - result.initialized = true - result.owners = await ctx.store - .find(Address) - .then((q) => new Map(q.map((i) => [i.id, i]))) - }, - oeths: [], - history: [], - rebases: [], - rebaseOptions: [], - apies: [], - // get all addresses from the database. - // we need this because we increase their balance based on rebase events - owners: undefined as unknown as Map, // We want to error if someone forgets to initialize. 
- } - - for (const block of ctx.blocks) { - for (const trace of block.traces) { - await processRebaseOpt(ctx, result, block, trace) - } - for (const log of block.logs) { - await processTransfer(ctx, result, block, log) - await processYieldDistribution(ctx, result, block, log) - await processTotalSupplyUpdatedHighres(ctx, result, block, log) - } - } - - if (result.owners) { - await ctx.store.upsert([...result.owners.values()]) - } - await ctx.store.upsert(result.apies) - await ctx.store.insert(result.oeths) - await ctx.store.insert(result.history) - await ctx.store.insert(result.rebases) - await ctx.store.insert(result.rebaseOptions) -} - -const processTransfer = async ( - ctx: Context, - result: ProcessResult, - block: Context['blocks']['0'], - log: Context['blocks']['0']['logs']['0'], -) => { - if (log.address !== OETH_ADDRESS) return - if (log.topics[0] === oeth.events.Transfer.topic) { - await result.initialize() - const dataRaw = oeth.events.Transfer.decode(log) - const data = { - from: dataRaw.from.toLowerCase(), - to: dataRaw.to.toLowerCase(), - value: dataRaw.value, - } - - const oethObject = await getLatestOETHObject(ctx, result, block) - if (data.from === ADDRESS_ZERO) { - oethObject.totalSupply += data.value - } else if (data.to === ADDRESS_ZERO) { - oethObject.totalSupply -= data.value - } - - // Bind the token contract to the block number - const token = new oeth.Contract(ctx, block.header, OETH_ADDRESS) - // Transfer events - let addressSub = result.owners.get(data.from) - let addressAdd = result.owners.get(data.to) - - if (addressSub == null) { - addressSub = await createAddress(ctx, data.from) - result.owners.set(addressSub.id, addressSub) - } - if (addressAdd == null) { - addressAdd = await createAddress(ctx, data.to) - result.owners.set(addressAdd.id, addressAdd) - } - - addressSub.lastUpdated = new Date(block.header.timestamp) - addressAdd.lastUpdated = new Date(block.header.timestamp) - - const isSwap = [data.from, data.to].includes(ADDRESS_ZERO) - - // update the address balance - await Promise.all( - [addressSub, addressAdd].map(async (address) => { - const credits = await token.creditsBalanceOfHighres(address.id) - const newBalance = (credits[0] * DECIMALS_18) / credits[1] - const change = newBalance - address.balance - result.history.push( - new History({ - // we can't use {t.id} because it's not unique - id: uuidv4(), - address: address, - value: change, - balance: newBalance, - timestamp: new Date(block.header.timestamp), - blockNumber: block.header.height, - txHash: log.transactionHash, - type: isSwap - ? HistoryType.Swap - : addressSub === address - ? 
HistoryType.Sent - : HistoryType.Received, - }), - ) - address.credits = BigInt(credits[0]) // token credits - address.balance = newBalance // token balance - }), - ) - - if ( - addressAdd.rebasingOption === RebasingOption.OptOut && - data.from === ADDRESS_ZERO - ) { - // If it's a mint and minter has opted out of rebasing, - // add to non-rebasing supply - oethObject.nonRebasingSupply += data.value - } else if ( - data.to === ADDRESS_ZERO && - addressSub.rebasingOption === RebasingOption.OptOut - ) { - // If it's a redeem and redeemer has opted out of rebasing, - // subtract non-rebasing supply - oethObject.nonRebasingSupply -= data.value - } else if ( - addressAdd.rebasingOption === RebasingOption.OptOut && - addressSub.rebasingOption === RebasingOption.OptIn - ) { - // If receiver has opted out but sender hasn't, - // Add to non-rebasing supply - oethObject.nonRebasingSupply += data.value - } else if ( - addressAdd.rebasingOption === RebasingOption.OptIn && - addressSub.rebasingOption === RebasingOption.OptOut - ) { - // If sender has opted out but receiver hasn't, - // Subtract non-rebasing supply - oethObject.nonRebasingSupply -= data.value - } - - // Update rebasing supply in all cases - oethObject.rebasingSupply = - oethObject.totalSupply - oethObject.nonRebasingSupply - } -} - -const processTotalSupplyUpdatedHighres = async ( - ctx: Context, - result: ProcessResult, - block: Context['blocks']['0'], - log: Context['blocks']['0']['logs']['0'], -) => { - if (log.address !== OETH_ADDRESS) return - if (log.topics[0] !== oeth.events.TotalSupplyUpdatedHighres.topic) return - - await result.initialize() - const data = oeth.events.TotalSupplyUpdatedHighres.decode(log) - - // OETH Object - const oethObject = await getLatestOETHObject(ctx, result, block) - oethObject.totalSupply = data.totalSupply - oethObject.rebasingSupply = - oethObject.totalSupply - oethObject.nonRebasingSupply - - if (!result.lastYieldDistributionEvent) { - throw new Error('lastYieldDistributionEvent is not set') - } - - // Rebase events - let rebase = createRebaseAPY( - ctx, - result.apies, - block, - log, - data, - result.lastYieldDistributionEvent, - ) - for (const address of result.owners.values()) { - if (address.rebasingOption === RebasingOption.OptOut) { - continue - } - const newBalance = - (address.credits * DECIMALS_18) / data.rebasingCreditsPerToken - const earned = newBalance - address.balance - - result.history.push( - new History({ - id: uuidv4(), - // we can't use {t.id} because it's not unique - address: address, - value: earned, - balance: newBalance, - timestamp: new Date(block.header.timestamp), - blockNumber: block.header.height, - txHash: log.transactionHash, - type: HistoryType.Yield, - }), - ) - - address.balance = newBalance - address.earned += earned - } - const entity = await rebase - result.rebases.push(entity) -} - -const processYieldDistribution = async ( - ctx: Context, - result: ProcessResult, - block: Context['blocks']['0'], - log: Context['blocks']['0']['logs']['0'], -) => { - if (log.address !== OETH_VAULT_ADDRESS) return - if (log.topics[0] !== oethVault.events.YieldDistribution.topic) return - - await result.initialize() - const { _yield, _fee } = oethVault.events.YieldDistribution.decode(log) - result.lastYieldDistributionEvent = { yield: _yield, fee: _fee } -} - -const processRebaseOpt = async ( - ctx: Context, - result: ProcessResult, - block: Context['blocks']['0'], - trace: Context['blocks']['0']['traces']['0'], -) => { - if ( - trace.type === 'call' && - OETH_ADDRESS === 
trace.action.to && - (trace.action.sighash === oeth.functions.rebaseOptIn.sighash || - trace.action.sighash === oeth.functions.rebaseOptOut.sighash) - ) { - await result.initialize() - const timestamp = new Date(block.header.timestamp) - const blockNumber = block.header.height - const address = trace.action.from.toLowerCase() - const oethObject = await getLatestOETHObject(ctx, result, block) - let owner = result.owners.get(address) - if (!owner) { - owner = await createAddress(ctx, address, timestamp) - result.owners.set(address, owner) - } - - let rebaseOption = new RebaseOption({ - id: uuidv4(), - timestamp, - blockNumber, - txHash: trace.transaction?.hash, - address: owner, - status: owner.rebasingOption, - }) - result.rebaseOptions.push(rebaseOption) - if (trace.action.sighash === oeth.functions.rebaseOptIn.sighash) { - owner.rebasingOption = RebasingOption.OptIn - rebaseOption.status = RebasingOption.OptIn - oethObject.nonRebasingSupply -= owner.balance - oethObject.rebasingSupply = - oethObject.totalSupply - oethObject.nonRebasingSupply - } - if (trace.action.sighash === oeth.functions.rebaseOptOut.sighash) { - owner.rebasingOption = RebasingOption.OptOut - rebaseOption.status = RebasingOption.OptOut - oethObject.nonRebasingSupply += owner.balance - oethObject.rebasingSupply = - oethObject.totalSupply - oethObject.nonRebasingSupply - } - } -} - -const getLatestOETHObject = async ( - ctx: Context, - result: ProcessResult, - block: Context['blocks']['0'], -) => { - const timestampId = new Date(block.header.timestamp).toISOString() - const { latest, current } = await getLatestEntity( - ctx, - OETH, - result.oeths, - timestampId, - ) - - let oethObject = current - if (!oethObject) { - oethObject = new OETH({ - id: timestampId, - timestamp: new Date(block.header.timestamp), - blockNumber: block.header.height, - totalSupply: latest?.totalSupply ?? 0n, - rebasingSupply: latest?.rebasingSupply ?? 0n, - nonRebasingSupply: latest?.nonRebasingSupply ?? 0n, - }) - result.oeths.push(oethObject) - } - - return oethObject -} +export const process = createOTokenProcessor({ + OTOKEN_ADDRESS: OETH_ADDRESS, + OTOKEN_VAULT_ADDRESS: OETH_VAULT_ADDRESS, + OToken: OETH, + OTokenAddress: OETHAddress, + OTokenHistory: OETHHistory, + OTokenAPY: OETHAPY, + OTokenRebase: OETHRebase, + OTokenRebaseOption: OETHRebaseOption, +}) diff --git a/src/processors/ousd/index.ts b/src/processors/ousd/index.ts new file mode 100644 index 00000000..997f08b8 --- /dev/null +++ b/src/processors/ousd/index.ts @@ -0,0 +1 @@ +export * from './ousd' diff --git a/src/processors/ousd/ousd.ts b/src/processors/ousd/ousd.ts new file mode 100644 index 00000000..c4e842a8 --- /dev/null +++ b/src/processors/ousd/ousd.ts @@ -0,0 +1,56 @@ +import { EvmBatchProcessor } from '@subsquid/evm-processor' + +import * as otoken from '../../abi/otoken' +import * as otokenVault from '../../abi/otoken-vault' +import { + OUSD, + OUSDAPY, + OUSDAddress, + OUSDHistory, + OUSDRebase, + OUSDRebaseOption, +} from '../../model' +import { createOTokenProcessor } from '../../processor-templates/otoken' +import { OUSD_ADDRESS, OUSD_VAULT_ADDRESS } from '../../utils/addresses' + +// V1 which we aren't coded for yet. +// export const from = 10884563 // https://etherscan.io/tx/0x9141921f5ebf072e58c00fe56332b6bee0c02f0ae4f54c42999b8a3a88662681 + +// Current version which we work with. 
+export const from = 13533937 // https://etherscan.io/tx/0xc9b6fc6a4fad18dad197ff7d0636f74bf066671d75656849a1c45122e00d54cf + +export const setup = (processor: EvmBatchProcessor) => { + processor.addTrace({ + type: ['call'], + callSighash: [ + otoken.functions.rebaseOptOut.sighash, + otoken.functions.rebaseOptIn.sighash, + ], + transaction: true, + }) + processor.addLog({ + address: [OUSD_ADDRESS], + topic0: [ + otoken.events.Transfer.topic, + otoken.events.TotalSupplyUpdatedHighres.topic, + ], + transaction: true, + }) + processor.addLog({ + address: [OUSD_VAULT_ADDRESS], + topic0: [otokenVault.events.YieldDistribution.topic], + }) +} + +// TODO: Handle the version upgrade gracefully so we have accurate numbers. + +export const process = createOTokenProcessor({ + OTOKEN_ADDRESS: OUSD_ADDRESS, + OTOKEN_VAULT_ADDRESS: OUSD_VAULT_ADDRESS, + OToken: OUSD, + OTokenAddress: OUSDAddress, + OTokenHistory: OUSDHistory, + OTokenAPY: OUSDAPY, + OTokenRebase: OUSDRebase, + OTokenRebaseOption: OUSDRebaseOption, +}) diff --git a/src/processors/strategies/balancer-meta-pool.ts b/src/processors/strategies/balancer-meta-pool.ts index edeabb88..1ed59d46 100644 --- a/src/processors/strategies/balancer-meta-pool.ts +++ b/src/processors/strategies/balancer-meta-pool.ts @@ -4,7 +4,7 @@ import * as balancerMetaPoolStrategy from '../../abi/balancer-meta-pool-strategy import * as baseRewardPool4626 from '../../abi/base-reward-pool-4626' import * as metaStablePool from '../../abi/meta-stable-pool' import * as originLens from '../../abi/origin-lens' -import { BalancerMetaPoolStrategy } from '../../model' +import { OETHBalancerMetaPoolStrategy } from '../../model' import { ensureExchangeRates } from '../../post-processors/exchange-rates' import { Context } from '../../processor' import { RETH_ADDRESS, WETH_ADDRESS } from '../../utils/addresses' @@ -70,7 +70,7 @@ const topicsToListenTo = new Set([ ]) interface ProcessResult { - strategies: BalancerMetaPoolStrategy[] + strategies: OETHBalancerMetaPoolStrategy[] promises: Promise[] } @@ -115,7 +115,7 @@ export const updateValues = async ( const [{ current, latest }, { total, rETH, weth }] = await Promise.all([ getLatestEntity( ctx, - BalancerMetaPoolStrategy, + OETHBalancerMetaPoolStrategy, result.strategies, timestampId, ), @@ -142,7 +142,7 @@ export const updateValues = async ( latest.weth !== weth ) { result.strategies.push( - new BalancerMetaPoolStrategy({ + new OETHBalancerMetaPoolStrategy({ id: timestampId, blockNumber: block.header.height, timestamp: new Date(block.header.timestamp), diff --git a/src/processors/vault/vault.ts b/src/processors/vault/vault.ts index a8069d5c..f03e68df 100644 --- a/src/processors/vault/vault.ts +++ b/src/processors/vault/vault.ts @@ -3,7 +3,7 @@ import { pad } from 'viem' import * as erc20 from '../../abi/erc20' import * as lido from '../../abi/lido' -import { Vault } from '../../model' +import { OETHVault } from '../../model' import { ensureExchangeRates } from '../../post-processors/exchange-rates' import { Context } from '../../processor' import { @@ -17,7 +17,7 @@ import { import { getLatestEntity, trackAddressBalances } from '../utils' interface ProcessResult { - vaults: Vault[] + vaults: OETHVault[] promises: Promise[] } @@ -66,7 +66,7 @@ const processStEthRebase = async ( log.address === STETH_ADDRESS && log.topics[0] === lido.events.TokenRebased.topic ) { - const { vault } = await getLatestVault(ctx, result, block) + const { vault } = await getLatestOETHVault(ctx, result, block) const contract = new lido.Contract(ctx, 
block.header, STETH_ADDRESS) vault.stETH = await contract.balanceOf(OETH_VAULT_ADDRESS) } @@ -84,7 +84,7 @@ const processTransfer = async ( address: OETH_VAULT_ADDRESS, tokens: VAULT_ERC20_ADDRESSES, fn: async ({ token, change }) => { - const { vault } = await getLatestVault(ctx, result, block) + const { vault } = await getLatestOETHVault(ctx, result, block) if (token === WETH_ADDRESS) { vault.weth += change } else if (token === RETH_ADDRESS) { @@ -99,7 +99,7 @@ const processTransfer = async ( } } -const getLatestVault = async ( +const getLatestOETHVault = async ( ctx: Context, result: ProcessResult, block: Context['blocks']['0'], @@ -108,7 +108,7 @@ const getLatestVault = async ( const timestampId = new Date(block.header.timestamp).toISOString() const { latest, current } = await getLatestEntity( ctx, - Vault, + OETHVault, result.vaults, timestampId, ) @@ -123,7 +123,7 @@ const getLatestVault = async ( ['ETH', 'frxETH'], ]), ) - vault = new Vault({ + vault = new OETHVault({ id: timestampId, timestamp: new Date(block.header.timestamp), blockNumber: block.header.height, diff --git a/src/utils/addresses.ts b/src/utils/addresses.ts index 6567c1dd..d455a26b 100644 --- a/src/utils/addresses.ts +++ b/src/utils/addresses.ts @@ -2,6 +2,9 @@ export const ADDRESS_ZERO = '0x0000000000000000000000000000000000000000' +export const OUSD_ADDRESS = '0x2a8e1e676ec238d8a992307b495b45b3feaa5e86' +export const OUSD_VAULT_ADDRESS = '0xe75d77b1865ae93c7eaa3040b038d7aa7bc02f70' + export const OETH_ADDRESS = '0x856c4efb76c1d1ae02e20ceb03a2a6a08b0b8dc3' export const OETH_VAULT_ADDRESS = '0x39254033945aa2e4809cc2977e7087bee48bd7ab' diff --git a/types.graphql b/types.graphql index bc5b64fc..41eb3051 100644 --- a/types.graphql +++ b/types.graphql @@ -15,4 +15,7 @@ scalar BigInt scalar BigDecimal scalar Bytes scalar DateTime -scalar JSON \ No newline at end of file +scalar JSON + + +# OToken Types \ No newline at end of file From 024abd24e9945712a0af365c0ec58a3695018017 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Mon, 16 Oct 2023 11:59:48 -0700 Subject: [PATCH 05/13] feat: data requirements thoughts speed improvements and more requirements work --- CHANGELOG.md | 2 +- abi/otoken-1.json | 199 ----------------- docs/REQUIREMENTS.md | 17 +- graphql.config.yml | 5 + schema-base.graphql | 3 +- schema-curve.graphql | 8 + schema.graphql | 12 +- src/abi/otoken-1.abi.ts | 205 ------------------ src/abi/otoken-1.ts | 94 -------- src/main.ts | 16 +- src/model/generated/curvePoolBalance.model.ts | 29 +++ src/model/generated/index.ts | 1 + src/processor-templates/otoken/otoken.ts | 20 +- src/processor.ts | 95 +++++--- src/processors/curve-lp/curve-lp.ts | 4 + src/processors/dripper/dripper.ts | 2 + src/processors/frax-staking/frax-staking.ts | 2 + src/processors/morpho-aave/morpho-aave.ts | 2 + src/processors/oeth/oeth.ts | 3 + src/processors/ousd/ousd.ts | 8 +- .../strategies/balancer-meta-pool.ts | 3 + src/processors/vault/vault.ts | 3 + 22 files changed, 175 insertions(+), 558 deletions(-) delete mode 100644 abi/otoken-1.json create mode 100644 schema-curve.graphql delete mode 100644 src/abi/otoken-1.abi.ts delete mode 100644 src/abi/otoken-1.ts create mode 100644 src/model/generated/curvePoolBalance.model.ts diff --git a/CHANGELOG.md b/CHANGELOG.md index 930f79cc..b9053439 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,5 +2,5 @@ ## v7 -- APY numbers have been changed to return proper percentages. (v6 value / 100) +- APY numbers have been changed to return proper percentages. 
(v6 value / 100) **BREAKING** - Added `ExchangeRate` entities. \ No newline at end of file diff --git a/abi/otoken-1.json b/abi/otoken-1.json deleted file mode 100644 index 3a46600b..00000000 --- a/abi/otoken-1.json +++ /dev/null @@ -1,199 +0,0 @@ -[ - { - "inputs": [], - "name": "_totalSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "_account", - "type": "address" - } - ], - "name": "creditsBalanceOfHighres", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - }, - { - "internalType": "uint256", - "name": "", - "type": "uint256" - }, - { - "internalType": "bool", - "name": "", - "type": "bool" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "isUpgraded", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "nonRebasingCreditsPerToken", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "nonRebasingSupply", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "name": "rebaseState", - "outputs": [ - { - "internalType": "enum OUSDResolutionUpgrade.RebaseOptions", - "name": "", - "type": "uint8" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "rebasingCredits", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "rebasingCreditsHighres", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "rebasingCreditsPerToken", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [], - "name": "rebasingCreditsPerTokenHighres", - "outputs": [ - { - "internalType": "uint256", - "name": "", - "type": "uint256" - } - ], - "stateMutability": "view", - "type": "function" - }, - { - "inputs": [ - { - "internalType": "address[]", - "name": "accounts", - "type": "address[]" - } - ], - "name": "upgradeAccounts", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "upgradeGlobals", - "outputs": [], - "stateMutability": "nonpayable", - "type": "function" - }, - { - "inputs": [], - "name": "vaultAddress", - "outputs": [ - { - "internalType": "address", - "name": "", - "type": "address" - } - ], - "stateMutability": "view", - "type": "function" - } -] \ No newline at end of file diff --git a/docs/REQUIREMENTS.md b/docs/REQUIREMENTS.md index 77079fde..2f56e6de 100644 --- a/docs/REQUIREMENTS.md +++ b/docs/REQUIREMENTS.md @@ -159,11 +159,11 @@ const balancerMetaStablePoolABI = [ #### oeth - curvePoolBalanceMetric: `poolContract.balances(0) or 
poolContract.balances(1)` - - addresses.EthFrxEthPool: ETH frxETH - - addresses.EthStEthPool: ETH stETH - - addresses.REthEthPool: rETH ETH - - addresses.WEthStEthPool: WETH stETH - - addresses.OEthEthPool: OETH ETH + - EthFrxEthPool: "0xa1f8a6807c402e4a15ef4eba36528a3fed24e577" - ETH frxETH + - REthEthPool: "0x0f3159811670c117c372428d4e69ac32325e4d0f" - ETH stETH + - EthStEthPool: "0xDC24316b9AE028F1497c275EB9192a3Ea0f67022" - rETH ETH + - WEthStEthPool: "0x828b154032950c8ff7cf8085d841723db2696056" - WETH stETH + - OEthEthPool: "0x94B17476A93b3262d87B9a326965D1E91f9c13E7" - OETH ETH #### aave_comp_platforms @@ -201,12 +201,13 @@ const aaveAssetToPlatformMap = { - OUSD - A virtually identical implementation should work for OUSD. +For each of the above: -- `rebasingCreditsPerTokenMetric`: OETH, OUSD +- `rebasingCreditsPerTokenMetric` - `event TotalSupplyUpdatedHighres(uint256 totalSupply, uint256 rebasingCredits, uint256 rebasingCreditsPerToken)` -- `rebasingCreditsMetric`: OETH, OUSD +- `rebasingCreditsMetric` - `function rebasingCredits() external view returns (uint256)` -- `nonRebasingSupplyMetric`: OETH, OUSD +- `nonRebasingSupplyMetric` - `function nonRebasingSupply() external view returns (uint256)` ##### Ramblings diff --git a/graphql.config.yml b/graphql.config.yml index 5b40586e..612bcc5d 100644 --- a/graphql.config.yml +++ b/graphql.config.yml @@ -20,5 +20,10 @@ projects: include: - schema-base.graphql - types.graphql + curve: + schema: schema-curve.graphql + include: + - schema-base.graphql + - types.graphql include: - types.graphql diff --git a/schema-base.graphql b/schema-base.graphql index 53550978..846f4551 100644 --- a/schema-base.graphql +++ b/schema-base.graphql @@ -26,4 +26,5 @@ enum HistoryType { Sent Received Yield -} \ No newline at end of file +} + diff --git a/schema-curve.graphql b/schema-curve.graphql new file mode 100644 index 00000000..e05be2a1 --- /dev/null +++ b/schema-curve.graphql @@ -0,0 +1,8 @@ +type CurvePoolBalance @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + address: String! + balance0: BigInt! + balance1: BigInt! +} diff --git a/schema.graphql b/schema.graphql index f1f34e47..5c7875cd 100644 --- a/schema.graphql +++ b/schema.graphql @@ -28,7 +28,17 @@ enum HistoryType { Sent Received Yield -}""" +} + +type CurvePoolBalance @entity { + id: ID! + timestamp: DateTime! @index + blockNumber: Int! @index + address: String! + balance0: BigInt! + balance1: BigInt! +} +""" The OETH entity tracks the change in total supply of OETH over time. 
""" type OETH @entity { diff --git a/src/abi/otoken-1.abi.ts b/src/abi/otoken-1.abi.ts deleted file mode 100644 index 7c4741f2..00000000 --- a/src/abi/otoken-1.abi.ts +++ /dev/null @@ -1,205 +0,0 @@ -export const ABI_JSON = [ - { - "type": "function", - "name": "_totalSupply", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "creditsBalanceOfHighres", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [ - { - "type": "address", - "name": "_account" - } - ], - "outputs": [ - { - "type": "uint256", - "name": "" - }, - { - "type": "uint256", - "name": "" - }, - { - "type": "bool", - "name": "" - } - ] - }, - { - "type": "function", - "name": "isUpgraded", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [ - { - "type": "address", - "name": "" - } - ], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "nonRebasingCreditsPerToken", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [ - { - "type": "address", - "name": "" - } - ], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "nonRebasingSupply", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "rebaseState", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [ - { - "type": "address", - "name": "" - } - ], - "outputs": [ - { - "type": "uint8", - "name": "" - } - ] - }, - { - "type": "function", - "name": "rebasingCredits", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "rebasingCreditsHighres", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "rebasingCreditsPerToken", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "rebasingCreditsPerTokenHighres", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "uint256", - "name": "" - } - ] - }, - { - "type": "function", - "name": "upgradeAccounts", - "constant": false, - "payable": false, - "inputs": [ - { - "type": "address[]", - "name": "accounts" - } - ], - "outputs": [] - }, - { - "type": "function", - "name": "upgradeGlobals", - "constant": false, - "payable": false, - "inputs": [], - "outputs": [] - }, - { - "type": "function", - "name": "vaultAddress", - "constant": true, - "stateMutability": "view", - "payable": false, - "inputs": [], - "outputs": [ - { - "type": "address", - "name": "" - } - ] - } -] diff --git a/src/abi/otoken-1.ts b/src/abi/otoken-1.ts deleted file mode 100644 index 77c5e356..00000000 --- a/src/abi/otoken-1.ts +++ /dev/null @@ -1,94 +0,0 @@ -import * as ethers from 'ethers' -import {LogEvent, Func, ContractBase} from './abi.support' -import {ABI_JSON} from './otoken-1.abi' - -export const abi = new ethers.Interface(ABI_JSON); - -export const functions = { - _totalSupply: new Func<[], {}, bigint>( - abi, 
'0x3eaaf86b' - ), - creditsBalanceOfHighres: new Func<[_account: string], {_account: string}, [_: bigint, _: bigint, _: boolean]>( - abi, '0xe5c4fffe' - ), - isUpgraded: new Func<[_: string], {}, bigint>( - abi, '0x95ef84b9' - ), - nonRebasingCreditsPerToken: new Func<[_: string], {}, bigint>( - abi, '0x609350cd' - ), - nonRebasingSupply: new Func<[], {}, bigint>( - abi, '0xe696393a' - ), - rebaseState: new Func<[_: string], {}, number>( - abi, '0x456ee286' - ), - rebasingCredits: new Func<[], {}, bigint>( - abi, '0x077f22b7' - ), - rebasingCreditsHighres: new Func<[], {}, bigint>( - abi, '0x7d0d66ff' - ), - rebasingCreditsPerToken: new Func<[], {}, bigint>( - abi, '0x6691cb3d' - ), - rebasingCreditsPerTokenHighres: new Func<[], {}, bigint>( - abi, '0x7a46a9c5' - ), - upgradeAccounts: new Func<[accounts: Array], {accounts: Array}, []>( - abi, '0xeec037f6' - ), - upgradeGlobals: new Func<[], {}, []>( - abi, '0x51cfd6fe' - ), - vaultAddress: new Func<[], {}, string>( - abi, '0x430bf08a' - ), -} - -export class Contract extends ContractBase { - - _totalSupply(): Promise { - return this.eth_call(functions._totalSupply, []) - } - - creditsBalanceOfHighres(_account: string): Promise<[_: bigint, _: bigint, _: boolean]> { - return this.eth_call(functions.creditsBalanceOfHighres, [_account]) - } - - isUpgraded(arg0: string): Promise { - return this.eth_call(functions.isUpgraded, [arg0]) - } - - nonRebasingCreditsPerToken(arg0: string): Promise { - return this.eth_call(functions.nonRebasingCreditsPerToken, [arg0]) - } - - nonRebasingSupply(): Promise { - return this.eth_call(functions.nonRebasingSupply, []) - } - - rebaseState(arg0: string): Promise { - return this.eth_call(functions.rebaseState, [arg0]) - } - - rebasingCredits(): Promise { - return this.eth_call(functions.rebasingCredits, []) - } - - rebasingCreditsHighres(): Promise { - return this.eth_call(functions.rebasingCreditsHighres, []) - } - - rebasingCreditsPerToken(): Promise { - return this.eth_call(functions.rebasingCreditsPerToken, []) - } - - rebasingCreditsPerTokenHighres(): Promise { - return this.eth_call(functions.rebasingCreditsPerTokenHighres, []) - } - - vaultAddress(): Promise { - return this.eth_call(functions.vaultAddress, []) - } -} diff --git a/src/main.ts b/src/main.ts index 246b8318..26014c7b 100644 --- a/src/main.ts +++ b/src/main.ts @@ -14,15 +14,13 @@ run({ processors: [ oeth, ousd, - // vault, - // fraxStaking, - // morphoAave, - // dripper, - // curveLp, - // balancerMetaPoolStrategy, + vault, + fraxStaking, + morphoAave, + dripper, + curveLp, + balancerMetaPoolStrategy, ], // For processors which depend on results from other processors, post processors run after all processors have finished. 
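Each element of these processor arrays satisfies the `Processor` shape defined in `src/processor.ts`: an optional `name` and `from` block, a `setup` hook that registers log/trace subscriptions, and an async `process` that runs once per batch, with post-processors running only after every processor has finished. As a rough sketch only — the address, topic, and start block below are illustrative placeholders following the same pattern as the dripper and Morpho Aave processors, not part of this patch:

```ts
import { EvmBatchProcessor } from '@subsquid/evm-processor'

import * as erc20 from '../../abi/erc20'
import { Context } from '../../processor'
import { WETH_ADDRESS } from '../../utils/addresses'

// Illustrative start block only.
export const from = 16933090

export const setup = (processor: EvmBatchProcessor) => {
  // Subscribe only to the data this processor needs.
  processor.addLog({
    address: [WETH_ADDRESS],
    topic0: [erc20.events.Transfer.topic],
    range: { from },
  })
}

export const process = async (ctx: Context) => {
  // Walk the batch, derive entities, and persist them. Post-processors run
  // only after every processor in the array has completed for this batch.
  for (const block of ctx.blocks) {
    for (const log of block.logs) {
      // ...collect entities here...
    }
  }
  // await ctx.store.insert(collected)
}
```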
- postProcessors: [ - // exchangeRates - ], + postProcessors: [exchangeRates], }) diff --git a/src/model/generated/curvePoolBalance.model.ts b/src/model/generated/curvePoolBalance.model.ts new file mode 100644 index 00000000..782d7e25 --- /dev/null +++ b/src/model/generated/curvePoolBalance.model.ts @@ -0,0 +1,29 @@ +import {Entity as Entity_, Column as Column_, PrimaryColumn as PrimaryColumn_, Index as Index_} from "typeorm" +import * as marshal from "./marshal" + +@Entity_() +export class CurvePoolBalance { + constructor(props?: Partial) { + Object.assign(this, props) + } + + @PrimaryColumn_() + id!: string + + @Index_() + @Column_("timestamp with time zone", {nullable: false}) + timestamp!: Date + + @Index_() + @Column_("int4", {nullable: false}) + blockNumber!: number + + @Column_("text", {nullable: false}) + address!: string + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + balance0!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + balance1!: bigint +} diff --git a/src/model/generated/index.ts b/src/model/generated/index.ts index 9964427f..43b7e16d 100644 --- a/src/model/generated/index.ts +++ b/src/model/generated/index.ts @@ -1,4 +1,5 @@ export * from "./exchangeRate.model" +export * from "./curvePoolBalance.model" export * from "./oeth.model" export * from "./oethAddress.model" export * from "./_rebasingOption" diff --git a/src/processor-templates/otoken/otoken.ts b/src/processor-templates/otoken/otoken.ts index ec9504ed..63a664fd 100644 --- a/src/processor-templates/otoken/otoken.ts +++ b/src/processor-templates/otoken/otoken.ts @@ -45,6 +45,7 @@ type OTokenRebaseOption = | EntityClass export const createOTokenProcessor = (params: { + Upgrade_CreditsBalanceOfHighRes?: number OTOKEN_ADDRESS: string OTOKEN_VAULT_ADDRESS: string OToken: OToken @@ -166,9 +167,22 @@ export const createOTokenProcessor = (params: { // update the address balance await Promise.all( [addressSub, addressAdd].map(async (address) => { - const credits = await token.creditsBalanceOfHighres(address.id) - const newBalance = (credits[0] * DECIMALS_18) / credits[1] - const change = newBalance - address.balance + let credits: [bigint, bigint] = [0n, 0n] + let newBalance: bigint + let change: bigint + if ( + block.header.height >= (params.Upgrade_CreditsBalanceOfHighRes ?? 0) + ) { + const credits = await token.creditsBalanceOfHighres(address.id) + newBalance = (credits[0] * DECIMALS_18) / credits[1] + change = newBalance - address.balance + } else { + const credits = await token + .creditsBalanceOf(address.id) + .then((credits) => credits.map((credit) => credit * 1000000000n)) + newBalance = (credits[0] * DECIMALS_18) / credits[1] + change = newBalance - address.balance + } result.history.push( new params.OTokenHistory({ // we can't use {t.id} because it's not unique diff --git a/src/processor.ts b/src/processor.ts index df56b903..5ce074e8 100644 --- a/src/processor.ts +++ b/src/processor.ts @@ -59,10 +59,13 @@ export const processor = new EvmBatchProcessor() interface Processor { name?: string from?: number + initialize?: (ctx: Context) => Promise // To only be run once per `sqd process`. 
setup?: (p: typeof processor) => void process: (ctx: Context) => Promise } +let initialized = false + export const run = ({ processors, postProcessors = [], @@ -79,43 +82,67 @@ export const run = ({ processor.run( new TypeormDatabase({ supportHotBlocks: true }), async (ctx) => { - resetProcessorState() - let start: number - const time = (name: string) => () => { - const message = `${name} ${Date.now() - start}ms` - return () => ctx.log.info(message) - } + try { + resetProcessorState() + let start: number + const time = (name: string) => () => { + const message = `${name} ${Date.now() - start}ms` + return () => ctx.log.info(message) + } - ctx.log.info(`=== processing from ${ctx.blocks[0].header.height}`) - start = Date.now() - const times = await Promise.all( - processors.map((p, index) => - p.process(ctx).then(time(p.name ?? `processor-${index}`)), - ), - ) - times.forEach((t) => t()) + // Initialization Run + if (!initialized) { + ctx.log.info(`=== initializing`) + start = Date.now() + const times = await Promise.all( + processors + .filter((p) => p.initialize) + .map((p, index) => + p.initialize!(ctx).then( + time(p.name ?? `initializing processor-${index}`), + ), + ), + ) + times.forEach((t) => t()) + } - start = Date.now() - const postTimes = await Promise.all( - postProcessors.map((p, index) => - p.process(ctx).then(time(p.name ?? `postProcessor-${index}`)), - ), - ) - postTimes.forEach((t) => t()) + // Main Processing Run + ctx.log.info(`=== processing from ${ctx.blocks[0].header.height}`) + start = Date.now() + const times = await Promise.all( + processors.map((p, index) => + p.process(ctx).then(time(p.name ?? `processor-${index}`)), + ), + ) + times.forEach((t) => t()) - // ctx.log.info({ - // blocks: ctx.blocks.length, - // logs: ctx.blocks.reduce((sum, block) => sum + block.logs.length, 0), - // traces: ctx.blocks.reduce((sum, block) => sum + block.traces.length, 0), - // transactions: ctx.blocks.reduce( - // (sum, block) => sum + block.transactions.length, - // 0, - // ), - // logArray: ctx.blocks.reduce( - // (logs, block) => [...logs, ...block.logs], - // [] as Log[], - // ), - // }) + // Post Processing Run + start = Date.now() + const postTimes = await Promise.all( + postProcessors.map((p, index) => + p.process(ctx).then(time(p.name ?? `postProcessor-${index}`)), + ), + ) + postTimes.forEach((t) => t()) + } catch (err) { + ctx.log.info({ + blocks: ctx.blocks.length, + logs: ctx.blocks.reduce((sum, block) => sum + block.logs.length, 0), + traces: ctx.blocks.reduce( + (sum, block) => sum + block.traces.length, + 0, + ), + transactions: ctx.blocks.reduce( + (sum, block) => sum + block.transactions.length, + 0, + ), + logArray: ctx.blocks.reduce( + (logs, block) => [...logs, ...block.logs], + [] as Log[], + ), + }) + throw err + } }, ) } diff --git a/src/processors/curve-lp/curve-lp.ts b/src/processors/curve-lp/curve-lp.ts index 22c6c8c0..ba17b3d1 100644 --- a/src/processors/curve-lp/curve-lp.ts +++ b/src/processors/curve-lp/curve-lp.ts @@ -34,16 +34,19 @@ export const setup = (processor: EvmBatchProcessor) => { curveLpToken.events.RemoveLiquidityOne.topic, // curve_lp_token.events.TokenExchange.topic, // Not sure if including this helps get up-to-date eth balances. 
], + range: { from }, }) processor.addLog({ address: [OETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic1: [pad(OETH_CURVE_LP_ADDRESS)], + range: { from }, }) processor.addLog({ address: [OETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic2: [pad(OETH_CURVE_LP_ADDRESS)], + range: { from }, }) processor.addLog({ address: [OETH_CURVE_REWARD_LP_ADDRESS], @@ -52,6 +55,7 @@ export const setup = (processor: EvmBatchProcessor) => { baseRewardPool.events.Withdrawn.topic, ], topic1: [pad(OETH_CONVEX_ADDRESS)], + range: { from }, }) // Not sure if this is needed to get up-to-date ETH balances. // processor.addTransaction({ diff --git a/src/processors/dripper/dripper.ts b/src/processors/dripper/dripper.ts index 80eb352c..3d3c5c59 100644 --- a/src/processors/dripper/dripper.ts +++ b/src/processors/dripper/dripper.ts @@ -19,11 +19,13 @@ export const setup = (processor: EvmBatchProcessor) => { address: [WETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic1: [pad(OETH_DRIPPER_ADDRESS)], + range: { from }, }) processor.addLog({ address: [WETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic2: [pad(OETH_DRIPPER_ADDRESS)], + range: { from }, }) } diff --git a/src/processors/frax-staking/frax-staking.ts b/src/processors/frax-staking/frax-staking.ts index ba409a57..8a9bf5d1 100644 --- a/src/processors/frax-staking/frax-staking.ts +++ b/src/processors/frax-staking/frax-staking.ts @@ -23,11 +23,13 @@ export const setup = (processor: EvmBatchProcessor) => { address: [SFRXETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic1: [pad(OETH_FRAX_STAKING_ADDRESS)], + range: { from }, }) processor.addLog({ address: [SFRXETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic2: [pad(OETH_FRAX_STAKING_ADDRESS)], + range: { from }, }) } diff --git a/src/processors/morpho-aave/morpho-aave.ts b/src/processors/morpho-aave/morpho-aave.ts index b020b99e..8668ba4f 100644 --- a/src/processors/morpho-aave/morpho-aave.ts +++ b/src/processors/morpho-aave/morpho-aave.ts @@ -19,11 +19,13 @@ export const setup = (processor: EvmBatchProcessor) => { address: [WETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic1: [pad(OETH_MORPHO_AAVE_ADDRESS)], + range: { from }, }) processor.addLog({ address: [WETH_ADDRESS], topic0: [erc20.events.Transfer.topic], topic2: [pad(OETH_MORPHO_AAVE_ADDRESS)], + range: { from }, }) } diff --git a/src/processors/oeth/oeth.ts b/src/processors/oeth/oeth.ts index c0d32634..a99ae13a 100644 --- a/src/processors/oeth/oeth.ts +++ b/src/processors/oeth/oeth.ts @@ -24,6 +24,7 @@ export const setup = (processor: EvmBatchProcessor) => { otoken.functions.rebaseOptIn.sighash, ], transaction: true, + range: { from }, }) processor.addLog({ address: [OETH_ADDRESS], @@ -32,10 +33,12 @@ export const setup = (processor: EvmBatchProcessor) => { otoken.events.TotalSupplyUpdatedHighres.topic, ], transaction: true, + range: { from }, }) processor.addLog({ address: [OETH_VAULT_ADDRESS], topic0: [otokenVault.events.YieldDistribution.topic], + range: { from }, }) } diff --git a/src/processors/ousd/ousd.ts b/src/processors/ousd/ousd.ts index c4e842a8..4e49c40d 100644 --- a/src/processors/ousd/ousd.ts +++ b/src/processors/ousd/ousd.ts @@ -13,10 +13,8 @@ import { import { createOTokenProcessor } from '../../processor-templates/otoken' import { OUSD_ADDRESS, OUSD_VAULT_ADDRESS } from '../../utils/addresses' -// V1 which we aren't coded for yet. 
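The credits handling added to the OToken template above (and reused for OUSD here) reduces to one conversion: an account's balance is its rebasing credits divided by the credits-per-token factor, with pre-high-resolution values scaled up by 1e9 so they line up with the high-resolution units used after the upgrade. A condensed sketch of that calculation, mirroring the template code rather than adding anything new:

```ts
const DECIMALS_18 = 10n ** 18n

// balance = credits * 1e18 / creditsPerToken (bigint math, 18-decimal result)
const balanceFromCredits = (credits: bigint, creditsPerToken: bigint): bigint =>
  (credits * DECIMALS_18) / creditsPerToken

// Post-upgrade blocks read high-resolution values directly:
//   const [credits, creditsPerToken] = await token.creditsBalanceOfHighres(address)
// Pre-upgrade blocks read low-resolution values and scale both by 1e9 first:
//   const [credits, creditsPerToken] = (await token.creditsBalanceOf(address))
//     .map((c) => c * 1_000_000_000n)
// In both cases the stored change is balanceFromCredits(...) minus the
// previously recorded address balance.
```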
// export const from = 10884563 // https://etherscan.io/tx/0x9141921f5ebf072e58c00fe56332b6bee0c02f0ae4f54c42999b8a3a88662681 - -// Current version which we work with. +// export const from = 11585978 // OUSDReset - Has issues with archive queries. :( export const from = 13533937 // https://etherscan.io/tx/0xc9b6fc6a4fad18dad197ff7d0636f74bf066671d75656849a1c45122e00d54cf export const setup = (processor: EvmBatchProcessor) => { @@ -27,6 +25,7 @@ export const setup = (processor: EvmBatchProcessor) => { otoken.functions.rebaseOptIn.sighash, ], transaction: true, + range: { from }, }) processor.addLog({ address: [OUSD_ADDRESS], @@ -35,16 +34,19 @@ export const setup = (processor: EvmBatchProcessor) => { otoken.events.TotalSupplyUpdatedHighres.topic, ], transaction: true, + range: { from }, }) processor.addLog({ address: [OUSD_VAULT_ADDRESS], topic0: [otokenVault.events.YieldDistribution.topic], + range: { from }, }) } // TODO: Handle the version upgrade gracefully so we have accurate numbers. export const process = createOTokenProcessor({ + Upgrade_CreditsBalanceOfHighRes: 13533937, // https://etherscan.io/tx/0xc9b6fc6a4fad18dad197ff7d0636f74bf066671d75656849a1c45122e00d54cf OTOKEN_ADDRESS: OUSD_ADDRESS, OTOKEN_VAULT_ADDRESS: OUSD_VAULT_ADDRESS, OToken: OUSD, diff --git a/src/processors/strategies/balancer-meta-pool.ts b/src/processors/strategies/balancer-meta-pool.ts index 1ed59d46..ea7385b9 100644 --- a/src/processors/strategies/balancer-meta-pool.ts +++ b/src/processors/strategies/balancer-meta-pool.ts @@ -31,6 +31,7 @@ export const setup = (processor: EvmBatchProcessor) => { balancerMetaPoolStrategy.events.Deposit.topic, balancerMetaPoolStrategy.events.Withdrawal.topic, ], + range: { from }, }) processor.addLog({ address: [addresses.lpToken], @@ -38,6 +39,7 @@ export const setup = (processor: EvmBatchProcessor) => { metaStablePool.events.Transfer.topic, metaStablePool.events.PriceRateCacheUpdated.topic, ], + range: { from }, }) processor.addLog({ address: [addresses.auraRewardsPool], @@ -48,6 +50,7 @@ export const setup = (processor: EvmBatchProcessor) => { baseRewardPool4626.events.Withdrawn.topic, baseRewardPool4626.events.Withdraw.topic, ], + range: { from }, }) } diff --git a/src/processors/vault/vault.ts b/src/processors/vault/vault.ts index f03e68df..37817b94 100644 --- a/src/processors/vault/vault.ts +++ b/src/processors/vault/vault.ts @@ -28,15 +28,18 @@ export const setup = (processor: EvmBatchProcessor) => { address: VAULT_ERC20_ADDRESSES, topic0: [erc20.events.Transfer.topic], topic1: [pad(OETH_VAULT_ADDRESS)], + range: { from }, }) processor.addLog({ address: VAULT_ERC20_ADDRESSES, topic0: [erc20.events.Transfer.topic], topic2: [pad(OETH_VAULT_ADDRESS)], + range: { from }, }) processor.addLog({ address: [STETH_ADDRESS], topic0: [lido.events.TokenRebased.topic], + range: { from }, }) } From adb0b0f82c84eb16cbdebaea4587c862490456e1 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Mon, 16 Oct 2023 13:38:41 -0700 Subject: [PATCH 06/13] feat: data requirements thoughts wip on curve lps --- src/processor-templates/curve/curve.ts | 58 ++++++++++++++++++++++++++ src/processor-templates/curve/index.ts | 1 + src/processors/curve/curve.ts | 38 +++++++++++++++++ src/processors/curve/index.ts | 1 + 4 files changed, 98 insertions(+) create mode 100644 src/processor-templates/curve/curve.ts create mode 100644 src/processor-templates/curve/index.ts create mode 100644 src/processors/curve/curve.ts create mode 100644 src/processors/curve/index.ts diff --git 
a/src/processor-templates/curve/curve.ts b/src/processor-templates/curve/curve.ts new file mode 100644 index 00000000..7a2a612e --- /dev/null +++ b/src/processor-templates/curve/curve.ts @@ -0,0 +1,58 @@ +import { EvmBatchProcessor } from '@subsquid/evm-processor' + +import * as curveLpToken from '../../abi/curve-lp-token' +import { CurvePoolBalance } from '../../model' +import { Context } from '../../processor' + +interface ProcessResult { + curvePoolBalances: CurvePoolBalance[] +} + +const logsToListenFor = new Set([ + curveLpToken.events.AddLiquidity.topic, + curveLpToken.events.RemoveLiquidity.topic, + curveLpToken.events.RemoveLiquidityImbalance.topic, + curveLpToken.events.RemoveLiquidityOne.topic, + curveLpToken.events.TokenExchange.topic, // Not sure if including this helps get up-to-date eth balances. +]) + +export const createCurveSetup = + (poolAddress: string, from: number) => (processor: EvmBatchProcessor) => { + processor.addLog({ + address: [poolAddress], + topic0: [...logsToListenFor.values()], + range: { from }, + }) + } + +export const process = (poolAddress: string) => async (ctx: Context) => { + const result: ProcessResult = { + curvePoolBalances: [], + } + + for (const block of ctx.blocks) { + for (const log of block.logs) { + if (logsToListenFor.has(log.topics[0])) { + const contract = new curveLpToken.Contract( + ctx, + block.header, + poolAddress, + ) + const timestampId = new Date(block.header.timestamp).toISOString() + const [balance0, balance1] = await contract.get_balances() + result.curvePoolBalances.push( + new CurvePoolBalance({ + id: timestampId, + blockNumber: block.header.height, + timestamp: new Date(block.header.timestamp), + address: poolAddress, + balance0, + balance1, + }), + ) + } + } + } + + await ctx.store.insert(result.curvePoolBalances) +} diff --git a/src/processor-templates/curve/index.ts b/src/processor-templates/curve/index.ts new file mode 100644 index 00000000..e039a951 --- /dev/null +++ b/src/processor-templates/curve/index.ts @@ -0,0 +1 @@ +export * from './curve' diff --git a/src/processors/curve/curve.ts b/src/processors/curve/curve.ts new file mode 100644 index 00000000..6826a1d1 --- /dev/null +++ b/src/processors/curve/curve.ts @@ -0,0 +1,38 @@ +import { EvmBatchProcessor } from '@subsquid/evm-processor' +import { pad } from 'viem' + +import * as baseRewardPool from '../../abi/base-reward-pool' +import * as curveLpToken from '../../abi/curve-lp-token' +import * as erc20 from '../../abi/erc20' +import { CurvePoolBalance } from '../../model' +import { Context } from '../../processor' +import { createCurveSetup } from '../../processor-templates/curve' +import { + OETH_ADDRESS, + OETH_CONVEX_ADDRESS, + OETH_CURVE_LP_ADDRESS, + OETH_CURVE_REWARD_LP_ADDRESS, +} from '../../utils/addresses' +import { getEthBalance } from '../../utils/getEthBalance' +import { getLatestEntity, trackAddressBalances } from '../utils' + +interface ProcessResult { + curvePoolBalances: CurvePoolBalance[] +} + +export const from = Math.min(99999999) + +export const setup = createCurveSetup({}) + +export const process = async (ctx: Context) => { + const result: ProcessResult = { + curvePoolBalances: [], + } + + for (const block of ctx.blocks) { + for (const log of block.logs) { + } + } + + await ctx.store.insert(result.curvePoolBalances) +} diff --git a/src/processors/curve/index.ts b/src/processors/curve/index.ts new file mode 100644 index 00000000..e039a951 --- /dev/null +++ b/src/processors/curve/index.ts @@ -0,0 +1 @@ +export * from './curve' From 
05e595e9d43b026c09b4f5a2b0f1d1bc1969aa3b Mon Sep 17 00:00:00 2001 From: Rafael Ugolini Date: Mon, 16 Oct 2023 12:30:27 -0700 Subject: [PATCH 07/13] feat: data requirements thoughts - wip on curve lps - proof out multiple processors --- ...01515198-Data.js => 1697501723589-Data.js} | 10 +- schema-curve.graphql | 1 + schema.graphql | 1 + src/main.ts | 39 +-- src/model/generated/curvePoolBalance.model.ts | 3 + src/processor-templates/curve/curve.ts | 145 +++++++--- src/processor.ts | 251 +++++++++--------- src/processors/curve/curve.ts | 96 +++++-- 8 files changed, 350 insertions(+), 196 deletions(-) rename db/migrations/{1697301515198-Data.js => 1697501723589-Data.js} (96%) diff --git a/db/migrations/1697301515198-Data.js b/db/migrations/1697501723589-Data.js similarity index 96% rename from db/migrations/1697301515198-Data.js rename to db/migrations/1697501723589-Data.js index 09cba074..82fa31ce 100644 --- a/db/migrations/1697301515198-Data.js +++ b/db/migrations/1697501723589-Data.js @@ -1,10 +1,13 @@ -module.exports = class Data1697301515198 { - name = 'Data1697301515198' +module.exports = class Data1697501723589 { + name = 'Data1697501723589' async up(db) { await db.query(`CREATE TABLE "exchange_rate" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "pair" text NOT NULL, "base" text NOT NULL, "quote" text NOT NULL, "rate" numeric NOT NULL, CONSTRAINT "PK_5c5d27d2b900ef6cdeef0398472" PRIMARY KEY ("id"))`) await db.query(`CREATE INDEX "IDX_9e23a3f1bf3634820c873a0fe8" ON "exchange_rate" ("timestamp") `) await db.query(`CREATE INDEX "IDX_c61a93768eed9e58ce399bbe01" ON "exchange_rate" ("block_number") `) + await db.query(`CREATE TABLE "curve_pool_balance" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "address" text NOT NULL, "balance0" numeric NOT NULL, "balance1" numeric NOT NULL, "balance2" numeric NOT NULL, CONSTRAINT "PK_40412750bb910ca560aa084dd88" PRIMARY KEY ("id"))`) + await db.query(`CREATE INDEX "IDX_ffb0d0f86f03faacef7cb3e092" ON "curve_pool_balance" ("timestamp") `) + await db.query(`CREATE INDEX "IDX_db5522c865eb8ed76fa7aeb4a8" ON "curve_pool_balance" ("block_number") `) await db.query(`CREATE TABLE "oeth" ("id" character varying NOT NULL, "timestamp" TIMESTAMP WITH TIME ZONE NOT NULL, "block_number" integer NOT NULL, "total_supply" numeric NOT NULL, "rebasing_supply" numeric NOT NULL, "non_rebasing_supply" numeric NOT NULL, CONSTRAINT "PK_de1d885501070dbd1ab6f8577ba" PRIMARY KEY ("id"))`) await db.query(`CREATE INDEX "IDX_5b81a67229bac2d68e0dc92cc4" ON "oeth" ("timestamp") `) await db.query(`CREATE INDEX "IDX_408e5f79f83093aa5cf2b0ea32" ON "oeth" ("block_number") `) @@ -118,6 +121,9 @@ module.exports = class Data1697301515198 { await db.query(`DROP TABLE "exchange_rate"`) await db.query(`DROP INDEX "public"."IDX_9e23a3f1bf3634820c873a0fe8"`) await db.query(`DROP INDEX "public"."IDX_c61a93768eed9e58ce399bbe01"`) + await db.query(`DROP TABLE "curve_pool_balance"`) + await db.query(`DROP INDEX "public"."IDX_ffb0d0f86f03faacef7cb3e092"`) + await db.query(`DROP INDEX "public"."IDX_db5522c865eb8ed76fa7aeb4a8"`) await db.query(`DROP TABLE "oeth"`) await db.query(`DROP INDEX "public"."IDX_5b81a67229bac2d68e0dc92cc4"`) await db.query(`DROP INDEX "public"."IDX_408e5f79f83093aa5cf2b0ea32"`) diff --git a/schema-curve.graphql b/schema-curve.graphql index e05be2a1..a1736ba1 100644 --- a/schema-curve.graphql +++ b/schema-curve.graphql @@ -5,4 +5,5 @@ type 
CurvePoolBalance @entity { address: String! balance0: BigInt! balance1: BigInt! + balance2: BigInt! } diff --git a/schema.graphql b/schema.graphql index 5c7875cd..3f387663 100644 --- a/schema.graphql +++ b/schema.graphql @@ -37,6 +37,7 @@ type CurvePoolBalance @entity { address: String! balance0: BigInt! balance1: BigInt! + balance2: BigInt! } """ The OETH entity tracks the change in total supply of OETH over time. diff --git a/src/main.ts b/src/main.ts index 26014c7b..0cad0835 100644 --- a/src/main.ts +++ b/src/main.ts @@ -1,5 +1,6 @@ import * as exchangeRates from './post-processors/exchange-rates' import { run } from './processor' +import * as curve from './processors/curve' import * as curveLp from './processors/curve-lp' import * as dripper from './processors/dripper' import * as fraxStaking from './processors/frax-staking' @@ -9,18 +10,26 @@ import * as ousd from './processors/ousd' import * as balancerMetaPoolStrategy from './processors/strategies/balancer-meta-pool' import * as vault from './processors/vault' -run({ - // The idea is that these processors have zero dependencies on one another and can be processed asynchronously. - processors: [ - oeth, - ousd, - vault, - fraxStaking, - morphoAave, - dripper, - curveLp, - balancerMetaPoolStrategy, - ], - // For processors which depend on results from other processors, post processors run after all processors have finished. - postProcessors: [exchangeRates], -}) +run([ + { + stateSchema: 'curve-processing', + processors: [curve], + }, + { + stateSchema: 'oeth-processing', + processors: [ + oeth, + vault, + fraxStaking, + morphoAave, + dripper, + curveLp, + balancerMetaPoolStrategy, + ], + postProcessors: [exchangeRates], + }, + { + stateSchema: 'ousd-processing', + processors: [ousd], + }, +]) diff --git a/src/model/generated/curvePoolBalance.model.ts b/src/model/generated/curvePoolBalance.model.ts index 782d7e25..969a1114 100644 --- a/src/model/generated/curvePoolBalance.model.ts +++ b/src/model/generated/curvePoolBalance.model.ts @@ -26,4 +26,7 @@ export class CurvePoolBalance { @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) balance1!: bigint + + @Column_("numeric", {transformer: marshal.bigintTransformer, nullable: false}) + balance2!: bigint } diff --git a/src/processor-templates/curve/curve.ts b/src/processor-templates/curve/curve.ts index 7a2a612e..55e8df45 100644 --- a/src/processor-templates/curve/curve.ts +++ b/src/processor-templates/curve/curve.ts @@ -1,58 +1,141 @@ import { EvmBatchProcessor } from '@subsquid/evm-processor' +import dayjs from 'dayjs' +import { LessThanOrEqual } from 'typeorm' import * as curveLpToken from '../../abi/curve-lp-token' import { CurvePoolBalance } from '../../model' -import { Context } from '../../processor' +import { Context, Log } from '../../processor' interface ProcessResult { curvePoolBalances: CurvePoolBalance[] } +const historicUpdateFrequency = 24 * 60 * 60000 // Frequency of updates for historical data. + const logsToListenFor = new Set([ curveLpToken.events.AddLiquidity.topic, curveLpToken.events.RemoveLiquidity.topic, curveLpToken.events.RemoveLiquidityImbalance.topic, curveLpToken.events.RemoveLiquidityOne.topic, - curveLpToken.events.TokenExchange.topic, // Not sure if including this helps get up-to-date eth balances. 
+ curveLpToken.events.TokenExchange.topic, + curveLpToken.events.Transfer.topic, ]) -export const createCurveSetup = - (poolAddress: string, from: number) => (processor: EvmBatchProcessor) => { - processor.addLog({ - address: [poolAddress], - topic0: [...logsToListenFor.values()], - range: { from }, - }) - } +export const createCurveSetup = ( + poolAddress: string, + from: number, + processor: EvmBatchProcessor, +) => { + processor.addLog({ + address: [poolAddress], + topic0: [...logsToListenFor.values()], + range: { from }, + transaction: false, + }) +} -export const process = (poolAddress: string) => async (ctx: Context) => { - const result: ProcessResult = { - curvePoolBalances: [], - } +export const createCurveProcessor = + (poolAddress: string, count: number) => async (ctx: Context) => { + let last: CurvePoolBalance | undefined = undefined + const result: ProcessResult = { + curvePoolBalances: [], + } - for (const block of ctx.blocks) { - for (const log of block.logs) { - if (logsToListenFor.has(log.topics[0])) { + for (const block of ctx.blocks) { + const timestamp = new Date(block.header.timestamp) + const timestampId = timestamp.toISOString() + if (!last) { + last = await ctx.store.findOne(CurvePoolBalance, { + where: { id: LessThanOrEqual(timestampId) }, + order: { + id: 'desc', + }, + }) + } + if ( + last && + !ctx.isHead && + timestamp < dayjs(last.timestamp).add(1, 'day').toDate() + ) { + continue + } + const match = block.logs.find( + (log: Log) => + log.address === poolAddress && logsToListenFor.has(log.topics[0]), + ) + if (match) { const contract = new curveLpToken.Contract( ctx, block.header, poolAddress, ) - const timestampId = new Date(block.header.timestamp).toISOString() - const [balance0, balance1] = await contract.get_balances() - result.curvePoolBalances.push( - new CurvePoolBalance({ - id: timestampId, - blockNumber: block.header.height, - timestamp: new Date(block.header.timestamp), - address: poolAddress, - balance0, - balance1, - }), + + const balances = await Promise.all( + new Array(count) + .fill(0) + .map((_, index) => contract.balances(BigInt(index))), ) + const curve = new CurvePoolBalance({ + id: timestampId, + blockNumber: block.header.height, + timestamp: new Date(block.header.timestamp), + address: poolAddress, + balance0: balances[0] ?? 0n, + balance1: balances[1] ?? 0n, + balance2: balances[2] ?? 0n, + }) + result.curvePoolBalances.push(curve) + last = curve + // for (const log of block.logs) { + // if (filterFn(log)) { + // if (log.topics[0] === curveLpToken.events.AddLiquidity.topic) { + // const data = curveLpToken.events.AddLiquidity.decode(log) + // curve.balance0 += data.token_amounts[0] ?? 0n + // curve.balance1 += data.token_amounts[1] ?? 0n + // curve.balance2 += data.token_amounts[2] ?? 0n + // } else if ( + // log.topics[0] === curveLpToken.events.RemoveLiquidity.topic + // ) { + // const data = curveLpToken.events.RemoveLiquidity.decode(log) + // curve.balance0 -= data.token_amounts[0] ?? 0n + // curve.balance1 -= data.token_amounts[1] ?? 0n + // curve.balance2 -= data.token_amounts[2] ?? 0n + // } else if ( + // log.topics[0] === + // curveLpToken.events.RemoveLiquidityImbalance.topic + // ) { + // const data = + // curveLpToken.events.RemoveLiquidityImbalance.decode(log) + // curve.balance0 -= data.token_amounts[0] ?? 0n + // curve.balance1 -= data.token_amounts[1] ?? 0n + // curve.balance2 -= data.token_amounts[2] ?? 
0n + // } else if ( + // log.topics[0] === curveLpToken.events.TokenExchange.topic + // ) { + // const data = curveLpToken.events.TokenExchange.decode(log) + // if (data.bought_id > 2n || data.sold_id > 2n) { + // ctx.log.error(data, 'Unexpected id greater than 1') + // throw new Error('Unexpected id greater than 1') + // } + // if (data.bought_id === 0n) { + // curve.balance0 -= data.tokens_bought + // } else if (data.bought_id === 1n) { + // curve.balance1 -= data.tokens_bought + // } else if (data.bought_id === 2n) { + // curve.balance2 -= data.tokens_bought + // } + // if (data.sold_id === 0n) { + // curve.balance0 += data.tokens_sold + // } else if (data.sold_id === 1n) { + // curve.balance1 += data.tokens_sold + // } else if (data.sold_id === 2n) { + // curve.balance2 += data.tokens_sold + // } + // } + // // TODO: log.topics[0] === curveLpToken.events.RemoveLiquidityOne.topic + // } + // } } } + await ctx.store.insert(result.curvePoolBalances) } - - await ctx.store.insert(result.curvePoolBalances) -} diff --git a/src/processor.ts b/src/processor.ts index 5ce074e8..d7e0212e 100644 --- a/src/processor.ts +++ b/src/processor.ts @@ -5,149 +5,158 @@ import { EvmBatchProcessorFields, } from '@subsquid/evm-processor' import { Store, TypeormDatabase } from '@subsquid/typeorm-store' +import { create } from 'domain' import { resetProcessorState } from './utils/state' -export const processor = new EvmBatchProcessor() - .setDataSource({ - // Change the Archive endpoints for run the squid - // against the other EVM networks - // For a full list of supported networks and config options - // see https://docs.subsquid.io/evm-indexing/ - archive: lookupArchive('eth-mainnet'), +export const createSquidProcessor = () => + new EvmBatchProcessor() + .setDataSource({ + // Change the Archive endpoints for run the squid + // against the other EVM networks + // For a full list of supported networks and config options + // see https://docs.subsquid.io/evm-indexing/ + archive: lookupArchive('eth-mainnet'), - // Must be set for RPC ingestion (https://docs.subsquid.io/evm-indexing/evm-processor/) - // OR to enable contract state queries (https://docs.subsquid.io/evm-indexing/query-state/) - // chain: 'https://rpc.ankr.com/eth', - // chain: "https://mainnet.infura.io/v3/03b96dfbb4904c5c89c04680dd480064", - chain: { - url: process.env.RPC_ENDPOINT || 'http://localhost:8545', - // Alchemy is deprecating `eth_getBlockReceipts` https://docs.alchemy.com/reference/eth-getblockreceipts - // so we need to set `maxBatchCallSize` 1 to avoid using this method - maxBatchCallSize: 1, - }, - }) - .setFinalityConfirmation(10) - .setFields({ - transaction: { - from: true, - to: true, - hash: true, - gasUsed: true, - gas: true, - value: true, - sighash: true, - input: true, - status: true, - }, - log: { - transactionHash: true, - topics: true, - data: true, - }, - trace: { - callFrom: true, - callTo: true, - callSighash: true, - callValue: true, - callInput: true, - createResultAddress: true, - // action: true, - }, - }) + // Must be set for RPC ingestion (https://docs.subsquid.io/evm-indexing/evm-processor/) + // OR to enable contract state queries (https://docs.subsquid.io/evm-indexing/query-state/) + // chain: 'https://rpc.ankr.com/eth', + // chain: "https://mainnet.infura.io/v3/03b96dfbb4904c5c89c04680dd480064", + chain: { + url: process.env.RPC_ENDPOINT || 'http://localhost:8545', + // Alchemy is deprecating `eth_getBlockReceipts` https://docs.alchemy.com/reference/eth-getblockreceipts + // so we need to set 
`maxBatchCallSize` 1 to avoid using this method + maxBatchCallSize: 1, + }, + }) + .setFinalityConfirmation(10) + .setFields({ + transaction: { + from: true, + to: true, + hash: true, + gasUsed: true, + gas: true, + value: true, + sighash: true, + input: true, + status: true, + }, + log: { + transactionHash: true, + topics: true, + data: true, + }, + trace: { + callFrom: true, + callTo: true, + callSighash: true, + callValue: true, + callInput: true, + createResultAddress: true, + // action: true, + }, + }) interface Processor { name?: string from?: number initialize?: (ctx: Context) => Promise // To only be run once per `sqd process`. - setup?: (p: typeof processor) => void + setup?: (p: ReturnType) => void process: (ctx: Context) => Promise } let initialized = false -export const run = ({ - processors, - postProcessors = [], -}: { - processors: Processor[] - postProcessors: Processor[] -}) => { - processor.setBlockRange({ - from: Math.min( - ...(processors.map((p) => p.from).filter((x) => x) as number[]), - ), - }) - processors.forEach((p) => p.setup?.(processor)) - processor.run( - new TypeormDatabase({ supportHotBlocks: true }), - async (ctx) => { - try { - resetProcessorState() - let start: number - const time = (name: string) => () => { - const message = `${name} ${Date.now() - start}ms` - return () => ctx.log.info(message) - } +export const run = ( + params: { + stateSchema?: string + processors: Processor[] + postProcessors?: Processor[] + }[], +) => { + for (const { stateSchema, processors, postProcessors = [] } of params) { + const processor = createSquidProcessor() + processor.setBlockRange({ + from: Math.min( + ...(processors.map((p) => p.from).filter((x) => x) as number[]), + ), + }) + processors.forEach((p) => p.setup?.(processor)) + processor.run( + new TypeormDatabase({ stateSchema, supportHotBlocks: true }), + async (ctx) => { + try { + resetProcessorState() + let start: number + const time = (name: string) => () => { + const message = `${name} ${Date.now() - start}ms` + return () => ctx.log.info(message) + } + + // Initialization Run + if (!initialized) { + ctx.log.info(`=== initializing`) + start = Date.now() + const times = await Promise.all( + processors + .filter((p) => p.initialize) + .map((p, index) => + p.initialize!(ctx).then( + time(p.name ?? `initializing processor-${index}`), + ), + ), + ) + times.forEach((t) => t()) + } - // Initialization Run - if (!initialized) { - ctx.log.info(`=== initializing`) + // Main Processing Run + ctx.log.info(`=== processing from ${ctx.blocks[0].header.height}`) start = Date.now() const times = await Promise.all( - processors - .filter((p) => p.initialize) - .map((p, index) => - p.initialize!(ctx).then( - time(p.name ?? `initializing processor-${index}`), - ), - ), + processors.map((p, index) => + p.process(ctx).then(time(p.name ?? `processor-${index}`)), + ), ) times.forEach((t) => t()) - } - - // Main Processing Run - ctx.log.info(`=== processing from ${ctx.blocks[0].header.height}`) - start = Date.now() - const times = await Promise.all( - processors.map((p, index) => - p.process(ctx).then(time(p.name ?? `processor-${index}`)), - ), - ) - times.forEach((t) => t()) - // Post Processing Run - start = Date.now() - const postTimes = await Promise.all( - postProcessors.map((p, index) => - p.process(ctx).then(time(p.name ?? 
`postProcessor-${index}`)), - ), - ) - postTimes.forEach((t) => t()) - } catch (err) { - ctx.log.info({ - blocks: ctx.blocks.length, - logs: ctx.blocks.reduce((sum, block) => sum + block.logs.length, 0), - traces: ctx.blocks.reduce( - (sum, block) => sum + block.traces.length, - 0, - ), - transactions: ctx.blocks.reduce( - (sum, block) => sum + block.transactions.length, - 0, - ), - logArray: ctx.blocks.reduce( - (logs, block) => [...logs, ...block.logs], - [] as Log[], - ), - }) - throw err - } - }, - ) + if (postProcessors) { + // Post Processing Run + start = Date.now() + const postTimes = await Promise.all( + postProcessors.map((p, index) => + p.process(ctx).then(time(p.name ?? `postProcessor-${index}`)), + ), + ) + postTimes.forEach((t) => t()) + } + } catch (err) { + ctx.log.info({ + blocks: ctx.blocks.length, + logs: ctx.blocks.reduce((sum, block) => sum + block.logs.length, 0), + traces: ctx.blocks.reduce( + (sum, block) => sum + block.traces.length, + 0, + ), + transactions: ctx.blocks.reduce( + (sum, block) => sum + block.transactions.length, + 0, + ), + // logArray: ctx.blocks.reduce( + // (logs, block) => [...logs, ...block.logs], + // [] as Log[], + // ), + }) + throw err + } + }, + ) + } } -export type Fields = EvmBatchProcessorFields +export type Fields = EvmBatchProcessorFields< + ReturnType +> export type Context = DataHandlerContext export type Block = Context['blocks']['0'] export type Log = Context['blocks']['0']['logs']['0'] diff --git a/src/processors/curve/curve.ts b/src/processors/curve/curve.ts index 6826a1d1..8e31e4b4 100644 --- a/src/processors/curve/curve.ts +++ b/src/processors/curve/curve.ts @@ -1,38 +1,80 @@ import { EvmBatchProcessor } from '@subsquid/evm-processor' -import { pad } from 'viem' -import * as baseRewardPool from '../../abi/base-reward-pool' -import * as curveLpToken from '../../abi/curve-lp-token' -import * as erc20 from '../../abi/erc20' -import { CurvePoolBalance } from '../../model' import { Context } from '../../processor' -import { createCurveSetup } from '../../processor-templates/curve' import { - OETH_ADDRESS, - OETH_CONVEX_ADDRESS, - OETH_CURVE_LP_ADDRESS, - OETH_CURVE_REWARD_LP_ADDRESS, -} from '../../utils/addresses' -import { getEthBalance } from '../../utils/getEthBalance' -import { getLatestEntity, trackAddressBalances } from '../utils' - -interface ProcessResult { - curvePoolBalances: CurvePoolBalance[] -} + createCurveProcessor, + createCurveSetup, +} from '../../processor-templates/curve' -export const from = Math.min(99999999) +const ousdResetFrom = 11585978 +const oethDeployFrom = 16933090 -export const setup = createCurveSetup({}) +export const from = Math.min(ousdResetFrom, oethDeployFrom) -export const process = async (ctx: Context) => { - const result: ProcessResult = { - curvePoolBalances: [], - } +const pools = [ + // Curve (OUSD) + { + name: 'ThreePool', + address: '0xbebc44782c7db0a1a60cb6fe97d0b483032ff1c7', + from: Math.max(10809473, ousdResetFrom), + count: 3, + }, + { + name: 'OUSDMetapool', + address: '0x87650d7bbfc3a9f10587d7778206671719d9910d', + from: Math.max(12860905, ousdResetFrom), + count: 2, + }, + { + name: 'LUSDMetapool', + address: '0xed279fdd11ca84beef15af5d39bb4d4bee23f0ca', + from: Math.max(12184843, ousdResetFrom), + count: 2, + }, + + // Curve (OETH) + { + name: 'EthFrxEthPool', + address: '0xa1f8a6807c402e4a15ef4eba36528a3fed24e577', + from: Math.max(15741010, oethDeployFrom), + count: 2, + }, + { + name: 'REthEthPool', + address: '0x0f3159811670c117c372428d4e69ac32325e4d0f', + from: 
Math.max(16615906, oethDeployFrom), + count: 2, + }, + { + name: 'EthStEthPool', + address: '0xDC24316b9AE028F1497c275EB9192a3Ea0f67022', + from: Math.max(11592551, oethDeployFrom), + count: 2, + }, + { + name: 'WEthStEthPool', + address: '0x828b154032950c8ff7cf8085d841723db2696056', + from: Math.max(14759666, oethDeployFrom), + count: 2, + }, + { + name: 'OEthEthPool', + address: '0x94B17476A93b3262d87B9a326965D1E91f9c13E7', + from: Math.max(17130232, oethDeployFrom), + count: 2, + }, +] as const - for (const block of ctx.blocks) { - for (const log of block.logs) { - } +const processors = pools.map((pool) => + createCurveProcessor(pool.address.toLowerCase(), pool.count), +) + +export const setup = (processor: EvmBatchProcessor) => { + for (const pool of pools) { + createCurveSetup(pool.address.toLowerCase(), pool.from, processor) } +} - await ctx.store.insert(result.curvePoolBalances) +export const process = async (ctx: Context) => { + await Promise.all(processors.map((p) => p(ctx))) } From 056d102b064df6750e57f0a11c84c9dc82f8cdb2 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Tue, 17 Oct 2023 13:12:33 -0700 Subject: [PATCH 08/13] feat: data requirements thoughts (start v8) - process curve lps on a daily basis - fix issues with 'hook state' caused by multiple processors - otoken setup fn - processor names - --- squid.yaml | 4 +- src/main.ts | 6 +- .../exchange-rates/exchange-rates.ts | 8 +- src/processor-templates/curve/curve.ts | 147 ++++-------------- src/processor-templates/otoken/otoken.ts | 37 +++++ src/processor.ts | 42 +++-- src/processors/curve/curve.ts | 10 +- src/processors/oeth/oeth.ts | 40 ++--- src/processors/ousd/ousd.ts | 41 ++--- src/utils/state.ts | 13 +- 10 files changed, 138 insertions(+), 210 deletions(-) diff --git a/squid.yaml b/squid.yaml index db22728c..9edf0fdd 100644 --- a/squid.yaml +++ b/squid.yaml @@ -1,7 +1,7 @@ manifestVersion: subsquid.io/v0.1 name: origin-squid -version: 7 -description: 'The very first evm squid from manifest ' +version: 8 +description: 'Origin Protocol 🦑' build: deploy: addons: diff --git a/src/main.ts b/src/main.ts index 0cad0835..7f0bf03c 100644 --- a/src/main.ts +++ b/src/main.ts @@ -12,11 +12,11 @@ import * as vault from './processors/vault' run([ { - stateSchema: 'curve-processing', + name: 'curve', processors: [curve], }, { - stateSchema: 'oeth-processing', + name: 'oeth', processors: [ oeth, vault, @@ -29,7 +29,7 @@ run([ postProcessors: [exchangeRates], }, { - stateSchema: 'ousd-processing', + name: 'ousd', processors: [ousd], }, ]) diff --git a/src/post-processors/exchange-rates/exchange-rates.ts b/src/post-processors/exchange-rates/exchange-rates.ts index e328d0f5..4a3c46f5 100644 --- a/src/post-processors/exchange-rates/exchange-rates.ts +++ b/src/post-processors/exchange-rates/exchange-rates.ts @@ -4,11 +4,11 @@ import { useProcessorState } from '../../utils/state' import { Currency } from './currencies' import { getPrice } from './price-routing' -const useExchangeRates = () => - useProcessorState('exchange-rates', new Map()) +const useExchangeRates = (ctx: Context) => + useProcessorState(ctx, 'exchange-rates', new Map()) export const process = async (ctx: Context) => { - const [rates] = useExchangeRates() + const [rates] = useExchangeRates(ctx) if (rates.size > 0) { ctx.log.info({ count: rates.size }, 'exchange-rates') await ctx.store.insert([...rates.values()]) @@ -21,7 +21,7 @@ export const ensureExchangeRate = async ( base: Currency, quote: Currency, ) => { - const [exchangeRates] = useExchangeRates() + const 
[exchangeRates] = useExchangeRates(ctx) const pair = `${base}_${quote}` const blockNumber = block.header.height const id = `${blockNumber}:${pair}` diff --git a/src/processor-templates/curve/curve.ts b/src/processor-templates/curve/curve.ts index 55e8df45..02a352df 100644 --- a/src/processor-templates/curve/curve.ts +++ b/src/processor-templates/curve/curve.ts @@ -1,141 +1,58 @@ import { EvmBatchProcessor } from '@subsquid/evm-processor' -import dayjs from 'dayjs' -import { LessThanOrEqual } from 'typeorm' import * as curveLpToken from '../../abi/curve-lp-token' import { CurvePoolBalance } from '../../model' -import { Context, Log } from '../../processor' +import { Context } from '../../processor' interface ProcessResult { curvePoolBalances: CurvePoolBalance[] } -const historicUpdateFrequency = 24 * 60 * 60000 // Frequency of updates for historical data. - -const logsToListenFor = new Set([ - curveLpToken.events.AddLiquidity.topic, - curveLpToken.events.RemoveLiquidity.topic, - curveLpToken.events.RemoveLiquidityImbalance.topic, - curveLpToken.events.RemoveLiquidityOne.topic, - curveLpToken.events.TokenExchange.topic, - curveLpToken.events.Transfer.topic, -]) +const ESTIMATED_BPS = 12.06 // Circa 2023 +const SECONDS_PER_DAY = 86400 +const BLOCKS_PER_DAY = SECONDS_PER_DAY / ESTIMATED_BPS +const UPDATE_FREQUENCY = Math.floor(BLOCKS_PER_DAY) export const createCurveSetup = ( - poolAddress: string, from: number, processor: EvmBatchProcessor, ) => { - processor.addLog({ - address: [poolAddress], - topic0: [...logsToListenFor.values()], - range: { from }, - transaction: false, - }) + processor.includeAllBlocks({ from }) } export const createCurveProcessor = - (poolAddress: string, count: number) => async (ctx: Context) => { - let last: CurvePoolBalance | undefined = undefined + (poolAddress: string, count: number, from: number) => + async (ctx: Context) => { const result: ProcessResult = { curvePoolBalances: [], } - - for (const block of ctx.blocks) { + let last = 0 + const nextBlockIndex = ctx.blocks.findIndex( + (b) => b.header.height >= last + UPDATE_FREQUENCY, + ) + for (let i = nextBlockIndex; i < ctx.blocks.length; i += UPDATE_FREQUENCY) { + const block = ctx.blocks[i] + if (!block || block.header.height < from) continue const timestamp = new Date(block.header.timestamp) const timestampId = timestamp.toISOString() - if (!last) { - last = await ctx.store.findOne(CurvePoolBalance, { - where: { id: LessThanOrEqual(timestampId) }, - order: { - id: 'desc', - }, - }) - } - if ( - last && - !ctx.isHead && - timestamp < dayjs(last.timestamp).add(1, 'day').toDate() - ) { - continue - } - const match = block.logs.find( - (log: Log) => - log.address === poolAddress && logsToListenFor.has(log.topics[0]), - ) - if (match) { - const contract = new curveLpToken.Contract( - ctx, - block.header, - poolAddress, - ) + const contract = new curveLpToken.Contract(ctx, block.header, poolAddress) - const balances = await Promise.all( - new Array(count) - .fill(0) - .map((_, index) => contract.balances(BigInt(index))), - ) - const curve = new CurvePoolBalance({ - id: timestampId, - blockNumber: block.header.height, - timestamp: new Date(block.header.timestamp), - address: poolAddress, - balance0: balances[0] ?? 0n, - balance1: balances[1] ?? 0n, - balance2: balances[2] ?? 
0n, - }) - result.curvePoolBalances.push(curve) - last = curve - // for (const log of block.logs) { - // if (filterFn(log)) { - // if (log.topics[0] === curveLpToken.events.AddLiquidity.topic) { - // const data = curveLpToken.events.AddLiquidity.decode(log) - // curve.balance0 += data.token_amounts[0] ?? 0n - // curve.balance1 += data.token_amounts[1] ?? 0n - // curve.balance2 += data.token_amounts[2] ?? 0n - // } else if ( - // log.topics[0] === curveLpToken.events.RemoveLiquidity.topic - // ) { - // const data = curveLpToken.events.RemoveLiquidity.decode(log) - // curve.balance0 -= data.token_amounts[0] ?? 0n - // curve.balance1 -= data.token_amounts[1] ?? 0n - // curve.balance2 -= data.token_amounts[2] ?? 0n - // } else if ( - // log.topics[0] === - // curveLpToken.events.RemoveLiquidityImbalance.topic - // ) { - // const data = - // curveLpToken.events.RemoveLiquidityImbalance.decode(log) - // curve.balance0 -= data.token_amounts[0] ?? 0n - // curve.balance1 -= data.token_amounts[1] ?? 0n - // curve.balance2 -= data.token_amounts[2] ?? 0n - // } else if ( - // log.topics[0] === curveLpToken.events.TokenExchange.topic - // ) { - // const data = curveLpToken.events.TokenExchange.decode(log) - // if (data.bought_id > 2n || data.sold_id > 2n) { - // ctx.log.error(data, 'Unexpected id greater than 1') - // throw new Error('Unexpected id greater than 1') - // } - // if (data.bought_id === 0n) { - // curve.balance0 -= data.tokens_bought - // } else if (data.bought_id === 1n) { - // curve.balance1 -= data.tokens_bought - // } else if (data.bought_id === 2n) { - // curve.balance2 -= data.tokens_bought - // } - // if (data.sold_id === 0n) { - // curve.balance0 += data.tokens_sold - // } else if (data.sold_id === 1n) { - // curve.balance1 += data.tokens_sold - // } else if (data.sold_id === 2n) { - // curve.balance2 += data.tokens_sold - // } - // } - // // TODO: log.topics[0] === curveLpToken.events.RemoveLiquidityOne.topic - // } - // } - } + const balances = await Promise.all( + new Array(count) + .fill(0) + .map((_, index) => contract.balances(BigInt(index))), + ) + const curve = new CurvePoolBalance({ + id: `${poolAddress}-${timestampId}`, + blockNumber: block.header.height, + timestamp: new Date(block.header.timestamp), + address: poolAddress, + balance0: balances[0] ?? 0n, + balance1: balances[1] ?? 0n, + balance2: balances[2] ?? 
0n, + }) + result.curvePoolBalances.push(curve) + last = block.header.height } await ctx.store.insert(result.curvePoolBalances) } diff --git a/src/processor-templates/otoken/otoken.ts b/src/processor-templates/otoken/otoken.ts index 63a664fd..bb29da63 100644 --- a/src/processor-templates/otoken/otoken.ts +++ b/src/processor-templates/otoken/otoken.ts @@ -1,3 +1,4 @@ +import { EvmBatchProcessor } from '@subsquid/evm-processor' import { v4 as uuidv4 } from 'uuid' import * as otoken from '../../abi/otoken' @@ -44,6 +45,42 @@ type OTokenRebaseOption = | EntityClass | EntityClass +export const createOTokenSetup = + ({ + address, + vaultAddress, + from, + }: { + address: string + vaultAddress: string + from: number + }) => + (processor: EvmBatchProcessor) => { + processor.addTrace({ + type: ['call'], + callSighash: [ + otoken.functions.rebaseOptOut.sighash, + otoken.functions.rebaseOptIn.sighash, + ], + transaction: true, + range: { from }, + }) + processor.addLog({ + address: [address], + topic0: [ + otoken.events.Transfer.topic, + otoken.events.TotalSupplyUpdatedHighres.topic, + ], + transaction: true, + range: { from }, + }) + processor.addLog({ + address: [vaultAddress], + topic0: [otokenVault.events.YieldDistribution.topic], + range: { from }, + }) + } + export const createOTokenProcessor = (params: { Upgrade_CreditsBalanceOfHighRes?: number OTOKEN_ADDRESS: string diff --git a/src/processor.ts b/src/processor.ts index d7e0212e..c26766b3 100644 --- a/src/processor.ts +++ b/src/processor.ts @@ -4,10 +4,8 @@ import { EvmBatchProcessor, EvmBatchProcessorFields, } from '@subsquid/evm-processor' +import { createLogger } from '@subsquid/logger' import { Store, TypeormDatabase } from '@subsquid/typeorm-store' -import { create } from 'domain' - -import { resetProcessorState } from './utils/state' export const createSquidProcessor = () => new EvmBatchProcessor() @@ -70,13 +68,19 @@ let initialized = false export const run = ( params: { - stateSchema?: string + name: string processors: Processor[] postProcessors?: Processor[] }[], ) => { - for (const { stateSchema, processors, postProcessors = [] } of params) { + for (const { name, processors, postProcessors = [] } of params) { const processor = createSquidProcessor() + if (name) { + // Hack our logging so it's unique per processor. + const hackableProcessor = processor as any + hackableProcessor.getLogger = () => createLogger(`sqd:processor-${name}`) + } + processor.setBlockRange({ from: Math.min( ...(processors.map((p) => p.from).filter((x) => x) as number[]), @@ -84,10 +88,15 @@ export const run = ( }) processors.forEach((p) => p.setup?.(processor)) processor.run( - new TypeormDatabase({ stateSchema, supportHotBlocks: true }), - async (ctx) => { + new TypeormDatabase({ + stateSchema: `${name}-processor`, + supportHotBlocks: true, + }), + async (_ctx) => { + const ctx = _ctx as Context try { - resetProcessorState() + ctx.name = name + ctx.state = new Map() let start: number const time = (name: string) => () => { const message = `${name} ${Date.now() - start}ms` @@ -96,7 +105,8 @@ export const run = ( // Initialization Run if (!initialized) { - ctx.log.info(`=== initializing`) + initialized = true + ctx.log.info(`initializing`) start = Date.now() const times = await Promise.all( processors @@ -111,14 +121,15 @@ export const run = ( } // Main Processing Run - ctx.log.info(`=== processing from ${ctx.blocks[0].header.height}`) start = Date.now() const times = await Promise.all( processors.map((p, index) => p.process(ctx).then(time(p.name ?? 
`processor-${index}`)), ), ) - times.forEach((t) => t()) + if (process.env.DEBUG_PERF === 'true') { + times.forEach((t) => t()) + } if (postProcessors) { // Post Processing Run @@ -128,7 +139,9 @@ export const run = ( p.process(ctx).then(time(p.name ?? `postProcessor-${index}`)), ), ) - postTimes.forEach((t) => t()) + if (process.env.DEBUG_PERF === 'true') { + postTimes.forEach((t) => t()) + } } } catch (err) { ctx.log.info({ @@ -157,7 +170,10 @@ export const run = ( export type Fields = EvmBatchProcessorFields< ReturnType > -export type Context = DataHandlerContext +export type Context = DataHandlerContext & { + name: string + state: Map +} export type Block = Context['blocks']['0'] export type Log = Context['blocks']['0']['logs']['0'] export type Transaction = Context['blocks']['0']['transactions']['0'] diff --git a/src/processors/curve/curve.ts b/src/processors/curve/curve.ts index 8e31e4b4..ca08e7e9 100644 --- a/src/processors/curve/curve.ts +++ b/src/processors/curve/curve.ts @@ -65,16 +65,16 @@ const pools = [ }, ] as const -const processors = pools.map((pool) => - createCurveProcessor(pool.address.toLowerCase(), pool.count), -) - export const setup = (processor: EvmBatchProcessor) => { for (const pool of pools) { - createCurveSetup(pool.address.toLowerCase(), pool.from, processor) + createCurveSetup(pool.from, processor) } } +const processors = pools.map((pool) => + createCurveProcessor(pool.address.toLowerCase(), pool.count, pool.from), +) + export const process = async (ctx: Context) => { await Promise.all(processors.map((p) => p(ctx))) } diff --git a/src/processors/oeth/oeth.ts b/src/processors/oeth/oeth.ts index a99ae13a..b8fbf21a 100644 --- a/src/processors/oeth/oeth.ts +++ b/src/processors/oeth/oeth.ts @@ -1,9 +1,4 @@ -import { EvmBatchProcessor } from '@subsquid/evm-processor' - -import * as otoken from '../../abi/otoken' -import * as otokenVault from '../../abi/otoken-vault' import { - HistoryType, OETH, OETHAPY, OETHAddress, @@ -11,36 +6,19 @@ import { OETHRebase, OETHRebaseOption, } from '../../model' -import { createOTokenProcessor } from '../../processor-templates/otoken' +import { + createOTokenProcessor, + createOTokenSetup, +} from '../../processor-templates/otoken' import { OETH_ADDRESS, OETH_VAULT_ADDRESS } from '../../utils/addresses' export const from = 16933090 // https://etherscan.io/tx/0x3b4ece4f5fef04bf7ceaec4f6c6edf700540d7597589f8da0e3a8c94264a3b50 -export const setup = (processor: EvmBatchProcessor) => { - processor.addTrace({ - type: ['call'], - callSighash: [ - otoken.functions.rebaseOptOut.sighash, - otoken.functions.rebaseOptIn.sighash, - ], - transaction: true, - range: { from }, - }) - processor.addLog({ - address: [OETH_ADDRESS], - topic0: [ - otoken.events.Transfer.topic, - otoken.events.TotalSupplyUpdatedHighres.topic, - ], - transaction: true, - range: { from }, - }) - processor.addLog({ - address: [OETH_VAULT_ADDRESS], - topic0: [otokenVault.events.YieldDistribution.topic], - range: { from }, - }) -} +export const setup = createOTokenSetup({ + address: OETH_ADDRESS, + vaultAddress: OETH_VAULT_ADDRESS, + from, +}) export const process = createOTokenProcessor({ OTOKEN_ADDRESS: OETH_ADDRESS, diff --git a/src/processors/ousd/ousd.ts b/src/processors/ousd/ousd.ts index 4e49c40d..848907d5 100644 --- a/src/processors/ousd/ousd.ts +++ b/src/processors/ousd/ousd.ts @@ -1,7 +1,3 @@ -import { EvmBatchProcessor } from '@subsquid/evm-processor' - -import * as otoken from '../../abi/otoken' -import * as otokenVault from '../../abi/otoken-vault' import { OUSD, 
OUSDAPY, @@ -10,40 +6,21 @@ import { OUSDRebase, OUSDRebaseOption, } from '../../model' -import { createOTokenProcessor } from '../../processor-templates/otoken' +import { + createOTokenProcessor, + createOTokenSetup, +} from '../../processor-templates/otoken' import { OUSD_ADDRESS, OUSD_VAULT_ADDRESS } from '../../utils/addresses' // export const from = 10884563 // https://etherscan.io/tx/0x9141921f5ebf072e58c00fe56332b6bee0c02f0ae4f54c42999b8a3a88662681 // export const from = 11585978 // OUSDReset - Has issues with archive queries. :( export const from = 13533937 // https://etherscan.io/tx/0xc9b6fc6a4fad18dad197ff7d0636f74bf066671d75656849a1c45122e00d54cf -export const setup = (processor: EvmBatchProcessor) => { - processor.addTrace({ - type: ['call'], - callSighash: [ - otoken.functions.rebaseOptOut.sighash, - otoken.functions.rebaseOptIn.sighash, - ], - transaction: true, - range: { from }, - }) - processor.addLog({ - address: [OUSD_ADDRESS], - topic0: [ - otoken.events.Transfer.topic, - otoken.events.TotalSupplyUpdatedHighres.topic, - ], - transaction: true, - range: { from }, - }) - processor.addLog({ - address: [OUSD_VAULT_ADDRESS], - topic0: [otokenVault.events.YieldDistribution.topic], - range: { from }, - }) -} - -// TODO: Handle the version upgrade gracefully so we have accurate numbers. +export const setup = createOTokenSetup({ + address: OUSD_ADDRESS, + vaultAddress: OUSD_VAULT_ADDRESS, + from, +}) export const process = createOTokenProcessor({ Upgrade_CreditsBalanceOfHighRes: 13533937, // https://etherscan.io/tx/0xc9b6fc6a4fad18dad197ff7d0636f74bf066671d75656849a1c45122e00d54cf diff --git a/src/utils/state.ts b/src/utils/state.ts index b9d54444..71d2d5f0 100644 --- a/src/utils/state.ts +++ b/src/utils/state.ts @@ -1,8 +1,11 @@ -let state = new Map() -export const resetProcessorState = () => { - state = new Map() -} -export const useProcessorState = (key: string, defaultValue: T) => { +import { Context } from '../processor' + +export const useProcessorState = ( + ctx: Context, + key: string, + defaultValue: T, +) => { + const { state } = ctx let value = state.get(key) as T | undefined if (!value) { value = defaultValue From e28532828229ae921412f9f4e794f9b6fc7c9997 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Tue, 17 Oct 2023 13:27:35 -0700 Subject: [PATCH 09/13] feat: data requirements thoughts (start v8) - fix otoken template issue --- src/processor-templates/otoken/otoken.ts | 2 ++ src/processor-templates/otoken/utils.ts | 38 ++++++++++++++---------- 2 files changed, 24 insertions(+), 16 deletions(-) diff --git a/src/processor-templates/otoken/otoken.ts b/src/processor-templates/otoken/otoken.ts index bb29da63..7e51aae7 100644 --- a/src/processor-templates/otoken/otoken.ts +++ b/src/processor-templates/otoken/otoken.ts @@ -302,6 +302,8 @@ export const createOTokenProcessor = (params: { // Rebase events let rebase = createRebaseAPY( + params.OTokenAPY, + params.OTokenRebase, ctx, result.apies, block, diff --git a/src/processor-templates/otoken/utils.ts b/src/processor-templates/otoken/utils.ts index d7820123..eb05461e 100644 --- a/src/processor-templates/otoken/utils.ts +++ b/src/processor-templates/otoken/utils.ts @@ -6,13 +6,13 @@ import { OETHAPY, OETHAddress, OETHRebase, + OUSDAPY, OUSDAddress, + OUSDRebase, RebasingOption, } from '../../model' import { Context } from '../../processor' -export type Newable = { new (partial: Partial): T } - /** * Create a new Address entity */ @@ -40,9 +40,14 @@ export async function createAddress< /** * Create Rebase entity and set 
APY */ -export async function createRebaseAPY( +export async function createRebaseAPY< + TOTokenAPY extends typeof OETHAPY | typeof OUSDAPY, + TOTokenRebase extends typeof OETHRebase | typeof OUSDRebase, +>( + OTokenAPY: TOTokenAPY, + OTokenRebase: TOTokenRebase, ctx: Context, - apies: OETHAPY[], + apies: InstanceType[], block: Context['blocks']['0'], log: Context['blocks']['0']['logs']['0'], rebaseEvent: ReturnType< @@ -52,8 +57,8 @@ export async function createRebaseAPY( fee: bigint yield: bigint }, -): Promise { - const rebase = new OETHRebase({ +) { + const rebase = new OTokenRebase({ id: log.id, blockNumber: block.header.height, timestamp: new Date(block.header.timestamp), @@ -72,15 +77,19 @@ export async function createRebaseAPY( // get last APY to compare with current one let lastApy = apies.find((apy) => apy.id < dateId) ?? - (await ctx.store.findOne(OETHAPY, { + (await ctx.store.findOne(OTokenAPY, { where: { id: LessThan(dateId) }, order: { id: 'DESC' }, })) // check if there is already an APY for the current date - let apy = apies.find((apy) => apy.id === dateId) + let apy: InstanceType | undefined = apies.find( + (apy) => apy.id === dateId, + ) if (!apy) { - apy = await ctx.store.findOne(OETHAPY, { where: { id: dateId } }) + apy = (await ctx.store.findOne(OTokenAPY, { + where: { id: dateId }, + })) as InstanceType if (apy) { apies.push(apy) } @@ -88,13 +97,13 @@ export async function createRebaseAPY( // ctx.log.info(`APY: ${dateId} ${apy}, ${lastDateId} ${lastApy}`); // create a new APY if it doesn't exist if (!apy) { - apy = new OETHAPY({ + apy = new OTokenAPY({ id: dateId, blockNumber: block.header.height, timestamp: new Date(block.header.timestamp), txHash: log.transactionHash, rebasingCreditsPerToken: rebaseEvent.rebasingCreditsPerToken, - }) + }) as InstanceType apies.push(apy) } @@ -145,14 +154,11 @@ export async function createRebaseAPY( // calculate average APY for the last 7, 14 and 30 days await Promise.all( [last7daysDateId, last14daysDateId, last30daysDateId].map(async (i) => { - const pastAPYs = await ctx.store.findBy(OETHAPY, { + const pastAPYs = await ctx.store.findBy(OTokenAPY, { id: MoreThanOrEqual(i.value), }) apy![i.key] = - pastAPYs.reduce( - (acc: number, cur: OETHAPY) => acc + cur.apy, - apy!.apy, - ) / + pastAPYs.reduce((acc, cur) => acc + cur.apy, apy!.apy) / (pastAPYs.length + 1) }), ) From fcd0602a6e6c38ccabd2712d49a9e3d1c0158921 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Tue, 17 Oct 2023 13:40:07 -0700 Subject: [PATCH 10/13] feat: data requirements thoughts (start v8) - docs: changelog / readme --- CHANGELOG.md | 12 ++++++++++++ README.md | 5 ++++- 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b9053439..8fa43f7c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # Changelog +## v8 + +- Ability to handle multiple squid processors: + - [main.ts](src%2Fmain.ts) + - [processor.ts](src%2Fprocessor.ts) +- `curve` processor & template + - processing by day historically and then realtime thereafter +- schema spread out into multiple `graphql` files and built via `yarn codegen` +- created otoken processor to handle OUSD and OETH contract processing +- `ousd` processing added, however the data has not yet been validated + - We're unable to process as far back as we want to due to an archive server bug. (reported to them) + ## v7 - APY numbers have been changed to return proper percentages. 
(v6 value / 100) **BREAKING** diff --git a/README.md b/README.md index f79a9640..24c44007 100644 --- a/README.md +++ b/README.md @@ -42,9 +42,12 @@ Full description of `schema.graphql` dialect is available [here](https://docs.su Mapping developers use TypeORM [EntityManager](https://typeorm.io/#/working-with-entity-manager) to interact with target database during data processing. All necessary entity classes are -generated by the squid framework from `schema.graphql`. This is done by running `sqd codegen` +generated by the squid framework from `schema.graphql`. This is done by running `yarn codegen` command. +NOTE: We don't directly use the `sqd codegen` command because we generate our schema.graphql file first in +the `yarn codegen` script. + ### 3. Generate database migrations All database changes are applied through migration files located at `db/migrations`. From f2f67bc4579c89ddffea9d541685ec797e0c554f Mon Sep 17 00:00:00 2001 From: Nick Poulden Date: Tue, 17 Oct 2023 14:47:55 -0600 Subject: [PATCH 11/13] Daily stats proposal --- docs/daily-stats.md | 136 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 136 insertions(+) create mode 100644 docs/daily-stats.md diff --git a/docs/daily-stats.md b/docs/daily-stats.md new file mode 100644 index 00000000..b256b804 --- /dev/null +++ b/docs/daily-stats.md @@ -0,0 +1,136 @@ +# Daily Stats type + +This type should be able to handle most of the charts and stats for oeth.com. +Stats for today would be kept up to date in real time. + +## Definition + +```graphql +dailyStats { + id: ID! """Timestamp, eg 2023-10-17""" + blockNumber: Int! """Last block number stats were updated""" + timestamp: DateTime! """Timestamp of block number stats were updated""" + + apr: Float! + apy: Float! + apy7DayAvg: Float! + apy14DayAvg: Float! + apy30DayAvg: Float! + + totalSupply: BigInt! + totalSupplyUSD: Float! + rebasingSupply: BigInt! + nonRebasingSupply: BigInt! + amoSupply: BigInt! + + yield: BigInt! + fees: BigInt! + revenue: BigInt! + revenue7DayAvg: BigInt! + revenue7DayTotal: BigInt! + revenueAllTime: BigInt! + + pegPrice: BigInt! """Price of OETH in ETH""" + + strategies { + id: String! + name: String! + address: String! """Contract address of the strategy""" + total: BigInt! """Sum of tokens in strategy""" + tvl: BigInt! """Total ETH value""" + holdings { + symbol: String! """Token symbol""" + amount: BigInt! """Amount held""" + value: BigInt! """Total ETH value""" + } + } + + collateral { + symbol: String! """Token symbol""" + amount: BigInt! """Amount held""" + price: BigInt! """Price in ETH""" + value: BigInt! 
"""Total ETH value""" + } +} +``` + +## Example response + +```json +{ + dailyStats: [ + { + id: "2023-10-17", + blockNumber: 18361379, + timestamp: "2023-10-16T07:15:11.000Z", + + apr: 0.14973706558715433, + apy: 0.16144112845098268, + apy7DayAvg: 0.12818496009582647 + apy14DayAvg: 0.12818496009582647, + apy30DayAvg: 0.12818496009582647, + + + totalSupply: "40087773441569861381365", + totalSupplyUSD: 63231.0153, + rebasingSupply: "25361442351482631876504", + nonRebasingSupply: "14726331090087229504861", + amoSupply: "10070572720887441843875", + + yield: "3247579150222815340", + fees: "811894787555703834", + revenue: "811894787555703834", + revenue7DayAvg: "811894787555703834", + revenue7DayTotal: "811894787555703834", + revenueAllTime: "811894787555703834", + + pegPrice: "1001212340964123876", + + strategies [ + { + id: "frax_eth_strat", + name: "FraxETH", + address: "0x3ff8654d633d4ea0fae24c52aec73b4a20d0d0e5", + total: "14874775157977805", + tvl: "14874775157977805", + holdings: [ + { + symbol: "FRXETH", + amount: "14874775157977805", + value: "14874775157977805", + } + ] + } + ], + + collateral [ + { + name: "ETH", + total: "90660812201004131466148", + price: "1000000000000000000", + value: "90660812201004131466148" + }, + { + name: "WETH", + total: "618599415045049672320", + price: "1000000000000000000", + value: "618599415045049672320" + }, + { + name: "FRXETH", + total: "14874776250683471078445", + price: "1000000000000000000", + value: "14874776250683471078445" + }, + { + name: "RETH", + total: "3872976488796949771439", + price: "1087282384391932999", + value: "4211019111433044129729" + }, + ] + } + ] +} + +``` \ No newline at end of file From 17a01e9b5104958b5d50e4125bbcb16e40ed23b7 Mon Sep 17 00:00:00 2001 From: Nick Poulden Date: Tue, 17 Oct 2023 14:51:55 -0600 Subject: [PATCH 12/13] Format --- docs/daily-stats.md | 109 ++++++++++++++++++++++---------------------- 1 file changed, 54 insertions(+), 55 deletions(-) diff --git a/docs/daily-stats.md b/docs/daily-stats.md index b256b804..4e384bec 100644 --- a/docs/daily-stats.md +++ b/docs/daily-stats.md @@ -58,76 +58,75 @@ dailyStats { ```json { - dailyStats: [ + "dailyStats": [ { - id: "2023-10-17", - blockNumber: 18361379, - timestamp: "2023-10-16T07:15:11.000Z", - - apr: 0.14973706558715433, - apy: 0.16144112845098268, - apy7DayAvg: 0.12818496009582647 - apy14DayAvg: 0.12818496009582647, - apy30DayAvg: 0.12818496009582647, - - - totalSupply: "40087773441569861381365", - totalSupplyUSD: 63231.0153, - rebasingSupply: "25361442351482631876504", - nonRebasingSupply: "14726331090087229504861", - amoSupply: "10070572720887441843875", - - yield: "3247579150222815340", - fees: "811894787555703834", - revenue: "811894787555703834", - revenue7DayAvg: "811894787555703834", - revenue7DayTotal: "811894787555703834", - revenueAllTime: "811894787555703834", - - pegPrice: "1001212340964123876", - - strategies [ + "id": "2023-10-17", + "blockNumber": 18361379, + "timestamp": "2023-10-16T07:15:11.000Z", + + "apr": 0.14973706558715433, + "apy": 0.16144112845098268, + "apy7DayAvg": 0.12818496009582647, + "apy14DayAvg": 0.12818496009582647, + "apy30DayAvg": 0.12818496009582647, + + "totalSupply": "40087773441569861381365", + "totalSupplyUSD": 63231.0153, + "rebasingSupply": "25361442351482631876504", + "nonRebasingSupply": "14726331090087229504861", + "amoSupply": "10070572720887441843875", + + "yield": "3247579150222815340", + "fees": "811894787555703834", + "revenue": "811894787555703834", + "revenue7DayAvg": "811894787555703834", + 
"revenue7DayTotal": "811894787555703834", + "revenueAllTime": "811894787555703834", + + "pegPrice": "1001212340964123876", + + "strategies": [ { - id: "frax_eth_strat", - name: "FraxETH", - address: "0x3ff8654d633d4ea0fae24c52aec73b4a20d0d0e5", - total: "14874775157977805", - tvl: "14874775157977805", - holdings: [ + "id": "frax_eth_strat", + "name": "FraxETH", + "address": "0x3ff8654d633d4ea0fae24c52aec73b4a20d0d0e5", + "total": "14874775157977805", + "tvl": "14874775157977805", + "holdings": [ { - symbol: "FRXETH", - amount: "14874775157977805", - value: "14874775157977805", + "symbol": "FRXETH", + "amount": "14874775157977805", + "value": "14874775157977805" } ] } ], - collateral [ + "collateral": [ { - name: "ETH", - total: "90660812201004131466148", - price: "1000000000000000000", - value: "90660812201004131466148" + "name": "ETH", + "total": "90660812201004131466148", + "price": "1000000000000000000", + "value": "90660812201004131466148" }, { - name: "WETH", - total: "618599415045049672320", - price: "1000000000000000000", - value: "618599415045049672320" + "name": "WETH", + "total": "618599415045049672320", + "price": "1000000000000000000", + "value": "618599415045049672320" }, { - name: "FRXETH", - total: "14874776250683471078445", - price: "1000000000000000000", - value: "14874776250683471078445" + "name": "FRXETH", + "total": "14874776250683471078445", + "price": "1000000000000000000", + "value": "14874776250683471078445" }, { - name: "RETH", - total: "3872976488796949771439", - price: "1087282384391932999", - value: "4211019111433044129729" - }, + "name": "RETH", + "total": "3872976488796949771439", + "price": "1087282384391932999", + "value": "4211019111433044129729" + } ] } ] From fb6fb77e20cdd0406bebf58547630dd116bc4a27 Mon Sep 17 00:00:00 2001 From: Chris Jacobs Date: Tue, 17 Oct 2023 15:19:18 -0700 Subject: [PATCH 13/13] feat: data requirements thoughts (start v8) - docs: changelog / readme --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8fa43f7c..dc02cc11 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,7 @@ ## v8 +- Data requirements analysis - Ability to handle multiple squid processors: - [main.ts](src%2Fmain.ts) - [processor.ts](src%2Fprocessor.ts)