diff --git a/lib/agent0/agent0/hyperdrive/crash_report.py b/lib/agent0/agent0/hyperdrive/crash_report.py new file mode 100644 index 0000000000..586435c90b --- /dev/null +++ b/lib/agent0/agent0/hyperdrive/crash_report.py @@ -0,0 +1,28 @@ +"""Utility function for logging bot crash reports.""" +from __future__ import annotations + +import logging + +from elfpy.utils import logs + + +def setup_hyperdrive_crash_report_logging(log_format_string: str | None = None) -> None: + """Create a new logging file handler with CRITICAL log level for hyperdrive crash reporting. + + In the future, a custom log level could be used specific to crash reporting. + + Arguments + --------- + log_format_string : str, optional + Logging format described in string format. + """ + logs.add_file_handler( + logger=None, # use the default root logger + log_filename="hyperdrive_crash_report.log", + log_format_string=log_format_string, + delete_previous_logs=False, + log_level=logging.CRITICAL, + ) + + +# TODO implement hyperdrive crash report diff --git a/lib/agent0/agent0/hyperdrive/exec/setup_experiment.py b/lib/agent0/agent0/hyperdrive/exec/setup_experiment.py index 204bac6fb3..ae94918e7e 100644 --- a/lib/agent0/agent0/hyperdrive/exec/setup_experiment.py +++ b/lib/agent0/agent0/hyperdrive/exec/setup_experiment.py @@ -9,8 +9,8 @@ from agent0.base.config import EnvironmentConfig from agent0.hyperdrive.agents import HyperdriveAgent from agent0.hyperdrive.config import get_eth_bots_config +from agent0.hyperdrive.crash_report import setup_hyperdrive_crash_report_logging from agent0.hyperdrive.exec import get_agent_accounts -from chainsync.hyperdrive import setup_hyperdrive_crash_report_logging from elfpy.utils import logs from ethpy.base import initialize_web3_with_http_provider, load_all_abis from ethpy.hyperdrive import fetch_hyperdrive_address_from_url diff --git a/lib/chainsync/bin/acquire_data.py b/lib/chainsync/bin/acquire_data.py index f3248e25f7..c44801bfee 100644 --- 
a/lib/chainsync/bin/acquire_data.py +++ b/lib/chainsync/bin/acquire_data.py @@ -6,18 +6,18 @@ import time from dataclasses import dataclass -from chainsync.base import add_transactions, initialize_session -from chainsync.hyperdrive import ( +from chainsync.db.base import initialize_session +from chainsync.db.hyperdrive import ( add_checkpoint_infos, add_pool_config, add_pool_infos, + add_transactions, add_wallet_deltas, add_wallet_infos, convert_checkpoint_info, convert_hyperdrive_transactions_for_block, convert_pool_config, convert_pool_info, - get_hyperdrive_contract, get_latest_block_number_from_pool_info_table, get_wallet_info, ) @@ -32,6 +32,7 @@ get_hyperdrive_config, get_hyperdrive_pool_info, ) +from ethpy.hyperdrive.interface import get_hyperdrive_contract from web3 import Web3 from web3.contract.contract import Contract diff --git a/lib/chainsync/bin/register_username_server.py b/lib/chainsync/bin/register_username_server.py index d1842a0b65..786c60d5ad 100644 --- a/lib/chainsync/bin/register_username_server.py +++ b/lib/chainsync/bin/register_username_server.py @@ -1,5 +1,7 @@ """A simple Flask server to run python scripts.""" -from chainsync.base import postgres +import logging + +from chainsync.db.base import interface from dotenv import load_dotenv from flask import Flask, jsonify, request from flask_expects_json import expects_json @@ -27,17 +29,16 @@ def register_bots(): return jsonify({"data": data, "error": "request.json is None"}), 500 # initialize the postgres session - session = postgres.initialize_session() + session = interface.initialize_session() try: - postgres.add_user_map(username, wallet_addrs, session) - # TODO move this to logging - print(f"Registered {wallet_addrs=} to {username=}") + interface.add_user_map(username, wallet_addrs, session) + logging.debug("Registered wallet_addrs=%s to username=%s}", wallet_addrs, username) out = (jsonify({"data": data, "error": ""}), 200) except Exception as exc: # pylint: 
disable=broad-exception-caught # Ignoring broad exception, since we're simply printing out error and returning to client out = (jsonify({"data": data, "error": str(exc)}), 500) - postgres.close_session(session) + interface.close_session(session) return out diff --git a/lib/chainsync/bin/run_hyperdrive_dashboard.py b/lib/chainsync/bin/run_hyperdrive_dashboard.py index 767ab820f8..825f397275 100644 --- a/lib/chainsync/bin/run_hyperdrive_dashboard.py +++ b/lib/chainsync/bin/run_hyperdrive_dashboard.py @@ -1,19 +1,25 @@ -"""Run the streamlab demo.""" +"""Run the dashboard.""" from __future__ import annotations import os import time import mplfinance as mpf -import pandas as pd import streamlit as st from agent0.hyperdrive.config import get_eth_bots_config from chainsync.analysis.calc_fixed_rate import calc_fixed_rate from chainsync.analysis.calc_ohlcv import calc_ohlcv from chainsync.analysis.calc_pnl import calc_closeout_pnl, calc_total_returns -from chainsync.base import get_transactions, get_user_map, initialize_session -from chainsync.dashboard import get_combined_data, plot_fixed_rate, plot_ohlcv -from chainsync.hyperdrive import get_agents, get_pool_config, get_pool_info, get_wallet_deltas +from chainsync.dashboard import ( + build_leaderboard, + build_ticker, + get_combined_data, + get_user_lookup, + plot_fixed_rate, + plot_ohlcv, +) +from chainsync.db.base import get_user_map, initialize_session +from chainsync.db.hyperdrive import get_all_traders, get_pool_config, get_pool_info, get_transactions, get_wallet_deltas from dotenv import load_dotenv # pylint: disable=invalid-name @@ -21,195 +27,6 @@ st.set_page_config(page_title="Trading Competition Dashboard", layout="wide") st.set_option("deprecation.showPyplotGlobalUse", False) - -# Helper functions -# TODO should likely move these functions to another file -def get_ticker( - wallet_delta: pd.DataFrame, transactions: pd.DataFrame, pool_info: pd.DataFrame, lookup: pd.DataFrame -) -> pd.DataFrame: - """Show recent 
trades. - - Arguments - --------- - data: pd.DataFrame - The dataframe resulting from get_transactions - - Returns - ------- - pd.DataFrame - The filtered transaction data based on what we want to view in the ticker - """ - # TODO these merges should really happen via an sql query instead of in pandas here - # Set ticker so that each transaction is a single row - ticker_data = wallet_delta.groupby(["transactionHash"]).agg( - {"blockNumber": "first", "walletAddress": "first", "baseTokenType": tuple, "delta": tuple} - ) - - # Expand column of lists into seperate dataframes, then str cat them together - token_type = pd.DataFrame(ticker_data["baseTokenType"].to_list(), index=ticker_data.index) - token_deltas = pd.DataFrame(ticker_data["delta"].to_list(), index=ticker_data.index) - token_diffs = token_type + ": " + token_deltas.astype("str") - # Aggregate columns into a single list, removing nans - token_diffs = token_diffs.stack().groupby(level=0).agg(list) - - # Gather other information from other tables - usernames = address_to_username(lookup, ticker_data["walletAddress"]) - timestamps = pool_info.loc[ticker_data["blockNumber"], "timestamp"] - trade_type = transactions.set_index("transactionHash").loc[ticker_data.index, "input_method"] - - ticker_data = ticker_data[["blockNumber", "walletAddress"]].copy() - ticker_data.insert(0, "timestamp", timestamps.values) # type: ignore - ticker_data.insert(2, "username", usernames.values) # type: ignore - ticker_data.insert(4, "trade_type", trade_type) - ticker_data.insert(5, "token_diffs", token_diffs) # type: ignore - ticker_data.columns = ["Timestamp", "Block", "User", "Wallet", "Method", "Token Deltas"] - # Shorten wallet address string - ticker_data["Wallet"] = ticker_data["Wallet"].str[:6] + "..." 
+ ticker_data["Wallet"].str[-4:] - # Return reverse of methods to put most recent transactions at the top - ticker_data = ticker_data.set_index("Timestamp").sort_index(ascending=False) - # Drop rows with nonexistant wallets - ticker_data = ticker_data.dropna(axis=0, subset="Wallet") - return ticker_data - - -def combine_usernames(username: pd.Series) -> pd.DataFrame: - """Map usernames to a single user (e.g., combine click with bots).""" - # Hard coded mapping: - user_mapping = { - "Charles St. Louis (click)": "Charles St. Louis", - "Alim Khamisa (click)": "Alim Khamisa", - "Danny Delott (click)": "Danny Delott", - "Gregory Lisa (click)": "Gregory Lisa", - "Jonny Rhea (click)": "Jonny Rhea", - "Matt Brown (click)": "Matt Brown", - "Giovanni Effio (click)": "Giovanni Effio", - "Mihai Cosma (click)": "Mihai Cosma", - "Ryan Goree (click)": "Ryan Goree", - "Alex Towle (click)": "Alex Towle", - "Adelina Ruffolo (click)": "Adelina Ruffolo", - "Jacob Arruda (click)": "Jacob Arruda", - "Dylan Paiton (click)": "Dylan Paiton", - "Sheng Lundquist (click)": "Sheng Lundquist", - "ControlC Schmidt (click)": "ControlC Schmidt", - "George Towle (click)": "George Towle", - "Jack Burrus (click)": "Jack Burrus", - "Jordan J (click)": "Jordan J", - # Bot accounts - "slundquist (bots)": "Sheng Lundquist", - } - user_mapping = pd.DataFrame.from_dict(user_mapping, orient="index") - user_mapping.columns = ["user"] - # Use merge in case mapping doesn't exist - username_column = username.name - user = username.to_frame().merge(user_mapping, how="left", left_on=username_column, right_index=True) - return user - - -def get_leaderboard(pnl: pd.Series, lookup: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]: - """Rank users by PNL, individually and bomined across their accounts.""" - pnl = pnl.reset_index() # type: ignore - usernames = address_to_username(lookup, pnl["walletAddress"]) - pnl.insert(1, "username", usernames.values.tolist()) - # Hard coded funding provider from migration 
account - migration_addr = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" - # Don't show this account - pnl = pnl[pnl["walletAddress"] != migration_addr] - # Rank based on pnl - user = combine_usernames(pnl["username"]) - pnl["user"] = user["user"].values - - ind_leaderboard = ( - pnl[["username", "walletAddress", "pnl"]] - .sort_values("pnl", ascending=False) # type: ignore - .reset_index(drop=True) - ) - comb_leaderboard = ( - pnl[["user", "pnl"]].groupby("user")["pnl"].sum().reset_index().sort_values("pnl", ascending=False) - ).reset_index(drop=True) - - return (comb_leaderboard, ind_leaderboard) - - -def get_click_addresses() -> pd.DataFrame: - """Return a dataframe of hard coded click addresses.""" - addresses = { - "0x004dfC2dBA6573fa4dFb1E86e3723e1070C0CfdE": "Charles St. Louis (click)", - "0x005182C62DA59Ff202D53d6E42Cef6585eBF9617": "Alim Khamisa (click)", - "0x005BB73FddB8CE049eE366b50d2f48763E9Dc0De": "Danny Delott (click)", - "0x0065291E64E40FF740aE833BE2F68F536A742b70": "Gregory Lisa (click)", - "0x0076b154e60BF0E9088FcebAAbd4A778deC5ce2c": "Jonny Rhea (click)", - "0x00860d89A40a5B4835a3d498fC1052De04996de6": "Matt Brown (click)", - "0x00905A77Dc202e618d15d1a04Bc340820F99d7C4": "Giovanni Effio (click)", - "0x009ef846DcbaA903464635B0dF2574CBEE66caDd": "Mihai Cosma (click)", - "0x00D5E029aFCE62738fa01EdCA21c9A4bAeabd434": "Ryan Goree (click)", - "0x020A6F562884395A7dA2be0b607Bf824546699e2": "Alex Towle (click)", - "0x020a898437E9c9DCdF3c2ffdDB94E759C0DAdFB6": "Adelina Ruffolo (click)", - "0x020b42c1E3665d14275E2823bCef737015c7f787": "Jacob Arruda (click)", - "0x02147558D39cE51e19de3A2E1e5b7c8ff2778829": "Dylan Paiton (click)", - "0x021f1Bbd2Ec870FB150bBCAdaaA1F85DFd72407C": "Sheng Lundquist (click)", - "0x02237E07b7Ac07A17E1bdEc720722cb568f22840": "ControlC Schmidt (click)", - "0x022ca016Dc7af612e9A8c5c0e344585De53E9667": "George Towle (click)", - "0x0235037B42b4c0575c2575D50D700dD558098b78": "Jack Burrus (click)", - 
"0x0238811B058bA876Ae5F79cFbCAcCfA1c7e67879": "Jordan J (click)", - } - addresses = pd.DataFrame.from_dict(addresses, orient="index") - addresses = addresses.reset_index() - addresses.columns = ["address", "username"] - - return addresses - - -def get_user_lookup() -> pd.DataFrame: - """Generate username to agents mapping. - - Returns - ------- - pd.DataFrame - A dataframe with an "username" and "address" columns that provide a lookup - between a registered username and a wallet address. The username can also be - the wallet address itself if a wallet is found without a registered username. - """ - # Get data - agents = get_agents(session) - user_map = get_user_map(session) - # Usernames in postgres are bots - user_map["username"] = user_map["username"] + " (bots)" - - click_map = get_click_addresses() - user_map = pd.concat([click_map, user_map], axis=0) - - # Generate a lookup of users -> address, taking into account that some addresses don't have users - # Reindex looks up agent addresses against user_map, adding nans if it doesn't exist - options_map = user_map.set_index("address").reindex(agents) - - # Set username as address if agent doesn't exist - na_idx = options_map["username"].isna() - # If there are any nan usernames, set address itself as username - if na_idx.any(): - options_map.loc[na_idx, "username"] = options_map.index[na_idx].values - return options_map.reset_index() - - -def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Series: - """Look up selected users/addrs to all addresses. 
- - Arguments - --------- - lookup: pd.DataFrame - The lookup dataframe from `get_user_lookup` call - selected_list: list[str] - A list of addresses to look up usernames to - - Returns - ------- - list[str] - A list of usernames based on selected_list - """ - selected_list_column = selected_list.name - out = selected_list.to_frame().merge(lookup, how="left", left_on=selected_list_column, right_on="address") - return out["username"] - - # Connect to postgres load_dotenv() session = initialize_session() @@ -248,12 +65,14 @@ def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Se while True: # Place data and plots - user_lookup = get_user_lookup() + agents = get_all_traders(session) + user_map = get_user_map(session) txn_data = get_transactions(session, -max_live_blocks) pool_info_data = get_pool_info(session, -max_live_blocks, coerce_float=False) combined_data = get_combined_data(txn_data, pool_info_data) wallet_deltas = get_wallet_deltas(session, coerce_float=False) - ticker = get_ticker(wallet_deltas, txn_data, pool_info_data, user_lookup) + user_lookup = get_user_lookup(agents, user_map) + ticker = build_ticker(wallet_deltas, txn_data, pool_info_data, user_lookup) (fixed_rate_x, fixed_rate_y) = calc_fixed_rate(combined_data, config_data) ohlcv = calc_ohlcv(combined_data, config_data, freq="5T") @@ -267,7 +86,7 @@ def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Se ## Add initial budget column to bots ## when bot restarts, use initial budget for bot's wallet address to set "budget" in Agent.Wallet - comb_rank, ind_rank = get_leaderboard(current_returns, user_lookup) + comb_rank, ind_rank = build_leaderboard(current_returns, user_lookup) with ticker_placeholder.container(): st.header("Ticker") diff --git a/lib/chainsync/chainsync/analysis/__init__.py b/lib/chainsync/chainsync/analysis/__init__.py index 634a79fe92..78a79bee02 100644 --- a/lib/chainsync/chainsync/analysis/__init__.py +++ 
b/lib/chainsync/chainsync/analysis/__init__.py @@ -1,10 +1,5 @@ """Analysis for trading.""" from .calc_fixed_rate import calc_fixed_rate from .calc_ohlcv import calc_ohlcv -from .calc_pnl import ( - calc_closeout_pnl, - calc_single_closeout, - calc_total_returns, - calculate_spot_price, - calculate_spot_price_for_position, -) +from .calc_pnl import calc_closeout_pnl, calc_single_closeout, calc_total_returns +from .calc_spot_price import calculate_spot_price, calculate_spot_price_for_position diff --git a/lib/chainsync/chainsync/analysis/calc_fixed_rate.py b/lib/chainsync/chainsync/analysis/calc_fixed_rate.py index 988f4c85d5..bf972f0f4d 100644 --- a/lib/chainsync/chainsync/analysis/calc_fixed_rate.py +++ b/lib/chainsync/chainsync/analysis/calc_fixed_rate.py @@ -2,7 +2,8 @@ from decimal import Decimal import numpy as np -from chainsync.dashboard import calculate_spot_price + +from .calc_spot_price import calculate_spot_price def calc_fixed_rate(trade_data, config_data): diff --git a/lib/chainsync/chainsync/analysis/calc_ohlcv.py b/lib/chainsync/chainsync/analysis/calc_ohlcv.py index 01c02dd1b2..a65ba84211 100644 --- a/lib/chainsync/chainsync/analysis/calc_ohlcv.py +++ b/lib/chainsync/chainsync/analysis/calc_ohlcv.py @@ -2,7 +2,8 @@ from __future__ import annotations import pandas as pd -from chainsync.dashboard import calculate_spot_price + +from .calc_spot_price import calculate_spot_price def calc_ohlcv(trade_data, config_data, freq="D"): diff --git a/lib/chainsync/chainsync/analysis/calc_pnl.py b/lib/chainsync/chainsync/analysis/calc_pnl.py index 84994536da..4ecf52e420 100644 --- a/lib/chainsync/chainsync/analysis/calc_pnl.py +++ b/lib/chainsync/chainsync/analysis/calc_pnl.py @@ -1,21 +1,21 @@ """Plots the pnl.""" from __future__ import annotations -import logging from decimal import Decimal import numpy as np import pandas as pd from agent0.base.config import EnvironmentConfig -from chainsync.dashboard import calculate_spot_price -from chainsync.hyperdrive 
import get_hyperdrive_contract from eth_typing import ChecksumAddress, HexAddress, HexStr from ethpy.base import initialize_web3_with_http_provider, load_all_abis, smart_contract_preview_transaction from ethpy.hyperdrive import fetch_hyperdrive_address_from_url +from ethpy.hyperdrive.interface import get_hyperdrive_contract from fixedpointmath import FixedPoint from web3 import Web3 from web3.contract.contract import Contract +from .calc_spot_price import calculate_spot_price_for_position + def calc_single_closeout( position: pd.DataFrame, contract: Contract, pool_info: pd.DataFrame, min_output: int, as_underlying: bool @@ -195,48 +195,3 @@ def calc_total_returns( current_wallet.loc[long_returns.index, "pnl"] = long_returns current_wallet.loc[withdrawal_returns.index, "pnl"] = withdrawal_returns return current_wallet.reset_index().groupby("walletAddress")["pnl"].sum(), current_wallet - - -def calculate_spot_price_for_position( - share_reserves: pd.Series, - bond_reserves: pd.Series, - time_stretch: pd.Series, - initial_share_price: pd.Series, - position_duration: pd.Series, - maturity_timestamp: pd.Series, - block_timestamp: Decimal, -): - """Calculate the spot price given the pool info data. - - This is calculated in a vectorized way, with every input being a scalar except for maturity_timestamp. 
- - Arguments - --------- - share_reserves : pd.Series - The share reserves - bond_reserves : pd.Series - The bond reserves - time_stretch : pd.Series - The time stretch - initial_share_price : pd.Series - The initial share price - position_duration : pd.Series - The position duration - maturity_timestamp : pd.Series - The maturity timestamp - block_timestamp : Decimal - The block timestamp - """ - # pylint: disable=too-many-arguments - full_term_spot_price = calculate_spot_price(share_reserves, bond_reserves, initial_share_price, time_stretch) - time_left_seconds = maturity_timestamp - block_timestamp # type: ignore - if isinstance(time_left_seconds, pd.Timedelta): - time_left_seconds = time_left_seconds.total_seconds() - time_left_in_years = time_left_seconds / position_duration - logging.info( - " spot price is weighted average of %s(%s) and 1 (%s)", - full_term_spot_price, - time_left_in_years, - 1 - time_left_in_years, - ) - return full_term_spot_price * time_left_in_years + 1 * (1 - time_left_in_years) diff --git a/lib/chainsync/chainsync/analysis/calc_spot_price.py b/lib/chainsync/chainsync/analysis/calc_spot_price.py new file mode 100644 index 0000000000..43202bfb32 --- /dev/null +++ b/lib/chainsync/chainsync/analysis/calc_spot_price.py @@ -0,0 +1,57 @@ +"""Calculate the spot price.""" + +import logging +from decimal import Decimal + +import pandas as pd + + +# TODO these functions should be deprecated in favor of external call +def calculate_spot_price(share_reserves, bond_reserves, initial_share_price, time_stretch): + """Calculate the spot price.""" + return ((initial_share_price * share_reserves) / bond_reserves) ** time_stretch + + +def calculate_spot_price_for_position( + share_reserves: pd.Series, + bond_reserves: pd.Series, + time_stretch: pd.Series, + initial_share_price: pd.Series, + position_duration: pd.Series, + maturity_timestamp: pd.Series, + block_timestamp: Decimal, +): + """Calculate the spot price given the pool info data. 
+ + This is calculated in a vectorized way, with every input being a scalar except for maturity_timestamp. + + Arguments + --------- + share_reserves : pd.Series + The share reserves + bond_reserves : pd.Series + The bond reserves + time_stretch : pd.Series + The time stretch + initial_share_price : pd.Series + The initial share price + position_duration : pd.Series + The position duration + maturity_timestamp : pd.Series + The maturity timestamp + block_timestamp : Decimal + The block timestamp + """ + # pylint: disable=too-many-arguments + full_term_spot_price = calculate_spot_price(share_reserves, bond_reserves, initial_share_price, time_stretch) + time_left_seconds = maturity_timestamp - block_timestamp # type: ignore + if isinstance(time_left_seconds, pd.Timedelta): + time_left_seconds = time_left_seconds.total_seconds() + time_left_in_years = time_left_seconds / position_duration + logging.info( + " spot price is weighted average of %s(%s) and 1 (%s)", + full_term_spot_price, + time_left_in_years, + 1 - time_left_in_years, + ) + return full_term_spot_price * time_left_in_years + 1 * (1 - time_left_in_years) diff --git a/lib/chainsync/chainsync/base/conversions.py b/lib/chainsync/chainsync/base/conversions.py deleted file mode 100644 index e38c9a0320..0000000000 --- a/lib/chainsync/chainsync/base/conversions.py +++ /dev/null @@ -1,28 +0,0 @@ -"""Useful conversions for database operations.""" -from __future__ import annotations - -from decimal import Decimal - -from fixedpointmath import FixedPoint - - -def convert_scaled_value_to_decimal(input_val: int | None) -> Decimal | None: - """ - Given a scaled value int, converts it to a Decimal, while supporting Nones - - Arguments - ---------- - input_val: int | None - The scaled integer value to unscale and convert to Decimal - - Returns - ------- - Decimal | None - The unscaled Decimal value - """ - if input_val is not None: - # TODO add this cast within fixedpoint - fp_val = FixedPoint(scaled_value=input_val) - 
str_val = str(fp_val) - return Decimal(str_val) - return None diff --git a/lib/chainsync/chainsync/base/db_schema.py b/lib/chainsync/chainsync/base/db_schema.py deleted file mode 100644 index c27168b00b..0000000000 --- a/lib/chainsync/chainsync/base/db_schema.py +++ /dev/null @@ -1,151 +0,0 @@ -"""Database Schemas for Basic Blockchain Datatypes. These include things like Transactions, Accounts Etc.""" -from __future__ import annotations - -from decimal import Decimal -from typing import Union - -from sqlalchemy import BigInteger, Boolean, ForeignKey, Integer, Numeric, String -from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column - -# Schema file doesn't need any methods in these dataclasses -# pylint: disable=too-few-public-methods - -# solidity returns things in camelCase. Keeping the formatting to indicate the source. -# pylint: disable=invalid-name - -# Ideally, we'd use `Mapped[str | None]`, but this breaks using Python 3.9: -# https://github.com/sqlalchemy/sqlalchemy/issues/9110 -# Currently using `Mapped[Union[str, None]]` for backwards compatibility - - -class Base(MappedAsDataclass, DeclarativeBase): - """Base class to subclass from to define the schema""" - - -class Transaction(Base): - """Table/dataclass schema for Transactions. - - Mapped class that is a data class on the python side, and an declarative base on the sql side. - """ - - __tablename__ = "transactions" - - # Default table primary key - # Note that we use postgres in production and sqlite in testing, but sqlite has issues with - # autoincrement with BigIntegers. 
Hence, we use the Integer variant when using sqlite in tests - id: Mapped[int] = mapped_column( - BigInteger().with_variant(Integer, "sqlite"), primary_key=True, init=False, autoincrement=True - ) - transactionHash: Mapped[str] = mapped_column(String, index=True, unique=True) - - #### Fields from base transactions #### - ############## - # NOOOOOOOOO, transaction shouldn't have a foreign key to poolInfo - ############## - blockNumber: Mapped[int] = mapped_column(BigInteger, ForeignKey("poolinfo.blockNumber"), index=True) - transactionIndex: Mapped[Union[int, None]] = mapped_column(Integer, default=None) - nonce: Mapped[Union[int, None]] = mapped_column(Integer, default=None) - # Transaction receipt to/from - # Almost always from wallet address to smart contract address - txn_to: Mapped[Union[str, None]] = mapped_column(String, default=None) - txn_from: Mapped[Union[str, None]] = mapped_column(String, default=None) - gasUsed: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) - - #### Fields from solidity function calls #### - # These fields map solidity function calls and their corresponding arguments - # The params list is exhaustive against all possible methods - input_method: Mapped[Union[str, None]] = mapped_column(String, default=None) - - # Method: initialize - input_params_contribution: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - input_params_apr: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - input_params_destination: Mapped[Union[str, None]] = mapped_column(String, default=None) - input_params_asUnderlying: Mapped[Union[bool, None]] = mapped_column(Boolean, default=None) - - # Method: openLong - input_params_baseAmount: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - input_params_minOutput: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - # input_params_destination - # input_params_asUnderlying - - # Method: openShort - input_params_bondAmount: 
Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - input_params_maxDeposit: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - # input_params_destination - # input_params_asUnderlying - - # Method: closeLong - input_params_maturityTime: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) - # input_params_bondAmount - # input_params_minOutput - # input_params_destination - # input_params_asUnderlying - - # Method: closeShort - # input_params_maturityTime - # input_params_bondAmount - # input_params_minOutput - # input_params_destination - # input_params_asUnderlying - - # Method: addLiquidity - # input_params_contribution - input_params_minApr: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - input_params_maxApr: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - # input_params_destination - # input_params_asUnderlying - - # Method: removeLiquidity - input_params_shares: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - # input_params_minOutput - # input_params_destination - # input_params_asUnderlying - - #### Fields from event logs #### - # Addresses in event logs - event_from: Mapped[Union[str, None]] = mapped_column(String, default=None) - event_to: Mapped[Union[str, None]] = mapped_column(String, default=None) - # args_owner - # args_spender - # args_id - event_value: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) - event_operator: Mapped[Union[str, None]] = mapped_column(String, default=None) - event_id: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) - # Fields calculated from base - event_prefix: Mapped[Union[int, None]] = mapped_column(Integer, default=None) - event_maturity_time: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) - - # Fields not used by postprocessing - - # blockHash - # hash - # value - # gasPrice - # gas - # v - # r - # s - # type - # accessList - # maxPriorityFeePerGas - # 
maxFeePerGas - # chainId - # logIndex - # address - # cumulativeGasUsed - # contractAddress - # status - # logsBloom - # effectiveGasPrice - - -class UserMap(Base): - """Table/dataclass schema for pool config.""" - - __tablename__ = "usermap" - - # Default table primary key - # Note that we use postgres in production and sqlite in testing, but sqlite has issues with - # autoincrement with BigIntegers. Hence, we use the Integer variant when using sqlite in tests - address: Mapped[str] = mapped_column(String, primary_key=True) - username: Mapped[str] = mapped_column(String, index=True) diff --git a/lib/chainsync/chainsync/dashboard/__init__.py b/lib/chainsync/chainsync/dashboard/__init__.py index 13ddbd4c76..a58f129b1c 100644 --- a/lib/chainsync/chainsync/dashboard/__init__.py +++ b/lib/chainsync/chainsync/dashboard/__init__.py @@ -1,5 +1,8 @@ """Dashboard utilities""" -from .extract_data_logs import calculate_spot_price, get_combined_data, read_json_to_pd +from .build_leaderboard import build_leaderboard +from .build_ticker import build_ticker +from .extract_data_logs import get_combined_data, read_json_to_pd from .plot_fixed_rate import plot_fixed_rate from .plot_ohlcv import plot_ohlcv +from .usernames import address_to_username, combine_usernames, get_user_lookup diff --git a/lib/chainsync/chainsync/dashboard/build_leaderboard.py b/lib/chainsync/chainsync/dashboard/build_leaderboard.py new file mode 100644 index 0000000000..242ac80ba3 --- /dev/null +++ b/lib/chainsync/chainsync/dashboard/build_leaderboard.py @@ -0,0 +1,29 @@ +"""Builds the leaderboard for the dashboard.""" +import pandas as pd + +from .usernames import address_to_username, combine_usernames + + +def build_leaderboard(pnl: pd.Series, lookup: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]: + """Rank users by PNL, individually and bomined across their accounts.""" + pnl = pnl.reset_index() # type: ignore + usernames = address_to_username(lookup, pnl["walletAddress"]) + pnl.insert(1, "username", 
usernames.values.tolist()) + # TODO: Hard coded funding provider from migration account + migration_addr = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" + # Don't show this account + pnl = pnl[pnl["walletAddress"] != migration_addr] + # Rank based on pnl + user = combine_usernames(pnl["username"]) + pnl["user"] = user["user"].values + + ind_leaderboard = ( + pnl[["username", "walletAddress", "pnl"]] + .sort_values("pnl", ascending=False) # type: ignore + .reset_index(drop=True) + ) + comb_leaderboard = ( + pnl[["user", "pnl"]].groupby("user")["pnl"].sum().reset_index().sort_values("pnl", ascending=False) + ).reset_index(drop=True) + + return (comb_leaderboard, ind_leaderboard) diff --git a/lib/chainsync/chainsync/dashboard/build_ticker.py b/lib/chainsync/chainsync/dashboard/build_ticker.py new file mode 100644 index 0000000000..440169c804 --- /dev/null +++ b/lib/chainsync/chainsync/dashboard/build_ticker.py @@ -0,0 +1,52 @@ +"""Builds the ticker for the dashboard.""" +import pandas as pd + +from .usernames import address_to_username + + +def build_ticker( + wallet_delta: pd.DataFrame, transactions: pd.DataFrame, pool_info: pd.DataFrame, lookup: pd.DataFrame +) -> pd.DataFrame: + """Show recent trades. 
+ + Arguments + --------- + data: pd.DataFrame + The dataframe resulting from get_transactions + + Returns + ------- + pd.DataFrame + The filtered transaction data based on what we want to view in the ticker + """ + # TODO these merges should really happen via an sql query instead of in pandas here + # Set ticker so that each transaction is a single row + ticker_data = wallet_delta.groupby(["transactionHash"]).agg( + {"blockNumber": "first", "walletAddress": "first", "baseTokenType": tuple, "delta": tuple} + ) + + # Expand column of lists into seperate dataframes, then str cat them together + token_type = pd.DataFrame(ticker_data["baseTokenType"].to_list(), index=ticker_data.index) + token_deltas = pd.DataFrame(ticker_data["delta"].to_list(), index=ticker_data.index) + token_diffs = token_type + ": " + token_deltas.astype("str") + # Aggregate columns into a single list, removing nans + token_diffs = token_diffs.stack().groupby(level=0).agg(list) + + # Gather other information from other tables + usernames = address_to_username(lookup, ticker_data["walletAddress"]) + timestamps = pool_info.loc[ticker_data["blockNumber"], "timestamp"] + trade_type = transactions.set_index("transactionHash").loc[ticker_data.index, "input_method"] + + ticker_data = ticker_data[["blockNumber", "walletAddress"]].copy() + ticker_data.insert(0, "timestamp", timestamps.values) # type: ignore + ticker_data.insert(2, "username", usernames.values) # type: ignore + ticker_data.insert(4, "trade_type", trade_type) + ticker_data.insert(5, "token_diffs", token_diffs) # type: ignore + ticker_data.columns = ["Timestamp", "Block", "User", "Wallet", "Method", "Token Deltas"] + # Shorten wallet address string + ticker_data["Wallet"] = ticker_data["Wallet"].str[:6] + "..." 
+ ticker_data["Wallet"].str[-4:] + # Return reverse of methods to put most recent transactions at the top + ticker_data = ticker_data.set_index("Timestamp").sort_index(ascending=False) + # Drop rows with nonexistant wallets + ticker_data = ticker_data.dropna(axis=0, subset="Wallet") + return ticker_data diff --git a/lib/chainsync/chainsync/dashboard/extract_data_logs.py b/lib/chainsync/chainsync/dashboard/extract_data_logs.py index 0c73c8b2cf..da19c1f204 100644 --- a/lib/chainsync/chainsync/dashboard/extract_data_logs.py +++ b/lib/chainsync/chainsync/dashboard/extract_data_logs.py @@ -24,11 +24,6 @@ def read_json_to_pd(json_file): return pd.DataFrame(json_data) -def calculate_spot_price(share_reserves, bond_reserves, initial_share_price, time_stretch): - """Calculate the spot price.""" - return ((initial_share_price * share_reserves) / bond_reserves) ** time_stretch - - def get_combined_data(txn_data, pool_info_data): """Combine multiple datasets into one containing transaction data, and pool info.""" pool_info_data.index = pool_info_data.index.astype(int) @@ -68,6 +63,8 @@ def get_combined_data(txn_data, pool_info_data): # Calculate trade type and timetsamp from args.id def decode_prefix(row): # Check for nans + if row is None: + return None if np.isnan(row): out = np.nan else: diff --git a/lib/chainsync/chainsync/dashboard/plot_ohlcv.py b/lib/chainsync/chainsync/dashboard/plot_ohlcv.py index 2d7726ac79..254f9e14bb 100644 --- a/lib/chainsync/chainsync/dashboard/plot_ohlcv.py +++ b/lib/chainsync/chainsync/dashboard/plot_ohlcv.py @@ -6,17 +6,18 @@ def plot_ohlcv(ohlcv, ohlcv_ax, vol_ax): """Plots the ohlcv plot""" - mpf.plot(ohlcv, type="candle", volume=vol_ax, ax=ohlcv_ax) + if len(ohlcv > 0): + mpf.plot(ohlcv, type="candle", volume=vol_ax, ax=ohlcv_ax) - ohlcv_ax.set_xlabel("block timestamp") - ohlcv_ax.set_title("OHLCV") - ohlcv_ax.yaxis.set_label_position("right") - ohlcv_ax.yaxis.tick_right() + ohlcv_ax.set_xlabel("block timestamp") + 
ohlcv_ax.set_title("OHLCV") + ohlcv_ax.yaxis.set_label_position("right") + ohlcv_ax.yaxis.tick_right() - vol_ax.set_xlabel("block timestamp") - vol_ax.set_ylabel("Volume") - vol_ax.set_title("Volume") - vol_ax.yaxis.set_label_position("right") - vol_ax.yaxis.tick_right() + vol_ax.set_xlabel("block timestamp") + vol_ax.set_ylabel("Volume") + vol_ax.set_title("Volume") + vol_ax.yaxis.set_label_position("right") + vol_ax.yaxis.tick_right() # format x-axis as time diff --git a/lib/chainsync/chainsync/dashboard/usernames.py b/lib/chainsync/chainsync/dashboard/usernames.py new file mode 100644 index 0000000000..0c37a8f93a --- /dev/null +++ b/lib/chainsync/chainsync/dashboard/usernames.py @@ -0,0 +1,124 @@ +"""Helper functions for mapping addresses to usernames.""" +import pandas as pd + + +def combine_usernames(username: pd.Series) -> pd.DataFrame: + """Map usernames to a single user (e.g., combine click with bots).""" + # TODO Hard coded mapping, should be a config file somewhere + user_mapping = { + "Charles St. Louis (click)": "Charles St. 
Louis", + "Alim Khamisa (click)": "Alim Khamisa", + "Danny Delott (click)": "Danny Delott", + "Gregory Lisa (click)": "Gregory Lisa", + "Jonny Rhea (click)": "Jonny Rhea", + "Matt Brown (click)": "Matt Brown", + "Giovanni Effio (click)": "Giovanni Effio", + "Mihai Cosma (click)": "Mihai Cosma", + "Ryan Goree (click)": "Ryan Goree", + "Alex Towle (click)": "Alex Towle", + "Adelina Ruffolo (click)": "Adelina Ruffolo", + "Jacob Arruda (click)": "Jacob Arruda", + "Dylan Paiton (click)": "Dylan Paiton", + "Sheng Lundquist (click)": "Sheng Lundquist", + "ControlC Schmidt (click)": "ControlC Schmidt", + "George Towle (click)": "George Towle", + "Jack Burrus (click)": "Jack Burrus", + "Jordan J (click)": "Jordan J", + # Bot accounts + "slundquist (bots)": "Sheng Lundquist", + } + user_mapping = pd.DataFrame.from_dict(user_mapping, orient="index") + user_mapping.columns = ["user"] + # Use merge in case mapping doesn't exist + username_column = username.name + user = username.to_frame().merge(user_mapping, how="left", left_on=username_column, right_index=True) + return user + + +def get_click_addresses() -> pd.DataFrame: + """Return a dataframe of hard coded click addresses.""" + # TODO Hard coded mapping, should be a config file somewhere + addresses = { + "0x004dfC2dBA6573fa4dFb1E86e3723e1070C0CfdE": "Charles St. 
Louis (click)", + "0x005182C62DA59Ff202D53d6E42Cef6585eBF9617": "Alim Khamisa (click)", + "0x005BB73FddB8CE049eE366b50d2f48763E9Dc0De": "Danny Delott (click)", + "0x0065291E64E40FF740aE833BE2F68F536A742b70": "Gregory Lisa (click)", + "0x0076b154e60BF0E9088FcebAAbd4A778deC5ce2c": "Jonny Rhea (click)", + "0x00860d89A40a5B4835a3d498fC1052De04996de6": "Matt Brown (click)", + "0x00905A77Dc202e618d15d1a04Bc340820F99d7C4": "Giovanni Effio (click)", + "0x009ef846DcbaA903464635B0dF2574CBEE66caDd": "Mihai Cosma (click)", + "0x00D5E029aFCE62738fa01EdCA21c9A4bAeabd434": "Ryan Goree (click)", + "0x020A6F562884395A7dA2be0b607Bf824546699e2": "Alex Towle (click)", + "0x020a898437E9c9DCdF3c2ffdDB94E759C0DAdFB6": "Adelina Ruffolo (click)", + "0x020b42c1E3665d14275E2823bCef737015c7f787": "Jacob Arruda (click)", + "0x02147558D39cE51e19de3A2E1e5b7c8ff2778829": "Dylan Paiton (click)", + "0x021f1Bbd2Ec870FB150bBCAdaaA1F85DFd72407C": "Sheng Lundquist (click)", + "0x02237E07b7Ac07A17E1bdEc720722cb568f22840": "ControlC Schmidt (click)", + "0x022ca016Dc7af612e9A8c5c0e344585De53E9667": "George Towle (click)", + "0x0235037B42b4c0575c2575D50D700dD558098b78": "Jack Burrus (click)", + "0x0238811B058bA876Ae5F79cFbCAcCfA1c7e67879": "Jordan J (click)", + } + addresses = pd.DataFrame.from_dict(addresses, orient="index") + addresses = addresses.reset_index() + addresses.columns = ["address", "username"] + + return addresses + + +def get_user_lookup(traders: list[str], user_map: pd.DataFrame) -> pd.DataFrame: + """Generate username to address mapping. + + Arguments + --------- + traders: list[str] + A list of all traders to build a lookup for + user_map: pd.DataFrame + A dataframe with "username" and "address" columns that map from bot address to a username + generated from `get_bot_map` + + Returns + ------- + pd.DataFrame + A dataframe with an "username" and "address" columns that provide a lookup + between a registered username and a wallet address. 
The lookup contains all entries from + `traders`, with the wallet address itself if an address isn't registered. + """ + # Get data + user_map = user_map.copy() + # Usernames in postgres are bots + user_map["username"] = user_map["username"] + " (bots)" + # TODO move this to reading from a config file + click_map = get_click_addresses() + # Add click users to map + user_map = pd.concat([click_map, user_map], axis=0) + + # Generate a lookup of users -> address, taking into account that some addresses don't have users + # Reindex looks up agent addresses against user_map, adding nans if it doesn't exist + options_map = user_map.set_index("address").reindex(traders) + + # Set username as address if agent doesn't exist + na_idx = options_map["username"].isna() + # If there are any nan usernames, set address itself as username + if na_idx.any(): + options_map.loc[na_idx, "username"] = options_map.index[na_idx].values + return options_map.reset_index() + + +def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Series: + """Look up selected users/addrs to all addresses. 
+ + Arguments + --------- + lookup: pd.DataFrame + The lookup dataframe from `get_user_lookup` call + selected_list: list[str] + A list of addresses to look up usernames to + + Returns + ------- + list[str] + A list of usernames based on selected_list + """ + selected_list_column = selected_list.name + out = selected_list.to_frame().merge(lookup, how="left", left_on=selected_list_column, right_on="address") + return out["username"] diff --git a/lib/ethpy/tests/__init__.py b/lib/chainsync/chainsync/db/__init__.py similarity index 100% rename from lib/ethpy/tests/__init__.py rename to lib/chainsync/chainsync/db/__init__.py diff --git a/lib/chainsync/chainsync/base/__init__.py b/lib/chainsync/chainsync/db/base/__init__.py similarity index 61% rename from lib/chainsync/chainsync/base/__init__.py rename to lib/chainsync/chainsync/db/base/__init__.py index ce48998198..e0685ffb3a 100644 --- a/lib/chainsync/chainsync/base/__init__.py +++ b/lib/chainsync/chainsync/db/base/__init__.py @@ -1,19 +1,16 @@ """Generic database utilities""" -from .conversions import convert_scaled_value_to_decimal -from .db_schema import Base, Transaction, UserMap -from .postgres import ( +from .interface import ( PostgresConfig, TableWithBlockNumber, - add_transactions, add_user_map, build_postgres_config, close_session, drop_table, get_latest_block_number_from_table, - get_transactions, get_user_map, initialize_engine, initialize_session, query_tables, ) +from .schema import Base, UserMap diff --git a/lib/chainsync/chainsync/base/postgres.py b/lib/chainsync/chainsync/db/base/interface.py similarity index 80% rename from lib/chainsync/chainsync/base/postgres.py rename to lib/chainsync/chainsync/db/base/interface.py index 66fa30dbd4..4780aa92cb 100644 --- a/lib/chainsync/chainsync/base/postgres.py +++ b/lib/chainsync/chainsync/db/base/interface.py @@ -15,7 +15,7 @@ from sqlalchemy.ext.declarative import declared_attr from sqlalchemy.orm import Session, sessionmaker -from .db_schema import Base, 
Transaction, UserMap +from .schema import Base, UserMap # classes for sqlalchemy that define table schemas have no methods. # pylint: disable=too-few-public-methods @@ -193,26 +193,6 @@ def close_session(session: Session) -> None: session.close() -def add_transactions(transactions: list[Transaction], session: Session) -> None: - """Add transactions to the poolinfo table. - - Arguments - --------- - transactions : list[Transaction] - A list of Transaction objects to insert into postgres - session : Session - The initialized session object - """ - for transaction in transactions: - session.add(transaction) - try: - session.commit() - except exc.DataError as err: - session.rollback() - print(f"{transactions=}") - raise err - - def add_user_map(username: str, addresses: list[str], session: Session) -> None: """Add username mapping to postgres during evm_bots initialization. @@ -248,45 +228,10 @@ def add_user_map(username: str, addresses: list[str], session: Session) -> None: try: session.commit() except exc.DataError as err: - print(f"{username=}, {addresses=}") + logging.error("DB Error adding user: %s", err) raise err -def get_transactions(session: Session, start_block: int | None = None, end_block: int | None = None) -> pd.DataFrame: - """Get all transactions and returns as a pandas dataframe. - - Arguments - --------- - session : Session - The initialized session object - start_block : int | None - The starting block to filter the query on. start_block integers - matches python slicing notation, e.g., list[:3], list[:-3] - end_block : int | None - The ending block to filter the query on. 
end_block integers - matches python slicing notation, e.g., list[:3], list[:-3] - - Returns - ------- - DataFrame - A DataFrame that consists of the queried transactions data - """ - query = session.query(Transaction) - - # Support for negative indices - if (start_block is not None) and (start_block < 0): - start_block = get_latest_block_number_from_table(Transaction, session) + start_block + 1 - if (end_block is not None) and (end_block < 0): - end_block = get_latest_block_number_from_table(Transaction, session) + end_block + 1 - - if start_block is not None: - query = query.filter(Transaction.blockNumber >= start_block) - if end_block is not None: - query = query.filter(Transaction.blockNumber < end_block) - - return pd.read_sql(query.statement, con=session.connection()).set_index("blockNumber") - - def get_user_map(session: Session, address: str | None = None) -> pd.DataFrame: """Get all usermapping and returns as a pandas dataframe. @@ -326,7 +271,7 @@ def get_latest_block_number_from_table(table_obj: Type[Base], session: Session) Arguments --------- - table_obj : Type[WalletInfo | PoolInfo | Transaction | CheckpointInfo] + table_obj : Type[Base] The sqlalchemy class that contains the blockNumber column session : Session The initialized session object diff --git a/lib/chainsync/chainsync/db/base/interface_test.py b/lib/chainsync/chainsync/db/base/interface_test.py new file mode 100644 index 0000000000..f2f6cd2411 --- /dev/null +++ b/lib/chainsync/chainsync/db/base/interface_test.py @@ -0,0 +1,139 @@ +"""CRUD tests for CheckpointInfo""" +import numpy as np +import pytest +from sqlalchemy import String, create_engine +from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, sessionmaker + +from .interface import add_user_map, drop_table, get_user_map, query_tables +from .schema import Base + +engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing +Session = sessionmaker(bind=engine) + +# fixture arguments 
in test function have to be the same as the fixture name +# pylint: disable=redefined-outer-name, too-few-public-methods + + +class Based(MappedAsDataclass, DeclarativeBase): + """Base class to subclass from to define the schema""" + + +class Very(Based): + """Dummy but very sincere table schema.""" + + __tablename__ = "verybased" + + key: Mapped[str] = mapped_column(String, primary_key=True) + + +class DropMe(Based): + """Dummy table schema that wants to be dropped.""" + + __tablename__ = "dropme" + + key: Mapped[str] = mapped_column(String, primary_key=True) + + +@pytest.fixture(scope="function") +def test_session(): + """Dummy session fixture for tests""" + Based.metadata.create_all(engine) # create tables + test_session_ = Session() + yield test_session_ + test_session_.close() + Based.metadata.drop_all(engine) # drop tables + + +@pytest.fixture(scope="function") +def session(): + """Session fixture for tests""" + Base.metadata.create_all(engine) # create tables + session_ = Session() + yield session_ + session_.close() + Base.metadata.drop_all(engine) # drop tables + + +def test_query_tables(test_session): + """Return a list of tables in the database.""" + table_names = query_tables(test_session) + test_session.commit() + + np.testing.assert_array_equal(table_names, ["dropme", "verybased"]) + + +def test_drop_table(test_session): + """Drop a table from the database.""" + drop_table(test_session, "dropme") + table_names = query_tables(test_session) + test_session.commit() + + np.testing.assert_array_equal(table_names, ["verybased"]) + + +class TestUserMapInterface: + """Testing postgres interface for usermap table""" + + def test_get_user_map(self, session): + """Testing retrevial of usermap via interface""" + username_1 = "a" + addresses_1 = ["1", "2", "3"] + add_user_map(username=username_1, addresses=addresses_1, session=session) + username_2 = "b" + addresses_2 = ["4", "5"] + add_user_map(username=username_2, addresses=addresses_2, session=session) + + # 
This is in order of insertion + user_map_df = get_user_map(session) + assert len(user_map_df) == 5 + np.testing.assert_array_equal(user_map_df["username"], ["a", "a", "a", "b", "b"]) + np.testing.assert_array_equal(user_map_df["address"], ["1", "2", "3", "4", "5"]) + + def test_get_query_user_map(self, session): + """Testing querying by block number of user map via interface""" + username_1 = "a" + addresses_1 = ["1", "2", "3"] + add_user_map(username=username_1, addresses=addresses_1, session=session) + username_2 = "b" + addresses_2 = ["4", "5"] + add_user_map(username=username_2, addresses=addresses_2, session=session) + + user_map_df = get_user_map(session, address="1") + np.testing.assert_array_equal(user_map_df["username"], ["a"]) + user_map_df = get_user_map(session, address="2") + np.testing.assert_array_equal(user_map_df["username"], ["a"]) + user_map_df = get_user_map(session, address="3") + np.testing.assert_array_equal(user_map_df["username"], ["a"]) + user_map_df = get_user_map(session, address="4") + np.testing.assert_array_equal(user_map_df["username"], ["b"]) + user_map_df = get_user_map(session, address="5") + np.testing.assert_array_equal(user_map_df["username"], ["b"]) + + def test_user_map_insertion_error(self, session): + """Testing retrevial of usermap via interface""" + username_1 = "a" + addresses_1 = ["1", "2", "3"] + add_user_map(username=username_1, addresses=addresses_1, session=session) + + # Adding the same addresses with the same username should pass + username_2 = "a" + addresses_2 = ["1", "2", "5"] + add_user_map(username=username_2, addresses=addresses_2, session=session) + + user_map_df = get_user_map(session) + assert len(user_map_df) == 4 + np.testing.assert_array_equal(user_map_df["username"], ["a", "a", "a", "a"]) + np.testing.assert_array_equal(user_map_df["address"], ["1", "2", "3", "5"]) + + # Adding the same addresses with different username should fail + username_3 = "b" + addresses_3 = ["6", "1", "2", "4"] + with 
pytest.raises(ValueError): + add_user_map(username=username_3, addresses=addresses_3, session=session) + + # Final db values shouldn't change + user_map_df = get_user_map(session) + user_map_df = get_user_map(session) + assert len(user_map_df) == 4 + np.testing.assert_array_equal(user_map_df["username"], ["a", "a", "a", "a"]) + np.testing.assert_array_equal(user_map_df["address"], ["1", "2", "3", "5"]) diff --git a/lib/chainsync/chainsync/db/base/schema.py b/lib/chainsync/chainsync/db/base/schema.py new file mode 100644 index 0000000000..3cc721df76 --- /dev/null +++ b/lib/chainsync/chainsync/db/base/schema.py @@ -0,0 +1,28 @@ +"""Database Schemas for Basic Blockchain Datatypes. These include things like Transactions, Accounts Etc.""" +from __future__ import annotations + +from sqlalchemy import String +from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column + +# Schema file doesn't need any methods in these dataclasses +# pylint: disable=too-few-public-methods + +# solidity returns things in camelCase. Keeping the formatting to indicate the source. 
+# pylint: disable=invalid-name + +# Ideally, we'd use `Mapped[str | None]`, but this breaks using Python 3.9: +# https://github.com/sqlalchemy/sqlalchemy/issues/9110 +# Currently using `Mapped[Union[str, None]]` for backwards compatibility + + +class Base(MappedAsDataclass, DeclarativeBase): + """Base class to subclass from to define the schema""" + + +class UserMap(Base): + """Table/dataclass schema for pool config.""" + + __tablename__ = "usermap" + + address: Mapped[str] = mapped_column(String, primary_key=True) + username: Mapped[str] = mapped_column(String, index=True) diff --git a/lib/chainsync/chainsync/db/base/schema_test.py b/lib/chainsync/chainsync/db/base/schema_test.py new file mode 100644 index 0000000000..dfab77d262 --- /dev/null +++ b/lib/chainsync/chainsync/db/base/schema_test.py @@ -0,0 +1,64 @@ +"""CRUD tests for UserMap""" +import pytest +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from .schema import Base, UserMap + +engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing +Session = sessionmaker(bind=engine) + +# fixture arguments in test function have to be the same as the fixture name +# pylint: disable=redefined-outer-name + + +@pytest.fixture(scope="function") +def session(): + """Session fixture for tests""" + Base.metadata.create_all(engine) # create tables + session_ = Session() + yield session_ + session_.close() + Base.metadata.drop_all(engine) # drop tables + + +class TestUserMapTable: + """CRUD tests for UserMap table""" + + def test_create_user_map(self, session): + """Create and entry""" + # Note: this test is using inmemory sqlite, which doesn't seem to support + # autoincrementing ids without init, whereas postgres does this with no issues + # Hence, we explicitly add id here + user_map = UserMap(address="1", username="a") + session.add(user_map) + session.commit() + + retrieved_user_map = session.query(UserMap).filter_by(address="1").first() + assert 
retrieved_user_map is not None + assert retrieved_user_map.username == "a" + + def test_update_user_map(self, session): + """Update an entry""" + user_map = UserMap(address="1", username="a") + session.add(user_map) + session.commit() + + user_map.username = "b" + session.commit() + + updated_user_map = session.query(UserMap).filter_by(address="1").first() + # tokenValue retreieved from postgres is in Decimal, cast to float + assert updated_user_map.username == "b" + + def test_delete_user_map(self, session): + """Delete an entry""" + user_map = UserMap(address="1", username="a") + session.add(user_map) + session.commit() + + session.delete(user_map) + session.commit() + + deleted_user_map = session.query(UserMap).filter_by(address="1").first() + assert deleted_user_map is None diff --git a/lib/chainsync/chainsync/hyperdrive/__init__.py b/lib/chainsync/chainsync/db/hyperdrive/__init__.py similarity index 68% rename from lib/chainsync/chainsync/hyperdrive/__init__.py rename to lib/chainsync/chainsync/db/hyperdrive/__init__.py index 857aa6dae4..f5b4fa29a0 100644 --- a/lib/chainsync/chainsync/hyperdrive/__init__.py +++ b/lib/chainsync/chainsync/db/hyperdrive/__init__.py @@ -7,23 +7,23 @@ convert_pool_info, get_wallet_info, ) -from .crash_report import log_hyperdrive_crash_report, setup_hyperdrive_crash_report_logging -from .db_schema import CheckpointInfo, PoolConfig, PoolInfo, WalletDelta, WalletInfo -from .get_hyperdrive_contract import get_hyperdrive_contract -from .postgres import ( +from .interface import ( add_checkpoint_infos, add_pool_config, add_pool_infos, + add_transactions, add_wallet_deltas, add_wallet_infos, get_agent_positions, - get_agents, + get_all_traders, get_all_wallet_info, get_checkpoint_info, get_current_wallet_info, get_latest_block_number_from_pool_info_table, get_pool_config, get_pool_info, + get_transactions, get_wallet_deltas, get_wallet_info_history, ) +from .schema import CheckpointInfo, HyperdriveTransaction, PoolConfig, PoolInfo, 
WalletDelta, WalletInfo diff --git a/lib/chainsync/chainsync/hyperdrive/agent_position.py b/lib/chainsync/chainsync/db/hyperdrive/agent_position.py similarity index 100% rename from lib/chainsync/chainsync/hyperdrive/agent_position.py rename to lib/chainsync/chainsync/db/hyperdrive/agent_position.py diff --git a/lib/chainsync/chainsync/hyperdrive/convert_data.py b/lib/chainsync/chainsync/db/hyperdrive/convert_data.py similarity index 84% rename from lib/chainsync/chainsync/hyperdrive/convert_data.py rename to lib/chainsync/chainsync/db/hyperdrive/convert_data.py index 454d641883..c0f85969fe 100644 --- a/lib/chainsync/chainsync/hyperdrive/convert_data.py +++ b/lib/chainsync/chainsync/db/hyperdrive/convert_data.py @@ -1,10 +1,11 @@ """Utilities to convert hyperdrive related things to database schema objects.""" +from __future__ import annotations + import logging from decimal import Decimal from typing import Any -from chainsync.base import Transaction, convert_scaled_value_to_decimal from eth_typing import BlockNumber from ethpy.base import get_token_balance, get_transaction_logs from ethpy.hyperdrive import AssetIdPrefix, decode_asset_id, encode_asset_id @@ -14,12 +15,12 @@ from web3.contract.contract import Contract from web3.types import TxData -from .db_schema import CheckpointInfo, PoolConfig, PoolInfo, WalletDelta, WalletInfo +from .schema import CheckpointInfo, HyperdriveTransaction, PoolConfig, PoolInfo, WalletDelta, WalletInfo def convert_hyperdrive_transactions_for_block( web3: Web3, hyperdrive_contract: Contract, transactions: list[TxData] -) -> tuple[list[Transaction], list[WalletDelta]]: +) -> tuple[list[HyperdriveTransaction], list[WalletDelta]]: """Fetch transactions related to the contract. 
Arguments @@ -33,12 +34,12 @@ def convert_hyperdrive_transactions_for_block( Returns ------- - tuple[list[Transaction], list[WalletDelta]] - A list of Transaction objects ready to be inserted into Postgres, and + tuple[list[HyperdriveTransaction], list[WalletDelta]] + A list of HyperdriveTransaction objects ready to be inserted into Postgres, and a list of wallet delta objects ready to be inserted into Postgres """ - out_transactions: list[Transaction] = [] + out_transactions: list[HyperdriveTransaction] = [] out_wallet_deltas: list[WalletDelta] = [] for transaction in transactions: transaction_dict = dict(transaction) @@ -86,17 +87,39 @@ def _convert_object_hexbytes_to_strings(obj: Any) -> Any: return obj +def _convert_scaled_value_to_decimal(input_val: int | None) -> Decimal | None: + """ + Given a scaled value int, converts it to a Decimal, while supporting Nones + + Arguments + ---------- + input_val: int | None + The scaled integer value to unscale and convert to Decimal + + Returns + ------- + Decimal | None + The unscaled Decimal value + """ + if input_val is not None: + # TODO add this cast within fixedpoint + fp_val = FixedPoint(scaled_value=input_val) + str_val = str(fp_val) + return Decimal(str_val) + return None + + # TODO move this function to hyperdrive_interface and return a list of dictionaries def get_wallet_info( hyperdrive_contract: Contract, base_contract: Contract, block_number: BlockNumber, - transactions: list[Transaction], + transactions: list[HyperdriveTransaction], pool_info: PoolInfo, ) -> list[WalletInfo]: """Retrieve wallet information at a given block given a transaction. 
- Transactions are needed here to get + HyperdriveTransactions are needed here to get (1) the wallet address of a transaction, and (2) the token id of the transaction @@ -108,7 +131,7 @@ def get_wallet_info( The deployed base contract instance block_number : BlockNumber The block number to query - transactions : list[Transaction] + transactions : list[HyperdriveTransaction] The list of transactions to get events from pool_info : PoolInfo The associated pool info, used to extract share price @@ -134,7 +157,7 @@ def get_wallet_info( walletAddress=wallet_addr, baseTokenType="BASE", tokenType="BASE", - tokenValue=convert_scaled_value_to_decimal(num_base_token), + tokenValue=_convert_scaled_value_to_decimal(num_base_token), ) ) @@ -150,7 +173,7 @@ def get_wallet_info( walletAddress=wallet_addr, baseTokenType="LP", tokenType="LP", - tokenValue=convert_scaled_value_to_decimal(num_lp_token), + tokenValue=_convert_scaled_value_to_decimal(num_lp_token), maturityTime=None, sharePrice=None, ) @@ -168,7 +191,7 @@ def get_wallet_info( walletAddress=wallet_addr, baseTokenType="WITHDRAWAL_SHARE", tokenType="WITHDRAWAL_SHARE", - tokenValue=convert_scaled_value_to_decimal(num_withdrawal_token), + tokenValue=_convert_scaled_value_to_decimal(num_withdrawal_token), maturityTime=None, sharePrice=None, ) @@ -196,7 +219,7 @@ def get_wallet_info( walletAddress=wallet_addr, baseTokenType=base_token_type, tokenType=token_type, - tokenValue=convert_scaled_value_to_decimal(num_custom_token), + tokenValue=_convert_scaled_value_to_decimal(num_custom_token), maturityTime=token_maturity_time, sharePrice=share_price, ) @@ -303,8 +326,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W Returns ------- - list[Transaction] - A list of Transaction objects ready to be inserted into Postgres + list[HyperdriveTransaction] + A list of HyperdriveTransaction objects ready to be inserted into Postgres """ wallet_deltas = [] # We iterate through the logs looking for specific 
events that describe the transaction @@ -313,8 +336,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W for log in logs: if log["event"] == "AddLiquidity": wallet_addr = log["args"]["provider"] - token_delta = convert_scaled_value_to_decimal(log["args"]["lpAmount"]) - base_delta = convert_scaled_value_to_decimal(-log["args"]["baseAmount"]) + token_delta = _convert_scaled_value_to_decimal(log["args"]["lpAmount"]) + base_delta = _convert_scaled_value_to_decimal(-log["args"]["baseAmount"]) wallet_deltas.extend( [ WalletDelta( @@ -338,8 +361,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W elif log["event"] == "OpenLong": wallet_addr = log["args"]["trader"] - token_delta = convert_scaled_value_to_decimal(log["args"]["bondAmount"]) - base_delta = convert_scaled_value_to_decimal(-log["args"]["baseAmount"]) + token_delta = _convert_scaled_value_to_decimal(log["args"]["bondAmount"]) + base_delta = _convert_scaled_value_to_decimal(-log["args"]["baseAmount"]) maturity_time = log["args"]["maturityTime"] wallet_deltas.extend( [ @@ -365,8 +388,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W elif log["event"] == "OpenShort": wallet_addr = log["args"]["trader"] - token_delta = convert_scaled_value_to_decimal(log["args"]["bondAmount"]) - base_delta = convert_scaled_value_to_decimal(-log["args"]["baseAmount"]) + token_delta = _convert_scaled_value_to_decimal(log["args"]["bondAmount"]) + base_delta = _convert_scaled_value_to_decimal(-log["args"]["baseAmount"]) maturity_time = log["args"]["maturityTime"] wallet_deltas.extend( [ @@ -393,9 +416,9 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W elif log["event"] == "RemoveLiquidity": wallet_addr = log["args"]["provider"] # Two deltas, one for withdrawal shares, one for lp tokens - lp_delta = convert_scaled_value_to_decimal(-log["args"]["lpAmount"]) - withdrawal_delta = 
convert_scaled_value_to_decimal(log["args"]["withdrawalShareAmount"]) - base_delta = convert_scaled_value_to_decimal(log["args"]["baseAmount"]) + lp_delta = _convert_scaled_value_to_decimal(-log["args"]["lpAmount"]) + withdrawal_delta = _convert_scaled_value_to_decimal(log["args"]["withdrawalShareAmount"]) + base_delta = _convert_scaled_value_to_decimal(log["args"]["baseAmount"]) wallet_deltas.extend( [ WalletDelta( @@ -427,8 +450,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W elif log["event"] == "CloseLong": wallet_addr = log["args"]["trader"] - token_delta = convert_scaled_value_to_decimal(-log["args"]["bondAmount"]) - base_delta = convert_scaled_value_to_decimal(log["args"]["baseAmount"]) + token_delta = _convert_scaled_value_to_decimal(-log["args"]["bondAmount"]) + base_delta = _convert_scaled_value_to_decimal(log["args"]["baseAmount"]) maturity_time = log["args"]["maturityTime"] wallet_deltas.extend( [ @@ -454,8 +477,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W elif log["event"] == "CloseShort": wallet_addr = log["args"]["trader"] - token_delta = convert_scaled_value_to_decimal(-log["args"]["bondAmount"]) - base_delta = convert_scaled_value_to_decimal(log["args"]["baseAmount"]) + token_delta = _convert_scaled_value_to_decimal(-log["args"]["bondAmount"]) + base_delta = _convert_scaled_value_to_decimal(log["args"]["baseAmount"]) maturity_time = log["args"]["maturityTime"] wallet_deltas.extend( [ @@ -482,8 +505,8 @@ def _build_wallet_deltas(logs: list[dict], tx_hash: str, block_number) -> list[W elif log["event"] == "RedeemWithdrawalShares": wallet_addr = log["args"]["provider"] maturity_time = None - token_delta = convert_scaled_value_to_decimal(-log["args"]["withdrawalShareAmount"]) - base_delta = convert_scaled_value_to_decimal(log["args"]["baseAmount"]) + token_delta = _convert_scaled_value_to_decimal(-log["args"]["withdrawalShareAmount"]) + base_delta = 
_convert_scaled_value_to_decimal(log["args"]["baseAmount"]) wallet_deltas.extend( [ WalletDelta( @@ -515,8 +538,8 @@ def _build_hyperdrive_transaction_object( transaction_dict: dict[str, Any], logs: list[dict[str, Any]], receipt: dict[str, Any], -) -> Transaction: - """Conversion function to translate output of chain queries to the Transaction object. +) -> HyperdriveTransaction: + """Conversion function to translate output of chain queries to the HyperdriveTransaction object. Arguments ---------- @@ -529,11 +552,11 @@ def _build_hyperdrive_transaction_object( Returns ------- - Transaction + HyperdriveTransaction A transaction object to be inserted into postgres """ - # Build output obj dict incrementally to be passed into Transaction - # i.e., Transaction(**out_dict) + # Build output obj dict incrementally to be passed into HyperdriveTransaction + # i.e., HyperdriveTransaction(**out_dict) # Base transaction fields out_dict: dict[str, Any] = { "blockNumber": transaction_dict["blockNumber"], @@ -548,18 +571,18 @@ def _build_hyperdrive_transaction_object( # TODO can the input field ever be empty or not exist? 
out_dict["input_method"] = transaction_dict["input"]["method"] input_params = transaction_dict["input"]["params"] - out_dict["input_params_contribution"] = convert_scaled_value_to_decimal(input_params.get("_contribution", None)) - out_dict["input_params_apr"] = convert_scaled_value_to_decimal(input_params.get("_apr", None)) + out_dict["input_params_contribution"] = _convert_scaled_value_to_decimal(input_params.get("_contribution", None)) + out_dict["input_params_apr"] = _convert_scaled_value_to_decimal(input_params.get("_apr", None)) out_dict["input_params_destination"] = input_params.get("_destination", None) out_dict["input_params_asUnderlying"] = input_params.get("_asUnderlying", None) - out_dict["input_params_baseAmount"] = convert_scaled_value_to_decimal(input_params.get("_baseAmount", None)) - out_dict["input_params_minOutput"] = convert_scaled_value_to_decimal(input_params.get("_minOutput", None)) - out_dict["input_params_bondAmount"] = convert_scaled_value_to_decimal(input_params.get("_bondAmount", None)) - out_dict["input_params_maxDeposit"] = convert_scaled_value_to_decimal(input_params.get("_maxDeposit", None)) + out_dict["input_params_baseAmount"] = _convert_scaled_value_to_decimal(input_params.get("_baseAmount", None)) + out_dict["input_params_minOutput"] = _convert_scaled_value_to_decimal(input_params.get("_minOutput", None)) + out_dict["input_params_bondAmount"] = _convert_scaled_value_to_decimal(input_params.get("_bondAmount", None)) + out_dict["input_params_maxDeposit"] = _convert_scaled_value_to_decimal(input_params.get("_maxDeposit", None)) out_dict["input_params_maturityTime"] = input_params.get("_maturityTime", None) - out_dict["input_params_minApr"] = convert_scaled_value_to_decimal(input_params.get("_minApr", None)) - out_dict["input_params_maxApr"] = convert_scaled_value_to_decimal(input_params.get("_maxApr", None)) - out_dict["input_params_shares"] = convert_scaled_value_to_decimal(input_params.get("_shares", None)) + 
out_dict["input_params_minApr"] = _convert_scaled_value_to_decimal(input_params.get("_minApr", None)) + out_dict["input_params_maxApr"] = _convert_scaled_value_to_decimal(input_params.get("_maxApr", None)) + out_dict["input_params_shares"] = _convert_scaled_value_to_decimal(input_params.get("_shares", None)) # Assuming one TransferSingle per transfer # TODO Fix this below eventually # There can be two transfer singles @@ -584,5 +607,5 @@ def _build_hyperdrive_transaction_object( event_prefix, event_maturity_time = decode_asset_id(out_dict["event_id"]) out_dict["event_prefix"] = event_prefix out_dict["event_maturity_time"] = event_maturity_time - transaction = Transaction(**out_dict) + transaction = HyperdriveTransaction(**out_dict) return transaction diff --git a/lib/chainsync/chainsync/hyperdrive/postgres.py b/lib/chainsync/chainsync/db/hyperdrive/interface.py similarity index 86% rename from lib/chainsync/chainsync/hyperdrive/postgres.py rename to lib/chainsync/chainsync/db/hyperdrive/interface.py index d57d9772d9..30b3b65cee 100644 --- a/lib/chainsync/chainsync/hyperdrive/postgres.py +++ b/lib/chainsync/chainsync/db/hyperdrive/interface.py @@ -1,13 +1,35 @@ """Utilities for hyperdrive related postgres interactions.""" from __future__ import annotations +import logging + import pandas as pd -from chainsync.base import get_latest_block_number_from_table from sqlalchemy import exc from sqlalchemy.orm import Session +from ..base import get_latest_block_number_from_table from .agent_position import AgentPosition -from .db_schema import CheckpointInfo, PoolConfig, PoolInfo, WalletDelta, WalletInfo +from .schema import CheckpointInfo, HyperdriveTransaction, PoolConfig, PoolInfo, WalletDelta, WalletInfo + + +def add_transactions(transactions: list[HyperdriveTransaction], session: Session) -> None: + """Add transactions to the poolinfo table. 
+ + Arguments + --------- + transactions : list[HyperdriveTransaction] + A list of HyperdriveTransaction objects to insert into postgres + session : Session + The initialized session object + """ + for transaction in transactions: + session.add(transaction) + try: + session.commit() + except exc.DataError as err: + session.rollback() + logging.error("Error adding transaction: %s", err) + raise err def add_wallet_infos(wallet_infos: list[WalletInfo], session: Session) -> None: @@ -26,7 +48,7 @@ def add_wallet_infos(wallet_infos: list[WalletInfo], session: Session) -> None: session.commit() except exc.DataError as err: session.rollback() - print(f"{wallet_infos=}") + logging.error("Error on adding wallet_infos: %s", err) raise err @@ -77,7 +99,7 @@ def add_pool_config(pool_config: PoolConfig, session: Session) -> None: session.commit() except exc.DataError as err: session.rollback() - print(f"{pool_config=}") + logging.error("Error adding pool_config: %s", err) raise err elif len(existing_pool_config) == 1: # Verify pool config @@ -109,7 +131,7 @@ def add_pool_infos(pool_infos: list[PoolInfo], session: Session) -> None: session.commit() except exc.DataError as err: session.rollback() - print(f"{pool_infos=}") + logging.error("Error adding pool_infos: %s", err) raise err @@ -148,7 +170,7 @@ def add_wallet_deltas(wallet_deltas: list[WalletDelta], session: Session) -> Non session.commit() except exc.DataError as err: session.rollback() - print(f"{wallet_deltas=}") + logging.error("Error in adding wallet_deltas: %s", err) raise err @@ -208,6 +230,41 @@ def get_pool_info( return pd.read_sql(query.statement, con=session.connection(), coerce_float=coerce_float).set_index("blockNumber") +def get_transactions(session: Session, start_block: int | None = None, end_block: int | None = None) -> pd.DataFrame: + """Get all transactions and returns as a pandas dataframe. 
+ + Arguments + --------- + session : Session + The initialized session object + start_block : int | None + The starting block to filter the query on. start_block integers + matches python slicing notation, e.g., list[:3], list[:-3] + end_block : int | None + The ending block to filter the query on. end_block integers + matches python slicing notation, e.g., list[:3], list[:-3] + + Returns + ------- + DataFrame + A DataFrame that consists of the queried transactions data + """ + query = session.query(HyperdriveTransaction) + + # Support for negative indices + if (start_block is not None) and (start_block < 0): + start_block = get_latest_block_number_from_table(HyperdriveTransaction, session) + start_block + 1 + if (end_block is not None) and (end_block < 0): + end_block = get_latest_block_number_from_table(HyperdriveTransaction, session) + end_block + 1 + + if start_block is not None: + query = query.filter(HyperdriveTransaction.blockNumber >= start_block) + if end_block is not None: + query = query.filter(HyperdriveTransaction.blockNumber < end_block) + + return pd.read_sql(query.statement, con=session.connection()).set_index("blockNumber") + + def get_checkpoint_info(session: Session, start_block: int | None = None, end_block: int | None = None) -> pd.DataFrame: """Get all info associated with a given checkpoint. @@ -423,8 +480,8 @@ def get_wallet_deltas( return pd.read_sql(query.statement, con=session.connection(), coerce_float=coerce_float) -def get_agents(session: Session, start_block: int | None = None, end_block: int | None = None) -> list[str]: - """Get the list of all agents from the WalletInfo table. +def get_all_traders(session: Session, start_block: int | None = None, end_block: int | None = None) -> list[str]: + """Get the list of all traders from the WalletInfo table. 
Arguments --------- @@ -440,7 +497,7 @@ def get_agents(session: Session, start_block: int | None = None, end_block: int Returns ------- list[str] - A list of agent addresses + A list of addresses that have made a trade """ query = session.query(WalletInfo.walletAddress) # Support for negative indices diff --git a/lib/chainsync/chainsync/db/hyperdrive/interface_test.py b/lib/chainsync/chainsync/db/hyperdrive/interface_test.py new file mode 100644 index 0000000000..e7be8b9a2c --- /dev/null +++ b/lib/chainsync/chainsync/db/hyperdrive/interface_test.py @@ -0,0 +1,377 @@ +"""CRUD tests for Transaction""" +from datetime import datetime +from decimal import Decimal + +import numpy as np +import pytest +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from ..base.interface import get_latest_block_number_from_table +from ..base.schema import Base +from .interface import ( + add_checkpoint_infos, + add_pool_config, + add_pool_infos, + add_transactions, + add_wallet_deltas, + add_wallet_infos, + get_all_traders, + get_all_wallet_info, + get_checkpoint_info, + get_current_wallet_info, + get_latest_block_number_from_pool_info_table, + get_latest_block_number_from_table, + get_pool_config, + get_pool_info, + get_transactions, + get_wallet_deltas, +) +from .schema import CheckpointInfo, HyperdriveTransaction, PoolConfig, PoolInfo, WalletDelta, WalletInfo + +engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing +Session = sessionmaker(bind=engine) + +# fixture arguments in test function have to be the same as the fixture name +# pylint: disable=redefined-outer-name + + +@pytest.fixture(scope="function") +def session(): + """Session fixture for tests""" + Base.metadata.create_all(engine) # create tables + session_ = Session() + yield session_ + session_.close() + Base.metadata.drop_all(engine) # drop tables + + +class TestTransactionInterface: + """Testing postgres interface for transaction table""" + + def 
test_latest_block_number(self, session): + """Testing retrevial of transaction via interface""" + transaction_1 = HyperdriveTransaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.0")) + add_transactions([transaction_1], session) + + latest_block_number = get_latest_block_number_from_table(HyperdriveTransaction, session) + assert latest_block_number == 1 + + transaction_2 = HyperdriveTransaction(blockNumber=2, transactionHash="b", event_value=Decimal("3.2")) + transaction_3 = HyperdriveTransaction(blockNumber=3, transactionHash="c", event_value=Decimal("3.4")) + add_transactions([transaction_2, transaction_3], session) + + latest_block_number = get_latest_block_number_from_table(HyperdriveTransaction, session) + assert latest_block_number == 3 + + def test_get_transactions(self, session): + """Testing retrevial of transactions via interface""" + transaction_1 = HyperdriveTransaction(blockNumber=0, transactionHash="a", event_value=Decimal("3.1")) + transaction_2 = HyperdriveTransaction(blockNumber=1, transactionHash="b", event_value=Decimal("3.2")) + transaction_3 = HyperdriveTransaction(blockNumber=2, transactionHash="c", event_value=Decimal("3.3")) + add_transactions([transaction_1, transaction_2, transaction_3], session) + + transactions_df = get_transactions(session) + np.testing.assert_array_equal(transactions_df["event_value"], [3.1, 3.2, 3.3]) + + def test_block_query_transactions(self, session): + """Testing querying by block number of transactions via interface""" + transaction_1 = HyperdriveTransaction(blockNumber=0, transactionHash="a", event_value=Decimal("3.1")) + transaction_2 = HyperdriveTransaction(blockNumber=1, transactionHash="b", event_value=Decimal("3.2")) + transaction_3 = HyperdriveTransaction(blockNumber=2, transactionHash="c", event_value=Decimal("3.3")) + add_transactions([transaction_1, transaction_2, transaction_3], session) + + transactions_df = get_transactions(session, start_block=1) + 
np.testing.assert_array_equal(transactions_df["event_value"], [3.2, 3.3]) + + transactions_df = get_transactions(session, start_block=-1) + np.testing.assert_array_equal(transactions_df["event_value"], [3.3]) + + transactions_df = get_transactions(session, end_block=1) + np.testing.assert_array_equal(transactions_df["event_value"], [3.1]) + + transactions_df = get_transactions(session, end_block=-1) + np.testing.assert_array_equal(transactions_df["event_value"], [3.1, 3.2]) + + transactions_df = get_transactions(session, start_block=1, end_block=-1) + np.testing.assert_array_equal(transactions_df["event_value"], [3.2]) + + +class TestCheckpointInterface: + """Testing postgres interface for checkpoint table""" + + def test_latest_block_number(self, session): + """Testing retrevial of checkpoint via interface""" + checkpoint_1 = CheckpointInfo(blockNumber=1, timestamp=datetime.now()) + add_checkpoint_infos([checkpoint_1], session) + session.commit() + + latest_block_number = get_latest_block_number_from_table(CheckpointInfo, session) + assert latest_block_number == 1 + + checkpoint_2 = CheckpointInfo(blockNumber=2, timestamp=datetime.now()) + checkpoint_3 = CheckpointInfo(blockNumber=3, timestamp=datetime.now()) + add_checkpoint_infos([checkpoint_2, checkpoint_3], session) + + latest_block_number = get_latest_block_number_from_table(CheckpointInfo, session) + assert latest_block_number == 3 + + def test_get_checkpoints(self, session): + """Testing retrevial of checkpoints via interface""" + date_1 = datetime(1945, 8, 6) + date_2 = datetime(1984, 8, 9) + date_3 = datetime(2001, 9, 11) + checkpoint_1 = CheckpointInfo(blockNumber=0, timestamp=date_1) + checkpoint_2 = CheckpointInfo(blockNumber=1, timestamp=date_2) + checkpoint_3 = CheckpointInfo(blockNumber=2, timestamp=date_3) + add_checkpoint_infos([checkpoint_1, checkpoint_2, checkpoint_3], session) + + checkpoints_df = get_checkpoint_info(session) + np.testing.assert_array_equal( + 
checkpoints_df["timestamp"].dt.to_pydatetime(), np.array([date_1, date_2, date_3]) + ) + + def test_block_query_checkpoints(self, session): + """Testing querying by block number of checkpoints via interface""" + checkpoint_1 = CheckpointInfo(blockNumber=0, timestamp=datetime.now(), sharePrice=Decimal("3.1")) + checkpoint_2 = CheckpointInfo(blockNumber=1, timestamp=datetime.now(), sharePrice=Decimal("3.2")) + checkpoint_3 = CheckpointInfo(blockNumber=2, timestamp=datetime.now(), sharePrice=Decimal("3.3")) + add_checkpoint_infos([checkpoint_1, checkpoint_2, checkpoint_3], session) + + checkpoints_df = get_checkpoint_info(session, start_block=1) + np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.2, 3.3]) + + checkpoints_df = get_checkpoint_info(session, start_block=-1) + np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.3]) + + checkpoints_df = get_checkpoint_info(session, end_block=1) + np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.1]) + + checkpoints_df = get_checkpoint_info(session, end_block=-1) + np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.1, 3.2]) + + checkpoints_df = get_checkpoint_info(session, start_block=1, end_block=-1) + np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.2]) + + +class TestPoolConfigInterface: + """Testing postgres interface for poolconfig table""" + + def test_get_pool_config(self, session): + """Testing retrevial of pool config via interface""" + pool_config_1 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) + add_pool_config(pool_config_1, session) + + pool_config_df_1 = get_pool_config(session, coerce_float=False) + assert len(pool_config_df_1) == 1 + assert pool_config_df_1.loc[0, "initialSharePrice"] == Decimal("3.2") + + pool_config_2 = PoolConfig(contractAddress="1", initialSharePrice=Decimal("3.4")) + add_pool_config(pool_config_2, session) + + pool_config_df_2 = get_pool_config(session, coerce_float=False) + assert len(pool_config_df_2) 
== 2 + np.testing.assert_array_equal(pool_config_df_2["initialSharePrice"], np.array([Decimal("3.2"), Decimal("3.4")])) + + def test_primary_id_query_pool_config(self, session): + """Testing retrevial of pool config via interface""" + pool_config = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) + add_pool_config(pool_config, session) + + pool_config_df_1 = get_pool_config(session, contract_address="0", coerce_float=False) + assert len(pool_config_df_1) == 1 + assert pool_config_df_1.loc[0, "initialSharePrice"] == Decimal("3.2") + + pool_config_df_2 = get_pool_config(session, contract_address="1", coerce_float=False) + assert len(pool_config_df_2) == 0 + + def test_pool_config_verify(self, session): + """Testing retrevial of pool config via interface""" + pool_config_1 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) + add_pool_config(pool_config_1, session) + pool_config_df_1 = get_pool_config(session, coerce_float=False) + assert len(pool_config_df_1) == 1 + assert pool_config_df_1.loc[0, "initialSharePrice"] == Decimal("3.2") + + # Nothing should happen if we give the same pool_config + pool_config_2 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) + add_pool_config(pool_config_2, session) + pool_config_df_2 = get_pool_config(session, coerce_float=False) + assert len(pool_config_df_2) == 1 + assert pool_config_df_2.loc[0, "initialSharePrice"] == Decimal("3.2") + + # If we try to add another pool config with a different value, should throw a ValueError + pool_config_3 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.4")) + with pytest.raises(ValueError): + add_pool_config(pool_config_3, session) + + +class TestPoolInfoInterface: + """Testing postgres interface for poolinfo table""" + + def test_latest_block_number(self, session): + """Testing latest block number call""" + timestamp_1 = datetime.fromtimestamp(1628472000) + pool_info_1 = PoolInfo(blockNumber=1, timestamp=timestamp_1) + 
add_pool_infos([pool_info_1], session) + + latest_block_number = get_latest_block_number_from_pool_info_table(session) + assert latest_block_number == 1 + + timestamp_1 = datetime.fromtimestamp(1628472002) + pool_info_1 = PoolInfo(blockNumber=2, timestamp=timestamp_1) + timestamp_2 = datetime.fromtimestamp(1628472004) + pool_info_2 = PoolInfo(blockNumber=3, timestamp=timestamp_2) + add_pool_infos([pool_info_1, pool_info_2], session) + + latest_block_number = get_latest_block_number_from_pool_info_table(session) + assert latest_block_number == 3 + + def test_get_pool_info(self, session): + """Testing retrevial of pool info via interface""" + timestamp_1 = datetime.fromtimestamp(1628472000) + pool_info_1 = PoolInfo(blockNumber=0, timestamp=timestamp_1) + timestamp_2 = datetime.fromtimestamp(1628472002) + pool_info_2 = PoolInfo(blockNumber=1, timestamp=timestamp_2) + timestamp_3 = datetime.fromtimestamp(1628472004) + pool_info_3 = PoolInfo(blockNumber=2, timestamp=timestamp_3) + add_pool_infos([pool_info_1, pool_info_2, pool_info_3], session) + + pool_info_df = get_pool_info(session) + np.testing.assert_array_equal( + pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_1, timestamp_2, timestamp_3]) + ) + + def test_block_query_pool_info(self, session): + """Testing retrevial of pool info via interface""" + timestamp_1 = datetime.fromtimestamp(1628472000) + pool_info_1 = PoolInfo(blockNumber=0, timestamp=timestamp_1) + timestamp_2 = datetime.fromtimestamp(1628472002) + pool_info_2 = PoolInfo(blockNumber=1, timestamp=timestamp_2) + timestamp_3 = datetime.fromtimestamp(1628472004) + pool_info_3 = PoolInfo(blockNumber=2, timestamp=timestamp_3) + add_pool_infos([pool_info_1, pool_info_2, pool_info_3], session) + pool_info_df = get_pool_info(session, start_block=1) + np.testing.assert_array_equal( + pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_2, timestamp_3]) + ) + pool_info_df = get_pool_info(session, start_block=-1) + 
np.testing.assert_array_equal(pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_3])) + pool_info_df = get_pool_info(session, end_block=1) + np.testing.assert_array_equal(pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_1])) + pool_info_df = get_pool_info(session, end_block=-1) + np.testing.assert_array_equal( + pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_1, timestamp_2]) + ) + pool_info_df = get_pool_info(session, start_block=1, end_block=-1) + np.testing.assert_array_equal(pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_2])) + + +class TestWalletDeltaInterface: + """Testing postgres interface for walletinfo table""" + + def test_latest_block_number(self, session): + """Testing retrevial of wallet info via interface""" + wallet_delta_1 = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.0")) + add_wallet_deltas([wallet_delta_1], session) + latest_block_number = get_latest_block_number_from_table(WalletDelta, session) + assert latest_block_number == 1 + wallet_delta_2 = WalletDelta(blockNumber=2, transactionHash="a", delta=Decimal("3.2")) + wallet_delta_3 = WalletDelta(blockNumber=3, transactionHash="a", delta=Decimal("3.4")) + add_wallet_deltas([wallet_delta_2, wallet_delta_3], session) + latest_block_number = get_latest_block_number_from_table(WalletDelta, session) + assert latest_block_number == 3 + + def test_get_wallet_delta(self, session): + """Testing retrevial of walletinfo via interface""" + wallet_delta_1 = WalletDelta(blockNumber=0, transactionHash="a", delta=Decimal("3.1")) + wallet_delta_2 = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) + wallet_delta_3 = WalletDelta(blockNumber=2, transactionHash="a", delta=Decimal("3.3")) + add_wallet_deltas([wallet_delta_1, wallet_delta_2, wallet_delta_3], session) + wallet_delta_df = get_wallet_deltas(session) + np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.1, 3.2, 3.3])) + + def 
test_block_query_wallet_delta(self, session): + """Testing querying by block number of wallet info via interface""" + wallet_delta_1 = WalletDelta(blockNumber=0, transactionHash="a", delta=Decimal("3.1")) + wallet_delta_2 = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) + wallet_delta_3 = WalletDelta(blockNumber=2, transactionHash="a", delta=Decimal("3.3")) + add_wallet_deltas([wallet_delta_1, wallet_delta_2, wallet_delta_3], session) + wallet_delta_df = get_wallet_deltas(session, start_block=1) + np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.2, 3.3])) + wallet_delta_df = get_wallet_deltas(session, start_block=-1) + np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.3])) + wallet_delta_df = get_wallet_deltas(session, end_block=1) + np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.1])) + wallet_delta_df = get_wallet_deltas(session, end_block=-1) + np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.1, 3.2])) + wallet_delta_df = get_wallet_deltas(session, start_block=1, end_block=-1) + np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.2])) + + +class TestWalletInfoInterface: + """Testing postgres interface for walletinfo table""" + + def test_latest_block_number(self, session): + """Testing retrevial of wallet info via interface""" + wallet_info_1 = WalletInfo(blockNumber=1, tokenValue=Decimal("3.0")) + add_wallet_infos([wallet_info_1], session) + latest_block_number = get_latest_block_number_from_table(WalletInfo, session) + assert latest_block_number == 1 + wallet_info_2 = WalletInfo(blockNumber=2, tokenValue=Decimal("3.2")) + wallet_info_3 = WalletInfo(blockNumber=3, tokenValue=Decimal("3.4")) + add_wallet_infos([wallet_info_2, wallet_info_3], session) + latest_block_number = get_latest_block_number_from_table(WalletInfo, session) + assert latest_block_number == 3 + + def test_get_wallet_info(self, session): + """Testing retrevial of walletinfo via 
interface""" + wallet_info_1 = WalletInfo(blockNumber=0, tokenValue=Decimal("3.1")) + wallet_info_2 = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) + wallet_info_3 = WalletInfo(blockNumber=2, tokenValue=Decimal("3.3")) + add_wallet_infos([wallet_info_1, wallet_info_2, wallet_info_3], session) + wallet_info_df = get_all_wallet_info(session) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.1, 3.2, 3.3])) + + def test_block_query_wallet_info(self, session): + """Testing querying by block number of wallet info via interface""" + wallet_info_1 = WalletInfo(blockNumber=0, tokenValue=Decimal("3.1")) + wallet_info_2 = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) + wallet_info_3 = WalletInfo(blockNumber=2, tokenValue=Decimal("3.3")) + add_wallet_infos([wallet_info_1, wallet_info_2, wallet_info_3], session) + wallet_info_df = get_all_wallet_info(session, start_block=1) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.2, 3.3])) + wallet_info_df = get_all_wallet_info(session, start_block=-1) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.3])) + wallet_info_df = get_all_wallet_info(session, end_block=1) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.1])) + wallet_info_df = get_all_wallet_info(session, end_block=-1) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.1, 3.2])) + wallet_info_df = get_all_wallet_info(session, start_block=1, end_block=-1) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.2])) + + def test_current_wallet_info(self, session): + """Testing helper function to get current wallet values""" + wallet_info_1 = WalletInfo(blockNumber=0, walletAddress="addr", tokenType="BASE", tokenValue=Decimal("3.1")) + wallet_info_2 = WalletInfo(blockNumber=1, walletAddress="addr", tokenType="LP", tokenValue=Decimal("5.1")) + add_wallet_infos([wallet_info_1, wallet_info_2], session) + wallet_info_df = 
get_current_wallet_info(session).reset_index() + np.testing.assert_array_equal(wallet_info_df["tokenType"], ["BASE", "LP"]) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], [3.1, 5.1]) + # E.g., block 2, wallet base tokens gets updated to 6.1 + wallet_info_3 = WalletInfo(blockNumber=2, walletAddress="addr", tokenType="BASE", tokenValue=Decimal("6.1")) + add_wallet_infos([wallet_info_3], session) + wallet_info_df = get_current_wallet_info(session).reset_index() + np.testing.assert_array_equal(wallet_info_df["tokenType"], ["BASE", "LP"]) + np.testing.assert_array_equal(wallet_info_df["tokenValue"], [6.1, 5.1]) + + def test_get_agents(self, session): + """Testing helper function to get current wallet values""" + wallet_info_1 = WalletInfo(blockNumber=0, walletAddress="addr_1") + wallet_info_2 = WalletInfo(blockNumber=1, walletAddress="addr_1") + wallet_info_3 = WalletInfo(blockNumber=2, walletAddress="addr_2") + add_wallet_infos([wallet_info_1, wallet_info_2, wallet_info_3], session) + agents = get_all_traders(session) + assert len(agents) == 2 + assert "addr_1" in agents + assert "addr_2" in agents diff --git a/lib/chainsync/chainsync/hyperdrive/db_schema.py b/lib/chainsync/chainsync/db/hyperdrive/schema.py similarity index 58% rename from lib/chainsync/chainsync/hyperdrive/db_schema.py rename to lib/chainsync/chainsync/db/hyperdrive/schema.py index aa3f55f409..430423becf 100644 --- a/lib/chainsync/chainsync/hyperdrive/db_schema.py +++ b/lib/chainsync/chainsync/db/hyperdrive/schema.py @@ -4,10 +4,11 @@ from decimal import Decimal from typing import Union -from chainsync.base.db_schema import Base -from sqlalchemy import BigInteger, DateTime, ForeignKey, Integer, Numeric, String +from sqlalchemy import BigInteger, Boolean, DateTime, ForeignKey, Integer, Numeric, String from sqlalchemy.orm import Mapped, mapped_column +from ..base import Base + # pylint: disable=invalid-name @@ -116,3 +117,117 @@ class WalletDelta(Base): tokenType: Mapped[Union[str, None]] 
= mapped_column(String, default=None) delta: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) maturityTime: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + + +class HyperdriveTransaction(Base): + """Table/dataclass schema for Transactions. + + Mapped class that is a data class on the python side, and an declarative base on the sql side. + """ + + __tablename__ = "transactions" + + # Default table primary key + # Note that we use postgres in production and sqlite in testing, but sqlite has issues with + # autoincrement with BigIntegers. Hence, we use the Integer variant when using sqlite in tests + id: Mapped[int] = mapped_column( + BigInteger().with_variant(Integer, "sqlite"), primary_key=True, init=False, autoincrement=True + ) + transactionHash: Mapped[str] = mapped_column(String, index=True, unique=True) + + #### Fields from base transactions #### + blockNumber: Mapped[int] = mapped_column(BigInteger, ForeignKey("poolinfo.blockNumber"), index=True) + transactionIndex: Mapped[Union[int, None]] = mapped_column(Integer, default=None) + nonce: Mapped[Union[int, None]] = mapped_column(Integer, default=None) + # Transaction receipt to/from + # Almost always from wallet address to smart contract address + txn_to: Mapped[Union[str, None]] = mapped_column(String, default=None) + txn_from: Mapped[Union[str, None]] = mapped_column(String, default=None) + gasUsed: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) + + #### Fields from solidity function calls #### + # These fields map solidity function calls and their corresponding arguments + # The params list is exhaustive against all possible methods + input_method: Mapped[Union[str, None]] = mapped_column(String, default=None) + + # Method: initialize + input_params_contribution: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + input_params_apr: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + input_params_destination: 
Mapped[Union[str, None]] = mapped_column(String, default=None) + input_params_asUnderlying: Mapped[Union[bool, None]] = mapped_column(Boolean, default=None) + + # Method: openLong + input_params_baseAmount: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + input_params_minOutput: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + # input_params_destination + # input_params_asUnderlying + + # Method: openShort + input_params_bondAmount: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + input_params_maxDeposit: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + # input_params_destination + # input_params_asUnderlying + + # Method: closeLong + input_params_maturityTime: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) + # input_params_bondAmount + # input_params_minOutput + # input_params_destination + # input_params_asUnderlying + + # Method: closeShort + # input_params_maturityTime + # input_params_bondAmount + # input_params_minOutput + # input_params_destination + # input_params_asUnderlying + + # Method: addLiquidity + # input_params_contribution + input_params_minApr: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + input_params_maxApr: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + # input_params_destination + # input_params_asUnderlying + + # Method: removeLiquidity + input_params_shares: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + # input_params_minOutput + # input_params_destination + # input_params_asUnderlying + + #### Fields from event logs #### + # Addresses in event logs + event_from: Mapped[Union[str, None]] = mapped_column(String, default=None) + event_to: Mapped[Union[str, None]] = mapped_column(String, default=None) + # args_owner + # args_spender + # args_id + event_value: Mapped[Union[Decimal, None]] = mapped_column(Numeric, default=None) + event_operator: Mapped[Union[str, None]] = 
mapped_column(String, default=None) + event_id: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) + # Fields calculated from base + event_prefix: Mapped[Union[int, None]] = mapped_column(Integer, default=None) + event_maturity_time: Mapped[Union[int, None]] = mapped_column(Numeric, default=None) + + # Fields not used by postprocessing + + # blockHash + # hash + # value + # gasPrice + # gas + # v + # r + # s + # type + # accessList + # maxPriorityFeePerGas + # maxFeePerGas + # chainId + # logIndex + # address + # cumulativeGasUsed + # contractAddress + # status + # logsBloom + # effectiveGasPrice diff --git a/lib/chainsync/chainsync/db/hyperdrive/schema_test.py b/lib/chainsync/chainsync/db/hyperdrive/schema_test.py new file mode 100644 index 0000000000..786b818021 --- /dev/null +++ b/lib/chainsync/chainsync/db/hyperdrive/schema_test.py @@ -0,0 +1,259 @@ +"""CRUD tests for Transaction""" +from datetime import datetime +from decimal import Decimal + +import pytest +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from ..base.schema import Base +from .schema import CheckpointInfo, HyperdriveTransaction, PoolConfig, PoolInfo, WalletDelta, WalletInfo + +engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing +Session = sessionmaker(bind=engine) + +# fixture arguments in test function have to be the same as the fixture name +# pylint: disable=redefined-outer-name + + +@pytest.fixture(scope="function") +def session(): + """Session fixture for tests""" + Base.metadata.create_all(engine) # create tables + session_ = Session() + yield session_ + session_.close() + Base.metadata.drop_all(engine) # drop tables + + +class TestTransactionTable: + """CRUD tests for transaction table""" + + def test_create_transaction(self, session): + """Create and entry""" + # Note: this test is using inmemory sqlite, which doesn't seem to support + # autoincrementing ids without init, whereas postgres does this with no 
issues + # Hence, we explicitly add id here + transaction = HyperdriveTransaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.2")) + session.add(transaction) + session.commit() + + retrieved_transaction = session.query(HyperdriveTransaction).filter_by(blockNumber=1).first() + assert retrieved_transaction is not None + # event_value retrieved from postgres is in Decimal, cast to float + assert float(retrieved_transaction.event_value) == 3.2 + + def test_update_transaction(self, session): + """Update an entry""" + transaction = HyperdriveTransaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.2")) + session.add(transaction) + session.commit() + + transaction.event_value = Decimal("5.0") + session.commit() + + updated_transaction = session.query(HyperdriveTransaction).filter_by(blockNumber=1).first() + # event_value retrieved from postgres is in Decimal, cast to float + assert float(updated_transaction.event_value) == 5.0 + + def test_delete_transaction(self, session): + """Delete an entry""" + transaction = HyperdriveTransaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.2")) + session.add(transaction) + session.commit() + + session.delete(transaction) + session.commit() + + deleted_transaction = session.query(HyperdriveTransaction).filter_by(blockNumber=1).first() + assert deleted_transaction is None + + +class TestCheckpointTable: + """CRUD tests for checkpoint table""" + + def test_create_checkpoint(self, session): + """Create an entry""" + # Note: this test is using inmemory sqlite, which doesn't seem to support + # autoincrementing ids without init, whereas postgres does this with no issues + # Hence, we explicitly add id here + timestamp = datetime.now() + checkpoint = CheckpointInfo(blockNumber=1, timestamp=timestamp) + session.add(checkpoint) + session.commit() + + retrieved_checkpoint = session.query(CheckpointInfo).filter_by(blockNumber=1).first() + assert retrieved_checkpoint is not None + assert
retrieved_checkpoint.timestamp == timestamp + + def test_update_checkpoint(self, session): + """Update an entry""" + timestamp = datetime.now() + checkpoint = CheckpointInfo(blockNumber=1, timestamp=timestamp) + session.add(checkpoint) + session.commit() + + checkpoint.sharePrice = Decimal("5.0") + session.commit() + + updated_checkpoint = session.query(CheckpointInfo).filter_by(blockNumber=1).first() + assert updated_checkpoint.sharePrice == 5.0 + + def test_delete_checkpoint(self, session): + """Delete an entry""" + timestamp = datetime.now() + checkpoint = CheckpointInfo(blockNumber=1, timestamp=timestamp) + session.add(checkpoint) + session.commit() + + session.delete(checkpoint) + session.commit() + + deleted_checkpoint = session.query(CheckpointInfo).filter_by(blockNumber=1).first() + assert deleted_checkpoint is None + + +class TestPoolConfigTable: + """CRUD tests for poolconfig table""" + + def test_create_pool_config(self, session): + """Create an entry""" + pool_config = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) + session.add(pool_config) + session.commit() + + retrieved_pool_config = session.query(PoolConfig).filter_by(contractAddress="0").first() + assert retrieved_pool_config is not None + assert float(retrieved_pool_config.initialSharePrice) == 3.2 + + def test_delete_pool_config(self, session): + """Delete an entry""" + pool_config = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) + session.add(pool_config) + session.commit() + + session.delete(pool_config) + session.commit() + + deleted_pool_config = session.query(PoolConfig).filter_by(contractAddress="0").first() + assert deleted_pool_config is None + + +class TestPoolInfoTable: + """CRUD tests for poolinfo table""" + + def test_create_pool_info(self, session): + """Create an entry""" + timestamp = datetime.fromtimestamp(1628472000) + pool_info = PoolInfo(blockNumber=1, timestamp=timestamp) + session.add(pool_info) + session.commit() + + 
retrieved_pool_info = session.query(PoolInfo).filter_by(blockNumber=1).first() + assert retrieved_pool_info is not None + assert retrieved_pool_info.timestamp == timestamp + + def test_update_pool_info(self, session): + """Update an entry""" + timestamp = datetime.fromtimestamp(1628472000) + pool_info = PoolInfo(blockNumber=1, timestamp=timestamp) + session.add(pool_info) + session.commit() + + # TODO: Solve this type issue. I read the sqlmypy can do this but I wasn't successful. + new_timestamp = datetime.fromtimestamp(1628472001) + pool_info.timestamp = new_timestamp  # type: ignore + session.commit() + + updated_pool_info = session.query(PoolInfo).filter_by(blockNumber=1).first() + assert updated_pool_info.timestamp == new_timestamp + + def test_delete_pool_info(self, session): + """Delete an entry""" + timestamp = datetime.fromtimestamp(1628472000) + pool_info = PoolInfo(blockNumber=1, timestamp=timestamp) + session.add(pool_info) + session.commit() + + session.delete(pool_info) + session.commit() + + deleted_pool_info = session.query(PoolInfo).filter_by(blockNumber=1).first() + assert deleted_pool_info is None + + +class TestWalletDeltaTable: + """CRUD tests for WalletDelta table""" + + def test_create_wallet_delta(self, session): + """Create an entry""" + # Note: this test is using inmemory sqlite, which doesn't seem to support + # autoincrementing ids without init, whereas postgres does this with no issues + # Hence, we explicitly add id here + wallet_delta = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) + session.add(wallet_delta) + session.commit() + + retrieved_wallet_delta = session.query(WalletDelta).filter_by(blockNumber=1).first() + assert retrieved_wallet_delta is not None + # delta retrieved from postgres is in Decimal, cast to float + assert float(retrieved_wallet_delta.delta) == 3.2 + + def test_update_wallet_delta(self, session): + """Update an entry""" + wallet_delta = WalletDelta(blockNumber=1, transactionHash="a", 
delta=Decimal("3.2")) + session.add(wallet_delta) + session.commit() + wallet_delta.delta = Decimal("5.0") + session.commit() + updated_wallet_delta = session.query(WalletDelta).filter_by(blockNumber=1).first() + # delta retrieved from postgres is in Decimal, cast to float + assert float(updated_wallet_delta.delta) == 5.0 + + def test_delete_wallet_delta(self, session): + """Delete an entry""" + wallet_delta = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) + session.add(wallet_delta) + session.commit() + session.delete(wallet_delta) + session.commit() + deleted_wallet_delta = session.query(WalletDelta).filter_by(blockNumber=1).first() + assert deleted_wallet_delta is None + + +class TestWalletInfoTable: + """CRUD tests for WalletInfo table""" + + def test_create_wallet_info(self, session): + """Create an entry""" + # Note: this test is using inmemory sqlite, which doesn't seem to support + # autoincrementing ids without init, whereas postgres does this with no issues + # Hence, we explicitly add id here + wallet_info = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) + session.add(wallet_info) + session.commit() + retrieved_wallet_info = session.query(WalletInfo).filter_by(blockNumber=1).first() + assert retrieved_wallet_info is not None + # tokenValue retrieved from postgres is in Decimal, cast to float + assert float(retrieved_wallet_info.tokenValue) == 3.2 + + def test_update_wallet_info(self, session): + """Update an entry""" + wallet_info = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) + session.add(wallet_info) + session.commit() + wallet_info.tokenValue = Decimal("5.0") + session.commit() + updated_wallet_info = session.query(WalletInfo).filter_by(blockNumber=1).first() + # tokenValue retrieved from postgres is in Decimal, cast to float + assert float(updated_wallet_info.tokenValue) == 5.0 + + def test_delete_wallet_info(self, session): + """Delete an entry""" + wallet_info = WalletInfo(blockNumber=1, 
tokenValue=Decimal("3.2")) + session.add(wallet_info) + session.commit() + session.delete(wallet_info) + session.commit() + deleted_wallet_info = session.query(WalletInfo).filter_by(blockNumber=1).first() + assert deleted_wallet_info is None diff --git a/lib/chainsync/chainsync/hyperdrive/crash_report.py b/lib/chainsync/chainsync/hyperdrive/crash_report.py deleted file mode 100644 index 3b29f74419..0000000000 --- a/lib/chainsync/chainsync/hyperdrive/crash_report.py +++ /dev/null @@ -1,93 +0,0 @@ -"""Utility function for logging bot crash reports.""" -from __future__ import annotations - -import json -import logging - -import elfpy.utils.format as format_utils -from chainsync.base import Base -from elfpy.utils import logs -from web3.exceptions import InvalidTransaction - -from .db_schema import PoolConfig, PoolInfo - - -def setup_hyperdrive_crash_report_logging(log_format_string: str | None = None) -> None: - """Create a new logging file handler with CRITICAL log level for hyperdrive crash reporting. - - In the future, a custom log level could be used specific to crash reporting. - - Arguments - --------- - log_format_string : str, optional - Logging format described in string format. - """ - logs.add_file_handler( - logger=None, # use the default root logger - log_filename="hyperdrive_crash_report.log", - log_format_string=log_format_string, - delete_previous_logs=False, - log_level=logging.CRITICAL, - ) - - -def log_hyperdrive_crash_report( - # TODO: better typing for this, an enum? - trade_type: str, - error: InvalidTransaction, - amount: float, - agent_address: str, - pool_info: PoolInfo, - pool_config: PoolConfig, -) -> None: - # pylint: disable=too-many-arguments - """Log a crash report for a hyperdrive transaction. - - Arguments - --------- - trade_type : str - The type of trade being executed. - error : TransactionError - The transaction error that occurred. - amount : float - The amount of the transaction. 
- agent_address : str - The address of the agent executing the transaction. - pool_info : PoolInfo - Information about the pool involved in the transaction. - pool_config : PoolConfig - Configuration of the pool involved in the transaction. - """ - pool_info_dict = _get_dict_from_schema(pool_info) - pool_info_dict["timestamp"] = int(pool_info.timestamp.timestamp()) - formatted_pool_info = json.dumps(pool_info_dict, indent=4) - - pool_config_dict = _get_dict_from_schema(pool_config) - formatted_pool_config = json.dumps(pool_config_dict, indent=4) - logging.critical( - """Failed to execute %s: %s\n Amount: %s\n Agent: %s\n PoolInfo: %s\n PoolConfig: %s\n""", - trade_type, - error, - format_utils.format_numeric_string(amount), - agent_address, - formatted_pool_info, - formatted_pool_config, - ) - - -def _get_dict_from_schema(db_schema: Base) -> dict: - """Convert a SqlAlcemcy Row into a dict for printing. There might be a better way to do this. - - Arguments - --------- - db_schema : Base - The database schema to convert to a dict. - - Returns - ------- - db_dict : dict - The database schema as a dict. - """ - db_dict = db_schema.__dict__ - del db_dict["_sa_instance_state"] - return db_dict diff --git a/lib/chainsync/chainsync/hyperdrive/get_hyperdrive_contract.py b/lib/chainsync/chainsync/hyperdrive/get_hyperdrive_contract.py deleted file mode 100644 index e7994f6c14..0000000000 --- a/lib/chainsync/chainsync/hyperdrive/get_hyperdrive_contract.py +++ /dev/null @@ -1,32 +0,0 @@ -"""Hyperdrive contract given some ABIs""" -from eth_utils import address -from ethpy.hyperdrive import HyperdriveAddresses -from web3 import Web3 -from web3.contract.contract import Contract - - -def get_hyperdrive_contract(web3: Web3, abis: dict, addresses: HyperdriveAddresses) -> Contract: - """Get the hyperdrive contract given abis. 
- - Arguments - --------- - web3: Web3 - web3 provider object - abis: dict - A dictionary that contains all abis keyed by the abi name, returned from `load_all_abis` - addresses: HyperdriveAddressesJson - The block number to query from the chain - - Returns - ------- - Contract - The contract object returned from the query - """ - if "IHyperdrive" not in abis: - raise AssertionError("IHyperdrive ABI was not provided") - state_abi = abis["IHyperdrive"] - # get contract instance of hyperdrive - hyperdrive_contract: Contract = web3.eth.contract( - address=address.to_checksum_address(addresses.mock_hyperdrive), abi=state_abi - ) - return hyperdrive_contract diff --git a/lib/chainsync/scripts/calc_prev_leaderboard.py b/lib/chainsync/scripts/calc_prev_leaderboard.py index 720007707a..396996323e 100644 --- a/lib/chainsync/scripts/calc_prev_leaderboard.py +++ b/lib/chainsync/scripts/calc_prev_leaderboard.py @@ -6,140 +6,19 @@ import pandas as pd from chainsync.analysis import calc_total_returns -from chainsync.base import postgres +from chainsync.dashboard import address_to_username, build_leaderboard, get_user_lookup +from chainsync.db.base import interface from dotenv import load_dotenv from sqlalchemy.sql import text # pylint: disable=invalid-name - -def get_user_lookup(agents, user_map) -> pd.DataFrame: - """Generate username to agents mapping.""" - # Usernames in postgres are bots - user_map["username"] = user_map["username"] + " (bots)" - click_map = get_click_addresses() - user_map = pd.concat([click_map, user_map], axis=0) - # Generate a lookup of users -> address, taking into account that some addresses don't have users - # Reindex looks up agent addresses against user_map, adding nans if it doesn't exist - options_map = user_map.set_index("address").reindex(agents) - # Set username as address if agent doesn't exist - na_idx = options_map["username"].isna() - # If there are any nan usernames, set address itself as username - if na_idx.any(): - options_map[na_idx] 
= options_map.index[na_idx] - return options_map.reset_index() - - -def combine_usernames(username: pd.Series) -> pd.DataFrame: - """Map usernames to a single user (e.g., combine click with bots).""" - # Hard coded mapping: - user_mapping = { - "Charles St. Louis (click)": "Charles St. Louis", - "Alim Khamisa (click)": "Alim Khamisa", - "Danny Delott (click)": "Danny Delott", - "Gregory Lisa (click)": "Gregory Lisa", - "Jonny Rhea (click)": "Jonny Rhea", - "Matt Brown (click)": "Matt Brown", - "Giovanni Effio (click)": "Giovanni Effio", - "Mihai Cosma (click)": "Mihai Cosma", - "Ryan Goree (click)": "Ryan Goree", - "Alex Towle (click)": "Alex Towle", - "Adelina Ruffolo (click)": "Adelina Ruffolo", - "Jacob Arruda (click)": "Jacob Arruda", - "Dylan Paiton (click)": "Dylan Paiton", - "Sheng Lundquist (click)": "Sheng Lundquist", - "ControlC Schmidt (click)": "ControlC Schmidt", - "George Towle (click)": "George Towle", - "Jack Burrus (click)": "Jack Burrus", - "Jordan J (click)": "Jordan J", - # Bot accounts - "slundquist (bots)": "Sheng Lundquist", - } - user_mapping = pd.DataFrame.from_dict(user_mapping, orient="index") - user_mapping.columns = ["user"] - # Use merge in case mapping doesn't exist - username_column = username.name - user = username.to_frame().merge(user_mapping, how="left", left_on=username_column, right_index=True) - return user - - -def get_leaderboard(pnl: pd.Series, lookup: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]: - """Rank users by PNL, individually and bomined across their accounts.""" - pnl = pnl.reset_index() # type: ignore - wallet_usernames = address_to_username(lookup, pnl["walletAddress"]) - pnl.insert(1, "username", wallet_usernames.values.tolist()) - # Hard coded funding provider from migration account - migration_addr = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" - # Don't show this account - pnl = pnl[pnl["walletAddress"] != migration_addr] - # Rank based on pnl - user = combine_usernames(pnl["username"]) - pnl["user"] = 
user["user"].values - ind_leaderboard = ( - pnl[["username", "walletAddress", "pnl"]] - .sort_values("pnl", ascending=False) # type: ignore - .reset_index(drop=True) - ) - comb_leaderboard = ( - pnl[["user", "pnl"]].groupby("user")["pnl"].sum().reset_index().sort_values("pnl", ascending=False) - ).reset_index(drop=True) - return (comb_leaderboard, ind_leaderboard) - - -def get_click_addresses() -> pd.DataFrame: - """Returns a dataframe of hard coded click addresses.""" - addresses = { - "0x004dfC2dBA6573fa4dFb1E86e3723e1070C0CfdE": "Charles St. Louis (click)", - "0x005182C62DA59Ff202D53d6E42Cef6585eBF9617": "Alim Khamisa (click)", - "0x005BB73FddB8CE049eE366b50d2f48763E9Dc0De": "Danny Delott (click)", - "0x0065291E64E40FF740aE833BE2F68F536A742b70": "Gregory Lisa (click)", - "0x0076b154e60BF0E9088FcebAAbd4A778deC5ce2c": "Jonny Rhea (click)", - "0x00860d89A40a5B4835a3d498fC1052De04996de6": "Matt Brown (click)", - "0x00905A77Dc202e618d15d1a04Bc340820F99d7C4": "Giovanni Effio (click)", - "0x009ef846DcbaA903464635B0dF2574CBEE66caDd": "Mihai Cosma (click)", - "0x00D5E029aFCE62738fa01EdCA21c9A4bAeabd434": "Ryan Goree (click)", - "0x020A6F562884395A7dA2be0b607Bf824546699e2": "Alex Towle (click)", - "0x020a898437E9c9DCdF3c2ffdDB94E759C0DAdFB6": "Adelina Ruffolo (click)", - "0x020b42c1E3665d14275E2823bCef737015c7f787": "Jacob Arruda (click)", - "0x02147558D39cE51e19de3A2E1e5b7c8ff2778829": "Dylan Paiton (click)", - "0x021f1Bbd2Ec870FB150bBCAdaaA1F85DFd72407C": "Sheng Lundquist (click)", - "0x02237E07b7Ac07A17E1bdEc720722cb568f22840": "ControlC Schmidt (click)", - "0x022ca016Dc7af612e9A8c5c0e344585De53E9667": "George Towle (click)", - "0x0235037B42b4c0575c2575D50D700dD558098b78": "Jack Burrus (click)", - "0x0238811B058bA876Ae5F79cFbCAcCfA1c7e67879": "Jordan J (click)", - } - addresses = pd.DataFrame.from_dict(addresses, orient="index") - addresses = addresses.reset_index() - addresses.columns = ["address", "username"] - return addresses - - -def address_to_username(lookup: 
pd.DataFrame, selected_list: pd.Series) -> pd.Series: - """Look up selected users/addrs to all addresses. - - Arguments - --------- - lookup: pd.DataFrame - The lookup dataframe from `get_user_lookup` call - selected_list: list[str] - A list of addresses to look up usernames to - - Returns - ------- - list[str] - A list of usernames based on selected_list - """ - selected_list_column = selected_list.name - out = selected_list.to_frame().merge(lookup, how="left", left_on=selected_list_column, right_on="address") - return out["username"] - - # Connect to postgres load_dotenv() # Can't use existing postgres code due to mismatch of schema # so we do direct queries here -engine = postgres.initialize_engine() +engine = interface.initialize_engine() # sql queries config_query = text("select * from poolconfig;") @@ -331,7 +210,7 @@ def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Se current_returns = calc_total_returns(config_data, pool_info_data, all_wallet_deltas)[0] assert isinstance(current_returns, pd.Series) -comb_rank, ind_rank = get_leaderboard(current_returns, user_lookup) +comb_rank, ind_rank = build_leaderboard(current_returns, user_lookup) # TODO External transfers of base is getting captured, so need to undo this # Run 1: No change needed diff --git a/lib/chainsync/scripts/prev_leaderboard_final_tally.py b/lib/chainsync/scripts/prev_leaderboard_final_tally.py index 7f08d2239e..c7d1262dc5 100644 --- a/lib/chainsync/scripts/prev_leaderboard_final_tally.py +++ b/lib/chainsync/scripts/prev_leaderboard_final_tally.py @@ -5,40 +5,7 @@ from __future__ import annotations import pandas as pd - - -def combine_usernames(username: pd.Series) -> pd.DataFrame: - """Map usernames to a single user (e.g., combine click with bots).""" - # Hard coded mapping: - user_mapping = { - "Charles St. Louis (click)": "Charles St. 
Louis", - "Alim Khamisa (click)": "Alim Khamisa", - "Danny Delott (click)": "Danny Delott", - "Gregory Lisa (click)": "Gregory Lisa", - "Jonny Rhea (click)": "Jonny Rhea", - "Matt Brown (click)": "Matt Brown", - "Giovanni Effio (click)": "Giovanni Effio", - "Mihai Cosma (click)": "Mihai Cosma", - "Ryan Goree (click)": "Ryan Goree", - "Alex Towle (click)": "Alex Towle", - "Adelina Ruffolo (click)": "Adelina Ruffolo", - "Jacob Arruda (click)": "Jacob Arruda", - "Dylan Paiton (click)": "Dylan Paiton", - "Sheng Lundquist (click)": "Sheng Lundquist", - "ControlC Schmidt (click)": "ControlC Schmidt", - "George Towle (click)": "George Towle", - "Jack Burrus (click)": "Jack Burrus", - "Jordan J (click)": "Jordan J", - # Bot accounts - "slundquist (bots)": "Sheng Lundquist", - } - user_mapping = pd.DataFrame.from_dict(user_mapping, orient="index") - user_mapping.columns = ["user"] - # Use merge in case mapping doesn't exist - username_column = username.name - user = username.to_frame().merge(user_mapping, how="left", left_on=username_column, right_index=True) - return user - +from chainsync.dashboard import combine_usernames comb_rank_csvs = [ "../comb_rank_devnet_test.csv", diff --git a/lib/chainsync/tests/base/test_db_utils.py b/lib/chainsync/tests/base/test_db_utils.py deleted file mode 100644 index f5a7465f15..0000000000 --- a/lib/chainsync/tests/base/test_db_utils.py +++ /dev/null @@ -1,59 +0,0 @@ -"""CRUD tests for CheckpointInfo""" -import numpy as np -import pytest -from chainsync.base import postgres -from sqlalchemy import String, create_engine -from sqlalchemy.orm import DeclarativeBase, Mapped, MappedAsDataclass, mapped_column, sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name, too-few-public-methods - - -class Based(MappedAsDataclass, DeclarativeBase): - 
"""Base class to subclass from to define the schema""" - - -class Very(Based): - """Dummy but very sincere table schema.""" - - __tablename__ = "verybased" - - key: Mapped[str] = mapped_column(String, primary_key=True) - - -class DropMe(Based): - """Dummy table schema that wants to be dropped.""" - - __tablename__ = "dropme" - - key: Mapped[str] = mapped_column(String, primary_key=True) - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Based.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Based.metadata.drop_all(engine) # drop tables - - -def test_query_tables(session): - """Return a list of tables in the database.""" - table_names = postgres.query_tables(session) - session.commit() - - np.testing.assert_array_equal(table_names, ["dropme", "verybased"]) - - -def test_drop_table(session): - """Drop a table from the database.""" - postgres.drop_table(session, "dropme") - table_names = postgres.query_tables(session) - session.commit() - - np.testing.assert_array_equal(table_names, ["verybased"]) diff --git a/lib/chainsync/tests/base/test_transaction.py b/lib/chainsync/tests/base/test_transaction.py deleted file mode 100644 index 0f4b496085..0000000000 --- a/lib/chainsync/tests/base/test_transaction.py +++ /dev/null @@ -1,118 +0,0 @@ -"""CRUD tests for Transaction""" -from decimal import Decimal - -import numpy as np -import pytest -from chainsync.base import Base, Transaction, postgres -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield 
session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestTransactionTable: - """CRUD tests for transaction table""" - - def test_create_transaction(self, session): - """Create and entry""" - # Note: this test is using inmemory sqlite, which doesn't seem to support - # autoincrementing ids without init, whereas postgres does this with no issues - # Hence, we explicitly add id here - transaction = Transaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.2")) - session.add(transaction) - session.commit() - - retrieved_transaction = session.query(Transaction).filter_by(blockNumber=1).first() - assert retrieved_transaction is not None - # event_value retreieved from postgres is in Decimal, cast to float - assert float(retrieved_transaction.event_value) == 3.2 - - def test_update_transaction(self, session): - """Update an entry""" - transaction = Transaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.2")) - session.add(transaction) - session.commit() - - transaction.event_value = Decimal("5.0") - session.commit() - - updated_transaction = session.query(Transaction).filter_by(blockNumber=1).first() - # event_value retreieved from postgres is in Decimal, cast to float - assert float(updated_transaction.event_value) == 5.0 - - def test_delete_transaction(self, session): - """Delete an entry""" - transaction = Transaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.2")) - session.add(transaction) - session.commit() - - session.delete(transaction) - session.commit() - - deleted_transaction = session.query(Transaction).filter_by(blockNumber=1).first() - assert deleted_transaction is None - - -class TestTransactionInterface: - """Testing postgres interface for transaction table""" - - def test_latest_block_number(self, session): - """Testing retrevial of transaction via interface""" - transaction_1 = Transaction(blockNumber=1, transactionHash="a", event_value=Decimal("3.0")) - 
postgres.add_transactions([transaction_1], session) - - latest_block_number = postgres.get_latest_block_number_from_table(Transaction, session) - assert latest_block_number == 1 - - transaction_2 = Transaction(blockNumber=2, transactionHash="b", event_value=Decimal("3.2")) - transaction_3 = Transaction(blockNumber=3, transactionHash="c", event_value=Decimal("3.4")) - postgres.add_transactions([transaction_2, transaction_3], session) - - latest_block_number = postgres.get_latest_block_number_from_table(Transaction, session) - assert latest_block_number == 3 - - def test_get_transactions(self, session): - """Testing retrevial of transactions via interface""" - transaction_1 = Transaction(blockNumber=0, transactionHash="a", event_value=Decimal("3.1")) - transaction_2 = Transaction(blockNumber=1, transactionHash="b", event_value=Decimal("3.2")) - transaction_3 = Transaction(blockNumber=2, transactionHash="c", event_value=Decimal("3.3")) - postgres.add_transactions([transaction_1, transaction_2, transaction_3], session) - - transactions_df = postgres.get_transactions(session) - np.testing.assert_array_equal(transactions_df["event_value"], [3.1, 3.2, 3.3]) - - def test_block_query_transactions(self, session): - """Testing querying by block number of transactions via interface""" - transaction_1 = Transaction(blockNumber=0, transactionHash="a", event_value=Decimal("3.1")) - transaction_2 = Transaction(blockNumber=1, transactionHash="b", event_value=Decimal("3.2")) - transaction_3 = Transaction(blockNumber=2, transactionHash="c", event_value=Decimal("3.3")) - postgres.add_transactions([transaction_1, transaction_2, transaction_3], session) - - transactions_df = postgres.get_transactions(session, start_block=1) - np.testing.assert_array_equal(transactions_df["event_value"], [3.2, 3.3]) - - transactions_df = postgres.get_transactions(session, start_block=-1) - np.testing.assert_array_equal(transactions_df["event_value"], [3.3]) - - transactions_df = 
postgres.get_transactions(session, end_block=1) - np.testing.assert_array_equal(transactions_df["event_value"], [3.1]) - - transactions_df = postgres.get_transactions(session, end_block=-1) - np.testing.assert_array_equal(transactions_df["event_value"], [3.1, 3.2]) - - transactions_df = postgres.get_transactions(session, start_block=1, end_block=-1) - np.testing.assert_array_equal(transactions_df["event_value"], [3.2]) diff --git a/lib/chainsync/tests/base/test_user_map.py b/lib/chainsync/tests/base/test_user_map.py deleted file mode 100644 index 90662fda4b..0000000000 --- a/lib/chainsync/tests/base/test_user_map.py +++ /dev/null @@ -1,132 +0,0 @@ -"""CRUD tests for UserMap""" -import numpy as np -import pytest -from chainsync.base import Base, UserMap, postgres -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestUserMapTable: - """CRUD tests for UserMap table""" - - def test_create_user_map(self, session): - """Create and entry""" - # Note: this test is using inmemory sqlite, which doesn't seem to support - # autoincrementing ids without init, whereas postgres does this with no issues - # Hence, we explicitly add id here - user_map = UserMap(address="1", username="a") - session.add(user_map) - session.commit() - - retrieved_user_map = session.query(UserMap).filter_by(address="1").first() - assert retrieved_user_map is not None - assert retrieved_user_map.username == "a" - - def test_update_user_map(self, session): - """Update an entry""" - 
user_map = UserMap(address="1", username="a") - session.add(user_map) - session.commit() - - user_map.username = "b" - session.commit() - - updated_user_map = session.query(UserMap).filter_by(address="1").first() - # tokenValue retreieved from postgres is in Decimal, cast to float - assert updated_user_map.username == "b" - - def test_delete_user_map(self, session): - """Delete an entry""" - user_map = UserMap(address="1", username="a") - session.add(user_map) - session.commit() - - session.delete(user_map) - session.commit() - - deleted_user_map = session.query(UserMap).filter_by(address="1").first() - assert deleted_user_map is None - - -class TestUserMapInterface: - """Testing postgres interface for usermap table""" - - def test_get_user_map(self, session): - """Testing retrevial of usermap via interface""" - username_1 = "a" - addresses_1 = ["1", "2", "3"] - postgres.add_user_map(username=username_1, addresses=addresses_1, session=session) - username_2 = "b" - addresses_2 = ["4", "5"] - postgres.add_user_map(username=username_2, addresses=addresses_2, session=session) - - # This is in order of insertion - user_map_df = postgres.get_user_map(session) - assert len(user_map_df) == 5 - np.testing.assert_array_equal(user_map_df["username"], ["a", "a", "a", "b", "b"]) - np.testing.assert_array_equal(user_map_df["address"], ["1", "2", "3", "4", "5"]) - - def test_get_query_user_map(self, session): - """Testing querying by block number of user map via interface""" - username_1 = "a" - addresses_1 = ["1", "2", "3"] - postgres.add_user_map(username=username_1, addresses=addresses_1, session=session) - username_2 = "b" - addresses_2 = ["4", "5"] - postgres.add_user_map(username=username_2, addresses=addresses_2, session=session) - - user_map_df = postgres.get_user_map(session, address="1") - np.testing.assert_array_equal(user_map_df["username"], ["a"]) - user_map_df = postgres.get_user_map(session, address="2") - np.testing.assert_array_equal(user_map_df["username"], 
["a"]) - user_map_df = postgres.get_user_map(session, address="3") - np.testing.assert_array_equal(user_map_df["username"], ["a"]) - user_map_df = postgres.get_user_map(session, address="4") - np.testing.assert_array_equal(user_map_df["username"], ["b"]) - user_map_df = postgres.get_user_map(session, address="5") - np.testing.assert_array_equal(user_map_df["username"], ["b"]) - - def test_user_map_insertion_error(self, session): - """Testing retrevial of usermap via interface""" - username_1 = "a" - addresses_1 = ["1", "2", "3"] - postgres.add_user_map(username=username_1, addresses=addresses_1, session=session) - - # Adding the same addresses with the same username should pass - username_2 = "a" - addresses_2 = ["1", "2", "5"] - postgres.add_user_map(username=username_2, addresses=addresses_2, session=session) - - user_map_df = postgres.get_user_map(session) - assert len(user_map_df) == 4 - np.testing.assert_array_equal(user_map_df["username"], ["a", "a", "a", "a"]) - np.testing.assert_array_equal(user_map_df["address"], ["1", "2", "3", "5"]) - - # Adding the same addresses with different username should fail - username_3 = "b" - addresses_3 = ["6", "1", "2", "4"] - with pytest.raises(ValueError): - postgres.add_user_map(username=username_3, addresses=addresses_3, session=session) - - # Final db values shouldn't change - user_map_df = postgres.get_user_map(session) - user_map_df = postgres.get_user_map(session) - assert len(user_map_df) == 4 - np.testing.assert_array_equal(user_map_df["username"], ["a", "a", "a", "a"]) - np.testing.assert_array_equal(user_map_df["address"], ["1", "2", "3", "5"]) diff --git a/lib/chainsync/tests/hyperdrive/test_checkpoint.py b/lib/chainsync/tests/hyperdrive/test_checkpoint.py deleted file mode 100644 index 168a077f1e..0000000000 --- a/lib/chainsync/tests/hyperdrive/test_checkpoint.py +++ /dev/null @@ -1,128 +0,0 @@ -"""CRUD tests for CheckpointInfo""" -from datetime import datetime -from decimal import Decimal - -import numpy as np 
-import pytest -from chainsync.base import Base, get_latest_block_number_from_table -from chainsync.hyperdrive import CheckpointInfo, add_checkpoint_infos, get_checkpoint_info -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=invalid-name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestCheckpointTable: - """CRUD tests for checkpoint table""" - - def test_create_checkpoint(self, session): - """Create and entry""" - # Note: this test is using inmemory sqlite, which doesn't seem to support - # autoincrementing ids without init, whereas postgres does this with no issues - # Hence, we explicitly add id here - timestamp = datetime.now() - checkpoint = CheckpointInfo(blockNumber=1, timestamp=timestamp) - add_checkpoint_infos([checkpoint], session) - session.commit() - - retrieved_checkpoint = session.query(CheckpointInfo).filter_by(blockNumber=1).first() - assert retrieved_checkpoint is not None - assert retrieved_checkpoint.timestamp == timestamp - - def test_update_checkpoint(self, session): - """Update an entry""" - timestamp = datetime.now() - checkpoint = CheckpointInfo(blockNumber=1, timestamp=timestamp) - add_checkpoint_infos([checkpoint], session) - session.commit() - - checkpoint.sharePrice = Decimal("5.0") - session.commit() - - updated_checkpoint = session.query(CheckpointInfo).filter_by(blockNumber=1).first() - assert updated_checkpoint.sharePrice == 5.0 - - def test_delete_checkpoint(self, session): - """Delete an entry""" - timestamp = datetime.now() - 
checkpoint = CheckpointInfo(blockNumber=1, timestamp=timestamp) - add_checkpoint_infos([checkpoint], session) - session.commit() - - session.delete(checkpoint) - session.commit() - - deleted_checkpoint = session.query(CheckpointInfo).filter_by(blockNumber=1).first() - assert deleted_checkpoint is None - - -class TestCheckpointInterface: - """Testing postgres interface for checkpoint table""" - - def test_latest_block_number(self, session): - """Testing retrevial of checkpoint via interface""" - checkpoint_1 = CheckpointInfo(blockNumber=1, timestamp=datetime.now()) - add_checkpoint_infos([checkpoint_1], session) - session.commit() - - latest_block_number = get_latest_block_number_from_table(CheckpointInfo, session) - assert latest_block_number == 1 - - checkpoint_2 = CheckpointInfo(blockNumber=2, timestamp=datetime.now()) - checkpoint_3 = CheckpointInfo(blockNumber=3, timestamp=datetime.now()) - add_checkpoint_infos([checkpoint_2, checkpoint_3], session) - - latest_block_number = get_latest_block_number_from_table(CheckpointInfo, session) - assert latest_block_number == 3 - - def test_get_checkpoints(self, session): - """Testing retrevial of checkpoints via interface""" - date_1 = datetime(1945, 8, 6) - date_2 = datetime(1984, 8, 9) - date_3 = datetime(2001, 9, 11) - checkpoint_1 = CheckpointInfo(blockNumber=0, timestamp=date_1) - checkpoint_2 = CheckpointInfo(blockNumber=1, timestamp=date_2) - checkpoint_3 = CheckpointInfo(blockNumber=2, timestamp=date_3) - add_checkpoint_infos([checkpoint_1, checkpoint_2, checkpoint_3], session) - - checkpoints_df = get_checkpoint_info(session) - np.testing.assert_array_equal( - checkpoints_df["timestamp"].dt.to_pydatetime(), np.array([date_1, date_2, date_3]) - ) - - def test_block_query_checkpoints(self, session): - """Testing querying by block number of checkpoints via interface""" - checkpoint_1 = CheckpointInfo(blockNumber=0, timestamp=datetime.now(), sharePrice=Decimal("3.1")) - checkpoint_2 = CheckpointInfo(blockNumber=1, 
timestamp=datetime.now(), sharePrice=Decimal("3.2")) - checkpoint_3 = CheckpointInfo(blockNumber=2, timestamp=datetime.now(), sharePrice=Decimal("3.3")) - add_checkpoint_infos([checkpoint_1, checkpoint_2, checkpoint_3], session) - - checkpoints_df = get_checkpoint_info(session, start_block=1) - np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.2, 3.3]) - - checkpoints_df = get_checkpoint_info(session, start_block=-1) - np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.3]) - - checkpoints_df = get_checkpoint_info(session, end_block=1) - np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.1]) - - checkpoints_df = get_checkpoint_info(session, end_block=-1) - np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.1, 3.2]) - - checkpoints_df = get_checkpoint_info(session, start_block=1, end_block=-1) - np.testing.assert_array_equal(checkpoints_df["sharePrice"], [3.2]) diff --git a/lib/chainsync/tests/hyperdrive/test_crash_report.py b/lib/chainsync/tests/hyperdrive/test_crash_report.py deleted file mode 100644 index 2fe50ff769..0000000000 --- a/lib/chainsync/tests/hyperdrive/test_crash_report.py +++ /dev/null @@ -1,43 +0,0 @@ -"""Testing for crash report logging.""" -from __future__ import annotations - -import logging -import unittest -from datetime import datetime - -import elfpy.utils.logs as log_utils -from chainsync.hyperdrive import ( - PoolConfig, - PoolInfo, - log_hyperdrive_crash_report, - setup_hyperdrive_crash_report_logging, -) -from elfpy.simulators.smulation_config import SimulationConfig -from elfpy.utils import sim_utils -from web3.exceptions import InvalidTransaction - - -class TestCrashReport(unittest.TestCase): - """Run the tests.""" - - def test_hyperdrive_crash_report_logging(self): - """Tests hyperdrive crash report logging.""" - setup_hyperdrive_crash_report_logging() - config = SimulationConfig() - config.pricing_model_name = "Yieldspace" - config.num_trading_days = 3 - config.num_blocks_per_day = 3 - 
config.variable_apr = [0.05] * config.num_trading_days - simulator = sim_utils.get_simulator(config) # initialize - simulator.run_simulation() # run - - self.assertLogs(level=logging.CRITICAL) - log_hyperdrive_crash_report( - "CLOSE_LONG", - InvalidTransaction("Message"), - 1.23, - "0x0000000000000000000000000000000000000000", - PoolInfo(blockNumber=1234, timestamp=datetime.fromtimestamp(12345678)), - PoolConfig(contractAddress="0x0000000000000000000000000000000000000000"), - ) - log_utils.close_logging() diff --git a/lib/chainsync/tests/hyperdrive/test_pool_config.py b/lib/chainsync/tests/hyperdrive/test_pool_config.py deleted file mode 100644 index 56323ebeb2..0000000000 --- a/lib/chainsync/tests/hyperdrive/test_pool_config.py +++ /dev/null @@ -1,103 +0,0 @@ -"""CRUD tests for PoolConfig""" -from decimal import Decimal - -import numpy as np -import pytest -from chainsync.base import Base -from chainsync.hyperdrive import PoolConfig, add_pool_config, get_pool_config -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestPoolConfigTable: - """CRUD tests for poolconfig table""" - - def test_create_pool_config(self, session): - """Create and entry""" - pool_config = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) - session.add(pool_config) - session.commit() - - retrieved_pool_config = session.query(PoolConfig).filter_by(contractAddress="0").first() - assert retrieved_pool_config is not None - assert 
float(retrieved_pool_config.initialSharePrice) == 3.2 - - def test_delete_pool_config(self, session): - """Delete an entry""" - pool_config = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) - session.add(pool_config) - session.commit() - - session.delete(pool_config) - session.commit() - - deleted_pool_config = session.query(PoolConfig).filter_by(contractAddress="0").first() - assert deleted_pool_config is None - - -class TestPoolConfigInterface: - """Testing postgres interface for poolconfig table""" - - def test_get_pool_config(self, session): - """Testing retrevial of pool config via interface""" - pool_config_1 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) - add_pool_config(pool_config_1, session) - - pool_config_df_1 = get_pool_config(session, coerce_float=False) - assert len(pool_config_df_1) == 1 - assert pool_config_df_1.loc[0, "initialSharePrice"] == Decimal("3.2") - - pool_config_2 = PoolConfig(contractAddress="1", initialSharePrice=Decimal("3.4")) - add_pool_config(pool_config_2, session) - - pool_config_df_2 = get_pool_config(session, coerce_float=False) - assert len(pool_config_df_2) == 2 - np.testing.assert_array_equal(pool_config_df_2["initialSharePrice"], np.array([Decimal("3.2"), Decimal("3.4")])) - - def test_primary_id_query_pool_config(self, session): - """Testing retrevial of pool config via interface""" - pool_config = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) - add_pool_config(pool_config, session) - - pool_config_df_1 = get_pool_config(session, contract_address="0", coerce_float=False) - assert len(pool_config_df_1) == 1 - assert pool_config_df_1.loc[0, "initialSharePrice"] == Decimal("3.2") - - pool_config_df_2 = get_pool_config(session, contract_address="1", coerce_float=False) - assert len(pool_config_df_2) == 0 - - def test_pool_config_verify(self, session): - """Testing retrevial of pool config via interface""" - pool_config_1 = PoolConfig(contractAddress="0", 
initialSharePrice=Decimal("3.2")) - add_pool_config(pool_config_1, session) - pool_config_df_1 = get_pool_config(session, coerce_float=False) - assert len(pool_config_df_1) == 1 - assert pool_config_df_1.loc[0, "initialSharePrice"] == Decimal("3.2") - - # Nothing should happen if we give the same pool_config - pool_config_2 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.2")) - add_pool_config(pool_config_2, session) - pool_config_df_2 = get_pool_config(session, coerce_float=False) - assert len(pool_config_df_2) == 1 - assert pool_config_df_2.loc[0, "initialSharePrice"] == Decimal("3.2") - - # If we try to add another pool config with a different value, should throw a ValueError - pool_config_3 = PoolConfig(contractAddress="0", initialSharePrice=Decimal("3.4")) - with pytest.raises(ValueError): - add_pool_config(pool_config_3, session) diff --git a/lib/chainsync/tests/hyperdrive/test_pool_info.py b/lib/chainsync/tests/hyperdrive/test_pool_info.py deleted file mode 100644 index 5dba0ac4a0..0000000000 --- a/lib/chainsync/tests/hyperdrive/test_pool_info.py +++ /dev/null @@ -1,129 +0,0 @@ -"""CRUD tests for PoolInfo""" -from datetime import datetime - -import numpy as np -import pytest -from chainsync.base import Base -from chainsync.hyperdrive import PoolInfo, add_pool_infos, get_latest_block_number_from_pool_info_table, get_pool_info -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestPoolInfoTable: - """CRUD tests for poolinfo 
table""" - - def test_create_pool_info(self, session): - """Create and entry""" - timestamp = datetime.fromtimestamp(1628472000) - pool_info = PoolInfo(blockNumber=1, timestamp=timestamp) - session.add(pool_info) - session.commit() - - retrieved_pool_info = session.query(PoolInfo).filter_by(blockNumber=1).first() - assert retrieved_pool_info is not None - assert retrieved_pool_info.timestamp == timestamp - - def test_update_pool_info(self, session): - """Update an entry""" - timestamp = datetime.fromtimestamp(1628472000) - pool_info = PoolInfo(blockNumber=1, timestamp=timestamp) - session.add(pool_info) - session.commit() - - # TODO: Solve this type issue. I read the sqlmypy can do this but I wasn't successful. - new_timestamp = datetime.fromtimestamp(1628472001) - pool_info.timestamp = new_timestamp # type: ignore - session.commit() - - updated_pool_info = session.query(PoolInfo).filter_by(blockNumber=1).first() - assert updated_pool_info.timestamp == new_timestamp - - def test_delete_pool_info(self, session): - """Delete an entry""" - timestamp = datetime.fromtimestamp(1628472000) - pool_info = PoolInfo(blockNumber=1, timestamp=timestamp) - session.add(pool_info) - session.commit() - - session.delete(pool_info) - session.commit() - - deleted_pool_info = session.query(PoolInfo).filter_by(blockNumber=1).first() - assert deleted_pool_info is None - - -class TestPoolInfoInterface: - """Testing postgres interface for poolinfo table""" - - def test_latest_block_number(self, session): - """Testing latest block number call""" - timestamp_1 = datetime.fromtimestamp(1628472000) - pool_info_1 = PoolInfo(blockNumber=1, timestamp=timestamp_1) - add_pool_infos([pool_info_1], session) - - latest_block_number = get_latest_block_number_from_pool_info_table(session) - assert latest_block_number == 1 - - timestamp_1 = datetime.fromtimestamp(1628472002) - pool_info_1 = PoolInfo(blockNumber=2, timestamp=timestamp_1) - timestamp_2 = datetime.fromtimestamp(1628472004) - pool_info_2 = 
PoolInfo(blockNumber=3, timestamp=timestamp_2) - add_pool_infos([pool_info_1, pool_info_2], session) - - latest_block_number = get_latest_block_number_from_pool_info_table(session) - assert latest_block_number == 3 - - def test_get_pool_info(self, session): - """Testing retrevial of pool info via interface""" - timestamp_1 = datetime.fromtimestamp(1628472000) - pool_info_1 = PoolInfo(blockNumber=0, timestamp=timestamp_1) - timestamp_2 = datetime.fromtimestamp(1628472002) - pool_info_2 = PoolInfo(blockNumber=1, timestamp=timestamp_2) - timestamp_3 = datetime.fromtimestamp(1628472004) - pool_info_3 = PoolInfo(blockNumber=2, timestamp=timestamp_3) - add_pool_infos([pool_info_1, pool_info_2, pool_info_3], session) - - pool_info_df = get_pool_info(session) - np.testing.assert_array_equal( - pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_1, timestamp_2, timestamp_3]) - ) - - def test_block_query_pool_info(self, session): - """Testing retrevial of pool info via interface""" - timestamp_1 = datetime.fromtimestamp(1628472000) - pool_info_1 = PoolInfo(blockNumber=0, timestamp=timestamp_1) - timestamp_2 = datetime.fromtimestamp(1628472002) - pool_info_2 = PoolInfo(blockNumber=1, timestamp=timestamp_2) - timestamp_3 = datetime.fromtimestamp(1628472004) - pool_info_3 = PoolInfo(blockNumber=2, timestamp=timestamp_3) - add_pool_infos([pool_info_1, pool_info_2, pool_info_3], session) - pool_info_df = get_pool_info(session, start_block=1) - np.testing.assert_array_equal( - pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_2, timestamp_3]) - ) - pool_info_df = get_pool_info(session, start_block=-1) - np.testing.assert_array_equal(pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_3])) - pool_info_df = get_pool_info(session, end_block=1) - np.testing.assert_array_equal(pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_1])) - pool_info_df = get_pool_info(session, end_block=-1) - np.testing.assert_array_equal( - 
pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_1, timestamp_2]) - ) - pool_info_df = get_pool_info(session, start_block=1, end_block=-1) - np.testing.assert_array_equal(pool_info_df["timestamp"].dt.to_pydatetime(), np.array([timestamp_2])) diff --git a/lib/chainsync/tests/hyperdrive/test_wallet_deltas.py b/lib/chainsync/tests/hyperdrive/test_wallet_deltas.py deleted file mode 100644 index 1206f19ac7..0000000000 --- a/lib/chainsync/tests/hyperdrive/test_wallet_deltas.py +++ /dev/null @@ -1,106 +0,0 @@ -"""CRUD tests for WalletDelta""" -from decimal import Decimal - -import numpy as np -import pytest -from chainsync.base import Base, get_latest_block_number_from_table -from chainsync.hyperdrive import WalletDelta, add_wallet_deltas, get_wallet_deltas -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestWalletDeltaTable: - """CRUD tests for WalletDelta table""" - - def test_create_wallet_delta(self, session): - """Create and entry""" - # Note: this test is using inmemory sqlite, which doesn't seem to support - # autoincrementing ids without init, whereas postgres does this with no issues - # Hence, we explicitly add id here - wallet_delta = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) - session.add(wallet_delta) - session.commit() - - retrieved_wallet_delta = session.query(WalletDelta).filter_by(blockNumber=1).first() - assert retrieved_wallet_delta is not None - # toekValue retreieved from postgres is 
in Decimal, cast to float - assert float(retrieved_wallet_delta.delta) == 3.2 - - def test_update_wallet_delta(self, session): - """Update an entry""" - wallet_delta = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) - session.add(wallet_delta) - session.commit() - wallet_delta.delta = Decimal("5.0") - session.commit() - updated_wallet_delta = session.query(WalletDelta).filter_by(blockNumber=1).first() - # delta retreieved from postgres is in Decimal, cast to float - assert float(updated_wallet_delta.delta) == 5.0 - - def test_delete_wallet_delta(self, session): - """Delete an entry""" - wallet_delta = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) - session.add(wallet_delta) - session.commit() - session.delete(wallet_delta) - session.commit() - deleted_wallet_delta = session.query(WalletDelta).filter_by(blockNumber=1).first() - assert deleted_wallet_delta is None - - -class TestWalletDeltaInterface: - """Testing postgres interface for walletinfo table""" - - def test_latest_block_number(self, session): - """Testing retrevial of wallet info via interface""" - wallet_delta_1 = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.0")) - add_wallet_deltas([wallet_delta_1], session) - latest_block_number = get_latest_block_number_from_table(WalletDelta, session) - assert latest_block_number == 1 - wallet_delta_2 = WalletDelta(blockNumber=2, transactionHash="a", delta=Decimal("3.2")) - wallet_delta_3 = WalletDelta(blockNumber=3, transactionHash="a", delta=Decimal("3.4")) - add_wallet_deltas([wallet_delta_2, wallet_delta_3], session) - latest_block_number = get_latest_block_number_from_table(WalletDelta, session) - assert latest_block_number == 3 - - def test_get_wallet_delta(self, session): - """Testing retrevial of walletinfo via interface""" - wallet_delta_1 = WalletDelta(blockNumber=0, transactionHash="a", delta=Decimal("3.1")) - wallet_delta_2 = WalletDelta(blockNumber=1, transactionHash="a", 
delta=Decimal("3.2")) - wallet_delta_3 = WalletDelta(blockNumber=2, transactionHash="a", delta=Decimal("3.3")) - add_wallet_deltas([wallet_delta_1, wallet_delta_2, wallet_delta_3], session) - wallet_delta_df = get_wallet_deltas(session) - np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.1, 3.2, 3.3])) - - def test_block_query_wallet_delta(self, session): - """Testing querying by block number of wallet info via interface""" - wallet_delta_1 = WalletDelta(blockNumber=0, transactionHash="a", delta=Decimal("3.1")) - wallet_delta_2 = WalletDelta(blockNumber=1, transactionHash="a", delta=Decimal("3.2")) - wallet_delta_3 = WalletDelta(blockNumber=2, transactionHash="a", delta=Decimal("3.3")) - add_wallet_deltas([wallet_delta_1, wallet_delta_2, wallet_delta_3], session) - wallet_delta_df = get_wallet_deltas(session, start_block=1) - np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.2, 3.3])) - wallet_delta_df = get_wallet_deltas(session, start_block=-1) - np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.3])) - wallet_delta_df = get_wallet_deltas(session, end_block=1) - np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.1])) - wallet_delta_df = get_wallet_deltas(session, end_block=-1) - np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.1, 3.2])) - wallet_delta_df = get_wallet_deltas(session, start_block=1, end_block=-1) - np.testing.assert_array_equal(wallet_delta_df["delta"], np.array([3.2])) diff --git a/lib/chainsync/tests/hyperdrive/test_wallet_info.py b/lib/chainsync/tests/hyperdrive/test_wallet_info.py deleted file mode 100644 index 8d2ecc8625..0000000000 --- a/lib/chainsync/tests/hyperdrive/test_wallet_info.py +++ /dev/null @@ -1,131 +0,0 @@ -"""CRUD tests for WalletInfo""" -from decimal import Decimal - -import numpy as np -import pytest -from chainsync.base import Base, get_latest_block_number_from_table -from chainsync.hyperdrive import WalletInfo, add_wallet_infos, 
get_agents, get_all_wallet_info, get_current_wallet_info -from sqlalchemy import create_engine -from sqlalchemy.orm import sessionmaker - -engine = create_engine("sqlite:///:memory:") # in-memory SQLite database for testing -Session = sessionmaker(bind=engine) - -# fixture arguments in test function have to be the same as the fixture name -# pylint: disable=redefined-outer-name - - -@pytest.fixture(scope="function") -def session(): - """Session fixture for tests""" - Base.metadata.create_all(engine) # create tables - session_ = Session() - yield session_ - session_.close() - Base.metadata.drop_all(engine) # drop tables - - -class TestWalletInfoTable: - """CRUD tests for WalletInfo table""" - - def test_create_wallet_info(self, session): - """Create and entry""" - # Note: this test is using inmemory sqlite, which doesn't seem to support - # autoincrementing ids without init, whereas postgres does this with no issues - # Hence, we explicitly add id here - wallet_info = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) - session.add(wallet_info) - session.commit() - retrieved_wallet_info = session.query(WalletInfo).filter_by(blockNumber=1).first() - assert retrieved_wallet_info is not None - # toekValue retreieved from postgres is in Decimal, cast to float - assert float(retrieved_wallet_info.tokenValue) == 3.2 - - def test_update_wallet_info(self, session): - """Update an entry""" - wallet_info = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) - session.add(wallet_info) - session.commit() - wallet_info.tokenValue = Decimal("5.0") - session.commit() - updated_wallet_info = session.query(WalletInfo).filter_by(blockNumber=1).first() - # tokenValue retreieved from postgres is in Decimal, cast to float - assert float(updated_wallet_info.tokenValue) == 5.0 - - def test_delete_wallet_info(self, session): - """Delete an entry""" - wallet_info = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) - session.add(wallet_info) - session.commit() - 
session.delete(wallet_info) - session.commit() - deleted_wallet_info = session.query(WalletInfo).filter_by(blockNumber=1).first() - assert deleted_wallet_info is None - - -class TestWalletInfoInterface: - """Testing postgres interface for walletinfo table""" - - def test_latest_block_number(self, session): - """Testing retrevial of wallet info via interface""" - wallet_info_1 = WalletInfo(blockNumber=1, tokenValue=Decimal("3.0")) - add_wallet_infos([wallet_info_1], session) - latest_block_number = get_latest_block_number_from_table(WalletInfo, session) - assert latest_block_number == 1 - wallet_info_2 = WalletInfo(blockNumber=2, tokenValue=Decimal("3.2")) - wallet_info_3 = WalletInfo(blockNumber=3, tokenValue=Decimal("3.4")) - add_wallet_infos([wallet_info_2, wallet_info_3], session) - latest_block_number = get_latest_block_number_from_table(WalletInfo, session) - assert latest_block_number == 3 - - def test_get_wallet_info(self, session): - """Testing retrevial of walletinfo via interface""" - wallet_info_1 = WalletInfo(blockNumber=0, tokenValue=Decimal("3.1")) - wallet_info_2 = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) - wallet_info_3 = WalletInfo(blockNumber=2, tokenValue=Decimal("3.3")) - add_wallet_infos([wallet_info_1, wallet_info_2, wallet_info_3], session) - wallet_info_df = get_all_wallet_info(session) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.1, 3.2, 3.3])) - - def test_block_query_wallet_info(self, session): - """Testing querying by block number of wallet info via interface""" - wallet_info_1 = WalletInfo(blockNumber=0, tokenValue=Decimal("3.1")) - wallet_info_2 = WalletInfo(blockNumber=1, tokenValue=Decimal("3.2")) - wallet_info_3 = WalletInfo(blockNumber=2, tokenValue=Decimal("3.3")) - add_wallet_infos([wallet_info_1, wallet_info_2, wallet_info_3], session) - wallet_info_df = get_all_wallet_info(session, start_block=1) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.2, 3.3])) - 
wallet_info_df = get_all_wallet_info(session, start_block=-1) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.3])) - wallet_info_df = get_all_wallet_info(session, end_block=1) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.1])) - wallet_info_df = get_all_wallet_info(session, end_block=-1) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.1, 3.2])) - wallet_info_df = get_all_wallet_info(session, start_block=1, end_block=-1) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], np.array([3.2])) - - def test_current_wallet_info(self, session): - """Testing helper function to get current wallet values""" - wallet_info_1 = WalletInfo(blockNumber=0, walletAddress="addr", tokenType="BASE", tokenValue=Decimal("3.1")) - wallet_info_2 = WalletInfo(blockNumber=1, walletAddress="addr", tokenType="LP", tokenValue=Decimal("5.1")) - add_wallet_infos([wallet_info_1, wallet_info_2], session) - wallet_info_df = get_current_wallet_info(session).reset_index() - np.testing.assert_array_equal(wallet_info_df["tokenType"], ["BASE", "LP"]) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], [3.1, 5.1]) - # E.g., block 2, wallet base tokens gets updated to 6.1 - wallet_info_3 = WalletInfo(blockNumber=2, walletAddress="addr", tokenType="BASE", tokenValue=Decimal("6.1")) - add_wallet_infos([wallet_info_3], session) - wallet_info_df = get_current_wallet_info(session).reset_index() - np.testing.assert_array_equal(wallet_info_df["tokenType"], ["BASE", "LP"]) - np.testing.assert_array_equal(wallet_info_df["tokenValue"], [6.1, 5.1]) - - def test_get_agents(self, session): - """Testing helper function to get current wallet values""" - wallet_info_1 = WalletInfo(blockNumber=0, walletAddress="addr_1") - wallet_info_2 = WalletInfo(blockNumber=1, walletAddress="addr_1") - wallet_info_3 = WalletInfo(blockNumber=2, walletAddress="addr_2") - add_wallet_infos([wallet_info_1, wallet_info_2, wallet_info_3], 
session) - agents = get_agents(session) - assert len(agents) == 2 - assert "addr_1" in agents - assert "addr_2" in agents diff --git a/lib/ethpy/ethpy/base/errors/test_errors.py b/lib/ethpy/ethpy/base/errors/errors_test.py similarity index 100% rename from lib/ethpy/ethpy/base/errors/test_errors.py rename to lib/ethpy/ethpy/base/errors/errors_test.py diff --git a/lib/ethpy/ethpy/hyperdrive/interface.py b/lib/ethpy/ethpy/hyperdrive/interface.py index 9b245c0485..a0bf88a1b5 100644 --- a/lib/ethpy/ethpy/hyperdrive/interface.py +++ b/lib/ethpy/ethpy/hyperdrive/interface.py @@ -7,12 +7,14 @@ from elfpy import time as elftime from elfpy.markets.hyperdrive import HyperdriveMarket, HyperdriveMarketState, HyperdrivePricingModel from eth_typing import BlockNumber -from ethpy.base import smart_contract_read +from eth_utils import address from fixedpointmath import FixedPoint from web3 import Web3 from web3.contract.contract import Contract from web3.types import BlockData +from ..base import smart_contract_read +from .addresses import HyperdriveAddresses from .assets import AssetIdPrefix, encode_asset_id @@ -183,3 +185,30 @@ def get_hyperdrive_market(web3: Web3, hyperdrive_contract: Contract) -> Hyperdri _step_size=FixedPoint(1) / FixedPoint(365), # TODO: Should get the anvil increment time ), ) + + +def get_hyperdrive_contract(web3: Web3, abis: dict, addresses: HyperdriveAddresses) -> Contract: + """Get the hyperdrive contract given abis. 
+ + Arguments + --------- + web3: Web3 + web3 provider object + abis: dict + A dictionary that contains all abis keyed by the abi name, returned from `load_all_abis` + addresses: HyperdriveAddressesJson + The block number to query from the chain + + Returns + ------- + Contract + The contract object returned from the query + """ + if "IHyperdrive" not in abis: + raise AssertionError("IHyperdrive ABI was not provided") + state_abi = abis["IHyperdrive"] + # get contract instance of hyperdrive + hyperdrive_contract: Contract = web3.eth.contract( + address=address.to_checksum_address(addresses.mock_hyperdrive), abi=state_abi + ) + return hyperdrive_contract diff --git a/pyproject.toml b/pyproject.toml index 0bb27b1793..74354470e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -19,7 +19,12 @@ authors = [ [tool.pytest.ini_options] -addopts = ["--import-mode=importlib"] +minversion = "6.0" +addopts = ["--tb=short"] +norecursedirs = ".git examples hyperdrive_solidity" +# TODO remove test_*.py, which is only used by elfpy +python_files = "*_test.py test_*.py" + [tool.pylint.format] max-line-length = "120" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 619ed9c9fd..0000000000 --- a/pytest.ini +++ /dev/null @@ -1,6 +0,0 @@ -# content of pytest.ini -[pytest] -# do not search these directories for tests -norecursedirs = .git examples hyperdrive_solidity -# shorter traceback format -addopts = --tb=short