Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
34 commits
Select commit Hold shift + click to select a range
d7d66c5
Moving and renaming transactions to hyperdrive transactions
Aug 8, 2023
9ed4f79
Fixing imports and moving helper functions
Aug 9, 2023
0dd2cbd
More moving of helper functions
Aug 9, 2023
00d7f77
Moving helper functions out of bin script for dashboard
Aug 9, 2023
e1d031b
Renaming postgres to db_interface
Aug 9, 2023
9d41f86
Removing redundant function
Aug 9, 2023
95a6043
Moving hyperdrive contract function to ethpy
Aug 9, 2023
2ac7e74
Changing postgres to db_interface in base
Aug 9, 2023
56b1d6a
One more import file change
Aug 9, 2023
6003046
Removing duplicate functions in scripts
Aug 9, 2023
0704880
Relative imports internally. Moving calc_spot_price to analysis
Aug 9, 2023
6e059df
Relative imports within chainsync
Aug 9, 2023
eac17d4
Wrapping db stuff in outer directory
Aug 9, 2023
b5eb894
Adding crash reporting to agent0
Aug 9, 2023
a5f0c84
Fixing import for crash reports
Aug 9, 2023
8213b62
Fixing imports for new db dir
Aug 9, 2023
61a8a5b
Moving crash report test to agent0
Aug 9, 2023
5ba6c14
Relative imports within subpackages
Aug 9, 2023
369a562
Fixing cyclic imports
Aug 9, 2023
def2b2b
New testing format
Aug 9, 2023
e884d5d
New pytest structure
Aug 9, 2023
2178dd9
Adding checks for empty data for clean continuation
Aug 9, 2023
b8c6a61
Renaming missed test
Aug 9, 2023
332cb55
black
Aug 9, 2023
de30f56
minor comment fix
Aug 9, 2023
74ce6ed
Removing dead code
Aug 9, 2023
5ea9727
Rename get_agents to get_all_traders
Aug 9, 2023
e2f32e1
Removing empty outer tests directories
Aug 9, 2023
219d443
Renaming db files
Aug 9, 2023
5f988c2
Renaming conversions to utils to differentiate with convert_data
Aug 9, 2023
a9452d3
Using logging instead of print statement
Aug 9, 2023
8d46037
Updating comment
Aug 9, 2023
eafe41c
Renaming file
Aug 9, 2023
f084ce5
Moving convert function to private method within convert_data
Aug 9, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
28 changes: 28 additions & 0 deletions lib/agent0/agent0/hyperdrive/crash_report.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
"""Utility function for logging bot crash reports."""
from __future__ import annotations

import logging

from elfpy.utils import logs


def setup_hyperdrive_crash_report_logging(log_format_string: str | None = None) -> None:
    """Create a new logging file handler with CRITICAL log level for hyperdrive crash reporting.

    In the future, a custom log level could be used specific to crash reporting.

    Arguments
    ---------
    log_format_string : str, optional
        Logging format described in string format.
    """
    # Route CRITICAL records into a dedicated crash-report file on the root logger.
    handler_settings = {
        "logger": None,  # use the default root logger
        "log_filename": "hyperdrive_crash_report.log",
        "log_format_string": log_format_string,
        "delete_previous_logs": False,
        "log_level": logging.CRITICAL,
    }
    logs.add_file_handler(**handler_settings)


# TODO implement hyperdrive crash report
2 changes: 1 addition & 1 deletion lib/agent0/agent0/hyperdrive/exec/setup_experiment.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@
from agent0.base.config import EnvironmentConfig
from agent0.hyperdrive.agents import HyperdriveAgent
from agent0.hyperdrive.config import get_eth_bots_config
from agent0.hyperdrive.crash_report import setup_hyperdrive_crash_report_logging
from agent0.hyperdrive.exec import get_agent_accounts
from chainsync.hyperdrive import setup_hyperdrive_crash_report_logging
from elfpy.utils import logs
from ethpy.base import initialize_web3_with_http_provider, load_all_abis
from ethpy.hyperdrive import fetch_hyperdrive_address_from_url
Expand Down
7 changes: 4 additions & 3 deletions lib/chainsync/bin/acquire_data.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,18 +6,18 @@
import time
from dataclasses import dataclass

from chainsync.base import add_transactions, initialize_session
from chainsync.hyperdrive import (
from chainsync.db.base import initialize_session
from chainsync.db.hyperdrive import (
add_checkpoint_infos,
add_pool_config,
add_pool_infos,
add_transactions,
add_wallet_deltas,
add_wallet_infos,
convert_checkpoint_info,
convert_hyperdrive_transactions_for_block,
convert_pool_config,
convert_pool_info,
get_hyperdrive_contract,
get_latest_block_number_from_pool_info_table,
get_wallet_info,
)
Expand All @@ -32,6 +32,7 @@
get_hyperdrive_config,
get_hyperdrive_pool_info,
)
from ethpy.hyperdrive.interface import get_hyperdrive_contract
from web3 import Web3
from web3.contract.contract import Contract

Expand Down
13 changes: 7 additions & 6 deletions lib/chainsync/bin/register_username_server.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
"""A simple Flask server to run python scripts."""
from chainsync.base import postgres
import logging

from chainsync.db.base import interface
from dotenv import load_dotenv
from flask import Flask, jsonify, request
from flask_expects_json import expects_json
Expand Down Expand Up @@ -27,17 +29,16 @@ def register_bots():
return jsonify({"data": data, "error": "request.json is None"}), 500

# initialize the postgres session
session = postgres.initialize_session()
session = interface.initialize_session()
try:
postgres.add_user_map(username, wallet_addrs, session)
# TODO move this to logging
print(f"Registered {wallet_addrs=} to {username=}")
interface.add_user_map(username, wallet_addrs, session)
logging.debug("Registered wallet_addrs=%s to username=%s}", wallet_addrs, username)
out = (jsonify({"data": data, "error": ""}), 200)
except Exception as exc: # pylint: disable=broad-exception-caught
# Ignoring broad exception, since we're simply printing out error and returning to client
out = (jsonify({"data": data, "error": str(exc)}), 500)

postgres.close_session(session)
interface.close_session(session)
return out


Expand Down
213 changes: 16 additions & 197 deletions lib/chainsync/bin/run_hyperdrive_dashboard.py
Original file line number Diff line number Diff line change
@@ -1,215 +1,32 @@
"""Run the streamlab demo."""
"""Run the dashboard."""
from __future__ import annotations

import os
import time

import mplfinance as mpf
import pandas as pd
import streamlit as st
from agent0.hyperdrive.config import get_eth_bots_config
from chainsync.analysis.calc_fixed_rate import calc_fixed_rate
from chainsync.analysis.calc_ohlcv import calc_ohlcv
from chainsync.analysis.calc_pnl import calc_closeout_pnl, calc_total_returns
from chainsync.base import get_transactions, get_user_map, initialize_session
from chainsync.dashboard import get_combined_data, plot_fixed_rate, plot_ohlcv
from chainsync.hyperdrive import get_agents, get_pool_config, get_pool_info, get_wallet_deltas
from chainsync.dashboard import (
build_leaderboard,
build_ticker,
get_combined_data,
get_user_lookup,
plot_fixed_rate,
plot_ohlcv,
)
from chainsync.db.base import get_user_map, initialize_session
from chainsync.db.hyperdrive import get_all_traders, get_pool_config, get_pool_info, get_transactions, get_wallet_deltas
from dotenv import load_dotenv

# pylint: disable=invalid-name

st.set_page_config(page_title="Trading Competition Dashboard", layout="wide")
st.set_option("deprecation.showPyplotGlobalUse", False)


# Helper functions
# TODO should likely move these functions to another file
def get_ticker(
    wallet_delta: pd.DataFrame, transactions: pd.DataFrame, pool_info: pd.DataFrame, lookup: pd.DataFrame
) -> pd.DataFrame:
    """Show recent trades, one row per transaction, for the dashboard ticker.

    Arguments
    ---------
    wallet_delta: pd.DataFrame
        The dataframe resulting from get_wallet_deltas; must provide
        "transactionHash", "blockNumber", "walletAddress", "baseTokenType",
        and "delta" columns.
    transactions: pd.DataFrame
        The dataframe resulting from get_transactions; must provide
        "transactionHash" and "input_method" columns.
    pool_info: pd.DataFrame
        Pool info indexed by block number with a "timestamp" column.
    lookup: pd.DataFrame
        Address-to-username lookup from a `get_user_lookup` call.

    Returns
    -------
    pd.DataFrame
        The filtered transaction data based on what we want to view in the ticker:
        indexed by timestamp (most recent first) with columns
        ["Block", "User", "Wallet", "Method", "Token Deltas"].
    """
    # TODO these merges should really happen via an sql query instead of in pandas here
    # Set ticker so that each transaction is a single row
    ticker_data = wallet_delta.groupby(["transactionHash"]).agg(
        {"blockNumber": "first", "walletAddress": "first", "baseTokenType": tuple, "delta": tuple}
    )

    # Expand column of lists into separate dataframes, then str cat them together
    token_type = pd.DataFrame(ticker_data["baseTokenType"].to_list(), index=ticker_data.index)
    token_deltas = pd.DataFrame(ticker_data["delta"].to_list(), index=ticker_data.index)
    token_diffs = token_type + ": " + token_deltas.astype("str")
    # Aggregate columns into a single list, removing nans
    token_diffs = token_diffs.stack().groupby(level=0).agg(list)

    # Gather other information from other tables
    usernames = address_to_username(lookup, ticker_data["walletAddress"])
    timestamps = pool_info.loc[ticker_data["blockNumber"], "timestamp"]
    trade_type = transactions.set_index("transactionHash").loc[ticker_data.index, "input_method"]

    # Assemble the display columns in ticker order
    ticker_data = ticker_data[["blockNumber", "walletAddress"]].copy()
    ticker_data.insert(0, "timestamp", timestamps.values)  # type: ignore
    ticker_data.insert(2, "username", usernames.values)  # type: ignore
    ticker_data.insert(4, "trade_type", trade_type)
    ticker_data.insert(5, "token_diffs", token_diffs)  # type: ignore
    ticker_data.columns = ["Timestamp", "Block", "User", "Wallet", "Method", "Token Deltas"]
    # Shorten wallet address string
    ticker_data["Wallet"] = ticker_data["Wallet"].str[:6] + "..." + ticker_data["Wallet"].str[-4:]
    # Return reverse of methods to put most recent transactions at the top
    ticker_data = ticker_data.set_index("Timestamp").sort_index(ascending=False)
    # Drop rows with nonexistent wallets
    ticker_data = ticker_data.dropna(axis=0, subset="Wallet")
    return ticker_data


def combine_usernames(username: pd.Series) -> pd.DataFrame:
    """Collapse multiple login identities (e.g., click accounts and bots) into one user.

    Arguments
    ---------
    username: pd.Series
        Registered usernames to collapse.

    Returns
    -------
    pd.DataFrame
        The original username column plus a "user" column holding the combined
        identity (NaN when no mapping exists for a username).
    """
    # Hard coded mapping:
    user_mapping = {
        "Charles St. Louis (click)": "Charles St. Louis",
        "Alim Khamisa (click)": "Alim Khamisa",
        "Danny Delott (click)": "Danny Delott",
        "Gregory Lisa (click)": "Gregory Lisa",
        "Jonny Rhea (click)": "Jonny Rhea",
        "Matt Brown (click)": "Matt Brown",
        "Giovanni Effio (click)": "Giovanni Effio",
        "Mihai Cosma (click)": "Mihai Cosma",
        "Ryan Goree (click)": "Ryan Goree",
        "Alex Towle (click)": "Alex Towle",
        "Adelina Ruffolo (click)": "Adelina Ruffolo",
        "Jacob Arruda (click)": "Jacob Arruda",
        "Dylan Paiton (click)": "Dylan Paiton",
        "Sheng Lundquist (click)": "Sheng Lundquist",
        "ControlC Schmidt (click)": "ControlC Schmidt",
        "George Towle (click)": "George Towle",
        "Jack Burrus (click)": "Jack Burrus",
        "Jordan J (click)": "Jordan J",
        # Bot accounts
        "slundquist (bots)": "Sheng Lundquist",
    }
    mapping_frame = pd.Series(user_mapping, name="user").to_frame()
    # Use merge in case mapping doesn't exist
    key_column = username.name
    return username.to_frame().merge(mapping_frame, how="left", left_on=key_column, right_index=True)


def get_leaderboard(pnl: pd.Series, lookup: pd.DataFrame) -> tuple[pd.DataFrame, pd.DataFrame]:
    """Rank users by PNL, both per wallet and combined across each user's accounts.

    Arguments
    ---------
    pnl: pd.Series
        PNL values indexed by wallet address ("walletAddress").
    lookup: pd.DataFrame
        Address-to-username lookup from a `get_user_lookup` call.

    Returns
    -------
    tuple[pd.DataFrame, pd.DataFrame]
        (combined leaderboard, individual leaderboard), each sorted by descending pnl.
    """
    frame = pnl.reset_index()  # type: ignore
    frame.insert(1, "username", address_to_username(lookup, frame["walletAddress"]).values.tolist())
    # Hard coded funding provider from migration account
    migration_addr = "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266"
    # Don't show this account
    frame = frame[frame["walletAddress"] != migration_addr]
    # Rank based on pnl
    frame["user"] = combine_usernames(frame["username"])["user"].values

    individual_board = (
        frame[["username", "walletAddress", "pnl"]]
        .sort_values("pnl", ascending=False)  # type: ignore
        .reset_index(drop=True)
    )
    combined_board = (
        frame[["user", "pnl"]].groupby("user")["pnl"].sum().reset_index().sort_values("pnl", ascending=False)
    ).reset_index(drop=True)

    return (combined_board, individual_board)


def get_click_addresses() -> pd.DataFrame:
    """Return a dataframe of hard coded click addresses.

    Returns
    -------
    pd.DataFrame
        Two columns, "address" and "username", one row per known click trader.
    """
    known_users = {
        "0x004dfC2dBA6573fa4dFb1E86e3723e1070C0CfdE": "Charles St. Louis (click)",
        "0x005182C62DA59Ff202D53d6E42Cef6585eBF9617": "Alim Khamisa (click)",
        "0x005BB73FddB8CE049eE366b50d2f48763E9Dc0De": "Danny Delott (click)",
        "0x0065291E64E40FF740aE833BE2F68F536A742b70": "Gregory Lisa (click)",
        "0x0076b154e60BF0E9088FcebAAbd4A778deC5ce2c": "Jonny Rhea (click)",
        "0x00860d89A40a5B4835a3d498fC1052De04996de6": "Matt Brown (click)",
        "0x00905A77Dc202e618d15d1a04Bc340820F99d7C4": "Giovanni Effio (click)",
        "0x009ef846DcbaA903464635B0dF2574CBEE66caDd": "Mihai Cosma (click)",
        "0x00D5E029aFCE62738fa01EdCA21c9A4bAeabd434": "Ryan Goree (click)",
        "0x020A6F562884395A7dA2be0b607Bf824546699e2": "Alex Towle (click)",
        "0x020a898437E9c9DCdF3c2ffdDB94E759C0DAdFB6": "Adelina Ruffolo (click)",
        "0x020b42c1E3665d14275E2823bCef737015c7f787": "Jacob Arruda (click)",
        "0x02147558D39cE51e19de3A2E1e5b7c8ff2778829": "Dylan Paiton (click)",
        "0x021f1Bbd2Ec870FB150bBCAdaaA1F85DFd72407C": "Sheng Lundquist (click)",
        "0x02237E07b7Ac07A17E1bdEc720722cb568f22840": "ControlC Schmidt (click)",
        "0x022ca016Dc7af612e9A8c5c0e344585De53E9667": "George Towle (click)",
        "0x0235037B42b4c0575c2575D50D700dD558098b78": "Jack Burrus (click)",
        "0x0238811B058bA876Ae5F79cFbCAcCfA1c7e67879": "Jordan J (click)",
    }
    # Dict insertion order defines the row order; index is the default RangeIndex
    return pd.DataFrame(list(known_users.items()), columns=["address", "username"])


def get_user_lookup() -> pd.DataFrame:
    """Generate username to agents mapping.

    NOTE(review): reads the module-level `session` (the postgres session opened at
    script startup) instead of taking it as an argument — consider parameterizing.

    Returns
    -------
    pd.DataFrame
        A dataframe with an "username" and "address" columns that provide a lookup
        between a registered username and a wallet address. The username can also be
        the wallet address itself if a wallet is found without a registered username.
    """
    # Get data
    agents = get_agents(session)
    user_map = get_user_map(session)
    # Usernames in postgres are bots
    user_map["username"] = user_map["username"] + " (bots)"

    # Click (UI) users are hard coded; stack them on top of the registered bot users
    click_map = get_click_addresses()
    user_map = pd.concat([click_map, user_map], axis=0)

    # Generate a lookup of users -> address, taking into account that some addresses don't have users
    # Reindex looks up agent addresses against user_map, adding nans if it doesn't exist
    options_map = user_map.set_index("address").reindex(agents)

    # Set username as address if agent doesn't exist
    na_idx = options_map["username"].isna()
    # If there are any nan usernames, set address itself as username
    if na_idx.any():
        options_map.loc[na_idx, "username"] = options_map.index[na_idx].values
    return options_map.reset_index()


def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Series:
    """Translate wallet addresses into usernames.

    Arguments
    ---------
    lookup: pd.DataFrame
        The lookup dataframe from `get_user_lookup` call, with "address" and
        "username" columns.
    selected_list: pd.Series
        Addresses to look up usernames for.

    Returns
    -------
    pd.Series
        The username for each entry of selected_list (NaN when no match is found).
    """
    key_column = selected_list.name
    # Left merge keeps one output row per input address, in input order
    merged = selected_list.to_frame().merge(lookup, how="left", left_on=key_column, right_on="address")
    return merged["username"]


# Connect to postgres
load_dotenv()
session = initialize_session()
Expand Down Expand Up @@ -248,12 +65,14 @@ def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Se

while True:
# Place data and plots
user_lookup = get_user_lookup()
agents = get_all_traders(session)
user_map = get_user_map(session)
txn_data = get_transactions(session, -max_live_blocks)
pool_info_data = get_pool_info(session, -max_live_blocks, coerce_float=False)
combined_data = get_combined_data(txn_data, pool_info_data)
wallet_deltas = get_wallet_deltas(session, coerce_float=False)
ticker = get_ticker(wallet_deltas, txn_data, pool_info_data, user_lookup)
user_lookup = get_user_lookup(agents, user_map)
ticker = build_ticker(wallet_deltas, txn_data, pool_info_data, user_lookup)

(fixed_rate_x, fixed_rate_y) = calc_fixed_rate(combined_data, config_data)
ohlcv = calc_ohlcv(combined_data, config_data, freq="5T")
Expand All @@ -267,7 +86,7 @@ def address_to_username(lookup: pd.DataFrame, selected_list: pd.Series) -> pd.Se
## Add initial budget column to bots
## when bot restarts, use initial budget for bot's wallet address to set "budget" in Agent.Wallet

comb_rank, ind_rank = get_leaderboard(current_returns, user_lookup)
comb_rank, ind_rank = build_leaderboard(current_returns, user_lookup)

with ticker_placeholder.container():
st.header("Ticker")
Expand Down
9 changes: 2 additions & 7 deletions lib/chainsync/chainsync/analysis/__init__.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,5 @@
"""Analysis for trading."""
from .calc_fixed_rate import calc_fixed_rate
from .calc_ohlcv import calc_ohlcv
from .calc_pnl import (
calc_closeout_pnl,
calc_single_closeout,
calc_total_returns,
calculate_spot_price,
calculate_spot_price_for_position,
)
from .calc_pnl import calc_closeout_pnl, calc_single_closeout, calc_total_returns
from .calc_spot_price import calculate_spot_price, calculate_spot_price_for_position
3 changes: 2 additions & 1 deletion lib/chainsync/chainsync/analysis/calc_fixed_rate.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
from decimal import Decimal

import numpy as np
from chainsync.dashboard import calculate_spot_price

from .calc_spot_price import calculate_spot_price


def calc_fixed_rate(trade_data, config_data):
Expand Down
3 changes: 2 additions & 1 deletion lib/chainsync/chainsync/analysis/calc_ohlcv.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,8 @@
from __future__ import annotations

import pandas as pd
from chainsync.dashboard import calculate_spot_price

from .calc_spot_price import calculate_spot_price


def calc_ohlcv(trade_data, config_data, freq="D"):
Expand Down
Loading