5 changes: 5 additions & 0 deletions README.md
@@ -45,6 +45,11 @@ cd trader; poetry run autonomy deploy stop --build-dir trader_service/abci_build
cd trader; poetry run python ../trades.py YOUR_SAFE_ADDRESS; cd ..
```

Or restrict the search to a specific date range by defining the "from" and "to" dates (see the note after this command):
```bash
cd trader; poetry run python ../trades.py YOUR_SAFE_ADDRESS --from-date 2023-08-15:03:50:00 --to-date 2023-08-20:13:45:00; cd ..
```
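
A minimal sketch of how these dates are handled, based on the `--from-date`/`--to-date` handling added in `trades.py` below: the value is parsed with `datetime.datetime.fromisoformat`, pinned to UTC, and converted to the Unix timestamp that each trade's `creationTimestamp` is compared against. The ISO `T` date/time separator in the example string is used here purely for illustration.

```python
import datetime

# Illustrative sketch only: parse a date string, pin it to UTC, and convert it
# to the Unix timestamp used to filter trades by creationTimestamp.
from_date = datetime.datetime.fromisoformat("2023-08-15T03:50:00")
from_date = from_date.replace(tzinfo=datetime.timezone.utc)
print(from_date.timestamp())  # 1692071400.0
```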

3. Use this command to investigate your agent's logs:

```bash
109 changes: 93 additions & 16 deletions trades.py
@@ -20,8 +20,10 @@

"""This script queries the OMEN subgraph to obtain the trades of a given address."""

import datetime
import time
from argparse import ArgumentParser
from collections import defaultdict
from enum import Enum
from string import Template
from typing import Any
@@ -33,6 +35,9 @@
DUST_THRESHOLD = 10000000000000
INVALID_ANSWER = 0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF
FPMM_CREATOR = "0x89c5cc945dd550bcffb72fe42bff002429f46fec"
DEFAULT_FROM_DATE = "1970-01-01T00:00:00"
DEFAULT_TO_DATE = "2038-01-19T03:14:07"


headers = {
"Accept": "application/json, multipart/mixed",
@@ -127,6 +132,7 @@ class MarketAttribute(Enum):

NUM_TRADES = "Num. trades"
WINNER_TRADES = "Winner trades"
NUM_REDEEMED = "Num. redeemed"
INVESTMENT = "Invested"
FEES = "Fees"
EARNINGS = "Earnings"
@@ -143,12 +149,28 @@ def __str__(self) -> str:
STATS_TABLE_ROWS = list(MarketAttribute)


def _parse_arg() -> str:
def _parse_args() -> Any:
"""Parse the creator positional argument."""
parser = ArgumentParser(description="Get trades on Omen for a Safe address.")
parser.add_argument("creator")
parser.add_argument(
"--from-date",
type=datetime.datetime.fromisoformat,
default=DEFAULT_FROM_DATE,
help="Start date (UTC) in YYYY-MM-DD:HH:mm:ss format",
)
parser.add_argument(
"--to-date",
type=datetime.datetime.fromisoformat,
default=DEFAULT_TO_DATE,
help="End date (UTC) in YYYY-MM-DD:HH:mm:ss format",
)
args = parser.parse_args()
return args.creator

args.from_date = args.from_date.replace(tzinfo=datetime.timezone.utc)
args.to_date = args.to_date.replace(tzinfo=datetime.timezone.utc)

return args


def _to_content(q: str) -> dict[str, Any]:
@@ -161,18 +183,19 @@ def _to_content(q: str) -> dict[str, Any]:
return finalized_query


def _query_omen_xdai_subgraph() -> dict[str, Any]:
def _query_omen_xdai_subgraph(creator: str) -> dict[str, Any]:
"""Query the subgraph."""
url = "https://api.thegraph.com/subgraphs/name/protofire/omen-xdai"

all_results: dict[str, Any] = {"data": {"fpmmTrades": []}}
grouped_results = defaultdict(list)
skip = 0

while True:
query = omen_xdai_trades_query.substitute(
creator=creator.lower(),
fpmm_creator=FPMM_CREATOR.lower(),
first=QUERY_BATCH_SIZE,
skip=skip
skip=skip,
)
content_json = _to_content(query)
res = requests.post(url, headers=headers, json=content_json)
@@ -182,13 +205,26 @@ def _query_omen_xdai_subgraph(creator: str) -> dict[str, Any]:
if not trades:
break

all_results["data"]["fpmmTrades"].extend(trades)
for trade in trades:
fpmm_id = trade.get("fpmm", {}).get("id")
grouped_results[fpmm_id].append(trade)

skip += QUERY_BATCH_SIZE

all_results = {
"data": {
"fpmmTrades": [
trade
for trades_list in grouped_results.values()
for trade in trades_list
]
}
}

return all_results


def _query_conditional_tokens_gc_subgraph() -> dict[str, Any]:
def _query_conditional_tokens_gc_subgraph(creator: str) -> dict[str, Any]:
"""Query the subgraph."""
url = "https://api.thegraph.com/subgraphs/name/gnosis/conditional-tokens-gc"

@@ -226,8 +262,17 @@ def _wei_to_dai(wei: int) -> str:
return f"{formatted_dai} DAI"


def _is_redeemed(user_json: dict[str, Any], condition_id: str) -> bool:
def _is_redeemed(user_json: dict[str, Any], fpmmTrade: dict[str, Any]) -> bool:
user_positions = user_json["data"]["user"]["userPositions"]
outcomes_tokens_traded = int(fpmmTrade["outcomeTokensTraded"])
condition_id = fpmmTrade["fpmm"]["condition"]["id"]

for position in user_positions:
position_condition_ids = position["position"]["conditionIds"]
balance = int(position["balance"])

if condition_id in position_condition_ids and balance == outcomes_tokens_traded:
return False

for position in user_positions:
position_condition_ids = position["position"]["conditionIds"]
@@ -296,6 +341,16 @@ def _format_table(table: dict[Any, dict[Any, Any]]) -> str:
)
+ "\n"
)
table_str += (
f"{MarketAttribute.NUM_REDEEMED:<{column_width}}"
+ "".join(
[
f"{table[MarketAttribute.NUM_REDEEMED][c]:>{column_width}}"
for c in STATS_TABLE_COLS
]
)
+ "\n"
)
table_str += (
f"{MarketAttribute.INVESTMENT:<{column_width}}"
+ "".join(
@@ -361,7 +416,10 @@ def _format_table(table: dict[Any, dict[Any, Any]]) -> str:


def _parse_response( # pylint: disable=too-many-locals,too-many-statements
trades_json: dict[str, Any], user_json: dict[str, Any]
trades_json: dict[str, Any],
user_json: dict[str, Any],
from_timestamp: float,
to_timestamp: float,
) -> str:
"""Parse the trades from the response."""

@@ -373,22 +431,33 @@ def _parse_response( # pylint: disable=too-many-locals,too-many-statements
output += "Trades\n"
output += "------\n"

for fpmmTrade in trades_json["data"]["fpmmTrades"]:
filtered_trades = [
fpmmTrade
for fpmmTrade in trades_json["data"]["fpmmTrades"]
if from_timestamp <= float(fpmmTrade["creationTimestamp"]) <= to_timestamp
]

for fpmmTrade in filtered_trades:
try:
collateral_amount = int(fpmmTrade["collateralAmount"])
outcome_index = int(fpmmTrade["outcomeIndex"])
fee_amount = int(fpmmTrade["feeAmount"])
outcomes_tokens_traded = int(fpmmTrade["outcomeTokensTraded"])
creation_timestamp = float(fpmmTrade["creationTimestamp"])

fpmm = fpmmTrade["fpmm"]
answer_finalized_timestamp = fpmm["answerFinalizedTimestamp"]
is_pending_arbitration = fpmm["isPendingArbitration"]
opening_timestamp = fpmm["openingTimestamp"]
condition_id = fpmm["condition"]["id"]

output += f' Question: {fpmmTrade["title"]}\n'
output += f' Market URL: https://aiomen.eth.limo/#/{fpmm["id"]}\n'

creation_timestamp_utc = datetime.datetime.fromtimestamp(
creation_timestamp, tz=datetime.timezone.utc
)
output += f' Trade date: {creation_timestamp_utc.strftime("%Y-%m-%d %H:%M:%S %Z")}\n'

market_status = MarketState.CLOSED
if fpmm["currentAnswer"] is None and time.time() >= float(
opening_timestamp
)
@@ -441,11 +510,14 @@ def _parse_response( # pylint: disable=too-many-locals,too-many-statements
earnings = outcomes_tokens_traded
output += f" Final answer: {fpmm['outcomes'][current_answer]!r} - Congrats! The trade was for the winner answer.\n"
output += f" Earnings: {_wei_to_dai(earnings)}\n"
redeemed = _is_redeemed(user_json, condition_id)
redeemed = _is_redeemed(user_json, fpmmTrade)
output += f" Redeemed: {redeemed}\n"
statistics_table[MarketAttribute.WINNER_TRADES][market_status] += 1

if redeemed:
statistics_table[MarketAttribute.NUM_REDEEMED][
market_status
] += 1
statistics_table[MarketAttribute.REDEMPTIONS][
market_status
] += earnings
@@ -475,8 +547,13 @@ def _parse_response( # pylint: disable=too-many-locals,too-many-statements


if __name__ == "__main__":
creator = _parse_arg()
_trades_json = _query_omen_xdai_subgraph()
_user_json = _query_conditional_tokens_gc_subgraph()
parsed = _parse_response(_trades_json, _user_json)
user_args = _parse_args()
_trades_json = _query_omen_xdai_subgraph(user_args.creator)
_user_json = _query_conditional_tokens_gc_subgraph(user_args.creator)
parsed = _parse_response(
_trades_json,
_user_json,
user_args.from_date.timestamp(),
user_args.to_date.timestamp(),
)
print(parsed)