Visualize liquidation map depth mode="gross_value"
aoki-h-jp committed Sep 1, 2023
1 parent bc586e6 commit fe925c1
Showing 4 changed files with 312 additions and 3 deletions.
56 changes: 55 additions & 1 deletion README.md
@@ -2,4 +2,58 @@
[![Format code](https://github.com/aoki-h-jp/py-liquidation-map/actions/workflows/Formatter.yml/badge.svg)](https://github.com/aoki-h-jp/py-liquidation-map/actions/workflows/Formatter.yml)

# py-liquidation-map
Visualize Liquidation Map from actual execution data.
Visualize Liquidation Map from actual execution data. Supports all historical data from Binance and Bybit. Receiving orders in real time via websocket and drawing liquidation maps is under development.

## Installation

```bash
pip install git+https://github.com/aoki-h-jp/py-liquidation-map
```

## Usage
### Visualize liquidation map from historical data
Downloads Binance BTCUSDT data from start_datetime to end_datetime and draws a liquidation map calculated from orders above threshold_gross_value=100000 [USDT].
```python
from liqmap.mapping import HistoricalMapping

mapping = HistoricalMapping(
    start_datetime='2023-08-01 00:00:00',
    end_datetime='2023-08-01 06:00:00',
    symbol='BTCUSDT',
    exchange='binance',
)

mapping.liquidation_map_from_historical(
    mode="gross_value",
    threshold_gross_value=100000
)
```
### Output
![image](img/BTCUSDT_2023-08-01_00-00-00-2023-08-01_06-00-00_gross_value_100000.png)
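
For reference, the map estimates where leveraged positions opened by these large orders would be liquidated, at a few leverage levels. A minimal sketch of that estimation, using the same loss-cut multipliers as `liqmap/mapping.py` in this commit (the helper function below is illustrative, not part of the package API):
```python
# Illustrative sketch: mirrors the loss-cut multipliers used in liqmap/mapping.py.
# A large Buy at `price` is assumed to get liquidated below it; a large Sell above it.
LOSSCUT_OFFSETS = {"100x": 0.01, "50x": 0.02, "25x": 0.04, "10x": 0.10}

def estimate_losscut_prices(price: float, side: str) -> dict:
    sign = -1 if side == "Buy" else 1
    return {lev: price * (1 + sign * offset) for lev, offset in LOSSCUT_OFFSETS.items()}

print(estimate_losscut_prices(30_000.0, "Buy"))
# ≈ {'100x': 29700.0, '50x': 29400.0, '25x': 28800.0, '10x': 27000.0}
```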
### Visualize liquidation map depth
Draws a depth-style liquidation map: the cumulative estimated liquidation amount on each side of the current price, plus per-price bars for each leverage level.
```python
from liqmap.mapping import HistoricalMapping

mapping = HistoricalMapping(
    start_datetime='2023-08-01 00:00:00',
    end_datetime='2023-08-01 06:00:00',
    symbol='BTCUSDT',
    exchange='binance',
)

mapping.liquidation_map_depth_from_historical(
    mode="gross_value",
    threshold_gross_value=100000
)
```

### Output
![image](img/BTCUSDT_2023-08-01_00-00-00-2023-08-01_06-00-00_gross_value_100000_depth.png)
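
The depth view is the running total of those estimated liquidation amounts as you move away from the current price. A rough sketch of the buy-side calculation with pandas (the values and frame below are illustrative; the package builds them internally from the filtered trades):
```python
import pandas as pd

# Hypothetical estimated liquidation levels below the current price (buy side).
levels = pd.DataFrame({
    "price": [29700.0, 29400.0, 28800.0, 27000.0],
    "amount": [120_000, 250_000, 250_000, 180_000],
})
current_price = 29_900.0

# Walk down from the current price and accumulate the estimated liquidation amount.
depth = levels[levels["price"] <= current_price].sort_values("price", ascending=False)
depth["cumsum"] = depth["amount"].cumsum()
print(depth)
```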

## If you want to report a bug or request a feature
Please create an issue on this repository!

## Disclaimer
This project is for educational purposes only. You should not construe any such information or other material as legal, tax, investment, financial, or other advice. Nothing contained here constitutes a solicitation, recommendation, endorsement, or offer by me or any third party service provider to buy or sell any securities or other financial instruments in this or in any other jurisdiction in which such solicitation or offer would be unlawful under the securities laws of such jurisdiction.

Under no circumstances will I be held responsible or liable in any way for any claims, damages, losses, expenses, costs, or liabilities whatsoever, including, without limitation, any direct or indirect damages for loss of profits.
2 binary image files changed (the output images referenced in README.md); not displayed in the diff view.
259 changes: 257 additions & 2 deletions liqmap/mapping.py
@@ -1,14 +1,18 @@
# import standard libraries
import datetime
import math
import warnings

# import third-party libraries
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import pandas as pd
from download import BinanceAggTradesDownload, BybitAggTradesDownloader
from exceptions import ExchangeNotSupportedError
from rich.progress import track

warnings.filterwarnings("ignore")


class HistoricalMapping:
"""
@@ -150,9 +154,18 @@ def _format_aggtrade_dataframe(self, filepath: str) -> pd.DataFrame:

        return df

    @staticmethod
    def human_format(x, pos):
        # Format tick values into human-readable M/B units for matplotlib's FuncFormatter.
        if x < 1e6:
            return str(x)
        elif x < 1e9:
            return '{:.1f}M'.format(x * 1e-6)
        else:
            return '{:.1f}B'.format(x * 1e-9)
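    # Example outputs (illustrative): human_format(750000.0, None) -> '750000.0',
    # human_format(2.5e6, None) -> '2.5M', human_format(3.2e9, None) -> '3.2B'.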

    def liquidation_map_from_historical(
        self, mode="gross_value", threshold_gross_value=100000
    ):
    ) -> None:
        """
        Draw liquidation map from historical data
        :param mode: draw mode
@@ -391,8 +404,9 @@ def liquidation_map_from_historical(
            ),
            label="Current Price",
        )
        ax2.xaxis.set_major_formatter(ticker.FuncFormatter(self.human_format))
        ax2.set_title("Estimated Liquidation Amount")
        ax2.set_xlabel("Amount")
        ax2.set_xlabel("Amount [USDT]")
        ax2.tick_params(axis="x", labelrotation=45)
        plt.legend(loc="upper right")
        plt.tight_layout()
@@ -404,3 +418,244 @@ def liquidation_map_from_historical(
            df_l.to_csv(
                f"{save_title.replace('.png', '')}_{label.replace(' ','_')}_sell.csv"
            )

    def liquidation_map_depth_from_historical(
        self, mode="gross_value", threshold_gross_value=100000
    ) -> None:
        """
        Draw liquidation map depth from historical data
        :param mode: draw mode ("gross_value")
        :param threshold_gross_value: minimum gross value [USDT] for an order to be included
        :return: None
        """
        # Downloading historical data
        self._download()

        # Formatting historical data
        df_merged = pd.DataFrame()
        for prefix in track(self._make_prefix_list(), description="Formatting data"):
            df_prefix = self._format_aggtrade_dataframe(prefix)
            df_merged = pd.concat([df_merged, df_prefix])

        df_merged = df_merged.sort_values(by="timestamp")
        df_merged = df_merged.reset_index(drop=True)
        df_merged = df_merged[df_merged["timestamp"] <= self._end_datetime]
        df_merged = df_merged[df_merged["timestamp"] >= self._start_datetime]

        # Visualize liquidation map
        # mode: gross_value
        df_buy = df_merged[df_merged["side"] == "Buy"]
        df_sell = df_merged[df_merged["side"] == "Sell"]

        df_buy = df_buy[df_buy["amount"] >= threshold_gross_value]
        df_sell = df_sell[df_sell["amount"] >= threshold_gross_value]

        df_buy["LossCut100x"] = df_buy["price"] * 0.99
        df_buy["LossCut50x"] = df_buy["price"] * 0.98
        df_buy["LossCut25x"] = df_buy["price"] * 0.96
        df_buy["LossCut10x"] = df_buy["price"] * 0.90

        df_sell["LossCut100x"] = df_sell["price"] * 1.01
        df_sell["LossCut50x"] = df_sell["price"] * 1.02
        df_sell["LossCut25x"] = df_sell["price"] * 1.04
        df_sell["LossCut10x"] = df_sell["price"] * 1.10

        fig = plt.figure(figsize=(9, 9))
        ax1 = fig.add_subplot(111)
        ax2 = ax1.twinx()
        plt.xlabel("datetime")
        plt.ylabel("volume [USDT]")
        plt.xticks(rotation=45)
        title = f"{self._symbol}\n{self._start_datetime} -> {self._end_datetime}"
        if mode == "gross_value":
            title += f"\nthreshold: >= {threshold_gross_value} [USDT]"
        plt.title(title)

        df_losscut_10x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_buy.index):
            df_losscut_10x.loc[len(df_losscut_10x)] = {
                "price": df_buy.iloc[i, 8],
                "amount": df_buy.iloc[i, 4],
            }

        df_losscut_25x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_buy.index):
            df_losscut_25x.loc[len(df_losscut_25x)] = {
                "price": df_buy.iloc[i, 7],
                "amount": df_buy.iloc[i, 4],
            }

        df_losscut_50x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_buy.index):
            df_losscut_50x.loc[len(df_losscut_50x)] = {
                "price": df_buy.iloc[i, 6],
                "amount": df_buy.iloc[i, 4],
            }

        df_losscut_100x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_buy.index):
            df_losscut_100x.loc[len(df_losscut_100x)] = {
                "price": df_buy.iloc[i, 5],
                "amount": df_buy.iloc[i, 4],
            }

        current_price = df_merged.iloc[-1, 1]

        df_depth_buy = pd.concat([df_losscut_10x, df_losscut_25x, df_losscut_50x, df_losscut_100x], ignore_index=True)
        df_depth_buy = df_depth_buy.sort_values(by="price", ascending=False)
        df_depth_buy = df_depth_buy[df_depth_buy["price"] <= current_price]
        df_depth_buy = df_depth_buy.reset_index(drop=True)
        df_depth_buy["price"] = df_depth_buy["price"].astype(float)
        df_depth_buy["cumsum"] = df_depth_buy["amount"].cumsum().astype(float)
        ax1.plot(df_depth_buy["price"], df_depth_buy["cumsum"], label="buy", c='b')

        df_losscut_list = [
            df_losscut_10x,
            df_losscut_25x,
            df_losscut_50x,
            df_losscut_100x,
        ]
        labels = ["10x Leveraged", "25x Leveraged", "50x Leveraged", "100x Leveraged"]
        colors = ["r", "g", "b", "y"]
        tick_degits = 2 - math.ceil(
            math.log10(df_merged["price"].max() - df_merged["price"].min())
        )
        max_amount = 0
        for i, df_losscut in enumerate(df_losscut_list):
            df_losscut = df_losscut[df_losscut["price"] <= current_price]
            g_ids = int(
                (
                    round(df_losscut["price"].max(), tick_degits)
                    - round(df_losscut["price"].min(), tick_degits)
                )
                * 10 ** tick_degits
            )
            bins = [
                round(
                    round(df_losscut["price"].min(), tick_degits)
                    + i * 10 ** -tick_degits,
                    tick_degits,
                )
                for i in range(g_ids)
            ]
            df_losscut["group_id"] = pd.cut(df_losscut["price"], bins=bins)
            agg_df = df_losscut.groupby("group_id").sum()
            ax2.bar(
                x=[f.left for f in agg_df.index],
                height=agg_df["amount"],
                width=10 ** -tick_degits,
                color=colors[i],
                label=labels[i],
                alpha=0.5,
            )
            if agg_df["amount"].max() > max_amount:
                max_amount = agg_df["amount"].max()
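        # Worked example (illustrative): if the session's price range is ~1,500 USDT,
        # tick_degits = 2 - ceil(log10(1500)) = 2 - 4 = -2, so prices are rounded to the
        # nearest 100 USDT and each bar has a width of 10 ** 2 = 100 USDT.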

        # Save liquidation map data as csv
        save_title = f"{self._symbol}_{self._start_datetime.replace(' ', '_').replace(':', '-')}-{self._end_datetime.replace(' ', '_').replace(':', '-')}_{mode}_{threshold_gross_value}_depth.png"
        for df_l, label in zip(df_losscut_list, labels):
            df_l.to_csv(
                f"{save_title.replace('.png', '')}_{label.replace(' ', '_')}_buy.csv"
            )
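        # For the README example this writes files like
        # "BTCUSDT_2023-08-01_00-00-00-2023-08-01_06-00-00_gross_value_100000_depth_10x_Leveraged_buy.csv".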

        # Sell liquidation map on ax2
        df_losscut_10x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_sell.index):
            df_losscut_10x.loc[len(df_losscut_10x)] = {
                "price": df_sell.iloc[i, 8],
                "amount": df_sell.iloc[i, 4],
            }

        df_losscut_25x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_sell.index):
            df_losscut_25x.loc[len(df_losscut_25x)] = {
                "price": df_sell.iloc[i, 7],
                "amount": df_sell.iloc[i, 4],
            }

        df_losscut_50x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_sell.index):
            df_losscut_50x.loc[len(df_losscut_50x)] = {
                "price": df_sell.iloc[i, 6],
                "amount": df_sell.iloc[i, 4],
            }

        df_losscut_100x = pd.DataFrame(columns=["price", "amount"])
        for i, dt in enumerate(df_sell.index):
            df_losscut_100x.loc[len(df_losscut_100x)] = {
                "price": df_sell.iloc[i, 5],
                "amount": df_sell.iloc[i, 4],
            }

        current_price = df_merged.iloc[-1, 1]

        df_depth_sell = pd.concat([df_losscut_10x, df_losscut_25x, df_losscut_50x, df_losscut_100x], ignore_index=True)
        df_depth_sell = df_depth_sell.sort_values(by="price")
        df_depth_sell = df_depth_sell[df_depth_sell["price"] >= current_price]
        df_depth_sell = df_depth_sell.reset_index(drop=True)
        df_depth_sell["cumsum"] = df_depth_sell["amount"].cumsum()
        ax1.plot(df_depth_sell["price"], df_depth_sell["cumsum"], label="sell", c='r')

        df_losscut_list = [
            df_losscut_10x,
            df_losscut_25x,
            df_losscut_50x,
            df_losscut_100x,
        ]
        labels = ["10x Leveraged", "25x Leveraged", "50x Leveraged", "100x Leveraged"]
        colors = ["r", "g", "b", "y"]
        tick_degits = 2 - math.ceil(
            math.log10(df_merged["price"].max() - df_merged["price"].min())
        )
        max_amount = 0
        for i, df_losscut in enumerate(df_losscut_list):
            df_losscut = df_losscut[df_losscut["price"] >= current_price]
            g_ids = int(
                (
                    round(df_losscut["price"].max(), tick_degits)
                    - round(df_losscut["price"].min(), tick_degits)
                )
                * 10 ** tick_degits
            )
            bins = [
                round(
                    round(df_losscut["price"].min(), tick_degits)
                    + i * 10 ** -tick_degits,
                    tick_degits,
                )
                for i in range(g_ids)
            ]
            df_losscut["group_id"] = pd.cut(df_losscut["price"], bins=bins)
            agg_df = df_losscut.groupby("group_id").sum()
            ax2.bar(
                x=[f.left for f in agg_df.index],
                height=agg_df["amount"],
                width=10 ** -tick_degits,
                color=colors[i],
                alpha=0.5,
            )
            if agg_df["amount"].max() > max_amount:
                max_amount = agg_df["amount"].max()

        ax1.yaxis.set_major_formatter(ticker.FuncFormatter(self.human_format))
        ax2.yaxis.set_major_formatter(ticker.FuncFormatter(self.human_format))
        plt.annotate(
            "",
            xytext=(current_price, max_amount),
            xy=(current_price, 0),
            arrowprops=dict(
                arrowstyle="->,head_length=1,head_width=0.5", lw=2, linestyle="dashed"
            ),
            label="Current Price",
        )
        plt.legend(loc="upper right")
        plt.xlabel("Price [USDT]")
        plt.tight_layout()
        plt.savefig(save_title)
        plt.close()

        # Save liquidation map data as csv
        for df_l, label in zip(df_losscut_list, labels):
            df_l.to_csv(
                f"{save_title.replace('.png', '')}_{label.replace(' ', '_')}_sell.csv"
            )
