Skip to content

Commit

Permalink
Merge pull request #31 from flashbots/website-dataload
Browse files Browse the repository at this point in the history
added index, more logging for loading data
  • Loading branch information
metachris committed Apr 3, 2024
2 parents 2df7c03 + 46bea87 commit db68bfe
Show file tree
Hide file tree
Showing 5 changed files with 47 additions and 10 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -102,3 +102,10 @@ make test
make test-race
make build
```


### Updating relayscan

Notes for updating relayscan:

- Relay payloads are selected by `inserted_at`. When adding a new relay, you probably want to manually subtract a day from `inserted_at` so its payloads don't all show up as added today (`UPDATE mainnet_data_api_payload_delivered SET inserted_at = inserted_at - INTERVAL '1 DAY' WHERE relay='newrelay.xyz';`). See also https://github.com/flashbots/relayscan/issues/28
6 changes: 3 additions & 3 deletions database/database.go
Original file line number Diff line number Diff line change
Expand Up @@ -116,14 +116,14 @@ func (s *DatabaseService) GetDataAPILatestBid(relay string) (*DataAPIBuilderBidE
}

// GetTopRelays returns the number of delivered payloads per relay for rows
// inserted between since and until (exclusive bounds on inserted_at),
// ordered by payload count descending. Timestamps are normalized to UTC
// before binding, matching how rows are stored.
func (s *DatabaseService) GetTopRelays(since, until time.Time) (res []*TopRelayEntry, err error) {
	// The diff view left both the old hard-coded table name and the new
	// TableDataAPIPayloadDelivered-based query in place (two `query :=`
	// definitions, which would not compile); keep only the constant-based one.
	query := `SELECT relay, count(relay) as payloads FROM ` + TableDataAPIPayloadDelivered + ` WHERE inserted_at > $1 AND inserted_at < $2 GROUP BY relay ORDER BY payloads DESC;`
	err = s.DB.Select(&res, query, since.UTC(), until.UTC())
	return res, err
}

// GetTopBuilders counts distinct delivered slots per builder extra_data for
// rows inserted between since and until; when relay is non-empty the count is
// restricted to that relay. (Only the start of this function is visible here.)
func (s *DatabaseService) GetTopBuilders(since, until time.Time, relay string) (res []*TopBuilderEntry, err error) {
query := `SELECT extra_data, count(extra_data) as blocks FROM (
SELECT distinct(slot), extra_data FROM mainnet_data_api_payload_delivered WHERE inserted_at > $1 AND inserted_at < $2`
SELECT distinct(slot), extra_data FROM ` + TableDataAPIPayloadDelivered + ` WHERE inserted_at > $1 AND inserted_at < $2`
// SECURITY NOTE(review): relay is concatenated directly into the SQL string.
// If relay can ever carry untrusted input this is SQL injection; prefer a
// bound parameter ($3). Confirm callers only pass vetted relay hostnames.
if relay != "" {
query += ` AND relay = '` + relay + `'`
}
Expand All @@ -144,7 +144,7 @@ func (s *DatabaseService) GetBuilderProfits(since, until time.Time) (res []*Buil
round(sum(CASE WHEN coinbase_diff_eth IS NOT NULL THEN coinbase_diff_eth ELSE 0 END), 4) as total_profit,
round(abs(sum(CASE WHEN coinbase_diff_eth < 0 THEN coinbase_diff_eth ELSE 0 END)), 4) as total_subsidies
FROM (
SELECT distinct(slot), extra_data, coinbase_diff_eth FROM mainnet_data_api_payload_delivered WHERE inserted_at > $1 AND inserted_at < $2
SELECT distinct(slot), extra_data, coinbase_diff_eth FROM ` + TableDataAPIPayloadDelivered + ` WHERE inserted_at > $1 AND inserted_at < $2
) AS x
GROUP BY extra_data
ORDER BY total_profit DESC;`
Expand Down
1 change: 1 addition & 0 deletions database/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -97,6 +97,7 @@ CREATE INDEX IF NOT EXISTS ` + TableDataAPIPayloadDelivered + `_value_wei_idx ON
CREATE INDEX IF NOT EXISTS ` + TableDataAPIPayloadDelivered + `_valuecheck_ok_idx ON ` + TableDataAPIPayloadDelivered + `("value_check_ok");
CREATE INDEX IF NOT EXISTS ` + TableDataAPIPayloadDelivered + `_slotmissed_idx ON ` + TableDataAPIPayloadDelivered + `("slot_missed");
CREATE INDEX IF NOT EXISTS ` + TableDataAPIPayloadDelivered + `_cb_diff_eth_idx ON ` + TableDataAPIPayloadDelivered + `("coinbase_diff_eth");
-- CREATE INDEX CONCURRENTLY IF NOT EXISTS ` + TableDataAPIPayloadDelivered + `_insertedat_relay_idx ON ` + TableDataAPIPayloadDelivered + `("inserted_at", "relay");
CREATE TABLE IF NOT EXISTS ` + TableDataAPIBuilderBid + ` (
Expand Down
9 changes: 9 additions & 0 deletions services/website/html.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,15 @@ type Stats struct {
TopBuildersByRelay map[string][]*database.TopBuilderEntry
}

// NewStats returns a Stats value whose slice and map fields are all
// initialized to empty, non-nil collections, so callers may append to and
// index into them without a nil check.
func NewStats() *Stats {
	stats := new(Stats)
	stats.TopRelays = []*database.TopRelayEntry{}
	stats.TopBuilders = []*database.TopBuilderEntry{}
	stats.BuilderProfits = []*database.BuilderProfitEntry{}
	stats.TopBuildersByRelay = map[string][]*database.TopBuilderEntry{}
	return stats
}

type HTMLData struct {
Title string

Expand Down
34 changes: 27 additions & 7 deletions services/website/webserver.go
Original file line number Diff line number Diff line change
Expand Up @@ -152,20 +152,29 @@ func (srv *Webserver) getStatsForHours(duration time.Duration) (stats *Stats, er
now := time.Now().UTC()
since := now.Add(-1 * duration.Abs())

srv.log.Debug("- loading top relays...")
startTime := time.Now()
topRelays, err := srv.db.GetTopRelays(since, now)
if err != nil {
return nil, err
}
srv.log.WithField("duration", time.Since(startTime).String()).Debug("- got top relays")

srv.log.Debug("- loading top builders...")
startTime = time.Now()
topBuilders, err := srv.db.GetTopBuilders(since, now, "")
if err != nil {
return nil, err
}
srv.log.WithField("duration", time.Since(startTime).String()).Debug("- got top builders")

srv.log.Debug("- loading builder profits...")
startTime = time.Now()
builderProfits, err := srv.db.GetBuilderProfits(since, now)
if err != nil {
return nil, err
}
srv.log.WithField("duration", time.Since(startTime).String()).Debug("- got builder profits")

stats = &Stats{
Since: since,
Expand All @@ -178,13 +187,16 @@ func (srv *Webserver) getStatsForHours(duration time.Duration) (stats *Stats, er
}

// Query builders for each relay
srv.log.Debug("- loading builders per relay...")
startTime = time.Now()
for _, relay := range topRelays {
topBuildersForRelay, err := srv.db.GetTopBuilders(since, now, relay.Relay)
if err != nil {
return nil, err
}
stats.TopBuildersByRelay[relay.Relay] = consolidateBuilderEntries(topBuildersForRelay)
}
srv.log.WithField("duration", time.Since(startTime).String()).Debug("- got builders per relay")

return stats, nil
}
Expand Down Expand Up @@ -217,34 +229,42 @@ func (srv *Webserver) updateHTML() {
htmlData.LastUpdateSlot = entry.Slot
}

startUpdate := time.Now()
srv.log.Info("updating 24h stats...")
stats["24h"], err = srv.getStatsForHours(24 * time.Hour)
if err != nil {
srv.log.WithError(err).Error("Failed to get stats for 24h")
return
}
srv.log.WithField("duration", time.Since(startUpdate).String()).Info("updated 24h stats")

// The 12h, 1h, and 7d windows are only computed when not restricted to 24h
// stats. Each window is timed individually so slow queries are visible in
// the logs.
//
// Fixes applied: (1) removed a stale, untimed duplicate of the 7d
// computation that ran the same query twice; (2) the 7d error log message
// said "Failed to get stats for 24h" — a copy/paste error, now "7d".
if !srv.opts.Only24h {
	startUpdate = time.Now()
	srv.log.Info("updating 12h stats...")
	stats["12h"], err = srv.getStatsForHours(12 * time.Hour)
	if err != nil {
		srv.log.WithError(err).Error("Failed to get stats for 12h")
		return
	}
	srv.log.WithField("duration", time.Since(startUpdate).String()).Info("updated 12h stats")

	startUpdate = time.Now()
	srv.log.Info("updating 1h stats...")
	stats["1h"], err = srv.getStatsForHours(1 * time.Hour)
	if err != nil {
		srv.log.WithError(err).Error("Failed to get stats for 1h")
		return
	}
	srv.log.WithField("duration", time.Since(startUpdate).String()).Info("updated 1h stats")

	startUpdate = time.Now()
	srv.log.Info("updating 7d stats...")
	stats["7d"], err = srv.getStatsForHours(7 * 24 * time.Hour)
	if err != nil {
		srv.log.WithError(err).Error("Failed to get stats for 7d")
		return
	}
	srv.log.WithField("duration", time.Since(startUpdate).String()).Info("updated 7d stats")
}

// Save the html data
Expand Down

0 comments on commit db68bfe

Please sign in to comment.