-
Notifications
You must be signed in to change notification settings - Fork 0
/
main.go
137 lines (124 loc) · 3.35 KB
/
main.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
package main
import (
	"encoding/csv"
	"flag"
	"fmt"
	"log"
	"os"
	"path/filepath"
	"strconv"
	"strings"

	"github.com/gridironOne/furyint/store"
	"github.com/gridironOne/furyint/test/loadtime/report"
)
const (
	// blockStoreDBName is the on-disk name of the furyint block store database.
	blockStoreDBName = "furyint"
)
var (
	// mainPrefix is the single-byte key prefix under which the main store
	// keeps its data; passed to NewPrefixKV to scope all reads.
	mainPrefix = [1]byte{0}
)
// BlockStore is a thin wrapper around the DefaultStore which will be used for inspecting the blocks.
// It embeds *store.DefaultStore so all of its methods are promoted, and adds
// a base height so report generation can start from a configurable block.
type BlockStore struct {
	*store.DefaultStore
	// base is the first block height reports are generated from.
	base uint64
}
// Base returns the block height of the first block the report is generated
// for (the value supplied at construction via newBlockStore).
func (b *BlockStore) Base() uint64 {
	return b.base
}
// getStore opens the furyint KV database located at directory and returns a
// view of it scoped to mainPrefix. The directory path is split at its last
// "/" into a parent path (kept with its trailing separator) and a final
// data-directory element, matching what NewDefaultKVStore expects.
func getStore(directory string) *store.PrefixKV {
	sep := strings.LastIndex(directory, "/")
	baseDirectory := directory[:sep+1]
	dataDirectory := directory[sep+1:]
	baseKV := store.NewDefaultKVStore(baseDirectory, dataDirectory, blockStoreDBName)
	return store.NewPrefixKV(baseKV, mainPrefix[:])
}
// newBlockStore wraps the store backed by kvstore in a BlockStore, loading
// its persisted state first, and records baseHeight as the first height to
// report on. The process exits (log.Fatalf) if the state cannot be loaded.
func newBlockStore(kvstore store.KVStore, baseHeight uint64) *BlockStore {
	// Named "s" rather than "store" so the imported store package is not
	// shadowed inside this function (the original shadowed it).
	s := store.New(kvstore).(*store.DefaultStore)
	if _, err := s.LoadState(); err != nil {
		log.Fatalf("loading state %s", err)
	}
	return &BlockStore{
		DefaultStore: s,
		base:         baseHeight,
	}
}
// Command-line flags.
var (
	// dir is required: the path to the directory containing the furyint database.
	dir = flag.String("data-dir", "", "path to the directory containing the furyint database")
	// csvOut, when non-empty, switches output from the human-readable summary
	// to a raw CSV dump written to this path.
	csvOut = flag.String("csv", "", "dump the extracted latencies as raw csv for use in additional tooling")
	// baseHeight is the first block height included in the report; a value of
	// 0 is normalized to 1 in main.
	baseHeight = flag.Uint64("base-height", 1, "base height to start the report from")
)
// main extracts experiment reports from a furyint block store and either
// dumps the raw per-transaction latencies as CSV (when -csv is set) or
// prints a human-readable summary of each experiment to stdout.
func main() {
	flag.Parse()
	if *dir == "" {
		log.Fatalf("must specify a data-dir")
	}

	// Expand a leading "~/" to the current user's home directory.
	d := strings.TrimPrefix(*dir, "~/")
	if d != *dir {
		h, err := os.UserHomeDir()
		if err != nil {
			panic(err)
		}
		// filepath.Join instead of manual "/" concatenation.
		d = filepath.Join(h, d)
	}

	// Height 0 is not a valid starting block; fall back to the default of 1.
	if *baseHeight == 0 {
		*baseHeight = uint64(1)
	}

	if _, err := os.Stat(d); err != nil {
		panic(err)
	}

	mainKV := getStore(d)
	s := newBlockStore(mainKV, *baseHeight)
	rs, err := report.GenerateFromBlockStore(s)
	if err != nil {
		panic(err)
	}

	if *csvOut != "" {
		cf, err := os.Create(*csvOut)
		if err != nil {
			panic(err)
		}
		// The original leaked this file handle; close it on every exit path.
		defer cf.Close()
		w := csv.NewWriter(cf)
		// WriteAll flushes internally and reports any write or flush error.
		if err := w.WriteAll(toCSVRecords(rs.List())); err != nil {
			panic(err)
		}
		return
	}

	for _, r := range rs.List() {
		fmt.Printf(""+
			"Experiment ID: %s\n\n"+
			"\tConnections: %d\n"+
			"\tRate: %d\n"+
			"\tSize: %d\n\n"+
			"\tTotal Valid Tx: %d\n"+
			"\tTPS: %d\n"+
			"\tTotal Negative Latencies: %d\n"+
			"\tMinimum Latency: %s\n"+
			"\tMaximum Latency: %s\n"+
			"\tAverage Latency: %s\n"+
			"\tStandard Deviation: %s\n\n", r.ID, r.Connections, r.Rate, r.Size, len(r.All), r.TPS, r.NegativeCount, r.Min, r.Max, r.Avg, r.StdDev)
	}
	fmt.Printf("Total Invalid Tx: %d\n", rs.ErrorCount())
}
// toCSVRecords flattens the given reports into CSV records: a single header
// row followed by one row per observed transaction, carrying the experiment
// metadata (id, connections, rate, size) alongside each latency sample.
func toCSVRecords(rs []report.Report) [][]string {
	// Pre-size: one header row plus every transaction across all reports.
	rows := 1
	for i := range rs {
		rows += len(rs[i].All)
	}

	records := make([][]string, 0, rows)
	records = append(records, []string{"experiment_id", "block_time", "duration_ns", "tx_hash", "connections", "rate", "size"})

	for i := range rs {
		r := &rs[i]
		// Format the per-experiment columns once, not per transaction.
		id := r.ID.String()
		conns := strconv.FormatInt(int64(r.Connections), 10)
		rate := strconv.FormatInt(int64(r.Rate), 10)
		size := strconv.FormatInt(int64(r.Size), 10)
		for _, sample := range r.All {
			records = append(records, []string{
				id,
				strconv.FormatInt(sample.BlockTime.UnixNano(), 10),
				strconv.FormatInt(int64(sample.Duration), 10),
				fmt.Sprintf("%X", sample.Hash),
				conns,
				rate,
				size,
			})
		}
	}
	return records
}