workbench: redo chain filtering specification
deepfire committed Jan 17, 2022
1 parent 87c7103 commit 8d46fb2
Showing 16 changed files with 115 additions and 156 deletions.
6 changes: 0 additions & 6 deletions bench/flt-full-k10.json → bench/chain-filters/base-k10.json
@@ -16,10 +16,4 @@
, "contents": true
}
}
, { "tag": "CBlock"
, "contents":
{ "tag": "BFullnessGEq"
, "contents": 0.9
}
}
]
File renamed without changes.
7 changes: 7 additions & 0 deletions bench/chain-filters/no-ebnd.json
@@ -0,0 +1,7 @@
[ { "tag": "CSlot"
, "contents":
{ "tag": "EpSlotGEq"
, "contents": 200
}
}
]
13 changes: 13 additions & 0 deletions bench/chain-filters/no-rewards.json
@@ -0,0 +1,13 @@
[ { "tag": "CSlot"
, "contents":
{ "tag": "EpochSafeIntLEq"
, "contents": 3
}
}
, { "tag": "CSlot"
, "contents":
{ "tag": "EpochSafeIntGEq"
, "contents": 8
}
}
]
13 changes: 13 additions & 0 deletions bench/chain-filters/rewards.json
@@ -0,0 +1,13 @@
[ { "tag": "CSlot"
, "contents":
{ "tag": "EpochSafeIntGEq"
, "contents": 4
}
}
, { "tag": "CSlot"
, "contents":
{ "tag": "EpochSafeIntLEq"
, "contents": 7
}
}
]
7 changes: 7 additions & 0 deletions bench/chain-filters/size-full.json
@@ -0,0 +1,7 @@
[ { "tag": "CBlock"
, "contents":
{ "tag": "BFullnessGEq"
, "contents": 0.9
}
}
]
13 changes: 13 additions & 0 deletions bench/chain-filters/size-mid.json
@@ -0,0 +1,13 @@
[ { "tag": "CBlock"
, "contents":
{ "tag": "BFullnessGEq"
, "contents": 0.1
}
}
, { "tag": "CBlock"
, "contents":
{ "tag": "BFullnessLEq"
, "contents": 0.9
}
}
]
13 changes: 13 additions & 0 deletions bench/chain-filters/size-small.json
@@ -0,0 +1,13 @@
[ { "tag": "CBlock"
, "contents":
{ "tag": "BFullnessGEq"
, "contents": 0.03
}
}
, { "tag": "CBlock"
, "contents":
{ "tag": "BFullnessLEq"
, "contents": 0.1
}
}
]
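Each of these filter files is a JSON array of tagged objects, and paired GEq/LEq entries bound a range (size-small, for instance, keeps blocks between 3% and 10% fullness). A quick way to sanity-check the shape before handing a file to locli — a sketch only, assuming jq is available on PATH:

  # Hypothetical sanity check: the file must be an array of { tag, contents } pairs.
  jq -e 'type == "array" and all(.[]; has("tag") and has("contents"))' \
     bench/chain-filters/size-small.json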
25 changes: 0 additions & 25 deletions bench/flt-full.json

This file was deleted.

31 changes: 0 additions & 31 deletions bench/flt-midsize.json

This file was deleted.

31 changes: 0 additions & 31 deletions bench/flt-small.json

This file was deleted.

92 changes: 34 additions & 58 deletions nix/workbench/analyse.sh
@@ -1,32 +1,40 @@
usage_analyse() {
usage "analyse" "Analyse cluster runs" <<EOF
standard RUN-NAME Standard batch of analyses: block-propagation, and
standard RUN-NAME.. Standard batch of analyses: block-propagation, and
machine-timeline
block-propagation RUN-NAME
block-propagation RUN-NAME..
Block propagation analysis for the entire cluster.
machine-timeline RUN-NAME [MACH-NAME=node-1]
Produce a general performance timeline for MACH-NAME
Performance timeline for MACH-NAME.
chaininfo RUN-NAME Print basic parameters of a run, as seen by locli
chaininfo RUN-NAME Print basic parameters of a run, as seen by locli.
Options of 'analyse' command:
--chain-filters F Read chain filters to apply from the F JSON file
--reanalyse Skip the preparatory steps and launch 'locli' directly
--filters F,F,F.. Comma-separated list of named chain filters: see bench/chain-filters
Note: filter names have no .json suffix
--dump-logobjects Dump the intermediate data: lifted log objects
EOF
}

analyse() {
local time= dump_logobjects= self_args=() locli_args=() prefilter='true' prefilter_jq='false'
local dump_logobjects= force_prefilter= prefilter_jq= self_args=() locli_args=() filters=()
while test $# -gt 0
do case "$1" in
--reanalyse | --re ) prefilter='false'; self_args+=($1);;
--prefilter-jq ) prefilter_jq='true'; self_args+=($1);;
--dump-logobjects ) dump_logobjects='true'; self_args+=($1);;
--chain-filters ) locli_args+=($1 $2); self_args+=($1 $2); shift;;
--force-prefilter ) force_prefilter='true'; self_args+=($1);;
--prefilter-jq ) prefilter_jq='true'; self_args+=($1);;
--filters ) local filter_names=('base')
filter_names+=($(echo $2 | sed 's_,_ _g'))
local filter_paths=(${filter_names[*]/#/"bench/chain-filters/"})
local filter_files=(${filter_paths[*]/%/.json})
for f in ${filter_files[*]}
do test -f "$f" ||
fail "no such filter: $f"; done
locli_args+=(${filter_files[*]/#/--filter })
self_args+=($1 $2); shift;;
* ) break;; esac; shift; done

local op=${1:-$(usage_analyse)}; shift
@@ -45,8 +53,8 @@ case "$op" in
;;
standard | std )
for r in $*
do analyse ${self_args[*]} block-propagation $r
analyse ${self_args[*]} --reanalyse machine-timeline $r
do analyse ${self_args[*]} block-propagation $r
analyse ${self_args[*]} machine-timeline $r
done
;;
block-propagation | bp )
@@ -56,8 +64,6 @@ case "$op" in
local dir=$(run get "$name")
test -n "$dir" || fail "malformed run: $name"

echo "{ \"run\": \"$(jq .meta.tag "$dir"/meta.json --raw-output)\" }"

local adir=$dir/analysis
mkdir -p "$adir"

@@ -66,11 +72,13 @@ case "$op" in
locli analyse substring-keys > "$keyfile"

## 1. enumerate logs, filter by keyfile & consolidate
local logdirs=($(ls -d "$dir"/node-*/ 2>/dev/null) $(ls -d "$dir"/analysis/node-*/ 2>/dev/null))
# "$dir"/node-*/ "$dir"/analysis/node-*/
local logdirs=($(ls -d "$dir"/node-*/ 2>/dev/null))
local logfiles=($(ls "$adir"/logs-node-*.flt.json 2>/dev/null))
# echo logfiles: _${logfiles[*]}_
local prefilter=$(test "$force_prefilter" = 'true' -o -z "${logfiles[*]}" && echo 'true' || echo 'false')

echo "{ \"prefilter\": $prefilter, \"prefilter_jq\": $prefilter_jq }"
if test "$prefilter" = 'true' -o -z "$(ls "$adir"/logs-node-*.flt.json 2>/dev/null)"
echo "{ \"prefilter\": $prefilter }"
if test x$prefilter = xtrue
then
local jq_args=(
--sort-keys
@@ -94,16 +102,15 @@ case "$op" in
wait
fi

echo "{ \"dataSetSizeMB\": $(echo $(($(cat "$adir"/*.flt.json | wc -c) / 1000 / 1000))) }"
locli_args+=(
--genesis "$dir"/genesis-shelley.json
--run-metafile "$dir"/meta.json
## ->
--timeline-pretty "$adir"/block-propagation.txt
--analysis-json "$adir"/block-propagation.json
$(if test -n "$dump_logobjects"
then echo --logobjects-json "$adir"/logs-cluster.logobjects.json; fi)
)
if test -n "$dump_logobjects"; then
locli_args+=(--logobjects-json "$adir"/logs-cluster.logobjects.json); fi

time locli 'analyse' 'block-propagation' \
"${locli_args[@]}" "$adir"/*.flt.json
@@ -112,53 +119,19 @@ case "$op" in
if test $# -gt 0
then analyse ${self_args[*]} block-propagation "$@"; fi;;

grep-filtered-logs | grep | g )
local usage="USAGE: wb analyse $op BLOCK [MACHSPEC=*] [RUN-NAME=current]"
local expr=$1
local mach=${2:-*}
local name=${3:-current}
local dir=$(run get "$name")
local adir=$dir/analysis

grep -h "$expr" "$adir"/logs-$mach.flt.json;;

list-blocks | blocks | bs )
local usage="USAGE: wb analyse $op [RUN-NAME=current]"
local name=${1:-current}
local dir=$(run get "$name")
local adir=$dir/analysis

fgrep -h "TraceForgedBlock" "$adir"/*.flt.json |
jq '{ at: .at, host: .host } * .data | del(.peer) | del(.slot)' -c |
sort | uniq;;

block-propagation-block | bpb )
local usage="USAGE: wb analyse $op BLOCK [RUN-NAME=current]"
local block=$1
local name=${2:-current}
local dir=$(run get "$name")
local adir=$dir/analysis

grep -h "$block" "$adir"/*.flt.json |
grep 'AddBlock\|TraceForgedBlock\|AddedToCurrentChain' |
jq '{ at: .at, host: .host } * .data | del(.peer) | del(.slot)' -c |
sort --stable | uniq;;

machine-timeline | machine | mt )
local usage="USAGE: wb analyse $op [RUN-NAME=current] [MACH-NAME=node-1]"
local name=${1:-current}
local mach=${2:-node-1}
local dir=$(run get "$name")
test -n "$dir" || fail "malformed run: $name"

echo "{ \"run\": \"$(jq .meta.tag "$dir"/meta.json --raw-output)\" }"

local adir=$dir/analysis
mkdir -p "$adir"

## 0. subset what we care about into the keyfile
local keyfile=$adir/substring-keys
locli analyse substring-keys | grep -v 'Temporary modify' > "$keyfile"
locli analyse substring-keys > "$keyfile"

if test "$mach" = 'all'
then local machs=($(run list-hosts $name))
@@ -168,13 +141,16 @@ case "$op" in
do throttle_shell_job_spawns
(
## 1. enumerate logs, filter by keyfile & consolidate
local logs=($(ls "$dir"/$mach/stdout* 2>/dev/null | tac) $(ls "$dir"/$mach/node-*.json 2>/dev/null) $(ls "$dir"/analysis/$mach/node-*.json 2>/dev/null)) consolidated="$adir"/logs-$mach.json
local logs=($(ls "$dir"/$mach/node-*.json 2>/dev/null))
local consolidated="$adir"/logs-$mach.json

test -n "${logs[*]}" ||
fail "no logs for $mach in run $name"

local prefilter=$(test "$force_prefilter" = 'true' -o -z "$(ls "$adir"/logs-node-*.flt.json 2>/dev/null)" && echo 'yes')

echo "{ \"prefilter\": $prefilter }"
if test "$prefilter" = 'true' -o -z "$(ls "$adir"/logs-$mach.json 2>/dev/null)"
if test -n "$prefilter"
then grep -hFf "$keyfile" "${logs[@]}" > "$consolidated"; fi

locli_args+=(
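With this change, filters are selected by name rather than by file path: each name is expanded to bench/chain-filters/NAME.json, the implicit 'base' filter is always prepended, and a missing file fails fast. A hypothetical invocation (the run name 'current' and the particular filter choice are illustrative only):

  # Applies base.json, size-small.json and no-rewards.json from bench/chain-filters/,
  # then runs the block-propagation analysis on the run named 'current'.
  wb analyse --filters size-small,no-rewards block-propagation current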
3 changes: 1 addition & 2 deletions nix/workbench/profiles/defaults.jq
@@ -9,8 +9,6 @@ def era_defaults($era):
## Choice of a cluster run scenario (wb scenario --help):
, scenario: "default"

, tracing_backend: "iohk-monitoring" ## or "trace-dispatcher"

## Cluster topology and composition:
, composition:
{ locations: ["LO"]
@@ -73,6 +71,7 @@ def era_defaults($era):

, node:
{ rts_flags_override: []
, tracing_backend: "iohk-monitoring" ## or "trace-dispatcher"
}

, tolerances:
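The tracing backend selector now lives under the profile's node attribute rather than at the top level, which is what the node-services.nix change below consumes. A hedged way to inspect the resulting value from an evaluated profile (profile.json is a hypothetical file name):

  # Prints either "iohk-monitoring" (the default) or "trace-dispatcher".
  jq -r '.node.tracing_backend' profile.json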
3 changes: 2 additions & 1 deletion nix/workbench/profiles/node-services.nix
@@ -129,6 +129,7 @@ let
TestAlonzoHardForkAtEpoch = 0;
};
}.${profile.value.era};
};
in
backend.finaliseNodeService nodeSpec
{
@@ -142,7 +143,7 @@ let
(if __hasAttr "preset" profile.value
then readJSONMay (./presets + "/${profile.value.preset}/config.json")
else nodeConfigBits.era_setup_hardforks //
nodeConfigBits.tracing.${profile.tracing_backend}));
nodeConfigBits.tracing.${profile.value.node.tracing_backend}));
};

## Given an env config, evaluate it and produce the node service.
