locli & workbench: allow variance summaries to be processed
deepfire committed Apr 1, 2023
1 parent 7352cfc commit e6072a6
Showing 24 changed files with 710 additions and 322 deletions.
1 change: 1 addition & 0 deletions bench/locli/locli.cabal
@@ -138,6 +138,7 @@ library
, unordered-containers
, utf8-string
, vector
, witherable
, cardano-strict-containers ^>= 0.1

executable locli
29 changes: 20 additions & 9 deletions bench/locli/src/Cardano/Analysis/API/Context.hs
@@ -19,7 +19,7 @@ data GenesisSpec
{ delegators :: Word64
, utxo :: Word64
}
deriving (Generic, Show, ToJSON, FromJSON)
deriving (Eq, Generic, Show, ToJSON, FromJSON, NFData)

-- | Partial 'Cardano.Ledger.Shelley.Genesis.ShelleyGenesis'
data Genesis
@@ -34,15 +34,26 @@ data Genesis
, maxKESEvolutions :: Word64
, securityParam :: Word64
}
deriving (Generic, Show, FromJSON, ToJSON)
deriving (Generic, Show, FromJSON, ToJSON, NFData)

genesesSameButTimeP :: Genesis -> Genesis -> Bool
genesesSameButTimeP l r =
((==) `on` activeSlotsCoeff) l r &&
((==) `on` protocolParams) l r &&
((==) `on` networkMagic) l r &&
((==) `on` epochLength) l r &&
((==) `on` slotsPerKESPeriod) l r &&
((==) `on` slotLength) l r &&
((==) `on` maxKESEvolutions) l r &&
((==) `on` securityParam) l r
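
The predicate deliberately ignores the time-dependent fields (notably systemStart), so geneses from separate runs of the same profile compare equal. A hypothetical usage sketch (checkGeneses is illustrative, not part of this commit):

-- Hypothetical helper: when folding several runs into a variance
-- summary, insist that their geneses agree up to time fields.
checkGeneses :: [Genesis] -> Either String Genesis
checkGeneses []     = Left "no runs given"
checkGeneses (g:gs)
  | all (genesesSameButTimeP g) gs = Right g
  | otherwise                      = Left "runs disagree on genesis parameters"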

-- | Partial 'Cardano.Ledger.Shelley.PParams.PParams'
data PParams
= PParams
{ maxTxSize :: Word64
, maxBlockBodySize :: Word64
}
deriving (Generic, Show, FromJSON, ToJSON)
deriving (Eq, Generic, Show, FromJSON, ToJSON, NFData)

data GeneratorProfile
= GeneratorProfile
@@ -54,11 +65,11 @@ data GeneratorProfile
, plutusMode :: Maybe Bool
, plutusLoopScript :: Maybe FilePath
}
deriving (Generic, Show, FromJSON, ToJSON)
deriving (Eq, Generic, Show, FromJSON, ToJSON, NFData)

newtype Commit = Commit { unCommit :: Text } deriving newtype (Show, FromJSON, ToJSON)
newtype Branch = Branch { unBranch :: Text } deriving newtype (Show, FromJSON, ToJSON)
newtype Version = Version { unVersion :: Text } deriving newtype (Show, FromJSON, ToJSON)
newtype Commit = Commit { unCommit :: Text } deriving newtype (Eq, Show, FromJSON, ToJSON, NFData)
newtype Branch = Branch { unBranch :: Text } deriving newtype (Eq, Show, FromJSON, ToJSON, NFData)
newtype Version = Version { unVersion :: Text } deriving newtype (Eq, Show, FromJSON, ToJSON, NFData)

unsafeShortenCommit :: Int -> Commit -> Commit
unsafeShortenCommit n (Commit c) = Commit (T.take n c)
@@ -76,7 +87,7 @@ data Manifest
, mBase :: !Commit
, mPrelude :: !Commit
}
deriving (Generic, Show)
deriving (Eq, Generic, NFData, Show)

unsafeShortenManifest :: Int -> Manifest -> Manifest
unsafeShortenManifest n m@Manifest{..} =
@@ -126,4 +137,4 @@ data Metadata
, era :: Text
, manifest :: Manifest
}
deriving (Generic, Show, FromJSON, ToJSON)
deriving (Generic, NFData, Show, FromJSON, ToJSON)
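
Throughout this module the commit uniformly adds Eq and NFData instances, which the new multi-run variance summaries plausibly need: metadata from several runs gets compared for consistency and fully forced before aggregation. A hedged illustration (not part of this commit) of what the NFData instances enable:

import Control.DeepSeq (force)
import Control.Exception (evaluate)

-- Fully evaluate a run's metadata up front, so lazily deferred
-- parse errors surface before aggregation begins.
forceMetadata :: Metadata -> IO Metadata
forceMetadata = evaluate . force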
11 changes: 10 additions & 1 deletion bench/locli/src/Cardano/Analysis/API/Ground.hs
@@ -31,6 +31,7 @@ import System.FilePath qualified as F
import Cardano.Slotting.Slot (EpochNo(..), SlotNo(..))
import Ouroboros.Network.Block (BlockNo(..))

import Data.CDF
import Data.DataDomain
import Cardano.Util

@@ -68,7 +69,7 @@ instance FromJSONKey Hash where

newtype Count a = Count { unCount :: Int }
deriving (Eq, Generic, Ord, Show)
deriving newtype (FromJSON, Num, ToJSON)
deriving newtype (Divisible, FromJSON, Num, Real, ToJSON)
deriving anyclass NFData

countMap :: Map.Map a b -> Count a
@@ -164,6 +165,14 @@ newtype OutputFile
= OutputFile { unOutputFile :: FilePath }
deriving (Show, Eq)

---
--- Orphans
---
deriving newtype instance Real BlockNo
deriving newtype instance Divisible BlockNo
deriving newtype instance Real SlotNo
deriving newtype instance Divisible SlotNo
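
These standalone newtype derivations borrow the underlying Word64 instances of BlockNo and SlotNo; they live here as orphans, presumably so Data.CDF itself need not depend on the ouroboros types. A minimal self-contained analogue (SlotIx is a hypothetical stand-in) showing the pragmas such declarations require:

{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE StandaloneDeriving #-}

import Data.Word (Word64)

newtype SlotIx = SlotIx Word64
  deriving (Eq, Ord, Show)
  deriving newtype Num

-- Written standalone, exactly as in the hunk above:
deriving newtype instance Real SlotIx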

---
--- Readers
---
68 changes: 35 additions & 33 deletions bench/locli/src/Cardano/Analysis/API/Metrics.hs
@@ -52,8 +52,8 @@ sumFieldsReport =
, "ddFiltered.sumStartSpread", "ddFiltered.sumStopSpread"
, "sumDomainSlots", "sumDomainBlocks", "sumBlocksRejected" ]

instance TimelineFields SummaryOne where
data TimelineComments SummaryOne
instance (KnownCDF f) => TimelineFields (Summary f) where
data TimelineComments (Summary f)
deriving Show

timelineFields =
@@ -66,8 +66,8 @@ instance TimelineFields SummaryOne where
"Date of cluster genesis systemStart"

<> fScalar "time.systemStart" W8 Tim (ITime $ systemStart.sumGenesis)
"Cluster system start date"
"Date of cluster genesis systemStart"
"Cluster system start time"
"Time-of-day portion of cluster genesis systemStart"

<> fScalar "batch" Wno Id (IText $ batch.sumMeta)
"Run batch"
@@ -109,31 +109,33 @@ instance TimelineFields SummaryOne where
"Starting UTxO set size"
"Extra UTxO set size at the beginning of the benchmark"

<> fScalar "add_tx_size" W6 B (IWord64 $ add_tx_size.sumGenerator)
<> fScalar "add_tx_size" W6 B (IWord64 $ add_tx_size.sumWorkload
)
"Extra tx payload"
""

<> fScalar "inputs_per_tx" W3 Cnt (IWord64 $ inputs_per_tx.sumGenerator)
<> fScalar "inputs_per_tx" W3 Cnt (IWord64 $ inputs_per_tx.sumWorkload)
"Tx inputs"
""

<> fScalar "outputs_per_tx" W3 Cnt (IWord64 $ outputs_per_tx.sumGenerator)
<> fScalar "outputs_per_tx" W3 Cnt (IWord64 $ outputs_per_tx.sumWorkload)
"Tx Outputs"
""

<> fScalar "tps" W7 Hz (IFloat $ tps.sumGenerator)
<> fScalar "tps" W7 Hz (IFloat $ tps.sumWorkload)
"TPS"
"Offered load, transactions per second"

<> fScalar "tx_count" W12 Cnt (IWord64 $ tx_count.sumGenerator)
<> fScalar "tx_count" W12 Cnt (IWord64 $ tx_count.sumWorkload)
"Transaction count"
"Number of transactions prepared for submission, but not necessarily submitted"

<> fScalar "plutusScript" Wno Id (IText $ T.pack.fromMaybe "---".plutusLoopScript.sumGenerator)
<> fScalar "plutusScript" Wno Id (IText $ T.pack.fromMaybe "---".plutusLoopScript.sumWorkload)
"Plutus script"
"Name of th Plutus script used for smart contract workload generation, if any"

<> fScalar "sumHosts" W4 Cnt (IInt $ unCount.sumHosts)
<> fScalar "sumHosts" W4 Cnt
(IInt $ unCount.arityProj cdfMedian.sumHosts)
"Machines"
"Number of machines under analysis"

@@ -161,47 +163,47 @@ instance TimelineFields SummaryOne where
"Host log line rate, Hz"
""

<> fScalar "sumLogObjectsTotal" W12 Cnt (IInt $ unCount.sumLogObjectsTotal)
<> fScalar "sumLogObjectsTotal" W12 Cnt (IInt $ unCount.arityProj cdfMedian.sumLogObjectsTotal)
"Total log objects analysed"
""

<> fScalar "ddRawCount.sumDomainTime" W12 Sec (IInt $ ddRawCount.sumDomainTime)
<> fScalar "ddRawCount.sumDomainTime" W12 Sec (IInt $ arityProj cdfMedian.ddRawCount.sumDomainTime)
"Run time, s"
""

<> fScalar "ddFilteredCount.sumDomainTime" W12 Sec (IInt $ ddFilteredCount.sumDomainTime)
<> fScalar "ddFilteredCount.sumDomainTime" W12 Sec (IInt $ arityProj cdfMedian.ddFilteredCount.sumDomainTime)
"Analysed run duration, s"
""

<> fScalar "dataDomainFilterRatio.sumDomainTime" W4 Rto (IFloat $ dataDomainFilterRatio.sumDomainTime)
<> fScalar "dataDomainFilterRatio.sumDomainTime" W4 Rto (IFloat $ dataDomainFilterRatio (arityProj cdfMedian).sumDomainTime)
"Run time efficiency"
""

<> fScalar "ddRaw.sumStartSpread" W9 Sec (IDeltaT$ intvDurationSec.ddRaw.sumStartSpread)
<> fScalar "ddRaw.sumStartSpread" W9 Sec (IDeltaT$ intvDurationSec.fmap (fromRUTCTime . arityProj cdfMedian).ddRaw.sumStartSpread)
"Node start spread, s"
""

<> fScalar "ddRaw.sumStopSpread" W9 Sec (IDeltaT$ intvDurationSec.ddRaw.sumStopSpread)
<> fScalar "ddRaw.sumStopSpread" W9 Sec (IDeltaT$ intvDurationSec.fmap (fromRUTCTime . arityProj cdfMedian).ddRaw.sumStopSpread)
"Node stop spread, s"
""

<> fScalar "ddFiltered.sumStartSpread" W9 Sec (IDeltaT$ maybe 0 intvDurationSec.ddFiltered.sumStartSpread)
<> fScalar "ddFiltered.sumStartSpread" W9 Sec (IDeltaT$ maybe 0 (intvDurationSec.fmap (fromRUTCTime . arityProj cdfMedian)).ddFiltered.sumStartSpread)
"Perf analysis start spread, s"
""

<> fScalar "ddFiltered.sumStopSpread" W9 Sec (IDeltaT$ maybe 0 intvDurationSec.ddFiltered.sumStopSpread)
<> fScalar "ddFiltered.sumStopSpread" W9 Sec (IDeltaT$ maybe 0 (intvDurationSec.fmap (fromRUTCTime . arityProj cdfMedian)).ddFiltered.sumStopSpread)
"Perf analysis stop spread, s"
""

<> fScalar "sumDomainSlots" W12 Slo (IInt $ ddFilteredCount.sumDomainSlots)
<> fScalar "sumDomainSlots" W12 Slo (IInt $ floor.arityProj cdfMedian.cdfAverage.ddFilteredCount.sumDomainSlots)
"Slots analysed"
""

<> fScalar "sumDomainBlocks" W10 Blk (IInt $ ddFilteredCount.sumDomainBlocks)
<> fScalar "sumDomainBlocks" W10 Blk (IInt $ arityProj cdfMedian.ddFilteredCount.sumDomainBlocks)
"Blocks analysed"
""

<> fScalar "sumBlocksRejected" W10 Cnt (IInt $ unCount . sumBlocksRejected)
<> fScalar "sumBlocksRejected" W10 Cnt (IInt $ unCount.arityProj cdfMedian.sumBlocksRejected)
"Blocks rejected"
""
-- fieldJSONOverlay f = (:[]) . tryOverlayFieldDescription f
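
The recurring rewrite in this hunk (unCount.sumHosts becoming unCount.arityProj cdfMedian.sumHosts, and likewise for the other scalar cells) follows from Summary now being parameterized over a CDF arity f: each field holds either a single run's value or a distribution across runs, and scalar report cells project a representative (here the median) out of either shape. A minimal sketch of that pattern, using assumed, simplified stand-ins rather than the actual Data.CDF definitions:

-- Assumed, simplified stand-ins for the actual Data.CDF machinery:
newtype I   a = I   { unI :: a }        -- arity one: a single run
data    CDF a = CDF { cdfMedian :: a }  -- arity many: a distribution

-- Collapse either shape to a scalar, given a way to pick a
-- representative value out of a CDF.
class KnownCDF f where
  arityProj :: (CDF a -> a) -> f a -> a

instance KnownCDF I   where arityProj _    = unI
instance KnownCDF CDF where arityProj proj = proj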
@@ -632,17 +634,17 @@ instance TimelineFields (SlotStats NominalDiffTime) where

-- * Instances, depending on the metrics' instances:
--
instance (ToJSON (f NominalDiffTime), ToJSON (f Int), ToJSON (f Double), ToJSON (f (Count BlockEvents)), ToJSON (f (DataDomain SlotNo)), ToJSON (f (DataDomain BlockNo))) => ToJSON (BlockProp f) where
toJSON x = AE.genericToJSON AE.defaultOptions x
& \case
Object o -> Object $ processFieldOverlays x o
_ -> error "Heh, serialised BlockProp to a non-Object."

instance (ToJSON (a Double), ToJSON (a Int), ToJSON (a NominalDiffTime), ToJSON (a (DataDomain UTCTime)), ToJSON (a Word64), ToJSON (a (DataDomain SlotNo)), ToJSON (a (DataDomain BlockNo))) => ToJSON (MachPerf a) where
toJSON x = AE.genericToJSON AE.defaultOptions x
& \case
Object o -> Object $ processFieldOverlays x o
_ -> error "Heh, serialised BlockProp to a non-Object."
-- instance (ToJSON (f NominalDiffTime), ToJSON (f Int), ToJSON (f Double), ToJSON (f (Count BlockEvents)), ToJSON (f (DataDomain f SlotNo)), ToJSON (f (DataDomain BlockNo))) => ToJSON (BlockProp f) where
-- toJSON x = AE.genericToJSON AE.defaultOptions x
-- & \case
-- Object o -> Object $ processFieldOverlays x o
-- _ -> error "Heh, serialised BlockProp to a non-Object."

-- instance (ToJSON (a Double), ToJSON (a Int), ToJSON (a NominalDiffTime), ToJSON (a (DataDomain UTCTime)), ToJSON (a Word64), ToJSON (a (DataDomain SlotNo)), ToJSON (a (DataDomain BlockNo))) => ToJSON (MachPerf a) where
-- toJSON x = AE.genericToJSON AE.defaultOptions x
-- & \case
-- Object o -> Object $ processFieldOverlays x o
-- _ -> error "Heh, serialised BlockProp to a non-Object."

deriving newtype instance ToJSON MultiClusterPerf

25 changes: 25 additions & 0 deletions bench/locli/src/Cardano/Analysis/API/Run.hs
@@ -7,11 +7,13 @@ import Cardano.Prelude

import Control.Monad (fail)
import Data.Aeson qualified as Aeson
import Data.Aeson ((.=))

import Cardano.Util
import Cardano.Analysis.API.ChainFilter
import Cardano.Analysis.API.Context
import Cardano.Analysis.API.Ground
import Cardano.Analysis.API.Types

data AnalysisCmdError
= AnalysisCmdError !Text
@@ -58,6 +60,29 @@ instance FromJSON RunPartial where
genesis = ()
pure Run{..}

-- | Given a Summary object,
-- produce a JSON value readable by the above RunPartial FromJSON instance.
-- Keep the two in sync; better still, automate the correspondence so
-- manual syncing isn't necessary.
summaryMetaJson :: Summary f -> Value
summaryMetaJson Summary{sumMeta=Metadata{..}, ..} =
object [ "meta" .= meta ]
where meta =
object $
-- keep in sync with 'data Metadata'
[ "tag" .= tag
, "batch" .= batch
, "profile" .= profile
, "era" .= era
, "manifest" .= manifest
] <>
-- keep in sync with the above instance
[ "profile_content" .=
object
[ "generator" .= toJSON sumWorkload
, "genesis" .= toJSON sumGenesisSpec
]
]
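
Since the function yields an Aeson Value, the already-imported Data.Aeson can persist it as a meta file that the RunPartial instance above reads back. A hypothetical helper (the name is illustrative, not part of this commit):

-- Hypothetical: write a summary's reconstructed metadata to disk.
writeSummaryMeta :: FilePath -> Summary f -> IO ()
writeSummaryMeta fp = Aeson.encodeFile fp . summaryMetaJson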

readRun :: JsonInputFile Genesis -> JsonInputFile RunPartial -> ExceptT AnalysisCmdError IO Run
readRun shelleyGenesis runmeta = do
runPartial <- readJsonData runmeta (RunMetaParseError runmeta)
