diff --git a/Makefile b/Makefile index fefcd26e53e..5449b999d87 100644 --- a/Makefile +++ b/Makefile @@ -39,7 +39,7 @@ profiles: profile-names: @./nix/workbench/wb profile-names -CLUSTER_PROFILE = default-alzo +CLUSTER_PROFILE ?= default-alzo CLUSTER_ARGS_EXTRA ?= cluster-shell: diff --git a/bench/script/probe.sh b/bench/script/probe.sh new file mode 100644 index 00000000000..5cb5a2bed47 --- /dev/null +++ b/bench/script/probe.sh @@ -0,0 +1,42 @@ +#!/usr/bin/env bash + +hostmap=$(jq ' + .public_ip + | values + | map({ "\(.hostname)": .public_ip}) + | add' last-meta.json) + +echo 'obtaining latency matrix for the hostmap:' +jq . -C <<<$hostmap + +nixops ssh-for-each --parallel -- " + self=\$(hostname) + + function probe() { + local host=\$1 ip=\$2 + + ping -qAc21 \$ip | + grep 'rtt\|transmitted' | + sed 's_, \|/_\n_g' | + sed 's_ packets transmitted\| received\| packet loss\|time \|rtt min\|avg\|max\|mdev = \|ipg\|ewma \| ms\|ms\|%__g' | + grep -v '^$' | + jq '{ source: \"'\$self'\" + , target: \"'\$host'\" + + , sent: .[0] + , received: .[1] + , percents_lost: .[2] + , duration_ms: .[3] + , ipg: .[8] + , ewma: .[9] + + , rtt: { min: .[4], avg: .[5], max: .[6], mdev: .[7] } + }' --slurp --compact-output + } + + hostmap=${hostmap@Q} + + for host in \$(jq 'keys | .[]' <<<\$hostmap --raw-output) + do ip=\$(jq '.[\$host]' --arg host \$host <<<\$hostmap --raw-output) + probe \$host \$ip\ & + done" diff --git a/nix/supervisord-cluster/default.nix b/nix/supervisord-cluster/default.nix index f63a91f7f1a..83ba00590d4 100644 --- a/nix/supervisord-cluster/default.nix +++ b/nix/supervisord-cluster/default.nix @@ -53,7 +53,7 @@ let { port, ... }: cfg: recursiveUpdate cfg ({ AlonzoGenesisFile = "../genesis/alonzo-genesis.json"; - ShelleyGenesisFile = "../genesis/genesis.json"; + ShelleyGenesisFile = "../genesis.json"; ByronGenesisFile = "../genesis/byron/genesis.json"; } // optionalAttrs enableEKG { hasEKG = port + supervisord.portShiftEkg; diff --git a/nix/workbench/analyse.sh b/nix/workbench/analyse.sh index bbd51369060..f2eaa6b5ce1 100644 --- a/nix/workbench/analyse.sh +++ b/nix/workbench/analyse.sh @@ -1,70 +1,118 @@ usage_analyse() { usage "analyse" "Analyse cluster runs" < "$keyfile" - cat >>"$keyfile" < \ - "$adir"/logs-$(basename "$d").flt.json & - done - wait - msg "log sizes: (files: $(ls "$adir"/*.flt.json | wc -l), lines: $(cat "$adir"/*.flt.json | wc -l))" + ## 1. enumerate logs, filter by keyfile & consolidate + local logdirs=("$dir"/node-*/ "$dir"/analysis/node-*/) + + if test -z "$skip_preparation" -o -z "$(ls "$adir"/logs-node-*.flt.json 2>/dev/null)" + then + msg "filtering logs in: $dir/node-* " + local jq_args=( + --sort-keys + --compact-output + $(wb backend lostream-fixup-jqargs "$dir") + ' delpaths([["app"],["env"],["loc"],["msg"],["ns"],["sev"]]) + '"$(wb backend lostream-fixup-jqexpr)" + ) + for d in "${logdirs[@]}" + do local logfiles="$(ls "$d"/stdout* 2>/dev/null | tac) $(ls "$d"/node-*.json 2>/dev/null)" + if test -z "$logfiles" + then msg "no logs in $d, skipping.."; fi + grep -hFf "$keyfile" $logfiles | + jq "${jq_args[@]}" --arg dirHostname "$(basename "$d")" \ + > "$adir"/logs-$(basename "$d").flt.json & + done + wait + fi + + msg "log sizes: (files: $(ls "$adir"/*.flt.json 2>/dev/null | wc -l), lines: $(cat "$adir"/*.flt.json | wc -l))" msg "analysing.." 
local locli_args=( - --genesis "$dir"/genesis/genesis.json + --genesis "$dir"/genesis.json --run-metafile "$dir"/meta.json ## -> - # --logobjects-json "$adir"/logs-cluster.logobjects.json - --analysis-json "$adir"/block-event-stream.json + --timeline-pretty "$adir"/block-propagation.txt + --analysis-json "$adir"/block-propagation.json ) + if test -n "$dump_logobjects"; then + locli_args+=(--logobjects-json "$adir"/logs-cluster.logobjects.json); fi - locli 'analyse' 'block-propagation' \ + ${time} locli 'analyse' 'block-propagation' \ "${locli_args[@]}" "$adir"/*.flt.json;; + grep-filtered-logs | grep | g ) + local usage="USAGE: wb analyse $op BLOCK [MACHSPEC=*] [RUN-NAME=current]" + local expr=$1 + local mach=${2:-*} + local name=${3:-current} + local dir=$(run get "$name") + local adir=$dir/analysis + + grep -h "$expr" "$adir"/logs-$mach.flt.json;; + + list-blocks | blocks | bs ) + local usage="USAGE: wb analyse $op [RUN-NAME=current]" + local name=${1:-current} + local dir=$(run get "$name") + local adir=$dir/analysis + + fgrep -h "TraceForgedBlock" "$adir"/*.flt.json | + jq '{ at: .at, host: .host } * .data | del(.peer) | del(.slot)' -c | + sort | uniq;; + + block-propagation-block | bpb ) + local usage="USAGE: wb analyse $op BLOCK [RUN-NAME=current]" + local block=$1 + local name=${2:-current} + local dir=$(run get "$name") + local adir=$dir/analysis + + grep -h "$block" "$adir"/*.flt.json | + grep 'AddBlock\|TraceForgedBlock\|AddedToCurrentChain' | + jq '{ at: .at, host: .host } * .data | del(.peer) | del(.slot)' -c | + sort --stable | uniq;; + machine-timeline | machine | mt ) local usage="USAGE: wb analyse $op [RUN-NAME=current] [MACH-NAME=node-1]" local name=${1:-current} @@ -72,34 +120,58 @@ EOF local dir=$(run get "$name") local adir=$dir/analysis + msg "analysing run $(jq .meta.tag "$dir"/meta.json --raw-output)" mkdir -p "$adir" ## 0. subset what we care about into the keyfile local keyfile=$adir/substring-keys locli analyse substring-keys | grep -v 'Temporary modify' > "$keyfile" - ## 1. enumerate logs, filter by keyfile & consolidate - local logs=("$dir"/$mach/stdout) consolidated="$adir"/logs-$mach.json - grep -hFf "$keyfile" "${logs[@]}" > "$consolidated" - msg "analysing logs of: $mach (lines: $(wc -l "$consolidated"))" - - local locli_args=( - --genesis "$dir"/genesis/genesis.json - --run-metafile "$dir"/meta.json - ## -> - --logobjects-json "$adir"/logs-$mach.logobjects.json - --slotstats-json "$adir"/logs-$mach.slotstats.json - --timeline-pretty "$adir"/logs-$mach.timeline.txt - --stats-csv "$adir"/logs-$mach.stats.csv - --analysis-json "$adir"/logs-$mach.analysis.json - # --timeline-csv "$adir"/logs-$mach.timeline.csv - # --cpu-spans-histogram-png "$adir"/logs-"$mach".cpu85-span-lens.png - # --derived-vectors-0-csv "$adir"/logs-$mach".derived.1.csv - # --derived-vectors-1-csv "$adir"/logs-$mach.derived.1.csv - ) + if test "$mach" = 'all' + then local machs=($(wb run list-hosts $name)) + else local machs=($mach); fi + + local num_jobs="\j" + local num_threads=$(grep processor /proc/cpuinfo | wc -l) + for mach in ${machs[*]} + do ## Limit parallelism: + sleep 0.5s + while ((${num_jobs@P} >= num_threads - 4)) + do wait -n; sleep 0.$(((RANDOM % 5) + 1))s; done + ( + ## 1. 
enumerate logs, filter by keyfile & consolidate + local logs=($(ls "$dir"/$mach/stdout* 2>/dev/null | tac) $(ls "$dir"/$mach/node-*.json 2>/dev/null) $(ls "$dir"/analysis/$mach/node-*.json 2>/dev/null)) consolidated="$adir"/logs-$mach.json + + if test -z "${logs[*]}" + then msg "no logs for $mach in run $name"; continue; fi + + if test -z "$skip_preparation" -o -z "$(ls "$adir"/logs-$mach.json 2>/dev/null)" + then grep -hFf "$keyfile" "${logs[@]}" > "$consolidated"; fi + + msg "analysing logs of: $mach (lines: $(wc -l "$consolidated"))" + local locli_args=( + --genesis "$dir"/genesis.json + --run-metafile "$dir"/meta.json + ## -> + --timeline-pretty "$adir"/logs-$mach.timeline.txt + --stats-csv "$adir"/logs-$mach.stats.csv + --analysis-json "$adir"/logs-$mach.analysis.json + # --slotstats-json "$adir"/logs-$mach.slotstats.json + # --timeline-csv "$adir"/logs-$mach.timeline.csv + # --cpu-spans-histogram-png "$adir"/logs-"$mach".cpu85-span-lens.png + # --derived-vectors-0-csv "$adir"/logs-$mach".derived.1.csv + # --derived-vectors-1-csv "$adir"/logs-$mach.derived.1.csv + ) + if test -n "$dump_logobjects"; then + locli_args+=(--logobjects-json "$adir"/logs-$mach.logobjects.json); fi + + ${time} locli 'analyse' 'machine-timeline' \ + "${locli_args[@]}" "$consolidated" + ) & + done - locli 'analyse' 'machine-timeline' \ - "${locli_args[@]}" "$consolidated";; + wait + msg "analysis machine-timeline: All done.";; * ) usage_analyse;; esac } diff --git a/nix/workbench/backend.sh b/nix/workbench/backend.sh index f2cd6d81ffa..7bf848dd87f 100644 --- a/nix/workbench/backend.sh +++ b/nix/workbench/backend.sh @@ -33,7 +33,8 @@ case "${op}" in record-extended-env-config ) $WORKBENCH_BACKEND "$@";; describe-run ) $WORKBENCH_BACKEND "$@";; pre-run-hook ) $WORKBENCH_BACKEND "$@";; - start-run ) $WORKBENCH_BACKEND "$@";; + start-run ) cp "$2"/genesis/genesis.json "$2"/genesis.json + $WORKBENCH_BACKEND "$@";; lostream-fixup-jqargs ) $WORKBENCH_BACKEND "$@";; lostream-fixup-jqexpr ) $WORKBENCH_BACKEND "$@";; diff --git a/nix/workbench/default.nix b/nix/workbench/default.nix index b22cc11cf70..8053b416679 100644 --- a/nix/workbench/default.nix +++ b/nix/workbench/default.nix @@ -69,7 +69,7 @@ let runWorkbenchJqOnly = name: command: runCommand name {} '' - ${workbench' [jq]}/bin/wb ${command} > $out + ${workbench' [jq moreutils]}/bin/wb ${command} > $out ''; runJq = @@ -133,7 +133,8 @@ let function workbench-prebuild-executables() { ${optionalString useCabalRun '' - git log -n1 --alternate-refs --pretty=format:"%Cblue%h %Cred%cr %Cgreen%D %Cblue%s%Creset" + git log -n1 --alternate-refs --pretty=format:"%Cred%cr %Cblue%h %Cgreen%D %Cblue%s%Creset" --color | sed "s/^/$(git diff --exit-code --quiet && echo ' ' || echo 'local changes + ')/" + echo echo -n "workbench: prebuilding executables (because of useCabalRun):" for exe in cardano-cli cardano-node cardano-topology locli do echo -n " $exe" @@ -171,7 +172,7 @@ let with envArgs; rec { inherit cardanoLib stateDir; - JSON = runWorkbench "environment.json" + JSON = runWorkbenchJqOnly "environment.json" ''env compute-config \ --cache-dir "${cacheDir}" \ --base-port ${toString basePort} \ @@ -194,7 +195,7 @@ let profiles = genAttrs profile-names mkProfile; profilesJSON = - runWorkbench "all-profiles.json" "profiles generate-all"; + runWorkbenchJqOnly "all-profiles.json" "profiles generate-all"; }; initialiseProfileRunDirShellScript = diff --git a/nix/workbench/locli/locli.cabal b/nix/workbench/locli/locli.cabal index 2ae5eb16a4e..765af3021df 100644 --- 
a/nix/workbench/locli/locli.cabal +++ b/nix/workbench/locli/locli.cabal @@ -20,15 +20,17 @@ library Cardano.Analysis.Profile Cardano.Analysis.TopHandler - Cardano.Unlog.BlockProp + Cardano.Analysis.BlockProp + Cardano.Analysis.Driver + Cardano.Analysis.MachTimeline + Cardano.Unlog.Commands Cardano.Unlog.LogObject Cardano.Unlog.Parsers + Cardano.Unlog.Render Cardano.Unlog.Resources Cardano.Unlog.Run Cardano.Unlog.SlotStats - Cardano.Unlog.Summary - Cardano.Unlog.Timeline other-modules: Paths_locli @@ -54,6 +56,7 @@ library , process , scientific , split + , statistics , template-haskell , text , text-short diff --git a/nix/workbench/locli/src/Cardano/Analysis/BlockProp.hs b/nix/workbench/locli/src/Cardano/Analysis/BlockProp.hs new file mode 100644 index 00000000000..cde6865e9f5 --- /dev/null +++ b/nix/workbench/locli/src/Cardano/Analysis/BlockProp.hs @@ -0,0 +1,701 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE DerivingVia #-} +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StrictData #-} +{-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing #-} +{-# OPTIONS_GHC -Wno-unused-imports -Wno-partial-fields -Wno-unused-matches -Wno-deprecations -Wno-unused-local-binds -Wno-incomplete-record-updates #-} +{- HLINT ignore "Use head" -} +{- HLINT ignore "Avoid lambda" -} +module Cardano.Analysis.BlockProp (module Cardano.Analysis.BlockProp) where + +import Prelude (String, (!!), error, head, id, show, tail) +import Cardano.Prelude hiding (head, show) + +import Control.Arrow ((***), (&&&)) +import Control.Concurrent.Async (mapConcurrently) +import Data.Aeson (ToJSON(..), FromJSON(..)) +import qualified Data.Aeson as AE +import Data.Bifunctor +import Data.Function (on) +import Data.List (dropWhileEnd, intercalate) +import Data.Map.Strict (Map) +import qualified Data.Map.Strict as Map +import Data.Maybe (catMaybes, mapMaybe, isNothing) +import Data.Set (Set) +import qualified Data.Set as Set +import qualified Data.Text as T +import Data.Tuple (swap) +import Data.Vector (Vector) +import qualified Data.Vector as Vec + +import Data.Time.Clock (NominalDiffTime, UTCTime, addUTCTime, diffUTCTime) + +import Text.Printf (printf) + +import Ouroboros.Network.Block (BlockNo(..), SlotNo(..)) + +import Data.Accum +import Data.Distribution +import Cardano.Analysis.Profile +import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Unlog.Render +import Cardano.Unlog.Resources +import Cardano.Unlog.SlotStats + +import qualified Debug.Trace as D + + +data BlockPropagation + = BlockPropagation + { bpForgerForges :: !(Distribution Float NominalDiffTime) + , bpForgerAdoptions :: !(Distribution Float NominalDiffTime) + , bpForgerAnnouncements :: !(Distribution Float NominalDiffTime) + , bpForgerSends :: !(Distribution Float NominalDiffTime) + , bpPeerNotices :: !(Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) + , bpPeerRequests :: !(Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) + , bpPeerFetches :: !(Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) + , bpPeerAdoptions :: !(Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) + , bpPeerAnnouncements :: !(Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) + , bpPeerSends :: !(Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) + , 
bpChainBlockEvents :: [BlockEvents] + } + deriving Show + +instance RenderDistributions BlockPropagation where + rdFields = + -- Width LeftPad + [ Field 6 0 "forged" (f!!0) "Forge" $ DDeltaT bpForgerForges + , Field 6 0 "fAdopted" (f!!1) "Adopt" $ DDeltaT bpForgerAdoptions + , Field 6 0 "fAnnounced" (f!!2) "Announ" $ DDeltaT bpForgerAnnouncements + , Field 6 0 "fSendStart" (f!!3) "Sendin" $ DDeltaT bpForgerSends + , Field 4 1 "noticedVal" (p!!0) " Noti" $ DDeltaT (fst . bpPeerNotices) + , Field 4 0 "noticedCoV" (p!!1) "ced " $ DDeltaT (snd . bpPeerNotices) + , Field 4 1 "requestedVal" (p!!2) "Reque" $ DDeltaT (fst . bpPeerRequests) + , Field 4 0 "requestedVal" (p!!3) "sted " $ DDeltaT (snd . bpPeerRequests) + , Field 4 1 "fetchedVal" (p!!4) " Fetc" $ DDeltaT (fst . bpPeerFetches) + , Field 4 0 "fetchedCoV" (p!!5) "hed " $ DDeltaT (snd . bpPeerFetches) + , Field 4 1 "pAdoptedVal" (p!!6) " Adop" $ DDeltaT (fst . bpPeerAdoptions) + , Field 4 0 "pAdoptedCoV" (p!!7) "ted " $ DDeltaT (snd . bpPeerAdoptions) + , Field 4 1 "pAnnouncedVal" (p!!8) "Annou" $ DDeltaT (fst . bpPeerAnnouncements) + , Field 4 0 "pAnnouncedCoV" (p!!9) "nced " $ DDeltaT (snd . bpPeerAnnouncements) + , Field 4 1 "pSendStartVal" (p!!10) " Send" $ DDeltaT (fst . bpPeerSends) + , Field 4 0 "pSendStartCoV" (p!!11) "ing " $ DDeltaT (snd . bpPeerSends) + ] + where + f = nChunksEachOf 4 7 "Forger event Δt:" + p = nChunksEachOf 12 5 "Peer event Δt, and coefficients of variation:" + +instance AE.ToJSON BlockPropagation where + toJSON BlockPropagation{..} = AE.Array $ Vec.fromList + [ extendObject "kind" "forgerForges" $ toJSON bpForgerForges + , extendObject "kind" "forgerAdoptions" $ toJSON bpForgerAdoptions + , extendObject "kind" "forgerAnnouncements" $ toJSON bpForgerAnnouncements + , extendObject "kind" "forgerSends" $ toJSON bpForgerSends + , extendObject "kind" "peerNoticesMean" $ toJSON (fst bpPeerNotices) + , extendObject "kind" "peerNoticesCoV" $ toJSON (snd bpPeerNotices) + , extendObject "kind" "peerRequestsMean" $ toJSON (fst bpPeerRequests) + , extendObject "kind" "peerRequestsCoV" $ toJSON (snd bpPeerRequests) + , extendObject "kind" "peerFetchesMean" $ toJSON (fst bpPeerFetches) + , extendObject "kind" "peerFetchesCoV" $ toJSON (snd bpPeerFetches) + , extendObject "kind" "peerAdoptionsMean" $ toJSON (fst bpPeerAdoptions) + , extendObject "kind" "peerAdoptionsCoV" $ toJSON (snd bpPeerAdoptions) + , extendObject "kind" "peerAnnouncementsMean" $ toJSON (fst bpPeerAnnouncements) + , extendObject "kind" "peerAnnouncementsCoV" $ toJSON (snd bpPeerAnnouncements) + , extendObject "kind" "peerSendsMean" $ toJSON (fst bpPeerSends) + , extendObject "kind" "peerSendsCoV" $ toJSON (snd bpPeerSends) + ] + +data BPError + = BPError + { eBlock :: !Hash + , eFile :: !(Maybe FilePath) + , eLO :: !(Maybe LogObject) + , eDesc :: !BPErrorKind + } + deriving (FromJSON, Generic, Show, ToJSON) + +data BPErrorKind + = BPEBefore !Phase !Phase + | BPEUnexpectedForObserver !Phase + | BPEUnexpectedForForger !Phase + | BPEUnexpectedAsFirst !Phase + | BPEDuplicateForge + | BPEMissingPhase !Phase + | BPENegativePhase !Phase !NominalDiffTime + | BPEFork !Hash + deriving (FromJSON, Generic, Show, ToJSON) + +bpeIsFork, bpeIsMissingAny, bpeIsNegativeAny :: BPError -> Bool +bpeIsFork BPError{eDesc=BPEFork{}} = True +bpeIsFork _ = False +bpeIsMissingAny BPError{eDesc=BPEMissingPhase{}} = True +bpeIsMissingAny _ = False +bpeIsNegativeAny BPError{eDesc=BPENegativePhase{}} = True +bpeIsNegativeAny _ = False + +bpeIsMissing, bpeIsNegative :: Phase -> BPError 
-> Bool +bpeIsMissing p BPError{eDesc=BPEMissingPhase p'} = p == p' +bpeIsMissing _ _ = False +bpeIsNegative p BPError{eDesc=BPENegativePhase p' _} = p == p' +bpeIsNegative _ _ = False + +data Phase + = Notice + | Request + | Fetch + | Forge + | Acquire + | Adopt + | Announce + | Send + deriving (FromJSON, Eq, Generic, Ord, Show, ToJSON) + +-- | Block's events, as seen by its forger. +data ForgerEvents a + = ForgerEvents + { bfeHost :: !Host + , bfeBlock :: !Hash + , bfeBlockPrev :: !Hash + , bfeBlockNo :: !BlockNo + , bfeSlotNo :: !SlotNo + , bfeSlotStart :: !SlotStart + , bfeForged :: !(Maybe a) + , bfeAdopted :: !(Maybe a) + , bfeChainDelta :: !Int + , bfeAnnounced :: !(Maybe a) + , bfeSending :: !(Maybe a) + , bfeErrs :: [BPError] + } + deriving (Generic, AE.FromJSON, AE.ToJSON, Show) + +type ForgerEventsAbs = ForgerEvents UTCTime +type ForgerEventsRel = ForgerEvents NominalDiffTime + +bfePrevBlock :: ForgerEvents a -> Maybe Hash +bfePrevBlock x = case bfeBlockNo x of + 0 -> Nothing + _ -> Just $ bfeBlockPrev x + +-- | Block's events, as seen by an observer. +data ObserverEvents a + = ObserverEvents + { boeHost :: !Host + , boeBlock :: !Hash + , boeBlockNo :: !BlockNo + , boeSlotNo :: !SlotNo + , boeSlotStart :: !SlotStart + , boeNoticed :: !(Maybe a) + , boeRequested :: !(Maybe a) + , boeFetched :: !(Maybe a) + , boeAdopted :: !(Maybe a) + , boeChainDelta :: !Int + , boeAnnounced :: !(Maybe a) + , boeSending :: !(Maybe a) + , boeErrs :: [BPError] + } + deriving (Generic, AE.FromJSON, AE.ToJSON, Show) + +type ObserverEventsAbs = ObserverEvents UTCTime +type ObserverEventsRel = ObserverEvents NominalDiffTime + +mbePhaseIndex :: Map Phase (MachBlockEvents a -> Maybe a) +mbePhaseIndex = Map.fromList + [ (Notice, mbeNoticed) + , (Request, mbeRequested) + , (Fetch, mbeAcquired) + , (Forge, mbeAcquired) + , (Acquire, mbeAcquired) + , (Adopt, mbeAdopted) + , (Announce, mbeAnnounced) + , (Send, mbeSending) + ] + +mbeGetProjection :: Phase -> (MachBlockEvents a -> Maybe a) +mbeGetProjection k = + Map.lookup k mbePhaseIndex + & fromMaybe (error $ "Unknown phase: " <> show k) + +-- | Sum of observer and forger events alike. 
+data MachBlockEvents a + = MFE (ForgerEvents a) + | MOE (ObserverEvents a) + | MBE BPError + +mbeForgP, mbeObsvP, mbeErrP :: MachBlockEvents a -> Bool +mbeForgP = \case + MFE{} -> True + _ -> False +mbeObsvP = \case + MOE{} -> True + _ -> False +mbeErrP = \case + MBE{} -> True + _ -> False + +mapMbe :: (ForgerEvents a -> b) -> (ObserverEvents a -> b) -> (BPError -> b) + -> MachBlockEvents a -> b +mapMbe f o e = \case + MFE x -> f x + MOE x -> o x + MBE x -> e x + +mapMbeErrs :: ([BPError] -> [BPError]) -> MachBlockEvents a -> MachBlockEvents a +mapMbeErrs f = mapMbe (\x -> MFE x { bfeErrs=f $ bfeErrs x } ) + (\x -> MOE x { boeErrs=f $ boeErrs x } ) + MBE + +partitionMbes :: [MachBlockEvents a] -> ([ForgerEvents a], [ObserverEvents a], [BPError]) +partitionMbes = go [] [] [] + where + go :: [ForgerEvents a] -> [ObserverEvents a] -> [BPError] -> [MachBlockEvents a] -> ([ForgerEvents a], [ObserverEvents a], [BPError]) + go as bs cs [] = (reverse as, reverse bs, reverse cs) + go as bs cs (MFE a:xs) = go (a:as) bs cs xs + go as bs cs (MOE b:xs) = go as (b:bs) cs xs + go as bs cs (MBE c:xs) = go as bs (c:cs) xs + +errorMbes :: [MachBlockEvents a] -> [BPError] +errorMbes = go [] + where + go :: [BPError] -> [MachBlockEvents a] -> [BPError] + go cs [] = reverse cs + go cs (MBE c:xs) = go (c:cs) xs + go cs (_:xs) = go cs xs + +trimapMbe :: + (ForgerEvents a -> ForgerEvents a) + -> (ObserverEvents a -> ObserverEvents a) + -> (BPError -> BPError) + -> MachBlockEvents a -> MachBlockEvents a +trimapMbe f o e = mapMbe (MFE . f) (MOE . o) (MBE . e) + +bimapMbe :: + (ForgerEvents a -> ForgerEvents a) + -> (ObserverEvents a -> ObserverEvents a) + -> MachBlockEvents a -> MachBlockEvents a +bimapMbe f o = trimapMbe f o id + +bimapMbe' :: + (ForgerEvents a -> Either BPError (ForgerEvents a)) + -> (ObserverEvents a -> Either BPError (ObserverEvents a)) + -> MachBlockEvents a -> MachBlockEvents a +bimapMbe' f o = \case + MFE x -> either MBE MFE (f x) + MOE x -> either MBE MOE (o x) + x@MBE{} -> x + +ordBlockEv :: MachBlockEvents a -> MachBlockEvents a -> Ordering +ordBlockEv l r + | (on (>) $ mapMbe bfeBlockNo boeBlockNo (const 0)) l r = GT + | (on (>) $ mapMbe bfeBlockNo boeBlockNo (const 0)) r l = LT + | mbeForgP l = GT + | mbeForgP r = LT + | mbeObsvP l = GT + | mbeObsvP r = LT + | otherwise = EQ + +mbeSlotStart :: MachBlockEvents a -> SlotStart +mbeSlotStart = mapMbe bfeSlotStart boeSlotStart (SlotStart . const zeroUTCTime) + +mbeNoticed, mbeRequested, mbeAcquired, mbeAdopted, mbeAnnounced, mbeSending :: MachBlockEvents a -> Maybe a +mbeNoticed = mapMbe (const Nothing) boeNoticed (const Nothing) +mbeRequested = mapMbe (const Nothing) boeRequested (const Nothing) +mbeAcquired = mapMbe bfeForged boeFetched (const Nothing) +mbeAdopted = mapMbe bfeAdopted boeAdopted (const Nothing) +mbeAnnounced = mapMbe bfeAnnounced boeAnnounced (const Nothing) +mbeSending = mapMbe bfeSending boeSending (const Nothing) + +mbeBlock :: MachBlockEvents a -> Hash +mbeBlock = mapMbe bfeBlock boeBlock eBlock + +mbeBlockNo :: MachBlockEvents a -> BlockNo +mbeBlockNo = mapMbe bfeBlockNo boeBlockNo (const (-1)) + +mbeError :: MachBlockEvents a -> Maybe BPError +mbeError = mapMbe (const Nothing) (const Nothing) Just + +mbeFailed :: MachBlockEvents a -> Bool +mbeFailed = isJust . mbeError + +-- | Machine's private view of all the blocks. +type MachBlockMap a + = Map.Map Hash (MachBlockEvents a) + +blockMapMaxBlock :: MachBlockMap a -> MachBlockEvents a +blockMapMaxBlock = maximumBy ordBlockEv . 
Map.elems + +blockMapBlock :: Hash -> MachBlockMap a -> MachBlockEvents a +blockMapBlock h = + fromMaybe (error $ "Invariant failed: missing hash " <> show h) . Map.lookup h + +-- | A completed, compactified version of ObserverEvents. +data BlockObservation + = BlockObservation + { boObserver :: !Host + , boSlotStart :: !SlotStart + , boNoticed :: !NominalDiffTime + , boRequested :: !NominalDiffTime + , boFetched :: !NominalDiffTime + , boAdopted :: !(Maybe NominalDiffTime) + , boChainDelta :: !Int -- ^ ChainDelta during adoption + , boAnnounced :: !(Maybe NominalDiffTime) + , boSending :: !(Maybe NominalDiffTime) + , boErrors :: [BPError] + } + deriving (Generic, AE.FromJSON, AE.ToJSON, Show) + +-- | All events related to a block. +data BlockEvents + = BlockEvents + { beForger :: !Host + , beBlock :: !Hash + , beBlockPrev :: !Hash + , beBlockNo :: !BlockNo + , beSlotNo :: !SlotNo + , beSlotStart :: !SlotStart + , beForged :: !NominalDiffTime + , beAdopted :: !NominalDiffTime + , beChainDelta :: !Int -- ^ ChainDelta during adoption + , beAnnounced :: !NominalDiffTime + , beSending :: !NominalDiffTime + , beObservations :: [BlockObservation] + , beValidObservs :: [BlockObservation] + , beOtherBlocks :: [Hash] + , beErrors :: [BPError] + } + deriving (Generic, AE.FromJSON, AE.ToJSON, Show) + +instance RenderTimeline BlockEvents where + rtFields = + -- Width LeftPad + [ Field 5 0 "block" "block" "no." $ IWord64 (unBlockNo . beBlockNo) + , Field 5 0 "abs.slot" "abs." "slot#" $ IWord64 (unSlotNo . beSlotNo) + , Field 6 0 "hash" "block" "hash" $ IText (shortHash . beBlock) + , Field 6 0 "hashPrev" "prev" "hash" $ IText (shortHash . beBlockPrev) + , Field 5 0 "valid.observ" "valid" "obsrv" $ IInt (length . beValidObservs) + , Field 5 0 "errors" "all" "errs" $ IInt (length . beErrors) + , Field 5 0 "forks" "" "forks" $ IInt (count bpeIsFork . beErrors) + , Field 5 0 "missAdopt" (m!!0) "adopt" $ IInt (count (bpeIsMissing Adopt) . beErrors) + , Field 5 0 "missAnnou" (m!!1) "annou" $ IInt (count (bpeIsMissing Announce) . beErrors) + , Field 5 0 "missSend" (m!!2) "send" $ IInt (count (bpeIsMissing Send) . beErrors) + , Field 5 0 "negAnnou" (n!!0) "annou" $ IInt (count (bpeIsNegative Announce) . beErrors) + , Field 5 0 "negSend" (n!!1) "send" $ IInt (count (bpeIsNegative Send) . beErrors) + ] + where + m = nChunksEachOf 3 6 "Missing phase" + n = nChunksEachOf 2 6 "Negative phase" + count :: (a -> Bool) -> [a] -> Int + count f = length . filter f + rtCommentary BlockEvents{..} = (" " <>) . T.pack . 
show <$> beErrors + +mapChainToForgerEventCDF :: + [PercSpec Float] + -> [BlockEvents] + -> (BlockEvents -> Maybe NominalDiffTime) + -> Distribution Float NominalDiffTime +mapChainToForgerEventCDF percs cbe proj = + computeDistribution percs (mapMaybe proj cbe) + +mapChainToPeerBlockObservationCDFs :: + [PercSpec Float] + -> [BlockEvents] + -> (BlockObservation -> Maybe NominalDiffTime) + -> String + -> (Distribution Float NominalDiffTime, Distribution Float NominalDiffTime) +mapChainToPeerBlockObservationCDFs percs chainBlockEvents proj desc = + (means, covs) + where + means, covs :: Distribution Float NominalDiffTime + (,) means covs = computeDistributionStats desc + (fmap realToFrac <$> allDistributions) + & either error id + & join (***) (fmap realToFrac) + + allDistributions :: [Distribution Float NominalDiffTime] + allDistributions = computeDistribution percs <$> allObservations + + allObservations :: [[NominalDiffTime]] + allObservations = + filter isValidBlockEvent chainBlockEvents + <&> blockObservations + + blockObservations :: BlockEvents -> [NominalDiffTime] + blockObservations be = mapMaybe proj (beValidObservs be) + +isValidBlockEvent :: BlockEvents -> Bool +isValidBlockEvent BlockEvents{..} = beChainDelta == 1 + +isValidBlockObservation :: BlockObservation -> Bool +isValidBlockObservation BlockObservation{..} = + -- 1. All phases are present + null boErrors + && + -- 2. All timings account for processing of a single block + boChainDelta == 1 + +blockProp :: ChainInfo -> [(JsonLogfile, [LogObject])] -> IO BlockPropagation +blockProp ci xs = do + putStrLn ("blockProp: recovering block event maps" :: String) + doBlockProp =<< mapConcurrently (pure + . fmap deltifyEvents + . blockEventMapsFromLogObjects ci) xs + +doBlockProp :: [MachBlockMap NominalDiffTime] -> IO BlockPropagation +doBlockProp eventMaps = do + putStrLn ("tip block: " <> show tipBlock :: String) + putStrLn ("chain length: " <> show (length chain) :: String) + pure $ BlockPropagation + (forgerEventsCDF (Just . beForged)) + (forgerEventsCDF (\x -> if beChainDelta x == 1 then Just (beAdopted x) + else Nothing)) + (forgerEventsCDF (Just . beAnnounced)) + (forgerEventsCDF (Just . beSending)) + (observerEventsCDFs (Just . boNoticed) "peer noticed") + (observerEventsCDFs (Just . boRequested) "peer requested") + (observerEventsCDFs (Just . boFetched) "peer fetched") + (observerEventsCDFs (\x -> if boChainDelta x == 1 then boAdopted x + else Nothing) "peer adopted") + (observerEventsCDFs boAnnounced "peer announced") + (observerEventsCDFs boSending "peer sending") + chain + where + forgerEventsCDF = mapChainToForgerEventCDF stdPercentiles chain + observerEventsCDFs = mapChainToPeerBlockObservationCDFs stdPercentiles chain + + chain :: [BlockEvents] + chain = rebuildChain eventMaps tipHash + heightMap :: Map BlockNo (Set Hash) + heightMap = foldr (\em acc -> + Map.foldr + (\mbe -> Map.alter + (maybe (Just $ Set.singleton (mbeBlock mbe)) + (Just . 
Set.insert (mbeBlock mbe))) + (mbeBlockNo mbe)) + acc em) + mempty eventMaps + tipBlock = getBlockForge eventMaps tipHash + tipHash = rewindChain eventMaps 1 (mbeBlock finalBlockEv) + finalBlockEv = maximumBy ordBlockEv $ blockMapMaxBlock <$> eventMaps + + rewindChain :: [MachBlockMap a] -> Int -> Hash -> Hash + rewindChain eventMaps count tip = go tip count + where go tip = \case + 0 -> tip + n -> go (bfeBlockPrev $ getBlockForge eventMaps tip) (n - 1) + + getBlockForge :: [MachBlockMap a] -> Hash -> ForgerEvents a + getBlockForge xs h = + mapMaybe (Map.lookup h) xs + & find mbeForgP + & fromMaybe + (error $ mconcat + [ "Invariant failed: couldn't find a forge for hash ", show h + , "\nErrors:\n", show (intercalate "\n" $ fmap show $ errorMbes $ mapMaybe (Map.lookup h) xs) + ]) + & mapMbe id (error "Silly invariant failed.") (error "Silly invariant failed.") + + rebuildChain :: [MachBlockMap NominalDiffTime] -> Hash -> [BlockEvents] + rebuildChain machBlockMaps tip = go (Just tip) [] + where go Nothing acc = acc + go (Just h) acc = + case partitionMbes $ mapMaybe (Map.lookup h) machBlockMaps of + ([], _, ers) -> error $ mconcat + [ "No forger for hash ", show h + , "\nErrors:\n" + ] ++ intercalate "\n" (show <$> ers) + blkEvs@(forgerEv:_, oEvs, ers) -> + go (bfePrevBlock forgerEv) (liftBlockEvents forgerEv oEvs ers : acc) + + liftBlockEvents :: ForgerEventsRel -> [ObserverEvents NominalDiffTime] -> [BPError] -> BlockEvents + liftBlockEvents ForgerEvents{..} os errs = + BlockEvents + { beForger = bfeHost + , beBlock = bfeBlock + , beBlockPrev = bfeBlockPrev + , beBlockNo = bfeBlockNo + , beSlotNo = bfeSlotNo + , beSlotStart = bfeSlotStart + , beForged = bfeForged & handleMiss "Δt Forged" + , beAdopted = bfeAdopted & handleMiss "Δt Adopted (forger)" + , beChainDelta = bfeChainDelta + , beAnnounced = bfeAnnounced & handleMiss "Δt Announced (forger)" + , beSending = bfeSending & handleMiss "Δt Sending (forger)" + , beObservations = observs + , beValidObservs = observs & filter isValidBlockObservation + , beOtherBlocks = otherBlocks + , beErrors = + errs + <> (fail' bfeBlock . BPEFork <$> otherBlocks) + <> bfeErrs + <> concatMap boeErrs os + } + where + observs = + catMaybes $ + os <&> \ObserverEvents{..}-> + BlockObservation + <$> Just boeHost + <*> Just bfeSlotStart + <*> boeNoticed + <*> boeRequested + <*> boeFetched + <*> Just boeAdopted + <*> Just boeChainDelta + <*> Just boeAnnounced + <*> Just boeSending + <*> Just boeErrs + otherBlocks = Map.lookup bfeBlockNo heightMap + & handleMiss "height map" + & Set.delete bfeBlock + & Set.toList + fail' :: Hash -> BPErrorKind -> BPError + fail' hash desc = BPError hash Nothing Nothing desc + + handleMiss :: String -> Maybe a -> a + handleMiss slotDesc = fromMaybe $ error $ mconcat + [ "While processing ", show bfeBlockNo, " hash ", show bfeBlock + , " forged by ", show bfeHost + , " -- missing: ", slotDesc + ] + +-- | Given a single machine's log object stream, recover its block map. 
+blockEventMapsFromLogObjects :: ChainInfo -> (JsonLogfile, [LogObject]) -> MachBlockMap UTCTime +blockEventMapsFromLogObjects ci (f@(unJsonLogfile -> fp), xs) = + trace ("processing " <> fp) + $ if Map.size machBlockMap == 0 + then error $ mconcat + ["No block events in ",fp," : ","LogObject count: ",show (length xs)] + else machBlockMap + where + machBlockMap = foldl (blockPropMachEventsStep ci f) mempty xs + +blockPropMachEventsStep :: ChainInfo -> JsonLogfile -> MachBlockMap UTCTime -> LogObject -> MachBlockMap UTCTime +blockPropMachEventsStep ci (JsonLogfile fp) bMap lo = case lo of + -- 0. Notice (observer only) + LogObject{loAt, loHost, loBody=LOChainSyncClientSeenHeader{loBlock,loBlockNo,loSlotNo}} -> + let mbe0 = Map.lookup loBlock bMap + in if isJust mbe0 then bMap else + MOE + (ObserverEvents + loHost loBlock loBlockNo loSlotNo + (slotStart ci loSlotNo) (Just loAt) + Nothing Nothing Nothing 0 Nothing Nothing []) + & doInsert loBlock + -- 1. Request (observer only) + LogObject{loAt, loHost, loBody=LOBlockFetchClientRequested{loBlock,loLength}} -> + let mbe0 = Map.lookup loBlock bMap + & fromMaybe (fail loBlock $ BPEUnexpectedAsFirst Request) + in if isJust (mbeRequested mbe0) then bMap else + bimapMbe' + (const . Left $ fail' loBlock $ BPEUnexpectedForForger Request) + (\x -> Right x { boeRequested=Just loAt, boeChainDelta=loLength `max` boeChainDelta x }) + mbe0 + & doInsert loBlock + -- 2. Acquire:Fetch (observer only) + LogObject{loAt, loHost, loBody=LOBlockFetchClientCompletedFetch{loBlock}} -> + let mbe0 = Map.lookup loBlock bMap + & fromMaybe (fail loBlock $ BPEUnexpectedAsFirst Fetch) + in if isJust (mbeAcquired mbe0) then bMap else + bimapMbe' + (const . Left $ fail' loBlock (BPEUnexpectedForForger Fetch)) + (\x -> Right x { boeFetched=Just loAt }) + mbe0 + & doInsert loBlock + -- 2. Acquire:Forge (forger only) + LogObject{loAt, loHost, loBody=LOBlockForged{loBlock,loPrev,loBlockNo,loSlotNo}} -> + Map.lookup loBlock bMap + <&> bimapMbe' + (const.Left $ + BPError loBlock (Just fp) (Just lo) BPEDuplicateForge) + (const.Left $ + BPError loBlock (Just fp) (Just lo) (BPEUnexpectedForObserver Forge)) + & fromMaybe + (MFE $ ForgerEvents + loHost loBlock loPrev loBlockNo loSlotNo + (slotStart ci loSlotNo) (Just loAt) + Nothing 0 Nothing Nothing []) + & doInsert loBlock + -- 3. Adopt + LogObject{loAt, loHost, loBody=LOBlockAddedToCurrentChain{loBlock,loLength}} -> + let mbe0 = Map.lookup loBlock bMap + & fromMaybe (fail loBlock $ BPEUnexpectedAsFirst Adopt) + in if isJust (mbeAdopted mbe0) then bMap else + bimapMbe + (\x -> x { bfeAdopted=Just loAt, bfeChainDelta=loLength }) + (\x -> x { boeAdopted=Just loAt, boeChainDelta=loLength `max` boeChainDelta x}) + mbe0 + & doInsert loBlock + -- 4. Announce + LogObject{loAt, loHost, loBody=LOChainSyncServerSendHeader{loBlock}} -> + let mbe0 = Map.lookup loBlock bMap + & fromMaybe (fail loBlock $ BPEUnexpectedAsFirst Announce) + in if isJust (mbeAnnounced mbe0) then bMap else + bimapMbe + (\x -> x { bfeAnnounced=Just loAt }) + (\x -> x { boeAnnounced=Just loAt }) + mbe0 + & doInsert loBlock + -- 5. 
Sending started + LogObject{loAt, loHost, loBody=LOBlockFetchServerSending{loBlock}} -> + let mbe0 = Map.lookup loBlock bMap + & fromMaybe (fail loBlock $ BPEUnexpectedAsFirst Send) + in if isJust (mbeSending mbe0) then bMap else + bimapMbe + (\x -> x { bfeSending=Just loAt }) + (\x -> x { boeSending=Just loAt }) + mbe0 + & doInsert loBlock + _ -> bMap + where + fail' :: Hash -> BPErrorKind -> BPError + fail' hash desc = BPError hash (Just fp) (Just lo) desc + + fail :: Hash -> BPErrorKind -> MachBlockEvents a + fail hash desc = MBE $ fail' hash desc + + doInsert :: Hash -> MachBlockEvents UTCTime -> MachBlockMap UTCTime + doInsert k x = Map.insert k x bMap + +deltifyEvents :: MachBlockEvents UTCTime -> MachBlockEvents NominalDiffTime +deltifyEvents (MBE e) = MBE e +deltifyEvents (MFE x@ForgerEvents{..}) = + MFE x + { bfeForged = bfeForged <&> (`sinceSlot` bfeSlotStart) + , bfeAdopted = diffUTCTime <$> bfeAdopted <*> bfeForged + , bfeAnnounced = diffUTCTime <$> bfeAnnounced <*> bfeAdopted + , bfeSending = diffUTCTime <$> bfeSending <*> bfeAnnounced + } & \case + v@(MFE x') -> MFE x' { bfeErrs = collectEventErrors v + [Forge, Adopt, Announce, Send] } + _ -> error "Impossible" +deltifyEvents (MOE x@ObserverEvents{..}) = + MOE x + { boeNoticed = boeNoticed <&> (`sinceSlot` boeSlotStart) + , boeRequested = diffUTCTime <$> boeRequested <*> boeNoticed + , boeFetched = diffUTCTime <$> boeFetched <*> boeRequested + , boeAdopted = diffUTCTime <$> boeAdopted <*> boeFetched + , boeAnnounced = diffUTCTime <$> boeAnnounced <*> boeAdopted + , boeSending = diffUTCTime <$> boeSending <*> boeAnnounced + } & \case + v@(MOE x') -> MOE x' { boeErrs = collectEventErrors v + [Notice, Request, Fetch, Adopt, Announce, Send] } + _ -> error "Impossible" + +collectEventErrors :: MachBlockEvents NominalDiffTime -> [Phase] -> [BPError] +collectEventErrors mbe phases = + [ BPError (mbeBlock mbe) Nothing Nothing $ + case (miss, proj) of + (,) True _ -> BPEMissingPhase phase + (,) _ (Just neg) -> BPENegativePhase phase neg + _ -> error "Impossible." 
+ | phase <- phases + , let proj = mbeGetProjection phase mbe + , let miss = isNothing proj + , let neg = ((< 0) <$> proj) == Just True + , miss || neg + ] diff --git a/nix/workbench/locli/src/Cardano/Analysis/Driver.hs b/nix/workbench/locli/src/Cardano/Analysis/Driver.hs new file mode 100644 index 00000000000..e8dbe1f9721 --- /dev/null +++ b/nix/workbench/locli/src/Cardano/Analysis/Driver.hs @@ -0,0 +1,234 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE TypeApplications #-} +{-# LANGUAGE ImpredicativeTypes #-} +{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing #-} +module Cardano.Analysis.Driver + ( AnalysisCmdError + , renderAnalysisCmdError + , runAnalysisCommand + ) where + +import Prelude (String, error) +import Cardano.Prelude + +import Control.Arrow ((&&&)) +import Control.Monad.Trans.Except.Extra (firstExceptT, newExceptT) +import Control.Concurrent.Async (mapConcurrently) + +import qualified Data.Aeson as AE +import qualified Data.ByteString.Lazy.Char8 as LBS +import qualified Data.Text as T +import qualified Data.Text.IO as T + +import qualified System.FilePath as F + +import qualified Graphics.Histogram as Hist +import qualified Graphics.Gnuplot.Frame.OptionSet as Opts + +import Text.Printf + +import Cardano.Analysis.BlockProp +import Cardano.Analysis.MachTimeline +import Cardano.Analysis.Profile +import Cardano.Unlog.Commands +import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Unlog.Render +import Cardano.Unlog.SlotStats + + +data AnalysisCmdError + = AnalysisCmdError !Text + | RunMetaParseError !JsonRunMetafile !Text + | GenesisParseError !JsonGenesisFile !Text + deriving Show + +renderAnalysisCmdError :: AnalysisCommand -> AnalysisCmdError -> Text +renderAnalysisCmdError cmd err = + case err of + AnalysisCmdError err' -> renderError cmd err' + "Analysis command failed" + pure + RunMetaParseError (JsonRunMetafile fp) err' -> renderError cmd err' + ("Benchmark run metafile parse failed: " <> T.pack fp) + pure + GenesisParseError (JsonGenesisFile fp) err' -> renderError cmd err' + ("Genesis parse failed: " <> T.pack fp) + pure + where + renderError :: AnalysisCommand -> a -> Text -> (a -> [Text]) -> Text + renderError cmd' cmdErr desc renderer = + mconcat [ desc, ": " + , renderAnalysisCommand cmd' + , " Error: " + , mconcat (renderer cmdErr) + ] + +-- +-- Analysis command dispatch +-- +runAnalysisCommand :: AnalysisCommand -> ExceptT AnalysisCmdError IO () +runAnalysisCommand (MachineTimelineCmd genesisFile metaFile logfiles oFiles) = do + chainInfo <- + ChainInfo + <$> firstExceptT (RunMetaParseError metaFile . T.pack) + (newExceptT $ + AE.eitherDecode @Profile <$> LBS.readFile (unJsonRunMetafile metaFile)) + <*> firstExceptT (GenesisParseError genesisFile . T.pack) + (newExceptT $ + AE.eitherDecode @Genesis <$> LBS.readFile (unJsonGenesisFile genesisFile)) + firstExceptT AnalysisCmdError $ + runMachineTimeline chainInfo logfiles oFiles +runAnalysisCommand (BlockPropagationCmd genesisFile metaFile logfiles oFiles) = do + chainInfo <- + ChainInfo + <$> firstExceptT (RunMetaParseError metaFile . T.pack) + (newExceptT $ + AE.eitherDecode @Profile <$> LBS.readFile (unJsonRunMetafile metaFile)) + <*> firstExceptT (GenesisParseError genesisFile . 
T.pack) + (newExceptT $ + AE.eitherDecode @Genesis <$> LBS.readFile (unJsonGenesisFile genesisFile)) + firstExceptT AnalysisCmdError $ + runBlockPropagation chainInfo logfiles oFiles +runAnalysisCommand SubstringKeysCmd = + liftIO $ mapM_ putStrLn logObjectStreamInterpreterKeys + +runBlockPropagation :: + ChainInfo -> [JsonLogfile] -> BlockPropagationOutputFiles -> ExceptT Text IO () +runBlockPropagation chainInfo logfiles BlockPropagationOutputFiles{..} = do + liftIO $ do + putStrLn ("runBlockPropagation: lifting LO streams" :: Text) + -- 0. Recover LogObjects + objLists :: [(JsonLogfile, [LogObject])] <- flip mapConcurrently logfiles + (joinT . (pure &&& readLogObjectStream)) + + forM_ bpofLogObjects . const $ do + flip mapConcurrently objLists $ + \(JsonLogfile f, objs) -> do + putStrLn ("runBlockPropagation: dumping LO streams" :: Text) + dumpLOStream objs + (JsonOutputFile $ F.dropExtension f <> ".logobjects.json") + + blockPropagation <- blockProp chainInfo objLists + + forM_ bpofTimelinePretty $ + \(TextOutputFile f) -> + withFile f WriteMode $ \hnd -> do + putStrLn ("runBlockPropagation: dumping pretty timeline" :: Text) + hPutStrLn hnd . T.pack $ printf "--- input: %s" f + mapM_ (T.hPutStrLn hnd) + (renderDistributions RenderPretty blockPropagation) + mapM_ (T.hPutStrLn hnd) + (renderTimeline $ bpChainBlockEvents blockPropagation) + + forM_ bpofAnalysis $ + \(JsonOutputFile f) -> + withFile f WriteMode $ \hnd -> do + putStrLn ("runBlockPropagation: dumping analysis core" :: Text) + LBS.hPutStrLn hnd (AE.encode blockPropagation) + where + joinT :: (IO a, IO b) -> IO (a, b) + joinT (a, b) = (,) <$> a <*> b + +runMachineTimeline :: + ChainInfo -> [JsonLogfile] -> MachineTimelineOutputFiles -> ExceptT Text IO () +runMachineTimeline chainInfo logfiles MachineTimelineOutputFiles{..} = do + liftIO $ do + -- 0. Recover LogObjects + objs :: [LogObject] <- concat <$> mapM readLogObjectStream logfiles + forM_ mtofLogObjects + (dumpLOStream objs) + + -- 1. Derive the basic scalars and vectors + let (,) runStats noisySlotStats = timelineFromLogObjects chainInfo objs + forM_ mtofSlotStats $ + \(JsonOutputFile f) -> + withFile f WriteMode $ \hnd -> + forM_ noisySlotStats $ LBS.hPutStrLn hnd . AE.encode + + -- 2. Reprocess the slot stats + let slotStats = cleanupSlotStats noisySlotStats + + -- 3. Derive the timeline + let drvVectors0, _drvVectors1 :: [DerivedSlot] + (,) drvVectors0 _drvVectors1 = computeDerivedVectors slotStats + timeline :: MachTimeline + timeline = slotStatsMachTimeline chainInfo slotStats + timelineOutput :: LBS.ByteString + timelineOutput = AE.encode timeline + + -- 4. 
Render various outputs + forM_ mtofTimelinePretty + (renderPrettyMachTimeline slotStats timeline logfiles) + forM_ mtofStatsCsv + (renderExportStats runStats timeline) + forM_ mtofTimelineCsv + (renderExportTimeline slotStats) + forM_ mtofDerivedVectors0Csv + (renderDerivedSlots drvVectors0) + forM_ mtofHistogram + (renderHistogram "CPU usage spans over 85%" "Span length" + (toList $ sort $ sSpanLensCPU85 timeline)) + + flip (maybe $ LBS.putStrLn timelineOutput) mtofAnalysis $ + \case + JsonOutputFile f -> + withFile f WriteMode $ \hnd -> + LBS.hPutStrLn hnd timelineOutput + where + renderHistogram :: Integral a + => String -> String -> [a] -> OutputFile -> IO () + renderHistogram desc ylab xs (OutputFile f) = + Hist.plotAdv f opts hist >> pure () + where + hist = Hist.histogram Hist.binFreedmanDiaconis $ fromIntegral <$> xs + opts = Opts.title desc $ Opts.yLabel ylab $ Opts.xLabel "Population" $ + Hist.defOpts hist + + renderPrettyMachTimeline :: + [SlotStats] -> MachTimeline -> [JsonLogfile] -> TextOutputFile -> IO () + renderPrettyMachTimeline xs s srcs o = + withFile (unTextOutputFile o) WriteMode $ \hnd -> do + hPutStrLn hnd . T.pack $ + printf "--- input: %s" (intercalate " " $ unJsonLogfile <$> srcs) + mapM_ (T.hPutStrLn hnd) + (renderDistributions RenderPretty s) + mapM_ (T.hPutStrLn hnd) + (renderTimeline xs) + renderExportStats :: RunScalars -> MachTimeline -> CsvOutputFile -> IO () + renderExportStats rs s (CsvOutputFile o) = + withFile o WriteMode $ + \h -> do + mapM_ (hPutStrLn h) + (renderDistributions RenderCsv s) + mapM_ (hPutStrLn h) $ + renderChainInfoExport chainInfo + <> + renderRunScalars rs + renderExportTimeline :: [SlotStats] -> CsvOutputFile -> IO () + renderExportTimeline _xs (CsvOutputFile _o) = + error "Timeline export is not supported." + -- withFile o WriteMode $ + -- mapM_ (T.hPutStrLn hnd) (renderTimeline xs) + + renderDerivedSlots :: [DerivedSlot] -> CsvOutputFile -> IO () + renderDerivedSlots slots (CsvOutputFile o) = do + withFile o WriteMode $ \hnd -> do + hPutStrLn hnd derivedSlotsHeader + forM_ slots $ + hPutStrLn hnd . renderDerivedSlot + +dumpLOStream :: [LogObject] -> JsonOutputFile -> IO () +dumpLOStream objs o = + withFile (unJsonOutputFile o) WriteMode $ \hnd -> do + forM_ objs $ LBS.hPutStrLn hnd . AE.encode + +renderRunScalars :: RunScalars -> [Text] +renderRunScalars RunScalars{..} = + T.intercalate "," <$> + [[ "Run time", maybe "---" show rsElapsed ] + ,[ "Txs submitted", maybe "---" show rsSubmitted ] + ,[ "Submission TPS", maybe "---" (show . 
sum) rsThreadwiseTps] + ] diff --git a/nix/workbench/locli/src/Cardano/Analysis/MachTimeline.hs b/nix/workbench/locli/src/Cardano/Analysis/MachTimeline.hs new file mode 100644 index 00000000000..8cefa904fe0 --- /dev/null +++ b/nix/workbench/locli/src/Cardano/Analysis/MachTimeline.hs @@ -0,0 +1,448 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE NamedFieldPuns #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE StrictData #-} +{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing -Wno-orphans #-} +{- HLINT ignore "Use head" -} +module Cardano.Analysis.MachTimeline (module Cardano.Analysis.MachTimeline) where + +import Prelude (String, (!!), error) +import Cardano.Prelude + +import Control.Arrow ((&&&), (***)) +import qualified Data.Aeson as AE +import Data.Aeson +import qualified Data.HashMap.Strict as HashMap +import Data.Vector (Vector) +import qualified Data.Vector as Vec +import qualified Data.Map.Strict as Map + +import Data.Time.Clock (NominalDiffTime, UTCTime) +import qualified Data.Time.Clock as Time + +import Ouroboros.Network.Block (SlotNo(..)) + +import Data.Accum +import Data.Distribution + +import Cardano.Analysis.Profile +import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Unlog.Render +import Cardano.Unlog.Resources +import Cardano.Unlog.SlotStats + +-- | The top-level representation of the machine timeline analysis results. +data MachTimeline + = MachTimeline + { sMaxChecks :: !Word64 + , sSlotMisses :: ![Word64] + , sSpanLensCPU85 :: ![Int] + , sSpanLensCPU85EBnd :: ![Int] + , sSpanLensCPU85Rwd :: ![Int] + -- distributions + , sMissDistrib :: !(Distribution Float Float) + , sLeadsDistrib :: !(Distribution Float Word64) + , sUtxoDistrib :: !(Distribution Float Word64) + , sDensityDistrib :: !(Distribution Float Float) + , sSpanCheckDistrib :: !(Distribution Float NominalDiffTime) + , sSpanLeadDistrib :: !(Distribution Float NominalDiffTime) + , sBlocklessDistrib :: !(Distribution Float Word64) + , sSpanLensCPU85Distrib + :: !(Distribution Float Int) + , sSpanLensCPU85EBndDistrib :: !(Distribution Float Int) + , sSpanLensCPU85RwdDistrib :: !(Distribution Float Int) + , sResourceDistribs :: !(Resources (Distribution Float Word64)) + } + deriving Show + +instance RenderDistributions MachTimeline where + rdFields = + -- Width LeftPad + [ Field 4 0 "missR" "Miss" "ratio" $ DFloat sMissDistrib + , Field 6 0 "CheckΔ" "" "ChkΔt" $ DDeltaT sSpanCheckDistrib + , Field 6 0 "LeadΔ" "" "LeadΔt" $ DDeltaT sSpanLeadDistrib + , Field 4 0 "BlkGap" "Block" "gap" $ DWord64 sBlocklessDistrib + , Field 5 0 "chDensity" "Dens" "ity" $ DFloat sDensityDistrib + , Field 3 0 "CPU" "CPU" "%" $ DWord64 (rCentiCpu . sResourceDistribs) + , Field 3 0 "GC" "GC" "%" $ DWord64 (rCentiGC . sResourceDistribs) + , Field 3 0 "MUT" "MUT" "%" $ DWord64 (fmap (min 999) . rCentiMut . sResourceDistribs) + , Field 3 0 "GcMaj" "GC " "Maj" $ DWord64 (rGcsMajor . sResourceDistribs) + , Field 3 0 "GcMin" "flt " "Min" $ DWord64 (rGcsMinor . sResourceDistribs) + , Field 5 0 "RSS" (m!!0) "RSS" $ DWord64 (rRSS . sResourceDistribs) + , Field 5 0 "Heap" (m!!1) "Heap" $ DWord64 (rHeap . sResourceDistribs) + , Field 5 0 "Live" (m!!2) "Live" $ DWord64 (rLive . sResourceDistribs) + , Field 5 0 "Allocd" "Alloc" "MB" $ DWord64 (rAlloc . 
sResourceDistribs) + , Field 5 0 "CPU85%LensAll" (c!!0) "All" $ DInt sSpanLensCPU85Distrib + , Field 5 0 "CPU85%LensEBnd" (c!!1) "EBnd" $ DInt sSpanLensCPU85EBndDistrib + ] + where + m = nChunksEachOf 3 6 "Memory usage, MB" + c = nChunksEachOf 2 6 "CPU85% spans" + +instance ToJSON MachTimeline where + toJSON MachTimeline{..} = AE.Array $ Vec.fromList + [ AE.Object $ HashMap.fromList + [ "kind" .= String "spanLensCPU85EBnd" + , "xs" .= toJSON sSpanLensCPU85EBnd] + , AE.Object $ HashMap.fromList + [ "kind" .= String "spanLensCPU85Rwd" + , "xs" .= toJSON sSpanLensCPU85Rwd] + , AE.Object $ HashMap.fromList + [ "kind" .= String "spanLensCPU85" + , "xs" .= toJSON sSpanLensCPU85] + , AE.Object $ HashMap.fromList + [ "kind" .= String "spanLensCPU85Sorted" + , "xs" .= toJSON (sort sSpanLensCPU85)] + , extendObject "kind" "spancheck" $ toJSON sSpanCheckDistrib + , extendObject "kind" "spanlead" $ toJSON sSpanLeadDistrib + , extendObject "kind" "cpu" $ toJSON (rCentiCpu sResourceDistribs) + , extendObject "kind" "gc" $ toJSON (rCentiGC sResourceDistribs) + , extendObject "kind" "density" $ toJSON sDensityDistrib + , extendObject "kind" "utxo" $ toJSON sUtxoDistrib + , extendObject "kind" "leads" $ toJSON sLeadsDistrib + , extendObject "kind" "misses" $ toJSON sMissDistrib + , extendObject "kind" "blockless" $ toJSON sBlocklessDistrib + , extendObject "kind" "rss" $ toJSON (rRSS sResourceDistribs) + , extendObject "kind" "heap" $ toJSON (rHeap sResourceDistribs) + , extendObject "kind" "live" $ toJSON (rLive sResourceDistribs) + , extendObject "kind" "spanLensCPU85Distrib" $ + toJSON sSpanLensCPU85Distrib + , extendObject "kind" "spanLensCPU85EBndDistrib" $ + toJSON sSpanLensCPU85EBndDistrib + , extendObject "kind" "spanLensCPU85RwdDistrib" $ + toJSON sSpanLensCPU85RwdDistrib + ] + +slotStatsMachTimeline :: ChainInfo -> [SlotStats] -> MachTimeline +slotStatsMachTimeline CInfo{} slots = + MachTimeline + { sMaxChecks = maxChecks + , sSlotMisses = misses + , sSpanLensCPU85 = spanLensCPU85 + , sSpanLensCPU85EBnd = sSpanLensCPU85EBnd + , sSpanLensCPU85Rwd = sSpanLensCPU85Rwd + -- + , sMissDistrib = computeDistribution stdPercentiles missRatios + , sLeadsDistrib = + computeDistribution stdPercentiles (slCountLeads <$> slots) + , sUtxoDistrib = + computeDistribution stdPercentiles (slUtxoSize <$> slots) + , sDensityDistrib = + computeDistribution stdPercentiles (slDensity <$> slots) + , sSpanCheckDistrib = + computeDistribution stdPercentiles (slSpanCheck <$> slots) + , sSpanLeadDistrib = + computeDistribution stdPercentiles (slSpanLead <$> slots) + , sBlocklessDistrib = + computeDistribution stdPercentiles (slBlockless <$> slots) + , sSpanLensCPU85Distrib + = computeDistribution stdPercentiles spanLensCPU85 + , sResourceDistribs = + computeResDistrib stdPercentiles resDistProjs slots + , sSpanLensCPU85EBndDistrib = computeDistribution stdPercentiles sSpanLensCPU85EBnd + , sSpanLensCPU85RwdDistrib = computeDistribution stdPercentiles sSpanLensCPU85Rwd + } + where + sSpanLensCPU85EBnd = Vec.length <$> + filter (spanContainsEpochSlot 3) spansCPU85 + sSpanLensCPU85Rwd = Vec.length <$> + filter (spanContainsEpochSlot 803) spansCPU85 + + checkCounts = slCountChecks <$> slots + maxChecks = if length checkCounts == 0 + then 0 else maximum checkCounts + misses = (maxChecks -) <$> checkCounts + missRatios = missRatio <$> misses + spansCPU85 :: [Vector SlotStats] + spansCPU85 = spans + ((/= Just False) . fmap (>=85) . rCentiCpu . 
slResources) + (toList slots) + spanLensCPU85 = spanLen <$> spansCPU85 + spanContainsEpochSlot :: Word64 -> Vector SlotStats -> Bool + spanContainsEpochSlot s = + uncurry (&&) + . ((s >) . slEpochSlot . Vec.head &&& + (s <) . slEpochSlot . Vec.last) + spanLen :: Vector SlotStats -> Int + spanLen = fromIntegral . unSlotNo . uncurry (-) . (slSlot *** slSlot) . (Vec.last &&& Vec.head) + resDistProjs = + Resources + { rCentiCpu = rCentiCpu . slResources + , rCentiGC = rCentiGC . slResources + , rCentiMut = rCentiMut . slResources + , rGcsMajor = rGcsMajor . slResources + , rGcsMinor = rGcsMinor . slResources + , rRSS = rRSS . slResources + , rHeap = rHeap . slResources + , rLive = rLive . slResources + , rAlloc = rAlloc . slResources + , rCentiBlkIO = rCentiBlkIO . slResources + , rThreads = rThreads . slResources + } + + missRatio :: Word64 -> Float + missRatio = (/ fromIntegral maxChecks) . fromIntegral + +-- The "fold" state that accumulates as we process 'LogObject's into a stream +-- of 'SlotStats'. +data TimelineAccum + = TimelineAccum + { aResAccums :: ResAccums + , aResTimestamp :: UTCTime + , aMempoolTxs :: Word64 + , aBlockNo :: Word64 + , aLastBlockSlot :: SlotNo + , aSlotStats :: [SlotStats] + , aRunScalars :: RunScalars + , aTxsCollectedAt:: Map.Map TId UTCTime + } + +data RunScalars + = RunScalars + { rsElapsed :: Maybe NominalDiffTime + , rsSubmitted :: Maybe Word64 + , rsThreadwiseTps :: Maybe (Vector Float) + } + +timelineFromLogObjects :: ChainInfo -> [LogObject] -> (RunScalars, [SlotStats]) +timelineFromLogObjects ci = + (aRunScalars &&& reverse . aSlotStats) + . foldl (timelineStep ci) zeroTimelineAccum + where + zeroTimelineAccum :: TimelineAccum + zeroTimelineAccum = + TimelineAccum + { aResAccums = mkResAccums + , aResTimestamp = zeroUTCTime + , aMempoolTxs = 0 + , aBlockNo = 0 + , aLastBlockSlot = 0 + , aSlotStats = [zeroSlotStats] + , aRunScalars = zeroRunScalars + , aTxsCollectedAt= mempty + } + zeroRunScalars :: RunScalars + zeroRunScalars = RunScalars Nothing Nothing Nothing + +timelineStep :: ChainInfo -> TimelineAccum -> LogObject -> TimelineAccum +timelineStep ci a@TimelineAccum{aSlotStats=cur:rSLs, ..} = \case + lo@LogObject{loAt, loBody=LOTraceStartLeadershipCheck slot _ _} -> + if slSlot cur > slot + -- Slot log entry for a slot we've supposedly done processing. + then a { aSlotStats = cur + { slOrderViol = slOrderViol cur + 1 + } : case (slSlot cur - slot, rSLs) of + -- Limited back-patching: + (1, p1:rest) -> onLeadershipCheck loAt p1:rest + (2, p1:p2:rest) -> p1:onLeadershipCheck loAt p2:rest + (3, p1:p2:p3:rest) -> p1:p2:onLeadershipCheck loAt p3:rest + _ -> rSLs -- Give up. + } + else if slSlot cur == slot + then a { aSlotStats = onLeadershipCheck loAt cur : rSLs + } + else if slot - slSlot cur > 1 + then let gap = unSlotNo $ slot - slSlot cur - 1 + gapStartSlot = slSlot cur + 1 in + updateOnNewSlot lo $ -- We have a slot check gap to patch: + patchSlotCheckGap gap gapStartSlot a + else updateOnNewSlot lo a + LogObject{loAt, loBody=LOTraceNodeIsLeader _} -> + a { aSlotStats = onLeadershipCertainty loAt True cur : rSLs + } + LogObject{loAt, loBody=LOTraceNodeNotLeader _} -> + a { aSlotStats = onLeadershipCertainty loAt False cur : rSLs + } + LogObject{loAt, loBody=LOResources rs} -> + -- Update resource stats accumulators & record values current slot. 
+ a { aResAccums = accs + , aResTimestamp = loAt + , aSlotStats = cur { slResources = Just <$> extractResAccums accs + } : rSLs + } + where accs = updateResAccums loAt rs aResAccums + LogObject{loBody=LOMempoolTxs txCount} -> + a { aMempoolTxs = txCount + , aSlotStats = cur { slMempoolTxs = txCount + } : rSLs + } + LogObject{loBody=LOBlockContext blockNo} -> + let newBlock = aBlockNo /= blockNo in + a { aBlockNo = blockNo + , aLastBlockSlot = if newBlock + then slSlot cur + else aLastBlockSlot + , aSlotStats = cur { slBlockNo = blockNo + , slBlockless = if newBlock + then 0 + else slBlockless cur + } : rSLs + } + LogObject{loBody=LOLedgerTookSnapshot} -> + a { aSlotStats = cur { slChainDBSnap = slChainDBSnap cur + 1 + } : rSLs + } + LogObject{loBody=LOMempoolRejectedTx} -> + a { aSlotStats = cur { slRejectedTx = slRejectedTx cur + 1 + } : rSLs + } + LogObject{loBody=LOGeneratorSummary _noFails sent elapsed threadwiseTps} -> + a { aRunScalars = + aRunScalars + { rsThreadwiseTps = Just threadwiseTps + , rsElapsed = Just elapsed + , rsSubmitted = Just sent + } + } + LogObject{loBody=LOTxsCollected coll, loTid, loAt} -> + a { aTxsCollectedAt = + aTxsCollectedAt & + (\case + Just{} -> Just loAt + -- error $ mconcat + -- ["Duplicate LOTxsCollected for tid ", show tid, " at ", show loAt] + Nothing -> Just loAt) + `Map.alter` loTid + , aSlotStats = + cur + { slTxsCollected = slTxsCollected cur + max 0 (fromIntegral coll) + } : rSLs + } + LogObject{loBody=LOTxsProcessed acc rej, loTid, loAt} -> + a { aTxsCollectedAt = loTid `Map.delete` aTxsCollectedAt + , aSlotStats = + cur + { slTxsMemSpan = + case loTid `Map.lookup` aTxsCollectedAt of + Nothing -> + -- error $ mconcat + -- ["LOTxsProcessed missing LOTxsCollected for tid", show tid, " at ", show loAt] + Just $ + 1.0 + + + fromMaybe 0 (slTxsMemSpan cur) + Just base -> + Just $ + (loAt `Time.diffUTCTime` base) + + + fromMaybe 0 (slTxsMemSpan cur) + , slTxsAccepted = slTxsAccepted cur + acc + , slTxsRejected = slTxsRejected cur + max 0 (fromIntegral rej) + } : rSLs + } + _ -> a + where + updateOnNewSlot :: LogObject -> TimelineAccum -> TimelineAccum + updateOnNewSlot LogObject{loAt, loBody=LOTraceStartLeadershipCheck slot utxo density} a' = + extendTimelineAccum ci slot loAt 1 utxo density a' + updateOnNewSlot _ _ = + error "Internal invariant violated: updateSlot called for a non-LOTraceStartLeadershipCheck LogObject." + + onLeadershipCheck :: UTCTime -> SlotStats -> SlotStats + onLeadershipCheck now sl@SlotStats{..} = + sl { slCountChecks = slCountChecks + 1 + , slSpanCheck = max 0 $ now `sinceSlot` slStart + } + + onLeadershipCertainty :: UTCTime -> Bool -> SlotStats -> SlotStats + onLeadershipCertainty now lead sl@SlotStats{..} = + sl { slCountLeads = slCountLeads + if lead then 1 else 0 + , slSpanLead = max 0 $ now `Time.diffUTCTime` (slSpanCheck `Time.addUTCTime` unSlotStart slStart) + } + + patchSlotCheckGap :: Word64 -> SlotNo -> TimelineAccum -> TimelineAccum + patchSlotCheckGap 0 _ a' = a' + patchSlotCheckGap n slot a'@TimelineAccum{aSlotStats=cur':_} = + patchSlotCheckGap (n - 1) (slot + 1) $ + extendTimelineAccum ci slot (unSlotStart $ slotStart ci slot) 0 (slUtxoSize cur') (slDensity cur') a' + patchSlotCheckGap _ _ _ = + error "Internal invariant violated: patchSlotCheckGap called with empty TimelineAccum chain." 
+timelineStep _ a = const a + +extendTimelineAccum :: + ChainInfo + -> SlotNo -> UTCTime -> Word64 -> Word64 -> Float + -> TimelineAccum -> TimelineAccum +extendTimelineAccum ci@CInfo{..} slot time checks utxo density a@TimelineAccum{..} = + let (epoch, epochSlot) = unSlotNo slot `divMod` epoch_length gsis in + a { aSlotStats = SlotStats + { slSlot = slot + , slEpoch = epoch + , slEpochSlot = epochSlot + , slStart = slStart + , slEarliest = time + , slOrderViol = 0 + -- Updated as we see repeats: + , slCountChecks = checks + , slCountLeads = 0 + , slSpanCheck = max 0 $ time `sinceSlot` slStart + , slSpanLead = 0 + , slTxsMemSpan = Nothing + , slTxsCollected= 0 + , slTxsAccepted = 0 + , slTxsRejected = 0 + , slMempoolTxs = aMempoolTxs + , slUtxoSize = utxo + , slDensity = density + , slChainDBSnap = 0 + , slRejectedTx = 0 + , slBlockNo = aBlockNo + , slBlockless = unSlotNo $ slot - aLastBlockSlot + , slResources = maybeDiscard + <$> discardObsoleteValues + <*> extractResAccums aResAccums + } : aSlotStats + } + where maybeDiscard :: (Word64 -> Maybe Word64) -> Word64 -> Maybe Word64 + maybeDiscard f = f + + slStart = slotStart ci slot + +data DerivedSlot + = DerivedSlot + { dsSlot :: SlotNo + , dsBlockless :: Word64 + } + +derivedSlotsHeader :: String +derivedSlotsHeader = + "Slot,Blockless span" + +renderDerivedSlot :: DerivedSlot -> String +renderDerivedSlot DerivedSlot{..} = + mconcat + [ show (unSlotNo dsSlot), ",", show dsBlockless + ] + +computeDerivedVectors :: [SlotStats] -> ([DerivedSlot], [DerivedSlot]) +computeDerivedVectors ss = + (\(_,_,d0,d1) -> (d0, d1)) $ + foldr step (0, 0, [], []) ss + where + step :: + SlotStats + -> (Word64, Word64, [DerivedSlot], [DerivedSlot]) + -> (Word64, Word64, [DerivedSlot], [DerivedSlot]) + step SlotStats{..} (lastBlockless, spanBLSC, accD0, accD1) = + if lastBlockless < slBlockless + then ( slBlockless + , slBlockless + , DerivedSlot + { dsSlot = slSlot + , dsBlockless = slBlockless + }:accD0 + , DerivedSlot + { dsSlot = slSlot + , dsBlockless = slBlockless + }:accD1 + ) + else ( slBlockless + , spanBLSC + , DerivedSlot + { dsSlot = slSlot + , dsBlockless = spanBLSC + }:accD0 + , accD1 + ) diff --git a/nix/workbench/locli/src/Cardano/Analysis/Profile.hs b/nix/workbench/locli/src/Cardano/Analysis/Profile.hs index 1402ef94483..3a38bdd4efb 100644 --- a/nix/workbench/locli/src/Cardano/Analysis/Profile.hs +++ b/nix/workbench/locli/src/Cardano/Analysis/Profile.hs @@ -1,4 +1,5 @@ {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GeneralizedNewtypeDeriving#-} {-# LANGUAGE OverloadedStrings #-} {-# LANGUAGE PatternSynonyms #-} {-# LANGUAGE RecordWildCards #-} @@ -114,13 +115,21 @@ instance FromJSON Profile where <*> ((meta .: "timestamp" :: Aeson.Parser Integer) <&> Time.posixSecondsToUTCTime . realToFrac) -slotStart :: ChainInfo -> SlotNo -> UTCTime +newtype SlotStart = + SlotStart { unSlotStart :: UTCTime } + deriving (Eq, Aeson.FromJSON, Generic, Show, Aeson.ToJSON) + +slotStart :: ChainInfo -> SlotNo -> SlotStart slotStart CInfo{..} = - flip Time.addUTCTime system_start + SlotStart + . flip Time.addUTCTime system_start . (* slot_duration gsis) . fromIntegral . 
unSlotNo +sinceSlot :: UTCTime -> SlotStart -> NominalDiffTime +sinceSlot t (SlotStart start) = Time.diffUTCTime t start + -- pChainParams :: Parser ChainParams -- pChainParams = -- ChainParams diff --git a/nix/workbench/locli/src/Cardano/Unlog/BlockProp.hs b/nix/workbench/locli/src/Cardano/Unlog/BlockProp.hs deleted file mode 100644 index f86b219943e..00000000000 --- a/nix/workbench/locli/src/Cardano/Unlog/BlockProp.hs +++ /dev/null @@ -1,287 +0,0 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE DeriveGeneric #-} -{-# LANGUAGE DerivingVia #-} -{-# LANGUAGE LambdaCase #-} -{-# LANGUAGE NamedFieldPuns #-} -{-# LANGUAGE RankNTypes #-} -{-# LANGUAGE RecordWildCards #-} -{-# LANGUAGE ScopedTypeVariables #-} -{-# LANGUAGE StrictData #-} -{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing #-} -{-# OPTIONS_GHC -Wno-unused-imports -Wno-partial-fields -Wno-unused-matches -Wno-deprecations -Wno-unused-local-binds -Wno-incomplete-record-updates #-} -module Cardano.Unlog.BlockProp (module Cardano.Unlog.BlockProp) where - -import Prelude (String, error, head, tail, id) -import Cardano.Prelude hiding (head) - -import Control.Arrow ((&&&), (***)) -import Control.Concurrent.Async (mapConcurrently) -import qualified Data.Aeson as AE -import Data.Function (on) -import Data.Either (partitionEithers, isLeft, isRight) -import Data.Maybe (catMaybes, mapMaybe) -import qualified Data.Sequence as Seq -import Data.Tuple (swap) -import Data.Vector (Vector) -import qualified Data.Map.Strict as Map - -import Data.Time.Clock (NominalDiffTime, UTCTime) -import qualified Data.Time.Clock as Time - -import Ouroboros.Network.Block (BlockNo(..), SlotNo(..)) - -import Data.Accum -import Cardano.Analysis.Profile -import Cardano.Unlog.LogObject -import Cardano.Unlog.Resources -import Cardano.Unlog.SlotStats - -import qualified Debug.Trace as D -import qualified Text.Printf as D - - --- | Block's events, as seen by its forger. -data BlockForgerEvents - = BlockForgerEvents - { bfeHost :: !Host - , bfeBlock :: !Hash - , bfeBlockPrev :: !Hash - , bfeBlockNo :: !BlockNo - , bfeSlotNo :: !SlotNo - , bfeSlotStart :: !UTCTime - , bfeForged :: !(Maybe UTCTime) - , bfeAdopted :: !(Maybe UTCTime) - , bfeChainDelta :: !Int - , bfeAnnounced :: !(Maybe UTCTime) - , bfeSent :: !(Maybe UTCTime) - } - deriving (Generic, AE.FromJSON, AE.ToJSON, Show) - -bfePrevBlock :: BlockForgerEvents -> Maybe Hash -bfePrevBlock x = case bfeBlockNo x of - 0 -> Nothing - _ -> Just $ bfeBlockPrev x - --- | Block's events, as seen by an observer. -data BlockObserverEvents - = BlockObserverEvents - { boeHost :: !Host - , boeBlock :: !Hash - , boeBlockNo :: !BlockNo - , boeSlotNo :: !SlotNo - , boeSlotStart :: !UTCTime - , boeNoticed :: !(Maybe UTCTime) - , boeFetched :: !(Maybe UTCTime) - , boeAdopted :: !(Maybe UTCTime) - , boeChainDelta :: !Int - , boeAnnounced :: !(Maybe UTCTime) - , boeSent :: !(Maybe UTCTime) - } - deriving (Generic, AE.FromJSON, AE.ToJSON, Show) - --- | Sum of observer and forger events alike. 
-type MachBlockEvents = Either BlockObserverEvents BlockForgerEvents - -mbeIsForger :: MachBlockEvents -> Bool -mbeIsForger = isRight - -ordBlockEv :: MachBlockEvents -> MachBlockEvents -> Ordering -ordBlockEv l r - | (on (>) $ either boeBlockNo bfeBlockNo) l r = GT - | (on (>) $ either boeBlockNo bfeBlockNo) r l = LT - | isRight l = GT - | isRight r = LT - | otherwise = EQ - -mbeBlockHash :: MachBlockEvents -> Hash -mbeBlockHash = either boeBlock bfeBlock - -mbeSetAdoptedDelta :: UTCTime -> Int -> MachBlockEvents -> MachBlockEvents -mbeSetAdoptedDelta v d = either (\x -> Left x { boeAdopted=Just v, boeChainDelta=d }) (\x -> Right x { bfeAdopted=Just v, bfeChainDelta=d }) - -mbeSetAnnounced, mbeSetSent :: UTCTime -> MachBlockEvents -> MachBlockEvents -mbeSetAnnounced v = either (\x -> Left x { boeAnnounced=Just v }) (\x -> Right x { bfeAnnounced=Just v }) -mbeSetSent v = either (\x -> Left x { boeSent=Just v }) (\x -> Right x { bfeSent=Just v }) - --- | Machine's private view of all the blocks. -type MachBlockMap - = Map.Map Hash MachBlockEvents - -blockMapHost :: MachBlockMap -> Host -blockMapHost = either boeHost bfeHost . head . Map.elems - -blockMapMaxBlock :: MachBlockMap -> MachBlockEvents -blockMapMaxBlock = maximumBy ordBlockEv . Map.elems - -blockMapBlock :: Hash -> MachBlockMap -> MachBlockEvents -blockMapBlock h = - fromMaybe (error $ "Invariant failed: missing hash " <> show h) . Map.lookup h - --- | A completed, compactified version of BlockObserverEvents. -data BlockObservation - = BlockObservation - { boObserver :: !Host - , boNoticed :: !UTCTime - , boFetched :: !UTCTime - , boAdopted :: !(Maybe UTCTime) - , boChainDelta :: !Int -- ^ ChainDelta during adoption - , boAnnounced :: !(Maybe UTCTime) - , boSent :: !(Maybe UTCTime) - } - deriving (Generic, AE.FromJSON, AE.ToJSON) - --- | All events related to a block. -data BlockEvents - = BlockEvents - { beForger :: !Host - , beBlock :: !Hash - , beBlockPrev :: !Hash - , beBlockNo :: !BlockNo - , beSlotNo :: !SlotNo - , beSlotStart :: !UTCTime - , beForged :: !UTCTime - , beAdopted :: !UTCTime - , beChainDelta :: !Int -- ^ ChainDelta during adoption - , beAnnounced :: !UTCTime - , beSent :: !UTCTime - , beObservations :: [BlockObservation] - } - deriving (Generic, AE.FromJSON, AE.ToJSON) - --- | Ordered list of all block events of a chain. -type ChainBlockEvents - = [BlockEvents] - -blockProp :: ChainInfo -> [(JsonLogfile, [LogObject])] -> IO ChainBlockEvents -blockProp ci xs = do - putStrLn ("blockProp: recovering block event maps" :: String) - eventMaps <- mapConcurrently (pure . 
blockEventsFromLogObjects ci) xs - let finalBlockEv = maximumBy ordBlockEv $ blockMapMaxBlock <$> eventMaps - tipHash = rewindChain eventMaps 1 (mbeBlockHash finalBlockEv) - tipBlock = getBlockForge eventMaps tipHash - chain = rebuildChain eventMaps tipHash - putStrLn ("tip block: " <> show tipBlock :: String) - putStrLn ("chain length: " <> show (length chain) :: String) - pure chain - where - rewindChain :: [MachBlockMap] -> Int -> Hash -> Hash - rewindChain eventMaps count tip = go tip count - where go tip = \case - 0 -> tip - n -> go (bfeBlockPrev $ getBlockForge eventMaps tip) (n - 1) - - getBlockForge :: [MachBlockMap] -> Hash -> BlockForgerEvents - getBlockForge xs h = - either (error "Invariant failed: finalBlockEv isn't a forge.") id $ - maximumBy ordBlockEv $ - mapMaybe (Map.lookup h) xs - - rebuildChain :: [MachBlockMap] -> Hash -> ChainBlockEvents - rebuildChain machBlockMaps tip = go (Just tip) [] - where go Nothing acc = acc - go (Just h) acc = - let blkEvs@(forgerEv, _) = collectAllBlockEvents machBlockMaps h - in go (bfePrevBlock forgerEv) - (liftBlockEvents blkEvs : acc) - - collectAllBlockEvents :: [MachBlockMap] -> Hash -> (BlockForgerEvents, [BlockObserverEvents]) - collectAllBlockEvents xs blk = - partitionEithers (mapMaybe (Map.lookup blk) xs) - & swap & first head - - liftBlockEvents :: (BlockForgerEvents, [BlockObserverEvents]) -> BlockEvents - liftBlockEvents (BlockForgerEvents{..}, os) = - BlockEvents - { beForger = bfeHost - , beBlock = bfeBlock - , beBlockPrev = bfeBlockPrev - , beBlockNo = bfeBlockNo - , beSlotNo = bfeSlotNo - , beSlotStart = bfeSlotStart - , beForged = bfeForged & miss "Forged" - , beAdopted = bfeAdopted & miss "Adopted (forger)" - , beChainDelta = bfeChainDelta - , beAnnounced = bfeAnnounced & miss "Announced (forger)" - , beSent = bfeSent & miss "Sent (forger)" - , beObservations = catMaybes $ - os <&> \BlockObserverEvents{..}-> - BlockObservation - <$> Just boeHost - <*> boeNoticed - <*> boeFetched - <*> Just boeAdopted - <*> Just boeChainDelta - <*> Just boeAnnounced - <*> Just boeSent - } - where - miss :: String -> Maybe a -> a - miss slotDesc = fromMaybe $ error $ mconcat - [ "While processing ", show bfeBlockNo, " hash ", show bfeBlock - , " forged by ", show bfeHost - , " -- missing slot: ", slotDesc - ] - --- | Given a single machine's log object stream, recover its block map. 
-blockEventsFromLogObjects :: ChainInfo -> (JsonLogfile, [LogObject]) -> MachBlockMap -blockEventsFromLogObjects ci (fp, xs) = - foldl (blockPropMachEventsStep ci fp) mempty xs - -blockPropMachEventsStep :: ChainInfo -> JsonLogfile -> MachBlockMap -> LogObject -> MachBlockMap -blockPropMachEventsStep ci fp bMap = \case - LogObject{loAt, loHost, loBody=LOBlockForged{loBlock,loPrev,loBlockNo,loSlotNo}} -> - flip (Map.insert loBlock) bMap $ - Right (mbmGetForger loHost loBlock bMap "LOBlockForged" - & fromMaybe (mbe0forger loHost loBlock loPrev loBlockNo loSlotNo)) - { bfeForged = Just loAt } - LogObject{loAt, loHost, loBody=LOBlockAddedToCurrentChain{loBlock,loChainLengthDelta}} -> - let mbe0 = Map.lookup loBlock bMap & fromMaybe - (err loHost loBlock "LOBlockAddedToCurrentChain leads") - in Map.insert loBlock (mbeSetAdoptedDelta loAt loChainLengthDelta mbe0) bMap - LogObject{loAt, loHost, loBody=LOChainSyncServerSendHeader{loBlock}} -> - let mbe0 = Map.lookup loBlock bMap & fromMaybe - (err loHost loBlock "LOChainSyncServerSendHeader leads") - in Map.insert loBlock (mbeSetAnnounced loAt mbe0) bMap - LogObject{loAt, loHost, loBody=LOBlockFetchServerSend{loBlock}} -> - -- D.trace (D.printf "mbeSetSent %s %s" (show loHost :: String) (show loBlock :: String)) $ - let mbe0 = Map.lookup loBlock bMap & fromMaybe - (err loHost loBlock "LOBlockFetchServerSend leads") - in Map.insert loBlock (mbeSetSent loAt mbe0) bMap - LogObject{loAt, loHost, loBody=LOChainSyncClientSeenHeader{loBlock,loBlockNo,loSlotNo}} -> - case Map.lookup loBlock bMap of - -- We only record the first ChainSync observation of a block. - Nothing -> Map.insert loBlock - (Left $ - (mbe0observ loHost loBlock loBlockNo loSlotNo) - { boeNoticed = Just loAt }) - bMap - Just{} -> bMap - LogObject{loAt, loHost, loBody=LOBlockFetchClientCompletedFetch{loBlock}} -> - flip (Map.insert loBlock) bMap $ - Left (mbmGetObserver loHost loBlock bMap "LOBlockFetchClientCompletedFetch") - { boeFetched = Just loAt } - _ -> bMap - where - err :: Host -> Hash -> String -> a - err ho ha desc = error $ mconcat - [ "In file ", show fp - , ", for host ", show ho - , ", for block ", show ha - , ": ", desc - ] - mbe0observ :: Host -> Hash -> BlockNo -> SlotNo -> BlockObserverEvents - mbe0observ ho ha bn sn = - BlockObserverEvents ho ha bn sn (slotStart ci sn) - Nothing Nothing Nothing 0 Nothing Nothing - mbe0forger :: Host -> Hash -> Hash -> BlockNo -> SlotNo -> BlockForgerEvents - mbe0forger ho ha hp bn sn = - BlockForgerEvents ho ha hp bn sn (slotStart ci sn) - Nothing Nothing 0 Nothing Nothing - mbmGetObserver :: Host -> Hash -> MachBlockMap -> String -> BlockObserverEvents - mbmGetObserver ho ha m eDesc = case Map.lookup ha m of - Just (Left x) -> x - Just (Right x) -> err ho ha (eDesc <> " after a BlockForgerEvents") - Nothing -> err ho ha (eDesc <> " leads") - mbmGetForger :: Host -> Hash -> MachBlockMap -> String -> Maybe BlockForgerEvents - mbmGetForger ho ha m eDesc = Map.lookup ha m <&> - either (const $ err ho ha (eDesc <> " after a BlockObserverEvents")) id diff --git a/nix/workbench/locli/src/Cardano/Unlog/Commands.hs b/nix/workbench/locli/src/Cardano/Unlog/Commands.hs index 75dd1eec6ef..89c40ce0ca6 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/Commands.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/Commands.hs @@ -20,17 +20,17 @@ import Cardano.Unlog.LogObject hiding (Text) -- | All the CLI subcommands under \"analysis\". 
-- data AnalysisCommand - = MachineTimeline + = MachineTimelineCmd JsonGenesisFile JsonRunMetafile [JsonLogfile] MachineTimelineOutputFiles - | BlockPropagation + | BlockPropagationCmd JsonGenesisFile JsonRunMetafile [JsonLogfile] BlockPropagationOutputFiles - | SubstringKeys + | SubstringKeysCmd deriving (Show) data MachineTimelineOutputFiles @@ -58,9 +58,9 @@ data BlockPropagationOutputFiles renderAnalysisCommand :: AnalysisCommand -> Text renderAnalysisCommand sc = case sc of - MachineTimeline {} -> "analyse machine-timeline" - BlockPropagation {} -> "analyse block-propagation" - SubstringKeys {} -> "analyse substring-keys" + MachineTimelineCmd {} -> "analyse machine-timeline" + BlockPropagationCmd {} -> "analyse block-propagation" + SubstringKeysCmd {} -> "analyse substring-keys" parseMachineTimelineOutputFiles :: Parser MachineTimelineOutputFiles parseMachineTimelineOutputFiles = @@ -111,7 +111,7 @@ parseAnalysisCommands = Opt.subparser $ mconcat [ Opt.command "machine-timeline" - (Opt.info (MachineTimeline + (Opt.info (MachineTimelineCmd <$> argJsonGenesisFile "genesis" "Genesis file of the run" <*> argJsonRunMetafile "run-metafile" @@ -120,7 +120,7 @@ parseAnalysisCommands = <*> parseMachineTimelineOutputFiles) $ Opt.progDesc "Analyse leadership checks") , Opt.command "block-propagation" - (Opt.info (BlockPropagation + (Opt.info (BlockPropagationCmd <$> argJsonGenesisFile "genesis" "Genesis file of the run" <*> argJsonRunMetafile "run-metafile" @@ -129,7 +129,7 @@ parseAnalysisCommands = <*> parseBlockPropagationOutputFiles) $ Opt.progDesc "Analyse leadership checks") , Opt.command "substring-keys" - (Opt.info (pure SubstringKeys) $ + (Opt.info (pure SubstringKeysCmd) $ Opt.progDesc "Dump substrings that narrow logs to relevant subset") ] diff --git a/nix/workbench/locli/src/Cardano/Unlog/LogObject.hs b/nix/workbench/locli/src/Cardano/Unlog/LogObject.hs index 96e9241bf44..e2073b7c027 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/LogObject.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/LogObject.hs @@ -5,7 +5,7 @@ module Cardano.Unlog.LogObject (module Cardano.Unlog.LogObject) where -import Prelude (error) +import Prelude (String, error, id) import qualified Prelude import Cardano.Prelude hiding (Text) @@ -27,13 +27,19 @@ import Ouroboros.Network.Block (BlockNo(..), SlotNo(..)) import Cardano.BM.Stats.Resources +import Data.Accum (zeroUTCTime) + type Text = ShortText readLogObjectStream :: JsonLogfile -> IO [LogObject] readLogObjectStream (JsonLogfile f) = LBS.readFile f - <&> catMaybes . fmap AE.decode . LBS.split (fromIntegral $ fromEnum '\n') + <&> + fmap (either (LogObject zeroUTCTime "DecodeError" "" (TId "0") . LODecodeError) + id + . AE.eitherDecode) + . LBS.split (fromIntegral $ fromEnum '\n') newtype JsonRunMetafile = JsonRunMetafile { unJsonRunMetafile :: FilePath } @@ -68,6 +74,7 @@ data LogObject { loAt :: !UTCTime , loKind :: !Text , loHost :: !Host + , loTid :: !TId , loBody :: !LOBody } deriving (Generic, Show) @@ -90,13 +97,16 @@ newtype TId = TId { unTId :: ShortText } newtype Hash = Hash { unHash :: ShortText } deriving (Eq, Ord, FromJSON, ToJSON) +shortHash :: Hash -> LText.Text +shortHash = toText . Text.take 6 . unHash + instance Show Hash where show = LText.unpack . toText . unHash instance AE.ToJSONKey Hash where toJSONKey = AE.toJSONKeyText (toText . 
unHash) newtype Host = Host { unHost :: ShortText } - deriving (Eq, Ord, Show, FromJSON, ToJSON) + deriving (Eq, IsString, Ord, Show, FromJSON, ToJSON) instance FromJSON BlockNo where parseJSON o = BlockNo <$> parseJSON o @@ -107,44 +117,44 @@ instance ToJSON BlockNo where -- LogObject stream interpretation -- -interpreters :: Map Text (Object -> TId -> Parser LOBody) +interpreters :: Map Text (Object -> Parser LOBody) interpreters = Map.fromList [ (,) "TraceStartLeadershipCheck" $ - \v _ -> LOTraceStartLeadershipCheck + \v -> LOTraceStartLeadershipCheck <$> v .: "slot" <*> v .: "utxoSize" <*> v .: "chainDensity" , (,) "TraceBlockContext" $ - \v _ -> LOBlockContext + \v -> LOBlockContext <$> v .: "tipBlockNo" , (,) "TraceNodeIsLeader" $ - \v _ -> LOTraceNodeIsLeader + \v -> LOTraceNodeIsLeader <$> v .: "slot" , (,) "TraceNodeNotLeader" $ - \v _ -> LOTraceNodeNotLeader + \v -> LOTraceNodeNotLeader <$> v .: "slot" , (,) "TraceMempoolAddedTx" $ - \v _ -> do + \v -> do x :: Object <- v .: "mempoolSize" LOMempoolTxs <$> x .: "numTxs" , (,) "TraceMempoolRemoveTxs" $ - \v _ -> do + \v -> do x :: Object <- v .: "mempoolSize" LOMempoolTxs <$> x .: "numTxs" , (,) "TraceMempoolRejectedTx" $ - \_ _ -> pure LOMempoolRejectedTx + \_ -> pure LOMempoolRejectedTx , (,) "TraceLedgerEvent.TookSnapshot" $ - \_ _ -> pure LOLedgerTookSnapshot + \_ -> pure LOLedgerTookSnapshot , (,) "TraceBenchTxSubSummary" $ - \v _ -> do + \v -> do x :: Object <- v .: "summary" LOGeneratorSummary <$> ((x .: "ssFailures" :: Parser [Text]) @@ -154,57 +164,60 @@ interpreters = Map.fromList <*> x .: "ssThreadwiseTps" , (,) "TraceBenchTxSubServAck" $ - \v _ -> LOTxsAcked <$> v .: "txIds" + \v -> LOTxsAcked <$> v .: "txIds" , (,) "Resources" $ - \v _ -> LOResources <$> parsePartialResourceStates (Object v) + \v -> LOResources <$> parsePartialResourceStates (Object v) , (,) "TraceTxSubmissionCollected" $ - \v tid -> LOTxsCollected - <$> pure tid - <*> v .: "count" + \v -> LOTxsCollected + <$> v .: "count" , (,) "TraceTxSubmissionProcessed" $ - \v tid -> LOTxsProcessed - <$> pure tid - <*> v .: "accepted" + \v -> LOTxsProcessed + <$> v .: "accepted" <*> v .: "rejected" , (,) "TraceForgedBlock" $ - \v _ -> LOBlockForged + \v -> LOBlockForged <$> v .: "block" <*> v .: "blockPrev" <*> v .: "blockNo" <*> v .: "slot" , (,) "TraceAddBlockEvent.AddedToCurrentChain" $ - \v _ -> LOBlockAddedToCurrentChain + \v -> LOBlockAddedToCurrentChain <$> ((v .: "newtip") <&> hashFromPoint) <*> v .: "chainLengthDelta" -- TODO: we should clarify the distinction between the two cases (^ and v). , (,) "TraceAdoptedBlock" $ - \v _ -> LOBlockAddedToCurrentChain + \v -> LOBlockAddedToCurrentChain <$> v .: "blockHash" <*> pure 1 , (,) "ChainSyncServerEvent.TraceChainSyncServerRead.AddBlock" $ - \v _ -> LOChainSyncServerSendHeader + \v -> LOChainSyncServerSendHeader <$> v .: "block" <*> v .: "blockNo" <*> v .: "slot" , (,) "ChainSyncServerEvent.TraceChainSyncServerReadBlocked.AddBlock" $ - \v _ -> LOChainSyncServerSendHeader + \v -> LOChainSyncServerSendHeader <$> v .: "block" <*> v .: "blockNo" <*> v .: "slot" + -- v, but not ^ -- how is that possible? 
, (,) "TraceBlockFetchServerSendBlock" $ - \v _ -> LOBlockFetchServerSend + \v -> LOBlockFetchServerSending <$> v .: "block" + , (,) "SendFetchRequest" $ + \v -> LOBlockFetchClientRequested + <$> v .: "head" + <*> v .: "length" , (,) "ChainSyncClientEvent.TraceDownloadedHeader" $ - \v _ -> LOChainSyncClientSeenHeader + \v -> LOChainSyncClientSeenHeader <$> v .: "block" <*> v .: "blockNo" <*> v .: "slot" , (,) "CompletedBlockFetch" $ - \v _ -> LOBlockFetchClientCompletedFetch + \v -> LOBlockFetchClientCompletedFetch <$> v .: "block" ] where @@ -225,8 +238,8 @@ data LOBody | LOBlockContext !Word64 | LOGeneratorSummary !Bool !Word64 !NominalDiffTime (Vector Float) | LOTxsAcked !(Vector Text) - | LOTxsCollected !TId !Word64 - | LOTxsProcessed !TId !Word64 !Int + | LOTxsCollected !Word64 + | LOTxsProcessed !Word64 !Int | LOBlockForged { loBlock :: !Hash , loPrev :: !Hash @@ -235,15 +248,19 @@ data LOBody } | LOBlockAddedToCurrentChain { loBlock :: !Hash - , loChainLengthDelta :: !Int + , loLength :: !Int } | LOChainSyncServerSendHeader { loBlock :: !Hash , loBlockNo :: !BlockNo , loSlotNo :: !SlotNo } - | LOBlockFetchServerSend + | LOBlockFetchServerSending + { loBlock :: !Hash + } + | LOBlockFetchClientRequested { loBlock :: !Hash + , loLength :: !Int } | LOChainSyncClientSeenHeader { loBlock :: !Hash @@ -254,6 +271,7 @@ data LOBody { loBlock :: !Hash } | LOAny !Object + | LODecodeError !String deriving (Generic, Show) instance ToJSON LOBody @@ -261,15 +279,15 @@ instance ToJSON LOBody instance FromJSON LogObject where parseJSON = AE.withObject "LogObject" $ \v -> do body :: Object <- v .: "data" - tid :: TId <- v .: "thread" -- XXX: fix node causing the need for this workaround (,) unwrapped kind <- unwrap "credentials" "val" body LogObject <$> v .: "at" <*> pure kind <*> v .: "host" + <*> v .: "thread" <*> case Map.lookup kind interpreters of - Just interp -> interp unwrapped tid + Just interp -> interp unwrapped Nothing -> pure $ LOAny unwrapped where unwrap :: Text -> Text -> Object -> Parser (Object, Text) diff --git a/nix/workbench/locli/src/Cardano/Unlog/Render.hs b/nix/workbench/locli/src/Cardano/Unlog/Render.hs new file mode 100644 index 00000000000..44e1b7808c3 --- /dev/null +++ b/nix/workbench/locli/src/Cardano/Unlog/Render.hs @@ -0,0 +1,182 @@ +{-# LANGUAGE LambdaCase #-} +{-# LANGUAGE RankNTypes #-} +{-# LANGUAGE RecordWildCards #-} +{-# LANGUAGE ScopedTypeVariables #-} +{-# LANGUAGE ViewPatterns #-} +module Cardano.Unlog.Render (module Cardano.Unlog.Render) where + +import Prelude (head, id, tail) +import Cardano.Prelude hiding (head) + +import Control.Arrow ((&&&)) +import Data.List (dropWhileEnd) +import qualified Data.Text as T +import Data.Time.Clock (NominalDiffTime) +import Text.Printf (printf) + +import Data.Distribution + + +data RenderMode + = RenderPretty + | RenderCsv + deriving (Eq, Show) + +class Show a => RenderDistributions a where + rdFields :: [DField a] + +class Show a => RenderTimeline a where + rtFields :: [IField a] + rtCommentary :: a -> [Text] + rtCommentary _ = [] + +-- | Incapsulate all information necessary to render a column. 
+data Field s a + = Field + { fWidth :: Int + , fLeftPad :: Int + , fId :: Text + , fHead1 :: Text + , fHead2 :: Text + , fSelect :: s a + } + +type DField a = Field DSelect a +type IField a = Field ISelect a + +data DSelect a + = DInt (a -> Distribution Float Int) + | DWord64 (a -> Distribution Float Word64) + | DFloat (a -> Distribution Float Float) + | DDeltaT (a -> Distribution Float NominalDiffTime) + +data ISelect a + = IInt (a -> Int) + | IWord64 (a -> Word64) + | IFloat (a -> Float) + | IDeltaT (a -> NominalDiffTime) + | IText (a -> Text) + +mapSomeFieldDistribution :: (forall b. Distribution Float b -> c) -> a -> DSelect a -> c +mapSomeFieldDistribution f a = \case + DInt s -> f (s a) + DWord64 s -> f (s a) + DFloat s -> f (s a) + DDeltaT s -> f (s a) + +renderTimeline :: forall a. RenderTimeline a => [a] -> [Text] +renderTimeline xs = + concatMap (uncurry fLine) $ zip xs [(0 :: Int)..] + where + fLine :: a -> Int -> [Text] + fLine l i = (if i `mod` 33 == 0 then catMaybes [head1, head2] else []) + <> (entry l : rtCommentary l) + + entry :: a -> Text + entry v = renderLineDist $ + \Field{..} -> + let w = show fWidth + in case fSelect of + IInt (($v)->x) -> T.pack $ printf ('%':(w++"d")) x + IWord64 (($v)->x) -> T.pack $ printf ('%':(w++"d")) x + IFloat (($v)->x) -> T.take fWidth $ T.pack $ + printf ('%':'.':(show (fWidth - 2)++"F")) x + IDeltaT (($v)->x) -> T.take fWidth . T.dropWhileEnd (== 's') $ show x + IText (($v)->x) -> T.take fWidth . T.dropWhileEnd (== 's') $ x + + fields :: [IField a] + fields = rtFields + + head1, head2 :: Maybe Text + head1 = if all ((== 0) . T.length . fHead1) fields then Nothing + else Just (renderLineHead1 (uncurry T.take . ((+1) . fWidth &&& fHead1))) + head2 = if all ((== 0) . T.length . fHead2) fields then Nothing + else Just (renderLineHead2 (uncurry T.take . ((+1) . fWidth &&& fHead2))) + + renderLineHead1 = mconcat . renderLine' (const 0) ((+ 1) . fWidth) + renderLineHead2 = mconcat . renderLine' fLeftPad ((+ 1) . fWidth) + renderLineDist = T.intercalate " " . renderLine' fLeftPad fWidth + + renderLine' :: + (IField a -> Int) -> (IField a -> Int) -> (IField a -> Text) -> [Text] + renderLine' lpfn wfn rfn = renderField lpfn wfn rfn <$> fields + renderField lpfn wfn rfn f = T.replicate (lpfn f) " " <> T.center (wfn f) ' ' (rfn f) + +renderDistributions :: forall a. RenderDistributions a => RenderMode -> a -> [Text] +renderDistributions mode x = + case mode of + RenderPretty -> catMaybes [head1, head2] <> pLines <> sizeAvg + RenderCsv -> headCsv : pLines + where + pLines :: [Text] + pLines = fLine <$> [0..(nPercs - 1)] + + fLine :: Int -> Text + fLine pctIx = (if mode == RenderPretty + then renderLineDistPretty + else renderLineDistCsv) $ + \Field{..} -> + let getCapPerc :: forall b c. Distribution b c -> c + getCapPerc d = dPercIx d pctIx + in T.pack $ case fSelect of + DInt (($x)->d) -> (if mode == RenderPretty + then printf "%*d" fWidth + else printf "%d") (getCapPerc d) + DWord64 (($x)->d) -> (if mode == RenderPretty + then printf "%*d" fWidth + else printf "%d") (getCapPerc d) + DFloat (($x)->d) -> (if mode == RenderPretty + then take fWidth . printf "%*F" (fWidth - 2) + else printf "%F") (getCapPerc d) + DDeltaT (($x)->d) -> (if mode == RenderPretty + then take fWidth else id) + . dropWhileEnd (== 's') . show $ getCapPerc d + + head1, head2 :: Maybe Text + head1 = if all ((== 0) . T.length . fHead1) fields then Nothing + else Just (renderLineHead1 (uncurry T.take . ((+1) . fWidth &&& fHead1))) + head2 = if all ((== 0) . T.length . 
fHead2) fields then Nothing + else Just (renderLineHead2 (uncurry T.take . ((+1) . fWidth &&& fHead2))) + headCsv = T.intercalate "," $ fId <$> fields + + sizeAvg :: [Text] + sizeAvg = fmap (T.intercalate " ") + [ (T.center (fWidth (head fields)) ' ' "avg" :) $ + (\f -> flip (renderField fLeftPad fWidth) f $ const $ + mapSomeFieldDistribution + (T.take (fWidth f) .T.pack . printf "%F" . dAverage) x (fSelect f)) + <$> tail fields + , (T.center (fWidth (head fields)) ' ' "size" :) $ + (\f -> flip (renderField fLeftPad fWidth) f $ const $ + mapSomeFieldDistribution + (T.take (fWidth f) . T.pack . show . dSize) x (fSelect f)) + <$> tail fields + ] + + renderLineHead1 = mconcat . renderLine' (const 0) ((+ 1) . fWidth) + renderLineHead2 = mconcat . renderLine' fLeftPad ((+ 1) . fWidth) + renderLineDistPretty = T.intercalate " " . renderLine' fLeftPad fWidth + renderLineDistCsv = T.intercalate "," . renderLine' (const 0) (const 0) + + renderLine' :: + (DField a -> Int) -> (DField a -> Int) -> (DField a -> Text) -> [Text] + renderLine' lpfn wfn rfn = renderField lpfn wfn rfn <$> fields + renderField lpfn wfn rfn f = T.replicate (lpfn f) " " <> T.center (wfn f) ' ' (rfn f) + + fields :: [DField a] + fields = percField : rdFields + percField :: DField a + percField = Field 6 0 "%tile" "" "%tile" (DFloat $ const percsDistrib) + nPercs = length $ dPercentiles percsDistrib + percsDistrib = mapSomeFieldDistribution + distribPercsAsDistrib x (fSelect $ head rdFields) + +-- * Auxiliaries +-- +distribPercsAsDistrib :: Distribution Float b -> Distribution Float Float +distribPercsAsDistrib Distribution{..} = Distribution 1 0.5 $ + (\p -> p {pctSample = psFrac (pctSpec p)}) <$> dPercentiles + +nChunksEachOf :: Int -> Int -> Text -> [Text] +nChunksEachOf chunks each center = + T.chunksOf each (T.center (each * chunks) ' ' center) diff --git a/nix/workbench/locli/src/Cardano/Unlog/Resources.hs b/nix/workbench/locli/src/Cardano/Unlog/Resources.hs index ae719819b19..cfd6c8f83cf 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/Resources.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/Resources.hs @@ -17,7 +17,6 @@ import Cardano.Prelude import Data.Accum import Data.Distribution -import qualified Data.Sequence as Seq import Data.Time.Clock (UTCTime) import Cardano.BM.Stats.Resources @@ -55,14 +54,14 @@ computeResDistrib :: forall a . [PercSpec Float] -> ResDistribProjections a - -> Seq.Seq a + -> [a] -> Resources (Distribution Float Word64) computeResDistrib percentiles projs xs = compDist <$> projs where compDist :: (a -> Maybe Word64) -> Distribution Float Word64 compDist proj = computeDistribution percentiles - (Seq.fromList . catMaybes . toList $ proj <$> xs) + (catMaybes . 
toList $ proj <$> xs) type ResContinuity a = Resources (a -> Maybe a) diff --git a/nix/workbench/locli/src/Cardano/Unlog/Run.hs b/nix/workbench/locli/src/Cardano/Unlog/Run.hs index bfa5e378f4b..6de40521c59 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/Run.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/Run.hs @@ -12,9 +12,9 @@ import Cardano.Prelude import Control.Monad.Trans.Except.Extra (firstExceptT) import qualified Data.Text as Text -import Cardano.Unlog.Commands (AnalysisCommand) -import Cardano.Unlog.Summary (AnalysisCmdError, renderAnalysisCmdError, +import Cardano.Analysis.Driver (AnalysisCmdError, renderAnalysisCmdError, runAnalysisCommand) +import Cardano.Unlog.Commands (AnalysisCommand) import Cardano.Config.Git.Rev (gitRev) import Data.Version (showVersion) diff --git a/nix/workbench/locli/src/Cardano/Unlog/SlotStats.hs b/nix/workbench/locli/src/Cardano/Unlog/SlotStats.hs index 2056ecc49c4..78e47602140 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/SlotStats.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/SlotStats.hs @@ -3,15 +3,15 @@ {-# LANGUAGE RankNTypes #-} {-# LANGUAGE RecordWildCards #-} {-# LANGUAGE ScopedTypeVariables #-} - +{- HLINT ignore "Use head" -} module Cardano.Unlog.SlotStats (module Cardano.Unlog.SlotStats) where -import qualified Prelude as P +import Prelude ((!!)) import Cardano.Prelude import Data.Aeson -import qualified Data.Sequence as Seq -import qualified Data.Text as Text +import qualified Data.Text as T +import Data.List (dropWhileEnd) import Data.List.Split (splitOn) import Data.Time.Clock (UTCTime, NominalDiffTime) @@ -20,6 +20,8 @@ import Text.Printf import Ouroboros.Network.Block (SlotNo(..)) import Data.Accum +import Cardano.Analysis.Profile +import Cardano.Unlog.Render import Cardano.Unlog.Resources @@ -30,7 +32,7 @@ data SlotStats { slSlot :: !SlotNo , slEpoch :: !Word64 , slEpochSlot :: !Word64 - , slStart :: !UTCTime + , slStart :: !SlotStart , slCountChecks :: !Word64 , slCountLeads :: !Word64 , slChainDBSnap :: !Word64 @@ -52,78 +54,70 @@ data SlotStats } deriving (Generic, Show) -instance ToJSON SlotStats - -slotHeadE, slotFormatE :: Text -slotHeadP, slotFormatP :: Text -slotHeadP = - "abs. slot block block lead leader CDB rej check lead mempool tx chain %CPU GCs Produc- Memory use, kB Alloc rate Mempool UTxO Absolute" <>"\n"<> - "slot# epoch no. -less checks ships snap txs span span span col acc rej density all/ GC/mut maj/min tivity RSS Heap Live Alloc /mut sec,kB txs entries slot time" -slotHeadE = - "abs.slot#,slot,epoch,block,blockless,checkSpan,leadSpan,leadShips,cdbSnap,rejTx,checkSpan,mempoolTxSpan,chainDens,%CPU,%GC,%MUT,Productiv,MemLiveKb,MemAllocKb,MemRSSKb,AllocRate/Mut,MempoolTxs,UTxO" -slotFormatP = "%5d %4d:%2d %4d %2d %2d %2d %2d %2d %7s %5s %5s %2d %2d %2d %0.3f %3s %3s %3s %2s %4s %5s %5s %5s %5s %4s %8s %4d %9d %s" -slotFormatE = "%d,%d,%d,%d,%d,%d,%d,%d,%d,%s,%s,%s,%d,%d%0.3f,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%d,%d,%s" - -slotLine :: Bool -> Text -> SlotStats -> Text -slotLine exportMode leadershipF SlotStats{..} = Text.pack $ - printf (Text.unpack leadershipF) - sl epsl epo blk blkl chks lds cdbsn rejtx spanC spanL subdt scol sacc srej dens cpu gc mut majg ming pro rss hea liv alc atm mpo utx start - where sl = unSlotNo slSlot - epsl = slEpochSlot - epo = slEpoch - blk = slBlockNo - blkl = slBlockless - chks = slCountChecks - lds = slCountLeads - cdbsn = slChainDBSnap - rejtx = slRejectedTx - subdt = maybe "" (Text.init . 
show) slTxsMemSpan - scol = slTxsCollected - sacc = slTxsAccepted - srej = slTxsRejected - spanC = Text.init $ show slSpanCheck - spanL = Text.init $ show slSpanLead - cpu = d 3 $ rCentiCpu slResources - dens = slDensity - gc = d 2 $ min 999 -- workaround for ghc-8.10.x - <$> rCentiGC slResources - mut = d 2 $ min 999 -- workaround for ghc-8.10.x - <$> rCentiMut slResources - majg = d 2 $ rGcsMajor slResources - ming = d 2 $ rGcsMinor slResources - pro = f 2 $ calcProd <$> (min 6 . -- workaround for ghc-8.10.2 - fromIntegral <$> rCentiMut slResources :: Maybe Float) - <*> (fromIntegral <$> rCentiCpu slResources) - rss = d 5 (rRSS slResources) - hea = d 5 (rHeap slResources) - liv = d 5 (rLive slResources) - alc = d 5 (rAlloc slResources) - atm = d 8 $ - (ceiling :: Float -> Int) - <$> ((/) <$> (fromIntegral . (100 *) <$> rAlloc slResources) - <*> (fromIntegral . max 1 . (1024 *) <$> rCentiMut slResources)) - mpo = slMempoolTxs - utx = slUtxoSize - start = " " `splitOn` show slStart P.!! 1 +instance RenderTimeline SlotStats where + rtFields = + -- Width LeftPad + [ Field 5 0 "abs.slot" "abs." "slot#" $ IWord64 (unSlotNo . slSlot) + , Field 4 0 "slot" " epo" "slot" $ IWord64 slEpochSlot + , Field 2 0 "epoch" "ch " "#" $ IWord64 slEpoch + , Field 5 0 "block" "block" "no." $ IWord64 slBlockNo + , Field 5 0 "blockGap" "block" "gap" $ IWord64 slBlockless + , Field 3 0 "leadChecks" "lead" "chk" $ IWord64 slCountChecks + , Field 3 0 "leadShips" "ship" "win" $ IWord64 slCountLeads + , Field 4 0 "CDBSnap" "CDB" "snap" $ IWord64 slChainDBSnap + , Field 3 0 "rejTxs" "rej" "txs" $ IWord64 slRejectedTx + , Field 7 0 "checkSpan" "check" "span" $ IDeltaT slSpanCheck + , Field 5 0 "leadSpan" "lead" "span" $ IDeltaT slSpanLead + , Field 4 0 "mempoolTxSpan" (t 4!!0) "span" $ IText (maybe "" show.slTxsMemSpan) + , Field 4 0 "txsColl" (t 4!!1) "cold" $ IWord64 slTxsCollected + , Field 4 0 "txsAcc" (t 4!!2) "accd" $ IWord64 slTxsAccepted + , Field 4 0 "txsRej" (t 4!!3) "rejd" $ IWord64 slTxsRejected + , Field 5 1 "chDensity" "chain" "dens." $ IFloat slDensity + , Field 3 0 "CPU%" (c 3!!0) "all" $ IText (d 3.rCentiCpu.slResources) + , Field 3 0 "GC%" (c 3!!1) "GC" $ IText (d 3.fmap (min 999).rCentiGC.slResources) + , Field 3 0 "MUT%" (c 3!!2) "mut" $ IText (d 3.fmap (min 999).rCentiMut.slResources) + , Field 3 0 "majFlt" (g 3!!0) "maj" $ IText (d 3.rGcsMajor.slResources) + , Field 3 0 "minFlt" (g 3!!1) "min" $ IText (d 3.rGcsMinor.slResources) + , Field 6 0 "productiv" "Produc" "tivity" $ IText + (\SlotStats{..}-> + f 4 $ calcProd <$> (min 6 . -- workaround for ghc-8.10.2 + fromIntegral <$> rCentiMut slResources :: Maybe Float) + <*> (fromIntegral <$> rCentiCpu slResources)) + , Field 5 0 "rssMB" (m 5!!0) "RSS" $ IText (d 5.rRSS .slResources) + , Field 5 0 "heapMB" (m 5!!1) "Heap" $ IText (d 5.rHeap .slResources) + , Field 5 0 "liveMB" (m 5!!2) "Live" $ IText (d 5.rLive .slResources) + , Field 5 0 "allocatedMB" "Allocd" "MB" $ IText (d 5.rAlloc.slResources) + , Field 6 0 "allocMut" "Alloc/" "mutSec" $ IText + (\SlotStats{..}-> + d 5 $ + (ceiling :: Float -> Int) + <$> ((/) <$> (fromIntegral . (100 *) <$> rAlloc slResources) + <*> (fromIntegral . max 1 . (1024 *) <$> rCentiMut slResources))) + , Field 7 0 "mempoolTxs" "Mempool" "txs" $ IWord64 slMempoolTxs + , Field 9 0 "utxoEntries" "UTxO" "entries" $ IWord64 slUtxoSize + , Field 10 0 "absSlotTime" "Absolute" "slot time" $ IText + (\SlotStats{..}-> + T.pack $ " " `splitOn` show slStart !! 
1) + ] + where + t w = nChunksEachOf 4 (w + 1) "mempool tx" + c w = nChunksEachOf 3 (w + 1) "%CPU" + g w = nChunksEachOf 2 (w + 1) "GCs" + m w = nChunksEachOf 3 (w + 1) "Memory use, MB" + + d, f :: PrintfArg a => Int -> Maybe a -> Text + d width = \case + Just x -> T.pack $ printf ("%"<>"" --(if exportMode then "0" else "") + <>show width<>"d") x + Nothing -> mconcat (replicate width "-") + f width = \case + Just x -> T.pack $ printf ("%0."<>show width<>"f") x + Nothing -> mconcat (replicate width "-") + + calcProd :: Float -> Float -> Float + calcProd mut' cpu' = if cpu' == 0 then 1 else mut' / cpu' - calcProd :: Float -> Float -> Float - calcProd mut' cpu' = if cpu' == 0 then 1 else mut' / cpu' - - d, f :: PrintfArg a => Int -> Maybe a -> Text - d width = \case - Just x -> Text.pack $ printf ("%"<>(if exportMode then "0" else "") - <>show width<>"d") x - Nothing -> mconcat (replicate width "-") - f width = \case - Just x -> Text.pack $ printf ("%0."<>show width<>"f") x - Nothing -> mconcat (replicate width "-") - -renderSlotTimeline :: Text -> Text -> Bool -> Seq SlotStats -> Handle -> IO () -renderSlotTimeline leadHead fmt exportMode slotStats hnd = do - forM_ (zip (toList slotStats) [(0 :: Int)..]) $ \(l, i) -> do - when (i `mod` 33 == 0 && (i == 0 || not exportMode)) $ - hPutStrLn hnd leadHead - hPutStrLn hnd $ slotLine exportMode fmt l +instance ToJSON SlotStats -- | Initial and trailing data are noisy outliers: drop that. -- @@ -132,11 +126,11 @@ renderSlotTimeline leadHead fmt exportMode slotStats hnd = do -- they start getting non-zero chain density reported. -- -- On the trailing part, we drop everything since the last leadership check. -cleanupSlotStats :: Seq SlotStats -> Seq SlotStats +cleanupSlotStats :: [SlotStats] -> [SlotStats] cleanupSlotStats = - -- Seq.dropWhileL ((== 0) . slDensity) . - Seq.dropWhileL ((/= 500) . slSlot) . - Seq.dropWhileR ((== 0) . slCountChecks) + -- dropWhile ((== 0) . slDensity) . + dropWhile ((/= 500) . slSlot) . + dropWhileEnd ((== 0) . slCountChecks) zeroSlotStats :: SlotStats zeroSlotStats = @@ -144,7 +138,7 @@ zeroSlotStats = { slSlot = 0 , slEpoch = 0 , slEpochSlot = 0 - , slStart = zeroUTCTime + , slStart = SlotStart zeroUTCTime , slCountChecks = 0 , slCountLeads = 0 , slOrderViol = 0 diff --git a/nix/workbench/locli/src/Cardano/Unlog/Summary.hs b/nix/workbench/locli/src/Cardano/Unlog/Summary.hs index 57ddddb87da..56004e8548b 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/Summary.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/Summary.hs @@ -22,8 +22,8 @@ import qualified Data.Aeson as Aeson import Data.Aeson import qualified Data.ByteString.Lazy.Char8 as LBS import qualified Data.HashMap.Strict as HashMap -import qualified Data.Sequence as Seq import qualified Data.Text as Text +import qualified Data.Text.IO as T import Data.Vector (Vector) import qualified Data.Vector as Vec @@ -42,6 +42,7 @@ import Cardano.Analysis.Profile import Cardano.Unlog.BlockProp import Cardano.Unlog.Commands import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Unlog.Render import Cardano.Unlog.Resources import Cardano.Unlog.SlotStats import Cardano.Unlog.Timeline @@ -79,7 +80,7 @@ renderAnalysisCmdError cmd err = -- runAnalysisCommand :: AnalysisCommand -> ExceptT AnalysisCmdError IO () -runAnalysisCommand (MachineTimeline genesisFile metaFile logfiles oFiles) = do +runAnalysisCommand (MachineTimelineCmd genesisFile metaFile logfiles oFiles) = do chainInfo <- ChainInfo <$> firstExceptT (RunMetaParseError metaFile . 
Text.pack) @@ -90,7 +91,7 @@ runAnalysisCommand (MachineTimeline genesisFile metaFile logfiles oFiles) = do Aeson.eitherDecode @Genesis <$> LBS.readFile (unJsonGenesisFile genesisFile)) firstExceptT AnalysisCmdError $ runMachineTimeline chainInfo logfiles oFiles -runAnalysisCommand (BlockPropagation genesisFile metaFile logfiles oFiles) = do +runAnalysisCommand (BlockPropagationCmd genesisFile metaFile logfiles oFiles) = do chainInfo <- ChainInfo <$> firstExceptT (RunMetaParseError metaFile . Text.pack) @@ -101,7 +102,7 @@ runAnalysisCommand (BlockPropagation genesisFile metaFile logfiles oFiles) = do Aeson.eitherDecode @Genesis <$> LBS.readFile (unJsonGenesisFile genesisFile)) firstExceptT AnalysisCmdError $ runBlockPropagation chainInfo logfiles oFiles -runAnalysisCommand SubstringKeys = +runAnalysisCommand SubstringKeysCmd = liftIO $ mapM_ putStrLn logObjectStreamInterpreterKeys runBlockPropagation :: @@ -114,20 +115,26 @@ runBlockPropagation chainInfo logfiles BlockPropagationOutputFiles{..} = do (joinT . (pure &&& readLogObjectStream)) forM_ bpofLogObjects . const $ do - putStrLn ("runBlockPropagation: dumping LO streams" :: Text) flip mapConcurrently objLists $ - \(JsonLogfile f, objs) -> + \(JsonLogfile f, objs) -> do + putStrLn ("runBlockPropagation: dumping LO streams" :: Text) dumpLOStream objs (JsonOutputFile $ F.dropExtension f <> ".logobjects.json") - chainBlockEvents <- blockProp chainInfo objLists + blockPropagation <- blockProp chainInfo objLists + + forM_ bpofTimelinePretty $ + \(TextOutputFile f) -> + withFile f WriteMode $ \hnd -> do + putStrLn ("runBlockPropagation: dumping pretty timeline" :: Text) + hPutStrLn hnd . Text.pack $ printf "--- input: %s" f + mapM_ (T.hPutStrLn hnd) (renderDistributions blockPropagation) - putStrLn ("runBlockPropagation: dumping analyses" :: Text) forM_ bpofAnalysis $ \(JsonOutputFile f) -> - withFile f WriteMode $ \hnd -> - forM_ chainBlockEvents $ \x-> - LBS.hPutStrLn hnd (Aeson.encode x) + withFile f WriteMode $ \hnd -> do + putStrLn ("runBlockPropagation: dumping analysis core" :: Text) + LBS.hPutStrLn hnd (Aeson.encode blockPropagation) where joinT :: (IO a, IO b) -> IO (a, b) joinT (a, b) = (,) <$> a <*> b @@ -152,7 +159,7 @@ runMachineTimeline chainInfo logfiles MachineTimelineOutputFiles{..} = do let slotStats = cleanupSlotStats noisySlotStats -- 3. Derive the summary - let drvVectors0, _drvVectors1 :: Seq DerivedSlot + let drvVectors0, _drvVectors1 :: [DerivedSlot] (,) drvVectors0 _drvVectors1 = computeDerivedVectors slotStats summary :: Summary summary = slotStatsSummary chainInfo slotStats @@ -170,7 +177,7 @@ runMachineTimeline chainInfo logfiles MachineTimelineOutputFiles{..} = do (renderDerivedSlots drvVectors0) forM_ mtofHistogram (renderHistogram "CPU usage spans over 85%" "Span length" - (toList $ Seq.sort $ sSpanLensCPU85 summary)) + (toList $ sort $ sSpanLensCPU85 summary)) flip (maybe $ LBS.putStrLn timelineOutput) mtofAnalysis $ \case @@ -188,7 +195,7 @@ runMachineTimeline chainInfo logfiles MachineTimelineOutputFiles{..} = do Hist.defOpts hist renderPrettySummary :: - Seq SlotStats -> Summary -> [JsonLogfile] -> TextOutputFile -> IO () + [SlotStats] -> Summary -> [JsonLogfile] -> TextOutputFile -> IO () renderPrettySummary xs s srcs o = withFile (unTextOutputFile o) WriteMode $ \hnd -> do hPutStrLn hnd . 
Text.pack $ @@ -204,7 +211,7 @@ runMachineTimeline chainInfo logfiles MachineTimelineOutputFiles{..} = do renderChainInfoExport chainInfo <> renderRunScalars rs - renderExportTimeline :: Seq SlotStats -> CsvOutputFile -> IO () + renderExportTimeline :: [SlotStats] -> CsvOutputFile -> IO () renderExportTimeline xs (CsvOutputFile o) = withFile o WriteMode $ renderSlotTimeline slotHeadE slotFormatE True xs @@ -215,7 +222,7 @@ runMachineTimeline chainInfo logfiles MachineTimelineOutputFiles{..} = do forM_ (toDistribLines statFmt propFmt summary) $ hPutStrLn hnd - renderDerivedSlots :: Seq DerivedSlot -> CsvOutputFile -> IO () + renderDerivedSlots :: [DerivedSlot] -> CsvOutputFile -> IO () renderDerivedSlots slots (CsvOutputFile o) = do withFile o WriteMode $ \hnd -> do hPutStrLn hnd derivedSlotsHeader @@ -230,10 +237,10 @@ dumpLOStream objs o = data Summary = Summary { sMaxChecks :: !Word64 - , sSlotMisses :: !(Seq Word64) - , sSpanLensCPU85 :: !(Seq Int) - , sSpanLensCPU85EBnd :: !(Seq Int) - , sSpanLensCPU85Rwd :: !(Seq Int) + , sSlotMisses :: ![Word64] + , sSpanLensCPU85 :: ![Int] + , sSpanLensCPU85EBnd :: ![Int] + , sSpanLensCPU85Rwd :: ![Int] -- distributions , sMissDistrib :: !(Distribution Float Float) , sLeadsDistrib :: !(Distribution Float Word64) @@ -271,7 +278,7 @@ instance ToJSON Summary where , "xs" .= toJSON sSpanLensCPU85] , Aeson.Object $ HashMap.fromList [ "kind" .= String "spanLensCPU85Sorted" - , "xs" .= toJSON (Seq.sort sSpanLensCPU85)] + , "xs" .= toJSON (sort sSpanLensCPU85)] , extendObject "kind" "spancheck" $ toJSON sSpanCheckDistrib , extendObject "kind" "spanlead" $ toJSON sSpanLeadDistrib , extendObject "kind" "cpu" $ toJSON (rCentiCpu sResourceDistribs) @@ -292,7 +299,7 @@ instance ToJSON Summary where toJSON sSpanLensCPU85RwdDistrib ] -slotStatsSummary :: ChainInfo -> Seq SlotStats -> Summary +slotStatsSummary :: ChainInfo -> [SlotStats] -> Summary slotStatsSummary CInfo{} slots = Summary { sMaxChecks = maxChecks @@ -301,41 +308,31 @@ slotStatsSummary CInfo{} slots = , sSpanLensCPU85EBnd = sSpanLensCPU85EBnd , sSpanLensCPU85Rwd = sSpanLensCPU85Rwd -- - , sMissDistrib = computeDistribution pctiles missRatios + , sMissDistrib = computeDistribution stdPercentiles missRatios , sLeadsDistrib = - computeDistribution pctiles (slCountLeads <$> slots) + computeDistribution stdPercentiles (slCountLeads <$> slots) , sUtxoDistrib = - computeDistribution pctiles (slUtxoSize <$> slots) + computeDistribution stdPercentiles (slUtxoSize <$> slots) , sDensityDistrib = - computeDistribution pctiles (slDensity <$> slots) + computeDistribution stdPercentiles (slDensity <$> slots) , sSpanCheckDistrib = - computeDistribution pctiles (slSpanCheck <$> slots) + computeDistribution stdPercentiles (slSpanCheck <$> slots) , sSpanLeadDistrib = - computeDistribution pctiles (slSpanLead <$> slots) + computeDistribution stdPercentiles (slSpanLead <$> slots) , sBlocklessDistrib = - computeDistribution pctiles (slBlockless <$> slots) + computeDistribution stdPercentiles (slBlockless <$> slots) , sSpanLensCPU85Distrib - = computeDistribution pctiles spanLensCPU85 + = computeDistribution stdPercentiles spanLensCPU85 , sResourceDistribs = - computeResDistrib pctiles resDistProjs slots - , sSpanLensCPU85EBndDistrib = computeDistribution pctiles sSpanLensCPU85EBnd - , sSpanLensCPU85RwdDistrib = computeDistribution pctiles sSpanLensCPU85Rwd + computeResDistrib stdPercentiles resDistProjs slots + , sSpanLensCPU85EBndDistrib = computeDistribution stdPercentiles sSpanLensCPU85EBnd + , 
sSpanLensCPU85RwdDistrib = computeDistribution stdPercentiles sSpanLensCPU85Rwd } where - sSpanLensCPU85EBnd = Seq.fromList $ Vec.length <$> + sSpanLensCPU85EBnd = Vec.length <$> filter (spanContainsEpochSlot 3) spansCPU85 - sSpanLensCPU85Rwd = Seq.fromList $ Vec.length <$> + sSpanLensCPU85Rwd = Vec.length <$> filter (spanContainsEpochSlot 803) spansCPU85 - pctiles = sortBy (compare `on` psFrac) - [ Perc 0.01, Perc 0.05 - , Perc 0.1, Perc 0.2, Perc 0.3, Perc 0.4 - , Perc 0.5, Perc 0.6 - , Perc 0.7, Perc 0.75 - , Perc 0.8, Perc 0.85, Perc 0.875 - , Perc 0.9, Perc 0.925, Perc 0.95, Perc 0.97, Perc 0.98, Perc 0.99 - , Perc 0.995, Perc 0.997, Perc 0.998, Perc 0.999 - , Perc 0.9995, Perc 0.9997, Perc 0.9998, Perc 0.9999 - ] checkCounts = slCountChecks <$> slots maxChecks = if length checkCounts == 0 @@ -346,7 +343,7 @@ slotStatsSummary CInfo{} slots = spansCPU85 = spans ((/= Just False) . fmap (>=85) . rCentiCpu . slResources) (toList slots) - spanLensCPU85 = Seq.fromList $ spanLen <$> spansCPU85 + spanLensCPU85 = spanLen <$> spansCPU85 spanContainsEpochSlot :: Word64 -> Vector SlotStats -> Bool spanContainsEpochSlot s = uncurry (&&) @@ -415,7 +412,7 @@ toDistribLines :: Text -> Text -> Summary -> [Text] toDistribLines statsF distPropsF s@Summary{..} = distribLine <$> ZipList (pctSpec <$> dPercentiles sMissDistrib) - <*> ZipList (max 1 . ceiling . (* fromIntegral (dCount sMissDistrib)) + <*> ZipList (max 1 . ceiling . (* fromIntegral (dSize sMissDistrib)) . (1.0 -) . pctFrac <$> dPercentiles sMissDistrib) <*> ZipList (pctSample <$> dPercentiles sMissDistrib) @@ -439,8 +436,8 @@ toDistribLines statsF distPropsF s@Summary{..} = <*> ZipList (pctSample <$> dPercentiles sSpanLensCPU85EBndDistrib) <*> ZipList (pctSample <$> dPercentiles sSpanLensCPU85RwdDistrib) & getZipList - & (<> [ mapSummary distPropsF s "size" (fromIntegral . dCount) - , mapSummary distPropsF s "avg" dAverage + & (<> [ mapSummary distPropsF s "size" (fromIntegral . dSize) + , mapSummary distPropsF s "avg" dAverage ]) where distribLine :: diff --git a/nix/workbench/locli/src/Cardano/Unlog/Timeline.hs b/nix/workbench/locli/src/Cardano/Unlog/Timeline.hs index 6beaadc8295..e571355d4a1 100644 --- a/nix/workbench/locli/src/Cardano/Unlog/Timeline.hs +++ b/nix/workbench/locli/src/Cardano/Unlog/Timeline.hs @@ -11,7 +11,6 @@ import Prelude (String, error) import Cardano.Prelude import Control.Arrow ((&&&)) -import qualified Data.Sequence as Seq import Data.Vector (Vector) import qualified Data.Map.Strict as Map @@ -48,9 +47,9 @@ data RunScalars , rsThreadwiseTps :: Maybe (Vector Float) } -timelineFromLogObjects :: ChainInfo -> [LogObject] -> (RunScalars, Seq SlotStats) +timelineFromLogObjects :: ChainInfo -> [LogObject] -> (RunScalars, [SlotStats]) timelineFromLogObjects ci = - (aRunScalars &&& Seq.reverse . Seq.fromList . aSlotStats) + (aRunScalars &&& reverse . aSlotStats) . 
foldl (timelineStep ci) zeroTimelineAccum where zeroTimelineAccum :: TimelineAccum @@ -256,19 +255,16 @@ renderDerivedSlot DerivedSlot{..} = [ show (unSlotNo dsSlot), ",", show dsBlockless ] -computeDerivedVectors :: Seq SlotStats -> (Seq DerivedSlot, Seq DerivedSlot) +computeDerivedVectors :: [SlotStats] -> ([DerivedSlot], [DerivedSlot]) computeDerivedVectors ss = - (\(_,_,d0,d1) -> ( Seq.fromList d0 - , Seq.fromList d1 - )) $ - Seq.foldrWithIndex step (0, 0, [], []) ss + (\(_,_,d0,d1) -> (d0, d1)) $ + foldr step (0, 0, [], []) ss where step :: - Int - -> SlotStats + SlotStats -> (Word64, Word64, [DerivedSlot], [DerivedSlot]) -> (Word64, Word64, [DerivedSlot], [DerivedSlot]) - step _ SlotStats{..} (lastBlockless, spanBLSC, accD0, accD1) = + step SlotStats{..} (lastBlockless, spanBLSC, accD0, accD1) = if lastBlockless < slBlockless then ( slBlockless , slBlockless diff --git a/nix/workbench/locli/src/Data/Distribution.hs b/nix/workbench/locli/src/Data/Distribution.hs index 5e8f566694c..09c97aada17 100644 --- a/nix/workbench/locli/src/Data/Distribution.hs +++ b/nix/workbench/locli/src/Data/Distribution.hs @@ -1,48 +1,58 @@ +{-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} +{-# LANGUAGE GADTs #-} {-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE ImportQualifiedPost #-} {-# LANGUAGE LambdaCase #-} {-# LANGUAGE MultiParamTypeClasses #-} {-# LANGUAGE ScopedTypeVariables #-} {-# LANGUAGE ViewPatterns #-} +{-# OPTIONS_GHC -Wwarn #-} module Data.Distribution ( ToRealFrac(..) , Distribution(..) , computeDistribution + , computeDistributionStats + , mapToDistribution , zeroDistribution + , dPercIx , PercSpec(..) , renderPercSpec , Percentile(..) , pctFrac + , stdPercentiles -- Aux , spans ) where -import Prelude (String, id) -import Cardano.Prelude +import Prelude (String, (!!), head, last, show) +import Cardano.Prelude hiding (head, show) -import Control.Arrow -import Data.Aeson (ToJSON(..)) -import qualified Data.Foldable as F -import Data.List (span) -import qualified Data.Sequence as Seq -import Data.Sequence (index) -import Data.Vector (Vector) -import qualified Data.Vector as Vec -import Text.Printf (PrintfArg, printf) +import Control.Arrow +import Data.Aeson (ToJSON(..)) +import Data.Foldable qualified as F +import Data.List (span) +import Data.Vector (Vector) +import Data.Vector qualified as Vec +import Statistics.Sample qualified as Stat +import Text.Printf (PrintfArg, printf) data Distribution a b = Distribution - { dAverage :: a - , dCount :: Int + { dSize :: Int + , dAverage :: a , dPercentiles :: [Percentile a b] } - deriving (Generic, Show) + deriving (Functor, Generic, Show) instance (ToJSON a, ToJSON b) => ToJSON (Distribution a b) newtype PercSpec a = Perc { psFrac :: a } deriving (Generic, Show) +dPercIx :: Distribution a b -> Int -> b +dPercIx d = pctSample . (dPercentiles d !!) + renderPercSpec :: PrintfArg a => Int -> PercSpec a -> String renderPercSpec width = \case Perc x -> printf ("%0."<>show (width-2)<>"f") x @@ -50,55 +60,89 @@ renderPercSpec width = \case data Percentile a b = Percentile { pctSpec :: !(PercSpec a) - , pctSampleIndex :: !Int - , pctSamplePrev :: !Int , pctSample :: !b } - deriving (Generic, Show) + deriving (Functor, Generic, Show) pctFrac :: Percentile a b -> a pctFrac = psFrac . 
pctSpec +stdPercentiles :: [PercSpec Float] +stdPercentiles = + [ Perc 0.01, Perc 0.05 + , Perc 0.1, Perc 0.2, Perc 0.3, Perc 0.4 + , Perc 0.5, Perc 0.6 + , Perc 0.7, Perc 0.75 + , Perc 0.8, Perc 0.85, Perc 0.875 + , Perc 0.9, Perc 0.925, Perc 0.95, Perc 0.97, Perc 0.98, Perc 0.99 + , Perc 0.995, Perc 0.997, Perc 0.998, Perc 0.999 + , Perc 0.9995, Perc 0.9997, Perc 0.9998, Perc 0.9999 + ] + instance (ToJSON a) => ToJSON (PercSpec a) instance (ToJSON a, ToJSON b) => ToJSON (Percentile a b) zeroDistribution :: Num a => Distribution a b zeroDistribution = Distribution - { dAverage = 0 - , dCount = 0 + { dSize = 0 + , dAverage = 0 , dPercentiles = mempty } -countSeq :: Eq a => a -> Seq a -> Int -countSeq x = foldl' (\n e -> if e == x then n + 1 else n) 0 +-- | For a list of distributions, compute a distribution of averages and rel stddev +-- (aka. coefficient of variance). +computeDistributionStats :: + forall a v + . ( v ~ Double -- 'v' is fixed by Stat.stdDev + , Num a + ) + => String -> [Distribution a v] + -> Either String (Distribution a v, Distribution a v) +computeDistributionStats desc xs = do + when (null xs) $ + Left $ "Empty list of distributions in " <> desc + let distPcts = dPercentiles <$> xs + pctDistVals = transpose distPcts + unless (all (pctLen ==) (length <$> distPcts)) $ + Left ("Distributions with different percentile counts: " <> show (length <$> distPcts) <> " in " <> desc) + pure $ (join (***) (Distribution (length xs) 0) + :: ([Percentile a v], [Percentile a v]) -> (Distribution a v, Distribution a v)) + $ unzip (pctsMeanCoV <$> pctDistVals) + where + pctLen = length . dPercentiles $ head xs + + pctsMeanCoV :: [Percentile a v] -> (Percentile a v, Percentile a v) + pctsMeanCoV xs' = join (***) (Percentile . pctSpec $ head xs') + (mean, Stat.stdDev vec / mean) + where + vec = Vec.fromList $ pctSample <$> xs' + mean = Stat.mean vec + +mapToDistribution :: (Real v, ToRealFrac v a) => (b -> v) -> [PercSpec a] -> [b] -> Distribution a v +mapToDistribution f pspecs xs = computeDistribution pspecs (f <$> xs) -computeDistribution :: (RealFrac a, Real v, ToRealFrac v a) => [PercSpec a] -> Seq v -> Distribution a v -computeDistribution percentiles (Seq.sort -> sorted) = +computeDistribution :: (Real v, ToRealFrac v a) => [PercSpec a] -> [v] -> Distribution a v +computeDistribution percentiles (sort -> sorted) = Distribution - { dAverage = toRealFrac (F.sum sorted) / fromIntegral (size `max` 1) - , dCount = size + { dSize = size + , dAverage = toRealFrac (F.sum sorted) / fromIntegral (size `max` 1) , dPercentiles = - (Percentile (Perc 0) size (countSeq mini sorted) mini:) . - (<> [Percentile (Perc 1.0) 1 (countSeq maxi sorted) maxi]) $ + (Percentile (Perc 0) mini:) . + (<> [Percentile (Perc 1.0) maxi]) $ percentiles <&> \spec -> - let (sampleIndex :: Int, sample) = - if size == 0 - then (0, 0) - else floor (fromIntegral (size - 1) * psFrac spec) & - (id &&& Seq.index sorted) - in Percentile - spec - (size - sampleIndex) - (countSeq sample sorted) - sample + let sample = if size == 0 + then 0 + else sorted !! 
diff --git a/nix/workbench/profiles/adhoc.jq b/nix/workbench/profiles/adhoc.jq
index 3d2479e399e..53884deb158 100644
--- a/nix/workbench/profiles/adhoc.jq
+++ b/nix/workbench/profiles/adhoc.jq
@@ -12,6 +12,16 @@ def adhoc_profiles:
     , init_cooldown: 25 }
   , tolerances: { finish_patience: 4 } }
+, { name: "10"
+  , composition:
+    { n_singular_hosts: 10
+    , n_dense_hosts: 0
+    }
+  , genesis:
+    { genesis_future_offset: "10 seconds"
+    , utxo: 0
+    }
+  }
 , { name: "default"
   , genesis:
     { verbatim:
diff --git a/nix/workbench/run.sh b/nix/workbench/run.sh
index 728473e437e..22f8eaf8cf2 100644
--- a/nix/workbench/run.sh
+++ b/nix/workbench/run.sh
@@ -1,6 +1,5 @@
-global_runsdir_def=$PWD/run
-global_runsdir=$global_runsdir_def
-global_envjson=$global_runsdir/env.json
+global_rundir_def=$PWD/run
+global_rundir_alt_def=$PWD/../cardano-ops/runs
 
 usage_run() {
      usage "run" "Managing cluster runs" < "$dir"/profile.json
+
+        local topdirs=$(ls -d "$dir"/logs-*/ 2>/dev/null || true)
+        local anadirs=$(ls -d "$dir"/analysis/logs-*/ 2>/dev/null || true)
+        if test -n "$topdirs"
+        then for logdir in $topdirs
+             do local fixed=$(basename "$logdir" | cut -c6-)
+                mv "$logdir" "$dir"/$fixed; done
+        elif test -n "$anadirs"
+        then for logdir in $anadirs
+             do local fixed=$(basename "$logdir" | cut -c6-)
+                mv "$logdir" "$dir"/analysis/$fixed; done; fi
+
+        cp "$global_envjson" "$dir"/env.json
+        jq_fmutate "$dir"/env.json '. *
+           { type: "legacy"
+           , staggerPorts: false
+           }
+           '
+        fi;;
 
    get-path | get )
        local usage="USAGE: wb run $op TAG"
@@ -73,11 +106,11 @@ case "$op" in
        local tag=${1:?$usage}
        local dir=$(run get "$tag")
 
-       rm -f "$global_runsdir"/current
-       ln -s $tag "$global_runsdir"/current;;
+       rm -f "$global_rundir"/current
+       ln -s $tag "$global_rundir"/current;;
 
    current-run-path | current-path | path )
-       realpath "$global_runsdir"/current;;
+       realpath "$global_rundir"/current;;
 
    current-run-tag | current-tag | tag | current )
        basename "$(run current-path)";;
@@ -105,10 +138,10 @@ case "$op" in
        local timestamp=$(date +'%s' --utc)
        local date=$(date +'%Y'-'%m'-'%d'-'%H.%M' --date=@$timestamp --utc)
        local tag=$date.$batch.$prof
-       local dir=$global_runsdir/$tag
+       local dir=$global_rundir/$tag
        local realdir=$(realpath --canonicalize-missing "$dir")
 
-       if test "$(dirname "$realdir")" != "$(realpath "$global_runsdir")"
+       if test "$(dirname "$realdir")" != "$(realpath "$global_rundir")"
        then fatal "bad tag/run dir: $tag @ $dir"; fi
 
        if test -e "$dir"
@@ -182,10 +215,17 @@ case "$op" in
        msg "current run is: $tag / $dir"
        ;;
 
+    list-hosts | hosts )
+       local usage="USAGE: wb run $op TAG"
+       local tag=${1:?$usage}
+       local dir=$global_rundir/$tag
+
+       jq '.hostname | keys | .[]' -r "$dir"/meta.json;;
+
    describe )
        local usage="USAGE: wb run $op TAG"
        local tag=${1:?$usage}
-       local dir=$global_runsdir/$tag
+       local dir=$global_rundir/$tag
 
        if ! run check "$tag"
        then fatal "run fails sanity checks: $tag at $dir"; fi
diff --git a/nix/workbench/supervisor.sh b/nix/workbench/supervisor.sh
index 403fb35ab8b..04d8d294255 100755
--- a/nix/workbench/supervisor.sh
+++ b/nix/workbench/supervisor.sh
@@ -62,7 +62,8 @@ case "$op" in
            --argjson port_shift_prometheus "$port_shift_prometheus"
        )
        jq_fmutate "$env_json" '. *
-          { port_shift_ekg: $port_shift_ekg
+          { type: "supervisor"
+          , port_shift_ekg: $port_shift_ekg
           , port_shift_prometheus: $port_shift_prometheus
           }
           ' "${args[@]}"
@@ -152,11 +153,12 @@ EOF
        usage="USAGE: wb supervisor $op RUN-DIR"
        dir=${1:?$usage}
 
-       echo --compact-output --slurpfile mapp2n "$dir"/supervisor/pid2node.map;;
+       echo --compact-output --argjson mapp2n '[{}]';;# --slurpfile mapp2n "$dir"/supervisor/pid2node.map;;
+       #echo --compact-output --slurpfile mapp2n "$dir"/supervisor/pid2node.map;;
 
    lostream-fixup-jqexpr )
        usage="USAGE: wb supervisor $op"
 
-       echo '| $mapp2n[0] as $map | . * { host: $map[.pid] }';;
+       echo '| $mapp2n[0] as $map | . * { host: ($map[.pid] // $dirHostname) }';;
 
    * ) usage_supervisor;;
 esac
diff --git a/nix/workbench/wb b/nix/workbench/wb
index d2553e595d3..fa040a6c642 100755
--- a/nix/workbench/wb
+++ b/nix/workbench/wb
@@ -43,6 +43,7 @@ usage_extra() {
  cat >&2 <