From eef52f07c8f65ba5046ae0a28baf49c88f96a771 Mon Sep 17 00:00:00 2001 From: Kosyrev Serge Date: Tue, 29 Nov 2022 21:15:03 +0800 Subject: [PATCH 1/8] workbench: collect log manifest during analysis --- nix/workbench/analyse.sh | 136 ++++++++++++++++++--------------------- 1 file changed, 63 insertions(+), 73 deletions(-) diff --git a/nix/workbench/analyse.sh b/nix/workbench/analyse.sh index c4a04cebc41..6c8d7dee9c5 100644 --- a/nix/workbench/analyse.sh +++ b/nix/workbench/analyse.sh @@ -56,6 +56,7 @@ usage_analyse() { $(helpopt --dump-slots) Machperf: dump filtered slots (JSON) $(helpopt --multi-overall) Multirun: Overall dataset statistical summary $(helpopt --multi-inter-cdf) Multirun: Inter-sample (i.e. inter-CDF) stats + $(helpopt --force-prepare) Logs: force re-prefiltering & manifest collection EOF } analysis_allowed_loanys=( @@ -69,7 +70,7 @@ analyse_default_op='standard' analyse() { local filters=() filter_exprs=() filter_reasons= chain_errors= aws= sargs=() unfiltered= perf_omit_hosts=() local dump_logobjects= dump_machviews= dump_chain= dump_slots_raw= dump_slots= -local multi_aspect='--inter-cdf' rtsmode= +local multi_aspect='--inter-cdf' rtsmode= force_prepare= locli_args=() progress "analyse" "args: $(yellow $*)" @@ -95,6 +96,7 @@ do case "$1" in --rtsmode-lomem | --lomem ) sargs+=($1); rtsmode='lomem';; --rtsmode-hipar ) sargs+=($1); rtsmode='hipar';; --perf-omit-host ) sargs+=($1 "$2"); perf_omit_hosts+=($2); shift;; + --force-prepare | -fp ) sargs+=($1); force_prepare='true';; --trace ) sargs+=($1); set -x;; * ) break;; esac; shift; done @@ -263,7 +265,7 @@ case "$op" in performance-host | perf-host ) local usage="USAGE: wb analyse $op HOST" - local host=${1:?usage}; shift + local host=${1:?$usage}; shift local script=( logs 'dump-logobjects' @@ -284,7 +286,7 @@ case "$op" in local usage="USAGE: wb analyse $op OP [-opt-flag] [--long-option OPTVAL] RUNS.." ## Meaning: map OP over RUNS, optionally giving flags/options to OP - local preop=${1:?usage}; shift + local preop=${1:?$usage}; shift local runs=($(expand_runspecs $*)) local op_split=($preop) @@ -355,7 +357,7 @@ case "$op" in local v0 v1 v2 v3 v4 v5 v6 v7 v8 v9 va vb vc vd ve vf vg vh vi vj vk vl vm vn vo v0=( $* ) - v1=("${v0[@]/#logs/ 'unlog' --host-from-log-filename ${analysis_allowed_loanys[*]/#/--ok-loany } ${logfiles[*]/#/--log } }") + v1=("${v0[@]/#logs/ 'unlog' --run-logs \"$adir\"/log-manifest.json ${analysis_allowed_loanys[*]/#/--ok-loany } }") v2=("${v1[@]/#context/ 'meta-genesis' --run-metafile \"$dir\"/meta.json --shelley-genesis \"$dir\"/genesis-shelley.json }") v3=("${v2[@]/#read-chain/ 'read-chain' --chain \"$adir\"/chain.json}") @@ -472,10 +474,15 @@ case "$op" in ## 1. unless already done, filter logs according to locli's requirements local logdirs=($(ls -d "$dir"/node-*/ 2>/dev/null)) local logfiles=($(ls "$adir"/logs-node-*.flt.json 2>/dev/null)) + local run_logs=$adir/log-manifest.json local prefilter=$(if test -z "${logfiles[*]}" then echo 'prefiltered-logs-not-yet-created' elif test "$key_new" != "$key_old" then echo 'prefiltering-keyset-changed' + elif test ! 
-f "$run_logs" + then echo 'missing '$run_logs + elif test -n "$force_prepare" + then echo '--force-prepare passed on CLI' else echo 'false' fi) echo "{ \"prefilter\": \"$prefilter\" }" @@ -489,16 +496,46 @@ case "$op" in --fixed-strings --no-filename ) + + echo '{}' > $run_logs for d in "${logdirs[@]}" do throttle_shell_job_spawns - local logfiles="$(ls "$d"/stdout* 2>/dev/null | tac) $(ls "$d"/node-*.json 2>/dev/null)" - if test -z "$logfiles" + local logfiles=($(ls --reverse -t "$d"stdout* "$d"node-[0-9]*.json \ + 2>/dev/null)) + if test -z "${logfiles[*]}" then msg "no logs in $d, skipping.."; fi - local output="$adir"/logs-$(basename "$d").flt.json - grep ${grep_params[*]} $logfiles | grep '^{' > "$output" & + local mach=$(basename "$d") + local out="$adir"/logs-$mach + grep ${grep_params[*]} ${logfiles[*]} | grep '^{' > "$out".flt.json & + trace_frequencies_json ${logfiles[*]} > "$out".tracefreq.json & + { cat ${logfiles[*]} | sha256sum | cut -d' ' -f1 | xargs echo -n;} > "$out".sha256 & + jq_fmutate "$run_logs" ' + .rlHostLogs["'"$mach"'"] = + { hlRawLogfiles: ["'"$(echo ${logfiles[*]} | sed 's/ /", "/')"'"] + , hlRawLines: '"$(cat ${logfiles[*]} | wc -l)"' + , hlRawSha256: "" + , hlRawTraceFreqs: {} + , hlLogs: ["'"$adir/logs-$mach.flt.json"'", null] + , hlFilteredSha256: "" + } + | .rlFilterDate = (now | todate) + | .rlFilterKeys = ($keys | split("\n")) + ' --rawfile keys $keyfile done - - wait;; + wait + + for mach in $(jq_tolist '.rlHostLogs | keys' $run_logs) + do jq_fmutate "$run_logs" ' + .rlHostLogs[$mach].hlRawSha256 = $raw_sha256 + | .rlHostLogs[$mach].hlRawTraceFreqs = $freqs[0] + | .rlHostLogs[$mach].hlFilteredSha256 = $filtered_sha256 + ' --sort-keys \ + --arg mach $mach \ + --rawfile raw_sha256 "$adir"/logs-$mach.sha256 \ + --arg filtered_sha256 $(sha256sum < $adir/logs-$mach.flt.json | \ + cut -d' ' -f1 | xargs echo -n) \ + --slurpfile freqs "$adir"/logs-$mach.tracefreq.json + done;; trace-frequencies | trace-freq | freq ) local new_only= sargs=() @@ -508,12 +545,13 @@ case "$op" in * ) break;; esac; shift; done local usage="USAGE: wb analyse $op LOGFILE" - local logfile=${1:?usage}; shift + local logfile=${1:?$usage}; shift + - trace_frequencies "${sargs[@]}" "" "$logfile" > "${logfile}.freq" + trace_frequencies "$logfile" > "${logfile}.tracefreqs.json" local src=$(wc -l <"$logfile") - local res=$(cut -d' ' -f1 "${logfile}.freq" | + local res=$(cut -d' ' -f1 "${logfile}.trace-freqs" | xargs echo | sed 's/ /, /g; s/^/\[/; s/$/\]/' | jq add) @@ -589,66 +627,18 @@ analysis_set_filters() { filters+=(${filter_files[*]/#/--filter }) } -classify_traces() { - jq --raw-output '(try .ns[0] // .ns) + ":" + (.data.kind //.data.val.kind)' 2>/dev/null | sort -u -} - -trace_frequencies() { - local new_only= - while test $# -gt 0 - do case "$1" in - --new-only ) new_only='true';; - * ) break;; esac; shift; done - - local types="$1"; shift - local files=("$@") - - if test -z "$types" - then types="$(cat "${files[@]}" | classify_traces)" - fi - - for ty in $types - do local ns=$(cut -d: -f1 <<<$ty) - local kind=$(cut -d: -f2 <<<$ty) - if test -n "$new_only" - then echo $(grep -hFe '"ns":"'$ns'"' "${files[@]}" | wc -l) $ty - else echo $(grep -hFe '"ns":"'$ns'"' "${files[@]}" | grep -Fe '"kind":"'$kind'"' | wc -l) $ty - fi - done | - sort -nr -} - -analysis_run_classify_traces() { - local name=${1:-current}; if test $# != 0; then shift; fi - local node=${1:-node-0}; if test $# != 0; then shift; fi - local dir=$(run get "$name") - - progress "analysis" "enumerating namespace from logs of 
$(with_color yellow $node)" - grep -h '^{' $dir/$node/stdout* | classify_traces - # grep -h '^{' $dir/$node/stdout* | jq --raw-output '.ns' 2>/dev/null | tr -d ']["' | sort -u -} - -analysis_trace_frequencies() { - while test $# -gt 0 - do case "$1" in - * ) break;; esac; shift; done - - local name=${1:-current}; if test $# != 0; then shift; fi - local dir=$(run get "$name") - local types=() - - for nodedir in $dir/node-*/ - do local node=$(basename $nodedir) - - progress "analysis" "message frequencies: $(with_color yellow $node)" - - types=($(analysis_run_classify_traces $name $node)) - trace_frequencies \ - "${types[*]}" \ - $nodedir/stdout* \ - > $nodedir/log-namespace-occurence-stats.txt - done - echo >&2 +trace_frequencies_json() { + grep --no-filename '^{.*}$' "$@" | + jq 'reduce inputs as $line + ( {}; + ( $line + | (try .ns[0] // .ns) + ":" + (.data.kind //.data.val.kind) + ) as $key + | (.[$key] // 0) as $acc + | . + { "\($key)": ($acc + 1) } + # | .[$key] += 1 ## This is somehow slower than set addition.. + ) + ' } analysis_config_extract_legacy_tracing() { From eb1f11baab629a31d01d73ab08e1fef789ff39f1 Mon Sep 17 00:00:00 2001 From: Kosyrev Serge Date: Wed, 30 Nov 2022 02:50:31 +0800 Subject: [PATCH 2/8] locli: log inventory & precise data stats --- .../locli/src/Cardano/Analysis/API/Ground.hs | 11 +++- .../locli/src/Cardano/Analysis/API/Metrics.hs | 34 +++++++++--- bench/locli/src/Cardano/Analysis/API/Types.hs | 13 +++-- bench/locli/src/Cardano/Analysis/Summary.hs | 25 ++++++--- bench/locli/src/Cardano/Command.hs | 49 ++++++++--------- bench/locli/src/Cardano/Unlog/LogObject.hs | 52 +++++++++++++++---- bench/locli/src/Data/CDF.hs | 38 ++++++++++++++ 7 files changed, 166 insertions(+), 56 deletions(-) diff --git a/bench/locli/src/Cardano/Analysis/API/Ground.hs b/bench/locli/src/Cardano/Analysis/API/Ground.hs index 829dc2465ff..03212908048 100644 --- a/bench/locli/src/Cardano/Analysis/API/Ground.hs +++ b/bench/locli/src/Cardano/Analysis/API/Ground.hs @@ -18,6 +18,7 @@ import Data.Aeson.Types (toJSONKeyText) import Data.Attoparsec.Text qualified as Atto import Data.Attoparsec.Time qualified as Iso8601 import Data.ByteString.Lazy.Char8 qualified as LBS +import Data.Map.Strict qualified as Map import Data.Text qualified as T import Data.Text.Short qualified as SText import Data.Text.Short (ShortText, fromText, toText) @@ -56,6 +57,10 @@ shortHash = toText . SText.take 6 . unHash instance Show Hash where show = T.unpack . toText . unHash +instance ToJSONKey Host where + toJSONKey = toJSONKeyText (toText . unHost) +instance FromJSONKey Host where + fromJSONKey = FromJSONKeyText (Host . fromText) instance ToJSONKey Hash where toJSONKey = toJSONKeyText (toText . unHash) instance FromJSONKey Hash where @@ -66,6 +71,9 @@ newtype Count a = Count { unCount :: Int } deriving newtype (FromJSON, Num, ToJSON) deriving anyclass NFData +countMap :: Map.Map a b -> Count a +countMap = Count . Map.size + countList :: (a -> Bool) -> [a] -> Count a countList f = Count . fromIntegral . 
count f @@ -121,11 +129,12 @@ newtype InputDir newtype JsonLogfile = JsonLogfile { unJsonLogfile :: FilePath } deriving (Show, Eq) - deriving newtype (NFData) + deriving newtype (FromJSON, ToJSON, NFData) newtype JsonInputFile (a :: Type) = JsonInputFile { unJsonInputFile :: FilePath } deriving (Show, Eq) + deriving newtype (FromJSON, ToJSON) newtype JsonOutputFile (a :: Type) = JsonOutputFile { unJsonOutputFile :: FilePath } diff --git a/bench/locli/src/Cardano/Analysis/API/Metrics.hs b/bench/locli/src/Cardano/Analysis/API/Metrics.hs index baf95a8d3bc..561a8cfd656 100644 --- a/bench/locli/src/Cardano/Analysis/API/Metrics.hs +++ b/bench/locli/src/Cardano/Analysis/API/Metrics.hs @@ -42,8 +42,10 @@ sumFieldsReport = , "delegators", "utxo" , "add_tx_size", "inputs_per_tx", "outputs_per_tx" , "tps", "tx_count" , "plutusScript" - , "sumLogStreams", "sumLogObjectsTotal" + , "sumHosts", "sumLogObjectsTotal" , "sumFilters" + , "cdfLogLinesEmitted", "cdfLogObjectsEmitted", "cdfLogObjects" + , "cdfRuntime", "cdfLogLineRate" , "ddRawCount.sumDomainTime", "ddFilteredCount.sumDomainTime", "dataDomainFilterRatio.sumDomainTime" , "ddRaw.sumStartSpread", "ddRaw.sumStopSpread" , "ddFiltered.sumStartSpread", "ddFiltered.sumStopSpread" @@ -130,16 +132,36 @@ instance TimelineFields SummaryOne where "Plutus script" "Name of th Plutus script used for smart contract workload generation, if any" - <> fScalar "sumLogStreams" Wno Cnt (IInt $ unCount.sumLogStreams) + <> fScalar "sumHosts" Wno Cnt (IInt $ unCount.sumHosts) "Machines" "Number of machines under analysis" - <> fScalar "sumLogObjectsTotal" Wno Cnt (IInt $ unCount.sumLogObjectsTotal) - "Total log objects analysed" + <> fScalar "sumFilters" Wno Cnt (IInt $ length.snd.sumFilters) + "Number of filters applied" "" - <> fScalar "sumFilters" Wno Cnt (IInt $ length.snd.sumFilters) - "Number of filters applied" + <> fScalar "cdfLogLinesEmitted" W6 Cnt (IFloat $ cdfAverageVal.cdfLogLinesEmitted) + "Log text lines emitted per host" + "" + + <> fScalar "cdfLogObjectsEmitted" W6 Cnt (IFloat $ cdfAverageVal.cdfLogObjectsEmitted) + "Log objects emitted per host" + "" + + <> fScalar "cdfLogObjects" W6 Cnt (IFloat $ cdfAverageVal.cdfLogObjects) + "Log objects analysed per host" + "" + + <> fScalar "cdfRuntime" W6 Sec (IFloat $ cdfAverageVal.cdfRuntime) + "Host run time, s" + "" + + <> fScalar "cdfLogLineRate" W6 Hz (IFloat $ cdfAverageVal.cdfLogLineRate) + "Host log line rate, Hz" + "" + + <> fScalar "sumLogObjectsTotal" Wno Cnt (IInt $ unCount.sumLogObjectsTotal) + "Total log objects analysed" "" <> fScalar "ddRawCount.sumDomainTime" Wno Sec (IInt $ ddRawCount.sumDomainTime) diff --git a/bench/locli/src/Cardano/Analysis/API/Types.hs b/bench/locli/src/Cardano/Analysis/API/Types.hs index 0aa22e604f0..5d9ba019531 100644 --- a/bench/locli/src/Cardano/Analysis/API/Types.hs +++ b/bench/locli/src/Cardano/Analysis/API/Types.hs @@ -34,7 +34,7 @@ data Summary f where , sumGenesis :: !Genesis , sumGenesisSpec :: !GenesisSpec , sumGenerator :: !GeneratorProfile - , sumLogStreams :: !(Count [LogObject]) + , sumHosts :: !(Count Host) , sumLogObjectsTotal :: !(Count LogObject) , sumFilters :: !([FilterName], [ChainFilter]) , sumChainRejectionStats :: ![(ChainFilter, Int)] @@ -44,17 +44,20 @@ data Summary f where , sumStopSpread :: !(DataDomain UTCTime) , sumDomainSlots :: !(DataDomain SlotNo) , sumDomainBlocks :: !(DataDomain BlockNo) - , cdfLogObjects :: !(CDF f Int) + , cdfLogLinesEmitted :: !(CDF f Int) , cdfLogObjectsEmitted :: !(CDF f Int) + , cdfLogObjects :: !(CDF f Int) + , 
cdfRuntime :: !(CDF f NominalDiffTime) + , cdfLogLineRate :: !(CDF f Double) } -> Summary f deriving (Generic) type SummaryOne = Summary I type MultiSummary = Summary (CDF I) -deriving instance (FromJSON (f Int), FromJSON (f Double)) => FromJSON (Summary f) -deriving instance ( ToJSON (f Int), ToJSON (f Double)) => ToJSON (Summary f) -deriving instance ( Show (f Int), Show (f Double)) => Show (Summary f) +deriving instance (FromJSON (f NominalDiffTime), FromJSON (f Int), FromJSON (f Double)) => FromJSON (Summary f) +deriving instance ( ToJSON (f NominalDiffTime), ToJSON (f Int), ToJSON (f Double)) => ToJSON (Summary f) +deriving instance ( Show (f NominalDiffTime), Show (f Int), Show (f Double)) => Show (Summary f) data BlockStats = BlockStats diff --git a/bench/locli/src/Cardano/Analysis/Summary.hs b/bench/locli/src/Cardano/Analysis/Summary.hs index eb8a492458a..a685312951c 100644 --- a/bench/locli/src/Cardano/Analysis/Summary.hs +++ b/bench/locli/src/Cardano/Analysis/Summary.hs @@ -18,7 +18,7 @@ computeSummary :: -> Genesis -> GenesisSpec -> GeneratorProfile - -> [(Count Cardano.Prelude.Text, [LogObject])] + -> RunLogs [LogObject] -> ([FilterName], [ChainFilter]) -> ClusterPerf -> BlockPropOne @@ -29,14 +29,14 @@ computeSummary sumAnalysisTime sumGenesis sumGenesisSpec sumGenerator - loCountsObjLists + rl@RunLogs{..} sumFilters MachPerf{..} BlockProp{..} Chain{..} = Summary - { sumLogStreams = countListAll objLists + { sumHosts = countMap rlHostLogs , sumLogObjectsTotal = countListsAll objLists , sumBlocksRejected = countListAll cRejecta , sumDomainTime = @@ -54,17 +54,30 @@ computeSummary sumAnalysisTime , sumDomainSlots = Prelude.head mpDomainSlots , sumDomainBlocks = Prelude.head bpDomainBlocks -- - , cdfLogObjects = cdf stdCentiles (length <$> objLists) - , cdfLogObjectsEmitted = cdf stdCentiles (loCountsObjLists <&> unCount . fst) + , cdfLogObjects = cdf stdCentiles (objLists <&> length) + , cdfLogObjectsEmitted = cdf stdCentiles logObjectsEmitted + , cdfLogLinesEmitted = cdf stdCentiles textLinesEmitted + , cdfRuntime = cdf stdCentiles runtimes , .. } where - objLists = loCountsObjLists <&> snd + cdfLogLineRate = cdf stdCentiles lineRates + + (,) logObjectsEmitted textLinesEmitted = + rlHostLogs + & Map.toList + & fmap ((hlRawLogObjects &&& hlRawLines) . snd) + & unzip + objLists = rlLogs rl <&> snd (,) minStartRaw maxStartRaw = (minimum &&& maximum) losFirsts (,) minStopRaw maxStopRaw = (minimum &&& maximum) losLasts losFirsts = objLists <&> loAt . Prelude.head losLasts = objLists <&> loAt . Prelude.last + runtimes :: [NominalDiffTime] + runtimes = zipWith diffUTCTime losLasts losFirsts + lineRates = zipWith (/) (textLinesEmitted <&> fromIntegral) + (runtimes <&> fromIntegral @Int . 
truncate) (,) minStartFlt maxStartFlt = (timeOf *** timeOf) startMinMaxS (,) minStopFlt maxStopFlt = (timeOf *** timeOf) stopMinMaxS diff --git a/bench/locli/src/Cardano/Command.hs b/bench/locli/src/Cardano/Command.hs index e2f4ec84b60..727ebe147eb 100644 --- a/bench/locli/src/Cardano/Command.hs +++ b/bench/locli/src/Cardano/Command.hs @@ -6,6 +6,7 @@ import Cardano.Prelude hiding (State) import Data.Aeson qualified as Aeson import Data.ByteString qualified as BS import Data.ByteString.Lazy.Char8 qualified as LBS +import Data.Map qualified as Map import Data.Text (pack) import Data.Text qualified as T import Data.Text.Short (toText) @@ -45,7 +46,7 @@ data ChainCommand | MetaGenesis (JsonInputFile RunPartial) (JsonInputFile Genesis) - | Unlog [JsonLogfile] (Maybe HostDeduction) Bool [LOAnyType] + | Unlog (JsonInputFile (RunLogs ())) Bool [LOAnyType] | DumpLogObjects | BuildMachViews @@ -107,11 +108,7 @@ parseChainCommand = subparser (mconcat [ commandGroup "Basic log objects" , op "unlog" "Read log files" (Unlog - <$> some - (optJsonLogfile "log" "JSON log stream") - <*> optional - (parseHostDeduction "host-from-log-filename" - "Derive hostname from log filename: logs-HOSTNAME.*") + <$> optJsonInputFile "run-logs" "Run log manifest (API/Types.hs:RunLogs)" <*> Opt.flag False True (Opt.long "lodecodeerror-ok" <> Opt.help "Allow non-EOF LODecodeError logobjects") <*> many @@ -243,13 +240,6 @@ parseChainCommand = command c $ info (p <**> helper) $ mconcat [ progDesc descr ] - parseHostDeduction :: String -> String -> Parser HostDeduction - parseHostDeduction name desc = - Opt.flag' HostFromLogfilename - ( Opt.long name - <> Opt.help desc - ) - optLOAnyType :: String -> String -> Parser LOAnyType optLOAnyType opt desc = Opt.option Opt.auto @@ -291,7 +281,7 @@ data State , sFilters :: ([FilterName], [ChainFilter]) , sTags :: [Text] , sRun :: Maybe Run - , sObjLists :: Maybe [(JsonLogfile, [LogObject])] + , sRunLogs :: Maybe (RunLogs [LogObject]) , sDomSlots :: Maybe (DataDomain SlotNo) -- propagation , sMachViews :: Maybe [(JsonLogfile, MachView)] @@ -313,19 +303,18 @@ callComputeSummary :: State -> Either Text SummaryOne callComputeSummary = \case State{sRun = Nothing} -> err "a run" - State{sObjLists = Nothing} -> err "logobjects" - State{sObjLists = Just []} -> err "logobjects" + State{sRunLogs = Nothing} -> err "logobjects" State{sClusterPerf = Nothing} -> err "cluster performance results" State{sBlockProp = Nothing} -> err "block propagation results" State{sChain = Nothing} -> err "chain" - State{ sObjLists = Just (fmap snd -> objLists) + State{ sRunLogs = Just runLogs , sClusterPerf = Just [clusterPerf] , sBlockProp = Just [blockProp'] , sChain = Just chain , sRun = Just Run{..} , ..} -> Right $ computeSummary sWhen metadata genesis genesisSpec generatorProfile - (zip (Count <$> [0..]) objLists) sFilters + runLogs sFilters clusterPerf blockProp' chain _ -> err "Impossible to get here." 
where @@ -367,13 +356,19 @@ runChainCommand s pure s { sRun = Just run } runChainCommand s - c@(Unlog logs mHostDed okDErr okAny) = do - progress "logs" (Q $ printf "parsing %d log files" $ length logs) - los <- runLiftLogObjects logs mHostDed okDErr okAny - & firstExceptT (CommandError c) - pure s { sObjLists = Just los } - -runChainCommand s@State{sObjLists=Just objs} + c@(Unlog rlf okDErr okAny) = do + progress "logs" (Q $ printf "reading run log manifest %s" $ unJsonInputFile rlf) + runLogsBare <- Aeson.eitherDecode @(RunLogs ()) + <$> LBS.readFile (unJsonInputFile rlf) + & newExceptT + & firstExceptT (CommandError c . pack) + progress "logs" (Q $ printf "parsing logs for %d hosts" $ + Map.size $ rlHostLogs runLogsBare) + runLogs <- runLiftLogObjects runLogsBare okDErr okAny + & firstExceptT (CommandError c) + pure s { sRunLogs = Just runLogs } + +runChainCommand s@State{sRunLogs=Just (rlLogs -> objs)} c@DumpLogObjects = do progress "logobjs" (Q $ printf "dumping %d logobject streams" $ length objs) dumpAssociatedObjectStreams "logobjs" objs & firstExceptT (CommandError c) @@ -383,7 +378,7 @@ runChainCommand _ c@DumpLogObjects = missingCommandData c -- runChainCommand s c@(ReadMachViews _ _) -- () -> [(JsonLogfile, MachView)] -runChainCommand s@State{sRun=Just run, sObjLists=Just objs} +runChainCommand s@State{sRun=Just run, sRunLogs=Just (rlLogs -> objs)} BuildMachViews = do progress "machviews" (Q $ printf "building %d machviews" $ length objs) mvs <- buildMachViews run objs & liftIO @@ -455,7 +450,7 @@ runChainCommand s@State{sRun=Just _run, sChain=Just Chain{..}} runChainCommand _ c@TimelineChain{} = missingCommandData c ["run metadata & genesis", "chain"] -runChainCommand s@State{sRun=Just run, sObjLists=Just objs} +runChainCommand s@State{sRun=Just run, sRunLogs=Just (rlLogs -> objs)} c@(CollectSlots ignores) = do let nonIgnored = flip filter objs $ (`notElem` ignores) . fst forM_ ignores $ diff --git a/bench/locli/src/Cardano/Unlog/LogObject.hs b/bench/locli/src/Cardano/Unlog/LogObject.hs index 475fd23d114..96404ef82d8 100644 --- a/bench/locli/src/Cardano/Unlog/LogObject.hs +++ b/bench/locli/src/Cardano/Unlog/LogObject.hs @@ -36,18 +36,48 @@ import Data.Accum (zeroUTCTime) type Text = ShortText -runLiftLogObjects :: [JsonLogfile] -> Maybe HostDeduction -> Bool -> [LOAnyType] - -> ExceptT LText.Text IO [(JsonLogfile, [LogObject])] -runLiftLogObjects fs (fmap hostDeduction -> mHostDed) okDErr anyOks = liftIO $ do - forConcurrently fs - (\f -> (f,) . fmap (setLOhost f mHostDed) <$> readLogObjectStream (unJsonLogfile f) okDErr anyOks) - where - setLOhost :: JsonLogfile -> Maybe (JsonLogfile -> Host) -> LogObject -> LogObject - setLOhost _ Nothing lo = lo - setLOhost lf (Just f) lo = lo { loHost = f lf } +-- | Input data. +data HostLogs a + = HostLogs + { hlRawLogfiles :: [FilePath] + , hlRawLines :: Int + , hlRawSha256 :: Hash + , hlRawTraceFreqs :: Map Text Int + , hlLogs :: (JsonLogfile, a) + , hlFilteredSha256 :: Hash + } + deriving (Generic, FromJSON, ToJSON) - -- joinT :: (IO a, IO b) -> IO (a, b) - -- joinT (a, b) = (,) <$> a <*> b +hlRawLogObjects :: HostLogs a -> Int +hlRawLogObjects = sum . Map.elems . hlRawTraceFreqs + +data RunLogs a + = RunLogs + { rlHostLogs :: Map.Map Host (HostLogs a) + , rlFilterKeys :: [Text] + , rlFilterDate :: UTCTime + } + deriving (Generic, FromJSON, ToJSON) + +rlLogs :: RunLogs a -> [(JsonLogfile, a)] +rlLogs = fmap hlLogs . Map.elems . 
rlHostLogs + +runLiftLogObjects :: RunLogs () -> Bool -> [LOAnyType] + -> ExceptT LText.Text IO (RunLogs [LogObject]) +runLiftLogObjects rl@RunLogs{..} okDErr anyOks = liftIO $ do + forConcurrently (Map.toList rlHostLogs) + (uncurry readHostLogs) + <&> \kvs -> rl { rlHostLogs = Map.fromList kvs } + where + readHostLogs :: Host -> HostLogs () -> IO (Host, HostLogs [LogObject]) + readHostLogs h hl@HostLogs{..} = + readLogObjectStream (unJsonLogfile $ fst hlLogs) okDErr anyOks + <&> (h,) . setLogs hl . fmap (setLOhost h) + + setLogs :: HostLogs a -> b -> HostLogs b + setLogs hl x = hl { hlLogs = (fst $ hlLogs hl, x) } + setLOhost :: Host -> LogObject -> LogObject + setLOhost h lo = lo { loHost = h } readLogObjectStream :: FilePath -> Bool -> [LOAnyType] -> IO [LogObject] readLogObjectStream f okDErr anyOks = diff --git a/bench/locli/src/Data/CDF.hs b/bench/locli/src/Data/CDF.hs index 748fedfd365..429a23c1c3f 100644 --- a/bench/locli/src/Data/CDF.hs +++ b/bench/locli/src/Data/CDF.hs @@ -22,9 +22,11 @@ module Data.CDF , nEquicentiles , Divisible (..) , weightedAverage + , averageDouble , CDFError (..) , CDF(..) , cdf + , cdfRatioCDF , cdfAverageVal , centilesCDF , filterCDF @@ -45,6 +47,7 @@ module Data.CDF , stdCombine1 , stdCombine2 , CDF2 + , collapseCDF , collapseCDFs , cdf2OfCDFs -- @@ -148,6 +151,9 @@ weightedAverage xs = (`divide` (fromIntegral . sum $ fst <$> xs)) . sum $ xs <&> \(size, avg) -> fromIntegral size * avg +averageDouble :: Divisible a => [a] -> Double +averageDouble xs = toDouble (sum xs) / fromIntegral (length xs) + -- -- * Parametric CDF (cumulative distribution function) -- @@ -161,6 +167,20 @@ data CDF p a = } deriving (Functor, Generic) +cdfRatioCDF :: forall a. Fractional a => CDF I a -> CDF I a -> CDF I a +cdfRatioCDF x y = + CDF + { cdfSize = cdfSize x + , cdfAverage = I $ ((/) `on` unI . cdfAverage) x y + , cdfStddev = cdfStddev x * cdfStddev y + , cdfRange = Interval (((/) `on` low . cdfRange) x y) (((/) `on` high . cdfRange) x y) + , cdfSamples = (zipWith divCentile `on` cdfSamples) x y + } + where divCentile :: (Centile, I a) -> (Centile, I a) -> (Centile, I a) + divCentile (cx, I x') (cy, I y') = + if cx == cy then (cx, I $ x' / y') + else error "Centile incoherency: %s vs %s" (show cx) (show cy) + deriving instance (Eq a, Eq (p a), Eq (p Double)) => Eq (CDF p a) deriving instance (Show a, Show (p a), Show (p Double)) => Show (CDF p a) deriving instance (NFData a, NFData (p a), NFData (p Double)) => NFData (CDF p a) @@ -325,6 +345,24 @@ stdCombine2 cs = , .. } +collapseCDF :: ([a] -> b) -> CDF (CDF I) a -> CDF I b +collapseCDF avg c = + CDF + { cdfSize = cdfSize c + , cdfAverage = I $ cdfAverageVal c + , cdfRange = cdfRange c + & low &&& high + & both (avg . (:[])) + & uncurry Interval + , cdfStddev = cdfStddev c + , cdfSamples = zip (cdfSamples c <&> fst) + (cdfSamples c <&> + I + . avg -- :: a + . fmap (unI . snd) -- :: [(Int, a)] + . cdfSamples . snd) -- :: [(Centile a)] + } + -- | Collapse basic CDFs. collapseCDFs :: forall a. 
Combine I a -> [CDF I a] -> Either CDFError (CDF I a) collapseCDFs _ [] = Left CDFEmptyDataset From 0c11ffd1e0faa8ef406e20f29d2424da395797ce Mon Sep 17 00:00:00 2001 From: Kosyrev Serge Date: Mon, 28 Nov 2022 22:15:28 +0800 Subject: [PATCH 3/8] Makefile: define workbench targets with the nomad backend --- lib.mk | 31 +++++++++++++++++-------------- 1 file changed, 17 insertions(+), 14 deletions(-) diff --git a/lib.mk b/lib.mk index 637ce286987..872075524d8 100644 --- a/lib.mk +++ b/lib.mk @@ -1,23 +1,24 @@ -## define_proftarget :: target -> profile -> fullnixmode -> devmode -> autostart -> autostay -> profiled -> IO () -define define_profile_target +## proftgt :: target -> profile -> fullnixmode -> devmode -> autostart -> autostay -> profiled -> backend -> IO () +define proftgt $(1): shell $(1): PROFILE = $(2)-${ERA} -ifeq ($(3),true) +$(1): BACKEND = $(8) +ifeq ($(strip $(3)),true) $(1): ARGS += --arg 'useCabalRun' false endif -ifeq ($(4),true) +ifeq ($(strip $(4)),true) $(1): ARGS += --arg 'workbenchDevMode' true endif -ifeq ($(7),true) +ifeq ($(strip $(7)),true) $(1): ARGS += --arg 'profiled' true endif -ifeq ($(5)$(6),truetrue) +ifeq ($(strip $(5))$(strip $(6)),truetrue) $(1): CMD := start-cluster $(if ${ITER},--iterations ${ITER}); return endif -ifeq ($(5)$(6),truefalse) +ifeq ($(strip $(5))$(strip $(6)),truefalse) $(1): RUN := start-cluster $(if ${ITER},--iterations ${ITER}) endif -ifeq ($(3)$(4)$(5)$(6),falsetruefalsefalse) +ifeq ($(strip $(3))$(strip $(4))$(strip $(5))$(strip $(6)),falsetruefalsefalse) define EXTRA_HELP += $(1): ## Shell for profile \033[34m$(2)\033[0m (also: \033[34m-auto -autostay -nix -autonix -prof\033[0m)\n endef @@ -26,10 +27,12 @@ endif endef define define_profile_targets -$$(foreach prof,$(1),$$(eval $$(call define_profile_target,$$(prof),$$(prof),false,true,false,false,false))) -$$(foreach prof,$(1),$$(eval $$(call define_profile_target,$$(prof)-prof,$$(prof),false,true,false,false,true))) -$$(foreach prof,$(1),$$(eval $$(call define_profile_target,$$(prof)-auto,$$(prof),false,true,true,false,false))) -$$(foreach prof,$(1),$$(eval $$(call define_profile_target,$$(prof)-autostay,$$(prof),false,true,true,true,false))) -$$(foreach prof,$(1),$$(eval $$(call define_profile_target,$$(prof)-nix,$$(prof),true,false,false,false,false))) -$$(foreach prof,$(1),$$(eval $$(call define_profile_target,$$(prof)-autonix,$$(prof),true,false,true,false,false))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof), $$(prof),false, true,false,false,false, supervisor))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-prof, $$(prof),false, true,false,false, true, supervisor))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-auto, $$(prof),false, true, true,false,false, supervisor))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-autostay, $$(prof),false, true, true, true,false, supervisor))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-nix, $$(prof), true,false,false,false,false, supervisor))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-autonix, $$(prof), true,false, true,false,false, supervisor))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-nomad, $$(prof), true,false,false,false,false, nomad))) +$$(foreach prof,$(1),$$(eval $$(call proftgt,$$(prof)-autonomad, $$(prof), true,false, true,false,false, nomad))) endef From 96c7a167f0744768aef9731af0a3030381c04ff7 Mon Sep 17 00:00:00 2001 From: Federico Mastellone Date: Thu, 1 Dec 2022 11:54:40 +0000 Subject: [PATCH 4/8] Makefile: allow list-profiles target to run without jq 
--- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 5e1cadc49bc..74107a9a9f2 100644 --- a/Makefile +++ b/Makefile @@ -59,7 +59,7 @@ analyse: RUN := wb analyse std ${TAG} analyse: shell list-profiles: ## List workbench profiles - nix build .#workbench.profile-names-json --json | jq '.[0].outputs.out' -r | xargs jq . + nix build .#all-profiles-json && cat result show-profile: ## NAME=profile-name @test -n "${NAME}" || { echo 'HELP: to specify profile to show, add NAME=profle-name' && exit 1; } nix build .#all-profiles-json --json --option substitute false | jq '.[0].outputs.out' -r | xargs jq ".\"${NAME}\" | if . == null then error(\"\n###\n### Error: unknown profile: ${NAME} Please consult: make list-profiles\n###\") else . end" From 4d2f6a52c70d6cf76d07d367ee245eb0e2c48d8f Mon Sep 17 00:00:00 2001 From: Kosyrev Serge Date: Tue, 29 Nov 2022 00:50:01 +0800 Subject: [PATCH 5/8] workbench: get rid of some unnecessary complications Co-authored-by: Federico Mastellone --- flake.nix | 12 +++-- nix/custom-config.nix | 1 - nix/pkgs.nix | 54 ++++++++++------------- nix/workbench/backend/nomad-conf.nix | 3 +- nix/workbench/backend/nomad-run.nix | 19 ++++---- nix/workbench/backend/nomad.nix | 7 +-- nix/workbench/backend/services-config.nix | 11 ----- nix/workbench/backend/supervisor-run.nix | 19 ++++---- nix/workbench/backend/supervisor.nix | 3 +- nix/workbench/profiles/prof0-defaults.jq | 1 + nix/workbench/tests/default.nix | 4 +- shell.nix | 19 +++----- 12 files changed, 63 insertions(+), 90 deletions(-) diff --git a/flake.nix b/flake.nix index 32675c37a6c..e49472ee9ed 100644 --- a/flake.nix +++ b/flake.nix @@ -278,13 +278,9 @@ benchmarks = collectComponents' "benchmarks" projectPackages; }); - inherit (pkgs) workbench all-profiles-json supervisord-workbench-nix supervisord-workbench-for-profile; + inherit (pkgs) workbench all-profiles-json workbench-instance; packages = - let - supervisord-workbench = - pkgs.callPackage supervisord-workbench-nix { workbench = pinned-workbench; }; - in exes # Linux only packages: // optionalAttrs (system == "x86_64-linux") rec { @@ -296,10 +292,12 @@ ## This is a very light profile, no caching&pinning needed. workbench-ci-test = - (pkgs.supervisord-workbench-for-profile + (pkgs.workbench-instance { - # inherit supervisord-workbench; ## Not required, as long as it's fast. profileName = "ci-test-bage"; + backendName = "supervisor"; + ## Not required, as long as it's fast. + # workbench = pinned-workbench; cardano-node-rev = if __hasAttr "rev" self then self.rev diff --git a/nix/custom-config.nix b/nix/custom-config.nix index d06a2fa1635..f50508f5a9a 100644 --- a/nix/custom-config.nix +++ b/nix/custom-config.nix @@ -7,7 +7,6 @@ self: { profileName = "default-bage"; backendName = "supervisor"; basePort = 30000; - enableEKG = true; workbenchDevMode = true; extraBackendConfig = {}; }; diff --git a/nix/pkgs.nix b/nix/pkgs.nix index f0005afdcad..50cd692b95f 100644 --- a/nix/pkgs.nix +++ b/nix/pkgs.nix @@ -79,45 +79,39 @@ final: prev: with final; { # A generic, parameteric version of the workbench development environment. workbench = pkgs.callPackage ./workbench {}; - supervisord-workbench-cabal = - { workbench ? pkgs.workbench, ... }@args: pkgs.callPackage ./workbench/backend/supervisor.nix (args // { useCabalRun = true; }); - supervisord-workbench-nix = - { workbench ? pkgs.workbench, ... 
}@args: pkgs.callPackage ./workbench/backend/supervisor.nix args; + all-profiles-json = (workbench.all-profiles + { inherit (workbench-instance { backendName = "supervisor"; + profileName = "default-bage"; + }) backend; }).JSON; - nomad-workbench = - { workbench ? pkgs.workbench, ... }@args: pkgs.callPackage ./workbench/backend/nomad.nix (args // { inherit nix2container; }); - - all-profiles-json = (workbench.all-profiles{ inherit (supervisord-workbench-nix) backend; }).JSON; - - # An instance of the workbench, specialised to the supervisord backend and a profile, - # that can be used with nix-shell or lorri. + # A parametrisable workbench, that can be used with nix-shell or lorri. # See https://input-output-hk.github.io/haskell.nix/user-guide/development/ - supervisord-workbench-for-profile = - { batchName ? customConfig.localCluster.batchName - , profileName ? customConfig.localCluster.profileName - , useCabalRun ? false - , workbenchDevMode ? false - , profiled ? false - , supervisord-workbench ? pkgs.callPackage ./workbench/backend/supervisor.nix { inherit useCabalRun; } - , cardano-node-rev ? null - }: - pkgs.callPackage ./workbench/backend/supervisor-run.nix - { - inherit batchName profileName supervisord-workbench cardano-node-rev; - }; - - nomad-workbench-for-profile = - { batchName ? customConfig.localCluster.batchName + workbench-instance = + let backendRegistry = + { + supervisor = { + backend-workbench = ./workbench/backend/supervisor.nix; + workbench-runner = ./workbench/backend/supervisor-run.nix; + }; + nomad = { + backend-workbench = ./workbench/backend/nomad.nix; + workbench-runner = ./workbench/backend/nomad-run.nix; + }; + }; + in + { backendName , profileName ? customConfig.localCluster.profileName + , batchName ? customConfig.localCluster.batchName , useCabalRun ? false , workbenchDevMode ? false , profiled ? false - , nomad-workbench ? pkgs.callPackage ./workbench/backend/nomad.nix { inherit nix2container; } + , workbench ? pkgs.workbench + , backendWorkbench ? pkgs.callPackage (backendRegistry."${backendName}".backend-workbench) { inherit useCabalRun workbench; } , cardano-node-rev ? null }: - pkgs.callPackage ./workbench/backend/nomad-run.nix + pkgs.callPackage (backendRegistry."${backendName}".workbench-runner) { - inherit batchName profileName nomad-workbench cardano-node-rev; + inherit batchName profileName backendWorkbench cardano-node-rev; }; # Disable failing python uvloop tests diff --git a/nix/workbench/backend/nomad-conf.nix b/nix/workbench/backend/nomad-conf.nix index 614fa7f7cdf..4fba4bd2e9c 100644 --- a/nix/workbench/backend/nomad-conf.nix +++ b/nix/workbench/backend/nomad-conf.nix @@ -2,7 +2,6 @@ # Cardano packages/executables. , cardano-node, cardano-tracer, tx-generator # OCI Image builder. 
-, nix2container }: let @@ -10,7 +9,7 @@ let # Why `nix2container` instead of the built-in `dockerTools` ?: # - https://lewo.abesis.fr/posts/nix-build-container-image/ # - https://discourse.nixos.org/t/nix2container-another-dockertools-buildimage-implementation-based-on-skopeo/21688 - n2c = nix2container.outputs.packages.x86_64-linux.nix2container; + n2c = pkgs.nix2container.outputs.packages.x86_64-linux.nix2container; clusterImage = n2c.buildImage { name = "registry.workbench.iog.io/cluster"; diff --git a/nix/workbench/backend/nomad-run.nix b/nix/workbench/backend/nomad-run.nix index 534667eaff1..6bab4554b9e 100644 --- a/nix/workbench/backend/nomad-run.nix +++ b/nix/workbench/backend/nomad-run.nix @@ -1,10 +1,11 @@ let + backendName = "nomad"; batchNameDefault = "plain"; profileNameDefault = "default-bage"; in { pkgs , cardanoNodePackages -, nomad-workbench +, backendWorkbench ## , profileName ? profileNameDefault , batchName ? batchNameDefault @@ -13,14 +14,14 @@ in , cardano-node-rev ? "0000000000000000000000000000000000000000" }: let - inherit (nomad-workbench) workbench backend cacheDir stateDir basePort; + inherit (backendWorkbench) workbench backend cacheDir stateDir basePort; - with-nomad-profile = + with-backend-profile = { envArgsOverride ? {} }: ## TODO: envArgsOverride is not used! workbench.with-profile { inherit backend profileName; }; - inherit (with-nomad-profile {}) profileNix profile topology genesis; + inherit (with-backend-profile {}) profileNix profile topology genesis; in let @@ -70,11 +71,11 @@ in { trace ? false }: let inherit - (with-nomad-profile + (with-backend-profile { envArgsOverride = { cacheDir = "./cache"; stateDir = "./"; }; }) profileNix profile topology genesis; - run = pkgs.runCommand "workbench-run-nomad-${profileName}" + run = pkgs.runCommand "workbench-run-${backendName}-${profileName}" { requiredSystemFeatures = [ "benchmark" ]; nativeBuildInputs = with cardanoNodePackages; with pkgs; [ bash @@ -95,7 +96,7 @@ in cd $out export HOME=$out - export WB_BACKEND=nomad + export WB_BACKEND=${backendName} export CARDANO_NODE_SOCKET_PATH=$(wb backend get-node-socket-path ${stateDir} node-0) cmd=( @@ -144,8 +145,8 @@ in { inherit stateDir; inherit profileName; - inherit workbench nomad-workbench; - inherit (nomad-workbench) backend; + inherit workbench backendWorkbench; + inherit (backendWorkbench) backend; inherit profileNix profile topology genesis; inherit interactive-start interactive-stop interactive-restart; inherit profile-run; diff --git a/nix/workbench/backend/nomad.nix b/nix/workbench/backend/nomad.nix index d856818fc52..d939cf268a5 100644 --- a/nix/workbench/backend/nomad.nix +++ b/nix/workbench/backend/nomad.nix @@ -4,14 +4,12 @@ let stateDir = "run/current"; in { pkgs -, lib, nix2container +, lib , workbench ## , cacheDir ? cacheDirDefault , extraBackendConfig ? {} ## `useCabalRun` not used here like in `supervisor.nix`. -, enableEKG ? true -## , ... }: let @@ -22,7 +20,7 @@ let # Unlike the supervisor backend `useCabalRun` is always false here. 
useCabalRun = false; - services-config = import ./services-config.nix {inherit lib workbench basePort stateDir; useCabalRun = false; inherit enableEKG;}; + services-config = import ./services-config.nix {inherit lib workbench basePort stateDir; useCabalRun = false;}; extraShellPkgs = with pkgs; [ # https://docs.podman.io/en/latest/markdown/podman.1.html#rootless-mode @@ -52,7 +50,6 @@ let inherit (pkgs.cardanoNodePackages) cardano-node cardano-tracer tx-generator; - inherit nix2container; }; in pkgs.runCommand "workbench-backend-output-${profileNix.name}-${name}" (rec { diff --git a/nix/workbench/backend/services-config.nix b/nix/workbench/backend/services-config.nix index 4957d5949ca..9be8f6b9fd8 100644 --- a/nix/workbench/backend/services-config.nix +++ b/nix/workbench/backend/services-config.nix @@ -4,7 +4,6 @@ , basePort ? 30000 , stateDir ? "run/current" , useCabalRun ? false -, enableEKG ? true }: with lib; { @@ -68,16 +67,6 @@ with lib; ShelleyGenesisFile = "./genesis/genesis-shelley.json"; ByronGenesisFile = "./genesis/byron/genesis.json"; } - // optionalAttrs enableEKG - (let portShiftEkg = 100; - portShiftPrometheus = 200; - in { - hasEKG = port + portShiftEkg; - hasPrometheus = ["127.0.0.1" (port + portShiftPrometheus)]; - setupBackends = [ - "EKGViewBK" - ]; - }) ); finaliseNodeArgs = diff --git a/nix/workbench/backend/supervisor-run.nix b/nix/workbench/backend/supervisor-run.nix index 837d2998b76..e7e748e118e 100644 --- a/nix/workbench/backend/supervisor-run.nix +++ b/nix/workbench/backend/supervisor-run.nix @@ -1,10 +1,11 @@ let + backendName = "supervisor"; batchNameDefault = "plain"; profileNameDefault = "default-bage"; in { pkgs , cardanoNodePackages -, supervisord-workbench +, backendWorkbench ## , profileName ? profileNameDefault , batchName ? batchNameDefault @@ -13,14 +14,14 @@ in , cardano-node-rev ? "0000000000000000000000000000000000000000" }: let - inherit (supervisord-workbench) workbench backend cacheDir stateDir basePort; + inherit (backendWorkbench) workbench backend cacheDir stateDir basePort; - with-supervisord-profile = + with-backend-profile = { envArgsOverride ? {} }: ## TODO: envArgsOverride is not used! workbench.with-profile { inherit backend profileName; }; - inherit (with-supervisord-profile {}) profileNix profile topology genesis; + inherit (with-backend-profile {}) profileNix profile topology genesis; in let @@ -70,11 +71,11 @@ in { trace ? 
false }: let inherit - (with-supervisord-profile + (with-backend-profile { envArgsOverride = { cacheDir = "./cache"; stateDir = "./"; }; }) profileNix profile topology genesis; - run = pkgs.runCommand "workbench-run-supervisord-${profileName}" + run = pkgs.runCommand "workbench-run-${backendName}-${profileName}" { requiredSystemFeatures = [ "benchmark" ]; nativeBuildInputs = with cardanoNodePackages; with pkgs; [ bash @@ -95,7 +96,7 @@ in cd $out export HOME=$out - export WB_BACKEND=supervisor + export WB_BACKEND=${backendName} export CARDANO_NODE_SOCKET_PATH=$(wb backend get-node-socket-path ${stateDir} node-0) cmd=( @@ -144,8 +145,8 @@ in { inherit stateDir; inherit profileName; - inherit workbench supervisord-workbench; - inherit (supervisord-workbench) backend; + inherit workbench backendWorkbench; + inherit (backendWorkbench) backend; inherit profileNix profile topology genesis; inherit interactive-start interactive-stop interactive-restart; inherit profile-run; diff --git a/nix/workbench/backend/supervisor.nix b/nix/workbench/backend/supervisor.nix index 44f0d23ea60..3539640a35f 100644 --- a/nix/workbench/backend/supervisor.nix +++ b/nix/workbench/backend/supervisor.nix @@ -10,7 +10,6 @@ in , cacheDir ? cacheDirDefault , extraBackendConfig ? {} , useCabalRun ? false -, enableEKG ? true ## , ... }: @@ -22,7 +21,7 @@ let # Unlike the nomad backend `useCabalRun` is honored here. inherit useCabalRun; - services-config = import ./services-config.nix {inherit lib workbench basePort stateDir useCabalRun enableEKG;}; + services-config = import ./services-config.nix {inherit lib workbench basePort stateDir useCabalRun;}; extraShellPkgs = with pkgs; [ python3Packages.supervisor diff --git a/nix/workbench/profiles/prof0-defaults.jq b/nix/workbench/profiles/prof0-defaults.jq index 00f66f774fe..5e5977d9b33 100644 --- a/nix/workbench/profiles/prof0-defaults.jq +++ b/nix/workbench/profiles/prof0-defaults.jq @@ -75,6 +75,7 @@ def era_defaults($era): , shutdown_on_slot_synced: null , shutdown_on_block_synced: null , tracing_backend: "trace-dispatcher" ## or "iohk-monitoring" + , ekg: false , tracer: true , verbatim: { diff --git a/nix/workbench/tests/default.nix b/nix/workbench/tests/default.nix index 736fe2ee47b..fe64e762e4c 100644 --- a/nix/workbench/tests/default.nix +++ b/nix/workbench/tests/default.nix @@ -1,9 +1,9 @@ { pkgs }: let - inherit (pkgs) supervisord-workbench-for-profile cardano-cli cardanolib-py cardano-node; + inherit (pkgs) workbench-instance cardano-cli cardanolib-py cardano-node; stateDir = "./state-cluster-test"; # We want a really short duration for tests - cluster' = supervisord-workbench-for-profile { + cluster' = workbench-instance { genesisParams = { slotLength = 0.1; decentralisationParam = 0.8; diff --git a/shell.nix b/shell.nix index 2386ae8eb51..4ff0c9ff66f 100644 --- a/shell.nix +++ b/shell.nix @@ -57,30 +57,25 @@ let haveGlibcLocales = pkgs.glibcLocales != null && stdenv.hostPlatform.libc == "glibc"; workbench-shell = - let - workbenchRun = - if backendName == "nomad" - then pkgs.nomad-workbench-for-profile - { inherit profileName useCabalRun profiled; } - # Supervidor by default. 
- else pkgs.supervisord-workbench-for-profile - { inherit profileName useCabalRun profiled; } - ; - in with customConfig.localCluster; + with customConfig.localCluster; import ./nix/workbench/shell.nix { inherit pkgs lib haskellLib project; inherit setLocale haveGlibcLocales commandHelp; inherit cardano-mainnet-mirror; - inherit workbenchRun workbenchDevMode; + inherit workbenchDevMode; inherit profiled withHoogle; + workbenchRun = + pkgs.workbench-instance + { inherit backendName profileName useCabalRun profiled; }; }; devops = let profileName = "devops-bage"; - workbenchRun = pkgs.supervisord-workbench-for-profile + workbenchRun = pkgs.workbench-instance { inherit profileName; useCabalRun = false; + backendName = "supervisor"; }; devopsShellParams = { inherit profileName; From cfa0dd8da844526e8d897a48dbc77b32807cd431 Mon Sep 17 00:00:00 2001 From: Federico Mastellone Date: Tue, 29 Nov 2022 16:04:01 +0000 Subject: [PATCH 6/8] workbench: remove duplicate code/file --- nix/pkgs.nix | 19 +-- nix/workbench/backend/nomad-run.nix | 153 ------------------ .../backend/{supervisor-run.nix => run.nix} | 13 +- 3 files changed, 16 insertions(+), 169 deletions(-) delete mode 100644 nix/workbench/backend/nomad-run.nix rename nix/workbench/backend/{supervisor-run.nix => run.nix} (94%) diff --git a/nix/pkgs.nix b/nix/pkgs.nix index 50cd692b95f..d3363131fd1 100644 --- a/nix/pkgs.nix +++ b/nix/pkgs.nix @@ -88,16 +88,10 @@ final: prev: with final; { # See https://input-output-hk.github.io/haskell.nix/user-guide/development/ workbench-instance = let backendRegistry = - { - supervisor = { - backend-workbench = ./workbench/backend/supervisor.nix; - workbench-runner = ./workbench/backend/supervisor-run.nix; - }; - nomad = { - backend-workbench = ./workbench/backend/nomad.nix; - workbench-runner = ./workbench/backend/nomad-run.nix; - }; - }; + { + supervisor = ./workbench/backend/supervisor.nix; + nomad = ./workbench/backend/nomad.nix; + }; in { backendName , profileName ? customConfig.localCluster.profileName @@ -106,10 +100,11 @@ final: prev: with final; { , workbenchDevMode ? false , profiled ? false , workbench ? pkgs.workbench - , backendWorkbench ? pkgs.callPackage (backendRegistry."${backendName}".backend-workbench) { inherit useCabalRun workbench; } + , backendWorkbench ? pkgs.callPackage (backendRegistry."${backendName}") + { inherit useCabalRun workbench; } , cardano-node-rev ? null }: - pkgs.callPackage (backendRegistry."${backendName}".workbench-runner) + pkgs.callPackage ./workbench/backend/run.nix { inherit batchName profileName backendWorkbench cardano-node-rev; }; diff --git a/nix/workbench/backend/nomad-run.nix b/nix/workbench/backend/nomad-run.nix deleted file mode 100644 index 6bab4554b9e..00000000000 --- a/nix/workbench/backend/nomad-run.nix +++ /dev/null @@ -1,153 +0,0 @@ -let - backendName = "nomad"; - batchNameDefault = "plain"; - profileNameDefault = "default-bage"; -in -{ pkgs -, cardanoNodePackages -, backendWorkbench -## -, profileName ? profileNameDefault -, batchName ? batchNameDefault -## -, workbenchDevMode ? false -, cardano-node-rev ? "0000000000000000000000000000000000000000" -}: -let - inherit (backendWorkbench) workbench backend cacheDir stateDir basePort; - - with-backend-profile = - { envArgsOverride ? {} }: ## TODO: envArgsOverride is not used! 
- workbench.with-profile - { inherit backend profileName; }; - - inherit (with-backend-profile {}) profileNix profile topology genesis; -in - let - - inherit (profile.value) era composition monetary; - - path = pkgs.lib.makeBinPath path'; - path' = - [ cardanoNodePackages.bech32 pkgs.jq pkgs.gnused pkgs.coreutils pkgs.bash pkgs.moreutils - ] - ## In dev mode, call the script directly: - ++ pkgs.lib.optionals (!workbenchDevMode) - [ workbench.workbench ]; - - interactive-start = pkgs.writeScriptBin "start-cluster" '' - set -euo pipefail - - export PATH=$PATH:${path} - unset WB_MODE_CABAL= - wb start \ - --batch-name ${batchName} \ - --profile-name ${profileName} \ - --profile ${profile} \ - --cache-dir ${cacheDir} \ - --base-port ${toString basePort} \ - ''${WB_MODE_CABAL:+--cabal} \ - "$@" - ''; - - interactive-stop = pkgs.writeScriptBin "stop-cluster" '' - set -euo pipefail - - wb finish "$@" - ''; - - interactive-restart = pkgs.writeScriptBin "restart-cluster" '' - set -euo pipefail - - wb run restart "$@" && \ - echo "workbench: alternate command for this action: wb run restart" >&2 - ''; - - nodeBuildProduct = - name: - "report ${name}-log $out ${name}/stdout"; - - profile-run = - { trace ? false }: - let - inherit - (with-backend-profile - { envArgsOverride = { cacheDir = "./cache"; stateDir = "./"; }; }) - profileNix profile topology genesis; - - run = pkgs.runCommand "workbench-run-${backendName}-${profileName}" - { requiredSystemFeatures = [ "benchmark" ]; - nativeBuildInputs = with cardanoNodePackages; with pkgs; [ - bash - bech32 - coreutils - gnused - jq - moreutils - nixWrapped - pstree -# TODO: nomad - workbench.workbench - zstd - ]; - } - '' - mkdir -p $out/{cache,nix-support} - cd $out - export HOME=$out - - export WB_BACKEND=${backendName} - export CARDANO_NODE_SOCKET_PATH=$(wb backend get-node-socket-path ${stateDir} node-0) - - cmd=( - wb - ${pkgs.lib.optionalString trace "--trace"} - start - --profile-name ${profileName} - --profile ${profile} - --topology ${topology} - --genesis-cache-entry ${genesis} - --batch-name smoke-test - --base-port ${toString basePort} - --node-source ${cardanoNodePackages.cardano-node.src.origSrc} - --node-rev ${cardano-node-rev} - --cache-dir ./cache - ) - echo "''${cmd[*]}" > $out/wb-start.sh - - time "''${cmd[@]}" 2>&1 | - tee $out/wb-start.log - - ## Convert structure from $out/run/RUN-ID/* to $out/*: - rm -rf cache - rm -f run/{current,-current} - find $out -type s | xargs rm -f - run=$(cd run; ls) - (cd run; tar c $run --zstd) > archive.tar.zst - mv run/$run/* . 
- rmdir run/$run run - - cat > $out/nix-support/hydra-build-products < Date: Wed, 30 Nov 2022 13:31:14 +0000 Subject: [PATCH 7/8] workbench: refactoring of modules dependencies - Remove repeated code (services-config) from backends - Move 'services-config' to profiles and out of the backends - Move the backend dependency out from 'all-profiles' - Remove unused code and parameters or its defaults - Remove 'stateDir' from the backends - Renames for clarity --- flake.nix | 4 +- nix/pkgs.nix | 45 ++++--- nix/workbench/backend/nomad.nix | 111 ++++++++---------- nix/workbench/backend/{run.nix => runner.nix} | 43 +++---- nix/workbench/backend/supervisor-conf.nix | 7 +- nix/workbench/backend/supervisor.nix | 99 +++++++--------- nix/workbench/default.nix | 52 ++++---- nix/workbench/profile.nix | 6 +- nix/workbench/profiles/default.nix | 25 ++-- .../{backend => profiles}/services-config.nix | 10 +- nix/workbench/shell.nix | 16 +-- nix/workbench/tests/default.nix | 8 +- shell.nix | 26 ++-- 13 files changed, 207 insertions(+), 245 deletions(-) rename nix/workbench/backend/{run.nix => runner.nix} (81%) rename nix/workbench/{backend => profiles}/services-config.nix (95%) diff --git a/flake.nix b/flake.nix index e49472ee9ed..c023f7ddd10 100644 --- a/flake.nix +++ b/flake.nix @@ -278,7 +278,7 @@ benchmarks = collectComponents' "benchmarks" projectPackages; }); - inherit (pkgs) workbench all-profiles-json workbench-instance; + inherit (pkgs) workbench all-profiles-json workbench-runner; packages = exes @@ -292,7 +292,7 @@ ## This is a very light profile, no caching&pinning needed. workbench-ci-test = - (pkgs.workbench-instance + (pkgs.workbench-runner { profileName = "ci-test-bage"; backendName = "supervisor"; diff --git a/nix/pkgs.nix b/nix/pkgs.nix index d3363131fd1..ec1bb4a9424 100644 --- a/nix/pkgs.nix +++ b/nix/pkgs.nix @@ -79,34 +79,43 @@ final: prev: with final; { # A generic, parameteric version of the workbench development environment. workbench = pkgs.callPackage ./workbench {}; - all-profiles-json = (workbench.all-profiles - { inherit (workbench-instance { backendName = "supervisor"; - profileName = "default-bage"; - }) backend; }).JSON; + all-profiles-json = workbench.profile-names-json; # A parametrisable workbench, that can be used with nix-shell or lorri. # See https://input-output-hk.github.io/haskell.nix/user-guide/development/ - workbench-instance = + # The general idea is: + # backendName -> useCabalRun -> backend + # stateDir -> batchName -> profileName -> backend -> workbench -> runner + # * `workbench` is in case a pinned version of the workbench is needed. + workbench-runner = let backendRegistry = { supervisor = ./workbench/backend/supervisor.nix; - nomad = ./workbench/backend/nomad.nix; + nomad = ./workbench/backend/nomad.nix; }; in - { backendName - , profileName ? customConfig.localCluster.profileName - , batchName ? customConfig.localCluster.batchName - , useCabalRun ? false - , workbenchDevMode ? false - , profiled ? false - , workbench ? pkgs.workbench - , backendWorkbench ? pkgs.callPackage (backendRegistry."${backendName}") - { inherit useCabalRun workbench; } - , cardano-node-rev ? null + { stateDir ? customConfig.localCluster.stateDir + , batchName ? customConfig.localCluster.batchName + , profileName ? customConfig.localCluster.profileName + , backendName ? customConfig.localCluster.backendName + , useCabalRun ? false + , profiled ? false + , cardano-node-rev ? null + , workbench ? pkgs.workbench + , workbenchDevMode ? 
false }: - pkgs.callPackage ./workbench/backend/run.nix + let + # The `useCabalRun` flag is set in the backend to allow the backend to + # override its value. The runner uses the value of `useCabalRun` from + # the backend to prevent a runner using a different value. + backend = import (backendRegistry."${backendName}") + { inherit pkgs lib useCabalRun; }; + in import ./workbench/backend/runner.nix { - inherit batchName profileName backendWorkbench cardano-node-rev; + inherit pkgs lib cardanoNodePackages; + inherit stateDir batchName profileName backend; + inherit cardano-node-rev; + inherit workbench workbenchDevMode; }; # Disable failing python uvloop tests diff --git a/nix/workbench/backend/nomad.nix b/nix/workbench/backend/nomad.nix index d939cf268a5..39785eba149 100644 --- a/nix/workbench/backend/nomad.nix +++ b/nix/workbench/backend/nomad.nix @@ -1,79 +1,60 @@ -let - basePort = 30000; - cacheDirDefault = "${__getEnv "HOME"}/.cache/cardano-workbench"; - stateDir = "run/current"; -in { pkgs , lib -, workbench -## -, cacheDir ? cacheDirDefault -, extraBackendConfig ? {} ## `useCabalRun` not used here like in `supervisor.nix`. , ... }: let - backend = - rec - { name = "nomad"; - - # Unlike the supervisor backend `useCabalRun` is always false here. - useCabalRun = false; + name = "nomad"; - services-config = import ./services-config.nix {inherit lib workbench basePort stateDir; useCabalRun = false;}; + # Unlike the supervisor backend `useCabalRun` is always false here. + useCabalRun = false; - extraShellPkgs = with pkgs; [ - # https://docs.podman.io/en/latest/markdown/podman.1.html#rootless-mode - podman - # Was not needed even thou it says so! - # https://docs.podman.io/en/latest/markdown/podman.1.html#note-unsupported-file-systems-in-rootless-mode - # fuse-overlayfs - nomad - nomad-driver-podman - ]; + extraShellPkgs = with pkgs; [ + # https://docs.podman.io/en/latest/markdown/podman.1.html#rootless-mode + podman + # Was not needed even thou it says so! 
+ # https://docs.podman.io/en/latest/markdown/podman.1.html#note-unsupported-file-systems-in-rootless-mode + # fuse-overlayfs + nomad + nomad-driver-podman + ]; - materialise-profile = - { profileNix }: - let - supervisorConfPath = - import ./supervisor-conf.nix - { inherit (profileNix) node-services; - inherit - pkgs lib stateDir - basePort - extraBackendConfig; - unixHttpServerPort = "/tmp/supervisor.sock"; - }; - nomadConf = - import ./nomad-conf.nix - { inherit pkgs; - inherit - (pkgs.cardanoNodePackages) - cardano-node cardano-tracer tx-generator; - }; - in pkgs.runCommand "workbench-backend-output-${profileNix.name}-${name}" - (rec { - inherit supervisorConfPath; - # All In One - clusterImage = nomadConf.clusterImage; - clusterImageCopyToPodman = clusterImage.copyToPodman; - clusterImageName = clusterImage.imageName; - clusterImageTag = clusterImage.imageTag; - }) - '' - mkdir $out + materialise-profile = + { stateDir, profileNix }: + let + supervisorConfPath = + import ./supervisor-conf.nix + { inherit (profileNix) node-services; + inherit pkgs lib stateDir; + unixHttpServerPort = "/tmp/supervisor.sock"; + }; + nomadConf = + import ./nomad-conf.nix + { inherit pkgs; + inherit + (pkgs.cardanoNodePackages) + cardano-node cardano-tracer tx-generator; + }; + in pkgs.runCommand "workbench-backend-output-${profileNix.name}-${name}" + (rec { + inherit supervisorConfPath; + # All In One + clusterImage = nomadConf.clusterImage; + clusterImageCopyToPodman = clusterImage.copyToPodman; + clusterImageName = clusterImage.imageName; + clusterImageTag = clusterImage.imageTag; + }) + '' + mkdir $out - ln -s $supervisorConfPath $out/supervisor.conf + ln -s $supervisorConfPath $out/supervisor.conf - ln -s $clusterImage $out/clusterImage - echo $clusterImageName > $out/clusterImageName - echo $clusterImageTag > $out/clusterImageTag - ln -s $clusterImageCopyToPodman/bin/copy-to-podman $out/clusterImageCopyToPodman - ''; - }; + ln -s $clusterImage $out/clusterImage + echo $clusterImageName > $out/clusterImageName + echo $clusterImageTag > $out/clusterImageTag + ln -s $clusterImageCopyToPodman/bin/copy-to-podman $out/clusterImageCopyToPodman + ''; in { - inherit cacheDir stateDir basePort; - inherit workbench; - inherit backend; + inherit name useCabalRun extraShellPkgs materialise-profile; } diff --git a/nix/workbench/backend/run.nix b/nix/workbench/backend/runner.nix similarity index 81% rename from nix/workbench/backend/run.nix rename to nix/workbench/backend/runner.nix index 148395da708..2aab5c6f8b3 100644 --- a/nix/workbench/backend/run.nix +++ b/nix/workbench/backend/runner.nix @@ -1,30 +1,28 @@ -let - batchNameDefault = "plain"; - profileNameDefault = "default-bage"; -in { pkgs +, lib , cardanoNodePackages -, backendWorkbench ## -, profileName ? profileNameDefault -, batchName ? batchNameDefault +, stateDir +, batchName +, profileName +, backend ## -, workbenchDevMode ? false , cardano-node-rev ? "0000000000000000000000000000000000000000" +, workbench +, workbenchDevMode ? false +## +, cacheDir ? "${__getEnv "HOME"}/.cache/cardano-workbench" +, basePort ? 30000 }: let - inherit (backendWorkbench) workbench backend cacheDir stateDir basePort; - - backendName = backendWorkbench.backend.name; + backendName = backend.name; - useCabalRun = backendWorkbench.backend.useCabalRun; + inherit (backend) useCabalRun; - with-backend-profile = - { envArgsOverride ? {} }: ## TODO: envArgsOverride is not used! 
- workbench.with-profile - { inherit backend profileName; }; + with-backend-profile = workbench.with-profile + { inherit stateDir profileName backend basePort workbench; }; - inherit (with-backend-profile {}) profileNix profile topology genesis; + inherit (with-backend-profile) profileNix profile topology genesis; in let @@ -74,8 +72,7 @@ in { trace ? false }: let inherit - (with-backend-profile - { envArgsOverride = { cacheDir = "./cache"; stateDir = "./"; }; }) + (with-backend-profile) profileNix profile topology genesis; run = pkgs.runCommand "workbench-run-${backendName}-${profileName}" @@ -93,7 +90,7 @@ in zstd ] ++ - backendWorkbench.backend.extraShellPkgs + backend.extraShellPkgs ; } '' @@ -148,10 +145,8 @@ in }; in { - inherit stateDir; - inherit profileName; - inherit workbench backendWorkbench; - inherit (backendWorkbench) backend; + inherit stateDir batchName profileName backend; + inherit workbench; inherit profileNix profile topology genesis; inherit interactive-start interactive-stop interactive-restart; inherit profile-run; diff --git a/nix/workbench/backend/supervisor-conf.nix b/nix/workbench/backend/supervisor-conf.nix index 9b03b38880e..51703db5a5b 100644 --- a/nix/workbench/backend/supervisor-conf.nix +++ b/nix/workbench/backend/supervisor-conf.nix @@ -1,12 +1,9 @@ { pkgs , lib , stateDir -, basePort , node-services , unixHttpServerPort ? null , inetHttpServerPort ? null - ## Last-moment overrides: -, extraBackendConfig }: with lib; @@ -71,9 +68,7 @@ let stopasgroup = true; killasgroup = true; }; - } - // - extraBackendConfig; + }; ## ## nodeSvcSupervisorProgram :: NodeService -> SupervisorConfSection diff --git a/nix/workbench/backend/supervisor.nix b/nix/workbench/backend/supervisor.nix index 3539640a35f..1c3106fa276 100644 --- a/nix/workbench/backend/supervisor.nix +++ b/nix/workbench/backend/supervisor.nix @@ -1,70 +1,51 @@ -let - basePort = 30000; - cacheDirDefault = "${__getEnv "HOME"}/.cache/cardano-workbench"; - stateDir = "run/current"; -in { pkgs , lib -, workbench -## -, cacheDir ? cacheDirDefault -, extraBackendConfig ? {} -, useCabalRun ? false -## +, useCabalRun , ... }: let - backend = - rec - { name = "supervisor"; - - # Unlike the nomad backend `useCabalRun` is honored here. - inherit useCabalRun; + name = "supervisor"; - services-config = import ./services-config.nix {inherit lib workbench basePort stateDir useCabalRun;}; + # Unlike the nomad backend `useCabalRun` is honored here. + inherit useCabalRun; - extraShellPkgs = with pkgs; [ - python3Packages.supervisor - ] - ++ lib.optionals ( useCabalRun) - (with haskellPackages; [ - cabalWrapped - ghcid - haskellBuildUtils - cabal-plan - ]) - ## Workbench's main script is called directly in dev mode. - ++ lib.optionals (!useCabalRun) - (with cardanoNodePackages; [ - cardano-node - cardano-tracer - tx-generator - ]); + extraShellPkgs = with pkgs; + [ + python3Packages.supervisor + ] + ++ lib.optionals ( useCabalRun) + (with haskellPackages; [ + cabalWrapped + ghcid + haskellBuildUtils + cabal-plan + ]) + ## Workbench's main script is called directly in dev mode. 
+ ++ lib.optionals (!useCabalRun) + (with cardanoNodePackages; [ + cardano-node + cardano-tracer + tx-generator + ]); - materialise-profile = - { profileNix }: - pkgs.runCommand "workbench-backend-output-${profileNix.name}-${name}d" - { - ## Backend-specific Nix bits: - ## mkBackendConf :: Profile -> SupervisorConf/DockerConf - supervisorConfPath = - import ./supervisor-conf.nix - { inherit (profileNix) node-services; - inherit - pkgs lib stateDir - basePort - extraBackendConfig; - inetHttpServerPort = "127.0.0.1:9001"; - }; - } - '' - mkdir $out - cp $supervisorConfPath $out/supervisor.conf - ''; - }; + materialise-profile = + { stateDir, profileNix }: + pkgs.runCommand "workbench-backend-output-${profileNix.name}-${name}" + { + ## Backend-specific Nix bits: + ## mkBackendConf :: Profile -> SupervisorConf/DockerConf + supervisorConfPath = + import ./supervisor-conf.nix + { inherit (profileNix) node-services; + inherit pkgs lib stateDir; + inetHttpServerPort = "127.0.0.1:9001"; + }; + } + '' + mkdir $out + cp $supervisorConfPath $out/supervisor.conf + ''; in { - inherit cacheDir stateDir basePort; - inherit workbench; - inherit backend; + inherit name useCabalRun extraShellPkgs materialise-profile; } diff --git a/nix/workbench/default.nix b/nix/workbench/default.nix index 62cbbcf0f1c..009827bd3be 100644 --- a/nix/workbench/default.nix +++ b/nix/workbench/default.nix @@ -71,34 +71,28 @@ let profile-names = __fromJSON (__readFile profile-names-json); - all-profiles = - ## The backend is an attrset of AWS/supervisord-specific methods and parameters. - { backend }: - rec { - mkProfile = - profileName: - pkgs.callPackage ./profiles - { inherit - pkgs - runWorkbenchJqOnly runJq workbench - profileName; - inherit (backend) services-config; - }; - - value = genAttrs profile-names mkProfile; - - JSON = pkgs.writeText "all-profiles.json" (__toJSON (mapAttrs (_: x: x.value) value)); - }; - with-profile = - { backend, profileName }: + # `workbench` is the pinned workbench in case there is one. + { stateDir, profileName, backend, basePort, workbench }: let - ps = all-profiles { inherit backend; }; - - profileNix = ps.value."${profileName}" - or (throw "No such profile: ${profileName}; Known profiles: ${toString (__attrNames ps.value)}"); - - profile = import ./profile.nix { inherit pkgs lib profileNix backend; }; + ps = + let + mkProfile = + profileName: + pkgs.callPackage ./profiles + { inherit pkgs lib; + inherit stateDir profileName; + # `useCabalRun`, final decision, from the backend! 
+ inherit (backend) useCabalRun; + inherit basePort; + inherit workbench; + }; + in genAttrs profile-names mkProfile; + + profileNix = ps."${profileName}" + or (throw "No such profile: ${profileName}; Known profiles: ${toString (__attrNames ps)}"); + + profile = import ./profile.nix { inherit pkgs lib stateDir profileNix backend; }; topology = import ./topology.nix { inherit pkgs profileNix profile; }; @@ -113,11 +107,13 @@ let run-analysis = import ./analyse.nix; in { + inherit workbench' workbench runWorkbench runWorkbenchJqOnly; + inherit runJq; - inherit workbench' workbench runWorkbench runWorkbenchJqOnly; + inherit profile-names profile-names-json; - inherit all-profiles profile-names profile-names-json with-profile; + inherit with-profile; inherit run-analysis; } diff --git a/nix/workbench/profile.nix b/nix/workbench/profile.nix index 9359b1d0e1b..c41a21766a5 100644 --- a/nix/workbench/profile.nix +++ b/nix/workbench/profile.nix @@ -1,10 +1,10 @@ -{ pkgs, lib, profileNix, backend }: +{ pkgs, lib, stateDir, profileNix, backend }: with lib; - pkgs.runCommand "workbench-profile-output-${profileNix.name}-${backend.name}d" + pkgs.runCommand "workbench-profile-output-${profileNix.name}-${backend.name}" { buildInputs = []; profileConfigJsonPath = profileNix.JSON; nodeSpecsJsonPath = profileNix.node-specs.JSON; - backendConfigPath = backend.materialise-profile { inherit profileNix; }; + backendConfigPath = backend.materialise-profile { inherit stateDir profileNix; }; nodeServices = __toJSON (flip mapAttrs profileNix.node-services diff --git a/nix/workbench/profiles/default.nix b/nix/workbench/profiles/default.nix index 783f987ecd7..e5c31224c89 100644 --- a/nix/workbench/profiles/default.nix +++ b/nix/workbench/profiles/default.nix @@ -1,13 +1,24 @@ -{ pkgs, cardanoLib -, runCommand, runWorkbenchJqOnly, runJq, workbench - -## An attrset of specific methods and parameters. -, services-config - +{ pkgs, lib, cardanoLib +, runCommand +, workbench +## +, stateDir , profileName +, useCabalRun +, basePort }: let + inherit (workbench) runWorkbenchJqOnly runJq; + + services-config = import ./services-config.nix + { + inherit lib workbench; + inherit stateDir; + inherit useCabalRun; + inherit basePort; + }; + JSON = runWorkbenchJqOnly "profile-${profileName}.json" "profile json ${profileName}"; @@ -21,7 +32,7 @@ let topology.files = runCommand "topology-${profileName}" {} - "${workbench}/bin/wb topology make ${JSON} $out"; + "${workbench.workbench}/bin/wb topology make ${JSON} $out"; node-specs = { diff --git a/nix/workbench/backend/services-config.nix b/nix/workbench/profiles/services-config.nix similarity index 95% rename from nix/workbench/backend/services-config.nix rename to nix/workbench/profiles/services-config.nix index 9be8f6b9fd8..56ff528a0e8 100644 --- a/nix/workbench/backend/services-config.nix +++ b/nix/workbench/profiles/services-config.nix @@ -1,9 +1,10 @@ { lib , workbench ## -, basePort ? 30000 -, stateDir ? "run/current" -, useCabalRun ? false +, stateDir +, useCabalRun +## +, basePort }: with lib; { @@ -69,9 +70,6 @@ with lib; } ); - finaliseNodeArgs = - profile: nodeSpec: args: args; - finaliseGeneratorService = profile: svc: recursiveUpdate svc ({ diff --git a/nix/workbench/shell.nix b/nix/workbench/shell.nix index 462e4474b95..a4006a8a365 100644 --- a/nix/workbench/shell.nix +++ b/nix/workbench/shell.nix @@ -7,7 +7,7 @@ ## , cardano-mainnet-mirror ## -, workbenchRun +, workbenchRunner , workbenchDevMode ? false ## , profiled ? 
false @@ -18,9 +18,9 @@ with lib; let - inherit (workbenchRun) profileName backend profile; + inherit (workbenchRunner) profileName backend profile; - shellHook = { profileName, backend, workbenchDevMode, profiled, withMainnet }: '' + shellHook = { profileName, backend, profiled, workbenchDevMode, withMainnet }: '' while test $# -gt 0 do shift; done ## Flush argv[] @@ -70,7 +70,7 @@ let in project.shellFor { name = "workbench-shell"; - shellHook = shellHook { inherit profileName backend workbenchDevMode profiled withMainnet; }; + shellHook = shellHook { inherit profileName backend profiled workbenchDevMode withMainnet; }; inherit withHoogle; @@ -109,15 +109,15 @@ in project.shellFor { pkgs.moreutils pkgs.pstree pkgs.time - workbenchRun.interactive-start - workbenchRun.interactive-stop - workbenchRun.interactive-restart + workbenchRunner.interactive-start + workbenchRunner.interactive-stop + workbenchRunner.interactive-restart ] ++ lib.optional haveGlibcLocales pkgs.glibcLocales ++ lib.optionals (!backend.useCabalRun) [cardano-topology cardano-cli locli] ++ backend.extraShellPkgs ++ lib.optionals (!workbenchDevMode) [ - workbenchRun.workbench.workbench + workbenchRunner.workbench.workbench ] ; diff --git a/nix/workbench/tests/default.nix b/nix/workbench/tests/default.nix index fe64e762e4c..befd0584423 100644 --- a/nix/workbench/tests/default.nix +++ b/nix/workbench/tests/default.nix @@ -1,13 +1,9 @@ { pkgs }: let - inherit (pkgs) workbench-instance cardano-cli cardanolib-py cardano-node; + inherit (pkgs) workbench-runner cardano-cli cardanolib-py cardano-node; stateDir = "./state-cluster-test"; # We want a really short duration for tests - cluster' = workbench-instance { - genesisParams = { - slotLength = 0.1; - decentralisationParam = 0.8; - }; + cluster' = workbench-runner { inherit stateDir; }; # Library bash functions for cluster tests diff --git a/shell.nix b/shell.nix index 4ff0c9ff66f..8ffb5d28c02 100644 --- a/shell.nix +++ b/shell.nix @@ -5,8 +5,8 @@ in { withHoogle ? defaultCustomConfig.withHoogle , profileName ? defaultCustomConfig.localCluster.profileName , backendName ? defaultCustomConfig.localCluster.backendName -, workbenchDevMode ? defaultCustomConfig.localCluster.workbenchDevMode , useCabalRun ? true +, workbenchDevMode ? defaultCustomConfig.localCluster.workbenchDevMode , customConfig ? 
{ inherit withHoogle; localCluster = { @@ -64,22 +64,22 @@ let inherit cardano-mainnet-mirror; inherit workbenchDevMode; inherit profiled withHoogle; - workbenchRun = - pkgs.workbench-instance - { inherit backendName profileName useCabalRun profiled; }; + workbenchRunner = + pkgs.workbench-runner + { inherit profileName backendName useCabalRun profiled; }; }; devops = let profileName = "devops-bage"; - workbenchRun = pkgs.workbench-instance + workbenchRunner = pkgs.workbench-runner { inherit profileName; - useCabalRun = false; backendName = "supervisor"; + useCabalRun = false; }; devopsShellParams = { inherit profileName; - backend = workbenchRun.backend; + inherit (workbenchRunner) backend; inherit workbenchDevMode profiled; withMainnet = false; }; @@ -88,7 +88,7 @@ let { inherit pkgs lib haskellLib project; inherit setLocale haveGlibcLocales commandHelp; inherit cardano-mainnet-mirror; - inherit workbenchRun workbenchDevMode; + inherit workbenchRunner workbenchDevMode; inherit profiled withHoogle; }; in project.shellFor { @@ -110,11 +110,11 @@ let pkgs.graphviz python3Packages.supervisor python3Packages.ipython - workbenchRun.interactive-start - workbenchRun.interactive-stop - workbenchRun.interactive-restart + workbenchRunner.interactive-start + workbenchRunner.interactive-stop + workbenchRunner.interactive-restart cardanolib-py - workbenchRun.workbench.workbench + workbenchRunner.workbench.workbench pstree pkgs.time ]; @@ -127,7 +127,7 @@ let ${devopsShell.shellHook devopsShellParams} # Socket path default to first node launched by "start-cluster": - export CARDANO_NODE_SOCKET_PATH=$(wb backend get-node-socket-path ${workbenchRun.stateDir} 'node-0') + export CARDANO_NODE_SOCKET_PATH=$(wb backend get-node-socket-path ${workbenchRunner.stateDir} 'node-0') ${setLocale} From 963afb5eeb8bfc85145a0c8749d78145f64aba64 Mon Sep 17 00:00:00 2001 From: Kosyrev Serge Date: Wed, 7 Dec 2022 17:16:52 +0800 Subject: [PATCH 8/8] wb analyse: fix trace-frequencies for oddly-structured messages & less clutter in logs --- .../locli/src/Cardano/Analysis/API/Ground.hs | 7 +++--- nix/workbench/analyse.sh | 11 +++++---- nix/workbench/run.sh | 24 +++++++++---------- nix/workbench/wb | 4 ++-- 4 files changed, 24 insertions(+), 22 deletions(-) diff --git a/bench/locli/src/Cardano/Analysis/API/Ground.hs b/bench/locli/src/Cardano/Analysis/API/Ground.hs index 03212908048..3bab908ca75 100644 --- a/bench/locli/src/Cardano/Analysis/API/Ground.hs +++ b/bench/locli/src/Cardano/Analysis/API/Ground.hs @@ -323,10 +323,9 @@ dumpObjects ident xs (JsonOutputFile f) = liftIO $ do dumpAssociatedObjects :: ToJSON a => String -> [(JsonLogfile, a)] -> ExceptT Text IO () dumpAssociatedObjects ident xs = liftIO $ flip mapConcurrently_ xs $ - \(JsonLogfile f, x) -> do - progress ident (Q f) - withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> - LBS.hPutStrLn hnd $ encode x + \(JsonLogfile f, x) -> + withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> + LBS.hPutStrLn hnd $ encode x readAssociatedObjects :: forall a. 
FromJSON a => String -> [JsonLogfile] -> ExceptT Text IO [(JsonLogfile, a)] diff --git a/nix/workbench/analyse.sh b/nix/workbench/analyse.sh index 6c8d7dee9c5..78d0c30c07e 100644 --- a/nix/workbench/analyse.sh +++ b/nix/workbench/analyse.sh @@ -395,7 +395,7 @@ case "$op" in -e 'chain-rejecta.json' \ -e 'chain.json' )) - progress "analyse" "prettifying JSON data: ${analysis_jsons[*]}" + progress "analyse" "prettifying JSON data: ${#analysis_jsons[*]} files" time json_compact_prettify "${analysis_jsons[@]}" progress "output" "run: $(white $run) subdir: $(yellow analysis)" ;; @@ -537,7 +537,7 @@ case "$op" in --slurpfile freqs "$adir"/logs-$mach.tracefreq.json done;; - trace-frequencies | trace-freq | freq ) + trace-frequencies | trace-freq | freq | tf ) local new_only= sargs=() while test $# -gt 0 do case "$1" in @@ -548,7 +548,7 @@ case "$op" in local logfile=${1:?$usage}; shift - trace_frequencies "$logfile" > "${logfile}.tracefreqs.json" + trace_frequencies_json "$logfile" > "${logfile}.tracefreqs.json" local src=$(wc -l <"$logfile") local res=$(cut -d' ' -f1 "${logfile}.trace-freqs" | @@ -632,7 +632,10 @@ trace_frequencies_json() { jq 'reduce inputs as $line ( {}; ( $line - | (try .ns[0] // .ns) + ":" + (.data.kind //.data.val.kind) + | (try .ns[0] // .ns) + + ":" + + (if .data | type != "object" then .data | type + else (.data.kind //.data.val.kind) end) ) as $key | (.[$key] // 0) as $acc | . + { "\($key)": ($acc + 1) } diff --git a/nix/workbench/run.sh b/nix/workbench/run.sh index 1ff684a17d9..6fb49d79943 100644 --- a/nix/workbench/run.sh +++ b/nix/workbench/run.sh @@ -93,11 +93,11 @@ case "$op" in echo $global_rundir;; list-runs | runs | lsr ) - local usage="USAGE: wb run $op [--on-remote | -or | -r]" + local usage="USAGE: wb run $op [--remote | -r]" local on_remote= while test $# -gt 0 do case "$1" in - --on-remote | -or | -r ) on_remote='true';; + --remote | -r ) on_remote='true';; * ) msg "FATAL: list, unknown flag '$1'"; usage_run;; esac; shift; done @@ -109,14 +109,14 @@ case "$op" in fi;; list-remote | remote | lsrr ) ## Convenience alias for 'list' - run "${sargs[@]}" list --on-remote;; + run list-runs --remote;; list-verbose | verb | lsrv ) - local usage="USAGE: wb run $op [--on-remote | -or | -r] [--limit [N=10] | -n N]" + local usage="USAGE: wb run $op [--remote | -r] [--limit [N=10] | -n N]" local on_remote= limit=10 while test $# -gt 0 do case "$1" in - --on-remote | -or | -r ) on_remote='true';; + --remote | -r ) on_remote='true';; --limit | -n ) limit=$2; shift;; * ) msg "FATAL: list-verbose, unknown flag '$1'"; usage_run;; esac; shift; done @@ -129,14 +129,14 @@ case "$op" in fi;; list-verbose-remote | rverb | lsrvr ) ## Convenience alias for 'list-verbose' - run "${sargs[@]}" list-verbose --on-remote;; + run list-verbose --remote;; list-sets | sets | lss ) - local usage="USAGE: wb run $op [--on-remote | -or | -r]" + local usage="USAGE: wb run $op [--remote | -r]" local on_remote= while test $# -gt 0 do case "$1" in - --on-remote | -or | -r ) on_remote='true';; + --remote | -r ) on_remote='true';; * ) msg "FATAL: list-sets, unknown flag '$1'"; usage_run;; esac; shift; done @@ -148,7 +148,7 @@ case "$op" in fi;; list-sets-remote | rsets | lssr ) ## Convenience alias for 'list-sets' - run "${sargs[@]}" list-sets --on-remote;; + run list-sets --remote;; set-add | add | sa ) local usage="USAGE: wb run $op NAME [RUN..]" @@ -163,12 +163,12 @@ case "$op" in done);; run-or-set | ros ) - local usage="USAGE: wb run $op [--query] [--on-remote | -or | -r] NAME" + local 
usage="USAGE: wb run $op [--query] [--remote | -r] NAME" local query= get_args=() on_remote= while test $# -gt 0 do case "$1" in --try | --query ) get_args+=($1); query='true';; - --on-remote | -or | -r ) on_remote='true';; + --remote | -r ) on_remote='true';; --* ) msg "FATAL: run-or-set, unknown flag '$1'"; usage_run;; * ) break;; esac; shift; done @@ -619,7 +619,7 @@ EOF local usage="USAGE: wb run $op RUN.." local runs=() run for rs in $* - do runs+=($(run "${sargs[@]}" run-or-set --query --on-remote $rs || echo $rs)) + do runs+=($(run "${sargs[@]}" run-or-set --query --remote $rs || echo $rs)) done if test $# = 0; then runs=(current); fi diff --git a/nix/workbench/wb b/nix/workbench/wb index 1bf94bbfe84..aca6bdd9e0f 100755 --- a/nix/workbench/wb +++ b/nix/workbench/wb @@ -84,7 +84,7 @@ usage_extra() { block-propagation blockprop bp performance perf performance-host perf-host - trace-frequencies trace-freq freq + trace-frequencies trace-freq freq tf chain-rejecta-reasons chain-rejecta rejecta $(color white) EOF @@ -308,7 +308,7 @@ main() { | block-propagation | blockprop | bp \ | performance | perf \ | performance-host | perf-host \ - | trace-frequencies | trace-freq | freq \ + | trace-frequencies | trace-freq | freq | tf \ | chain-rejecta-reasons | chain-rejecta | rejecta \ ) analyse $op "$@";;