Skip to content

Commit

Permalink
Improving Node-Metrics backend canister (#712)
Browse files Browse the repository at this point in the history
  • Loading branch information
pietrodimarco-dfinity authored and sasa-tomic committed Aug 13, 2024
1 parent 86315e6 commit f3974fc
Show file tree
Hide file tree
Showing 4 changed files with 52 additions and 31 deletions.
4 changes: 4 additions & 0 deletions rs/dre-canisters/trustworthy-node-metrics/args-rewards.bin
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
(record {
from_ts = 0;
to_ts = 1723507199978493357;
})
4 changes: 2 additions & 2 deletions rs/dre-canisters/trustworthy-node-metrics/args.bin
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
(record {
subnet_id = opt principal "34mm6-e24vu-pnyce-lp4fx-xtz2h-y5jpo-kwzgz-grf4m-v5kvr-xmeu4-tqe";
ts = opt 1720742398550724680;
subnet_id = opt principal "yinp6-35cfo-wgcd2-oc4ty-2kqpf-t4dul-rfk33-fsq3r-mfmua-m2ngh-jqe";
ts = opt 0;
})
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ use dfn_core::api::PrincipalId;
use futures::FutureExt;
use ic_management_canister_types::{NodeMetricsHistoryArgs, NodeMetricsHistoryResponse};
use ic_protobuf::registry::subnet::v1::SubnetListRecord;
use itertools::Itertools;

use crate::types::{NodeMetricsGrouped, NodeMetricsStored, NodeMetricsStoredKey};
use crate::{
Expand Down Expand Up @@ -142,6 +143,7 @@ fn store_metrics(node_metrics_storable: Vec<((u64, candid::Principal), NodeMetri
}
}

/// Update metrics
pub async fn update_metrics() -> anyhow::Result<()> {
let subnets = fetch_subnets().await?;
let latest_ts = stable_memory::latest_ts().unwrap_or_default();
Expand All @@ -155,15 +157,17 @@ pub async fn update_metrics() -> anyhow::Result<()> {
);
let subnet_metrics: Vec<(PrincipalId, Vec<NodeMetricsHistoryResponse>)> = fetch_metrics(subnets, refresh_ts).await?;
let grouped_by_node: BTreeMap<PrincipalId, Vec<NodeMetricsGrouped>> = grouped_by_node(subnet_metrics);
let nodes_principal = grouped_by_node.keys().map(|p| p.0).collect_vec();

for (node_id, node_metrics_grouped) in grouped_by_node {
let first_ts = node_metrics_grouped.first().expect("node_metrics empty").0;
let metrics_before = stable_memory::metrics_before_ts(node_id.0, first_ts);

let initial_proposed_total = metrics_before.as_ref().map(|(_, metrics)| metrics.num_blocks_proposed_total).unwrap_or(0);
let initial_failed_total = metrics_before.as_ref().map(|(_, metrics)| metrics.num_blocks_failures_total).unwrap_or(0);
let latest_metrics = stable_memory::latest_metrics(nodes_principal);

for (node_id, node_metrics_grouped) in grouped_by_node {
let (initial_proposed_total, initial_failed_total) = latest_metrics
.get(&node_id.0)
.map(|metrics| (metrics.num_blocks_proposed_total, metrics.num_blocks_failures_total))
.unwrap_or((0, 0));
let node_metrics_storable = node_metrics_storable(node_id, node_metrics_grouped, initial_proposed_total, initial_failed_total);

store_metrics(node_metrics_storable);
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,12 +1,18 @@
use candid::{Decode, Encode, Principal};
use ic_stable_structures::memory_manager::{MemoryId, MemoryManager, VirtualMemory};
use ic_stable_structures::{storable::Bound, Storable};
use ic_stable_structures::{DefaultMemoryImpl, StableBTreeMap};
use itertools::Itertools;
use std::borrow::Cow;
use std::cell::RefCell;
use std::collections::BTreeMap;

use crate::types::{NodeMetricsStored, NodeMetricsStoredKey, TimestampNanos};

type Memory = VirtualMemory<DefaultMemoryImpl>;

const MAX_VALUE_SIZE_BYTE: u32 = 102;

impl Storable for NodeMetricsStored {
fn to_bytes(&self) -> std::borrow::Cow<[u8]> {
Cow::Owned(Encode!(self).unwrap())
Expand All @@ -16,42 +22,49 @@ impl Storable for NodeMetricsStored {
Decode!(bytes.as_ref(), Self).unwrap()
}

const BOUND: Bound = Bound::Unbounded;
const BOUND: Bound = Bound::Bounded {
max_size: MAX_VALUE_SIZE_BYTE,
is_fixed_size: false,
};
}

thread_local! {
pub static MAP: RefCell<StableBTreeMap<NodeMetricsStoredKey, NodeMetricsStored, DefaultMemoryImpl>> =
RefCell::new(StableBTreeMap::init(DefaultMemoryImpl::default()));

static MEMORY_MANAGER: RefCell<MemoryManager<DefaultMemoryImpl>> =
RefCell::new(MemoryManager::init(DefaultMemoryImpl::default()));

static NODE_METRICS_MAP: RefCell<StableBTreeMap<NodeMetricsStoredKey, NodeMetricsStored, Memory>> =
RefCell::new(StableBTreeMap::init(
MEMORY_MANAGER.with(|m| m.borrow().get(MemoryId::new(0)))
));

}

pub fn insert(key: NodeMetricsStoredKey, value: NodeMetricsStored) {
MAP.with(|p| p.borrow_mut().insert(key, value));
NODE_METRICS_MAP.with(|p| p.borrow_mut().insert(key, value));
}

pub fn latest_ts() -> Option<TimestampNanos> {
MAP.with(|p| p.borrow().last_key_value()).map(|((ts, _), _)| ts)
}

#[allow(dead_code)]
pub fn get(node_metrics_key: &NodeMetricsStoredKey) -> Option<NodeMetricsStored> {
MAP.with(|p| p.borrow().get(node_metrics_key))
NODE_METRICS_MAP.with(|p| p.borrow().last_key_value()).map(|((ts, _), _)| ts)
}

pub fn get_metrics_range(from_ts: TimestampNanos, to_ts: Option<TimestampNanos>) -> Vec<(NodeMetricsStoredKey, NodeMetricsStored)> {
let range = {
let first = (from_ts, Principal::anonymous());
let last = (to_ts.unwrap_or(u64::MAX), Principal::anonymous());
first..=last
};
NODE_METRICS_MAP.with(|p| {
let to_ts = to_ts.unwrap_or(u64::MAX);

MAP.with(|p| p.borrow().range(range).collect_vec())
p.borrow().iter().filter(|((ts, _), _)| *ts >= from_ts && *ts <= to_ts).collect_vec()
})
}

pub fn metrics_before_ts(principal: Principal, ts: u64) -> Option<(NodeMetricsStoredKey, NodeMetricsStored)> {
MAP.with(|p| {
p.borrow()
.range((u64::MIN, principal)..(ts, principal))
.filter(|((_, p), _)| p == &principal)
.last()
})
/// Returns, for each requested principal, the most recently stored metrics entry.
///
/// `NODE_METRICS_MAP` is keyed by `(timestamp, principal)` and a `StableBTreeMap`
/// iterates its keys in ascending order, so later inserts into `latest_metrics`
/// overwrite earlier ones — the entry kept per principal is the one with the
/// highest timestamp. Principals with no stored metrics are simply absent from
/// the returned map.
pub fn latest_metrics(principals: Vec<Principal>) -> BTreeMap<Principal, NodeMetricsStored> {
    use std::collections::BTreeSet;

    // Build a set once so the per-entry membership test below is O(log m)
    // instead of a linear Vec scan (original cost: O(entries * principals)).
    let wanted: BTreeSet<Principal> = principals.into_iter().collect();

    let mut latest_metrics = BTreeMap::new();
    NODE_METRICS_MAP.with(|p| {
        for ((_, principal), value) in p.borrow().iter() {
            if wanted.contains(&principal) {
                // Ascending (ts, principal) iteration order: this overwrite
                // leaves the latest-timestamp entry in place at the end.
                latest_metrics.insert(principal, value);
            }
        }
    });

    latest_metrics
}

0 comments on commit f3974fc

Please sign in to comment.