From a79520e2b3cb5b3f6a1b39aacb414320d5005c0e Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Fri, 15 Mar 2024 10:24:20 +0100 Subject: [PATCH] Support loading `.rbl` blueprint files (#5513) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### What * Part of https://github.com/rerun-io/rerun/issues/5294 This implements loading of blueprint files (.rbl) on native and on web, using either drag-and-drop or the `Open…` command. One shortcoming of the approach in this PR is documented here: * https://github.com/rerun-io/rerun/issues/5514 ### Checklist * [x] I have read and agree to [Contributor Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and the [Code of Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md) * [x] I've included a screenshot or gif (if applicable) * [x] I have tested the web demo (if applicable): * Using newly built examples: [app.rerun.io](https://app.rerun.io/pr/5513/index.html) * Using examples from latest `main` build: [app.rerun.io](https://app.rerun.io/pr/5513/index.html?manifest_url=https://app.rerun.io/version/main/examples_manifest.json) * Using full set of examples from `nightly` build: [app.rerun.io](https://app.rerun.io/pr/5513/index.html?manifest_url=https://app.rerun.io/version/nightly/examples_manifest.json) * [x] The PR title and labels are set such as to maximize their usefulness for the next release's CHANGELOG * [x] If applicable, add a new check to the [release checklist](https://github.com/rerun-io/rerun/blob/main/tests/python/release_checklist)! 
- [PR Build Summary](https://build.rerun.io/pr/5513) - [Docs preview](https://rerun.io/preview/9b3e7d9aed9113d340516caed9d87897c8ae8abb/docs) - [Examples preview](https://rerun.io/preview/9b3e7d9aed9113d340516caed9d87897c8ae8abb/examples) - [Recent benchmark results](https://build.rerun.io/graphs/crates.html) - [Wasm size tracking](https://build.rerun.io/graphs/sizes.html) --- .../src/data_loader/loader_rrd.rs | 6 +- crates/re_data_source/src/data_source.rs | 18 +++ crates/re_data_source/src/lib.rs | 13 +- crates/re_data_source/src/load_file.rs | 4 +- crates/re_entity_db/src/entity_db.rs | 9 +- crates/re_log_types/src/lib.rs | 11 ++ crates/re_viewer/src/app.rs | 140 +++++++++++------- crates/re_viewer/src/app_state.rs | 2 +- crates/re_viewer/src/store_hub.rs | 49 ++++-- crates/re_viewport/src/container.rs | 1 + 10 files changed, 172 insertions(+), 81 deletions(-) diff --git a/crates/re_data_source/src/data_loader/loader_rrd.rs b/crates/re_data_source/src/data_loader/loader_rrd.rs index 78b90bc10d66..a6f89c7dce3f 100644 --- a/crates/re_data_source/src/data_loader/loader_rrd.rs +++ b/crates/re_data_source/src/data_loader/loader_rrd.rs @@ -24,7 +24,8 @@ impl crate::DataLoader for RrdLoader { re_tracing::profile_function!(filepath.display().to_string()); let extension = crate::extension(&filepath); - if extension != "rrd" { + if !matches!(extension.as_str(), "rbl" | "rrd") { + // NOTE: blueprints and recordings have the same file format return Err(crate::DataLoaderError::Incompatible(filepath.clone())); } @@ -66,7 +67,8 @@ impl crate::DataLoader for RrdLoader { re_tracing::profile_function!(filepath.display().to_string()); let extension = crate::extension(&filepath); - if extension != "rrd" { + if !matches!(extension.as_str(), "rbl" | "rrd") { + // NOTE: blueprints and recordings have the same file format return Err(crate::DataLoaderError::Incompatible(filepath)); } diff --git a/crates/re_data_source/src/data_source.rs b/crates/re_data_source/src/data_source.rs index 
b8a334119ec1..d2728bde1f42 100644 --- a/crates/re_data_source/src/data_source.rs +++ b/crates/re_data_source/src/data_source.rs @@ -108,6 +108,24 @@ impl DataSource { } } + pub fn file_name(&self) -> Option { + match self { + DataSource::RrdHttpUrl(url) => url.split('/').last().map(|r| r.to_owned()), + #[cfg(not(target_arch = "wasm32"))] + DataSource::FilePath(_, path) => { + path.file_name().map(|s| s.to_string_lossy().to_string()) + } + DataSource::FileContents(_, file_contents) => Some(file_contents.name.clone()), + DataSource::WebSocketAddr(_) => None, + #[cfg(not(target_arch = "wasm32"))] + DataSource::Stdin => None, + } + } + + pub fn is_blueprint(&self) -> Option { + self.file_name().map(|name| name.ends_with(".rbl")) + } + /// Stream the data from the given data source. /// /// Will do minimal checks (e.g. that the file exists), for synchronous errors, diff --git a/crates/re_data_source/src/lib.rs b/crates/re_data_source/src/lib.rs index cfa37dab6875..0caba04b91ff 100644 --- a/crates/re_data_source/src/lib.rs +++ b/crates/re_data_source/src/lib.rs @@ -38,12 +38,21 @@ pub use self::load_file::load_from_path; /// This is what you get when loading a file on Web, or when using drag-n-drop. // // TODO(#4554): drag-n-drop streaming support -#[derive(Clone, Debug)] +#[derive(Clone)] pub struct FileContents { pub name: String, pub bytes: std::sync::Arc<[u8]>, } +impl std::fmt::Debug for FileContents { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("FileContents") + .field("name", &self.name) + .field("bytes", &format_args!("{} bytes", self.bytes.len())) + .finish() + } +} + // …given that all feature flags are turned on for the `image` crate. 
pub const SUPPORTED_IMAGE_EXTENSIONS: &[&str] = &[ "avif", "bmp", "dds", "exr", "farbfeld", "ff", "gif", "hdr", "ico", "jpeg", "jpg", "pam", @@ -55,7 +64,7 @@ pub const SUPPORTED_MESH_EXTENSIONS: &[&str] = &["glb", "gltf", "obj", "stl"]; // TODO(#4532): `.ply` data loader should support 2D point cloud & meshes pub const SUPPORTED_POINT_CLOUD_EXTENSIONS: &[&str] = &["ply"]; -pub const SUPPORTED_RERUN_EXTENSIONS: &[&str] = &["rrd"]; +pub const SUPPORTED_RERUN_EXTENSIONS: &[&str] = &["rbl", "rrd"]; // TODO(#4555): Add catch-all builtin `DataLoader` for text files pub const SUPPORTED_TEXT_EXTENSIONS: &[&str] = &["txt", "md"]; diff --git a/crates/re_data_source/src/load_file.rs b/crates/re_data_source/src/load_file.rs index b49572d8a344..158ec9028365 100644 --- a/crates/re_data_source/src/load_file.rs +++ b/crates/re_data_source/src/load_file.rs @@ -34,7 +34,7 @@ pub fn load_from_path( re_log::info!("Loading {path:?}…"); - let data = load(settings, path, None)?; + let rx = load(settings, path, None)?; // TODO(cmc): should we always unconditionally set store info though? // If we reach this point, then at least one compatible `DataLoader` has been found. @@ -45,7 +45,7 @@ pub fn load_from_path( } } - send(&settings.store_id, data, tx); + send(&settings.store_id, rx, tx); Ok(()) } diff --git a/crates/re_entity_db/src/entity_db.rs b/crates/re_entity_db/src/entity_db.rs index adb947d05d18..f27a2e5d38bf 100644 --- a/crates/re_entity_db/src/entity_db.rs +++ b/crates/re_entity_db/src/entity_db.rs @@ -82,9 +82,6 @@ fn insert_row_with_retries( /// /// NOTE: all mutation is to be done via public functions! pub struct EntityDb { - /// The [`StoreId`] for this log. - store_id: StoreId, - /// Set by whomever created this [`EntityDb`]. 
pub data_source: Option, @@ -126,7 +123,6 @@ impl EntityDb { ); let query_caches = re_query_cache::Caches::new(&data_store); Self { - store_id: store_id.clone(), data_source: None, set_store_info: None, last_modified_at: web_time::Instant::now(), @@ -193,11 +189,11 @@ impl EntityDb { } pub fn store_kind(&self) -> StoreKind { - self.store_id.kind + self.store_id().kind } pub fn store_id(&self) -> &StoreId { - &self.store_id + self.data_store.id() } pub fn timelines(&self) -> impl ExactSizeIterator { @@ -486,7 +482,6 @@ impl EntityDb { re_tracing::profile_function!(); let Self { - store_id: _, data_source: _, set_store_info: _, last_modified_at: _, diff --git a/crates/re_log_types/src/lib.rs b/crates/re_log_types/src/lib.rs index 0bc8f6a350b5..b1950ce26fea 100644 --- a/crates/re_log_types/src/lib.rs +++ b/crates/re_log_types/src/lib.rs @@ -227,6 +227,17 @@ impl LogMsg { Self::ArrowMsg(store_id, _) => store_id, } } + + pub fn set_store_id(&mut self, new_store_id: StoreId) { + match self { + LogMsg::SetStoreInfo(store_info) => { + store_info.info.store_id = new_store_id; + } + LogMsg::ArrowMsg(msg_store_id, _) => { + *msg_store_id = new_store_id; + } + } + } } impl_into_enum!(SetStoreInfo, LogMsg, SetStoreInfo); diff --git a/crates/re_viewer/src/app.rs b/crates/re_viewer/src/app.rs index 1faf301ecbff..7f96af6d2380 100644 --- a/crates/re_viewer/src/app.rs +++ b/crates/re_viewer/src/app.rs @@ -1,3 +1,5 @@ +use itertools::Itertools as _; + use re_data_source::{DataSource, FileContents}; use re_entity_db::entity_db::EntityDb; use re_log_types::{FileSource, LogMsg, StoreKind}; @@ -366,7 +368,7 @@ impl App { SystemCommand::LoadStoreDb(entity_db) => { let store_id = entity_db.store_id().clone(); - store_hub.insert_recording(entity_db); + store_hub.insert_entity_db(entity_db); store_hub.set_recording_id(store_id); } @@ -443,18 +445,24 @@ impl App { ) { match cmd { UICommand::SaveRecording => { - save_recording(self, store_context, None); + if let Err(err) = 
save_recording(self, store_context, None) { + re_log::error!("Failed to save recording: {err}"); + } } UICommand::SaveRecordingSelection => { - save_recording( + if let Err(err) = save_recording( self, store_context, self.state.loop_selection(store_context), - ); + ) { + re_log::error!("Failed to save recording: {err}"); + } } UICommand::SaveBlueprint => { - save_blueprint(self, store_context); + if let Err(err) = save_blueprint(self, store_context) { + re_log::error!("Failed to save blueprint: {err}"); + } } #[cfg(not(target_arch = "wasm32"))] @@ -934,7 +942,39 @@ impl App { if let Some(err) = err { re_log::warn!("Data source {} has left unexpectedly: {err}", msg.source); } else { - re_log::debug!("Data source {} has left", msg.source); + re_log::debug!("Data source {} has finished", msg.source); + + // This could be the signal that we finished loading a blueprint. + // In that case, we want to make it the default. + // We wait with activating blueprints until they are fully loaded, + // so that we don't run heuristics on half-loaded blueprints. + + let blueprints = store_hub + .entity_dbs_from_channel_source(&channel_source) + .filter_map(|entity_db| { + if let Some(store_info) = entity_db.store_info() { + match store_info.store_id.kind { + StoreKind::Recording => { + // Recordings become active as soon as we start streaming them. + } + StoreKind::Blueprint => { + return Some(store_info.clone()); + } + } + } + None + }) + .collect_vec(); + + for re_log_types::StoreInfo { + store_id, + application_id, + .. + } in blueprints + { + re_log::debug!("Activating newly loaded blueprint"); + store_hub.set_blueprint_for_app_id(store_id, application_id); + } } continue; } @@ -942,6 +982,11 @@ impl App { let store_id = msg.store_id(); + if store_hub.is_active_blueprint(store_id) { + // TODO(#5514): handle loading of active blueprints. + re_log::warn_once!("Loading a blueprint {store_id} that is active. 
See https://github.com/rerun-io/rerun/issues/5514 for details."); + } + let entity_db = store_hub.entity_db_mut(store_id); if entity_db.data_source.is_none() { @@ -952,8 +997,8 @@ impl App { re_log::error_once!("Failed to add incoming msg: {err}"); }; - // Set the recording-id after potentially creating the store in the - // hub. This ordering is important because the `StoreHub` internally + // Set the recording-id after potentially creating the store in the hub. + // This ordering is important because the `StoreHub` internally // updates the app-id when changing the recording. if let LogMsg::SetStoreInfo(msg) = &msg { match msg.info.store_id.kind { @@ -961,19 +1006,18 @@ impl App { re_log::debug!("Opening a new recording: {:?}", msg.info); store_hub.set_recording_id(store_id.clone()); } - StoreKind::Blueprint => { - re_log::debug!("Opening a new blueprint: {:?}", msg.info); - store_hub.set_blueprint_for_app_id( - store_id.clone(), - msg.info.application_id.clone(), - ); + // We wait with activaing blueprints until they are fully loaded, + // so that we don't run heuristics on half-loaded blueprints. + // TODO(#5297): heed special "end-of-blueprint" message to activate blueprint. + // Otherwise on a mixed connection (SDK sending both blueprint and recording) + // the blueprint won't be activated until the whole _recording_ has finished loading. } } } // Do analytics after ingesting the new message, - // because thats when the `entity_db.store_info` is set, + // because that's when the `entity_db.store_info` is set, // which we use in the analytics call. 
let entity_db = store_hub.entity_db_mut(store_id); let is_new_store = matches!(&msg, LogMsg::SetStoreInfo(_msg)); @@ -1508,11 +1552,10 @@ fn save_recording( app: &mut App, store_context: Option<&StoreContext<'_>>, loop_selection: Option<(re_entity_db::Timeline, re_log_types::TimeRangeF)>, -) { +) -> anyhow::Result<()> { let Some(entity_db) = store_context.as_ref().and_then(|view| view.recording) else { // NOTE: Can only happen if saving through the command palette. - re_log::error!("No recording data to save"); - return; + anyhow::bail!("No recording data to save"); }; let file_name = "data.rrd"; @@ -1523,29 +1566,36 @@ fn save_recording( "Save recording" }; - save_entity_db( - app, - file_name.to_owned(), - title.to_owned(), - entity_db, - loop_selection, - ); + save_entity_db(app, file_name.to_owned(), title.to_owned(), || { + entity_db.to_messages(loop_selection) + }) } -fn save_blueprint(app: &mut App, store_context: Option<&StoreContext<'_>>) { +fn save_blueprint(app: &mut App, store_context: Option<&StoreContext<'_>>) -> anyhow::Result<()> { let Some(store_context) = store_context else { - anyhow::bail!("No blueprint to save"); + anyhow::bail!("No blueprint to save"); }; - let entity_db = store_context.blueprint; + re_tracing::profile_function!(); + + // We change the recording id to a new random one, + // otherwise when saving and loading a blueprint file, we can end up + // in a situation where the store_id we're loading is the same as the currently active one, + // which means they will merge in a strange way. 
+ // This is also related to https://github.com/rerun-io/rerun/issues/5295 + let new_store_id = re_log_types::StoreId::random(StoreKind::Blueprint); + let mut messages = store_context.blueprint.to_messages(None)?; + for message in &mut messages { + message.set_store_id(new_store_id.clone()); + } let file_name = format!( "{}.rbl", crate::saving::sanitize_app_id(&store_context.app_id) ); let title = "Save blueprint"; - save_entity_db(app, file_name, title.to_owned(), entity_db, None); + + save_entity_db(app, file_name, title.to_owned(), || Ok(messages)) } #[allow(clippy::needless_pass_by_ref_mut)] // `app` is only used on native @@ -1553,21 +1603,14 @@ fn save_entity_db( #[allow(unused_variables)] app: &mut App, // only used on native file_name: String, title: String, - entity_db: &EntityDb, - loop_selection: Option<(re_log_types::Timeline, re_log_types::TimeRangeF)>, -) { + to_log_messages: impl FnOnce() -> re_log_types::DataTableResult>, +) -> anyhow::Result<()> { re_tracing::profile_function!(); // Web #[cfg(target_arch = "wasm32")] { - let messages = match entity_db.to_messages(loop_selection) { - Ok(messages) => messages, - Err(err) => { - re_log::error!("File saving failed: {err}"); - return; - } - }; + let messages = to_log_messages()?; wasm_bindgen_futures::spawn_local(async move { if let Err(err) = async_save_dialog(&file_name, &title, &messages).await { @@ -1587,22 +1630,15 @@ fn save_entity_db( .save_file() }; if let Some(path) = path { - let messages = match entity_db.to_messages(loop_selection) { - Ok(messages) => messages, - Err(err) => { - re_log::error!("File saving failed: {err}"); - return; - } - }; - if let Err(err) = app.background_tasks.spawn_file_saver(move || { + let messages = to_log_messages()?; + app.background_tasks.spawn_file_saver(move || { crate::saving::encode_to_file(&path, messages.iter())?; Ok(path) - }) { - // NOTE: Can only happen if saving through the command palette. 
- re_log::error!("File saving failed: {err}"); - } + })?; } } + + Ok(()) } #[cfg(target_arch = "wasm32")] diff --git a/crates/re_viewer/src/app_state.rs b/crates/re_viewer/src/app_state.rs index fc94eb3d1cad..4678373d7049 100644 --- a/crates/re_viewer/src/app_state.rs +++ b/crates/re_viewer/src/app_state.rs @@ -431,7 +431,7 @@ impl AppState { re_tracing::profile_function!(); self.recording_configs - .retain(|store_id, _| store_hub.contains_recording(store_id)); + .retain(|store_id, _| store_hub.contains_store(store_id)); } /// Returns the blueprint query that should be used for generating the current diff --git a/crates/re_viewer/src/store_hub.rs b/crates/re_viewer/src/store_hub.rs index 41ff28ecdfd3..93c558fbf02c 100644 --- a/crates/re_viewer/src/store_hub.rs +++ b/crates/re_viewer/src/store_hub.rs @@ -173,14 +173,22 @@ impl StoreHub { /// Change which blueprint is active for a given [`ApplicationId`] #[inline] pub fn set_blueprint_for_app_id(&mut self, blueprint_id: StoreId, app_id: ApplicationId) { - re_log::debug!("Switching blueprint for {app_id:?} to {blueprint_id:?}"); + re_log::debug!("Switching blueprint for {app_id} to {blueprint_id}"); self.blueprint_by_app_id.insert(app_id, blueprint_id); } + /// Is the given blueprint id the active blueprint for any app id? + pub fn is_active_blueprint(&self, blueprint_id: &StoreId) -> bool { + self.blueprint_by_app_id + .values() + .any(|id| id == blueprint_id) + } + /// Clear the current blueprint pub fn clear_current_blueprint(&mut self) { if let Some(app_id) = &self.selected_application_id { if let Some(blueprint_id) = self.blueprint_by_app_id.remove(app_id) { + re_log::debug!("Clearing blueprint for {app_id}: {blueprint_id}"); self.store_bundle.remove(&blueprint_id); } } @@ -193,12 +201,12 @@ impl StoreHub { } } - /// Insert a new recording into the [`StoreHub`]. + /// Insert a new recording or blueprint into the [`StoreHub`]. /// /// Note that the recording is not automatically made active. 
Use [`StoreHub::set_recording_id`] /// if needed. - pub fn insert_recording(&mut self, entity_db: EntityDb) { - self.store_bundle.insert_recording(entity_db); + pub fn insert_entity_db(&mut self, entity_db: EntityDb) { + self.store_bundle.insert_entity_db(entity_db); } /// Mutable access to a [`EntityDb`] by id @@ -271,9 +279,19 @@ impl StoreHub { .and_then(|id| self.store_bundle.recording(id)) } - /// Check whether the [`StoreHub`] contains the referenced recording - pub fn contains_recording(&self, id: &StoreId) -> bool { - self.store_bundle.contains_recording(id) + /// Check whether the [`StoreHub`] contains the referenced store (recording or blueprint). + pub fn contains_store(&self, id: &StoreId) -> bool { + self.store_bundle.contains_store(id) + } + + pub fn entity_dbs_from_channel_source<'a>( + &'a self, + source: &'a re_smart_channel::SmartChannelSource, + ) -> impl Iterator + 'a { + self.store_bundle + .entity_dbs + .values() + .filter(move |db| db.data_source.as_ref() == Some(source)) } /// Remove any recordings with a network source pointing at this `uri`. @@ -382,7 +400,7 @@ impl StoreHub { // We found the blueprint we were looking for; make it active. // borrow-checker won't let us just call `self.set_blueprint_for_app_id` re_log::debug!( - "Switching blueprint for {app_id:?} to {:?}", + "Switching blueprint for {app_id} to {} loaded from {blueprint_path:?}", store.store_id(), ); self.blueprint_by_app_id @@ -490,9 +508,10 @@ impl StoreBundle { /// Returns either a recording or blueprint [`EntityDb`]. /// One is created if it doesn't already exist. pub fn entity_db_entry(&mut self, id: &StoreId) -> &mut EntityDb { - self.entity_dbs - .entry(id.clone()) - .or_insert_with(|| EntityDb::new(id.clone())) + self.entity_dbs.entry(id.clone()).or_insert_with(|| { + re_log::debug!("Creating new store: {id}"); + EntityDb::new(id.clone()) + }) } /// All loaded [`EntityDb`], both recordings and blueprints, in arbitrary order. 
@@ -554,8 +573,7 @@ impl StoreBundle { // -- - pub fn contains_recording(&self, id: &StoreId) -> bool { - debug_assert_eq!(id.kind, StoreKind::Recording); + pub fn contains_store(&self, id: &StoreId) -> bool { self.entity_dbs.contains_key(id) } @@ -577,8 +595,7 @@ impl StoreBundle { .or_insert_with(|| EntityDb::new(id.clone())) } - pub fn insert_recording(&mut self, entity_db: EntityDb) { - debug_assert_eq!(entity_db.store_kind(), StoreKind::Recording); + pub fn insert_entity_db(&mut self, entity_db: EntityDb) { self.entity_dbs .insert(entity_db.store_id().clone(), entity_db); } @@ -629,6 +646,8 @@ impl StoreBundle { let mut blueprint_db = EntityDb::new(id.clone()); + re_log::debug!("Creating a new blueprint {id}"); + blueprint_db.set_store_info(re_log_types::SetStoreInfo { row_id: re_log_types::RowId::new(), info: re_log_types::StoreInfo { diff --git a/crates/re_viewport/src/container.rs b/crates/re_viewport/src/container.rs index c463575479bd..dff9e5dd92d3 100644 --- a/crates/re_viewport/src/container.rs +++ b/crates/re_viewport/src/container.rs @@ -1,5 +1,6 @@ use ahash::HashMap; use egui_tiles::TileId; + use re_data_store::LatestAtQuery; use re_entity_db::EntityDb; use re_log::ResultExt;