From 1e84aa5f718fbf9ff32158caf746f225f017b598 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Fri, 31 Mar 2023 17:43:57 +0200 Subject: [PATCH 01/89] `arrow2_convert` primitive (de)serialization benchmarks (#1742) * arrow2_convert primitive benchmarks * addressing PR comments --- crates/re_arrow_store/Cargo.toml | 4 + .../re_arrow_store/benches/arrow2_convert.rs | 141 ++++++++++++++++++ 2 files changed, 145 insertions(+) create mode 100644 crates/re_arrow_store/benches/arrow2_convert.rs diff --git a/crates/re_arrow_store/Cargo.toml b/crates/re_arrow_store/Cargo.toml index 8487f656b17f..93e91b269a2f 100644 --- a/crates/re_arrow_store/Cargo.toml +++ b/crates/re_arrow_store/Cargo.toml @@ -111,3 +111,7 @@ required-features = ["polars"] [[bench]] name = "data_store" harness = false + +[[bench]] +name = "arrow2_convert" +harness = false diff --git a/crates/re_arrow_store/benches/arrow2_convert.rs b/crates/re_arrow_store/benches/arrow2_convert.rs new file mode 100644 index 000000000000..c53dd6b5bd80 --- /dev/null +++ b/crates/re_arrow_store/benches/arrow2_convert.rs @@ -0,0 +1,141 @@ +//! Keeping track of performance issues/regressions in `arrow2_convert` that directly affect us. + +#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + +use arrow2::{array::PrimitiveArray, datatypes::PhysicalType, types::PrimitiveType}; +use criterion::{criterion_group, criterion_main, Criterion}; +use re_log_types::{ + component_types::InstanceKey, external::arrow2_convert::deserialize::TryIntoCollection, + Component as _, DataCell, +}; + +// --- + +criterion_group!(benches, serialize, deserialize); +criterion_main!(benches); + +// --- + +#[cfg(not(debug_assertions))] +const NUM_INSTANCES: usize = 100_000; + +// `cargo test` also runs the benchmark setup code, so make sure they run quickly: +#[cfg(debug_assertions)] +const NUM_INSTANCES: usize = 1; + +// --- + +fn serialize(c: &mut Criterion) { + let mut group = c.benchmark_group(format!( + "arrow2_convert/serialize/primitive/instances={NUM_INSTANCES}" + )); + group.throughput(criterion::Throughput::Elements(NUM_INSTANCES as _)); + + { + group.bench_function("arrow2_convert", |b| { + b.iter(|| { + let cell = DataCell::from_component::(0..NUM_INSTANCES as u64); + assert_eq!(NUM_INSTANCES as u32, cell.num_instances()); + assert_eq!( + cell.datatype().to_physical_type(), + PhysicalType::Primitive(PrimitiveType::UInt64) + ); + cell + }); + }); + } + + { + group.bench_function("arrow2/from_values", |b| { + b.iter(|| { + let values = PrimitiveArray::from_values(0..NUM_INSTANCES as u64).boxed(); + let cell = crate::DataCell::from_arrow(InstanceKey::name(), values); + assert_eq!(NUM_INSTANCES as u32, cell.num_instances()); + assert_eq!( + cell.datatype().to_physical_type(), + PhysicalType::Primitive(PrimitiveType::UInt64) + ); + cell + }); + }); + } + + { + group.bench_function("arrow2/from_vec", |b| { + b.iter(|| { + // NOTE: We do the `collect()` here on purpose! + // + // All of these APIs have to allocate an array under the hood, except `from_vec` + // which is O(1) (it just unsafely reuses the vec's data pointer). + // We need to measure the collection in order to have a leveled playing field. 
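+                //
+                // (In other words: `from_values` above has to iterate and copy every element,
+                // while `from_vec` merely takes ownership of the vec's existing allocation.)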
+ let values = PrimitiveArray::from_vec((0..NUM_INSTANCES as u64).collect()).boxed(); + let cell = crate::DataCell::from_arrow(InstanceKey::name(), values); + assert_eq!(NUM_INSTANCES as u32, cell.num_instances()); + assert_eq!( + cell.datatype().to_physical_type(), + PhysicalType::Primitive(PrimitiveType::UInt64) + ); + cell + }); + }); + } +} + +fn deserialize(c: &mut Criterion) { + let mut group = c.benchmark_group(format!( + "arrow2_convert/deserialize/primitive/instances={NUM_INSTANCES}" + )); + group.throughput(criterion::Throughput::Elements(NUM_INSTANCES as _)); + + let cell = DataCell::from_component::(0..NUM_INSTANCES as u64); + let data = cell.as_arrow(); + + { + group.bench_function("arrow2_convert", |b| { + b.iter(|| { + let keys: Vec = data.as_ref().try_into_collection().unwrap(); + assert_eq!(NUM_INSTANCES, keys.len()); + assert_eq!( + InstanceKey(NUM_INSTANCES as u64 / 2), + keys[NUM_INSTANCES / 2] + ); + keys + }); + }); + } + + { + group.bench_function("arrow2/validity_checks", |b| { + b.iter(|| { + let data = data.as_any().downcast_ref::>().unwrap(); + let keys: Vec = data + .into_iter() + .filter_map(|v| v.copied().map(InstanceKey)) + .collect(); + assert_eq!(NUM_INSTANCES, keys.len()); + assert_eq!( + InstanceKey(NUM_INSTANCES as u64 / 2), + keys[NUM_INSTANCES / 2] + ); + keys + }); + }); + } + + { + group.bench_function("arrow2/validity_bypass", |b| { + b.iter(|| { + let data = data.as_any().downcast_ref::>().unwrap(); + assert!(data.validity().is_none()); + let keys: Vec = data.values_iter().copied().map(InstanceKey).collect(); + assert_eq!(NUM_INSTANCES, keys.len()); + assert_eq!( + InstanceKey(NUM_INSTANCES as u64 / 2), + keys[NUM_INSTANCES / 2] + ); + keys + }); + }); + } +} From 731d941eb776cf36d24d314d98de9f9efd69e537 Mon Sep 17 00:00:00 2001 From: benjamin de charmoy Date: Sat, 1 Apr 2023 10:57:19 +0200 Subject: [PATCH 02/89] Fix logged obb being displayed with half of the requested size (#1749) --- .../re_viewer/src/ui/view_spatial/scene/scene_part/boxes3d.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes3d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes3d.rs index 9a4cc28388b6..6411a9ca0b90 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes3d.rs @@ -64,7 +64,7 @@ impl Boxes3DPart { let color = annotation_info.color(color.map(move |c| c.to_array()).as_ref(), default_color); - let scale = glam::Vec3::from(half_size); + let scale = glam::Vec3::from(half_size) * 2.0; let rot = rotation.map(glam::Quat::from).unwrap_or_default(); let tran = position.map_or(glam::Vec3::ZERO, glam::Vec3::from); let transform = glam::Affine3A::from_scale_rotation_translation(scale, rot, tran); From ef2b5dc851d3449a1eadf1d2c84b4b2e158b9619 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Sat, 1 Apr 2023 16:15:11 +0200 Subject: [PATCH 03/89] benchmarks for common vector ops across `smallvec`/`tinyvec`/std (#1747) * benchmarks for common vector ops * handle N=1 --- Cargo.lock | 2 + crates/re_arrow_store/Cargo.toml | 6 + crates/re_arrow_store/benches/vectors.rs | 329 +++++++++++++++++++++++ 3 files changed, 337 insertions(+) create mode 100644 crates/re_arrow_store/benches/vectors.rs diff --git a/Cargo.lock b/Cargo.lock index 2b615dd38f5b..6fe5c47177ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3802,8 +3802,10 @@ dependencies = [ "re_format", "re_log", "re_log_types", + "smallvec", 
"static_assertions", "thiserror", + "tinyvec", ] [[package]] diff --git a/crates/re_arrow_store/Cargo.toml b/crates/re_arrow_store/Cargo.toml index 93e91b269a2f..9e7ddde3a6a3 100644 --- a/crates/re_arrow_store/Cargo.toml +++ b/crates/re_arrow_store/Cargo.toml @@ -81,6 +81,8 @@ polars-core = { workspace = true, features = [ "sort_multiple", ] } rand = "0.8" +smallvec = { version = "1.0", features = ["const_generics", "union"] } +tinyvec = { version = "1.6", features = ["alloc", "rustc_1_55"] } [lib] @@ -115,3 +117,7 @@ harness = false [[bench]] name = "arrow2_convert" harness = false + +[[bench]] +name = "vectors" +harness = false diff --git a/crates/re_arrow_store/benches/vectors.rs b/crates/re_arrow_store/benches/vectors.rs new file mode 100644 index 000000000000..c61b4667fa92 --- /dev/null +++ b/crates/re_arrow_store/benches/vectors.rs @@ -0,0 +1,329 @@ +//! Keeping track of performance issues/regressions for common vector operations. + +#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + +use criterion::{criterion_group, criterion_main, Criterion}; + +use smallvec::SmallVec; +use tinyvec::TinyVec; + +criterion_group!(benches, sort, split, swap, swap_opt); +criterion_main!(benches); + +// --- + +#[cfg(not(debug_assertions))] +const NUM_INSTANCES: usize = 10_000; +#[cfg(not(debug_assertions))] +const SMALLVEC_SIZE: usize = 4; + +// `cargo test` also runs the benchmark setup code, so make sure they run quickly: +#[cfg(debug_assertions)] +const NUM_INSTANCES: usize = 1; +#[cfg(debug_assertions)] +const SMALLVEC_SIZE: usize = 1; + +// --- Benchmarks --- + +fn split(c: &mut Criterion) { + let mut group = c.benchmark_group(format!("vector_ops/split_off/instances={NUM_INSTANCES}")); + group.throughput(criterion::Throughput::Elements(NUM_INSTANCES as _)); + + { + fn split_off( + data: &mut SmallVec<[T; N]>, + split_idx: usize, + ) -> SmallVec<[T; N]> { + if split_idx >= data.len() { + return SmallVec::default(); + } + + let second_half = SmallVec::from_slice(&data[split_idx..]); + data.truncate(split_idx); + second_half + } + + let data: SmallVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).collect(); + + group.bench_function(format!("smallvec/n={SMALLVEC_SIZE}/manual"), |b| { + b.iter(|| { + let mut data = data.clone(); + let second_half = split_off(&mut data, NUM_INSTANCES / 2); + assert_eq!(NUM_INSTANCES, data.len() + second_half.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, second_half[0]); + (data, second_half) + }); + }); + } + + { + let data: TinyVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).collect(); + + group.bench_function(format!("tinyvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data = data.clone(); + let second_half = data.split_off(NUM_INSTANCES / 2); + assert_eq!(NUM_INSTANCES, data.len() + second_half.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, second_half[0]); + (data, second_half) + }); + }); + } + + { + fn split_off( + data: &mut TinyVec<[T; N]>, + split_idx: usize, + ) -> TinyVec<[T; N]> { + if split_idx >= data.len() { + return TinyVec::default(); + } + + let second_half = TinyVec::from(&data[split_idx..]); + data.truncate(split_idx); + second_half + } + + let data: TinyVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).collect(); + + group.bench_function(format!("tinyvec/n={SMALLVEC_SIZE}/manual"), |b| { + b.iter(|| { + let mut data = data.clone(); + let second_half = split_off(&mut data, NUM_INSTANCES / 2); + assert_eq!(NUM_INSTANCES, data.len() + second_half.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, 
second_half[0]); + (data, second_half) + }); + }); + } + + { + let data: Vec = (0..NUM_INSTANCES as i64).collect(); + + group.bench_function("vec", |b| { + b.iter(|| { + let mut data = data.clone(); + let second_half = data.split_off(NUM_INSTANCES / 2); + assert_eq!(NUM_INSTANCES, data.len() + second_half.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, second_half[0]); + (data, second_half) + }); + }); + } + + { + fn split_off(data: &mut Vec, split_idx: usize) -> Vec { + if split_idx >= data.len() { + return Vec::default(); + } + + let second_half = Vec::from(&data[split_idx..]); + data.truncate(split_idx); + second_half + } + + let data: Vec = (0..NUM_INSTANCES as i64).collect(); + + group.bench_function("vec/manual", |b| { + b.iter(|| { + let mut data = data.clone(); + let second_half = split_off(&mut data, NUM_INSTANCES / 2); + assert_eq!(NUM_INSTANCES, data.len() + second_half.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, second_half[0]); + (data, second_half) + }); + }); + } +} + +fn sort(c: &mut Criterion) { + let mut group = c.benchmark_group(format!("vector_ops/sort/instances={NUM_INSTANCES}")); + group.throughput(criterion::Throughput::Elements(NUM_INSTANCES as _)); + + { + let data: SmallVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).rev().collect(); + + group.bench_function(format!("smallvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data = data.clone(); + data.sort_unstable(); + assert_eq!(NUM_INSTANCES, data.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, data[NUM_INSTANCES / 2]); + data + }); + }); + } + + { + let data: TinyVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).rev().collect(); + + group.bench_function(format!("tinyvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data = data.clone(); + data.sort_unstable(); + assert_eq!(NUM_INSTANCES, data.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, data[NUM_INSTANCES / 2]); + data + }); + }); + } + + { + let data: Vec = (0..NUM_INSTANCES as i64).rev().collect(); + + group.bench_function("vec", |b| { + b.iter(|| { + let mut data = data.clone(); + data.sort_unstable(); + assert_eq!(NUM_INSTANCES, data.len()); + assert_eq!(NUM_INSTANCES as i64 / 2, data[NUM_INSTANCES / 2]); + data + }); + }); + } +} + +fn swap(c: &mut Criterion) { + let mut group = c.benchmark_group(format!("vector_ops/swap/instances={NUM_INSTANCES}")); + group.throughput(criterion::Throughput::Elements(NUM_INSTANCES as _)); + + { + let data: SmallVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).collect(); + let swaps: SmallVec<[usize; SMALLVEC_SIZE]> = (0..NUM_INSTANCES).rev().collect(); + + group.bench_function(format!("smallvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data1 = data.clone(); + let data2 = data.clone(); + for &swap in &swaps { + data1[NUM_INSTANCES - swap - 1] = data2[swap]; + } + assert_eq!(NUM_INSTANCES, data1.len()); + assert_eq!(NUM_INSTANCES, data2.len()); + assert_eq!( + (NUM_INSTANCES as i64 / 2).max(1) - 1, + data1[NUM_INSTANCES / 2] + ); + (data1, data2) + }); + }); + } + + { + let data: TinyVec<[i64; SMALLVEC_SIZE]> = (0..NUM_INSTANCES as i64).collect(); + let swaps: TinyVec<[usize; SMALLVEC_SIZE]> = (0..NUM_INSTANCES).rev().collect(); + + group.bench_function(format!("tinyvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data1 = data.clone(); + let data2 = data.clone(); + for &swap in &swaps { + data1[NUM_INSTANCES - swap - 1] = data2[swap]; + } + assert_eq!(NUM_INSTANCES, data1.len()); + assert_eq!(NUM_INSTANCES, data2.len()); + assert_eq!( + (NUM_INSTANCES as i64 / 2).max(1) - 1, + 
data1[NUM_INSTANCES / 2] + ); + (data1, data2) + }); + }); + } + + { + let data: Vec = (0..NUM_INSTANCES as i64).collect(); + let swaps: Vec = (0..NUM_INSTANCES).rev().collect(); + + group.bench_function("vec", |b| { + b.iter(|| { + let mut data1 = data.clone(); + let data2 = data.clone(); + for &swap in &swaps { + data1[NUM_INSTANCES - swap - 1] = data2[swap]; + } + assert_eq!(NUM_INSTANCES, data1.len()); + assert_eq!(NUM_INSTANCES, data2.len()); + assert_eq!( + (NUM_INSTANCES as i64 / 2).max(1) - 1, + data1[NUM_INSTANCES / 2] + ); + (data1, data2) + }); + }); + } +} + +fn swap_opt(c: &mut Criterion) { + let mut group = c.benchmark_group(format!("vector_ops/swap_opt/instances={NUM_INSTANCES}")); + group.throughput(criterion::Throughput::Elements(NUM_INSTANCES as _)); + + { + let data: SmallVec<[Option; SMALLVEC_SIZE]> = + (0..NUM_INSTANCES as i64).map(Some).collect(); + let swaps: SmallVec<[usize; SMALLVEC_SIZE]> = (0..NUM_INSTANCES).rev().collect(); + + group.bench_function(format!("smallvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data1 = data.clone(); + let mut data2 = data.clone(); + for &swap in &swaps { + data1[NUM_INSTANCES - swap - 1] = data2[swap].take(); + } + assert_eq!(NUM_INSTANCES, data1.len()); + assert_eq!(NUM_INSTANCES, data2.len()); + assert_eq!( + Some((NUM_INSTANCES as i64 / 2).max(1) - 1), + data1[NUM_INSTANCES / 2] + ); + (data1, data2) + }); + }); + } + + { + let data: TinyVec<[Option; SMALLVEC_SIZE]> = + (0..NUM_INSTANCES as i64).map(Some).collect(); + let swaps: TinyVec<[usize; SMALLVEC_SIZE]> = (0..NUM_INSTANCES).rev().collect(); + + group.bench_function(format!("tinyvec/n={SMALLVEC_SIZE}"), |b| { + b.iter(|| { + let mut data1 = data.clone(); + let mut data2 = data.clone(); + for &swap in &swaps { + data1[NUM_INSTANCES - swap - 1] = data2[swap].take(); + } + assert_eq!(NUM_INSTANCES, data1.len()); + assert_eq!(NUM_INSTANCES, data2.len()); + assert_eq!( + Some((NUM_INSTANCES as i64 / 2).max(1) - 1), + data1[NUM_INSTANCES / 2] + ); + (data1, data2) + }); + }); + } + + { + let data: Vec> = (0..NUM_INSTANCES as i64).map(Some).collect(); + let swaps: Vec = (0..NUM_INSTANCES).rev().collect(); + + group.bench_function("vec", |b| { + b.iter(|| { + let mut data1 = data.clone(); + let mut data2 = data.clone(); + for &swap in &swaps { + data1[NUM_INSTANCES - swap - 1] = data2[swap].take(); + } + assert_eq!(NUM_INSTANCES, data1.len()); + assert_eq!(NUM_INSTANCES, data2.len()); + assert_eq!( + Some((NUM_INSTANCES as i64 / 2).max(1) - 1), + data1[NUM_INSTANCES / 2] + ); + (data1, data2) + }); + }); + } +} From d5b68f2e64f7f4894b8e4b3f6fb0283001665101 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 3 Apr 2023 10:01:46 +0200 Subject: [PATCH 04/89] Tracked 3D cameras lead now to on-hover rays in other space views that show the same camera but don't track it. (#1751) In the same way as a 2D scene causes a on-hover ray in all space views that contain the space camera at which the 2D view "sits". 
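Roughly, the new flow (a simplified sketch reusing the names from the diff below; not
verbatim code): hovering in a 3D view now records both the hovered position and its
projection into every space camera of the scene:

    HoveredSpace::ThreeD {
        space_3d,               // the hovered 3D space
        pos,                    // hovered point in world space, if any
        tracked_space_camera,   // camera this view is tracked to, if any
        point_in_space_cameras: space_cameras
            .iter()
            .map(|cam| (cam.instance_path_hash, pos.and_then(|p| cam.project_onto_2d(p))))
            .collect(),
    }

Any other 3D view showing `tracked_space_camera` without tracking it then reconstructs a
picking ray from that camera's position through `pos` (see `add_picking_ray`).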
--- crates/re_viewer/src/misc/selection_state.rs | 13 +- .../src/ui/view_spatial/scene/mod.rs | 2 +- .../view_spatial/scene/scene_part/cameras.rs | 1 - .../src/ui/view_spatial/space_camera_3d.rs | 11 +- crates/re_viewer/src/ui/view_spatial/ui_2d.rs | 8 +- crates/re_viewer/src/ui/view_spatial/ui_3d.rs | 139 ++++++++++++------ 6 files changed, 112 insertions(+), 62 deletions(-) diff --git a/crates/re_viewer/src/misc/selection_state.rs b/crates/re_viewer/src/misc/selection_state.rs index a57c5a40cae2..6597f0af161d 100644 --- a/crates/re_viewer/src/misc/selection_state.rs +++ b/crates/re_viewer/src/misc/selection_state.rs @@ -3,7 +3,7 @@ use egui::NumExt; use lazy_static::lazy_static; use nohash_hasher::IntMap; -use re_data_store::{EntityPath, LogDb}; +use re_data_store::{EntityPath, InstancePath, InstancePathHash, LogDb}; use re_log_types::{component_types::InstanceKey, EntityPathHash}; use re_renderer::OutlineMaskPreference; @@ -29,8 +29,15 @@ pub enum HoveredSpace { /// The 3D space with the camera(s) space_3d: EntityPath, - /// 2D spaces and pixel coordinates (with Z=depth) - target_spaces: Vec<(EntityPath, Option)>, + /// The point in 3D space that is hovered, if any. + pos: Option, + + /// Path of a space camera, this 3D space is viewed through. + /// (None for a free floating Eye) + tracked_space_camera: Option, + + /// Corresponding 2D spaces and pixel coordinates (with Z=depth) + point_in_space_cameras: Vec<(InstancePathHash, Option)>, }, } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs index 9d10b54a77e2..7f7c9cb90a7c 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs @@ -232,7 +232,7 @@ impl SceneSpatial { if self .space_cameras .iter() - .any(|camera| &camera.entity_path != space_info_path) + .any(|camera| camera.instance_path_hash.entity_path_hash != space_info_path.hash()) { return SpatialNavigationMode::ThreeD; } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/cameras.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/cameras.rs index 0e6daaf6c109..72bbdc40c883 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/cameras.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/cameras.rs @@ -89,7 +89,6 @@ impl CamerasPart { let frustum_length = *props.pinhole_image_plane_distance.get(); scene.space_cameras.push(SpaceCamera3D { - entity_path: entity_path.clone(), instance_path_hash, view_coordinates, world_from_camera, diff --git a/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs b/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs index c73cf94c1cd1..fc687bd958df 100644 --- a/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/space_camera_3d.rs @@ -1,18 +1,15 @@ use glam::{vec3, Affine3A, Mat3, Quat, Vec2, Vec3}; use macaw::{IsoTransform, Ray3}; -use re_data_store::{EntityPath, InstancePathHash}; +use re_data_store::InstancePathHash; use re_log_types::ViewCoordinates; /// A logged camera that connects spaces. #[derive(Clone)] pub struct SpaceCamera3D { - /// Path to the entity which has the projection (pinhole, ortho or otherwise) transforms. + /// Path to the instance which has the projection (pinhole, ortho or otherwise) transforms. /// - /// We expect the camera transform to apply to this entity and every path below it. - pub entity_path: EntityPath, - - /// The instance that has the projection. 
+ /// We expect the camera transform to apply to this instance and every path below it. pub instance_path_hash: InstancePathHash, /// The coordinate system of the camera ("view-space"). @@ -49,7 +46,7 @@ impl SpaceCamera3D { match from_rub_quat(self.view_coordinates) { Ok(from_rub) => Some(self.world_from_camera * IsoTransform::from_quat(from_rub)), Err(err) => { - re_log::warn_once!("Camera {:?}: {err}", self.entity_path); + re_log::warn_once!("Camera {:?}: {err}", self.instance_path_hash); None } } diff --git a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs index 5813e171e377..df3edcee23a4 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs @@ -581,9 +581,13 @@ fn show_projections_from_3d_space( ui_from_space: &RectTransform, ) -> Vec { let mut shapes = Vec::new(); - if let HoveredSpace::ThreeD { target_spaces, .. } = ctx.selection_state().hovered_space() { + if let HoveredSpace::ThreeD { + point_in_space_cameras: target_spaces, + .. + } = ctx.selection_state().hovered_space() + { for (space_2d, pos_2d) in target_spaces { - if space_2d == space { + if space_2d.entity_path_hash == space.hash() { if let Some(pos_2d) = pos_2d { // User is hovering a 2D point inside a 3D view. let pos_in_ui = ui_from_space.transform_pos(pos2(pos_2d.x, pos_2d.y)); diff --git a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs index 40f2a4e6a600..01dc139aee03 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs @@ -29,6 +29,7 @@ use crate::{ use super::{ eye::{Eye, OrbitEye}, + scene::SceneSpatialPrimitives, ViewSpatialState, }; @@ -454,13 +455,29 @@ pub fn view_3d( .resolve(&ctx.log_db.entity_db) .map(|instance_path| Item::InstancePath(Some(space_view_id), instance_path)) })); - state.state_3d.hovered_point = picking_result + + let hovered_point = picking_result .opaque_hit .as_ref() .or_else(|| picking_result.transparent_hits.last()) .map(|hit| picking_result.space_position(hit)); - project_onto_other_spaces(ctx, &scene.space_cameras, &mut state.state_3d, space); + ctx.selection_state_mut() + .set_hovered_space(HoveredSpace::ThreeD { + space_3d: space.clone(), + pos: hovered_point, + tracked_space_camera: state.state_3d.tracked_camera.clone(), + point_in_space_cameras: scene + .space_cameras + .iter() + .map(|cam| { + ( + cam.instance_path_hash, + hovered_point.and_then(|pos| cam.project_onto_2d(pos)), + ) + }) + .collect(), + }); } else { state.previous_picking_result = None; } @@ -516,7 +533,12 @@ pub fn view_3d( view_builder.schedule_screenshot(ctx.render_ctx, space_view_id.gpu_readback_id(), mode); } - show_projections_from_2d_space(ctx, &mut scene, &state.scene_bbox_accum); + show_projections_from_2d_space( + ctx, + &mut scene, + &state.state_3d.tracked_camera, + &state.scene_bbox_accum, + ); if state.state_3d.show_axes { let axis_length = 1.0; // The axes are also a measuring stick @@ -591,13 +613,16 @@ pub fn view_3d( fn show_projections_from_2d_space( ctx: &mut ViewerContext<'_>, scene: &mut SceneSpatial, + tracked_space_camera: &Option, scene_bbox_accum: &BoundingBox, ) { - if let HoveredSpace::TwoD { space_2d, pos } = ctx.selection_state().hovered_space() { - let mut line_batch = scene.primitives.line_strips.batch("picking ray"); - - for cam in &scene.space_cameras { - if &cam.entity_path == space_2d { + match ctx.selection_state().hovered_space() { + HoveredSpace::TwoD { space_2d, pos } => { + 
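+            // Hovering a 2D view: if that 2D space is the image plane of one of our space
+            // cameras, shoot a picking ray out of that camera through the hovered pixel.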
if let Some(cam) = scene + .space_cameras + .iter() + .find(|cam| cam.instance_path_hash.entity_path_hash == space_2d.hash()) + { if let Some(ray) = cam.unproject_as_ray(glam::vec2(pos.x, pos.y)) { // Render a thick line to the actual z value if any and a weaker one as an extension // If we don't have a z value, we only render the thick one. @@ -607,54 +632,72 @@ fn show_projections_from_2d_space( cam.picture_plane_distance }; - let origin = ray.point_along(0.0); - // No harm in making this ray _very_ long. (Infinite messes with things though!) - let fallback_ray_end = ray.point_along(scene_bbox_accum.size().length() * 10.0); - - if let Some(line_length) = thick_ray_length { - let main_ray_end = ray.point_along(line_length); - line_batch - .add_segment(origin, main_ray_end) - .color(egui::Color32::WHITE) - .flags(re_renderer::renderer::LineStripFlags::NO_COLOR_GRADIENT) - .radius(Size::new_points(1.0)); - line_batch - .add_segment(main_ray_end, fallback_ray_end) - .color(egui::Color32::DARK_GRAY) - // TODO(andreas): Make this dashed. - .flags(re_renderer::renderer::LineStripFlags::NO_COLOR_GRADIENT) - .radius(Size::new_points(0.5)); - } else { - line_batch - .add_segment(origin, fallback_ray_end) - .color(egui::Color32::WHITE) - .flags(re_renderer::renderer::LineStripFlags::NO_COLOR_GRADIENT) - .radius(Size::new_points(1.0)); - } + add_picking_ray( + &mut scene.primitives, + ray, + scene_bbox_accum, + thick_ray_length, + ); } } } + HoveredSpace::ThreeD { + pos: Some(pos), + tracked_space_camera: Some(camera_path), + .. + } => { + if tracked_space_camera + .as_ref() + .map_or(true, |tracked| tracked != camera_path) + { + if let Some(cam) = scene + .space_cameras + .iter() + .find(|cam| cam.instance_path_hash == camera_path.hash()) + { + let cam_to_pos = *pos - cam.position(); + let distance = cam_to_pos.length(); + let ray = macaw::Ray3::from_origin_dir(cam.position(), cam_to_pos / distance); + add_picking_ray(&mut scene.primitives, ray, scene_bbox_accum, Some(distance)); + } + } + } + _ => {} } } -fn project_onto_other_spaces( - ctx: &mut ViewerContext<'_>, - space_cameras: &[SpaceCamera3D], - state: &mut View3DState, - space: &EntityPath, +fn add_picking_ray( + primitives: &mut SceneSpatialPrimitives, + ray: macaw::Ray3, + scene_bbox_accum: &BoundingBox, + thick_ray_length: Option, ) { - let mut target_spaces = vec![]; - for cam in space_cameras { - let point_in_2d = state - .hovered_point - .and_then(|hovered_point| cam.project_onto_2d(hovered_point)); - target_spaces.push((cam.entity_path.clone(), point_in_2d)); + let mut line_batch = primitives.line_strips.batch("picking ray"); + + let origin = ray.point_along(0.0); + // No harm in making this ray _very_ long. (Infinite messes with things though!) + let fallback_ray_end = ray.point_along(scene_bbox_accum.size().length() * 10.0); + + if let Some(line_length) = thick_ray_length { + let main_ray_end = ray.point_along(line_length); + line_batch + .add_segment(origin, main_ray_end) + .color(egui::Color32::WHITE) + .flags(re_renderer::renderer::LineStripFlags::NO_COLOR_GRADIENT) + .radius(Size::new_points(1.0)); + line_batch + .add_segment(main_ray_end, fallback_ray_end) + .color(egui::Color32::DARK_GRAY) + // TODO(andreas): Make this dashed. 
+ .flags(re_renderer::renderer::LineStripFlags::NO_COLOR_GRADIENT) + .radius(Size::new_points(0.5)); + } else { + line_batch + .add_segment(origin, fallback_ray_end) + .color(egui::Color32::WHITE) + .flags(re_renderer::renderer::LineStripFlags::NO_COLOR_GRADIENT) + .radius(Size::new_points(1.0)); } - ctx.selection_state_mut() - .set_hovered_space(HoveredSpace::ThreeD { - space_3d: space.clone(), - target_spaces, - }); } fn default_eye(scene_bbox: &macaw::BoundingBox, space_specs: &SpaceSpecs) -> OrbitEye { From 27288541523ffdce04e16c3007830902e0feb138 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Mon, 3 Apr 2023 11:49:38 +0200 Subject: [PATCH 05/89] Improve dealing with raw buffers for texture read/write (#1744) * Replace TextureRowDataInfo with the more versatile Texture2DBufferInfo * comment & naming fixes --- .../src/allocator/cpu_write_gpu_read_belt.rs | 10 +- .../src/allocator/gpu_readback_belt.rs | 10 +- .../src/draw_phases/picking_layer.rs | 44 ++++----- .../re_renderer/src/draw_phases/screenshot.rs | 13 ++- .../re_renderer/src/renderer/depth_cloud.rs | 21 ++--- crates/re_renderer/src/wgpu_resources/mod.rs | 93 +++++++++++++++---- 6 files changed, 119 insertions(+), 72 deletions(-) diff --git a/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs b/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs index db9f14b0a8fa..a3900e18a143 100644 --- a/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs +++ b/crates/re_renderer/src/allocator/cpu_write_gpu_read_belt.rs @@ -1,6 +1,6 @@ use std::{num::NonZeroU32, sync::mpsc}; -use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool, TextureRowDataInfo}; +use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool, Texture2DBufferInfo}; /// A sub-allocated staging buffer that can be written to. /// @@ -100,15 +100,13 @@ where destination: wgpu::ImageCopyTexture<'_>, copy_extent: glam::UVec2, ) { - let bytes_per_row = TextureRowDataInfo::new(destination.texture.format(), copy_extent.x) - .bytes_per_row_padded; + let buffer_info = Texture2DBufferInfo::new(destination.texture.format(), copy_extent); // Validate that we stay within the written part of the slice (wgpu can't fully know our intention here, so we have to check). // We go one step further and require the size to be exactly equal - it's too unlikely that you wrote more than is needed! // (and if you did you probably have regrets anyways!) - let required_buffer_size = bytes_per_row * copy_extent.y; debug_assert_eq!( - required_buffer_size as usize, + buffer_info.buffer_size_padded as usize, self.num_written() * std::mem::size_of::() ); @@ -117,7 +115,7 @@ where buffer: &self.chunk_buffer, layout: wgpu::ImageDataLayout { offset: self.byte_offset_in_chunk_buffer, - bytes_per_row: NonZeroU32::new(bytes_per_row), + bytes_per_row: NonZeroU32::new(buffer_info.bytes_per_row_padded), rows_per_image: None, }, }, diff --git a/crates/re_renderer/src/allocator/gpu_readback_belt.rs b/crates/re_renderer/src/allocator/gpu_readback_belt.rs index 09fcdd981054..6facafc3e761 100644 --- a/crates/re_renderer/src/allocator/gpu_readback_belt.rs +++ b/crates/re_renderer/src/allocator/gpu_readback_belt.rs @@ -1,6 +1,6 @@ use std::{num::NonZeroU32, ops::Range, sync::mpsc}; -use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool, TextureRowDataInfo}; +use crate::wgpu_resources::{BufferDesc, GpuBuffer, GpuBufferPool, Texture2DBufferInfo}; /// Identifier used to identify a buffer upon retrieval of the data. 
/// @@ -61,13 +61,11 @@ impl GpuReadbackBuffer { source.texture.format().describe().block_size as u64, ); - let bytes_per_row = TextureRowDataInfo::new(source.texture.format(), copy_extents.x) - .bytes_per_row_padded; - let num_bytes = bytes_per_row * copy_extents.y; + let buffer_info = Texture2DBufferInfo::new(source.texture.format(), *copy_extents); // Validate that stay within the slice (wgpu can't fully know our intention here, so we have to check). debug_assert!( - (num_bytes as u64) <= self.range_in_chunk.end - start_offset, + buffer_info.buffer_size_padded <= self.range_in_chunk.end - start_offset, "Texture data is too large to fit into the readback buffer!" ); @@ -77,7 +75,7 @@ impl GpuReadbackBuffer { buffer: &self.chunk_buffer, layout: wgpu::ImageDataLayout { offset: start_offset, - bytes_per_row: NonZeroU32::new(bytes_per_row), + bytes_per_row: NonZeroU32::new(buffer_info.bytes_per_row_padded), rows_per_image: None, }, }, diff --git a/crates/re_renderer/src/draw_phases/picking_layer.rs b/crates/re_renderer/src/draw_phases/picking_layer.rs index 8a52147077ba..5270f4bf6093 100644 --- a/crates/re_renderer/src/draw_phases/picking_layer.rs +++ b/crates/re_renderer/src/draw_phases/picking_layer.rs @@ -13,7 +13,7 @@ use crate::{ allocator::create_and_fill_uniform_buffer, global_bindings::FrameUniformBuffer, view_builder::ViewBuilder, - wgpu_resources::{GpuBindGroup, GpuTexture, TextureDesc, TextureRowDataInfo}, + wgpu_resources::{GpuBindGroup, GpuTexture, Texture2DBufferInfo, TextureDesc}, DebugLabel, GpuReadbackBuffer, GpuReadbackIdentifier, IntRect, RenderContext, }; @@ -122,12 +122,23 @@ impl PickingLayerProcessor { readback_identifier: GpuReadbackIdentifier, readback_user_data: T, ) -> Self { - let row_info = TextureRowDataInfo::new(Self::PICKING_LAYER_FORMAT, picking_rect.width()); - let buffer_size = row_info.bytes_per_row_padded * picking_rect.height(); + let row_info_id = Texture2DBufferInfo::new(Self::PICKING_LAYER_FORMAT, picking_rect.extent); + //let row_info_depth = + //Texture2DBufferInfo::new(Self::PICKING_LAYER_FORMAT, picking_rect.extent); + + // Offset of the depth buffer in the readback buffer needs to be aligned to size of a depth pixel. + // This is "trivially true" if the size of the depth format is a multiple of the size of the id format. + debug_assert!( + Self::PICKING_LAYER_FORMAT.describe().block_size + % Self::PICKING_LAYER_DEPTH_FORMAT.describe().block_size + == 0 + ); + let buffer_size = row_info_id.buffer_size_padded; // + row_info_depth.buffer_size_padded; + let readback_buffer = ctx.gpu_readback_belt.lock().allocate( &ctx.device, &ctx.gpu_resources.buffers, - buffer_size as u64, + buffer_size, readback_identifier, Box::new(ReadbackBeltMetadata { picking_rect, @@ -279,31 +290,12 @@ impl PickingLayerProcessor { ctx.gpu_readback_belt .lock() .readback_data::>(identifier, |data, metadata| { - // Due to https://github.com/gfx-rs/wgpu/issues/3508 the data might be completely unaligned, - // so much, that we can't interpret it just as `PickingLayerId`. - // Therefore, we have to do a copy of the data regardless. - let row_info = TextureRowDataInfo::new( + let buffer_info_id = Texture2DBufferInfo::new( Self::PICKING_LAYER_FORMAT, - metadata.picking_rect.extent.x, + metadata.picking_rect.extent, ); - // Copies need to use [u8] because of aforementioned alignment issues. 
- let mut picking_data = vec![ - PickingLayerId::default(); - (metadata.picking_rect.extent.x * metadata.picking_rect.extent.y) - as usize - ]; - let picking_data_as_u8 = bytemuck::cast_slice_mut(&mut picking_data); - for row in 0..metadata.picking_rect.extent.y { - let offset_padded = (row_info.bytes_per_row_padded * row) as usize; - let offset_unpadded = (row_info.bytes_per_row_unpadded * row) as usize; - picking_data_as_u8[offset_unpadded - ..(offset_unpadded + row_info.bytes_per_row_unpadded as usize)] - .copy_from_slice( - &data[offset_padded - ..(offset_padded + row_info.bytes_per_row_unpadded as usize)], - ); - } + let picking_data = buffer_info_id.remove_padding_and_convert(data); result = Some(PickingResult { picking_data, diff --git a/crates/re_renderer/src/draw_phases/screenshot.rs b/crates/re_renderer/src/draw_phases/screenshot.rs index 875284986c90..68c05b3b545c 100644 --- a/crates/re_renderer/src/draw_phases/screenshot.rs +++ b/crates/re_renderer/src/draw_phases/screenshot.rs @@ -11,7 +11,7 @@ //! Or alternatively try to render the images in several tiles πŸ€”. In any case this would greatly improve quality! use crate::{ - wgpu_resources::{GpuTexture, TextureDesc, TextureRowDataInfo}, + wgpu_resources::{GpuTexture, Texture2DBufferInfo, TextureDesc}, DebugLabel, GpuReadbackBuffer, GpuReadbackIdentifier, RenderContext, }; @@ -37,12 +37,11 @@ impl ScreenshotProcessor { readback_identifier: GpuReadbackIdentifier, readback_user_data: T, ) -> Self { - let row_info = TextureRowDataInfo::new(Self::SCREENSHOT_COLOR_FORMAT, resolution.x); - let buffer_size = row_info.bytes_per_row_padded * resolution.y; + let buffer_info = Texture2DBufferInfo::new(Self::SCREENSHOT_COLOR_FORMAT, resolution); let screenshot_readback_buffer = ctx.gpu_readback_belt.lock().allocate( &ctx.device, &ctx.gpu_resources.buffers, - buffer_size as u64, + buffer_info.buffer_size_padded, readback_identifier, Box::new(ReadbackBeltMetadata { extent: resolution, @@ -130,9 +129,9 @@ impl ScreenshotProcessor { .lock() .readback_data::>(identifier, |data: &[u8], metadata| { screenshot_was_available = Some(()); - let texture_row_info = - TextureRowDataInfo::new(Self::SCREENSHOT_COLOR_FORMAT, metadata.extent.x); - let texture_data = texture_row_info.remove_padding(data); + let buffer_info = + Texture2DBufferInfo::new(Self::SCREENSHOT_COLOR_FORMAT, metadata.extent); + let texture_data = buffer_info.remove_padding(data); on_screenshot(&texture_data, metadata.extent, metadata.user_data); }); screenshot_was_available diff --git a/crates/re_renderer/src/renderer/depth_cloud.rs b/crates/re_renderer/src/renderer/depth_cloud.rs index 358132110442..c69def6eca10 100644 --- a/crates/re_renderer/src/renderer/depth_cloud.rs +++ b/crates/re_renderer/src/renderer/depth_cloud.rs @@ -21,8 +21,8 @@ use crate::{ view_builder::ViewBuilder, wgpu_resources::{ BindGroupDesc, BindGroupEntry, BindGroupLayoutDesc, GpuBindGroup, GpuBindGroupLayoutHandle, - GpuRenderPipelineHandle, GpuTexture, PipelineLayoutDesc, RenderPipelineDesc, TextureDesc, - TextureRowDataInfo, + GpuRenderPipelineHandle, GpuTexture, PipelineLayoutDesc, RenderPipelineDesc, + Texture2DBufferInfo, TextureDesc, }, ColorMap, OutlineMaskPreference, PickingLayerProcessor, }; @@ -336,34 +336,33 @@ fn create_and_upload_texture( .textures .alloc(&ctx.device, &depth_texture_desc); - let TextureRowDataInfo { - bytes_per_row_unpadded: bytes_per_row_unaligned, - bytes_per_row_padded, - } = TextureRowDataInfo::new(depth_texture_desc.format, depth_texture_desc.size.width); - // Not supporting 
compressed formats here. debug_assert!(depth_texture_desc.format.describe().block_dimensions == (1, 1)); + let buffer_info = + Texture2DBufferInfo::new(depth_texture_desc.format, depth_cloud.depth_dimensions); + // TODO(andreas): CpuGpuWriteBelt should make it easier to do this. - let bytes_padding_per_row = (bytes_per_row_padded - bytes_per_row_unaligned) as usize; + let bytes_padding_per_row = + (buffer_info.bytes_per_row_padded - buffer_info.bytes_per_row_unpadded) as usize; // Sanity check the padding size. If this happens something is seriously wrong, as it would imply // that we can't express the required alignment with the block size. debug_assert!( bytes_padding_per_row % std::mem::size_of::() == 0, "Padding is not a multiple of pixel size. Can't correctly pad the texture data" ); - let num_pixel_padding_per_row = bytes_padding_per_row / std::mem::size_of::(); let mut depth_texture_staging = ctx.cpu_write_gpu_read_belt.lock().allocate::( &ctx.device, &ctx.gpu_resources.buffers, - data.len() + num_pixel_padding_per_row * depth_texture_desc.size.height as usize, + buffer_info.buffer_size_padded as usize / std::mem::size_of::(), ); // Fill with a single copy if possible, otherwise do multiple, filling in padding. - if num_pixel_padding_per_row == 0 { + if bytes_padding_per_row == 0 { depth_texture_staging.extend_from_slice(data); } else { + let num_pixel_padding_per_row = bytes_padding_per_row / std::mem::size_of::(); for row in data.chunks(depth_texture_desc.size.width as usize) { depth_texture_staging.extend_from_slice(row); depth_texture_staging diff --git a/crates/re_renderer/src/wgpu_resources/mod.rs b/crates/re_renderer/src/wgpu_resources/mod.rs index ef272248e3dd..403e46350b47 100644 --- a/crates/re_renderer/src/wgpu_resources/mod.rs +++ b/crates/re_renderer/src/wgpu_resources/mod.rs @@ -117,9 +117,9 @@ impl WgpuResourcePools { } } -/// Utility for dealing with rows of raw texture data. -#[derive(Clone, Copy)] -pub struct TextureRowDataInfo { +/// Utility for dealing with buffers containing raw 2D texture data. +#[derive(Clone)] +pub struct Texture2DBufferInfo { /// How many bytes per row contain actual data. pub bytes_per_row_unpadded: u32, @@ -127,34 +127,57 @@ pub struct TextureRowDataInfo { /// /// Padding bytes are always at the end of a row. pub bytes_per_row_padded: u32, + + /// Size required for an unpadded buffer. + pub buffer_size_unpadded: wgpu::BufferAddress, + + /// Size required for a padded buffer as it is read/written from/to the GPU. 
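+    ///
+    /// (Each row is padded up to `wgpu::COPY_BYTES_PER_ROW_ALIGNMENT`, see `new` below.)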
+ pub buffer_size_padded: wgpu::BufferAddress, } -impl TextureRowDataInfo { - pub fn new(format: wgpu::TextureFormat, width: u32) -> Self { +impl Texture2DBufferInfo { + #[inline] + pub fn new(format: wgpu::TextureFormat, extent: glam::UVec2) -> Self { let format_info = format.describe(); - let width_blocks = width / format_info.block_dimensions.0 as u32; - let bytes_per_row_unaligned = width_blocks * format_info.block_size as u32; + + let width_blocks = extent.x / format_info.block_dimensions.0 as u32; + let height_blocks = extent.y / format_info.block_dimensions.1 as u32; + + let bytes_per_row_unpadded = width_blocks * format_info.block_size as u32; + let bytes_per_row_padded = + wgpu::util::align_to(bytes_per_row_unpadded, wgpu::COPY_BYTES_PER_ROW_ALIGNMENT); Self { - bytes_per_row_unpadded: bytes_per_row_unaligned, - bytes_per_row_padded: wgpu::util::align_to( - bytes_per_row_unaligned, - wgpu::COPY_BYTES_PER_ROW_ALIGNMENT, - ), + bytes_per_row_unpadded, + bytes_per_row_padded, + buffer_size_unpadded: (bytes_per_row_unpadded * height_blocks) as wgpu::BufferAddress, + buffer_size_padded: (bytes_per_row_padded * height_blocks) as wgpu::BufferAddress, } } + #[inline] + pub fn num_rows(&self) -> u32 { + self.buffer_size_padded as u32 / self.bytes_per_row_padded + } + /// Removes the padding from a buffer containing gpu texture data. + /// + /// The passed in buffer is to be expected to be exactly of size [`Texture2DBufferInfo::buffer_size_padded`]. + /// + /// Note that if you're passing in gpu data, there no alignment guarantees on the returned slice, + /// do NOT convert it using [`bytemuck`]. Use [`Texture2DBufferInfo::remove_padding_and_convert`] instead. pub fn remove_padding<'a>(&self, buffer: &'a [u8]) -> Cow<'a, [u8]> { + crate::profile_function!(); + + assert!(buffer.len() as wgpu::BufferAddress == self.buffer_size_padded); + if self.bytes_per_row_padded == self.bytes_per_row_unpadded { return Cow::Borrowed(buffer); } - let height = (buffer.len() as u32) / self.bytes_per_row_padded; - let mut unpadded_buffer = - Vec::with_capacity((self.bytes_per_row_unpadded * height) as usize); + let mut unpadded_buffer = Vec::with_capacity(self.buffer_size_unpadded as _); - for row in 0..height { + for row in 0..self.num_rows() { let offset = (self.bytes_per_row_padded * row) as usize; unpadded_buffer.extend_from_slice( &buffer[offset..(offset + self.bytes_per_row_unpadded as usize)], @@ -163,4 +186,42 @@ impl TextureRowDataInfo { unpadded_buffer.into() } + + /// Removes the padding from a buffer containing gpu texture data and remove convert to a given type. + /// + /// The passed in buffer is to be expected to be exactly of size [`Texture2DBufferInfo::buffer_size_padded`]. + /// + /// The unpadded row size is expected to be a multiple of the size of the target type. + /// (Which means that, while uncommon, it technically doesn't need to be as big as a block in the pixel - this can be useful for e.g. packing wide bitfields) + pub fn remove_padding_and_convert(&self, buffer: &[u8]) -> Vec { + crate::profile_function!(); + + assert!(buffer.len() as wgpu::BufferAddress == self.buffer_size_padded); + assert!(self.bytes_per_row_unpadded % std::mem::size_of::() as u32 == 0); + + // Due to https://github.com/gfx-rs/wgpu/issues/3508 the data might be completely unaligned, + // so much, that we can't even interpret it as e.g. a u32 slice. + // Therefore, we have to do a copy of the data regardless of whether it's padded or not. 
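+        // (Contrast `remove_padding` above, which stays in `u8` land and can therefore
+        // return a borrow instead of copying whenever there is no padding.)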
+ + let mut unpadded_buffer: Vec = vec![ + T::zeroed(); + (self.num_rows() * self.bytes_per_row_unpadded / std::mem::size_of::() as u32) + as usize + ]; // TODO(andreas): Consider using unsafe set_len() instead of vec![] to avoid zeroing the memory. + + // The copy has to happen on a u8 slice, because any other type would assume some alignment that we can't guarantee because of the above. + let unpadded_buffer_u8_view = bytemuck::cast_slice_mut(&mut unpadded_buffer); + + for row in 0..self.num_rows() { + let offset_padded = (self.bytes_per_row_padded * row) as usize; + let offset_unpadded = (self.bytes_per_row_unpadded * row) as usize; + unpadded_buffer_u8_view + [offset_unpadded..(offset_unpadded + self.bytes_per_row_unpadded as usize)] + .copy_from_slice( + &buffer[offset_padded..(offset_padded + self.bytes_per_row_unpadded as usize)], + ); + } + + unpadded_buffer + } } From 44393097b9619c49d4bfa16664d75dd67a49d9c5 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Mon, 3 Apr 2023 12:31:17 +0200 Subject: [PATCH 06/89] `arrow2` erased refcounted clones benchmarks (#1745) * arrow2 erased refcounted clone benchmarks * lint * addressing PR comments * dude --- crates/re_arrow_store/Cargo.toml | 4 + crates/re_arrow_store/benches/arrow2.rs | 244 ++++++++++++++++++++++++ 2 files changed, 248 insertions(+) create mode 100644 crates/re_arrow_store/benches/arrow2.rs diff --git a/crates/re_arrow_store/Cargo.toml b/crates/re_arrow_store/Cargo.toml index 9e7ddde3a6a3..a1bef5e6c886 100644 --- a/crates/re_arrow_store/Cargo.toml +++ b/crates/re_arrow_store/Cargo.toml @@ -114,6 +114,10 @@ required-features = ["polars"] name = "data_store" harness = false +[[bench]] +name = "arrow2" +harness = false + [[bench]] name = "arrow2_convert" harness = false diff --git a/crates/re_arrow_store/benches/arrow2.rs b/crates/re_arrow_store/benches/arrow2.rs new file mode 100644 index 000000000000..51f10a0c2fe5 --- /dev/null +++ b/crates/re_arrow_store/benches/arrow2.rs @@ -0,0 +1,244 @@ +//! Keeping track of performance issues/regressions in `arrow2` that directly affect us. + +#[global_allocator] +static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; + +use std::sync::Arc; + +use arrow2::array::{Array, PrimitiveArray, StructArray}; +use criterion::{criterion_group, criterion_main, Criterion}; +use itertools::Itertools; +use re_log_types::{ + component_types::{InstanceKey, Point2D}, + datagen::{build_some_instances, build_some_point2d}, + DataCell, +}; + +// --- + +criterion_group!(benches, estimated_size_bytes); +criterion_main!(benches); + +// --- + +#[cfg(not(debug_assertions))] +const NUM_ROWS: usize = 10_000; +#[cfg(not(debug_assertions))] +const NUM_INSTANCES: usize = 100; + +// `cargo test` also runs the benchmark setup code, so make sure they run quickly: +#[cfg(debug_assertions)] +const NUM_ROWS: usize = 1; +#[cfg(debug_assertions)] +const NUM_INSTANCES: usize = 1; + +// --- + +#[derive(Debug, Clone, Copy)] +enum ArrayKind { + /// E.g. an array of `InstanceKey`. + Primitive, + + /// E.g. an array of `Point2D`. 
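+    /// (Backed by an arrow `StructArray`; see the downcasts further down.)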
+ Struct, +} + +impl std::fmt::Display for ArrayKind { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(match self { + ArrayKind::Primitive => "primitive", + ArrayKind::Struct => "struct", + }) + } +} + +fn estimated_size_bytes(c: &mut Criterion) { + let kind = [ArrayKind::Primitive, ArrayKind::Struct]; + + for kind in kind { + let mut group = c.benchmark_group(format!( + "arrow2/erased_clone/{kind}/rows={NUM_ROWS}/instances={NUM_INSTANCES}" + )); + group.throughput(criterion::Throughput::Elements(NUM_ROWS as _)); + + fn generate_cells(kind: ArrayKind) -> Vec { + match kind { + ArrayKind::Primitive => (0..NUM_ROWS) + .map(|_| DataCell::from_native(build_some_instances(NUM_INSTANCES).as_slice())) + .collect(), + ArrayKind::Struct => (0..NUM_ROWS) + .map(|_| DataCell::from_native(build_some_point2d(NUM_INSTANCES).as_slice())) + .collect(), + } + } + + { + { + let cells = generate_cells(kind); + let total_instances = cells.iter().map(|cell| cell.num_instances()).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("cell/arc_erased", |b| { + b.iter(|| { + let cells = cells.clone(); + assert_eq!( + total_instances, + cells.iter().map(|cell| cell.num_instances()).sum::() + ); + cells + }); + }); + } + + { + let cells = generate_cells(kind).into_iter().map(Arc::new).collect_vec(); + let total_instances = cells.iter().map(|cell| cell.num_instances()).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("cell/wrapped_in_arc", |b| { + b.iter(|| { + let cells = cells.clone(); + assert_eq!( + total_instances, + cells.iter().map(|cell| cell.num_instances()).sum::() + ); + cells + }); + }); + } + + { + let cells = generate_cells(kind); + let arrays = cells.iter().map(|cell| cell.as_arrow()).collect_vec(); + let total_instances = arrays.iter().map(|array| array.len() as u32).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("array", |b| { + b.iter(|| { + let arrays = arrays.clone(); + assert_eq!( + total_instances, + arrays.iter().map(|array| array.len() as u32).sum::() + ); + arrays + }); + }); + } + + match kind { + ArrayKind::Primitive => { + let cells = generate_cells(kind); + let arrays = cells + .iter() + .map(|cell| { + cell.as_arrow_ref() + .as_any() + .downcast_ref::>() + .unwrap() + .clone() + }) + .collect_vec(); + let total_instances = + arrays.iter().map(|array| array.len() as u32).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("array/downcast_first", |b| { + b.iter(|| { + let arrays = arrays.clone(); + assert_eq!( + total_instances, + arrays.iter().map(|array| array.len() as u32).sum::() + ); + arrays + }); + }); + } + ArrayKind::Struct => { + let cells = generate_cells(kind); + let arrays = cells + .iter() + .map(|cell| { + cell.as_arrow_ref() + .as_any() + .downcast_ref::() + .unwrap() + .clone() + }) + .collect_vec(); + let total_instances = + arrays.iter().map(|array| array.len() as u32).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("array/downcast_first", |b| { + b.iter(|| { + let arrays = arrays.clone(); + assert_eq!( + total_instances, + arrays.iter().map(|array| array.len() as u32).sum::() + ); + arrays + }); + }); + } + } + } + + { + fn generate_points() -> Vec> { + (0..NUM_ROWS) + .map(|_| build_some_point2d(NUM_INSTANCES)) + .collect() + } + + fn generate_keys() -> Vec> { + (0..NUM_ROWS) + .map(|_| 
build_some_instances(NUM_INSTANCES)) + .collect() + } + + match kind { + ArrayKind::Primitive => bench_std(&mut group, generate_keys()), + ArrayKind::Struct => bench_std(&mut group, generate_points()), + } + + fn bench_std( + group: &mut criterion::BenchmarkGroup<'_, criterion::measurement::WallTime>, + data: Vec>, + ) { + { + let vecs = data.clone(); + let total_instances = vecs.iter().map(|vec| vec.len() as u32).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("vec/full_copy", |b| { + b.iter(|| { + let vecs = vecs.clone(); + assert_eq!( + total_instances, + vecs.iter().map(|vec| vec.len() as u32).sum::() + ); + vecs + }); + }); + } + + { + let vecs = data.into_iter().map(Arc::new).collect_vec(); + let total_instances = vecs.iter().map(|vec| vec.len() as u32).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("vec/wrapped_in_arc", |b| { + b.iter(|| { + let vecs = vecs.clone(); + assert_eq!( + total_instances, + vecs.iter().map(|vec| vec.len() as u32).sum::() + ); + vecs + }); + }); + } + } + } + } +} From 29a13ea8fad4020c8c2c653f23e83e9a31a7beb1 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Mon, 3 Apr 2023 15:54:15 +0200 Subject: [PATCH 07/89] `arrow2` estimated_bytes_size benchmarks (#1743) * arrow2 estimated_bytes_size benchmarks * cleanup --- crates/re_arrow_store/benches/arrow2.rs | 226 ++++++++++++++++++------ 1 file changed, 170 insertions(+), 56 deletions(-) diff --git a/crates/re_arrow_store/benches/arrow2.rs b/crates/re_arrow_store/benches/arrow2.rs index 51f10a0c2fe5..9021b26e2dde 100644 --- a/crates/re_arrow_store/benches/arrow2.rs +++ b/crates/re_arrow_store/benches/arrow2.rs @@ -5,18 +5,22 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; use std::sync::Arc; -use arrow2::array::{Array, PrimitiveArray, StructArray}; +use arrow2::{ + array::{Array, PrimitiveArray, StructArray, UnionArray}, + compute::aggregate::estimated_bytes_size, +}; use criterion::{criterion_group, criterion_main, Criterion}; use itertools::Itertools; use re_log_types::{ - component_types::{InstanceKey, Point2D}, - datagen::{build_some_instances, build_some_point2d}, - DataCell, + component_types::{InstanceKey, Point2D, Rect2D}, + datagen::{build_some_instances, build_some_point2d, build_some_rects}, + external::arrow2_convert::serialize::TryIntoArrow, + DataCell, SerializableComponent, }; // --- -criterion_group!(benches, estimated_size_bytes); +criterion_group!(benches, erased_clone, estimated_size_bytes); criterion_main!(benches); // --- @@ -41,6 +45,9 @@ enum ArrayKind { /// E.g. an array of `Point2D`. Struct, + + /// E.g. an array of `Rect2D`. 
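+    /// (Backed by an arrow `UnionArray`; see `bench_downcast_first` below.)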
+ StructLarge, } impl std::fmt::Display for ArrayKind { @@ -48,12 +55,128 @@ impl std::fmt::Display for ArrayKind { f.write_str(match self { ArrayKind::Primitive => "primitive", ArrayKind::Struct => "struct", + ArrayKind::StructLarge => "struct_large", }) } } +fn erased_clone(c: &mut Criterion) { + let kind = [ + ArrayKind::Primitive, + ArrayKind::Struct, + ArrayKind::StructLarge, + ]; + + for kind in kind { + let mut group = c.benchmark_group(format!( + "arrow2/size_bytes/{kind}/rows={NUM_ROWS}/instances={NUM_INSTANCES}" + )); + group.throughput(criterion::Throughput::Elements(NUM_ROWS as _)); + + match kind { + ArrayKind::Primitive => { + let data = build_some_instances(NUM_INSTANCES); + bench_arrow(&mut group, data.as_slice()); + bench_native(&mut group, data.as_slice()); + } + ArrayKind::Struct => { + let data = build_some_point2d(NUM_INSTANCES); + bench_arrow(&mut group, data.as_slice()); + bench_native(&mut group, data.as_slice()); + } + ArrayKind::StructLarge => { + let data = build_some_rects(NUM_INSTANCES); + bench_arrow(&mut group, data.as_slice()); + bench_native(&mut group, data.as_slice()); + } + } + } + + // TODO(cmc): Use cells once `cell.size_bytes()` has landed (#1727) + fn bench_arrow( + group: &mut criterion::BenchmarkGroup<'_, criterion::measurement::WallTime>, + data: &[T], + ) { + let arrays: Vec> = (0..NUM_ROWS) + .map(|_| TryIntoArrow::try_into_arrow(data).unwrap()) + .collect_vec(); + + let total_size_bytes = arrays + .iter() + .map(|array| estimated_bytes_size(&**array) as u64) + .sum::(); + assert!(total_size_bytes as usize >= NUM_ROWS * NUM_INSTANCES * std::mem::size_of::()); + + group.bench_function("array", |b| { + b.iter(|| { + let sz = arrays + .iter() + .map(|array| estimated_bytes_size(&**array) as u64) + .sum::(); + assert_eq!(total_size_bytes, sz); + sz + }); + }); + } + + fn bench_native( + group: &mut criterion::BenchmarkGroup<'_, criterion::measurement::WallTime>, + data: &[T], + ) { + let vecs = (0..NUM_ROWS).map(|_| data.to_vec()).collect_vec(); + + let total_size_bytes = vecs + .iter() + .map(|vec| std::mem::size_of_val(vec.as_slice()) as u64) + .sum::(); + assert!(total_size_bytes as usize >= NUM_ROWS * NUM_INSTANCES * std::mem::size_of::()); + + { + let vecs = (0..NUM_ROWS).map(|_| data.to_vec()).collect_vec(); + group.bench_function("vec", |b| { + b.iter(|| { + let sz = vecs + .iter() + .map(|vec| std::mem::size_of_val(vec.as_slice()) as u64) + .sum::(); + assert_eq!(total_size_bytes, sz); + sz + }); + }); + } + + trait SizeOf { + fn size_of(&self) -> usize; + } + + impl SizeOf for Vec { + fn size_of(&self) -> usize { + std::mem::size_of_val(self.as_slice()) + } + } + + { + let vecs: Vec> = (0..NUM_ROWS) + .map(|_| Box::new(data.to_vec()) as Box) + .collect_vec(); + + group.bench_function("vec/erased", |b| { + b.iter(|| { + let sz = vecs.iter().map(|vec| vec.size_of() as u64).sum::(); + assert_eq!(total_size_bytes, sz); + sz + }); + }); + } + } +} + fn estimated_size_bytes(c: &mut Criterion) { - let kind = [ArrayKind::Primitive, ArrayKind::Struct]; + let kind = [ + ArrayKind::Primitive, + ArrayKind::Struct, + ArrayKind::StructLarge, + ]; for kind in kind { let mut group = c.benchmark_group(format!( @@ -69,6 +192,9 @@ fn estimated_size_bytes(c: &mut Criterion) { ArrayKind::Struct => (0..NUM_ROWS) .map(|_| DataCell::from_native(build_some_point2d(NUM_INSTANCES).as_slice())) .collect(), + ArrayKind::StructLarge => (0..NUM_ROWS) + .map(|_| DataCell::from_native(build_some_rects(NUM_INSTANCES).as_slice())) + .collect(), } } @@ -127,59 +253,40 @@ fn 
estimated_size_bytes(c: &mut Criterion) { match kind { ArrayKind::Primitive => { - let cells = generate_cells(kind); - let arrays = cells - .iter() - .map(|cell| { - cell.as_arrow_ref() - .as_any() - .downcast_ref::>() - .unwrap() - .clone() - }) - .collect_vec(); - let total_instances = - arrays.iter().map(|array| array.len() as u32).sum::(); - assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); - - group.bench_function("array/downcast_first", |b| { - b.iter(|| { - let arrays = arrays.clone(); - assert_eq!( - total_instances, - arrays.iter().map(|array| array.len() as u32).sum::() - ); - arrays - }); - }); + bench_downcast_first::>(&mut group, kind); } - ArrayKind::Struct => { - let cells = generate_cells(kind); - let arrays = cells - .iter() - .map(|cell| { - cell.as_arrow_ref() - .as_any() - .downcast_ref::() - .unwrap() - .clone() - }) - .collect_vec(); - let total_instances = - arrays.iter().map(|array| array.len() as u32).sum::(); - assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + ArrayKind::Struct => bench_downcast_first::(&mut group, kind), + ArrayKind::StructLarge => bench_downcast_first::(&mut group, kind), + } - group.bench_function("array/downcast_first", |b| { - b.iter(|| { - let arrays = arrays.clone(); - assert_eq!( - total_instances, - arrays.iter().map(|array| array.len() as u32).sum::() - ); - arrays - }); + fn bench_downcast_first( + group: &mut criterion::BenchmarkGroup<'_, criterion::measurement::WallTime>, + kind: ArrayKind, + ) { + let cells = generate_cells(kind); + let arrays = cells + .iter() + .map(|cell| { + cell.as_arrow_ref() + .as_any() + .downcast_ref::() + .unwrap() + .clone() + }) + .collect_vec(); + let total_instances = arrays.iter().map(|array| array.len() as u32).sum::(); + assert_eq!(total_instances, (NUM_ROWS * NUM_INSTANCES) as u32); + + group.bench_function("array/downcast_first", |b| { + b.iter(|| { + let arrays = arrays.clone(); + assert_eq!( + total_instances, + arrays.iter().map(|array| array.len() as u32).sum::() + ); + arrays }); - } + }); } } @@ -196,9 +303,16 @@ fn estimated_size_bytes(c: &mut Criterion) { .collect() } + fn generate_rects() -> Vec> { + (0..NUM_ROWS) + .map(|_| build_some_rects(NUM_INSTANCES)) + .collect() + } + match kind { ArrayKind::Primitive => bench_std(&mut group, generate_keys()), ArrayKind::Struct => bench_std(&mut group, generate_points()), + ArrayKind::StructLarge => bench_std(&mut group, generate_rects()), } fn bench_std( From d94ca3dd35e73e1984ccb969d0c7abd0d3e0faa9 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Tue, 4 Apr 2023 09:44:30 +0200 Subject: [PATCH 08/89] Fix crash when trying to do picking on depth clouds --- crates/re_renderer/src/renderer/depth_cloud.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/re_renderer/src/renderer/depth_cloud.rs b/crates/re_renderer/src/renderer/depth_cloud.rs index c69def6eca10..b66097528b64 100644 --- a/crates/re_renderer/src/renderer/depth_cloud.rs +++ b/crates/re_renderer/src/renderer/depth_cloud.rs @@ -551,7 +551,7 @@ impl Renderer for DepthCloudRenderer { let bind_group = match phase { DrawPhase::OutlineMask => &instance.bind_group_outline, - DrawPhase::Opaque => &instance.bind_group_opaque, + DrawPhase::PickingLayer | DrawPhase::Opaque => &instance.bind_group_opaque, _ => unreachable!(), }; From bf27c8ef14308491df8d8b4db2c000a6e76d8af7 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Tue, 4 Apr 2023 10:32:26 +0200 Subject: [PATCH 09/89] Readback depth from GPU picking (#1752) * gpu picking in the viewer 
picks up depth now
* WebGL workarounds
---
 crates/re_renderer/examples/picking.rs        |  18 +-
 crates/re_renderer/shader/copy_texture.wgsl   |  15 +
 .../src/allocator/gpu_readback_belt.rs        |   3 +-
 crates/re_renderer/src/config.rs              |  20 +-
 .../src/draw_phases/picking_layer.rs          | 424 +++++++++++++++---
 .../re_renderer/src/renderer/debug_overlay.rs |   4 +-
 crates/re_renderer/src/view_builder.rs        |   2 +-
 crates/re_renderer/src/wgpu_resources/mod.rs  |   4 +-
 crates/re_renderer/src/workspace_shaders.rs   |   6 +
 .../src/ui/view_spatial/scene/picking.rs      |  17 +-
 10 files changed, 422 insertions(+), 91 deletions(-)
 create mode 100644 crates/re_renderer/shader/copy_texture.wgsl

diff --git a/crates/re_renderer/examples/picking.rs b/crates/re_renderer/examples/picking.rs
index ed21f8f3875f..5344e044d26d 100644
--- a/crates/re_renderer/examples/picking.rs
+++ b/crates/re_renderer/examples/picking.rs
@@ -102,19 +102,19 @@ impl framework::Example for Picking {
             PickingLayerProcessor::next_readback_result::<()>(re_ctx, READBACK_IDENTIFIER)
         {
             // Grab the middle pixel. Usually we'd want to do something clever that snaps to the closest object of interest.
-            let picked_pixel = picking_result.picking_data[(picking_result.rect.extent.x / 2
-                + (picking_result.rect.extent.y / 2) * picking_result.rect.extent.x)
-                as usize];
+            let picked_id = picking_result.picked_id(picking_result.rect.extent / 2);
+            //let picked_position =
+            //    picking_result.picked_world_position(picking_result.rect.extent / 2);
+            //dbg!(picked_position, picked_id);
 
             self.mesh_is_hovered = false;
-            if picked_pixel == MESH_ID {
+            if picked_id == MESH_ID {
                 self.mesh_is_hovered = true;
-            } else if picked_pixel.object.0 != 0
-                && picked_pixel.object.0 <= self.point_sets.len() as u64
+            } else if picked_id.object.0 != 0 && picked_id.object.0 <= self.point_sets.len() as u64
             {
-                let point_set = &mut self.point_sets[picked_pixel.object.0 as usize - 1];
-                point_set.radii[picked_pixel.instance.0 as usize] = Size::new_scene(0.1);
-                point_set.colors[picked_pixel.instance.0 as usize] = Color32::DEBUG_COLOR;
+                let point_set = &mut self.point_sets[picked_id.object.0 as usize - 1];
+                point_set.radii[picked_id.instance.0 as usize] = Size::new_scene(0.1);
+                point_set.colors[picked_id.instance.0 as usize] = Color32::DEBUG_COLOR;
             }
         }
 
diff --git a/crates/re_renderer/shader/copy_texture.wgsl b/crates/re_renderer/shader/copy_texture.wgsl
new file mode 100644
index 000000000000..aaa5bb4c36b6
--- /dev/null
+++ b/crates/re_renderer/shader/copy_texture.wgsl
@@ -0,0 +1,15 @@
+// Reads the content of a texture and writes it out as is.
+//
+// This is needed e.g. on WebGL to convert from a depth format to a regular color format that can be read back to the CPU.
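+//
+// A sketch of the full readback path this shader is part of (the names below come from
+// the accompanying Rust code in this patch, not from this shader): the depth target is
+// sampled here, written to an `Rgba32Float` color target, copied into a readback buffer,
+// and finally surfaces CPU-side as `PickingResult::picking_depth_data`.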
+
+#import <./types.wgsl>
+#import <./global_bindings.wgsl>
+#import <./screen_triangle_vertex.wgsl>
+
+@group(1) @binding(0)
+var tex: texture_2d<f32>;
+
+@fragment
+fn main(in: FragmentInput) -> @location(0) Vec4 {
+    return textureSample(tex, nearest_sampler, in.texcoord);
+}
diff --git a/crates/re_renderer/src/allocator/gpu_readback_belt.rs b/crates/re_renderer/src/allocator/gpu_readback_belt.rs
index 6facafc3e761..8e5f413743e9 100644
--- a/crates/re_renderer/src/allocator/gpu_readback_belt.rs
+++ b/crates/re_renderer/src/allocator/gpu_readback_belt.rs
@@ -86,7 +86,8 @@ impl GpuReadbackBuffer {
             },
         );
 
-        self.range_in_chunk = start_offset..self.range_in_chunk.end;
+        self.range_in_chunk =
+            (start_offset + buffer_info.buffer_size_padded)..self.range_in_chunk.end;
     }
 }
 
diff --git a/crates/re_renderer/src/config.rs b/crates/re_renderer/src/config.rs
index d168ab274373..9d4318ce9b5e 100644
--- a/crates/re_renderer/src/config.rs
+++ b/crates/re_renderer/src/config.rs
@@ -5,7 +5,7 @@
 #[derive(Clone, Copy, Debug)]
 pub enum HardwareTier {
     /// For WebGL and native OpenGL. Maintains strict WebGL capability.
-    Basic,
+    Web,
 
     /// Run natively with Vulkan/Metal but don't demand anything that isn't widely available.
     Native,
@@ -17,7 +17,15 @@ impl HardwareTier {
     /// Whether the current hardware tier supports sampling from textures with a sample count higher than 1.
     pub fn support_sampling_msaa_texture(&self) -> bool {
         match self {
-            HardwareTier::Basic => false,
+            HardwareTier::Web => false,
             HardwareTier::Native => true,
         }
    }
 
+    /// Whether the current hardware tier supports reading back depth textures directly.
+    pub fn support_depth_readback(&self) -> bool {
+        match self {
+            HardwareTier::Web => false,
+            HardwareTier::Native => true,
+        }
+    }
 }
 
 impl Default for HardwareTier {
     fn default() -> Self {
         // Use the "Web" tier for actual web, but also if someone forces the GL backend!
         if supported_backends() == wgpu::Backends::GL {
-            HardwareTier::Basic
+            HardwareTier::Web
         } else {
             HardwareTier::Native
         }
@@ -63,7 +71,11 @@ impl HardwareTier {
     /// Downlevel features required by the given tier.
     pub fn required_downlevel_capabilities(self) -> wgpu::DownlevelCapabilities {
         wgpu::DownlevelCapabilities {
-            flags: wgpu::DownlevelFlags::empty(),
+            flags: match self {
+                HardwareTier::Web => wgpu::DownlevelFlags::empty(),
+                // Require full WebGPU compliance for the native tier.
+                HardwareTier::Native => wgpu::DownlevelFlags::all(),
+            },
             limits: Default::default(), // unused so far both here and in wgpu
             shader_model: wgpu::ShaderModel::Sm4,
         }
diff --git a/crates/re_renderer/src/draw_phases/picking_layer.rs b/crates/re_renderer/src/draw_phases/picking_layer.rs
index 5270f4bf6093..69b125529b2d 100644
--- a/crates/re_renderer/src/draw_phases/picking_layer.rs
+++ b/crates/re_renderer/src/draw_phases/picking_layer.rs
@@ -12,11 +12,18 @@ use crate::{
     allocator::create_and_fill_uniform_buffer,
     global_bindings::FrameUniformBuffer,
+    include_shader_module,
     view_builder::ViewBuilder,
-    wgpu_resources::{GpuBindGroup, GpuTexture, Texture2DBufferInfo, TextureDesc},
+    wgpu_resources::{
+        BindGroupDesc, BindGroupEntry, BindGroupLayoutDesc, GpuBindGroup, GpuRenderPipelineHandle,
+        GpuTexture, GpuTextureHandle, PipelineLayoutDesc, PoolError, RenderPipelineDesc,
+        Texture2DBufferInfo, TextureDesc, WgpuResourcePools,
+    },
     DebugLabel, GpuReadbackBuffer, GpuReadbackIdentifier, IntRect, RenderContext,
 };
 
+use smallvec::smallvec;
+
 /// GPU retrieved & processed picking data result.
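+///
+/// A minimal consumption sketch (assuming a readback was scheduled earlier with the same
+/// `GpuReadbackIdentifier`; `ctx` and `identifier` are placeholders):
+///
+/// ```ignore
+/// if let Some(result) = PickingLayerProcessor::next_readback_result::<()>(ctx, identifier) {
+///     let picked_id = result.picked_id(result.rect.extent / 2);
+///     let picked_world_position = result.picked_world_position(result.rect.extent / 2);
+/// }
+/// ```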
pub struct PickingResult<T> {
     /// User data supplied on picking request.
@@ -26,17 +33,58 @@ pub struct PickingResult<T> {
     /// Describes the area of the picking layer that was read back.
     pub rect: IntRect,
 
-    /// Picking data for the requested rectangle.
+    /// Picking id data for the requested rectangle.
+    ///
+    /// GPU internal row padding has already been removed from this buffer.
+    /// Pixel data is stored in the normal fashion - row wise, left to right, top to bottom.
+    pub picking_id_data: Vec<PickingLayerId>,
+
+    /// Picking depth data for the requested rectangle.
+    ///
+    /// Use [`PickingResult::picked_world_position`] for easy interpretation of the data.
+    ///
+    /// GPU internal row padding has already been removed from this buffer.
+    /// Pixel data is stored in the normal fashion - row wise, left to right, top to bottom.
+    pub picking_depth_data: Vec<f32>,
+
+    /// Transforms an NDC position on the picking rect to a world position.
+    world_from_cropped_projection: glam::Mat4,
+}
+
+impl<T> PickingResult<T> {
+    /// Returns the picked world position.
     ///
-    /// GPU internal row padding has already been removed.
-    /// Data is stored row wise, left to right, top to bottom.
-    pub picking_data: Vec<PickingLayerId>,
+    /// Panics if the position is outside of the picking rect.
+    ///
+    /// Keep in mind that the picked position may be (negative) infinity if nothing was picked.
+    #[inline]
+    pub fn picked_world_position(&self, pos_on_picking_rect: glam::UVec2) -> glam::Vec3 {
+        let raw_depth = self.picking_depth_data
+            [(pos_on_picking_rect.y * self.rect.width() + pos_on_picking_rect.x) as usize];
+
+        self.world_from_cropped_projection.project_point3(
+            pixel_coord_to_ndc(pos_on_picking_rect.as_vec2(), self.rect.extent.as_vec2())
+                .extend(raw_depth),
+        )
+    }
+
+    /// Returns the picked picking id.
+    ///
+    /// Panics if the position is outside of the picking rect.
+    #[inline]
+    pub fn picked_id(&self, pos_on_picking_rect: glam::UVec2) -> PickingLayerId {
+        self.picking_id_data
+            [(pos_on_picking_rect.y * self.rect.width() + pos_on_picking_rect.x) as usize]
+    }
 }
 
 /// Type used as user data on the gpu readback belt.
 struct ReadbackBeltMetadata<T> {
     picking_rect: IntRect,
+    world_from_cropped_projection: glam::Mat4,
     user_data: T,
+
+    depth_readback_workaround_in_use: bool,
 }
 
 /// The first 64 bits of the picking layer.
@@ -76,22 +124,32 @@ impl From<PickingLayerId> for [u32; 4] {
     }
 }
 
+/// Converts a pixel coordinate to normalized device coordinates.
+pub fn pixel_coord_to_ndc(coord: glam::Vec2, target_resolution: glam::Vec2) -> glam::Vec2 {
+    glam::vec2(
+        coord.x / target_resolution.x * 2.0 - 1.0,
+        1.0 - coord.y / target_resolution.y * 2.0,
+    )
+}
+
 /// Manages the rendering of the picking layer pass, its render targets & readback buffer.
 ///
 /// The view builder creates this for every frame that requests a picking result.
 pub struct PickingLayerProcessor {
     pub picking_target: GpuTexture,
-    picking_depth: GpuTexture,
+    picking_depth_target: GpuTexture,
     readback_buffer: GpuReadbackBuffer,
     bind_group_0: GpuBindGroup,
+
+    depth_readback_workaround: Option<DepthReadbackWorkaround>,
 }
 
 impl PickingLayerProcessor {
     /// The texture format used for the picking layer.
     pub const PICKING_LAYER_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Rgba32Uint;
 
-    pub const PICKING_LAYER_DEPTH_FORMAT: wgpu::TextureFormat =
-        ViewBuilder::MAIN_TARGET_DEPTH_FORMAT;
+    /// The depth format used for the picking layer - f32 makes it easiest to deal with retrieved depth and is guaranteed to be copyable.
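+    ///
+    /// Note that not every tier can read this format back directly;
+    /// see `HardwareTier::support_depth_readback` and the [`DepthReadbackWorkaround`] below.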
+ pub const PICKING_LAYER_DEPTH_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Depth32Float; pub const PICKING_LAYER_MSAA_STATE: wgpu::MultisampleState = wgpu::MultisampleState { count: 1, @@ -122,30 +180,6 @@ impl PickingLayerProcessor { readback_identifier: GpuReadbackIdentifier, readback_user_data: T, ) -> Self { - let row_info_id = Texture2DBufferInfo::new(Self::PICKING_LAYER_FORMAT, picking_rect.extent); - //let row_info_depth = - //Texture2DBufferInfo::new(Self::PICKING_LAYER_FORMAT, picking_rect.extent); - - // Offset of the depth buffer in the readback buffer needs to be aligned to size of a depth pixel. - // This is "trivially true" if the size of the depth format is a multiple of the size of the id format. - debug_assert!( - Self::PICKING_LAYER_FORMAT.describe().block_size - % Self::PICKING_LAYER_DEPTH_FORMAT.describe().block_size - == 0 - ); - let buffer_size = row_info_id.buffer_size_padded; // + row_info_depth.buffer_size_padded; - - let readback_buffer = ctx.gpu_readback_belt.lock().allocate( - &ctx.device, - &ctx.gpu_resources.buffers, - buffer_size, - readback_identifier, - Box::new(ReadbackBeltMetadata { - picking_rect, - user_data: readback_user_data, - }), - ); - let mut picking_target_usage = wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC; picking_target_usage.set( @@ -165,27 +199,39 @@ impl PickingLayerProcessor { usage: picking_target_usage, }, ); - let picking_depth = ctx.gpu_resources.textures.alloc( + + let direct_depth_readback = ctx + .shared_renderer_data + .config + .hardware_tier + .support_depth_readback(); + + let picking_depth_target = ctx.gpu_resources.textures.alloc( &ctx.device, &TextureDesc { - label: format!("{view_name} - picking_layer depth").into(), + label: format!("{view_name} - picking_layer depth target").into(), format: Self::PICKING_LAYER_DEPTH_FORMAT, - usage: wgpu::TextureUsages::RENDER_ATTACHMENT, + usage: if direct_depth_readback { + wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::COPY_SRC + } else { + wgpu::TextureUsages::RENDER_ATTACHMENT | wgpu::TextureUsages::TEXTURE_BINDING + }, ..picking_target.creation_desc }, ); + let depth_readback_workaround = (!direct_depth_readback).then(|| { + DepthReadbackWorkaround::new(ctx, picking_rect.extent, picking_depth_target.handle) + }); + let rect_min = picking_rect.top_left_corner.as_vec2(); let rect_max = rect_min + picking_rect.extent.as_vec2(); let screen_resolution = screen_resolution.as_vec2(); - let rect_min_ndc = glam::vec2( - rect_min.x / screen_resolution.x * 2.0 - 1.0, - 1.0 - rect_max.y / screen_resolution.y * 2.0, - ); - let rect_max_ndc = glam::vec2( - rect_max.x / screen_resolution.x * 2.0 - 1.0, - 1.0 - rect_min.y / screen_resolution.y * 2.0, - ); + // y axis is flipped in NDC, therefore we need to flip the y axis of the rect. 
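+        // A worked example (values are illustrative, not from the original code):
+        // on a 100x100 screen, pixel_coord_to_ndc(vec2(0.0, 0.0), vec2(100.0, 100.0))
+        // yields vec2(-1.0, 1.0), and pixel_coord_to_ndc(vec2(50.0, 50.0), vec2(100.0, 100.0))
+        // yields vec2(0.0, 0.0).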
+ let rect_min_ndc = + pixel_coord_to_ndc(glam::vec2(rect_min.x, rect_max.y), screen_resolution); + let rect_max_ndc = + pixel_coord_to_ndc(glam::vec2(rect_max.x, rect_min.y), screen_resolution); let rect_center_ndc = (rect_min_ndc + rect_max_ndc) * 0.5; let cropped_projection_from_projection = glam::Mat4::from_scale(2.0 / (rect_max_ndc - rect_min_ndc).extend(1.0)) @@ -194,15 +240,16 @@ impl PickingLayerProcessor { // Setup frame uniform buffer let previous_projection_from_world: glam::Mat4 = frame_uniform_buffer_content.projection_from_world.into(); + let cropped_projection_from_world = + cropped_projection_from_projection * previous_projection_from_world; let previous_projection_from_view: glam::Mat4 = frame_uniform_buffer_content.projection_from_view.into(); + let cropped_projection_from_view = + cropped_projection_from_projection * previous_projection_from_view; + let frame_uniform_buffer_content = FrameUniformBuffer { - projection_from_world: (cropped_projection_from_projection - * previous_projection_from_world) - .into(), - projection_from_view: (cropped_projection_from_projection - * previous_projection_from_view) - .into(), + projection_from_world: cropped_projection_from_world.into(), + projection_from_view: cropped_projection_from_view.into(), ..*frame_uniform_buffer_content }; @@ -218,11 +265,44 @@ impl PickingLayerProcessor { frame_uniform_buffer, ); + let row_info_id = Texture2DBufferInfo::new(Self::PICKING_LAYER_FORMAT, picking_rect.extent); + let row_info_depth = Texture2DBufferInfo::new( + if direct_depth_readback { + Self::PICKING_LAYER_DEPTH_FORMAT + } else { + DepthReadbackWorkaround::READBACK_FORMAT + }, + picking_rect.extent, + ); + + // Offset of the depth buffer in the readback buffer needs to be aligned to size of a depth pixel. + // This is "trivially true" if the size of the depth format is a multiple of the size of the id format. + debug_assert!( + Self::PICKING_LAYER_FORMAT.describe().block_size + % Self::PICKING_LAYER_DEPTH_FORMAT.describe().block_size + == 0 + ); + let buffer_size = row_info_id.buffer_size_padded + row_info_depth.buffer_size_padded; + + let readback_buffer = ctx.gpu_readback_belt.lock().allocate( + &ctx.device, + &ctx.gpu_resources.buffers, + buffer_size, + readback_identifier, + Box::new(ReadbackBeltMetadata { + picking_rect, + user_data: readback_user_data, + world_from_cropped_projection: cropped_projection_from_world.inverse(), + depth_readback_workaround_in_use: depth_readback_workaround.is_some(), + }), + ); + PickingLayerProcessor { bind_group_0, picking_target, - picking_depth, + picking_depth_target, readback_buffer, + depth_readback_workaround, } } @@ -244,10 +324,10 @@ impl PickingLayerProcessor { }, })], depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachment { - view: &self.picking_depth.default_view, + view: &self.picking_depth_target.default_view, depth_ops: Some(wgpu::Operations { load: ViewBuilder::DEFAULT_DEPTH_CLEAR, - store: false, + store: true, // Store for readback! 
             }),
             stencil_ops: None,
         }),
@@ -258,20 +338,49 @@ impl PickingLayerProcessor {
         pass
     }
 
-    pub fn end_render_pass(self, encoder: &mut wgpu::CommandEncoder) {
-        self.readback_buffer.read_texture2d(
+    pub fn end_render_pass(
+        self,
+        encoder: &mut wgpu::CommandEncoder,
+        pools: &WgpuResourcePools,
+    ) -> Result<(), PoolError> {
+        let extent = glam::uvec2(
+            self.picking_target.texture.width(),
+            self.picking_target.texture.height(),
+        );
+
+        let readable_depth_texture = if let Some(depth_copy_workaround) =
+            self.depth_readback_workaround.as_ref()
+        {
+            depth_copy_workaround.copy_to_readable_texture(encoder, pools, &self.bind_group_0)?
+        } else {
+            &self.picking_depth_target
+        };
+
+        self.readback_buffer.read_multiple_texture2d(
             encoder,
-            wgpu::ImageCopyTexture {
-                texture: &self.picking_target.texture,
-                mip_level: 0,
-                origin: wgpu::Origin3d::ZERO,
-                aspect: wgpu::TextureAspect::All,
-            },
-            glam::uvec2(
-                self.picking_target.texture.width(),
-                self.picking_target.texture.height(),
-            ),
+            &[
+                (
+                    wgpu::ImageCopyTexture {
+                        texture: &self.picking_target.texture,
+                        mip_level: 0,
+                        origin: wgpu::Origin3d::ZERO,
+                        aspect: wgpu::TextureAspect::All,
+                    },
+                    extent,
+                ),
+                (
+                    wgpu::ImageCopyTexture {
+                        texture: &readable_depth_texture.texture,
+                        mip_level: 0,
+                        origin: wgpu::Origin3d::ZERO,
+                        aspect: wgpu::TextureAspect::All,
+                    },
+                    extent,
+                ),
+            ],
         );
+
+        Ok(())
     }
 
     /// Returns the oldest received picking results for a given identifier and user data type.
@@ -290,19 +399,202 @@ impl PickingLayerProcessor {
         ctx.gpu_readback_belt
            .lock()
            .readback_data::<ReadbackBeltMetadata<T>>(identifier, |data, metadata| {
+                // Assert that our texture data reinterpretation works out from a pixel size point of view.
+                debug_assert_eq!(
+                    Self::PICKING_LAYER_DEPTH_FORMAT.describe().block_size as usize,
+                    std::mem::size_of::<f32>()
+                );
+                debug_assert_eq!(
+                    Self::PICKING_LAYER_FORMAT.describe().block_size as usize,
+                    std::mem::size_of::<PickingLayerId>()
+                );
+
                 let buffer_info_id = Texture2DBufferInfo::new(
                     Self::PICKING_LAYER_FORMAT,
                     metadata.picking_rect.extent,
                 );
+                let buffer_info_depth = Texture2DBufferInfo::new(
+                    if metadata.depth_readback_workaround_in_use {
+                        DepthReadbackWorkaround::READBACK_FORMAT
+                    } else {
+                        Self::PICKING_LAYER_DEPTH_FORMAT
+                    },
+                    metadata.picking_rect.extent,
+                );
 
-                let picking_data = buffer_info_id.remove_padding_and_convert(data);
+                let picking_id_data = buffer_info_id
+                    .remove_padding_and_convert(&data[..buffer_info_id.buffer_size_padded as _]);
+                let mut picking_depth_data = buffer_info_depth
+                    .remove_padding_and_convert(&data[buffer_info_id.buffer_size_padded as _..]);
+
+                if metadata.depth_readback_workaround_in_use {
+                    // Can't read back depth textures & can't read back R32Float textures either!
+                    // See https://github.com/gfx-rs/wgpu/issues/3644
+                    debug_assert_eq!(
+                        DepthReadbackWorkaround::READBACK_FORMAT
+                            .describe()
+                            .block_size as usize,
+                        std::mem::size_of::<f32>() * 4
+                    );
+                    picking_depth_data = picking_depth_data.into_iter().step_by(4).collect();
+                }
 
                 result = Some(PickingResult {
-                    picking_data,
+                    picking_id_data,
+                    picking_depth_data,
                     user_data: metadata.user_data,
                     rect: metadata.picking_rect,
+                    world_from_cropped_projection: metadata.world_from_cropped_projection,
                 });
             });
         result
     }
 }
+
+/// Utility for copying a depth texture into a readable color texture when it can't be read back directly
+/// (ideally that would be [`wgpu::TextureFormat::R32Float`], but see [`DepthReadbackWorkaround::READBACK_FORMAT`]).
+///
+/// Implementation note:
+/// This is a plain & simple "sample in shader and write to texture" utility.
+/// It might be worth abstracting this further into a general purpose operator. +/// There is not much in here that is specific to the depth usecase! +struct DepthReadbackWorkaround { + render_pipeline: GpuRenderPipelineHandle, + bind_group: GpuBindGroup, + readable_texture: GpuTexture, +} + +impl DepthReadbackWorkaround { + /// There's two layers of workarounds here: + /// * WebGL (via spec) not being able to read back depth textures + /// * unclear behavior for any readback that isn't RGBA + /// Furthermore, integer textures also seemed to be problematic, + /// but it seems to work fine for [`wgpu::TextureFormat::Rgba32Uint`] which we use for our picking ID + /// Details see [wgpu#3644](https://github.com/gfx-rs/wgpu/issues/3644) + const READBACK_FORMAT: wgpu::TextureFormat = wgpu::TextureFormat::Rgba32Float; + + fn new( + ctx: &mut RenderContext, + extent: glam::UVec2, + depth_target_handle: GpuTextureHandle, + ) -> DepthReadbackWorkaround { + let readable_texture = ctx.gpu_resources.textures.alloc( + &ctx.device, + &TextureDesc { + label: "DepthCopyWorkaround::readable_texture".into(), + format: Self::READBACK_FORMAT, + usage: wgpu::TextureUsages::COPY_SRC | wgpu::TextureUsages::RENDER_ATTACHMENT, + size: wgpu::Extent3d { + width: extent.x, + height: extent.y, + depth_or_array_layers: 1, + }, + mip_level_count: 1, + sample_count: 1, + dimension: wgpu::TextureDimension::D2, + }, + ); + + let bind_group_layout = ctx.gpu_resources.bind_group_layouts.get_or_create( + &ctx.device, + &BindGroupLayoutDesc { + label: "DepthCopyWorkaround::bind_group_layout".into(), + entries: vec![wgpu::BindGroupLayoutEntry { + binding: 0, + visibility: wgpu::ShaderStages::FRAGMENT, + ty: wgpu::BindingType::Texture { + sample_type: wgpu::TextureSampleType::Float { filterable: false }, + view_dimension: wgpu::TextureViewDimension::D2, + multisampled: false, + }, + count: None, + }], + }, + ); + + let bind_group = ctx.gpu_resources.bind_groups.alloc( + &ctx.device, + &ctx.gpu_resources, + &BindGroupDesc { + label: "DepthCopyWorkaround::bind_group".into(), + entries: smallvec![BindGroupEntry::DefaultTextureView(depth_target_handle)], + layout: bind_group_layout, + }, + ); + + let render_pipeline = ctx.gpu_resources.render_pipelines.get_or_create( + &ctx.device, + &RenderPipelineDesc { + label: "DepthCopyWorkaround::render_pipeline".into(), + pipeline_layout: ctx.gpu_resources.pipeline_layouts.get_or_create( + &ctx.device, + &PipelineLayoutDesc { + label: "DepthCopyWorkaround::render_pipeline".into(), + entries: vec![ + ctx.shared_renderer_data.global_bindings.layout, + bind_group_layout, + ], + }, + &ctx.gpu_resources.bind_group_layouts, + ), + vertex_entrypoint: "main".into(), + vertex_handle: ctx.gpu_resources.shader_modules.get_or_create( + &ctx.device, + &mut ctx.resolver, + &include_shader_module!("../../shader/screen_triangle.wgsl"), + ), + fragment_entrypoint: "main".into(), + fragment_handle: ctx.gpu_resources.shader_modules.get_or_create( + &ctx.device, + &mut ctx.resolver, + &include_shader_module!("../../shader/copy_texture.wgsl"), + ), + vertex_buffers: smallvec![], + render_targets: smallvec![Some(readable_texture.texture.format().into())], + primitive: wgpu::PrimitiveState { + topology: wgpu::PrimitiveTopology::TriangleStrip, + cull_mode: None, + ..Default::default() + }, + depth_stencil: None, + multisample: wgpu::MultisampleState::default(), + }, + &ctx.gpu_resources.pipeline_layouts, + &ctx.gpu_resources.shader_modules, + ); + + Self { + render_pipeline, + bind_group, + readable_texture, + } 
+ } + + fn copy_to_readable_texture( + &self, + encoder: &mut wgpu::CommandEncoder, + pools: &WgpuResourcePools, + global_binding_bind_group: &GpuBindGroup, + ) -> Result<&GpuTexture, PoolError> { + // Copy depth texture to a readable (color) texture with a screen filling triangle. + let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { + label: DebugLabel::from("Depth copy workaround").get(), + color_attachments: &[Some(wgpu::RenderPassColorAttachment { + view: &self.readable_texture.default_view, + resolve_target: None, + ops: wgpu::Operations { + load: wgpu::LoadOp::Clear(wgpu::Color::TRANSPARENT), + store: true, // Store for readback! + }, + })], + depth_stencil_attachment: None, + }); + + let pipeline = pools.render_pipelines.get_resource(self.render_pipeline)?; + pass.set_pipeline(pipeline); + pass.set_bind_group(0, global_binding_bind_group, &[]); + pass.set_bind_group(1, &self.bind_group, &[]); + pass.draw(0..3, 0..1); + + Ok(&self.readable_texture) + } +} diff --git a/crates/re_renderer/src/renderer/debug_overlay.rs b/crates/re_renderer/src/renderer/debug_overlay.rs index f7dc4a4dbcec..276f8fc413da 100644 --- a/crates/re_renderer/src/renderer/debug_overlay.rs +++ b/crates/re_renderer/src/renderer/debug_overlay.rs @@ -189,7 +189,7 @@ impl Renderer for DebugOverlayRenderer { ); let render_pipeline = pools.render_pipelines.get_or_create( device, - &(RenderPipelineDesc { + &RenderPipelineDesc { label: "DebugOverlayDrawData::render_pipeline_regular".into(), pipeline_layout: pools.pipeline_layouts.get_or_create( device, @@ -212,7 +212,7 @@ impl Renderer for DebugOverlayRenderer { }, depth_stencil: None, multisample: wgpu::MultisampleState::default(), - }), + }, &pools.pipeline_layouts, &pools.shader_modules, ); diff --git a/crates/re_renderer/src/view_builder.rs b/crates/re_renderer/src/view_builder.rs index 49fde9b8bf09..704f0a22f7be 100644 --- a/crates/re_renderer/src/view_builder.rs +++ b/crates/re_renderer/src/view_builder.rs @@ -582,7 +582,7 @@ impl ViewBuilder { //pass.set_bind_group(0, &setup.bind_group_0, &[]); self.draw_phase(ctx, DrawPhase::PickingLayer, &mut pass); } - picking_processor.end_render_pass(&mut encoder); + picking_processor.end_render_pass(&mut encoder, &ctx.gpu_resources)?; } if let Some(outline_mask_processor) = self.outline_mask_processor.take() { diff --git a/crates/re_renderer/src/wgpu_resources/mod.rs b/crates/re_renderer/src/wgpu_resources/mod.rs index 403e46350b47..06f30b6292d3 100644 --- a/crates/re_renderer/src/wgpu_resources/mod.rs +++ b/crates/re_renderer/src/wgpu_resources/mod.rs @@ -169,7 +169,7 @@ impl Texture2DBufferInfo { pub fn remove_padding<'a>(&self, buffer: &'a [u8]) -> Cow<'a, [u8]> { crate::profile_function!(); - assert!(buffer.len() as wgpu::BufferAddress == self.buffer_size_padded); + assert_eq!(buffer.len() as wgpu::BufferAddress, self.buffer_size_padded); if self.bytes_per_row_padded == self.bytes_per_row_unpadded { return Cow::Borrowed(buffer); @@ -196,7 +196,7 @@ impl Texture2DBufferInfo { pub fn remove_padding_and_convert(&self, buffer: &[u8]) -> Vec { crate::profile_function!(); - assert!(buffer.len() as wgpu::BufferAddress == self.buffer_size_padded); + assert_eq!(buffer.len() as wgpu::BufferAddress, self.buffer_size_padded); assert!(self.bytes_per_row_unpadded % std::mem::size_of::() as u32 == 0); // Due to https://github.com/gfx-rs/wgpu/issues/3508 the data might be completely unaligned, diff --git a/crates/re_renderer/src/workspace_shaders.rs b/crates/re_renderer/src/workspace_shaders.rs index 
c2f37fe7ae41..0dd9316c8c4c 100644
--- a/crates/re_renderer/src/workspace_shaders.rs
+++ b/crates/re_renderer/src/workspace_shaders.rs
@@ -25,6 +25,12 @@ pub fn init() {
         fs.create_file(virtpath, content).unwrap();
     }
 
+    {
+        let virtpath = Path::new("shader/copy_texture.wgsl");
+        let content = include_str!("../shader/copy_texture.wgsl").into();
+        fs.create_file(virtpath, content).unwrap();
+    }
+
     {
         let virtpath = Path::new("shader/debug_overlay.wgsl");
         let content = include_str!("../shader/debug_overlay.wgsl").into();
diff --git a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs
index 5585e555f5a0..ea813f460c91 100644
--- a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs
+++ b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs
@@ -207,14 +207,19 @@ pub fn picking(
     if state.closest_opaque_pick.instance_path_hash == InstancePathHash::NONE {
         if let Some(gpu_picking_result) = gpu_picking_result {
             // TODO(andreas): Pick middle pixel for now. But we soon want to snap to the closest object using a bigger picking rect.
-            let rect = gpu_picking_result.rect;
-            let picked_id = gpu_picking_result.picking_data
-                [(rect.width() / 2 + (rect.height() / 2) * rect.width()) as usize];
+            let pos_on_picking_rect = gpu_picking_result.rect.extent / 2;
+            let picked_id = gpu_picking_result.picked_id(pos_on_picking_rect);
             let picked_object = instance_path_hash_from_picking_layer_id(picked_id);
 
-            // TODO(andreas): We're lacking depth information!
-            state.closest_opaque_pick.instance_path_hash = picked_object;
-            state.closest_opaque_pick.used_gpu_picking = true;
+            // This is old data; the object might be gone by now!
+            if picked_object.is_some() {
+                // TODO(andreas): Once this is the primary path we should not awkwardly reconstruct the ray_t here. It's not entirely correct either!
+                state.closest_opaque_pick.ray_t = gpu_picking_result
+                    .picked_world_position(pos_on_picking_rect)
+                    .distance(context.ray_in_world.origin);
+                state.closest_opaque_pick.instance_path_hash = picked_object;
+                state.closest_opaque_pick.used_gpu_picking = true;
+            }
         } else {
             // It is possible that some frames we don't get a picking result and the frame after we get several.
             // We need to cache the last picking result and use it until we get a new one or the mouse leaves the screen.
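The viewer hunk above reconstructs `ray_t` from the picked world position. A minimal
sketch of that relationship (assuming `glam` types and a normalized ray direction; the
helper name is hypothetical, not part of the patch):

    // With a normalized ray direction, the origin-to-point distance equals the ray
    // parameter t - but only if the picked point actually lies on the ray, which is
    // why the TODO above calls this reconstruction not entirely correct.
    fn reconstruct_ray_t(ray_origin: glam::Vec3, picked_world_position: glam::Vec3) -> f32 {
        picked_world_position.distance(ray_origin)
    }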
From a479c0ccc21691badf43c5fdf28f7c46fc30e833 Mon Sep 17 00:00:00 2001 From: Pablo Vela Date: Tue, 4 Apr 2023 07:03:45 -0500 Subject: [PATCH 10/89] Add new ARKitScenes example (#1538) Co-authored-by: Nikolaus West Co-authored-by: Emil Ernerfeldt --- examples/python/arkitscenes/.gitignore | 1 + .../python/arkitscenes/download_dataset.py | 321 +++++++++++++ examples/python/arkitscenes/main.py | 450 ++++++++++++++++++ examples/python/arkitscenes/requirements.txt | 7 + 4 files changed, 779 insertions(+) create mode 100644 examples/python/arkitscenes/.gitignore create mode 100644 examples/python/arkitscenes/download_dataset.py create mode 100755 examples/python/arkitscenes/main.py create mode 100644 examples/python/arkitscenes/requirements.txt diff --git a/examples/python/arkitscenes/.gitignore b/examples/python/arkitscenes/.gitignore new file mode 100644 index 000000000000..d1ac3a94954e --- /dev/null +++ b/examples/python/arkitscenes/.gitignore @@ -0,0 +1 @@ +dataset/** diff --git a/examples/python/arkitscenes/download_dataset.py b/examples/python/arkitscenes/download_dataset.py new file mode 100644 index 000000000000..d2f5e2cbe62a --- /dev/null +++ b/examples/python/arkitscenes/download_dataset.py @@ -0,0 +1,321 @@ +# Copied from https://github.com/apple/ARKitScenes/blob/main/download_data.py +# Licensing information: https://github.com/apple/ARKitScenes/blob/main/LICENSE +import math +import os +import subprocess +from pathlib import Path +from typing import Final, List, Optional + +import pandas as pd + +ARkitscense_url = "https://docs-assets.developer.apple.com/ml-research/datasets/arkitscenes/v1" +TRAINING: Final = "Training" +VALIDATION: Final = "Validation" +HIGRES_DEPTH_ASSET_NAME: Final = "highres_depth" +POINT_CLOUDS_FOLDER: Final = "laser_scanner_point_clouds" + +AVAILABLE_RECORDINGS: Final = ["48458663", "42444949", "41069046", "41125722", "41125763", "42446167"] +DATASET_DIR: Final = Path(os.path.dirname(__file__)) / "dataset" + +default_raw_dataset_assets = [ + "mov", + "annotation", + "mesh", + "confidence", + "highres_depth", + "lowres_depth", + "lowres_wide.traj", + "lowres_wide", + "lowres_wide_intrinsics", + "ultrawide", + "ultrawide_intrinsics", + "vga_wide", + "vga_wide_intrinsics", +] + +missing_3dod_assets_video_ids = [ + "47334522", + "47334523", + "42897421", + "45261582", + "47333152", + "47333155", + "48458535", + "48018733", + "47429677", + "48458541", + "42897848", + "47895482", + "47333960", + "47430089", + "42899148", + "42897612", + "42899153", + "42446164", + "48018149", + "47332198", + "47334515", + "45663223", + "45663226", + "45663227", +] + + +def raw_files(video_id: str, assets: List[str], metadata: pd.DataFrame) -> List[str]: + file_names = [] + for asset in assets: + if HIGRES_DEPTH_ASSET_NAME == asset: + in_upsampling = metadata.loc[metadata["video_id"] == float(video_id), ["is_in_upsampling"]].iat[0, 0] + if not in_upsampling: + print(f"Skipping asset {asset} for video_id {video_id} - Video not in upsampling dataset") + continue # highres_depth asset only available for video ids from upsampling dataset + + if asset in [ + "confidence", + "highres_depth", + "lowres_depth", + "lowres_wide", + "lowres_wide_intrinsics", + "ultrawide", + "ultrawide_intrinsics", + "wide", + "wide_intrinsics", + "vga_wide", + "vga_wide_intrinsics", + ]: + file_names.append(asset + ".zip") + elif asset == "mov": + file_names.append(f"{video_id}.mov") + elif asset == "mesh": + if video_id not in missing_3dod_assets_video_ids: + 
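+                # e.g. "48458663_3dod_mesh.ply" for the first entry in AVAILABLE_RECORDINGS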
file_names.append(f"{video_id}_3dod_mesh.ply") + elif asset == "annotation": + if video_id not in missing_3dod_assets_video_ids: + file_names.append(f"{video_id}_3dod_annotation.json") + elif asset == "lowres_wide.traj": + if video_id not in missing_3dod_assets_video_ids: + file_names.append("lowres_wide.traj") + else: + raise Exception(f"No asset = {asset} in raw dataset") + return file_names + + +def download_file(url: str, file_name: str, dst: Path) -> bool: + os.makedirs(dst, exist_ok=True) + filepath = os.path.join(dst, file_name) + + if not os.path.isfile(filepath): + command = f"curl {url} -o {file_name}.tmp --fail" + print(f"Downloading file {filepath}") + try: + subprocess.check_call(command, shell=True, cwd=dst) + except Exception as error: + print(f"Error downloading {url}, error: {error}") + return False + os.rename(filepath + ".tmp", filepath) + else: + print(f"WARNING: skipping download of existing file: {filepath}") + return True + + +def unzip_file(file_name: str, dst: Path, keep_zip: bool = True) -> bool: + filepath = os.path.join(dst, file_name) + print(f"Unzipping zip file {filepath}") + command = f"unzip -oq {filepath} -d {dst}" + try: + subprocess.check_call(command, shell=True) + except Exception as error: + print(f"Error unzipping {filepath}, error: {error}") + return False + if not keep_zip: + os.remove(filepath) + return True + + +def download_laser_scanner_point_clouds_for_video(video_id: str, metadata: pd.DataFrame, download_dir: Path) -> None: + video_metadata = metadata.loc[metadata["video_id"] == float(video_id)] + visit_id = video_metadata["visit_id"].iat[0] + has_laser_scanner_point_clouds = video_metadata["has_laser_scanner_point_clouds"].iat[0] + + if not has_laser_scanner_point_clouds: + print(f"Warning: Laser scanner point clouds for video {video_id} are not available") + return + + if math.isnan(visit_id) or not visit_id.is_integer(): + print(f"Warning: Downloading laser scanner point clouds for video {video_id} failed - Bad visit id {visit_id}") + return + + visit_id = int(visit_id) # Expecting an 8 digit integer + laser_scanner_point_clouds_ids = laser_scanner_point_clouds_for_visit_id(visit_id, download_dir) + + for point_cloud_id in laser_scanner_point_clouds_ids: + download_laser_scanner_point_clouds(point_cloud_id, visit_id, download_dir) + + +def laser_scanner_point_clouds_for_visit_id(visit_id: int, download_dir: Path) -> List[str]: + point_cloud_to_visit_id_mapping_filename = "laser_scanner_point_clouds_mapping.csv" + if not os.path.exists(point_cloud_to_visit_id_mapping_filename): + point_cloud_to_visit_id_mapping_url = ( + f"{ARkitscense_url}/raw/laser_scanner_point_clouds/{point_cloud_to_visit_id_mapping_filename}" + ) + if not download_file( + point_cloud_to_visit_id_mapping_url, + point_cloud_to_visit_id_mapping_filename, + download_dir, + ): + print( + f"Error downloading point cloud for visit_id {visit_id} at location " + f"{point_cloud_to_visit_id_mapping_url}" + ) + return [] + + point_cloud_to_visit_id_mapping_filepath = os.path.join(download_dir, point_cloud_to_visit_id_mapping_filename) + point_cloud_to_visit_id_mapping = pd.read_csv(point_cloud_to_visit_id_mapping_filepath) + point_cloud_ids = point_cloud_to_visit_id_mapping.loc[ + point_cloud_to_visit_id_mapping["visit_id"] == visit_id, + ["laser_scanner_point_clouds_id"], + ] + point_cloud_ids_list = [scan_id[0] for scan_id in point_cloud_ids.values] + + return point_cloud_ids_list + + +def download_laser_scanner_point_clouds(laser_scanner_point_cloud_id: str, visit_id: int, 
download_dir: Path) -> None: + laser_scanner_point_clouds_folder_path = download_dir / POINT_CLOUDS_FOLDER / str(visit_id) + os.makedirs(laser_scanner_point_clouds_folder_path, exist_ok=True) + + for extension in [".ply", "_pose.txt"]: + filename = f"{laser_scanner_point_cloud_id}{extension}" + filepath = os.path.join(laser_scanner_point_clouds_folder_path, filename) + if os.path.exists(filepath): + return + file_url = f"{ARkitscense_url}/raw/laser_scanner_point_clouds/{visit_id}/{filename}" + download_file(file_url, filename, laser_scanner_point_clouds_folder_path) + + +def get_metadata(dataset: str, download_dir: Path) -> pd.DataFrame: + filename = "metadata.csv" + url = f"{ARkitscense_url}/threedod/{filename}" if "3dod" == dataset else f"{ARkitscense_url}/{dataset}/{filename}" + dst_folder = download_dir / dataset + dst_file = dst_folder / filename + + if not download_file(url, filename, dst_folder): + return + + metadata = pd.read_csv(dst_file) + return metadata + + +def download_data( + dataset: str, + video_ids: List[str], + dataset_splits: List[str], + download_dir: Path, + keep_zip: bool, + raw_dataset_assets: Optional[List[str]] = None, + should_download_laser_scanner_point_cloud: bool = False, +) -> None: + """ + Downloads data from the specified dataset and video IDs to the given download directory. + + Args: + ---- + dataset: the name of the dataset to download from (raw, 3dod, or upsampling) + video_ids: the list of video IDs to download data for + dataset_splits: the list of splits for each video ID (train, validation, or test) + download_dir: the directory to download data to + keep_zip: whether to keep the downloaded zip files after extracting them + raw_dataset_assets: a list of asset types to download from the raw dataset, if dataset is "raw" + should_download_laser_scanner_point_cloud: whether to download the laser scanner point cloud data, if available + + Returns: None + """ + metadata = get_metadata(dataset, download_dir) + if None is metadata: + print(f"Error retrieving metadata for dataset {dataset}") + return + + for video_id in sorted(set(video_ids)): + split = dataset_splits[video_ids.index(video_id)] + dst_dir = download_dir / dataset / split + if dataset == "raw": + url_prefix = "" + file_names = [] + if not raw_dataset_assets: + print(f"Warning: No raw assets given for video id {video_id}") + else: + dst_dir = dst_dir / str(video_id) + url_prefix = f"{ARkitscense_url}/raw/{split}/{video_id}" + "/{}" + file_names = raw_files(video_id, raw_dataset_assets, metadata) + elif dataset == "3dod": + url_prefix = f"{ARkitscense_url}/threedod/{split}" + "/{}" + file_names = [ + f"{video_id}.zip", + ] + elif dataset == "upsampling": + url_prefix = f"{ARkitscense_url}/upsampling/{split}" + "/{}" + file_names = [ + f"{video_id}.zip", + ] + else: + raise Exception(f"No such dataset = {dataset}") + + if should_download_laser_scanner_point_cloud and dataset == "raw": + # Point clouds only available for the raw dataset + download_laser_scanner_point_clouds_for_video(video_id, metadata, download_dir) + + for file_name in file_names: + dst_path = os.path.join(dst_dir, file_name) + url = url_prefix.format(file_name) + + if not file_name.endswith(".zip") or not os.path.isdir(dst_path[: -len(".zip")]): + download_file(url, dst_path, dst_dir) + else: + print(f"WARNING: skipping download of existing zip file: {dst_path}") + if file_name.endswith(".zip") and os.path.isfile(dst_path): + unzip_file(file_name, dst_dir, keep_zip) + + +def ensure_recording_downloaded(video_id: str, 
include_highres: bool) -> Path: + """Only downloads from validation set.""" + data_path = DATASET_DIR / "raw" / "Validation" / video_id + assets_to_download = [ + "lowres_wide", + "lowres_depth", + "lowres_wide_intrinsics", + "lowres_wide.traj", + "annotation", + "mesh", + ] + if include_highres: + assets_to_download.extend(["highres_depth", "wide", "wide_intrinsics"]) + download_data( + dataset="raw", + video_ids=[video_id], + dataset_splits=[VALIDATION], + download_dir=DATASET_DIR, + keep_zip=False, + raw_dataset_assets=assets_to_download, + should_download_laser_scanner_point_cloud=False, + ) + return data_path + + +def ensure_recording_available(video_id: str, include_highres: bool) -> Path: + """ + Returns the path to the recording for a given video_id. + + Args: + video_id (str): Identifier for the recording. + + Returns + ------- + Path: Path object representing the path to the recording. + + Raises + ------ + AssertionError: If the recording path does not exist. + """ + recording_path = ensure_recording_downloaded(video_id, include_highres) + assert recording_path.exists(), f"Recording path {recording_path} does not exist." + return recording_path # Return the path to the recording diff --git a/examples/python/arkitscenes/main.py b/examples/python/arkitscenes/main.py new file mode 100755 index 000000000000..5b2c62027022 --- /dev/null +++ b/examples/python/arkitscenes/main.py @@ -0,0 +1,450 @@ +#!/usr/bin/env python3 +import argparse +import json +import os +from pathlib import Path +from typing import Any, Dict, List, Tuple + +import cv2 +import matplotlib.pyplot as plt +import numpy as np +import numpy.typing as npt +import rerun as rr +import trimesh +from download_dataset import AVAILABLE_RECORDINGS, ensure_recording_available +from scipy.spatial.transform import Rotation as R +from tqdm import tqdm + +# hack for now since dataset does not provide orientation information, only known after initial visual inspection +ORIENTATION = { + "48458663": "landscape", + "42444949": "portrait", + "41069046": "portrait", + "41125722": "portrait", + "41125763": "portrait", + "42446167": "portrait", +} +assert len(ORIENTATION) == len(AVAILABLE_RECORDINGS) +assert set(ORIENTATION.keys()) == set(AVAILABLE_RECORDINGS) + + +def load_json(js_path: Path) -> Dict[str, Any]: + with open(js_path, "r") as f: + json_data = json.load(f) # type: Dict[str, Any] + return json_data + + +def log_annotated_bboxes( + annotation: Dict[str, Any] +) -> Tuple[npt.NDArray[np.float64], List[str], List[Tuple[int, int, int, int]]]: + """ + Logs annotated oriented bounding boxes to Rerun. + + We currently calculate and return the 3D bounding boxes keypoints, labels, and colors for each object to log them in + each camera frame TODO(pablovela5620): Once #1581 is resolved this can be removed. 
+ + annotation json file + | |-- label: object name of bounding box + | |-- axesLengths[x, y, z]: size of the origin bounding-box before transforming + | |-- centroid[]: the translation matrix (1,3) of bounding-box + | |-- normalizedAxes[]: the rotation matrix (3,3) of bounding-box + """ + bbox_list = [] + bbox_labels = [] + num_objects = len(annotation["data"]) + # Generate a color per object that can be reused across both 3D obb and their 2D projections + # TODO(pablovela5620): Once #1581 or #1728 is resolved this can be removed + color_positions = np.linspace(0, 1, num_objects) + colormap = plt.cm.get_cmap("viridis") + color_array_float = [colormap(pos) for pos in color_positions] + color_list = [(int(r * 255), int(g * 255), int(b * 255), int(a * 255)) for r, g, b, a in color_array_float] + + for i, label_info in enumerate(annotation["data"]): + uid = label_info["uid"] + label = label_info["label"] + + # TODO(pablovela5620): half this value once #1701 is resolved + scale = np.array(label_info["segments"]["obbAligned"]["axesLengths"]).reshape(-1, 3)[0] + transform = np.array(label_info["segments"]["obbAligned"]["centroid"]).reshape(-1, 3)[0] + rotation = np.array(label_info["segments"]["obbAligned"]["normalizedAxes"]).reshape(3, 3) + + rot = R.from_matrix(rotation).inv() + + rr.log_obb( + f"world/annotations/box-{uid}-{label}", + half_size=scale, + position=transform, + rotation_q=rot.as_quat(), + label=label, + color=color_list[i], + timeless=True, + ) + + box3d = compute_box_3d(scale, transform, rotation) + bbox_list.append(box3d) + bbox_labels.append(label) + bboxes_3d = np.array(bbox_list) + return bboxes_3d, bbox_labels, color_list + + +def compute_box_3d( + scale: npt.NDArray[np.float64], transform: npt.NDArray[np.float64], rotation: npt.NDArray[np.float64] +) -> npt.NDArray[np.float64]: + """ + Given obb compute 3d keypoints of the box. + + TODO(pablovela5620): Once #1581 is resolved this can be removed + """ + scale = scale.tolist() + scales = [i / 2 for i in scale] + length, height, width = scales + center = np.reshape(transform, (-1, 3)) + center = center.reshape(3) + x_corners = [length, length, -length, -length, length, length, -length, -length] + y_corners = [height, -height, -height, height, height, -height, -height, height] + z_corners = [width, width, width, width, -width, -width, -width, -width] + corners_3d = np.dot(np.transpose(rotation), np.vstack([x_corners, y_corners, z_corners])) + + corners_3d[0, :] += center[0] + corners_3d[1, :] += center[1] + corners_3d[2, :] += center[2] + bbox3d_raw = np.transpose(corners_3d) + return bbox3d_raw + + +def log_line_segments( + entity_path: str, bboxes_2d_filtered: npt.NDArray[np.float64], color: Tuple[int, int, int, int], label: str +) -> None: + """ + Generates line segments for each object's bounding box in 2d. + + Box corner order that we return is of the format below: + 6 -------- 7 + /| /| + 5 -------- 4 . + | | | | + . 2 -------- 3 + |/ |/ + 1 -------- 0 + + TODO(pablovela5620): Once #1581 is resolved this can be removed + + :param bboxes_2d_filtered: + A numpy array of shape (8, 2), representing the filtered 2D keypoints of the 3D bounding boxes. + :return: A numpy array of shape (24, 2), representing the line segments for each object's bounding boxes. + Even and odd indices represent the start and end points of each line segment respectively. 
+ """ + + # Calculate the centroid of the 2D keypoints + valid_points = bboxes_2d_filtered[~np.isnan(bboxes_2d_filtered).any(axis=1)] + + # log centroid and add label so that object label is visible in the 2d view + if valid_points.size > 0: + centroid = valid_points.mean(axis=0) + rr.log_point(f"{entity_path}/centroid", centroid, color=color, label=label) + else: + pass + + # fmt: off + segments = np.array([ + # bottom of bbox + bboxes_2d_filtered[0], bboxes_2d_filtered[1], + bboxes_2d_filtered[1], bboxes_2d_filtered[2], + bboxes_2d_filtered[2], bboxes_2d_filtered[3], + bboxes_2d_filtered[3], bboxes_2d_filtered[0], + + # top of bbox + bboxes_2d_filtered[4], bboxes_2d_filtered[5], + bboxes_2d_filtered[5], bboxes_2d_filtered[6], + bboxes_2d_filtered[6], bboxes_2d_filtered[7], + bboxes_2d_filtered[7], bboxes_2d_filtered[4], + + # sides of bbox + bboxes_2d_filtered[0], bboxes_2d_filtered[4], + bboxes_2d_filtered[1], bboxes_2d_filtered[5], + bboxes_2d_filtered[2], bboxes_2d_filtered[6], + bboxes_2d_filtered[3], bboxes_2d_filtered[7] + ], dtype=np.float32) + + rr.log_line_segments(entity_path, segments, color=color) + + +def project_3d_bboxes_to_2d_keypoints( + bboxes_3d: npt.NDArray[np.float64], + camera_from_world: Tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]], + intrinsic: npt.NDArray[np.float64], + img_width: int, + img_height: int, +) -> npt.NDArray[np.float64]: + """ + Returns 2D keypoints of the 3D bounding box in the camera view. + + TODO(pablovela5620): Once #1581 is resolved this can be removed + Args: + bboxes_3d: (nObjects, 8, 3) containing the 3D bounding box keypoints in world frame. + camera_from_world: Tuple containing the camera translation and rotation_quaternion in world frame. + intrinsic: (3,3) containing the camera intrinsic matrix. + img_width: Width of the image. + img_height: Height of the image. + + Returns + ------- + bboxes_2d_filtered: + A numpy array of shape (nObjects, 8, 2), representing the 2D keypoints of the 3D bounding boxes. That + are within the image frame. 
+ """ + + translation, rotation_q = camera_from_world + rotation = R.from_quat(rotation_q) + + # Transform 3D keypoints from world to camera frame + world_to_camera_rotation = rotation.as_matrix() + world_to_camera_translation = translation.reshape(3, 1) + # Tile translation to match bounding box shape, (nObjects, 1, 3) + world_to_camera_translation_tiled = np.tile(world_to_camera_translation.T, (bboxes_3d.shape[0], 1, 1)) + # Transform 3D bounding box keypoints from world to camera frame to filter out points behind the camera + camera_points = ( + np.einsum("ij,afj->afi", world_to_camera_rotation, bboxes_3d[..., :3]) + world_to_camera_translation_tiled + ) + # Check if the points are in front of the camera + depth_mask = camera_points[..., 2] > 0 + # convert to transformation matrix shape of (3, 4) + world_to_camera = np.hstack([world_to_camera_rotation, world_to_camera_translation]) + transformation_matrix = intrinsic @ world_to_camera + # add batch dimension to match bounding box shape, (nObjects, 3, 4) + transformation_matrix = np.tile(transformation_matrix, (bboxes_3d.shape[0], 1, 1)) + # bboxes_3d: [nObjects, 8, 3] -> [nObjects, 8, 4] to allow for batch projection + bboxes_3d = np.concatenate([bboxes_3d, np.ones((bboxes_3d.shape[0], bboxes_3d.shape[1], 1))], axis=-1) + # Apply depth mask to filter out points behind the camera + bboxes_3d[~depth_mask] = np.nan + # batch projection of points using einsum + bboxes_2d = np.einsum("vab,fnb->vfna", transformation_matrix, bboxes_3d) + bboxes_2d = bboxes_2d[..., :2] / bboxes_2d[..., 2:] + # nViews irrelevant, squeeze out + bboxes_2d = bboxes_2d[0] + + # Filter out keypoints that are not within the frame + mask_x = (bboxes_2d[:, :, 0] >= 0) & (bboxes_2d[:, :, 0] < img_width) + mask_y = (bboxes_2d[:, :, 1] >= 0) & (bboxes_2d[:, :, 1] < img_height) + mask = mask_x & mask_y + bboxes_2d_filtered = np.where(mask[..., np.newaxis], bboxes_2d, np.nan) + + return bboxes_2d_filtered + + +def log_camera( + intri_path: Path, + frame_id: str, + poses_from_traj: Dict[str, Tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]], + entity_id: str, + bboxes: npt.NDArray[np.float64], + bbox_labels: List[str], + color_list: List[Tuple[int, int, int, int]], +) -> None: + """Logs camera transform and 3D bounding boxes in the image frame.""" + w, h, fx, fy, cx, cy = np.loadtxt(intri_path) + intrinsic = np.array([[fx, 0, cx], [0, fy, cy], [0, 0, 1]]) + camera_from_world = poses_from_traj[frame_id] + + # TODO(pablovela5620): Once #1581 is resolved this can be removed + # Project 3D bounding boxes into 2D image + bboxes_2d = project_3d_bboxes_to_2d_keypoints(bboxes, camera_from_world, intrinsic, img_width=w, img_height=h) + # clear previous centroid labels + rr.log_cleared(f"{entity_id}/bbox-2d-segments", recursive=True) + # Log line segments for each bounding box in the image + for i, (label, bbox_2d) in enumerate(zip(bbox_labels, bboxes_2d)): + log_line_segments(f"{entity_id}/bbox-2d-segments/{label}", bbox_2d.reshape(-1, 2), color_list[i], label) + + rr.log_rigid3( + # pathlib makes it easy to get the parent, but log_rigid requires a string + str(Path(entity_id).parent), + child_from_parent=camera_from_world, + xyz="RDF", # X=Right, Y=Down, Z=Forward + ) + rr.log_pinhole(f"{entity_id}", child_from_parent=intrinsic, width=w, height=h) + + +def read_camera_from_world(traj_string: str) -> Tuple[str, Tuple[npt.NDArray[np.float64], npt.NDArray[np.float64]]]: + """ + Reads out camera_from_world transform from trajectory string. 
+ + Args: + traj_string: A space-delimited file where each line represents a camera position at a particular timestamp. + The file has seven columns: + * Column 1: timestamp + * Columns 2-4: rotation (axis-angle representation in radians) + * Columns 5-7: translation (usually in meters) + + Returns + ------- + timestamp: float + timestamp in seconds + camera_from_world: tuple of two numpy arrays + A tuple containing a translation vector and a quaternion that represent the camera_from_world transform + + Raises + ------ + AssertionError: If the input string does not contain 7 tokens. + """ + tokens = traj_string.split() # Split the input string into tokens + assert len(tokens) == 7, f"Input string must have 7 tokens, but found {len(tokens)}." + ts: str = tokens[0] # Extract timestamp from the first token + + # Extract rotation from the second to fourth tokens + angle_axis = [float(tokens[1]), float(tokens[2]), float(tokens[3])] + rotation = R.from_rotvec(np.asarray(angle_axis)) + + # Extract translation from the fifth to seventh tokens + translation = np.asarray([float(tokens[4]), float(tokens[5]), float(tokens[6])]) + + # Create tuple in format log_rigid3 expects + camera_from_world = (translation, rotation.as_quat()) + + return (ts, camera_from_world) + + +def find_closest_frame_id(target_id: str, frame_ids: Dict[str, Any]) -> str: + """Finds the closest frame id to the target id.""" + target_value = float(target_id) + closest_id = min(frame_ids.keys(), key=lambda x: abs(float(x) - target_value)) + return closest_id + + +def log_arkit(recording_path: Path, include_highres: bool) -> None: + """ + Logs ARKit recording data using Rerun. + + Args: + recording_path (Path): The path to the ARKit recording. + + Returns + ------- + None + """ + video_id = recording_path.stem + lowres_image_dir = recording_path / "lowres_wide" + image_dir = recording_path / "wide" + lowres_depth_dir = recording_path / "lowres_depth" + depth_dir = recording_path / "highres_depth" + lowres_intrinsics_dir = recording_path / "lowres_wide_intrinsics" + intrinsics_dir = recording_path / "wide_intrinsics" + traj_path = recording_path / "lowres_wide.traj" + + # frame_ids are indexed by timestamps, you can see more info here + # https://github.com/apple/ARKitScenes/blob/main/threedod/README.md#data-organization-and-format-of-input-data + depth_filenames = [x.name for x in sorted(lowres_depth_dir.iterdir())] + lowres_frame_ids = [x.split(".png")[0].split("_")[1] for x in depth_filenames] + lowres_frame_ids.sort() + + # dict of timestamp to pose which is a tuple of translation and quaternion + camera_from_world_dict = {} + with open(traj_path, "r", encoding="utf-8") as f: + trajectory = f.readlines() + + for line in trajectory: + timestamp, camera_from_world = read_camera_from_world(line) + # round timestamp to 3 decimal places as seen in the original repo here + # https://github.com/apple/ARKitScenes/blob/e2e975128a0a9695ea56fa215fe76b4295241538/threedod/benchmark_scripts/utils/tenFpsDataLoader.py#L247 + timestamp = f"{round(float(timestamp), 3):.3f}" + camera_from_world_dict[timestamp] = camera_from_world + + rr.log_view_coordinates("world", up="+Z", right_handed=True, timeless=True) + ply_path = recording_path / f"{recording_path.stem}_3dod_mesh.ply" + print(f"Loading {ply_path}…") + assert os.path.isfile(ply_path), f"Failed to find {ply_path}" + + mesh_ply = trimesh.load(str(ply_path)) + rr.log_mesh( + "world/mesh", + positions=mesh_ply.vertices, + indices=mesh_ply.faces, + 
vertex_colors=mesh_ply.visual.vertex_colors, + timeless=True, + ) + + # load the obb annotations and log them in the world frame + bbox_annotations_path = recording_path / f"{recording_path.stem}_3dod_annotation.json" + annotation = load_json(bbox_annotations_path) + bboxes_3d, bbox_labels, colors_list = log_annotated_bboxes(annotation) + + lowres_posed_entity_id = "world/camera_posed_lowres/image_posed_lowres" + highres_entity_id = "world/camera_highres/image_highres" + + print("Processing frames…") + for frame_timestamp in tqdm(lowres_frame_ids): + # frame_id is equivalent to timestamp + rr.set_time_seconds("time", float(frame_timestamp)) + # load the lowres image and depth + bgr = cv2.imread(f"{lowres_image_dir}/{video_id}_{frame_timestamp}.png") + rgb = cv2.cvtColor(bgr, cv2.COLOR_BGR2RGB) + depth = cv2.imread(f"{lowres_depth_dir}/{video_id}_{frame_timestamp}.png", cv2.IMREAD_ANYDEPTH) + + high_res_exists: bool = (image_dir / f"{video_id}_{frame_timestamp}.png").exists() and include_highres + + # Log the camera transforms: + if frame_timestamp in camera_from_world_dict: + lowres_intri_path = lowres_intrinsics_dir / f"{video_id}_{frame_timestamp}.pincam" + log_camera( + lowres_intri_path, + frame_timestamp, + camera_from_world_dict, + lowres_posed_entity_id, + bboxes_3d, + bbox_labels, + colors_list, + ) + + rr.log_image(f"{lowres_posed_entity_id}/rgb", rgb) + rr.log_depth_image(f"{lowres_posed_entity_id}/depth", depth, meter=1000) + + # log the high res camera + if high_res_exists: + rr.set_time_seconds("time high resolution", float(frame_timestamp)) + # only low res camera has a trajectory, high res does not so need to find the closest low res frame id + closest_lowres_frame_id = find_closest_frame_id(frame_timestamp, camera_from_world_dict) + highres_intri_path = intrinsics_dir / f"{video_id}_{frame_timestamp}.pincam" + log_camera( + highres_intri_path, + closest_lowres_frame_id, + camera_from_world_dict, + highres_entity_id, + bboxes_3d, + bbox_labels, + colors_list, + ) + + # load the highres image and depth if they exist + highres_bgr = cv2.imread(f"{image_dir}/{video_id}_{frame_timestamp}.png") + highres_depth = cv2.imread(f"{depth_dir}/{video_id}_{frame_timestamp}.png", cv2.IMREAD_ANYDEPTH) + + highres_rgb = cv2.cvtColor(highres_bgr, cv2.COLOR_BGR2RGB) + rr.log_image(f"{highres_entity_id}/rgb", highres_rgb) + rr.log_depth_image(f"{highres_entity_id}/depth", highres_depth, meter=1000) + + +def main() -> None: + parser = argparse.ArgumentParser(description="Visualizes the ARKitScenes dataset using the Rerun SDK.") + parser.add_argument( + "--video-id", + type=str, + choices=AVAILABLE_RECORDINGS, + default=AVAILABLE_RECORDINGS[0], + help="Video ID of the ARKitScenes Dataset", + ) + parser.add_argument( + "--include-highres", + action="store_true", + help="Include the high resolution camera and depth images", + ) + rr.script_add_args(parser) + args = parser.parse_args() + + rr.script_setup(args, "arkitscenes") + recording_path = ensure_recording_available(args.video_id, args.include_highres) + log_arkit(recording_path, args.include_highres) + + rr.script_teardown(args) + + +if __name__ == "__main__": + main() diff --git a/examples/python/arkitscenes/requirements.txt b/examples/python/arkitscenes/requirements.txt new file mode 100644 index 000000000000..132f56a29b4d --- /dev/null +++ b/examples/python/arkitscenes/requirements.txt @@ -0,0 +1,7 @@ +rerun-sdk +numpy +pandas +opencv-python +tqdm +scipy +trimesh From 19466837c7590331d3a20dd1f904ec5a7542757c Mon Sep 17 00:00:00 2001 From: 
Emil Ernerfeldt Date: Tue, 4 Apr 2023 14:47:48 +0200 Subject: [PATCH 11/89] Fix log_obb usage (#1761)
* Make sure all log_obb uses half_size correctly
* Remove outdated link from README
* Fix docstring of save
* Force named arguments of log_scalar
* Add link to --memory-limit docs
* update ros example
--- README.md | 3 +-- examples/python/arkitscenes/main.py | 17 +++++++---------- examples/python/clock/main.py | 2 +- examples/python/objectron/main.py | 6 +++--- examples/python/ros/main.py | 2 +- rerun_py/rerun_sdk/rerun/__init__.py | 4 +--- rerun_py/rerun_sdk/rerun/log/bounding_box.py | 13 +++++++------ rerun_py/rerun_sdk/rerun/log/scalar.py | 1 + 8 files changed, 22 insertions(+), 26 deletions(-)
diff --git a/README.md b/README.md index 2f5740a31b4c..bae9d9f0f30c 100644 --- a/README.md +++ b/README.md @@ -60,9 +60,8 @@ _Expect breaking changes!_ Some shortcomings: * Big point clouds (1M+) are slow ([#1136](https://github.com/rerun-io/rerun/issues/1136)) * The data you want to visualize must fit in RAM. - - See for how to bound memory use + - See for how to bound memory use - We plan on having a disk-based data store some time in the future - - Additionally, Rerun is using more memory than it should at the moment ([#1242](https://github.com/rerun-io/rerun/pull/1242)) * The Rust library takes a long time to compile - We have way too many big dependencies, and we are planning on improving the situation ([#1316](https://github.com/rerun-io/rerun/pull/1316))
diff --git a/examples/python/arkitscenes/main.py b/examples/python/arkitscenes/main.py index 5b2c62027022..021f2ba94f88 100755 --- a/examples/python/arkitscenes/main.py +++ b/examples/python/arkitscenes/main.py @@ -63,24 +63,23 @@ def log_annotated_bboxes( uid = label_info["uid"] label = label_info["label"] - # TODO(pablovela5620): half this value once #1701 is resolved - scale = np.array(label_info["segments"]["obbAligned"]["axesLengths"]).reshape(-1, 3)[0] - transform = np.array(label_info["segments"]["obbAligned"]["centroid"]).reshape(-1, 3)[0] + half_size = 0.5 * np.array(label_info["segments"]["obbAligned"]["axesLengths"]).reshape(-1, 3)[0] + centroid = np.array(label_info["segments"]["obbAligned"]["centroid"]).reshape(-1, 3)[0] rotation = np.array(label_info["segments"]["obbAligned"]["normalizedAxes"]).reshape(3, 3) rot = R.from_matrix(rotation).inv() rr.log_obb( f"world/annotations/box-{uid}-{label}", - half_size=scale, - position=transform, + half_size=half_size, + position=centroid, rotation_q=rot.as_quat(), label=label, color=color_list[i], timeless=True, ) - box3d = compute_box_3d(scale, transform, rotation) + box3d = compute_box_3d(half_size, centroid, rotation) bbox_list.append(box3d) bbox_labels.append(label) bboxes_3d = np.array(bbox_list) @@ -88,16 +87,14 @@ def compute_box_3d( - scale: npt.NDArray[np.float64], transform: npt.NDArray[np.float64], rotation: npt.NDArray[np.float64] + half_size: npt.NDArray[np.float64], transform: npt.NDArray[np.float64], rotation: npt.NDArray[np.float64] ) -> npt.NDArray[np.float64]: """ Given obb compute 3d keypoints of the box.
TODO(pablovela5620): Once #1581 is resolved this can be removed """ - scale = scale.tolist() - scales = [i / 2 for i in scale] - length, height, width = scales + length, height, width = half_size.tolist() center = np.reshape(transform, (-1, 3)) center = center.reshape(3) x_corners = [length, length, -length, -length, length, length, -length, -length] diff --git a/examples/python/clock/main.py b/examples/python/clock/main.py index a5a55fc38d04..1eff8821373c 100755 --- a/examples/python/clock/main.py +++ b/examples/python/clock/main.py @@ -34,7 +34,7 @@ def rotate(angle: float, len: float) -> Tuple[float, float, float]: rr.log_obb( "world/frame", - half_size=[2 * LENGTH_S, 2 * LENGTH_S, 1.0], + half_size=[LENGTH_S, LENGTH_S, 1.0], position=[0.0, 0.0, 0.0], rotation_q=[0.0, 0.0, 0.0, 0.0], timeless=True, diff --git a/examples/python/objectron/main.py b/examples/python/objectron/main.py index d7dc5237ddd5..f4a8efc60a45 100755 --- a/examples/python/objectron/main.py +++ b/examples/python/objectron/main.py @@ -181,9 +181,9 @@ def log_annotated_bboxes(bboxes: Iterable[Object]) -> None: rot = R.from_matrix(np.asarray(bbox.rotation).reshape((3, 3))) rr.log_obb( f"world/annotations/box-{bbox.id}", - bbox.scale, - bbox.translation, - rot.as_quat(), + half_size=0.5 * np.array(bbox.scale), + position=bbox.translation, + rotation_q=rot.as_quat(), color=[160, 230, 130, 255], label=bbox.category, timeless=True, diff --git a/examples/python/ros/main.py b/examples/python/ros/main.py index 15e268a2a4fe..0b83bc88d391 100644 --- a/examples/python/ros/main.py +++ b/examples/python/ros/main.py @@ -83,7 +83,7 @@ def __init__(self) -> None: # # TODO(jleibs): Log the real map once [#1531](https://github.com/rerun-io/rerun/issues/1531) is merged rr.log_obb( "map/box", - half_size=[6, 6, 2], + half_size=[3, 3, 1], position=[0, 0, 1], color=[255, 255, 255, 255], timeless=True, diff --git a/rerun_py/rerun_sdk/rerun/__init__.py b/rerun_py/rerun_sdk/rerun/__init__.py index 00b40749ff25..efcccb40766c 100644 --- a/rerun_py/rerun_sdk/rerun/__init__.py +++ b/rerun_py/rerun_sdk/rerun/__init__.py @@ -367,9 +367,7 @@ def disconnect() -> None: def save(path: str) -> None: """ - Save previously logged data to a file. - - This only works if you have not called `connect`. + Stream all log-data to a file. 
Parameters ---------- diff --git a/rerun_py/rerun_sdk/rerun/log/bounding_box.py b/rerun_py/rerun_sdk/rerun/log/bounding_box.py index 2e1ef6b50b26..cda8b6e10d03 100644 --- a/rerun_py/rerun_sdk/rerun/log/bounding_box.py +++ b/rerun_py/rerun_sdk/rerun/log/bounding_box.py @@ -24,6 +24,7 @@ @log_decorator def log_obb( entity_path: str, + *, half_size: Optional[npt.ArrayLike], position: Optional[npt.ArrayLike] = None, rotation_q: Optional[npt.ArrayLike] = None, @@ -72,12 +73,12 @@ def log_obb( splats: Dict[str, Any] = {} if half_size is not None: - size = np.require(half_size, dtype="float32") + half_size = np.require(half_size, dtype="float32") - if size.shape[0] == 3: - instanced["rerun.box3d"] = Box3DArray.from_numpy(size.reshape(1, 3)) + if half_size.shape[0] == 3: + instanced["rerun.box3d"] = Box3DArray.from_numpy(half_size.reshape(1, 3)) else: - raise TypeError("Position should be 1x3") + raise TypeError("half_size should be 1x3") if position is not None: position = np.require(position, dtype="float32") @@ -85,7 +86,7 @@ def log_obb( if position.shape[0] == 3: instanced["rerun.vec3d"] = Vec3DArray.from_numpy(position.reshape(1, 3)) else: - raise TypeError("Position should be 1x3") + raise TypeError("position should be 1x3") if rotation_q is not None: rotation = np.require(rotation_q, dtype="float32") @@ -93,7 +94,7 @@ def log_obb( if rotation.shape[0] == 4: instanced["rerun.quaternion"] = QuaternionArray.from_numpy(rotation.reshape(1, 4)) else: - raise TypeError("Rotation should be 1x4") + raise TypeError("rotation should be 1x4") if color: colors = _normalize_colors([color]) diff --git a/rerun_py/rerun_sdk/rerun/log/scalar.py b/rerun_py/rerun_sdk/rerun/log/scalar.py index 98693934b425..66a6d49ff733 100644 --- a/rerun_py/rerun_sdk/rerun/log/scalar.py +++ b/rerun_py/rerun_sdk/rerun/log/scalar.py @@ -21,6 +21,7 @@ def log_scalar( entity_path: str, scalar: float, + *, label: Optional[str] = None, color: Optional[Sequence[int]] = None, radius: Optional[float] = None, From 9c1babea56a03f96d88d9600c38624e4e7555bb7 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Tue, 4 Apr 2023 14:59:18 +0200 Subject: [PATCH 12/89] Python SDK: document that we also accept colors in 0-1 floats (#1740) * Python SDK: document that we also accept colors in 0-1 floats * Assume float colors to be in gamma-space, and document that * Update arkitscenes example * Fix bug * typo * py-format --- examples/python/arkitscenes/main.py | 21 ++++++++----------- rerun_py/rerun_sdk/rerun/log/__init__.py | 13 +++++++++--- rerun_py/rerun_sdk/rerun/log/annotation.py | 2 +- rerun_py/rerun_sdk/rerun/log/arrow.py | 8 +++---- rerun_py/rerun_sdk/rerun/log/bounding_box.py | 8 +++---- rerun_py/rerun_sdk/rerun/log/lines.py | 14 ++++++------- rerun_py/rerun_sdk/rerun/log/mesh.py | 3 ++- rerun_py/rerun_sdk/rerun/log/points.py | 6 ++++-- rerun_py/rerun_sdk/rerun/log/rects.py | 6 +++--- rerun_py/rerun_sdk/rerun/log/scalar.py | 10 ++++----- rerun_py/rerun_sdk/rerun/log/text.py | 8 +++---- rerun_py/rerun_sdk/rerun/log/text_internal.py | 8 +++---- 12 files changed, 57 insertions(+), 50 deletions(-) diff --git a/examples/python/arkitscenes/main.py b/examples/python/arkitscenes/main.py index 021f2ba94f88..e05277eff918 100755 --- a/examples/python/arkitscenes/main.py +++ b/examples/python/arkitscenes/main.py @@ -15,6 +15,8 @@ from scipy.spatial.transform import Rotation as R from tqdm import tqdm +Color = Tuple[float, float, float, float] + # hack for now since dataset does not provide orientation information, only known after initial visual 
inspection ORIENTATION = { "48458663": "landscape", @@ -34,9 +36,7 @@ def load_json(js_path: Path) -> Dict[str, Any]: return json_data -def log_annotated_bboxes( - annotation: Dict[str, Any] -) -> Tuple[npt.NDArray[np.float64], List[str], List[Tuple[int, int, int, int]]]: +def log_annotated_bboxes(annotation: Dict[str, Any]) -> Tuple[npt.NDArray[np.float64], List[str], List[Color]]: """ Logs annotated oriented bounding boxes to Rerun. @@ -56,8 +56,7 @@ def log_annotated_bboxes( # TODO(pablovela5620): Once #1581 or #1728 is resolved this can be removed color_positions = np.linspace(0, 1, num_objects) colormap = plt.cm.get_cmap("viridis") - color_array_float = [colormap(pos) for pos in color_positions] - color_list = [(int(r * 255), int(g * 255), int(b * 255), int(a * 255)) for r, g, b, a in color_array_float] + colors = [colormap(pos) for pos in color_positions] for i, label_info in enumerate(annotation["data"]): uid = label_info["uid"] @@ -75,7 +74,7 @@ def log_annotated_bboxes( position=centroid, rotation_q=rot.as_quat(), label=label, - color=color_list[i], + color=colors[i], timeless=True, ) @@ -83,7 +82,7 @@ def log_annotated_bboxes( bbox_list.append(box3d) bbox_labels.append(label) bboxes_3d = np.array(bbox_list) - return bboxes_3d, bbox_labels, color_list + return bboxes_3d, bbox_labels, colors def compute_box_3d( @@ -109,9 +108,7 @@ def compute_box_3d( return bbox3d_raw -def log_line_segments( - entity_path: str, bboxes_2d_filtered: npt.NDArray[np.float64], color: Tuple[int, int, int, int], label: str -) -> None: +def log_line_segments(entity_path: str, bboxes_2d_filtered: npt.NDArray[np.float64], color: Color, label: str) -> None: """ Generates line segments for each object's bounding box in 2d. @@ -236,7 +233,7 @@ def log_camera( entity_id: str, bboxes: npt.NDArray[np.float64], bbox_labels: List[str], - color_list: List[Tuple[int, int, int, int]], + colors: List[Color], ) -> None: """Logs camera transform and 3D bounding boxes in the image frame.""" w, h, fx, fy, cx, cy = np.loadtxt(intri_path) @@ -250,7 +247,7 @@ def log_camera( rr.log_cleared(f"{entity_id}/bbox-2d-segments", recursive=True) # Log line segments for each bounding box in the image for i, (label, bbox_2d) in enumerate(zip(bbox_labels, bboxes_2d)): - log_line_segments(f"{entity_id}/bbox-2d-segments/{label}", bbox_2d.reshape(-1, 2), color_list[i], label) + log_line_segments(f"{entity_id}/bbox-2d-segments/{label}", bbox_2d.reshape(-1, 2), colors[i], label) rr.log_rigid3( # pathlib makes it easy to get the parent, but log_rigid requires a string diff --git a/rerun_py/rerun_sdk/rerun/log/__init__.py b/rerun_py/rerun_sdk/rerun/log/__init__.py index 69c8e8fe6328..b36abdacd5c6 100644 --- a/rerun_py/rerun_sdk/rerun/log/__init__.py +++ b/rerun_py/rerun_sdk/rerun/log/__init__.py @@ -4,7 +4,6 @@ import numpy.typing as npt from rerun import bindings -from rerun.color_conversion import linear_to_gamma_u8_pixel __all__ = [ "annotation", @@ -43,7 +42,14 @@ def _to_sequence(array: Optional[npt.ArrayLike]) -> Optional[Sequence[float]]: def _normalize_colors(colors: Optional[Union[Color, Colors]] = None) -> npt.NDArray[np.uint8]: - """Normalize flexible colors arrays.""" + """ + Normalize flexible colors arrays. + + Float colors are assumed to be in 0-1 gamma sRGB space. + All other colors are assumed to be in 0-255 gamma sRGB space. + + If there is an alpha, we assume it is in linear space, and separate (NOT pre-multiplied). + """ if colors is None: # An empty array represents no colors. 
return np.array((), dtype=np.uint8) @@ -52,7 +58,8 @@ def _normalize_colors(colors: Optional[Union[Color, Colors]] = None) -> npt.NDAr # Rust expects colors in 0-255 uint8 if colors_array.dtype.type in [np.float32, np.float64]: - return linear_to_gamma_u8_pixel(linear=colors_array) + # Assume gamma-space colors + return np.require(np.round(colors_array * 255.0), np.uint8) return np.require(colors_array, np.uint8) diff --git a/rerun_py/rerun_sdk/rerun/log/annotation.py b/rerun_py/rerun_sdk/rerun/log/annotation.py index 47980b4d771e..c941fc22ccb0 100644 --- a/rerun_py/rerun_sdk/rerun/log/annotation.py +++ b/rerun_py/rerun_sdk/rerun/log/annotation.py @@ -90,7 +90,7 @@ def log_annotation_context( Each ClassDescription must include an annotation info with an id, which will be used for matching the class and may optionally include a label and color. - Colors should either be in 0-255 gamma space or in 0-1 linear space. Colors + Colors should either be in 0-255 gamma space or in 0-1 gamma space. Colors can be RGB or RGBA. These can either be specified verbosely as: diff --git a/rerun_py/rerun_sdk/rerun/log/arrow.py b/rerun_py/rerun_sdk/rerun/log/arrow.py index b502baa6a0e9..7ad72d06fb80 100644 --- a/rerun_py/rerun_sdk/rerun/log/arrow.py +++ b/rerun_py/rerun_sdk/rerun/log/arrow.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Sequence +from typing import Any, Dict, Optional import numpy as np import numpy.typing as npt @@ -9,7 +9,7 @@ from rerun.components.instance import InstanceArray from rerun.components.label import LabelArray from rerun.components.radius import RadiusArray -from rerun.log import _normalize_colors, _normalize_radii +from rerun.log import Color, _normalize_colors, _normalize_radii from rerun.log.extension_components import _add_extension_components from rerun.log.log_decorator import log_decorator @@ -24,7 +24,7 @@ def log_arrow( origin: Optional[npt.ArrayLike], vector: Optional[npt.ArrayLike] = None, *, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, label: Optional[str] = None, width_scale: Optional[float] = None, ext: Optional[Dict[str, Any]] = None, @@ -48,7 +48,7 @@ def log_arrow( vector The vector along which the arrow will be drawn. color - An optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. label An optional text to show beside the arrow. 
width_scale diff --git a/rerun_py/rerun_sdk/rerun/log/bounding_box.py b/rerun_py/rerun_sdk/rerun/log/bounding_box.py index cda8b6e10d03..869eaf7a6553 100644 --- a/rerun_py/rerun_sdk/rerun/log/bounding_box.py +++ b/rerun_py/rerun_sdk/rerun/log/bounding_box.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Sequence +from typing import Any, Dict, Optional import numpy as np import numpy.typing as npt @@ -12,7 +12,7 @@ from rerun.components.quaternion import QuaternionArray from rerun.components.radius import RadiusArray from rerun.components.vec import Vec3DArray -from rerun.log import _normalize_colors, _normalize_ids, _normalize_radii +from rerun.log import Color, _normalize_colors, _normalize_ids, _normalize_radii from rerun.log.extension_components import _add_extension_components from rerun.log.log_decorator import log_decorator @@ -28,7 +28,7 @@ def log_obb( half_size: Optional[npt.ArrayLike], position: Optional[npt.ArrayLike] = None, rotation_q: Optional[npt.ArrayLike] = None, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, stroke_width: Optional[float] = None, label: Optional[str] = None, class_id: Optional[int] = None, @@ -55,7 +55,7 @@ def log_obb( rotation_q: Optional array with quaternion coordinates [x, y, z, w] for the rotation from model to world space. color: - Optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. stroke_width: Optional width of the line edges. label: diff --git a/rerun_py/rerun_sdk/rerun/log/lines.py b/rerun_py/rerun_sdk/rerun/log/lines.py index ac46b9fe43d8..01055647398a 100644 --- a/rerun_py/rerun_sdk/rerun/log/lines.py +++ b/rerun_py/rerun_sdk/rerun/log/lines.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Sequence +from typing import Any, Dict, Optional import numpy as np import numpy.typing as npt @@ -9,7 +9,7 @@ from rerun.components.instance import InstanceArray from rerun.components.linestrip import LineStrip2DArray, LineStrip3DArray from rerun.components.radius import RadiusArray -from rerun.log import _normalize_colors, _normalize_radii +from rerun.log import Color, _normalize_colors, _normalize_radii from rerun.log.extension_components import _add_extension_components from rerun.log.log_decorator import log_decorator @@ -26,7 +26,7 @@ def log_path( positions: Optional[npt.ArrayLike], *, stroke_width: Optional[float] = None, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, ext: Optional[Dict[str, Any]] = None, timeless: bool = False, ) -> None: @@ -39,7 +39,7 @@ def log_line_strip( positions: Optional[npt.ArrayLike], *, stroke_width: Optional[float] = None, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, ext: Optional[Dict[str, Any]] = None, timeless: bool = False, ) -> None: @@ -65,7 +65,7 @@ def log_line_strip( stroke_width: Optional width of the line. color: - Optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. ext: Optional dictionary of extension components. See [rerun.log_extension_components][] timeless: @@ -114,7 +114,7 @@ def log_line_segments( positions: npt.ArrayLike, *, stroke_width: Optional[float] = None, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, ext: Optional[Dict[str, Any]] = None, timeless: bool = False, ) -> None: @@ -139,7 +139,7 @@ def log_line_segments( stroke_width: Optional width of the line. 
color: - Optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. ext: Optional dictionary of extension components. See [rerun.log_extension_components][] timeless:
diff --git a/rerun_py/rerun_sdk/rerun/log/mesh.py b/rerun_py/rerun_sdk/rerun/log/mesh.py index 8d7d986ce40e..8d122b6bc3f5 100644 --- a/rerun_py/rerun_sdk/rerun/log/mesh.py +++ b/rerun_py/rerun_sdk/rerun/log/mesh.py @@ -70,7 +70,8 @@ def log_mesh( albedo_factor: Optional color multiplier of the mesh using RGB or unmultiplied RGBA in linear 0-1 space. vertex_colors: - Optional array of RGB(a) vertex colors + Optional array of RGB(A) vertex colors, in sRGB gamma space, either as 0-1 floats or 0-255 integers. + If specified, the alpha is considered separate (unmultiplied). timeless: If true, the mesh will be timeless (default: False)
diff --git a/rerun_py/rerun_sdk/rerun/log/points.py b/rerun_py/rerun_sdk/rerun/log/points.py index 040c7c6ae928..2da62475f1d3 100644 --- a/rerun_py/rerun_sdk/rerun/log/points.py +++ b/rerun_py/rerun_sdk/rerun/log/points.py @@ -36,7 +36,7 @@ def log_point( position: Optional[npt.ArrayLike] = None, *, radius: Optional[float] = None, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, label: Optional[str] = None, class_id: Optional[int] = None, keypoint_id: Optional[int] = None, @@ -66,7 +66,7 @@ def log_point( radius: Optional radius (make it a sphere). color: - Optional color of the point. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. label: Optional text to show with the point. class_id: @@ -169,6 +169,8 @@ def log_points( Unique numeric id that shows up when you hover or select the point. colors: Optional colors of the points. + The colors are interpreted as RGB or RGBA in sRGB gamma-space, + as either 0-1 floats or 0-255 integers, with separate alpha. radii: Optional radii (make it a sphere). labels:
diff --git a/rerun_py/rerun_sdk/rerun/log/rects.py b/rerun_py/rerun_sdk/rerun/log/rects.py index 3d056d85f95b..36006fe4a43c 100644 --- a/rerun_py/rerun_sdk/rerun/log/rects.py +++ b/rerun_py/rerun_sdk/rerun/log/rects.py @@ -34,7 +34,7 @@ def log_rect( rect: Optional[npt.ArrayLike], *, rect_format: RectFormat = RectFormat.XYWH, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, label: Optional[str] = None, class_id: Optional[int] = None, ext: Optional[Dict[str, Any]] = None, @@ -52,7 +52,7 @@ def log_rect( rect_format: how to interpret the `rect` argument color: - Optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. label: Optional text to show inside the rectangle. class_id: @@ -139,7 +139,7 @@ def log_rects( identifiers: Unique numeric id that shows up when you hover or select the point. colors: - Optional per-rectangle RGB or RGBA triplet in 0-255 sRGB. + Optional per-rectangle gamma-space RGB or RGBA as 0-1 floats or 0-255 integers. labels: Optional per-rectangle text to show inside the rectangle.
class_ids: diff --git a/rerun_py/rerun_sdk/rerun/log/scalar.py b/rerun_py/rerun_sdk/rerun/log/scalar.py index 66a6d49ff733..b80d4c7906c8 100644 --- a/rerun_py/rerun_sdk/rerun/log/scalar.py +++ b/rerun_py/rerun_sdk/rerun/log/scalar.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Sequence +from typing import Any, Dict, Optional import numpy as np @@ -8,7 +8,7 @@ from rerun.components.label import LabelArray from rerun.components.radius import RadiusArray from rerun.components.scalar import ScalarArray, ScalarPlotPropsArray -from rerun.log import _normalize_colors +from rerun.log import Color, _normalize_colors from rerun.log.extension_components import _add_extension_components from rerun.log.log_decorator import log_decorator @@ -23,7 +23,7 @@ def log_scalar( scalar: float, *, label: Optional[str] = None, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, radius: Optional[float] = None, scattered: Optional[bool] = None, ext: Optional[Dict[str, Any]] = None, @@ -82,7 +82,7 @@ def log_scalar( line will be named after the entity path. The plot itself is named after the space it's in. color: - An optional color in the form of a RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. If left unspecified, a pseudo-random color will be used instead. That same color will apply to all points residing in the same entity path @@ -122,7 +122,7 @@ def log_scalar( instanced["rerun.label"] = LabelArray.new([label]) if color: - colors = _normalize_colors(np.array([color])) + colors = _normalize_colors([color]) instanced["rerun.colorrgba"] = ColorRGBAArray.from_numpy(colors) if radius: diff --git a/rerun_py/rerun_sdk/rerun/log/text.py b/rerun_py/rerun_sdk/rerun/log/text.py index 5f2a1308f7ce..fddb04f37fb8 100644 --- a/rerun_py/rerun_sdk/rerun/log/text.py +++ b/rerun_py/rerun_sdk/rerun/log/text.py @@ -1,5 +1,5 @@ import logging -from typing import Any, Dict, Final, Optional, Sequence +from typing import Any, Dict, Final, Optional # Fully qualified to avoid circular import import rerun.log.extension_components @@ -7,7 +7,7 @@ from rerun.components.color import ColorRGBAArray from rerun.components.instance import InstanceArray from rerun.components.text_entry import TextEntryArray -from rerun.log import _normalize_colors +from rerun.log import Color, _normalize_colors from rerun.log.log_decorator import log_decorator from rerun.log.text_internal import LogLevel @@ -71,7 +71,7 @@ def log_text_entry( text: str, *, level: Optional[str] = LogLevel.INFO, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, ext: Optional[Dict[str, Any]] = None, timeless: bool = False, ) -> None: @@ -89,7 +89,7 @@ def log_text_entry( be an arbitrary string, but it's recommended to use one of the constants from [LogLevel][rerun.log.text.LogLevel] color: - Optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. ext: Optional dictionary of extension components. 
See [rerun.log_extension_components][] timeless: diff --git a/rerun_py/rerun_sdk/rerun/log/text_internal.py b/rerun_py/rerun_sdk/rerun/log/text_internal.py index 1164c6bd9500..41898e4dfa5f 100644 --- a/rerun_py/rerun_sdk/rerun/log/text_internal.py +++ b/rerun_py/rerun_sdk/rerun/log/text_internal.py @@ -1,13 +1,13 @@ import logging from dataclasses import dataclass -from typing import Any, Dict, Final, Optional, Sequence +from typing import Any, Dict, Final, Optional # Fully qualified to avoid circular import from rerun import bindings from rerun.components.color import ColorRGBAArray from rerun.components.instance import InstanceArray from rerun.components.text_entry import TextEntryArray -from rerun.log import _normalize_colors +from rerun.log import Color, _normalize_colors __all__ = [ "LogLevel", @@ -48,7 +48,7 @@ def log_text_entry_internal( text: str, *, level: Optional[str] = LogLevel.INFO, - color: Optional[Sequence[int]] = None, + color: Optional[Color] = None, timeless: bool = False, ) -> None: """ @@ -68,7 +68,7 @@ def log_text_entry_internal( be an arbitrary string, but it's recommended to use one of the constants from [LogLevel][rerun.log.text.LogLevel] color: - Optional RGB or RGBA triplet in 0-255 sRGB. + Optional RGB or RGBA in sRGB gamma-space as either 0-1 floats or 0-255 integers, with separate alpha. timeless: Whether the text entry should be timeless. From 7892098799f2174691ec79f68ecd98edc6746f0b Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Tue, 4 Apr 2023 15:36:39 +0200 Subject: [PATCH 13/89] Collapse space-view by default if there is only one child (#1762) --- crates/re_viewer/src/ui/viewport.rs | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/crates/re_viewer/src/ui/viewport.rs b/crates/re_viewer/src/ui/viewport.rs index 37bf1a84124a..5f4a0073b992 100644 --- a/crates/re_viewer/src/ui/viewport.rs +++ b/crates/re_viewer/src/ui/viewport.rs @@ -13,9 +13,11 @@ use crate::{ }; use super::{ - data_blueprint::DataBlueprintGroupHandle, space_view_entity_picker::SpaceViewEntityPicker, - space_view_heuristics::all_possible_space_views, view_category::ViewCategory, SpaceView, - SpaceViewId, + data_blueprint::{DataBlueprintGroup, DataBlueprintGroupHandle}, + space_view_entity_picker::SpaceViewEntityPicker, + space_view_heuristics::all_possible_space_views, + view_category::ViewCategory, + SpaceView, SpaceViewId, }; // ---------------------------------------------------------------------------- @@ -120,8 +122,11 @@ impl Viewport { }); } - // If a group or spaceview has a total of this number of elements or less, show its subtree by default. - const MAX_ELEM_FOR_DEFAULT_OPEN: usize = 3; + /// If a group or spaceview has a total of this number of elements, show its subtree by default? 
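+ /// In other words: a lone child starts out collapsed, and so does anything
+ /// with more than three children; only groups of two or three default to open.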
+ fn default_open_for_group(group: &DataBlueprintGroup) -> bool { + let num_children = group.children.len() + group.entities.len(); + 2 <= num_children && num_children <= 3 + } fn space_view_entry_ui( &mut self, @@ -140,8 +145,7 @@ impl Viewport { let mut is_space_view_visible = self.visible.contains(space_view_id); let root_group = space_view.data_blueprint.root_group(); - let default_open = root_group.children.len() + root_group.entities.len() - <= Self::MAX_ELEM_FOR_DEFAULT_OPEN; + let default_open = Self::default_open_for_group(root_group); let collapsing_header_id = ui.id().with(space_view.id); egui::collapsing_header::CollapsingState::load_with_default_open( ui.ctx(), @@ -265,8 +269,7 @@ impl Viewport { }; let mut remove_group = false; - let default_open = child_group.children.len() + child_group.entities.len() - <= Self::MAX_ELEM_FOR_DEFAULT_OPEN; + let default_open = Self::default_open_for_group(child_group); egui::collapsing_header::CollapsingState::load_with_default_open( ui.ctx(), ui.id().with(child_group_handle), From 1713e60517e3ac3678f6e03aad59e7799a64e677 Mon Sep 17 00:00:00 2001 From: Jeremy Leibs Date: Tue, 4 Apr 2023 10:02:22 -0400 Subject: [PATCH 14/89] Always create the log_time timeline (#1763) --- crates/re_data_store/src/entity_tree.rs | 8 +++++++- crates/re_viewer/src/misc/time_control.rs | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/crates/re_data_store/src/entity_tree.rs b/crates/re_data_store/src/entity_tree.rs index 20ef41769734..451bd8f5325f 100644 --- a/crates/re_data_store/src/entity_tree.rs +++ b/crates/re_data_store/src/entity_tree.rs @@ -40,7 +40,6 @@ impl TimeHistogramPerTimeline { // ---------------------------------------------------------------------------- /// Number of messages per time per timeline -#[derive(Default)] pub struct TimesPerTimeline(BTreeMap>); impl TimesPerTimeline { @@ -79,6 +78,13 @@ impl TimesPerTimeline { } } +// Always ensure we have a default "log_time" timeline. +impl Default for TimesPerTimeline { + fn default() -> Self { + Self(BTreeMap::from([(Timeline::log_time(), Default::default())])) + } +} + // ---------------------------------------------------------------------------- /// Tree of entity paths, plus components at the leaves. 
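A hedged, self-contained sketch of the pattern this patch applies (with simplified stand-ins for the real `Timeline`/`TimeInt` types from `re_log_types`): the hand-written `Default` seeds the map with a built-in timeline so the set of timelines is never empty, and the min/max helpers in the next diff fall back to `BEGINNING` instead of unwrapping.

use std::collections::{BTreeMap, BTreeSet};

// Simplified stand-in for re_log_types::TimeInt, just to keep the sketch compilable.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
struct TimeInt(i64);

impl TimeInt {
    const BEGINNING: TimeInt = TimeInt(i64::MIN);
}

// Simplified stand-in for TimesPerTimeline, keyed by a timeline name.
struct TimesPerTimeline(BTreeMap<&'static str, BTreeSet<TimeInt>>);

impl Default for TimesPerTimeline {
    fn default() -> Self {
        // Always start out with a "log_time" timeline, even before any data arrives.
        Self(BTreeMap::from([("log_time", BTreeSet::default())]))
    }
}

fn min(values: &BTreeSet<TimeInt>) -> TimeInt {
    // The seeded timeline may not contain any times yet, hence the fallback.
    *values.iter().next().unwrap_or(&TimeInt::BEGINNING)
}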
diff --git a/crates/re_viewer/src/misc/time_control.rs b/crates/re_viewer/src/misc/time_control.rs index 8b8eeb7da001..836f2a7bfd3a 100644 --- a/crates/re_viewer/src/misc/time_control.rs +++ b/crates/re_viewer/src/misc/time_control.rs @@ -501,11 +501,11 @@ impl TimeControl { } fn min(values: &BTreeSet) -> TimeInt { - *values.iter().next().unwrap() + *values.iter().next().unwrap_or(&TimeInt::BEGINNING) } fn max(values: &BTreeSet) -> TimeInt { - *values.iter().rev().next().unwrap() + *values.iter().rev().next().unwrap_or(&TimeInt::BEGINNING) } fn range(values: &BTreeSet) -> TimeRange { From 9310bd7a6f915be2feaf8a63098af1ca491ff320 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Tue, 4 Apr 2023 17:38:59 +0200 Subject: [PATCH 15/89] Columnar timepoints in data tables and during transport (#1767) * columnar timepoints * self review --- crates/re_log_types/src/data_table.rs | 177 +++++++++++++--- crates/re_log_types/src/time_point/arrow.rs | 189 ------------------ crates/re_log_types/src/time_point/mod.rs | 7 +- .../re_log_types/src/time_point/timeline.rs | 11 + 4 files changed, 171 insertions(+), 213 deletions(-) delete mode 100644 crates/re_log_types/src/time_point/arrow.rs diff --git a/crates/re_log_types/src/data_table.rs b/crates/re_log_types/src/data_table.rs index 6183c31f1a86..3ae3796288fb 100644 --- a/crates/re_log_types/src/data_table.rs +++ b/crates/re_log_types/src/data_table.rs @@ -1,3 +1,5 @@ +use std::collections::BTreeMap; + use ahash::HashMap; use itertools::Itertools as _; use nohash_hasher::{IntMap, IntSet}; @@ -5,7 +7,7 @@ use smallvec::SmallVec; use crate::{ ArrowMsg, ComponentName, DataCell, DataCellError, DataRow, DataRowError, EntityPath, MsgId, - TimePoint, + TimePoint, Timeline, }; // --- @@ -15,6 +17,11 @@ pub enum DataTableError { #[error("Trying to deserialize data that is missing a column present in the schema: {0:?}")] MissingColumn(String), + #[error( + "Trying to deserialize time column data with invalid datatype: {name:?} ({datatype:#?})" + )] + NotATimeColumn { name: String, datatype: DataType }, + #[error("Trying to deserialize column data that doesn't contain any ListArrays: {0:?}")] NotAColumn(String), @@ -37,6 +44,7 @@ pub type DataTableResult = ::std::result::Result; // --- type RowIdVec = SmallVec<[MsgId; 4]>; +type TimeOptVec = SmallVec<[Option; 4]>; type TimePointVec = SmallVec<[TimePoint; 4]>; type EntityPathVec = SmallVec<[EntityPath; 4]>; type NumInstancesVec = SmallVec<[u32; 4]>; @@ -229,8 +237,11 @@ pub struct DataTable { /// The entire column of `RowId`s. pub row_id: RowIdVec, - /// The entire column of [`TimePoint`]s. - pub timepoint: TimePointVec, + /// All the rows for all the time columns. + /// + /// The times are optional since not all rows are guaranteed to have a timestamp for every + /// single timeline (though it is highly likely to be the case in practice). + pub col_timelines: BTreeMap, /// The entire column of [`EntityPath`]s. pub entity_path: EntityPathVec, @@ -251,7 +262,7 @@ impl DataTable { Self { table_id, row_id: Default::default(), - timepoint: Default::default(), + col_timelines: Default::default(), entity_path: Default::default(), num_instances: Default::default(), columns: Default::default(), @@ -287,6 +298,24 @@ impl DataTable { }) .multiunzip(); + // All time columns. 
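+ // Note: a timeline first seen at row `i` is back-filled with `i` leading
+ // `None`s, so rows logged before that timeline appeared still line up.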
+ let mut col_timelines: BTreeMap = BTreeMap::default(); + for (i, timepoint) in timepoint.iter().enumerate() { + for (timeline, time) in timepoint.iter() { + match col_timelines.entry(*timeline) { + std::collections::btree_map::Entry::Vacant(entry) => { + entry + .insert(smallvec::smallvec![None; i]) + .push(Some(time.as_i64())); + } + std::collections::btree_map::Entry::Occupied(mut entry) => { + let entry = entry.get_mut(); + entry.push(Some(time.as_i64())); + } + } + } + } + // Pre-allocate all columns (one per component). let mut columns = IntMap::default(); for component in components { @@ -314,7 +343,7 @@ impl DataTable { Self { table_id, row_id, - timepoint, + col_timelines, entity_path, num_instances, columns, @@ -335,7 +364,7 @@ impl DataTable { let Self { table_id: _, row_id, - timepoint, + col_timelines, entity_path, num_instances, columns, @@ -348,7 +377,14 @@ impl DataTable { DataRow::from_cells( row_id[i], - timepoint[i].clone(), + TimePoint::from( + col_timelines + .iter() + .filter_map(|(timeline, times)| { + times[i].map(|time| (*timeline, time.into())) + }) + .collect::>(), + ), entity_path[i].clone(), num_instances[i], cells, @@ -360,19 +396,23 @@ impl DataTable { /// and returns the corresponding [`TimePoint`]. #[inline] pub fn timepoint_max(&self) -> TimePoint { - self.timepoint - .iter() - .fold(TimePoint::timeless(), |acc, tp| acc.union_max(tp)) + let mut timepoint = TimePoint::timeless(); + for (timeline, col_time) in &self.col_timelines { + if let Some(time) = col_time.iter().flatten().max().copied() { + timepoint.insert(*timeline, time.into()); + } + } + timepoint } } // --- Serialization --- use arrow2::{ - array::{Array, ListArray}, + array::{Array, ListArray, PrimitiveArray}, bitmap::Bitmap, chunk::Chunk, - datatypes::{DataType, Field, Schema}, + datatypes::{DataType, Field, Schema, TimeUnit}, offset::Offsets, }; use arrow2_convert::{ @@ -383,13 +423,13 @@ use arrow2_convert::{ // TODO(#1696): Those names should come from the datatypes themselves. pub const COLUMN_ROW_ID: &str = "rerun.row_id"; -pub const COLUMN_TIMEPOINT: &str = "rerun.timepoint"; pub const COLUMN_ENTITY_PATH: &str = "rerun.entity_path"; pub const COLUMN_NUM_INSTANCES: &str = "rerun.num_instances"; pub const METADATA_KIND: &str = "rerun.kind"; pub const METADATA_KIND_DATA: &str = "data"; pub const METADATA_KIND_CONTROL: &str = "control"; +pub const METADATA_KIND_TIME: &str = "time"; pub const METADATA_TABLE_ID: &str = "rerun.table_id"; impl DataTable { @@ -400,6 +440,9 @@ impl DataTable { /// * Control columns are those that drive the behavior of the storage systems. /// They are always present, always dense, and always deserialized upon reception by the /// server. + /// Internally, time columns are (de)serialized separately from the rest of the control + /// columns for efficiency/QOL concerns: that doesn't change the fact that they are control + /// columns all the same! /// * Data columns are the one that hold component data. /// They are optional, potentially sparse, and never deserialized on the server-side (not by /// the storage systems, at least). 
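To picture the resulting layout, a hedged toy example (plain `Vec`s instead of the real `SmallVec`-based columns): a table with two rows where only row 0 was logged on the `frame_nr` timeline yields a sparse column, and each column serializes to a primitive arrow array whose validity bitmap records the gaps.

use arrow2::array::Array as _; // brings trait methods like `null_count` into scope

fn main() {
    // Row 0 has frame_nr = 1; row 1 has none (represented as None in this sketch).
    let col_frame_nr: Vec<Option<i64>> = vec![Some(1), None];

    // An Option slice converts directly into an arrow2 PrimitiveArray; `.to()`
    // then applies the timeline's logical datatype (Int64 for sequence timelines).
    let array = arrow2::array::PrimitiveArray::from(col_frame_nr.as_slice())
        .to(arrow2::datatypes::DataType::Int64);

    assert_eq!(array.len(), 2);
    assert_eq!(array.null_count(), 1); // the validity bitmap marks row 1 as unset
}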
@@ -409,6 +452,13 @@ impl DataTable { let mut schema = Schema::default(); let mut columns = Vec::new(); + { + let (control_schema, control_columns) = self.serialize_time_columns(); + schema.fields.extend(control_schema.fields); + schema.metadata.extend(control_schema.metadata); + columns.extend(control_columns.into_iter()); + } + { let (control_schema, control_columns) = self.serialize_control_columns()?; schema.fields.extend(control_schema.fields); @@ -426,6 +476,43 @@ impl DataTable { Ok((schema, Chunk::new(columns))) } + /// Serializes all time columns into an arrow payload and schema. + fn serialize_time_columns(&self) -> (Schema, Vec>) { + crate::profile_function!(); + + fn serialize_time_column( + timeline: Timeline, + times: &TimeOptVec, + ) -> (Field, Box) { + let data = PrimitiveArray::from(times.as_slice()).to(timeline.datatype()); + + let field = Field::new(timeline.name().as_str(), data.data_type().clone(), false) + .with_metadata([(METADATA_KIND.to_owned(), METADATA_KIND_TIME.to_owned())].into()); + + (field, data.boxed()) + } + + let Self { + table_id: _, + row_id: _, + col_timelines, + entity_path: _, + num_instances: _, + columns: _, + } = self; + + let mut schema = Schema::default(); + let mut columns = Vec::new(); + + for (timeline, col_time) in col_timelines { + let (time_field, time_column) = serialize_time_column(*timeline, col_time); + schema.fields.push(time_field); + columns.push(time_column); + } + + (schema, columns) + } + /// Serializes all controls columns into an arrow payload and schema. /// /// Control columns are those that drive the behavior of the storage systems. @@ -476,7 +563,7 @@ impl DataTable { let Self { table_id, row_id, - timepoint, + col_timelines: _, entity_path, num_instances, columns: _, @@ -489,11 +576,6 @@ impl DataTable { schema.fields.push(row_id_field); columns.push(row_id_column); - let (timepoint_field, timepoint_column) = - serialize_dense_column(COLUMN_TIMEPOINT, timepoint)?; - schema.fields.push(timepoint_field); - columns.push(timepoint_column); - let (entity_path_field, entity_path_column) = serialize_dense_column(COLUMN_ENTITY_PATH, entity_path)?; schema.fields.push(entity_path_field); @@ -520,7 +602,7 @@ impl DataTable { let Self { table_id: _, row_id: _, - timepoint: _, + col_timelines: _, entity_path: _, num_instances: _, columns: table, @@ -603,6 +685,28 @@ impl DataTable { ) -> DataTableResult { crate::profile_function!(); + // --- Time --- + + let col_timelines: DataTableResult<_> = schema + .fields + .iter() + .enumerate() + .filter_map(|(i, field)| { + field.metadata.get(METADATA_KIND).and_then(|kind| { + (kind == METADATA_KIND_TIME).then_some((field.name.as_str(), i)) + }) + }) + .map(|(name, index)| { + chunk + .get(index) + .ok_or(DataTableError::MissingColumn(name.to_owned())) + .and_then(|column| Self::deserialize_time_column(name, &**column)) + }) + .collect(); + let col_timelines = col_timelines?; + + // --- Control --- + let control_indices: HashMap<&str, usize> = schema .fields .iter() @@ -623,14 +727,14 @@ impl DataTable { // NOTE: the unwrappings cannot fail since control_index() makes sure the index is valid let row_id = (&**chunk.get(control_index(COLUMN_ROW_ID)?).unwrap()).try_into_collection()?; - let timepoint = - (&**chunk.get(control_index(COLUMN_TIMEPOINT)?).unwrap()).try_into_collection()?; let entity_path = (&**chunk.get(control_index(COLUMN_ENTITY_PATH)?).unwrap()).try_into_collection()?; // TODO(#1712): This is unnecessarily slow... 
let num_instances = (&**chunk.get(control_index(COLUMN_NUM_INSTANCES)?).unwrap()).try_into_collection()?; + // --- Components --- + let columns: DataTableResult<_> = schema .fields .iter() @@ -656,13 +760,40 @@ impl DataTable { Ok(Self { table_id, row_id, - timepoint, + col_timelines, entity_path, num_instances, columns, }) } + /// Deserializes a sparse time column. + fn deserialize_time_column( + name: &str, + column: &dyn Array, + ) -> DataTableResult<(Timeline, TimeOptVec)> { + // See also [`Timeline::datatype`] + let timeline = match column.data_type().to_logical_type() { + DataType::Int64 => Timeline::new_sequence(name), + DataType::Timestamp(TimeUnit::Nanosecond, None) => Timeline::new_temporal(name), + _ => { + return Err(DataTableError::NotATimeColumn { + name: name.into(), + datatype: column.data_type().clone(), + }) + } + }; + + let col_time = column + .as_any() + .downcast_ref::>() + // NOTE: cannot fail, datatype checked above + .unwrap(); + let col_time: TimeOptVec = col_time.into_iter().map(|time| time.copied()).collect(); + + Ok((timeline, col_time)) + } + /// Deserializes a sparse data column. fn deserialize_data_column( component: ComponentName, diff --git a/crates/re_log_types/src/time_point/arrow.rs b/crates/re_log_types/src/time_point/arrow.rs deleted file mode 100644 index bed235cb6f4f..000000000000 --- a/crates/re_log_types/src/time_point/arrow.rs +++ /dev/null @@ -1,189 +0,0 @@ -use arrow2::{ - array::{ - Int64Array, ListArray, MutableArray, MutableListArray, MutablePrimitiveArray, - MutableStructArray, MutableUtf8Array, StructArray, UInt8Array, Utf8Array, - }, - datatypes::{DataType, Field}, -}; -use arrow2_convert::{deserialize::ArrowDeserialize, field::ArrowField, serialize::ArrowSerialize}; - -use crate::{TimeInt, TimePoint, Timeline}; - -arrow2_convert::arrow_enable_vec_for_type!(TimePoint); - -impl ArrowField for TimePoint { - type Type = Self; - - #[inline] - fn data_type() -> DataType { - //TODO(john) Use Dictionary type - //let time_type_values = Utf8Array::::from_slice(["Time", "Sequence"]); - //let time_type = DataType::Dictionary( - // i32::KEY_TYPE, - // Box::new(time_type_values.data_type().clone()), - // false, - //); - let time_type = DataType::UInt8; - - let struct_type = DataType::Struct(vec![ - Field::new("timeline", DataType::Utf8, false), - Field::new("type", time_type, false), - Field::new("time", DataType::Int64, false), - ]); - - ListArray::::default_datatype(struct_type) - //TODO(john) Wrapping the DataType in Extension exposes a bug in arrow2::io::ipc - //DataType::Extension("TimePoint".to_owned(), Box::new(list_type), None) - } -} - -impl ArrowSerialize for TimePoint { - type MutableArrayType = MutableListArray; - - #[inline] - fn new_array() -> Self::MutableArrayType { - let timeline_array: Box = Box::new(MutableUtf8Array::::new()); - let time_type_array: Box = Box::new(MutablePrimitiveArray::::new()); - let time_array: Box = Box::new(MutablePrimitiveArray::::new()); - - let data_type = Self::data_type(); - let DataType::List(inner) = data_type.to_logical_type() else { unreachable!() }; - let str_array = MutableStructArray::new( - inner.data_type.clone(), - vec![timeline_array, time_type_array, time_array], - ); - MutableListArray::new_from(str_array, data_type, 0) - } - - fn arrow_serialize( - v: &::Type, - array: &mut Self::MutableArrayType, - ) -> arrow2::error::Result<()> { - let struct_array = array.mut_values(); - for (timeline, time) in &v.0 { - ::arrow_serialize( - &timeline.name().to_string(), - struct_array.value(0).unwrap(), - 
)?; - ::arrow_serialize( - &(timeline.typ() as u8), - struct_array.value(1).unwrap(), - )?; - ::arrow_serialize( - &time.as_i64(), - struct_array.value(2).unwrap(), - )?; - struct_array.push(true); - } - array.try_push_valid() - } -} - -// ---------------------------------------------------------------------------- - -pub struct TimePointIterator<'a> { - time_points: <&'a ListArray as IntoIterator>::IntoIter, -} - -impl<'a> Iterator for TimePointIterator<'a> { - type Item = TimePoint; - - #[inline] - fn next(&mut self) -> Option { - self.time_points.next().flatten().map(|time_point| { - let struct_arr = time_point - .as_any() - .downcast_ref::() - .expect("StructArray"); - let values = struct_arr.values(); - let timelines = values[0] - .as_any() - .downcast_ref::>() - .expect("timelines"); - let types = values[1] - .as_any() - .downcast_ref::() - .expect("types"); - let times = values[2] - .as_any() - .downcast_ref::() - .expect("times"); - - let time_points = timelines.iter().zip(types.iter()).zip(times.iter()).map( - |((timeline, ty), time)| { - ( - Timeline::new( - timeline.unwrap(), - num_traits::FromPrimitive::from_u8(*ty.unwrap()) - .expect("valid TimeType"), - ), - TimeInt::from(*time.unwrap()), - ) - }, - ); - - time_points.collect() - }) - } -} - -// ---------------------------------------------------------------------------- -pub struct TimePointArray; - -impl<'a> IntoIterator for &'a TimePointArray { - type Item = TimePoint; - - type IntoIter = TimePointIterator<'a>; - - fn into_iter(self) -> Self::IntoIter { - panic!("Use iter_from_array_ref. This is a quirk of the way the traits work in arrow2_convert."); - } -} - -impl arrow2_convert::deserialize::ArrowArray for TimePointArray { - type BaseArrayType = arrow2::array::MapArray; - - #[inline] - fn iter_from_array_ref(b: &dyn arrow2::array::Array) -> <&Self as IntoIterator>::IntoIter { - let arr = b.as_any().downcast_ref::>().unwrap(); - assert_eq!(arr.validity(), None, "TimePoints should be non-null"); - - TimePointIterator { - time_points: arr.into_iter(), - } - } -} - -impl ArrowDeserialize for TimePoint { - type ArrayType = TimePointArray; - - fn arrow_deserialize( - v: <&Self::ArrayType as IntoIterator>::Item, - ) -> Option<::Type> { - Some(v) - } -} - -// ---------------------------------------------------------------------------- - -#[test] -fn test_timepoint_roundtrip() { - use crate::datagen; - use arrow2::array::Array; - use arrow2_convert::{deserialize::TryIntoCollection, serialize::TryIntoArrow}; - - let time_points_in = vec![ - TimePoint::from([ - datagen::build_log_time(crate::Time::from_ns_since_epoch(100)), - datagen::build_frame_nr(1234.into()), - ]), - TimePoint::from([ - datagen::build_log_time(crate::Time::from_ns_since_epoch(200)), - datagen::build_frame_nr(2345.into()), - ]), - ]; - - let array: Box = time_points_in.try_into_arrow().unwrap(); - let time_points_out: Vec = TryIntoCollection::try_into_collection(array).unwrap(); - assert_eq!(time_points_in, time_points_out); -} diff --git a/crates/re_log_types/src/time_point/mod.rs b/crates/re_log_types/src/time_point/mod.rs index bec628a8c954..79a1eeae526f 100644 --- a/crates/re_log_types/src/time_point/mod.rs +++ b/crates/re_log_types/src/time_point/mod.rs @@ -1,6 +1,5 @@ use std::collections::{btree_map, BTreeMap}; -mod arrow; mod time_int; mod timeline; @@ -21,6 +20,12 @@ pub use timeline::{Timeline, TimelineName}; #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct TimePoint(BTreeMap); +impl From> for TimePoint { + fn 
from(timelines: BTreeMap) -> Self { + Self(timelines) + } +} + impl TimePoint { /// Logging to this time means the data will show upp in all timelines, /// past and future. The time will be [`TimeInt::BEGINNING`], meaning it will diff --git a/crates/re_log_types/src/time_point/timeline.rs b/crates/re_log_types/src/time_point/timeline.rs index 14ed5cb5d835..9d9f14a2c9da 100644 --- a/crates/re_log_types/src/time_point/timeline.rs +++ b/crates/re_log_types/src/time_point/timeline.rs @@ -1,3 +1,5 @@ +use arrow2::datatypes::{DataType, TimeUnit}; + use crate::{TimeRange, TimeType}; re_string_interner::declare_new_type!( @@ -84,6 +86,15 @@ impl Timeline { self.typ.format(time_range.max), ) } + + /// Returns the appropriate arrow datatype to represent this timeline. + #[inline] + pub fn datatype(&self) -> DataType { + match self.typ { + TimeType::Time => DataType::Timestamp(TimeUnit::Nanosecond, None), + TimeType::Sequence => DataType::Int64, + } + } } impl nohash_hasher::IsEnabled for Timeline {} From 6c383c312da5782366057c26d72aebabc82c3334 Mon Sep 17 00:00:00 2001 From: Andreas Reich Date: Tue, 4 Apr 2023 18:16:21 +0200 Subject: [PATCH 16/89] Fix undo/redo selection shortcut/action changing selection history without changing selection (#1765) * Fix undo/redo selection shortcut/action changing selection history without changing selection Fixes #1172 * typo fix --- crates/re_viewer/src/misc/selection_state.rs | 14 +++++++---- .../re_viewer/src/ui/selection_history_ui.rs | 25 ++++++++----------- 2 files changed, 19 insertions(+), 20 deletions(-) diff --git a/crates/re_viewer/src/misc/selection_state.rs b/crates/re_viewer/src/misc/selection_state.rs index 6597f0af161d..c651f93af36a 100644 --- a/crates/re_viewer/src/misc/selection_state.rs +++ b/crates/re_viewer/src/misc/selection_state.rs @@ -7,7 +7,7 @@ use re_data_store::{EntityPath, InstancePath, InstancePathHash, LogDb}; use re_log_types::{component_types::InstanceKey, EntityPathHash}; use re_renderer::OutlineMaskPreference; -use crate::ui::{Blueprint, HistoricalSelection, SelectionHistory, SpaceView, SpaceViewId}; +use crate::ui::{Blueprint, SelectionHistory, SpaceView, SpaceViewId}; use super::{Item, ItemCollection}; @@ -205,13 +205,17 @@ impl SelectionState { } /// Selects the previous element in the history if any. - pub fn select_previous(&mut self) -> Option { - self.history.select_previous() + pub fn select_previous(&mut self) { + if let Some(selection) = self.history.select_previous() { + self.selection = selection; + } } /// Selections the next element in the history if any. - pub fn select_next(&mut self) -> Option { - self.history.select_next() + pub fn select_next(&mut self) { + if let Some(selection) = self.history.select_next() { + self.selection = selection; + } } /// Clears the current selection out. 
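The shape of the fix, as a hedged standalone sketch (simplified stand-in types): the history method only computes and returns the new selection, `#[must_use]` flags any caller that ignores it, and the state wrapper is the one place that actually applies it.

// Simplified stand-ins for the real SelectionHistory/SelectionState types.
struct SelectionHistory {
    entries: Vec<String>,
    current: usize,
}

impl SelectionHistory {
    #[must_use] // dropping the return value was exactly the original bug
    fn select_previous(&mut self) -> Option<String> {
        if self.current > 0 {
            self.current -= 1;
            return self.entries.get(self.current).cloned();
        }
        None
    }
}

struct SelectionState {
    selection: String,
    history: SelectionHistory,
}

impl SelectionState {
    fn select_previous(&mut self) {
        // Applying the returned value keeps history and the actual selection in sync.
        if let Some(selection) = self.history.select_previous() {
            self.selection = selection;
        }
    }
}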
diff --git a/crates/re_viewer/src/ui/selection_history_ui.rs b/crates/re_viewer/src/ui/selection_history_ui.rs index 29064a43efa2..1aa6b542dfd1 100644 --- a/crates/re_viewer/src/ui/selection_history_ui.rs +++ b/crates/re_viewer/src/ui/selection_history_ui.rs @@ -1,7 +1,7 @@ use egui::RichText; use re_ui::Command; -use super::{HistoricalSelection, SelectionHistory}; +use super::SelectionHistory; use crate::{misc::ItemCollection, ui::Blueprint, Item}; // --- @@ -14,7 +14,6 @@ impl SelectionHistory { blueprint: &Blueprint, ) -> Option { self.control_bar_ui(re_ui, ui, blueprint) - .map(|sel| sel.selection) } fn control_bar_ui( @@ -22,7 +21,7 @@ impl SelectionHistory { re_ui: &re_ui::ReUi, ui: &mut egui::Ui, blueprint: &Blueprint, - ) -> Option { + ) -> Option { ui.horizontal_centered(|ui| { ui.strong("Selection").on_hover_text("The Selection View contains information and options about the currently selected object(s)."); @@ -38,27 +37,23 @@ impl SelectionHistory { }).inner } - // TODO(cmc): note that for now, we only check prev/next shortcuts in the UI code that - // shows the associated buttons... this means shortcuts only work when the selection panel - // is open! - // We might want to change this at some point, though the way things are currently designed, - // there isn't much point in selecting stuff while the selection panel is hidden anyway. - - pub fn select_previous(&mut self) -> Option { + #[must_use] + pub fn select_previous(&mut self) -> Option { if let Some(previous) = self.previous() { if previous.index != self.current { self.current = previous.index; - return self.current(); + return self.current().map(|s| s.selection); } } None } - pub fn select_next(&mut self) -> Option { + #[must_use] + pub fn select_next(&mut self) -> Option { if let Some(next) = self.next() { if next.index != self.current { self.current = next.index; - return self.current(); + return self.current().map(|s| s.selection); } } None @@ -69,7 +64,7 @@ impl SelectionHistory { re_ui: &re_ui::ReUi, ui: &mut egui::Ui, blueprint: &Blueprint, - ) -> Option { + ) -> Option { // undo selection if let Some(previous) = self.previous() { let response = re_ui @@ -112,7 +107,7 @@ impl SelectionHistory { re_ui: &re_ui::ReUi, ui: &mut egui::Ui, blueprint: &Blueprint, - ) -> Option { + ) -> Option { // redo selection if let Some(next) = self.next() { let response = re_ui From b9f1380ffabe21aaac1a419a46bb9d4ca85e3a7b Mon Sep 17 00:00:00 2001 From: Jeremy Leibs Date: Tue, 4 Apr 2023 12:18:45 -0400 Subject: [PATCH 17/89] Don't initialize an SDK session if we are only going to be launching the app (#1768) --- rerun_py/rerun_sdk/rerun/__init__.py | 7 ++++++- rerun_py/src/python_bridge.rs | 21 +++++++++++++++------ 2 files changed, 21 insertions(+), 7 deletions(-) diff --git a/rerun_py/rerun_sdk/rerun/__init__.py b/rerun_py/rerun_sdk/rerun/__init__.py index efcccb40766c..f58664c7f72a 100644 --- a/rerun_py/rerun_sdk/rerun/__init__.py +++ b/rerun_py/rerun_sdk/rerun/__init__.py @@ -306,10 +306,15 @@ def spawn(port: int = 9876, connect: bool = True) -> None: print("Rerun is disabled - spawn() call ignored") return + import os import subprocess import sys from time import sleep + # Let the spawned rerun process know it's just an app + new_env = os.environ.copy() + new_env["RERUN_APP_ONLY"] = "true" + # sys.executable: the absolute path of the executable binary for the Python interpreter python_executable = sys.executable if python_executable is None: @@ -317,7 +322,7 @@ def spawn(port: int = 9876, connect: bool = True) -> None: # 
start_new_session=True ensures the spawned process does NOT die when # we hit ctrl-c in the terminal running the parent Python process. - subprocess.Popen([python_executable, "-m", "rerun", "--port", str(port)], start_new_session=True) + subprocess.Popen([python_executable, "-m", "rerun", "--port", str(port)], env=new_env, start_new_session=True) # TODO(emilk): figure out a way to postpone connecting until the rerun viewer is listening. # For example, wait until it prints "Hosting a SDK server over TCP at …" diff --git a/rerun_py/src/python_bridge.rs b/rerun_py/src/python_bridge.rs index 0437757f6b3f..d42f3ef61a1a 100644 --- a/rerun_py/src/python_bridge.rs +++ b/rerun_py/src/python_bridge.rs @@ -107,6 +107,21 @@ fn rerun_bindings(py: Python<'_>, m: &PyModule) -> PyResult<()> { // called more than once. re_log::setup_native_logging(); + // We always want main to be available + m.add_function(wrap_pyfunction!(main, m)?)?; + + // These two components are necessary for imports to work + // TODO(jleibs): Refactor import logic so all we need is main + m.add_function(wrap_pyfunction!(get_registered_component_names, m)?)?; + m.add_class::()?; + + // If this is a special RERUN_APP_ONLY context (launched via .spawn), we + // can bypass everything else, which keeps us from preparing an SDK session + // that never gets used. + if matches!(std::env::var("RERUN_APP_ONLY").as_deref(), Ok("true")) { + return Ok(()); + } + python_session().set_python_version(python_version(py)); // NOTE: We do this here because we want child processes to share the same recording-id, @@ -114,10 +129,6 @@ fn rerun_bindings(py: Python<'_>, m: &PyModule) -> PyResult<()> { // See `default_recording_id` for extra information. python_session().set_recording_id(default_recording_id(py)); - m.add_function(wrap_pyfunction!(main, m)?)?; - - m.add_function(wrap_pyfunction!(get_registered_component_names, m)?)?; - m.add_function(wrap_pyfunction!(get_recording_id, m)?)?; m.add_function(wrap_pyfunction!(set_recording_id, m)?)?; @@ -150,8 +161,6 @@ fn rerun_bindings(py: Python<'_>, m: &PyModule) -> PyResult<()> { m.add_function(wrap_pyfunction!(log_cleared, m)?)?; m.add_function(wrap_pyfunction!(log_arrow_msg, m)?)?; - m.add_class::()?; - Ok(()) } From 679e2458055096a28c1fbcdbce397de20329e25f Mon Sep 17 00:00:00 2001 From: Jeremy Leibs Date: Tue, 4 Apr 2023 12:21:07 -0400 Subject: [PATCH 18/89] Allow torch tensors for log_rigid3 (#1769) --- rerun_py/rerun_sdk/rerun/log/__init__.py | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/rerun_py/rerun_sdk/rerun/log/__init__.py b/rerun_py/rerun_sdk/rerun/log/__init__.py index b36abdacd5c6..fd7a74d1cf76 100644 --- a/rerun_py/rerun_sdk/rerun/log/__init__.py +++ b/rerun_py/rerun_sdk/rerun/log/__init__.py @@ -35,10 +35,7 @@ def _to_sequence(array: Optional[npt.ArrayLike]) -> Optional[Sequence[float]]: - if isinstance(array, np.ndarray): - return np.require(array, float).tolist() # type: ignore[no-any-return] - - return array # type: ignore[return-value] + return np.require(array, float).tolist() # type: ignore[no-any-return] def _normalize_colors(colors: Optional[Union[Color, Colors]] = None) -> npt.NDArray[np.uint8]: From 409dcd89ae898e795964e151bcde4d5a86c263b9 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Tue, 4 Apr 2023 18:58:14 +0200 Subject: [PATCH 19/89] Option to show scene bounding box (#1770) * Include depth clouds in bounding box calculation * Don't wrap text when showing bbox in ui * Handle projective transforms * Nicer selection view: don't wrap second column 
too early * Add checkbox to show the scene bounding box --- .../re_renderer/src/renderer/depth_cloud.rs | 31 +++++++++ crates/re_ui/src/lib.rs | 6 +- .../src/ui/view_spatial/scene/primitives.rs | 6 +- crates/re_viewer/src/ui/view_spatial/ui.rs | 2 + crates/re_viewer/src/ui/view_spatial/ui_3d.rs | 22 +++++++ crates/re_viewer/src/ui/view_text/ui.rs | 63 +++++++++---------- 6 files changed, 94 insertions(+), 36 deletions(-) diff --git a/crates/re_renderer/src/renderer/depth_cloud.rs b/crates/re_renderer/src/renderer/depth_cloud.rs index b66097528b64..fc6c13a36539 100644 --- a/crates/re_renderer/src/renderer/depth_cloud.rs +++ b/crates/re_renderer/src/renderer/depth_cloud.rs @@ -166,6 +166,37 @@ pub struct DepthCloud { pub outline_mask_id: OutlineMaskPreference, } +impl DepthCloud { + /// World-space bounding-box. + pub fn bbox(&self) -> macaw::BoundingBox { + let max_depth = self.max_depth_in_world; + let w = self.depth_dimensions.x as f32; + let h = self.depth_dimensions.y as f32; + let corners = [ + glam::Vec3::ZERO, // camera origin + glam::Vec3::new(0.0, 0.0, max_depth), + glam::Vec3::new(0.0, h, max_depth), + glam::Vec3::new(w, 0.0, max_depth), + glam::Vec3::new(w, h, max_depth), + ]; + + let intrinsics = self.depth_camera_intrinsics; + let focal_length = glam::vec2(intrinsics.col(0).x, intrinsics.col(1).y); + let offset = intrinsics.col(2).truncate(); + + let mut bbox = macaw::BoundingBox::nothing(); + + for corner in corners { + let depth = corner.z; + let pos_in_obj = ((corner.truncate() - offset) * depth / focal_length).extend(depth); + let pos_in_world = self.world_from_obj.project_point3(pos_in_obj); + bbox.extend(pos_in_world); + } + + bbox + } +} + pub struct DepthClouds { pub clouds: Vec, pub radius_boost_in_ui_points_for_outlines: f32, diff --git a/crates/re_ui/src/lib.rs b/crates/re_ui/src/lib.rs index 222473b8ed15..d5d9569cf7b2 100644 --- a/crates/re_ui/src/lib.rs +++ b/crates/re_ui/src/lib.rs @@ -480,11 +480,13 @@ impl ReUi { .inner } - /// Grid to be used in selection view. + /// Two-column grid to be used in selection view. 
#[allow(clippy::unused_self)] pub fn selection_grid(&self, ui: &mut egui::Ui, id: &str) -> egui::Grid { // Spread rows a bit to make it easier to see the groupings - egui::Grid::new(id).spacing(ui.style().spacing.item_spacing + egui::vec2(0.0, 8.0)) + egui::Grid::new(id) + .num_columns(2) + .spacing(ui.style().spacing.item_spacing + egui::vec2(0.0, 8.0)) } /// Draws a shadow into the given rect with the shadow direction given from dark to light diff --git a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs index cf86d5c6d2d7..e8a56e454b27 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs @@ -93,7 +93,7 @@ impl SceneSpatialPrimitives { line_strips, points, meshes, - depth_clouds: _, // no bbox for depth clouds + depth_clouds, any_outlines: _, } = self; @@ -133,6 +133,10 @@ impl SceneSpatialPrimitives { *bounding_box = bounding_box.union(mesh.mesh.bbox().transform_affine3(&mesh.world_from_mesh)); } + + for cloud in &depth_clouds.clouds { + *bounding_box = bounding_box.union(cloud.bbox()); + } } pub fn mesh_instances(&self) -> Vec { diff --git a/crates/re_viewer/src/ui/view_spatial/ui.rs b/crates/re_viewer/src/ui/view_spatial/ui.rs index fb4fd4b78181..8542ecc881cf 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui.rs @@ -347,6 +347,7 @@ impl ViewSpatialState { }); }); ui.checkbox(&mut self.state_3d.show_axes, "Show origin axes").on_hover_text("Show X-Y-Z axes"); + ui.checkbox(&mut self.state_3d.show_bbox, "Show bounding box").on_hover_text("Show the current scene bounding box"); }); ui.end_row(); } @@ -354,6 +355,7 @@ impl ViewSpatialState { ctx.re_ui.grid_left_hand_label(ui, "Bounding box") .on_hover_text("The bounding box encompassing all Entities in the view right now."); ui.vertical(|ui| { + ui.style_mut().wrap = Some(false); let BoundingBox { min, max } = self.scene_bbox; ui.label(format!( "x [{} - {}]", diff --git a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs index 01dc139aee03..36d7044d1408 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs @@ -56,6 +56,7 @@ pub struct View3DState { // options: pub spin: bool, pub show_axes: bool, + pub show_bbox: bool, #[serde(skip)] last_eye_interact_time: f64, @@ -77,6 +78,7 @@ impl Default for View3DState { hovered_point: Default::default(), spin: false, show_axes: false, + show_bbox: false, last_eye_interact_time: f64::NEG_INFINITY, space_specs: Default::default(), space_camera: Default::default(), @@ -549,6 +551,26 @@ pub fn view_3d( ); } + if state.state_3d.show_bbox { + let bbox = scene.primitives.bounding_box(); + if bbox.is_something() && bbox.is_finite() { + let scale = bbox.size(); + let translation = bbox.center(); + let bbox_from_unit_cube = glam::Affine3A::from_scale_rotation_translation( + scale, + Default::default(), + translation, + ); + scene + .primitives + .line_strips + .batch("scene_bbox") + .add_box_outline(bbox_from_unit_cube) + .radius(Size::AUTO) + .color(egui::Color32::WHITE); + } + } + { let orbit_center_alpha = egui::remap_clamp( ui.input(|i| i.time) - state.state_3d.last_eye_interact_time, diff --git a/crates/re_viewer/src/ui/view_text/ui.rs b/crates/re_viewer/src/ui/view_text/ui.rs index 29ec21629d60..031472657303 100644 --- a/crates/re_viewer/src/ui/view_text/ui.rs +++ b/crates/re_viewer/src/ui/view_text/ui.rs @@ 
-37,43 +37,40 @@ impl ViewTextState { row_log_levels, } = &mut self.filters; - re_ui - .selection_grid(ui, "log_config") - .num_columns(2) - .show(ui, |ui| { - re_ui.grid_left_hand_label(ui, "Columns"); - ui.vertical(|ui| { - for (timeline, visible) in col_timelines { - ui.checkbox(visible, timeline.name().to_string()); - } - ui.checkbox(col_entity_path, "Entity path"); - ui.checkbox(col_log_level, "Log level"); - }); - ui.end_row(); + re_ui.selection_grid(ui, "log_config").show(ui, |ui| { + re_ui.grid_left_hand_label(ui, "Columns"); + ui.vertical(|ui| { + for (timeline, visible) in col_timelines { + ui.checkbox(visible, timeline.name().to_string()); + } + ui.checkbox(col_entity_path, "Entity path"); + ui.checkbox(col_log_level, "Log level"); + }); + ui.end_row(); - re_ui.grid_left_hand_label(ui, "Entity Filter"); - ui.vertical(|ui| { - for (entity_path, visible) in row_entity_paths { - ui.checkbox(visible, &entity_path.to_string()); - } - }); - ui.end_row(); + re_ui.grid_left_hand_label(ui, "Entity Filter"); + ui.vertical(|ui| { + for (entity_path, visible) in row_entity_paths { + ui.checkbox(visible, &entity_path.to_string()); + } + }); + ui.end_row(); - re_ui.grid_left_hand_label(ui, "Level Filter"); - ui.vertical(|ui| { - for (log_level, visible) in row_log_levels { - ui.checkbox(visible, level_to_rich_text(ui, log_level)); - } - }); - ui.end_row(); + re_ui.grid_left_hand_label(ui, "Level Filter"); + ui.vertical(|ui| { + for (log_level, visible) in row_log_levels { + ui.checkbox(visible, level_to_rich_text(ui, log_level)); + } + }); + ui.end_row(); - re_ui.grid_left_hand_label(ui, "Text style"); - ui.vertical(|ui| { - ui.radio_value(&mut self.monospace, false, "Proportional"); - ui.radio_value(&mut self.monospace, true, "Monospace"); - }); - ui.end_row(); + re_ui.grid_left_hand_label(ui, "Text style"); + ui.vertical(|ui| { + ui.radio_value(&mut self.monospace, false, "Proportional"); + ui.radio_value(&mut self.monospace, true, "Monospace"); }); + ui.end_row(); + }); } } From 5b590bbc2da6ad8063731f9e1b0b00d64f047569 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Wed, 5 Apr 2023 17:00:01 +0200 Subject: [PATCH 20/89] Fix a whole lot of crashes, all at once (#1780) --- crates/re_log_types/src/arrow_msg.rs | 9 ++++++++- crates/re_viewer/src/ui/view_text/scene.rs | 4 ++-- crates/re_viewer/src/ui/view_text/ui.rs | 2 +- crates/re_ws_comms/src/server.rs | 2 +- crates/rerun/src/run.rs | 2 +- 5 files changed, 13 insertions(+), 6 deletions(-) diff --git a/crates/re_log_types/src/arrow_msg.rs b/crates/re_log_types/src/arrow_msg.rs index 81f48c032057..706635970da8 100644 --- a/crates/re_log_types/src/arrow_msg.rs +++ b/crates/re_log_types/src/arrow_msg.rs @@ -87,7 +87,14 @@ impl<'de> serde::Deserialize<'de> for ArrowMsg { (table_id, timepoint_min, buf) { let mut cursor = std::io::Cursor::new(buf); - let metadata = read_stream_metadata(&mut cursor).unwrap(); + let metadata = match read_stream_metadata(&mut cursor) { + Ok(metadata) => metadata, + Err(err) => { + return Err(serde::de::Error::custom(format!( + "Failed to read stream metadata: {err}" + ))) + } + }; let mut stream = StreamReader::new(cursor, metadata, None); let chunk = stream .find_map(|state| match state { diff --git a/crates/re_viewer/src/ui/view_text/scene.rs b/crates/re_viewer/src/ui/view_text/scene.rs index 498273cfa689..f83162bd0319 100644 --- a/crates/re_viewer/src/ui/view_text/scene.rs +++ b/crates/re_viewer/src/ui/view_text/scene.rs @@ -15,7 +15,7 @@ use super::ui::ViewTextFilters; #[derive(Debug, Clone)] pub struct 
TextEntry { // props - pub msg_id: MsgId, + pub msg_id: Option, pub entity_path: EntityPath, @@ -86,7 +86,7 @@ impl SceneText { if is_visible { self.text_entries.push(TextEntry { - msg_id: msg_id.unwrap(), // always present + msg_id, entity_path: entity_path.clone(), time: time.map(|time| time.as_i64()), color: color.map(|c| c.to_array()), diff --git a/crates/re_viewer/src/ui/view_text/ui.rs b/crates/re_viewer/src/ui/view_text/ui.rs index 031472657303..f0ff28dc1cba 100644 --- a/crates/re_viewer/src/ui/view_text/ui.rs +++ b/crates/re_viewer/src/ui/view_text/ui.rs @@ -188,7 +188,7 @@ fn get_time_point(ctx: &ViewerContext<'_>, entry: &TextEntry) -> Option>>>, ) -> tungstenite::Result<()> { - let ws_stream = accept_async(tcp_stream).await.expect("Failed to accept"); + let ws_stream = accept_async(tcp_stream).await?; let (mut ws_sender, mut ws_receiver) = ws_stream.split(); // Re-sending packet history - this is not water tight, but better than nothing. diff --git a/crates/rerun/src/run.rs b/crates/rerun/src/run.rs index 06afe4b7f7a0..1dfa9ee9ff59 100644 --- a/crates/rerun/src/run.rs +++ b/crates/rerun/src/run.rs @@ -532,7 +532,7 @@ pub fn setup_ctrl_c_handler() -> (tokio::sync::broadcast::Receiver<()>, Arc Date: Wed, 5 Apr 2023 14:25:19 -0400 Subject: [PATCH 21/89] Add typing_extensions to requirements-doc.txt (#1786) --- rerun_py/requirements-doc.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/rerun_py/requirements-doc.txt b/rerun_py/requirements-doc.txt index 8c5d6f584acb..3683da922b9d 100644 --- a/rerun_py/requirements-doc.txt +++ b/rerun_py/requirements-doc.txt @@ -7,3 +7,4 @@ mkdocs-redirects mkdocstrings mkdocstrings-python mike +typing_extensions # uncaptured dep for mkdocstrings (https://github.com/mkdocstrings/mkdocstrings/issues/548) From 83cd2297097d7d8754de154593ba848350a20136 Mon Sep 17 00:00:00 2001 From: Jeremy Leibs Date: Wed, 5 Apr 2023 16:13:47 -0400 Subject: [PATCH 22/89] auto_color class-ids if they are present (#1783) * auto_color class-ids if they are present * Update log line in segmentation demo * Avoid tuple structs --- crates/re_viewer/src/ui/annotations.rs | 61 ++++++++++++++++---------- examples/python/api_demo/main.py | 2 +- 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/crates/re_viewer/src/ui/annotations.rs b/crates/re_viewer/src/ui/annotations.rs index 2e5ab34c8394..3a25bc132fd1 100644 --- a/crates/re_viewer/src/ui/annotations.rs +++ b/crates/re_viewer/src/ui/annotations.rs @@ -22,38 +22,51 @@ pub struct Annotations { impl Annotations { pub fn class_description(&self, class_id: Option) -> ResolvedClassDescription<'_> { - ResolvedClassDescription( - class_id.and_then(|class_id| self.context.class_map.get(&class_id)), - ) + ResolvedClassDescription { + class_id, + class_description: class_id.and_then(|class_id| self.context.class_map.get(&class_id)), + } } } -pub struct ResolvedClassDescription<'a>(pub Option<&'a ClassDescription>); +pub struct ResolvedClassDescription<'a> { + pub class_id: Option, + pub class_description: Option<&'a ClassDescription>, +} impl<'a> ResolvedClassDescription<'a> { pub fn annotation_info(&self) -> ResolvedAnnotationInfo { - ResolvedAnnotationInfo(self.0.map(|desc| desc.info.clone())) + ResolvedAnnotationInfo { + class_id: self.class_id, + annotation_info: self.class_description.map(|desc| desc.info.clone()), + } } /// Merges class annotation info with keypoint annotation info (if existing respectively). 
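     /// (Sketch of the merge precedence, given the fields above: the keypoint's own
     /// label and color win, and each one falls back to the class description's
     /// `info.label` / `info.color` when the keypoint leaves it unset.)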
     pub fn annotation_info_with_keypoint(&self, keypoint_id: KeypointId) -> ResolvedAnnotationInfo {
-        if let Some(desc) = self.0 {
+        if let Some(desc) = self.class_description {
             // Assuming that keypoint annotation is the rarer case, merging the entire annotation ahead of time
             // is cheaper than doing it lazily (which would cause more branches down the line for callsites without keypoints)
             if let Some(keypoint_annotation_info) = desc.keypoint_map.get(&keypoint_id) {
-                ResolvedAnnotationInfo(Some(AnnotationInfo {
-                    id: keypoint_id.0,
-                    label: keypoint_annotation_info
-                        .label
-                        .clone()
-                        .or_else(|| desc.info.label.clone()),
-                    color: keypoint_annotation_info.color.or(desc.info.color),
-                }))
+                ResolvedAnnotationInfo {
+                    class_id: self.class_id,
+                    annotation_info: Some(AnnotationInfo {
+                        id: keypoint_id.0,
+                        label: keypoint_annotation_info
+                            .label
+                            .clone()
+                            .or_else(|| desc.info.label.clone()),
+                        color: keypoint_annotation_info.color.or(desc.info.color),
+                    }),
+                }
             } else {
                 self.annotation_info()
             }
         } else {
-            ResolvedAnnotationInfo(None)
+            ResolvedAnnotationInfo {
+                class_id: self.class_id,
+                annotation_info: None,
+            }
         }
     }
 }
@@ -66,7 +79,10 @@ pub enum DefaultColor<'a> {
 }
 
 #[derive(Clone)]
-pub struct ResolvedAnnotationInfo(pub Option<AnnotationInfo>);
+pub struct ResolvedAnnotationInfo {
+    pub class_id: Option<ClassId>,
+    pub annotation_info: Option<AnnotationInfo>,
+}
 
 impl ResolvedAnnotationInfo {
     pub fn color(
@@ -76,17 +92,18 @@ impl ResolvedAnnotationInfo {
     ) -> re_renderer::Color32 {
         if let Some([r, g, b, a]) = color {
             re_renderer::Color32::from_rgba_premultiplied(*r, *g, *b, *a)
-        } else if let Some(color) = self.0.as_ref().and_then(|info| {
+        } else if let Some(color) = self.annotation_info.as_ref().and_then(|info| {
             info.color
                 .map(|c| c.into())
                 .or_else(|| Some(auto_color(info.id)))
         }) {
             color
         } else {
-            match default_color {
-                DefaultColor::TransparentBlack => re_renderer::Color32::TRANSPARENT,
-                DefaultColor::OpaqueWhite => re_renderer::Color32::WHITE,
-                DefaultColor::EntityPath(entity_path) => {
+            match (self.class_id, default_color) {
+                (Some(class_id), _) if class_id.0 != 0 => auto_color(class_id.0),
+                (_, DefaultColor::TransparentBlack) => re_renderer::Color32::TRANSPARENT,
+                (_, DefaultColor::OpaqueWhite) => re_renderer::Color32::WHITE,
+                (_, DefaultColor::EntityPath(entity_path)) => {
                     auto_color((entity_path.hash64() % std::u16::MAX as u64) as u16)
                 }
             }
@@ -97,7 +114,7 @@ impl ResolvedAnnotationInfo {
         if let Some(label) = label {
             Some(label.clone())
         } else {
-            self.0
+            self.annotation_info
                 .as_ref()
                 .and_then(|info| info.label.as_ref().map(|label| label.0.clone()))
         }
diff --git a/examples/python/api_demo/main.py b/examples/python/api_demo/main.py
index ba10c4f18514..daaa448677da 100755
--- a/examples/python/api_demo/main.py
+++ b/examples/python/api_demo/main.py
@@ -43,7 +43,7 @@ def run_segmentation() -> None:
         class_ids=np.array([42], dtype=np.uint8),
     )
 
-    rr.log_text_entry("logs/seg_demo_log", "no rects, default colored points, a single point has a label")
+    rr.log_text_entry("logs/seg_demo_log", "default colored rects, default colored points, a single point has a label")
 
     # Log an initial segmentation map with arbitrary colors
     rr.set_time_seconds("sim_time", 2)

From a4f59b25c6e84a08b694f0b463f2a6fd536157fe Mon Sep 17 00:00:00 2001
From: Clement Rey
Date: Thu, 6 Apr 2023 09:41:01 +0200
Subject: [PATCH 23/89] Don't run 3rd party bench suites on CI (#1787)

* don't run 3rd party bench suites on CI

* typo

* and other annoyances

---
 crates/re_arrow_store/Cargo.toml                |  5 +++++
 crates/re_arrow_store/benches/arrow2.rs         | 10
++++++++-- crates/re_arrow_store/benches/arrow2_convert.rs | 10 ++++++++-- crates/re_arrow_store/benches/vectors.rs | 12 ++++++++++-- 4 files changed, 31 insertions(+), 6 deletions(-) diff --git a/crates/re_arrow_store/Cargo.toml b/crates/re_arrow_store/Cargo.toml index a1bef5e6c886..af9a149db7e9 100644 --- a/crates/re_arrow_store/Cargo.toml +++ b/crates/re_arrow_store/Cargo.toml @@ -25,6 +25,11 @@ deadlock_detection = ["parking_lot/deadlock_detection"] ## Integration with `polars`, to efficiently use the datastore with dataframes. polars = ["dep:polars-core", "dep:polars-ops"] +## When set, disables costly benchmark suites that measure the performance of third-party +## libraries. +## Commonly set implicitly by --all-features, e.g. on CI. +dont_bench_third_party = [] + [dependencies] # Rerun dependencies: diff --git a/crates/re_arrow_store/benches/arrow2.rs b/crates/re_arrow_store/benches/arrow2.rs index 9021b26e2dde..af02ac5289a6 100644 --- a/crates/re_arrow_store/benches/arrow2.rs +++ b/crates/re_arrow_store/benches/arrow2.rs @@ -9,7 +9,7 @@ use arrow2::{ array::{Array, PrimitiveArray, StructArray, UnionArray}, compute::aggregate::estimated_bytes_size, }; -use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, Criterion}; use itertools::Itertools; use re_log_types::{ component_types::{InstanceKey, Point2D, Rect2D}, @@ -21,7 +21,13 @@ use re_log_types::{ // --- criterion_group!(benches, erased_clone, estimated_size_bytes); -criterion_main!(benches); + +#[cfg(not(feature = "dont_bench_third_party"))] +criterion::criterion_main!(benches); + +// Don't run these benchmarks on CI: they measure the performance of third-party libraries. +#[cfg(feature = "dont_bench_third_party")] +fn main() {} // --- diff --git a/crates/re_arrow_store/benches/arrow2_convert.rs b/crates/re_arrow_store/benches/arrow2_convert.rs index c53dd6b5bd80..92a070e49f28 100644 --- a/crates/re_arrow_store/benches/arrow2_convert.rs +++ b/crates/re_arrow_store/benches/arrow2_convert.rs @@ -4,7 +4,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; use arrow2::{array::PrimitiveArray, datatypes::PhysicalType, types::PrimitiveType}; -use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, Criterion}; use re_log_types::{ component_types::InstanceKey, external::arrow2_convert::deserialize::TryIntoCollection, Component as _, DataCell, @@ -13,7 +13,13 @@ use re_log_types::{ // --- criterion_group!(benches, serialize, deserialize); -criterion_main!(benches); + +#[cfg(not(feature = "dont_bench_third_party"))] +criterion::criterion_main!(benches); + +// Don't run these benchmarks on CI: they measure the performance of third-party libraries. +#[cfg(feature = "dont_bench_third_party")] +fn main() {} // --- diff --git a/crates/re_arrow_store/benches/vectors.rs b/crates/re_arrow_store/benches/vectors.rs index c61b4667fa92..0ddd3b316e31 100644 --- a/crates/re_arrow_store/benches/vectors.rs +++ b/crates/re_arrow_store/benches/vectors.rs @@ -3,13 +3,21 @@ #[global_allocator] static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; -use criterion::{criterion_group, criterion_main, Criterion}; +use criterion::{criterion_group, Criterion}; use smallvec::SmallVec; use tinyvec::TinyVec; +// --- + criterion_group!(benches, sort, split, swap, swap_opt); -criterion_main!(benches); + +#[cfg(not(feature = "dont_bench_third_party"))] +criterion::criterion_main!(benches); + +// Don't run these benchmarks on CI: they measure the performance of third-party libraries. 
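+// (For instance, a plain `cargo bench` still runs these suites, while building with
+// `--features dont_bench_third_party`, or with `--all-features` as on CI,
+// compiles the suite down to the empty `main` stub below.)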
+#[cfg(feature = "dont_bench_third_party")] +fn main() {} // --- From d6cce1c2ceb0f09bb37839ac9de51b4578ecaac6 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Thu, 6 Apr 2023 10:24:48 +0200 Subject: [PATCH 24/89] Use copilot markers in PR template (#1784) * Use copilot markers in PR template * remove poem Co-authored-by: Clement Rey --------- Co-authored-by: Clement Rey --- .github/pull_request_template.md | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index dc0ec46fa20f..088359ff3a5b 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,3 +1,13 @@ +### WHAT +copilot:summary +​ + +### WHY + + +### HOW +copilot:walkthrough + ### Checklist * [ ] I have read and agree to [Contributor Guide](https://github.com/rerun-io/rerun/blob/main/CONTRIBUTING.md) and the [Code of Conduct](https://github.com/rerun-io/rerun/blob/main/CODE_OF_CONDUCT.md) * [ ] I've included a screenshot or gif (if applicable) From 3be747cfebfa584fffda561d5706676c396bc7da Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Thu, 6 Apr 2023 13:01:02 +0200 Subject: [PATCH 25/89] re_format: barebone support for custom formatting (#1776) * implement barebone support for custom formatting and apply to Tuid * unwrap * rather than [] * use re_tuid --- Cargo.lock | 3 + crates/re_arrow_store/src/arrow_util.rs | 9 +- crates/re_format/Cargo.toml | 4 +- crates/re_format/src/arrow.rs | 93 +++++++++++++++++-- .../src/component_types/msg_id.rs | 4 +- crates/re_log_types/src/data_table.rs | 49 +++++++--- crates/re_tuid/src/lib.rs | 28 +++++- 7 files changed, 159 insertions(+), 31 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6fe5c47177ba..331040f76391 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3861,7 +3861,10 @@ name = "re_format" version = "0.4.0" dependencies = [ "arrow2", + "arrow2_convert", "comfy-table 6.1.4", + "parking_lot 0.12.1", + "re_tuid", ] [[package]] diff --git a/crates/re_arrow_store/src/arrow_util.rs b/crates/re_arrow_store/src/arrow_util.rs index d54d08fd26b4..ef119bd51b31 100644 --- a/crates/re_arrow_store/src/arrow_util.rs +++ b/crates/re_arrow_store/src/arrow_util.rs @@ -51,7 +51,14 @@ impl ArrayExt for dyn Array { /// /// Nested types are expanded and cleaned recursively fn clean_for_polars(&self) -> Box { - match self.data_type() { + let datatype = self.data_type(); + let datatype = if let DataType::Extension(_, inner, _) = datatype { + (**inner).clone() + } else { + datatype.clone() + }; + + match &datatype { DataType::List(field) => { // Recursively clean the contents let typed_arr = self.as_any().downcast_ref::>().unwrap(); diff --git a/crates/re_format/Cargo.toml b/crates/re_format/Cargo.toml index de103832902e..9c9525b85e3c 100644 --- a/crates/re_format/Cargo.toml +++ b/crates/re_format/Cargo.toml @@ -15,7 +15,9 @@ version.workspace = true [package.metadata.docs.rs] all-features = true - [dependencies] arrow2.workspace = true +arrow2_convert.workspace = true comfy-table.workspace = true +parking_lot.workspace = true +re_tuid.workspace = true diff --git a/crates/re_format/src/arrow.rs b/crates/re_format/src/arrow.rs index 5ac404970522..fcc8a4133cee 100644 --- a/crates/re_format/src/arrow.rs +++ b/crates/re_format/src/arrow.rs @@ -3,11 +3,80 @@ use std::fmt::Formatter; use arrow2::{ - array::{get_display, Array}, + array::{get_display, Array, ListArray, StructArray}, datatypes::{DataType, IntervalUnit, TimeUnit}, }; +use arrow2_convert::deserialize::TryIntoCollection; use 
comfy_table::{presets, Cell, Table}; +use re_tuid::Tuid; + +// --- + +// TODO(#1775): Registering custom formatters should be done from other crates: +// A) Because `re_format` cannot depend on other crates (cyclic deps) +// B) Because how to deserialize and inspect some type is a private implementation detail of that +// type, re_format shouldn't know how to deserialize a TUID... + +type CustomFormatter<'a, F> = Box std::fmt::Result + 'a>; + +pub fn get_custom_display<'a, F: std::fmt::Write + 'a>( + _column_name: &'a str, + array: &'a dyn Array, + null: &'static str, +) -> CustomFormatter<'a, F> { + // NOTE: If the top-level array is a list, it's probably not the type we're looking for: we're + // interested in the type of the array that's underneath. + let datatype = (|| match array.data_type().to_logical_type() { + DataType::List(_) => array + .as_any() + .downcast_ref::>()? + .iter() + .next()? + .map(|array| array.data_type().clone()), + _ => Some(array.data_type().clone()), + })(); + + if let Some(DataType::Extension(name, _, _)) = datatype { + match name.as_str() { + // TODO(#1775): This should be registered dynamically. + // NOTE: Can't call `Tuid::name()`, `Component` lives in `re_log_types`. + "rerun.tuid" => Box::new(|w, index| { + if let Some(tuid) = parse_tuid(array, index) { + w.write_fmt(format_args!("{tuid}")) + } else { + w.write_str("") + } + }), + _ => get_display(array, null), + } + } else { + get_display(array, null) + } +} + +// TODO(#1775): This should be defined and registered by the `re_tuid` crate. +fn parse_tuid(array: &dyn Array, index: usize) -> Option { + let (array, index) = match array.data_type().to_logical_type() { + // Legacy MsgId lists: just grab the first value, they're all identical + DataType::List(_) => ( + array + .as_any() + .downcast_ref::>()? + .value(index), + 0, + ), + // New control columns: it's not a list to begin with! 
+        _ => (array.to_boxed(), index),
+    };
+    let array = array.as_any().downcast_ref::<StructArray>()?;
+
+    let tuids: Vec<Tuid> = TryIntoCollection::try_into_collection(array.to_boxed()).ok()?;
+    tuids.get(index).copied()
+}
+
+// ---
+
 //TODO(john) move this and the Display impl upstream into arrow2
 #[repr(transparent)]
 pub struct DisplayTimeUnit(TimeUnit);
@@ -15,10 +84,10 @@ pub struct DisplayTimeUnit(TimeUnit);
 impl std::fmt::Display for DisplayTimeUnit {
     fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
         let s = match self.0 {
-            arrow2::datatypes::TimeUnit::Second => "s",
-            arrow2::datatypes::TimeUnit::Millisecond => "ms",
-            arrow2::datatypes::TimeUnit::Microsecond => "us",
-            arrow2::datatypes::TimeUnit::Nanosecond => "ns",
+            TimeUnit::Second => "s",
+            TimeUnit::Millisecond => "ms",
+            TimeUnit::Microsecond => "us",
+            TimeUnit::Nanosecond => "ns",
         };
         f.write_str(s)
     }
@@ -133,11 +202,19 @@ where
     let mut table = Table::new();
     table.load_preset(presets::UTF8_FULL);
 
+    let names = names
+        .into_iter()
+        .map(|name| name.as_ref().to_owned())
+        .collect::<Vec<_>>();
     let arrays = columns.into_iter().collect::<Vec<_>>();
 
     let (displayers, lengths): (Vec<_>, Vec<_>) = arrays
         .iter()
-        .map(|array| (get_display(array.as_ref(), "-"), array.as_ref().len()))
+        .zip(names.iter())
+        .map(|(array, name)| {
+            let formatter = get_custom_display(name, array.as_ref(), "-");
+            (formatter, array.as_ref().len())
+        })
         .unzip();
 
     if displayers.is_empty() {
@@ -145,12 +222,12 @@
     }
 
     let header = names
-        .into_iter()
+        .iter()
         .zip(arrays.iter().map(|array| array.as_ref().data_type()))
         .map(|(name, data_type)| {
             Cell::new(format!(
                 "{}\n---\n{}",
-                name.as_ref(),
+                name,
                 DisplayDataType(data_type.clone())
             ))
         });
diff --git a/crates/re_log_types/src/component_types/msg_id.rs b/crates/re_log_types/src/component_types/msg_id.rs
index 104f444cac09..8ab6a59da1f5 100644
--- a/crates/re_log_types/src/component_types/msg_id.rs
+++ b/crates/re_log_types/src/component_types/msg_id.rs
@@ -12,10 +12,10 @@ use crate::{Component, ComponentName};
 /// # use arrow2::datatypes::{DataType, Field};
 /// assert_eq!(
 ///     MsgId::data_type(),
-///     DataType::Struct(vec![
+///     DataType::Extension("rerun.tuid".into(), Box::new(DataType::Struct(vec![
 ///         Field::new("time_ns", DataType::UInt64, false),
 ///         Field::new("inc", DataType::UInt64, false),
-///     ])
+///     ])), None),
 /// );
 /// ```
 #[derive(
diff --git a/crates/re_log_types/src/data_table.rs b/crates/re_log_types/src/data_table.rs
index 3ae3796288fb..49eccc0a43a9 100644
--- a/crates/re_log_types/src/data_table.rs
+++ b/crates/re_log_types/src/data_table.rs
@@ -150,15 +150,15 @@ impl std::ops::IndexMut<usize> for DataCellColumn {
 ///
 /// The table above translates to the following, where each column is contiguous in memory:
 /// ```text
-/// ┌───────────────────────┬───────────────────────────────────┬───────────────────┬─────────────────────┬─────────────┬──────────────────────────────────┬─────────────────┐
-/// │ rerun.row_id          ┆ rerun.timepoint                   ┆ rerun.entity_path ┆ rerun.num_instances ┆ rerun.label ┆ rerun.point2d                    ┆ rerun.colorrgba │
-/// ╞═══════════════════════╪═══════════════════════════════════╪═══════════════════╪═════════════════════╪═════════════╪══════════════════════════════════╪═════════════════╡
-/// │ {167967218, 54449486} ┆ [{frame_nr, 1, 1}, {clock, 1, 1}] ┆ a                 ┆ 2                   ┆ []          ┆ [{x: 10, y: 10}, {x: 20, y: 20}] ┆ [2155905279]    │
-/// ├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┤
-/// │ {167967218, 54449486} ┆ [{frame_nr, 1, 1}, {clock, 1, 2}] ┆ b                 ┆ 0                   ┆ -           ┆ -                                ┆ []              │
-/// ├╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┤
-/// │ {167967218, 54449486} ┆ [{frame_nr, 1, 2}, {clock, 1, 1}] ┆ c                 ┆ 1                   ┆ [hey]       ┆ -                                ┆ [4294967295]    │
-/// └───────────────────────┴───────────────────────────────────┴───────────────────┴─────────────────────┴─────────────┴──────────────────────────────────┴─────────────────┘
+/// ┌──────────┬───────────────────────────────┬──────────────────────────────────┬───────────────────┬─────────────────────┬─────────────┬──────────────────────────────────┬─────────────────┐
+/// │ frame_nr ┆ log_time                      ┆ rerun.row_id                     ┆ rerun.entity_path ┆ rerun.num_instances ┆ rerun.label ┆ rerun.point2d                    ┆ rerun.colorrgba │
+/// ╞══════════╪═══════════════════════════════╪══════════════════════════════════╪═══════════════════╪═════════════════════╪═════════════╪══════════════════════════════════╪═════════════════╡
+/// │ 1        ┆ 2023-04-05 09:36:47.188796402 ┆ 1753004ACBF5D6E651F2983C3DAF260C ┆ a                 ┆ 2                   ┆ []          ┆ [{x: 10, y: 10}, {x: 20, y: 20}] ┆ [2155905279]    │
+/// ├╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┤
+/// │ 1        ┆ 2023-04-05 09:36:47.188852222 ┆ 1753004ACBF5D6E651F2983C3DAF260C ┆ b                 ┆ 0                   ┆ -           ┆ -                                ┆ []              │
+/// ├╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┼╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌╌┤
+/// │ 2        ┆ 2023-04-05 09:36:47.188855872 ┆ 1753004ACBF5D6E651F2983C3DAF260C ┆ c                 ┆ 1                   ┆ [hey]       ┆ -                                ┆ [4294967295]    │
+/// └──────────┴───────────────────────────────┴──────────────────────────────────┴───────────────────┴─────────────────────┴─────────────┴──────────────────────────────────┴─────────────────┘
 /// ```
 ///
 /// ## Example
@@ -533,8 +533,6 @@ impl DataTable {
             [(METADATA_KIND.to_owned(), METADATA_KIND_CONTROL.to_owned())].into(),
         );
 
-        // TODO(cmc): why do we have to do this manually on the way out, but it's done
-        // automatically on our behalf on the way in...?
         if let DataType::Extension(name, _, _) = data.data_type() {
             field
                 .metadata
@@ -627,15 +625,20 @@ impl DataTable {
             .map(|cell| cell.as_arrow_ref())
             .collect_vec();
 
+        let ext_name = cell_refs.first().and_then(|cell| match cell.data_type() {
+            DataType::Extension(name, _, _) => Some(name),
+            _ => None,
+        });
+
         // NOTE: Avoid paying for the cost of the concatenation machinery if there's a single
        // row in the column.
         let data = if cell_refs.len() == 1 {
-            data_to_lists(column, cell_refs[0].to_boxed())
+            data_to_lists(column, cell_refs[0].to_boxed(), ext_name.cloned())
         } else {
             // NOTE: This is a column of cells, it shouldn't ever fail to concatenate since
             // they share the same underlying type.
             let data = arrow2::compute::concatenate::concatenate(cell_refs.as_slice())?;
-            data_to_lists(column, data)
+            data_to_lists(column, data, ext_name.cloned())
         };
 
         let field = Field::new(name, data.data_type().clone(), false)
@@ -648,10 +651,26 @@ impl DataTable {
     ///
     /// * Before: `[C, C, C, C, C, C, C, ...]`
     /// * After: `ListArray[ [[C, C], [C, C, C], None, [C], [C], ...]
]` - fn data_to_lists(column: &[Option], data: Box) -> Box { + fn data_to_lists( + column: &[Option], + data: Box, + ext_name: Option, + ) -> Box { let datatype = data.data_type().clone(); - let datatype = ListArray::::default_datatype(datatype); + let field = { + let mut field = Field::new("item", datatype, true); + + if let Some(name) = ext_name { + field + .metadata + .extend([("ARROW:extension:name".to_owned(), name)]); + } + + field + }; + + let datatype = DataType::List(Box::new(field)); let offsets = Offsets::try_from_lengths(column.iter().map(|cell| { cell.as_ref() .map_or(0, |cell| cell.num_instances() as usize) diff --git a/crates/re_tuid/src/lib.rs b/crates/re_tuid/src/lib.rs index 49c0840dad3d..072b65261da5 100644 --- a/crates/re_tuid/src/lib.rs +++ b/crates/re_tuid/src/lib.rs @@ -6,11 +6,10 @@ #![doc = document_features::document_features!()] //! -use arrow2_convert::{ArrowDeserialize, ArrowField, ArrowSerialize}; +use arrow2::datatypes::DataType; +use arrow2_convert::{ArrowDeserialize, ArrowSerialize}; -#[derive( - Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, ArrowField, ArrowSerialize, ArrowDeserialize, -)] +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, ArrowSerialize, ArrowDeserialize)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct Tuid { /// Approximate nanoseconds since epoch. @@ -21,6 +20,27 @@ pub struct Tuid { inc: u64, } +arrow2_convert::arrow_enable_vec_for_type!(Tuid); + +// TODO(#1774): shouldn't have to write this manually +impl arrow2_convert::field::ArrowField for Tuid { + type Type = Self; + + fn data_type() -> arrow2::datatypes::DataType { + let datatype = arrow2::datatypes::DataType::Struct(<[_]>::into_vec(Box::new([ + ::field("time_ns"), + ::field("inc"), + ]))); + DataType::Extension("rerun.tuid".into(), Box::new(datatype), None) + } +} + +impl std::fmt::Display for Tuid { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{:032X}", self.as_u128()) + } +} + impl std::fmt::Debug for Tuid { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{:032X}", self.as_u128()) From aedf1c0fb72b871ae01a6b7ff231a835e6eb2b27 Mon Sep 17 00:00:00 2001 From: Jeremy Leibs Date: Thu, 6 Apr 2023 11:57:52 -0400 Subject: [PATCH 26/89] Always send recording_id as part of LogMsg (#1778) * Always send recording_id as part of LogMsg * Rename build_chunk_from_components -> build_data_table_from_components * Don't make RecordingInfo optional * Always default the recording id * Log an error if we hit the initialization issue --- crates/re_data_store/examples/memory_usage.rs | 13 +++- crates/re_data_store/src/log_db.rs | 4 +- .../benches/msg_encode_benchmark.rs | 29 +++---- crates/re_log_types/src/lib.rs | 20 +++-- crates/re_sdk/src/msg_sender.rs | 31 ++++++-- crates/re_sdk/src/session.rs | 36 +++++++-- crates/re_sdk_comms/src/server.rs | 6 +- crates/re_viewer/src/app.rs | 53 +++++++------ crates/re_viewer/src/ui/data_ui/log_msg.rs | 4 +- crates/re_viewer/src/ui/event_log_view.rs | 4 +- rerun_py/src/arrow.rs | 17 ++-- rerun_py/src/python_bridge.rs | 37 +++++---- rerun_py/src/python_session.rs | 77 +++++++++++-------- 13 files changed, 205 insertions(+), 126 deletions(-) diff --git a/crates/re_data_store/examples/memory_usage.rs b/crates/re_data_store/examples/memory_usage.rs index 105d7e1a0014..ff5d1faba8a4 100644 --- a/crates/re_data_store/examples/memory_usage.rs +++ b/crates/re_data_store/examples/memory_usage.rs @@ -48,7 +48,7 @@ fn live_bytes() -> usize { // 
---------------------------------------------------------------------------- -use re_log_types::{entity_path, DataRow, MsgId}; +use re_log_types::{entity_path, DataRow, MsgId, RecordingId}; fn main() { log_messages(); @@ -91,6 +91,7 @@ fn log_messages() { const NUM_POINTS: usize = 1_000; + let recording_id = RecordingId::random(); let timeline = Timeline::new_sequence("frame_nr"); let mut time_point = TimePoint::default(); time_point.insert(timeline, TimeInt::from(0)); @@ -116,7 +117,10 @@ fn log_messages() { .into_table(), ); let table_bytes = live_bytes() - used_bytes_start; - let log_msg = Box::new(LogMsg::ArrowMsg(ArrowMsg::try_from(&*table).unwrap())); + let log_msg = Box::new(LogMsg::ArrowMsg( + recording_id, + ArrowMsg::try_from(&*table).unwrap(), + )); let log_msg_bytes = live_bytes() - used_bytes_start; println!("Arrow payload containing a Pos2 uses {table_bytes} bytes in RAM"); let encoded = encode_log_msg(&log_msg); @@ -139,7 +143,10 @@ fn log_messages() { .into_table(), ); let table_bytes = live_bytes() - used_bytes_start; - let log_msg = Box::new(LogMsg::ArrowMsg(ArrowMsg::try_from(&*table).unwrap())); + let log_msg = Box::new(LogMsg::ArrowMsg( + recording_id, + ArrowMsg::try_from(&*table).unwrap(), + )); let log_msg_bytes = live_bytes() - used_bytes_start; println!("Arrow payload containing a Pos2 uses {table_bytes} bytes in RAM"); let encoded = encode_log_msg(&log_msg); diff --git a/crates/re_data_store/src/log_db.rs b/crates/re_data_store/src/log_db.rs index 50a5ce57a703..4ae4002aae07 100644 --- a/crates/re_data_store/src/log_db.rs +++ b/crates/re_data_store/src/log_db.rs @@ -235,7 +235,7 @@ impl LogDb { match &msg { LogMsg::BeginRecordingMsg(msg) => self.add_begin_recording_msg(msg), - LogMsg::EntityPathOpMsg(msg) => { + LogMsg::EntityPathOpMsg(_, msg) => { let EntityPathOpMsg { msg_id, time_point, @@ -243,7 +243,7 @@ impl LogDb { } = msg; self.entity_db.add_path_op(*msg_id, time_point, path_op); } - LogMsg::ArrowMsg(inner) => self.entity_db.try_add_arrow_msg(inner)?, + LogMsg::ArrowMsg(_, inner) => self.entity_db.try_add_arrow_msg(inner)?, LogMsg::Goodbye(_) => {} } diff --git a/crates/re_log_types/benches/msg_encode_benchmark.rs b/crates/re_log_types/benches/msg_encode_benchmark.rs index d9131ef9f9f9..8faca63d2371 100644 --- a/crates/re_log_types/benches/msg_encode_benchmark.rs +++ b/crates/re_log_types/benches/msg_encode_benchmark.rs @@ -6,7 +6,7 @@ static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; use re_log_types::{ datagen::{build_frame_nr, build_some_colors, build_some_point2d}, - entity_path, ArrowMsg, DataRow, DataTable, Index, LogMsg, MsgId, + entity_path, ArrowMsg, DataRow, DataTable, Index, LogMsg, MsgId, RecordingId, }; use criterion::{criterion_group, criterion_main, Criterion}; @@ -42,10 +42,10 @@ fn decode_log_msgs(mut bytes: &[u8]) -> Vec { messages } -fn generate_messages(tables: &[DataTable]) -> Vec { +fn generate_messages(recording_id: RecordingId, tables: &[DataTable]) -> Vec { tables .iter() - .map(|table| LogMsg::ArrowMsg(ArrowMsg::try_from(table).unwrap())) + .map(|table| LogMsg::ArrowMsg(recording_id, ArrowMsg::try_from(table).unwrap())) .collect() } @@ -53,7 +53,7 @@ fn decode_tables(messages: &[LogMsg]) -> Vec { messages .iter() .map(|log_msg| { - if let LogMsg::ArrowMsg(arrow_msg) = log_msg { + if let LogMsg::ArrowMsg(_, arrow_msg) = log_msg { DataTable::try_from(arrow_msg).unwrap() } else { unreachable!() @@ -81,6 +81,7 @@ fn mono_points_arrow(c: &mut Criterion) { } { + let recording_id = RecordingId::random(); let mut group = 
c.benchmark_group("mono_points_arrow"); group.throughput(criterion::Throughput::Elements(NUM_POINTS as _)); group.bench_function("generate_message_bundles", |b| { @@ -88,14 +89,14 @@ fn mono_points_arrow(c: &mut Criterion) { }); let tables = generate_tables(); group.bench_function("generate_messages", |b| { - b.iter(|| generate_messages(&tables)); + b.iter(|| generate_messages(recording_id, &tables)); }); - let messages = generate_messages(&tables); + let messages = generate_messages(recording_id, &tables); group.bench_function("encode_log_msg", |b| { b.iter(|| encode_log_msgs(&messages)); }); group.bench_function("encode_total", |b| { - b.iter(|| encode_log_msgs(&generate_messages(&generate_tables()))); + b.iter(|| encode_log_msgs(&generate_messages(recording_id, &generate_tables()))); }); let encoded = encode_log_msgs(&messages); @@ -136,6 +137,7 @@ fn mono_points_arrow_batched(c: &mut Criterion) { } { + let recording_id = RecordingId::random(); let mut group = c.benchmark_group("mono_points_arrow_batched"); group.throughput(criterion::Throughput::Elements(NUM_POINTS as _)); group.bench_function("generate_message_bundles", |b| { @@ -143,14 +145,14 @@ fn mono_points_arrow_batched(c: &mut Criterion) { }); let tables = [generate_table()]; group.bench_function("generate_messages", |b| { - b.iter(|| generate_messages(&tables)); + b.iter(|| generate_messages(recording_id, &tables)); }); - let messages = generate_messages(&tables); + let messages = generate_messages(recording_id, &tables); group.bench_function("encode_log_msg", |b| { b.iter(|| encode_log_msgs(&messages)); }); group.bench_function("encode_total", |b| { - b.iter(|| encode_log_msgs(&generate_messages(&[generate_table()]))); + b.iter(|| encode_log_msgs(&generate_messages(recording_id, &[generate_table()]))); }); let encoded = encode_log_msgs(&messages); @@ -192,6 +194,7 @@ fn batch_points_arrow(c: &mut Criterion) { } { + let recording_id = RecordingId::random(); let mut group = c.benchmark_group("batch_points_arrow"); group.throughput(criterion::Throughput::Elements(NUM_POINTS as _)); group.bench_function("generate_message_bundles", |b| { @@ -199,14 +202,14 @@ fn batch_points_arrow(c: &mut Criterion) { }); let tables = generate_tables(); group.bench_function("generate_messages", |b| { - b.iter(|| generate_messages(&tables)); + b.iter(|| generate_messages(recording_id, &tables)); }); - let messages = generate_messages(&tables); + let messages = generate_messages(recording_id, &tables); group.bench_function("encode_log_msg", |b| { b.iter(|| encode_log_msgs(&messages)); }); group.bench_function("encode_total", |b| { - b.iter(|| encode_log_msgs(&generate_messages(&generate_tables()))); + b.iter(|| encode_log_msgs(&generate_messages(recording_id, &generate_tables()))); }); let encoded = encode_log_msgs(&messages); diff --git a/crates/re_log_types/src/lib.rs b/crates/re_log_types/src/lib.rs index 61b0aa817f67..aa3c608b7e58 100644 --- a/crates/re_log_types/src/lib.rs +++ b/crates/re_log_types/src/lib.rs @@ -173,10 +173,10 @@ pub enum LogMsg { BeginRecordingMsg(BeginRecordingMsg), /// Server-backed operation on an [`EntityPath`]. - EntityPathOpMsg(EntityPathOpMsg), + EntityPathOpMsg(RecordingId, EntityPathOpMsg), /// Log an entity using an [`ArrowMsg`]. - ArrowMsg(ArrowMsg), + ArrowMsg(RecordingId, ArrowMsg), /// Sent when the client shuts down the connection. 
Goodbye(MsgId), @@ -186,19 +186,27 @@ impl LogMsg { pub fn id(&self) -> MsgId { match self { Self::BeginRecordingMsg(msg) => msg.msg_id, - Self::EntityPathOpMsg(msg) => msg.msg_id, + Self::EntityPathOpMsg(_, msg) => msg.msg_id, Self::Goodbye(msg_id) => *msg_id, // TODO(#1619): the following only makes sense because, while we support sending and // receiving batches, we don't actually do so yet. // We need to stop storing raw `LogMsg`s before we can benefit from our batching. - Self::ArrowMsg(msg) => msg.table_id, + Self::ArrowMsg(_, msg) => msg.table_id, + } + } + + pub fn recording_id(&self) -> Option<&RecordingId> { + match self { + Self::BeginRecordingMsg(msg) => Some(&msg.info.recording_id), + Self::EntityPathOpMsg(recording_id, _) | Self::ArrowMsg(recording_id, _) => { + Some(recording_id) + } + Self::Goodbye(_) => None, } } } impl_into_enum!(BeginRecordingMsg, LogMsg, BeginRecordingMsg); -impl_into_enum!(EntityPathOpMsg, LogMsg, EntityPathOpMsg); -impl_into_enum!(ArrowMsg, LogMsg, ArrowMsg); // ---------------------------------------------------------------------------- diff --git a/crates/re_sdk/src/msg_sender.rs b/crates/re_sdk/src/msg_sender.rs index 790a06e71bd4..9ab867a3d452 100644 --- a/crates/re_sdk/src/msg_sender.rs +++ b/crates/re_sdk/src/msg_sender.rs @@ -1,11 +1,13 @@ -use re_log_types::{component_types::InstanceKey, DataRow, DataTableError}; +use std::borrow::Borrow; + +use re_log_types::{component_types::InstanceKey, DataRow, DataTableError, RecordingId}; use crate::{ components::Transform, log::{DataCell, LogMsg, MsgId}, sink::LogSink, time::{Time, TimeInt, TimePoint, Timeline}, - Component, EntityPath, SerializableComponent, + Component, EntityPath, SerializableComponent, Session, }; // TODO(#1619): Rust SDK batching @@ -229,13 +231,17 @@ impl MsgSender { /// Consumes, packs, sanity checks and finally sends the message to the currently configured /// target of the SDK. - pub fn send(self, sink: &impl std::borrow::Borrow) -> Result<(), DataTableError> { - self.send_to_sink(sink.borrow()) + pub fn send(self, session: &Session) -> Result<(), DataTableError> { + self.send_to_sink(session.recording_id(), session.borrow()) } /// Consumes, packs, sanity checks and finally sends the message to the currently configured /// target of the SDK. - fn send_to_sink(self, sink: &dyn LogSink) -> Result<(), DataTableError> { + fn send_to_sink( + self, + recording_id: RecordingId, + sink: &dyn LogSink, + ) -> Result<(), DataTableError> { if !sink.is_enabled() { return Ok(()); // silently drop the message } @@ -243,15 +249,24 @@ impl MsgSender { let [row_standard, row_transforms, row_splats] = self.into_rows(); if let Some(row_transforms) = row_transforms { - sink.send(LogMsg::ArrowMsg((&row_transforms.into_table()).try_into()?)); + sink.send(LogMsg::ArrowMsg( + recording_id, + (&row_transforms.into_table()).try_into()?, + )); } if let Some(row_splats) = row_splats { - sink.send(LogMsg::ArrowMsg((&row_splats.into_table()).try_into()?)); + sink.send(LogMsg::ArrowMsg( + recording_id, + (&row_splats.into_table()).try_into()?, + )); } // Always the primary component last so range-based queries will include the other data. // Since the primary component can't be splatted it must be in msg_standard, see(#1215). 
if let Some(row_standard) = row_standard { - sink.send(LogMsg::ArrowMsg((&row_standard.into_table()).try_into()?)); + sink.send(LogMsg::ArrowMsg( + recording_id, + (&row_standard.into_table()).try_into()?, + )); } Ok(()) diff --git a/crates/re_sdk/src/session.rs b/crates/re_sdk/src/session.rs index ba2eb9a3b1fa..bbf99ee93451 100644 --- a/crates/re_sdk/src/session.rs +++ b/crates/re_sdk/src/session.rs @@ -189,6 +189,7 @@ impl SessionBuilder { #[must_use] #[derive(Clone)] pub struct Session { + recording_info: RecordingInfo, sink: Arc, // TODO(emilk): add convenience `TimePoint` here so that users can // do things like `session.set_time_sequence("frame", frame_idx);` @@ -222,13 +223,16 @@ impl Session { sink.send( re_log_types::BeginRecordingMsg { msg_id: re_log_types::MsgId::random(), - info: recording_info, + info: recording_info.clone(), } .into(), ); } - Self { sink: sink.into() } + Self { + recording_info, + sink: sink.into(), + } } /// Construct a new session with a disabled "dummy" sink that drops all logging messages. @@ -236,6 +240,16 @@ impl Session { /// [`Self::is_enabled`] will return `false`. pub fn disabled() -> Self { Self { + recording_info: RecordingInfo { + application_id: ApplicationId::unknown(), + recording_id: Default::default(), + is_official_example: crate::called_from_official_rust_example(), + started: Time::now(), + recording_source: RecordingSource::RustSdk { + rustc_version: env!("RE_BUILD_RUSTC_VERSION").into(), + llvm_version: env!("RE_BUILD_LLVM_VERSION").into(), + }, + }, sink: crate::sink::disabled().into(), } } @@ -272,17 +286,25 @@ impl Session { time_point: &re_log_types::TimePoint, path_op: re_log_types::PathOp, ) { - self.send(LogMsg::EntityPathOpMsg(re_log_types::EntityPathOpMsg { - msg_id: re_log_types::MsgId::random(), - time_point: time_point.clone(), - path_op, - })); + self.send(LogMsg::EntityPathOpMsg( + self.recording_id(), + re_log_types::EntityPathOpMsg { + msg_id: re_log_types::MsgId::random(), + time_point: time_point.clone(), + path_op, + }, + )); } /// Drain all buffered [`LogMsg`]es and return them. pub fn drain_backlog(&self) -> Vec { self.sink.drain_backlog() } + + /// The current [`RecordingId`]. 
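+    ///
+    /// (Illustrative use: every outgoing message is tagged with this id, along the
+    /// lines of `sink.send(LogMsg::ArrowMsg(session.recording_id(), arrow_msg))`.)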
+    pub fn recording_id(&self) -> RecordingId {
+        self.recording_info.recording_id
+    }
 }
 
 impl AsRef<dyn LogSink> for Session {
diff --git a/crates/re_sdk_comms/src/server.rs b/crates/re_sdk_comms/src/server.rs
index 71c7c786f763..fd1ceca50ea0 100644
--- a/crates/re_sdk_comms/src/server.rs
+++ b/crates/re_sdk_comms/src/server.rs
@@ -209,9 +209,11 @@ impl CongestionManager {
         #[allow(clippy::match_same_arms)]
         match msg {
             // we don't want to drop any of these
-            LogMsg::BeginRecordingMsg(_) | LogMsg::EntityPathOpMsg(_) | LogMsg::Goodbye(_) => true,
+            LogMsg::BeginRecordingMsg(_) | LogMsg::EntityPathOpMsg(_, _) | LogMsg::Goodbye(_) => {
+                true
+            }
 
-            LogMsg::ArrowMsg(arrow_msg) => self.should_send_time_point(&arrow_msg.timepoint_max),
+            LogMsg::ArrowMsg(_, arrow_msg) => self.should_send_time_point(&arrow_msg.timepoint_max),
         }
     }
 
diff --git a/crates/re_viewer/src/app.rs b/crates/re_viewer/src/app.rs
index 56b00df98563..04dd0127d173 100644
--- a/crates/re_viewer/src/app.rs
+++ b/crates/re_viewer/src/app.rs
@@ -677,34 +677,37 @@ impl App {
         let start = instant::Instant::now();
 
         while let Ok(msg) = self.rx.try_recv() {
-            let is_new_recording = if let LogMsg::BeginRecordingMsg(msg) = &msg {
-                re_log::debug!("Opening a new recording: {:?}", msg.info);
-                self.state.selected_rec_id = msg.info.recording_id;
-                true
-            } else {
-                false
-            };
+            // All messages except [`LogMsg::Goodbye`] should have an associated recording id
+            if let Some(recording_id) = msg.recording_id() {
+                let is_new_recording = if let LogMsg::BeginRecordingMsg(msg) = &msg {
+                    re_log::debug!("Opening a new recording: {:?}", msg.info);
+                    self.state.selected_rec_id = msg.info.recording_id;
+                    true
+                } else {
+                    false
+                };
 
-            let log_db = self.log_dbs.entry(self.state.selected_rec_id).or_default();
+                let log_db = self.log_dbs.entry(*recording_id).or_default();
 
-            if log_db.data_source.is_none() {
-                log_db.data_source = Some(self.rx.source().clone());
-            }
+                if log_db.data_source.is_none() {
+                    log_db.data_source = Some(self.rx.source().clone());
+                }
 
-            if let Err(err) = log_db.add(msg) {
-                re_log::error!("Failed to add incoming msg: {err}");
-            };
+                if let Err(err) = log_db.add(msg) {
+                    re_log::error!("Failed to add incoming msg: {err}");
+                };
 
-            if is_new_recording {
-                // Do analytics after ingesting the new message,
-                // because thats when the `log_db.recording_info` is set,
-                // which we use in the analytics call.
-                self.analytics.on_open_recording(log_db);
-            }
+                if is_new_recording {
+                    // Do analytics after ingesting the new message,
+                    // because that's when the `log_db.recording_info` is set,
+                    // which we use in the analytics call.
+                    self.analytics.on_open_recording(log_db);
+                }
 
-            if start.elapsed() > instant::Duration::from_millis(10) {
-                egui_ctx.request_repaint(); // make sure we keep receiving messages asap
-                break; // don't block the main thread for too long
+                if start.elapsed() > instant::Duration::from_millis(10) {
+                    egui_ctx.request_repaint(); // make sure we keep receiving messages asap
+                    break; // don't block the main thread for too long
+                }
             }
         }
     }
@@ -1767,7 +1770,7 @@ fn save_database_to_file(
             LogMsg::BeginRecordingMsg(_) | LogMsg::Goodbye(_) => {
                 true // timeless
             }
-            LogMsg::EntityPathOpMsg(EntityPathOpMsg { time_point, ..
}) => { time_point.is_timeless() || { let is_within_range = time_point .get(&timeline) @@ -1775,7 +1778,7 @@ fn save_database_to_file( is_within_range } } - LogMsg::ArrowMsg(_) => { + LogMsg::ArrowMsg(_, _) => { // TODO(john) false } diff --git a/crates/re_viewer/src/ui/data_ui/log_msg.rs b/crates/re_viewer/src/ui/data_ui/log_msg.rs index b536284f6eeb..5c19a09fa3b8 100644 --- a/crates/re_viewer/src/ui/data_ui/log_msg.rs +++ b/crates/re_viewer/src/ui/data_ui/log_msg.rs @@ -16,8 +16,8 @@ impl DataUi for LogMsg { ) { match self { LogMsg::BeginRecordingMsg(msg) => msg.data_ui(ctx, ui, verbosity, query), - LogMsg::EntityPathOpMsg(msg) => msg.data_ui(ctx, ui, verbosity, query), - LogMsg::ArrowMsg(msg) => msg.data_ui(ctx, ui, verbosity, query), + LogMsg::EntityPathOpMsg(_, msg) => msg.data_ui(ctx, ui, verbosity, query), + LogMsg::ArrowMsg(_, msg) => msg.data_ui(ctx, ui, verbosity, query), LogMsg::Goodbye(_) => { ui.label("Goodbye"); } diff --git a/crates/re_viewer/src/ui/event_log_view.rs b/crates/re_viewer/src/ui/event_log_view.rs index 94c96edddfe8..89aea702faa2 100644 --- a/crates/re_viewer/src/ui/event_log_view.rs +++ b/crates/re_viewer/src/ui/event_log_view.rs @@ -141,7 +141,7 @@ fn table_row( ui.monospace(format!("{application_id} - {recording_id:?}")); }); } - LogMsg::EntityPathOpMsg(msg) => { + LogMsg::EntityPathOpMsg(_, msg) => { let EntityPathOpMsg { msg_id, time_point, @@ -176,7 +176,7 @@ fn table_row( // NOTE: This really only makes sense because we don't yet have batches with more than a // single row at the moment... and by the time we do, the event log view will have // disappeared entirely. - LogMsg::ArrowMsg(msg) => match DataTable::try_from(msg) { + LogMsg::ArrowMsg(_, msg) => match DataTable::try_from(msg) { Ok(table) => { for datarow in table.as_rows() { row.col(|ui| { diff --git a/rerun_py/src/arrow.rs b/rerun_py/src/arrow.rs index 8e75cc60bc4d..24cf0e43beb1 100644 --- a/rerun_py/src/arrow.rs +++ b/rerun_py/src/arrow.rs @@ -9,9 +9,7 @@ use pyo3::{ types::{IntoPyDict, PyString}, PyAny, PyResult, }; -use re_log_types::{ - component_types, DataCell, DataRow, DataTableError, EntityPath, LogMsg, MsgId, TimePoint, -}; +use re_log_types::{component_types, DataCell, DataRow, DataTable, EntityPath, MsgId, TimePoint}; /// Perform conversion between a pyarrow array to arrow2 types. /// @@ -82,13 +80,12 @@ pub fn get_registered_component_names(py: pyo3::Python<'_>) -> PyResult<&PyDict> Ok(fields.into_py_dict(py)) } -/// Build a [`LogMsg`] and vector of [`Field`] given a '**kwargs'-style dictionary of -/// component arrays. -pub fn build_chunk_from_components( +/// Build a [`DataTable`] given a '**kwargs'-style dictionary of component arrays. 
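+///
+/// (For example, a Python-side call roughly like
+/// `bindings.log_arrow_msg("points", components={"rerun.point2d": arr}, timeless=False)`
+/// lands here: the dict keys name component types, the values are pyarrow arrays.)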
+pub fn build_data_table_from_components( entity_path: &EntityPath, components: &PyDict, time_point: &TimePoint, -) -> PyResult { +) -> PyResult { let (arrays, fields): (Vec>, Vec) = itertools::process_results( components.iter().map(|(name, array)| { let name = name.downcast::()?.to_str()?; @@ -112,9 +109,7 @@ pub fn build_chunk_from_components( cells, ); - let msg = (&row.into_table()) - .try_into() - .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; + let data_table = row.into_table(); - Ok(LogMsg::ArrowMsg(msg)) + Ok(data_table) } diff --git a/rerun_py/src/python_bridge.rs b/rerun_py/src/python_bridge.rs index d42f3ef61a1a..02926a8b51ce 100644 --- a/rerun_py/src/python_bridge.rs +++ b/rerun_py/src/python_bridge.rs @@ -11,9 +11,9 @@ use pyo3::{ types::PyDict, }; -use re_log_types::{DataRow, DataTableError}; +use re_log_types::{ArrowMsg, DataRow, DataTableError}; use rerun::{ - log::{LogMsg, MsgId, PathOp}, + log::{MsgId, PathOp}, time::{Time, TimeInt, TimePoint, TimeType, Timeline}, ApplicationId, EntityPath, RecordingId, }; @@ -243,10 +243,13 @@ fn main(py: Python<'_>, argv: Vec) -> PyResult { #[pyfunction] fn get_recording_id() -> PyResult { - python_session() - .recording_id() - .ok_or_else(|| PyTypeError::new_err("module has not been initialized")) - .map(|recording_id| recording_id.to_string()) + let recording_id = python_session().recording_id(); + + if recording_id == RecordingId::ZERO { + Err(PyTypeError::new_err("module has not been initialized")) + } else { + Ok(recording_id.to_string()) + } } #[pyfunction] @@ -485,7 +488,7 @@ fn log_transform( .try_into() .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; - session.send(LogMsg::ArrowMsg(msg)); + session.send_arrow_msg(msg); Ok(()) } @@ -569,7 +572,7 @@ fn log_view_coordinates( .try_into() .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; - session.send(LogMsg::ArrowMsg(msg)); + session.send_arrow_msg(msg); Ok(()) } @@ -703,7 +706,7 @@ fn log_meshes( .try_into() .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; - session.send(LogMsg::ArrowMsg(msg)); + session.send_arrow_msg(msg); Ok(()) } @@ -784,7 +787,7 @@ fn log_mesh_file( .try_into() .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; - session.send(LogMsg::ArrowMsg(msg)); + session.send_arrow_msg(msg); Ok(()) } @@ -876,7 +879,7 @@ fn log_image_file( .try_into() .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; - session.send(LogMsg::ArrowMsg(msg)); + session.send_arrow_msg(msg); Ok(()) } @@ -955,7 +958,7 @@ fn log_annotation_context( .try_into() .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?; - session.send(LogMsg::ArrowMsg(msg)); + session.send_arrow_msg(msg); Ok(()) } @@ -979,10 +982,16 @@ fn log_arrow_msg(entity_path: &str, components: &PyDict, timeless: bool) -> PyRe // It's important that we don't hold the session lock while building our arrow component. // the API we call to back through pyarrow temporarily releases the GIL, which can cause // cause a deadlock. 
-    let msg = crate::arrow::build_chunk_from_components(&entity_path, components, &time(timeless))?;
+    let data_table =
+        crate::arrow::build_data_table_from_components(&entity_path, components, &time(timeless))?;
 
     let mut session = python_session();
-    session.send(msg);
+
+    let msg: ArrowMsg = (&data_table)
+        .try_into()
+        .map_err(|err: DataTableError| PyValueError::new_err(err.to_string()))?;
+
+    session.send_arrow_msg(msg);
 
     Ok(())
 }
diff --git a/rerun_py/src/python_session.rs b/rerun_py/src/python_session.rs
index 9b4a1da61c7e..a44a7619747b 100644
--- a/rerun_py/src/python_session.rs
+++ b/rerun_py/src/python_session.rs
@@ -1,7 +1,7 @@
 use std::net::SocketAddr;
 
 use re_log_types::{
-    ApplicationId, BeginRecordingMsg, LogMsg, MsgId, PathOp, RecordingId, RecordingInfo,
+    ApplicationId, ArrowMsg, BeginRecordingMsg, LogMsg, MsgId, PathOp, RecordingId, RecordingInfo,
     RecordingSource, Time, TimePoint,
 };
 
@@ -13,7 +13,7 @@ use rerun::sink::LogSink;
 struct RecordingMetaData {
     recording_source: RecordingSource,
     application_id: Option<ApplicationId>,
-    recording_id: Option<RecordingId>,
+    recording_id: RecordingId,
     is_official_example: Option<bool>,
 }
 
@@ -23,28 +23,30 @@ impl Default for RecordingMetaData {
             // Will be filled in when we initialize the `rerun` python module.
             recording_source: RecordingSource::Unknown,
             application_id: Default::default(),
-            recording_id: Default::default(),
+            // TODO(https://github.com/rerun-io/rerun/issues/1792): ZERO is not a great choice
+            // here. Ideally we would use `default_recording_id(py)` instead.
+            recording_id: RecordingId::ZERO,
            is_official_example: Default::default(),
         }
     }
 }
 
 impl RecordingMetaData {
-    pub fn to_recording_info(&self) -> Option<RecordingInfo> {
-        let recording_id = self.recording_id?;
+    pub fn to_recording_info(&self) -> RecordingInfo {
+        let recording_id = self.recording_id;
 
         let application_id = self
             .application_id
             .clone()
             .unwrap_or_else(ApplicationId::unknown);
 
-        Some(RecordingInfo {
+        RecordingInfo {
             application_id,
             recording_id,
             is_official_example: self.is_official_example.unwrap_or(false),
             started: Time::now(),
             recording_source: self.recording_source.clone(),
-        })
+        }
     }
 }
 
@@ -116,8 +118,8 @@ impl PythonSession {
         }
     }
 
-    /// The current [`RecordingId`], if set.
-    pub fn recording_id(&self) -> Option<RecordingId> {
+    /// The current [`RecordingId`].
+    pub fn recording_id(&self) -> RecordingId {
         self.recording_meta_data.recording_id
     }
 
@@ -130,8 +132,8 @@ impl PythonSession {
     /// Note that many recordings can share the same [`ApplicationId`], but
     /// they all have unique [`RecordingId`]s.
     pub fn set_recording_id(&mut self, recording_id: RecordingId) {
-        if self.recording_meta_data.recording_id != Some(recording_id) {
-            self.recording_meta_data.recording_id = Some(recording_id);
+        if self.recording_meta_data.recording_id != recording_id {
+            self.recording_meta_data.recording_id = recording_id;
             self.has_sent_begin_recording_msg = false;
         }
     }
@@ -205,33 +207,46 @@ impl PythonSession {
         }
 
         if !self.has_sent_begin_recording_msg {
-            if let Some(info) = self.recording_meta_data.to_recording_info() {
-                re_log::debug!(
-                    "Beginning new recording with application_id {:?} and recording id {}",
-                    info.application_id.0,
-                    info.recording_id
-                );
-
-                self.sink.send(
-                    BeginRecordingMsg {
-                        msg_id: MsgId::random(),
-                        info,
-                    }
-                    .into(),
-                );
-                self.has_sent_begin_recording_msg = true;
+            let info = self.recording_meta_data.to_recording_info();
+
+            // This shouldn't happen, but at least log an error if it does.
+ // See: https://github.com/rerun-io/rerun/issues/1792 + if info.recording_id == RecordingId::ZERO { + re_log::error_once!("RecordingId was still ZERO when sent to server. This is a python initialization bug."); } + + re_log::debug!( + "Beginning new recording with application_id {:?} and recording id {}", + info.application_id.0, + info.recording_id + ); + + self.sink.send( + BeginRecordingMsg { + msg_id: MsgId::random(), + info, + } + .into(), + ); + self.has_sent_begin_recording_msg = true; } self.sink.send(log_msg); } + pub fn send_arrow_msg(&mut self, arrow_msg: ArrowMsg) { + self.send(LogMsg::ArrowMsg(self.recording_id(), arrow_msg)); + } + /// Send a [`PathOp`]. pub fn send_path_op(&mut self, time_point: &TimePoint, path_op: PathOp) { - self.send(LogMsg::EntityPathOpMsg(re_log_types::EntityPathOpMsg { - msg_id: MsgId::random(), - time_point: time_point.clone(), - path_op, - })); + self.send(LogMsg::EntityPathOpMsg( + self.recording_id(), + re_log_types::EntityPathOpMsg { + msg_id: MsgId::random(), + time_point: time_point.clone(), + path_op, + }, + )); } } From ab26a737604fc63caff95dd79fc385ef8ce89f12 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Thu, 6 Apr 2023 18:53:23 +0200 Subject: [PATCH 27/89] Refactor: Add new helper crate `re_log_encoding` (#1772) * CI: Check `rerun` with --no-default features and/or with --features sdk * Create a new helper crate re_transport containing stream_rrd_from_http * Fix warnings * Move file sink to re_transport * wasm compilation fix * Move LogMsg encoding/decoding into re_transport * Fix typo * Fix web build * Fix tests * Remove a lot of unused dependencies with `cargo machete` * Build fix * Clarify * Rename the crate to re_log_encoding * better docstring Co-authored-by: Jeremy Leibs * better readme Co-authored-by: Jeremy Leibs --------- Co-authored-by: Jeremy Leibs --- .github/workflows/rust.yml | 16 +- Cargo.lock | 59 ++++---- Cargo.toml | 1 + crates/re_data_store/Cargo.toml | 6 +- crates/re_data_store/examples/memory_usage.rs | 4 +- crates/re_log_encoding/Cargo.toml | 69 +++++++++ crates/re_log_encoding/README.md | 10 ++ .../benches/msg_encode_benchmark.rs | 8 +- .../src/decoder.rs} | 139 ++---------------- crates/re_log_encoding/src/encoder.rs | 100 +++++++++++++ .../src/file_sink.rs | 10 +- crates/re_log_encoding/src/lib.rs | 42 ++++++ .../src/stream_rrd_from_http.rs | 23 ++- crates/re_log_types/Cargo.toml | 35 +---- crates/re_log_types/src/lib.rs | 6 +- crates/re_query/Cargo.toml | 1 - crates/re_sdk/Cargo.toml | 8 +- crates/re_sdk/src/lib.rs | 12 +- crates/re_sdk/src/session.rs | 2 +- crates/re_tuid/Cargo.toml | 7 +- crates/re_tuid/src/lib.rs | 13 +- crates/re_viewer/Cargo.toml | 17 +-- crates/re_viewer/src/app.rs | 4 +- crates/re_viewer/src/lib.rs | 1 - crates/re_viewer/src/web.rs | 2 +- crates/rerun/Cargo.toml | 7 +- crates/rerun/src/run.rs | 13 +- examples/rust/objectron/Cargo.toml | 2 - rerun_py/Cargo.toml | 6 - scripts/publish_crates.sh | 1 + 30 files changed, 340 insertions(+), 284 deletions(-) create mode 100644 crates/re_log_encoding/Cargo.toml create mode 100644 crates/re_log_encoding/README.md rename crates/{re_log_types => re_log_encoding}/benches/msg_encode_benchmark.rs (96%) rename crates/{re_log_types/src/encoding.rs => re_log_encoding/src/decoder.rs} (58%) create mode 100644 crates/re_log_encoding/src/encoder.rs rename crates/{re_sdk => re_log_encoding}/src/file_sink.rs (89%) create mode 100644 crates/re_log_encoding/src/lib.rs rename crates/{re_viewer => re_log_encoding}/src/stream_rrd_from_http.rs (83%) diff 
--git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 9821e24ad139..556f02977008 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -137,18 +137,20 @@ jobs: command: cranky args: --all-targets --all-features -- --deny warnings - - name: Check no default features + # -------------------------------------------------------------------------------- + # Check a few important permutations of the feature flags for our `rerun` library: + - name: Check rerun with `--no-default-features`` uses: actions-rs/cargo@v1 with: - command: check - args: --locked --no-default-features --features __ci --lib + command: cranky + args: --locked -p rerun --no-default-features - # Check a few important permutations of the feature flags for our `rerun` library: - - name: Check rerun with --features sdk + - name: Check rerun with `--features sdk` uses: actions-rs/cargo@v1 with: - command: check - args: --locked --no-default-features --features sdk + command: cranky + args: --locked -p rerun --no-default-features --features sdk + # -------------------------------------------------------------------------------- - name: Test doc-tests uses: actions-rs/cargo@v1 diff --git a/Cargo.lock b/Cargo.lock index 331040f76391..e7986f73779c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3129,8 +3129,6 @@ dependencies = [ "anyhow", "clap 4.1.4", "glam", - "image", - "itertools", "prost", "prost-build", "rerun", @@ -3842,9 +3840,9 @@ dependencies = [ "re_arrow_store", "re_int_histogram", "re_log", + "re_log_encoding", "re_log_types", "re_smart_channel", - "re_string_interner", "serde", "thiserror", ] @@ -3891,17 +3889,40 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "re_log_encoding" +version = "0.4.0" +dependencies = [ + "criterion", + "ehttp", + "instant", + "js-sys", + "mimalloc", + "parking_lot 0.12.1", + "puffin", + "re_build_info", + "re_log", + "re_log_types", + "re_smart_channel", + "rmp-serde", + "ruzstd", + "serde_test", + "thiserror", + "wasm-bindgen", + "wasm-bindgen-futures", + "web-sys", + "zstd", +] + [[package]] name = "re_log_types" version = "0.4.0" dependencies = [ "ahash 0.8.2", - "anyhow", "array-init", "arrow2", "arrow2_convert", "bytemuck", - "criterion", "document-features", "ecolor", "fixed", @@ -3911,29 +3932,24 @@ dependencies = [ "itertools", "lazy_static", "macaw", - "mimalloc", "ndarray", "nohash-hasher", "num-derive", "num-traits", "puffin", "rand", - "re_build_info", "re_format", "re_log", "re_string_interner", "re_tuid", "rmp-serde", - "ruzstd", "serde", "serde_bytes", - "serde_test", "smallvec", "thiserror", "time 0.3.20", "typenum", "uuid", - "zstd", ] [[package]] @@ -3970,7 +3986,6 @@ dependencies = [ "re_arrow_store", "re_data_store", "re_format", - "re_log", "re_log_types", "thiserror", ] @@ -4032,23 +4047,19 @@ dependencies = [ name = "re_sdk" version = "0.4.0" dependencies = [ - "anyhow", "arrow2_convert", "document-features", "ndarray", "ndarray-rand", - "nohash-hasher", "once_cell", "parking_lot 0.12.1", "rand", "re_build_build_info", - "re_build_info", - "re_error", "re_log", + "re_log_encoding", "re_log_types", "re_memory", "re_sdk_comms", - "re_smart_channel", "thiserror", ] @@ -4143,14 +4154,12 @@ dependencies = [ "egui-wgpu", "egui_dock", "egui_extras", - "ehttp", "enumset", "glam", "half 2.2.1", "image", "instant", "itertools", - "js-sys", "lazy_static", "macaw", "ndarray", @@ -4167,12 +4176,12 @@ dependencies = [ "re_error", "re_format", "re_log", + "re_log_encoding", "re_log_types", "re_memory", "re_query", "re_renderer", 
"re_smart_channel", - "re_string_interner", "re_tensor_ops", "re_ui", "re_ws_comms", @@ -4184,9 +4193,7 @@ dependencies = [ "time 0.3.20", "uuid", "vec1", - "wasm-bindgen", "wasm-bindgen-futures", - "web-sys", "wgpu", "winapi", ] @@ -4288,18 +4295,16 @@ dependencies = [ "clap 4.1.4", "ctrlc", "document-features", - "egui", "itertools", "libc", "mimalloc", - "once_cell", "parking_lot 0.12.1", "re_analytics", "re_build_build_info", "re_build_info", - "re_error", "re_format", "re_log", + "re_log_encoding", "re_log_types", "re_memory", "re_sdk", @@ -4316,14 +4321,9 @@ dependencies = [ name = "rerun_py" version = "0.4.0" dependencies = [ - "ahash 0.8.2", - "anyhow", "arrow2", - "bytemuck", - "crossbeam", "document-features", "glam", - "half 2.2.1", "image", "itertools", "macaw", @@ -4340,7 +4340,6 @@ dependencies = [ "re_log", "re_log_types", "re_memory", - "re_tensor_ops", "rerun", "tokio", "uuid", diff --git a/Cargo.toml b/Cargo.toml index baa3b459875f..76c8577f1c6a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -33,6 +33,7 @@ re_error = { path = "crates/re_error", version = "0.4.0" } re_format = { path = "crates/re_format", version = "0.4.0" } re_int_histogram = { path = "crates/re_int_histogram", version = "0.4.0" } re_log = { path = "crates/re_log", version = "0.4.0" } +re_log_encoding = { path = "crates/re_log_encoding", version = "0.4.0" } re_log_types = { path = "crates/re_log_types", version = "0.4.0" } re_memory = { path = "crates/re_memory", version = "0.4.0" } re_query = { path = "crates/re_query", version = "0.4.0" } diff --git a/crates/re_data_store/Cargo.toml b/crates/re_data_store/Cargo.toml index a77bdcb40096..4b5991b37c0a 100644 --- a/crates/re_data_store/Cargo.toml +++ b/crates/re_data_store/Cargo.toml @@ -26,10 +26,10 @@ serde = ["dep:serde", "re_log_types/serde"] [dependencies] re_arrow_store.workspace = true re_int_histogram.workspace = true +re_log_encoding = { workspace = true, optional = true } re_log_types.workspace = true re_log.workspace = true re_smart_channel.workspace = true -re_string_interner.workspace = true ahash.workspace = true document-features = "0.2" @@ -47,7 +47,7 @@ puffin.workspace = true criterion = "0.4" mimalloc.workspace = true rand = "0.8" -re_log_types = { workspace = true, features = ["load", "save"] } +re_log_encoding = { workspace = true, features = ["decoder", "encoder"] } [lib] bench = false @@ -55,4 +55,4 @@ bench = false [[example]] name = "memory_usage" path = "examples/memory_usage.rs" -required-features = ["re_log_types/load", "re_log_types/save"] +required-features = ["re_log_encoding/decoder", "re_log_encoding/encoder"] diff --git a/crates/re_data_store/examples/memory_usage.rs b/crates/re_data_store/examples/memory_usage.rs index ff5d1faba8a4..69ca6ef5d6a1 100644 --- a/crates/re_data_store/examples/memory_usage.rs +++ b/crates/re_data_store/examples/memory_usage.rs @@ -65,12 +65,12 @@ fn log_messages() { fn encode_log_msg(log_msg: &LogMsg) -> Vec { let mut bytes = vec![]; - re_log_types::encoding::encode(std::iter::once(log_msg), &mut bytes).unwrap(); + re_log_encoding::encoder::encode(std::iter::once(log_msg), &mut bytes).unwrap(); bytes } fn decode_log_msg(mut bytes: &[u8]) -> LogMsg { - let mut messages = re_log_types::encoding::Decoder::new(&mut bytes) + let mut messages = re_log_encoding::decoder::Decoder::new(&mut bytes) .unwrap() .collect::, _>>() .unwrap(); diff --git a/crates/re_log_encoding/Cargo.toml b/crates/re_log_encoding/Cargo.toml new file mode 100644 index 000000000000..b1c5c2943af6 --- /dev/null +++ 
b/crates/re_log_encoding/Cargo.toml @@ -0,0 +1,69 @@ +[package] +name = "re_log_encoding" +authors.workspace = true +description = "Helpers for encoding and transporting Rerun log messages" +edition.workspace = true +homepage.workspace = true +include.workspace = true +license.workspace = true +publish = true +readme = "README.md" +repository.workspace = true +rust-version.workspace = true +version.workspace = true + +[package.metadata.docs.rs] +all-features = true + + +[features] +default = [] + +## Enable loading data from an .rrd file. +decoder = ["dep:rmp-serde", "dep:zstd", "dep:ruzstd"] + +# Enable encoding of log messages to an .rrd file/stream: +encoder = ["dep:rmp-serde", "dep:zstd"] + + +[dependencies] + +# Rerun: +re_build_info.workspace = true +re_log_types = { workspace = true, features = ["serde"] } +re_log.workspace = true +re_smart_channel.workspace = true + +# External: +ehttp = "0.2" +parking_lot.workspace = true +thiserror.workspace = true + +# Optional external dependencies: +rmp-serde = { version = "1", optional = true } + +# Native dependencies: +[target.'cfg(not(target_arch = "wasm32"))'.dependencies] +puffin.workspace = true +zstd = { version = "0.11.0", optional = true } # native only + +# Web dependencies: +[target.'cfg(target_arch = "wasm32")'.dependencies] +instant = { version = "0.1", features = ["wasm-bindgen"] } +js-sys = "0.3" +ruzstd = { version = "0.3.0", optional = true } # works on wasm, in contrast to zstd +wasm-bindgen = "0.2" +wasm-bindgen-futures = "0.4" +web-sys = { version = "0.3.52", features = ["Window"] } + +[dev-dependencies] +criterion = "0.4" +mimalloc.workspace = true +serde_test = { version = "1" } + +[lib] +bench = false + +[[bench]] +name = "msg_encode_benchmark" +harness = false diff --git a/crates/re_log_encoding/README.md b/crates/re_log_encoding/README.md new file mode 100644 index 000000000000..8b3e7ff4f875 --- /dev/null +++ b/crates/re_log_encoding/README.md @@ -0,0 +1,10 @@ +# re_log_encoding + +Part of the [`rerun`](https://github.com/rerun-io/rerun) family of crates. + +[![Latest version](https://img.shields.io/crates/v/re_log_encoding.svg)](https://crates.io/crates/re_log_encoding) +[![Documentation](https://docs.rs/re_log_encoding/badge.svg)](https://docs.rs/re_log_encoding) +![MIT](https://img.shields.io/badge/license-MIT-blue.svg) +![Apache](https://img.shields.io/badge/license-Apache-blue.svg) + +Helper library for encoding Rerun log messages. 
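A minimal usage sketch of the new crate, assembled from the encoder code introduced later in this patch. It is not part of the patch itself; the `write_rrd` name and the `anyhow` error handling are illustrative assumptions:

    // Sketch only: write a stream of LogMsgs to an .rrd file.
    // Requires the "encoder" feature of re_log_encoding.
    fn write_rrd(messages: &[re_log_types::LogMsg], path: &std::path::Path) -> anyhow::Result<()> {
        let file = std::fs::File::create(path)?;
        // Encoder::new writes the "RRF0" magic and crate version, then opens a zstd stream.
        let mut encoder = re_log_encoding::encoder::Encoder::new(file)?;
        for msg in messages {
            encoder.append(msg)?; // each message is MsgPack-encoded and length-prefixed
        }
        encoder.finish()?; // flushes the zstd stream; dropping without finish() logs a warning
        Ok(())
    }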
diff --git a/crates/re_log_types/benches/msg_encode_benchmark.rs b/crates/re_log_encoding/benches/msg_encode_benchmark.rs
similarity index 96%
rename from crates/re_log_types/benches/msg_encode_benchmark.rs
rename to crates/re_log_encoding/benches/msg_encode_benchmark.rs
index 8faca63d2371..fb36e7bc34d7 100644
--- a/crates/re_log_types/benches/msg_encode_benchmark.rs
+++ b/crates/re_log_encoding/benches/msg_encode_benchmark.rs
@@ -1,5 +1,5 @@
-#[cfg(not(all(feature = "save", feature = "load")))]
-compile_error!("msg_encode_benchmark requires 'save' and 'load' features.");
+#[cfg(not(all(feature = "decoder", feature = "encoder")))]
+compile_error!("msg_encode_benchmark requires 'decoder' and 'encoder' features.");
 
 #[global_allocator]
 static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;
@@ -28,13 +28,13 @@ criterion_main!(benches);
 
 fn encode_log_msgs(messages: &[LogMsg]) -> Vec<u8> {
     let mut bytes = vec![];
-    re_log_types::encoding::encode(messages.iter(), &mut bytes).unwrap();
+    re_log_encoding::encoder::encode(messages.iter(), &mut bytes).unwrap();
     assert!(bytes.len() > messages.len());
     bytes
 }
 
 fn decode_log_msgs(mut bytes: &[u8]) -> Vec<LogMsg> {
-    let messages = re_log_types::encoding::Decoder::new(&mut bytes)
+    let messages = re_log_encoding::decoder::Decoder::new(&mut bytes)
         .unwrap()
         .collect::<Result<Vec<_>, _>>()
         .unwrap();
diff --git a/crates/re_log_types/src/encoding.rs b/crates/re_log_encoding/src/decoder.rs
similarity index 58%
rename from crates/re_log_types/src/encoding.rs
rename to crates/re_log_encoding/src/decoder.rs
index d1e2299c2509..411c580d4439 100644
--- a/crates/re_log_types/src/encoding.rs
+++ b/crates/re_log_encoding/src/decoder.rs
@@ -1,117 +1,6 @@
-//! Encoding/decoding [`LogMsg`]:es as `.rrd` files.
+//! Decoding [`LogMsg`]:es from `.rrd` files/streams.
 
-use crate::LogMsg;
-
-// ----------------------------------------------------------------------------
-// native encode:
-
-#[cfg(feature = "save")]
-#[cfg(not(target_arch = "wasm32"))]
-mod encoder {
-    use std::io::Write as _;
-
-    use crate::LogMsg;
-
-    /// On failure to encode or serialize a [`LogMsg`].
-    #[derive(thiserror::Error, Debug)]
-    pub enum EncodeError {
-        #[error("Failed to write: {0}")]
-        Write(std::io::Error),
-
-        #[error("Zstd error: {0}")]
-        Zstd(std::io::Error),
-
-        #[error("MsgPack error: {0}")]
-        MsgPack(#[from] rmp_serde::encode::Error),
-
-        #[error("Called append on already finished encoder")]
-        AlreadyFinished,
-    }
-
-    /// Encode a stream of [`LogMsg`] into an `.rrd` file.
-    pub struct Encoder<W: std::io::Write> {
-        /// Set to None when finished.
-        zstd_encoder: Option<zstd::stream::Encoder<'static, W>>,
-        buffer: Vec<u8>,
-    }
-
-    impl<W: std::io::Write> Drop for Encoder<W> {
-        fn drop(&mut self) {
-            if self.zstd_encoder.is_some() {
-                re_log::warn!("Encoder dropped without calling finish()!");
-                if let Err(err) = self.finish() {
-                    re_log::error!("Failed to finish encoding: {err}");
-                }
-            }
-        }
-    }
-
-    impl<W: std::io::Write> Encoder<W> {
-        pub fn new(mut write: W) -> Result<Self, EncodeError> {
-            let rerun_version = re_build_info::CrateVersion::parse(env!("CARGO_PKG_VERSION"));
-
-            write.write_all(b"RRF0").map_err(EncodeError::Write)?;
-            write
-                .write_all(&rerun_version.to_bytes())
-                .map_err(EncodeError::Write)?;
-
-            let level = 3;
-            let zstd_encoder =
-                zstd::stream::Encoder::new(write, level).map_err(EncodeError::Zstd)?;
-
-            Ok(Self {
-                zstd_encoder: Some(zstd_encoder),
-                buffer: vec![],
-            })
-        }
-
-        pub fn append(&mut self, message: &LogMsg) -> Result<(), EncodeError> {
-            let Self {
-                zstd_encoder,
-                buffer,
-            } = self;
-
-            if let Some(zstd_encoder) = zstd_encoder {
-                buffer.clear();
-                rmp_serde::encode::write_named(buffer, message)?;
-
-                zstd_encoder
-                    .write_all(&(buffer.len() as u64).to_le_bytes())
-                    .map_err(EncodeError::Zstd)?;
-                zstd_encoder.write_all(buffer).map_err(EncodeError::Zstd)?;
-
-                Ok(())
-            } else {
-                Err(EncodeError::AlreadyFinished)
-            }
-        }
-
-        pub fn finish(&mut self) -> Result<(), EncodeError> {
-            if let Some(zstd_encoder) = self.zstd_encoder.take() {
-                zstd_encoder.finish().map_err(EncodeError::Zstd)?;
-                Ok(())
-            } else {
-                re_log::warn!("Encoder::finish called twice");
-                Ok(())
-            }
-        }
-    }
-
-    pub fn encode<'a>(
-        messages: impl Iterator<Item = &'a LogMsg>,
-        write: impl std::io::Write,
-    ) -> Result<(), EncodeError> {
-        let mut encoder = Encoder::new(write)?;
-        for message in messages {
-            encoder.append(message)?;
-        }
-        encoder.finish()
-    }
-}
-
-#[cfg(feature = "save")]
-#[cfg(not(target_arch = "wasm32"))]
-pub use encoder::*;
+use re_log_types::LogMsg;
 
 // ----------------------------------------------------------------------------
 
@@ -135,7 +24,6 @@ fn warn_on_version_mismatch(encoded_version: [u8; 4]) {
 // ----------------------------------------------------------------------------
 
 /// On failure to encode or serialize a [`LogMsg`].
-#[cfg(feature = "load")] #[derive(thiserror::Error, Debug)] pub enum DecodeError { #[error("Not an .rrd file")] @@ -163,14 +51,12 @@ pub enum DecodeError { // ---------------------------------------------------------------------------- // native decode: -#[cfg(feature = "load")] #[cfg(not(target_arch = "wasm32"))] pub struct Decoder<'r, R: std::io::BufRead> { zdecoder: zstd::stream::Decoder<'r, R>, buffer: Vec, } -#[cfg(feature = "load")] #[cfg(not(target_arch = "wasm32"))] impl<'r, R: std::io::Read> Decoder<'r, std::io::BufReader> { pub fn new(mut read: R) -> Result { @@ -192,7 +78,6 @@ impl<'r, R: std::io::Read> Decoder<'r, std::io::BufReader> { } } -#[cfg(feature = "load")] #[cfg(not(target_arch = "wasm32"))] impl<'r, R: std::io::BufRead> Iterator for Decoder<'r, R> { type Item = Result; @@ -225,14 +110,12 @@ impl<'r, R: std::io::BufRead> Iterator for Decoder<'r, R> { // ---------------------------------------------------------------------------- // wasm decode: -#[cfg(feature = "load")] #[cfg(target_arch = "wasm32")] pub struct Decoder { zdecoder: ruzstd::StreamingDecoder, buffer: Vec, } -#[cfg(feature = "load")] #[cfg(target_arch = "wasm32")] impl Decoder { pub fn new(mut read: R) -> Result { @@ -254,7 +137,6 @@ impl Decoder { } } -#[cfg(feature = "load")] #[cfg(target_arch = "wasm32")] impl Iterator for Decoder { type Item = Result; @@ -286,19 +168,22 @@ impl Iterator for Decoder { // ---------------------------------------------------------------------------- -#[cfg(all(feature = "load", feature = "save"))] +#[cfg(all(feature = "decoder", feature = "encoder"))] #[test] fn test_encode_decode() { - use crate::{BeginRecordingMsg, LogMsg, MsgId, Time}; + use re_log_types::{ + ApplicationId, BeginRecordingMsg, LogMsg, MsgId, RecordingId, RecordingInfo, + RecordingSource, Time, + }; let messages = vec![LogMsg::BeginRecordingMsg(BeginRecordingMsg { msg_id: MsgId::random(), - info: crate::RecordingInfo { - application_id: crate::ApplicationId("test".to_owned()), - recording_id: crate::RecordingId::random(), + info: RecordingInfo { + application_id: ApplicationId("test".to_owned()), + recording_id: RecordingId::random(), is_official_example: true, started: Time::now(), - recording_source: crate::RecordingSource::RustSdk { + recording_source: RecordingSource::RustSdk { rustc_version: String::new(), llvm_version: String::new(), }, @@ -306,7 +191,7 @@ fn test_encode_decode() { })]; let mut file = vec![]; - encode(messages.iter(), &mut file).unwrap(); + crate::encoder::encode(messages.iter(), &mut file).unwrap(); let decoded_messages = Decoder::new(&mut file.as_slice()) .unwrap() diff --git a/crates/re_log_encoding/src/encoder.rs b/crates/re_log_encoding/src/encoder.rs new file mode 100644 index 000000000000..13cfbbdd849b --- /dev/null +++ b/crates/re_log_encoding/src/encoder.rs @@ -0,0 +1,100 @@ +//! Encoding of [`LogMsg`]es as a binary stream, e.g. to store in an `.rrd` file, or send over network. + +use std::io::Write as _; + +use re_log_types::LogMsg; + +/// On failure to encode or serialize a [`LogMsg`]. +#[derive(thiserror::Error, Debug)] +pub enum EncodeError { + #[error("Failed to write: {0}")] + Write(std::io::Error), + + #[error("Zstd error: {0}")] + Zstd(std::io::Error), + + #[error("MsgPack error: {0}")] + MsgPack(#[from] rmp_serde::encode::Error), + + #[error("Called append on already finished encoder")] + AlreadyFinished, +} + +/// Encode a stream of [`LogMsg`] into an `.rrd` file. +pub struct Encoder { + /// Set to None when finished. 
+    zstd_encoder: Option<zstd::stream::Encoder<'static, W>>,
+    buffer: Vec<u8>,
+}
+
+impl<W: std::io::Write> Drop for Encoder<W> {
+    fn drop(&mut self) {
+        if self.zstd_encoder.is_some() {
+            re_log::warn!("Encoder dropped without calling finish()!");
+            if let Err(err) = self.finish() {
+                re_log::error!("Failed to finish encoding: {err}");
+            }
+        }
+    }
+}
+
+impl<W: std::io::Write> Encoder<W> {
+    pub fn new(mut write: W) -> Result<Self, EncodeError> {
+        let rerun_version = re_build_info::CrateVersion::parse(env!("CARGO_PKG_VERSION"));
+
+        write.write_all(b"RRF0").map_err(EncodeError::Write)?;
+        write
+            .write_all(&rerun_version.to_bytes())
+            .map_err(EncodeError::Write)?;
+
+        let level = 3;
+        let zstd_encoder = zstd::stream::Encoder::new(write, level).map_err(EncodeError::Zstd)?;
+
+        Ok(Self {
+            zstd_encoder: Some(zstd_encoder),
+            buffer: vec![],
+        })
+    }
+
+    pub fn append(&mut self, message: &LogMsg) -> Result<(), EncodeError> {
+        let Self {
+            zstd_encoder,
+            buffer,
+        } = self;
+
+        if let Some(zstd_encoder) = zstd_encoder {
+            buffer.clear();
+            rmp_serde::encode::write_named(buffer, message)?;
+
+            zstd_encoder
+                .write_all(&(buffer.len() as u64).to_le_bytes())
+                .map_err(EncodeError::Zstd)?;
+            zstd_encoder.write_all(buffer).map_err(EncodeError::Zstd)?;
+
+            Ok(())
+        } else {
+            Err(EncodeError::AlreadyFinished)
+        }
+    }
+
+    pub fn finish(&mut self) -> Result<(), EncodeError> {
+        if let Some(zstd_encoder) = self.zstd_encoder.take() {
+            zstd_encoder.finish().map_err(EncodeError::Zstd)?;
+            Ok(())
+        } else {
+            re_log::warn!("Encoder::finish called twice");
+            Ok(())
+        }
+    }
+}
+
+pub fn encode<'a>(
+    messages: impl Iterator<Item = &'a LogMsg>,
+    write: impl std::io::Write,
+) -> Result<(), EncodeError> {
+    let mut encoder = Encoder::new(write)?;
+    for message in messages {
+        encoder.append(message)?;
+    }
+    encoder.finish()
+}
diff --git a/crates/re_sdk/src/file_sink.rs b/crates/re_log_encoding/src/file_sink.rs
similarity index 89%
rename from crates/re_sdk/src/file_sink.rs
rename to crates/re_log_encoding/src/file_sink.rs
index f5f7e69f6fe1..121383553eb2 100644
--- a/crates/re_sdk/src/file_sink.rs
+++ b/crates/re_log_encoding/src/file_sink.rs
@@ -17,7 +17,7 @@ pub enum FileSinkError {
 
     /// Error encoding a log message.
     #[error("Failed to encode LogMsg: {0}")]
-    LogMsgEncode(#[from] re_log_types::encoding::EncodeError),
+    LogMsgEncode(#[from] crate::encoder::EncodeError),
 }
 
 /// Stream log messages to an `.rrd` file.
@@ -47,7 +47,7 @@ impl FileSink {
         let file = std::fs::File::create(&path)
             .map_err(|err| FileSinkError::CreateFile(path.clone(), err))?;
 
-        let mut encoder = re_log_types::encoding::Encoder::new(file)?;
+        let mut encoder = crate::encoder::Encoder::new(file)?;
 
         let join_handle = std::thread::Builder::new()
             .name("file_writer".into())
@@ -71,10 +71,8 @@ impl FileSink {
             join_handle: Some(join_handle),
         })
     }
-}
 
-impl crate::sink::LogSink for FileSink {
-    fn send(&self, msg: LogMsg) {
-        self.tx.lock().send(Some(msg)).ok();
+    pub fn send(&self, log_msg: LogMsg) {
+        self.tx.lock().send(Some(log_msg)).ok();
     }
 }
diff --git a/crates/re_log_encoding/src/lib.rs b/crates/re_log_encoding/src/lib.rs
new file mode 100644
index 000000000000..16b883448803
--- /dev/null
+++ b/crates/re_log_encoding/src/lib.rs
@@ -0,0 +1,42 @@
+//! Crate that handles encoding of rerun log types.
+ +#[cfg(feature = "decoder")] +pub mod decoder; +#[cfg(feature = "encoder")] +#[cfg(not(target_arch = "wasm32"))] // we do no yet support encoding LogMsgs in the browser +pub mod encoder; + +#[cfg(feature = "encoder")] +#[cfg(not(target_arch = "wasm32"))] +mod file_sink; + +#[cfg(feature = "decoder")] +pub mod stream_rrd_from_http; + +// --------------------------------------------------------------------- + +#[cfg(feature = "encoder")] +#[cfg(not(target_arch = "wasm32"))] +pub use file_sink::{FileSink, FileSinkError}; + +// --------------------------------------------------------------------------- + +/// Profiling macro for feature "puffin" +#[doc(hidden)] +#[macro_export] +macro_rules! profile_function { + ($($arg: tt)*) => { + #[cfg(not(target_arch = "wasm32"))] + puffin::profile_function!($($arg)*); + }; +} + +/// Profiling macro for feature "puffin" +#[doc(hidden)] +#[macro_export] +macro_rules! profile_scope { + ($($arg: tt)*) => { + #[cfg(not(target_arch = "wasm32"))] + puffin::profile_scope!($($arg)*); + }; +} diff --git a/crates/re_viewer/src/stream_rrd_from_http.rs b/crates/re_log_encoding/src/stream_rrd_from_http.rs similarity index 83% rename from crates/re_viewer/src/stream_rrd_from_http.rs rename to crates/re_log_encoding/src/stream_rrd_from_http.rs index b0997d4494a5..007105f37f53 100644 --- a/crates/re_viewer/src/stream_rrd_from_http.rs +++ b/crates/re_log_encoding/src/stream_rrd_from_http.rs @@ -1,6 +1,6 @@ -pub fn stream_rrd_from_http_to_channel( - url: String, -) -> re_smart_channel::Receiver { +use re_log_types::LogMsg; + +pub fn stream_rrd_from_http_to_channel(url: String) -> re_smart_channel::Receiver { let (tx, rx) = re_smart_channel::smart_channel(re_smart_channel::Source::RrdHttpStream { url: url.clone(), }); @@ -13,7 +13,7 @@ pub fn stream_rrd_from_http_to_channel( rx } -pub fn stream_rrd_from_http(url: String, on_msg: Box) { +pub fn stream_rrd_from_http(url: String, on_msg: Box) { re_log::debug!("Downloading .rrd file from {url:?}…"); // TODO(emilk): stream the http request, progressively decoding the .rrd file. @@ -38,8 +38,8 @@ pub fn stream_rrd_from_http(url: String, on_msg: Box, on_msg: Box) { - match re_log_types::encoding::Decoder::new(rrd_bytes.as_slice()) { +fn decode_rrd(rrd_bytes: Vec, on_msg: Box) { + match crate::decoder::Decoder::new(rrd_bytes.as_slice()) { Ok(decoder) => { for msg in decoder { match msg { @@ -60,20 +60,19 @@ fn decode_rrd(rrd_bytes: Vec, on_msg: Box, on_msg: Box) { + use re_log_types::LogMsg; + + pub fn decode_rrd(rrd_bytes: Vec, on_msg: Box) { wasm_bindgen_futures::spawn_local(decode_rrd_async(rrd_bytes, on_msg)); } /// Decodes the file in chunks, with an yield between each chunk. /// /// This is cooperative multi-tasking. - async fn decode_rrd_async( - rrd_bytes: Vec, - on_msg: Box, - ) { + async fn decode_rrd_async(rrd_bytes: Vec, on_msg: Box) { let mut last_yield = instant::Instant::now(); - match re_log_types::encoding::Decoder::new(rrd_bytes.as_slice()) { + match crate::decoder::Decoder::new(rrd_bytes.as_slice()) { Ok(decoder) => { for msg in decoder { match msg { diff --git a/crates/re_log_types/Cargo.toml b/crates/re_log_types/Cargo.toml index 12ae7e9e5463..4c517b9de34f 100644 --- a/crates/re_log_types/Cargo.toml +++ b/crates/re_log_types/Cargo.toml @@ -17,7 +17,7 @@ all-features = true [features] -default = ["arrow_datagen", "anyhow"] +default = ["arrow_datagen"] ## Enables the `datagen` module, which exposes a number of tools for generating random data for ## tests and benchmarks. 
@@ -32,12 +32,6 @@ glam = ["dep:glam", "dep:macaw"] ## Integration with the [`image`](https://crates.io/crates/image/) crate. image = ["dep:image"] -## Enable loading data from a file. -load = ["anyhow", "rmp-serde", "serde", "zstd", "ruzstd"] - -## Enable saving data to a file. -save = ["anyhow", "rmp-serde", "serde", "zstd"] - ## Enable (de)serialization using serde. serde = [ "dep:serde", @@ -51,11 +45,10 @@ serde = [ [dependencies] # Rerun -re_build_info.workspace = true re_format.workspace = true re_log.workspace = true re_string_interner.workspace = true -re_tuid.workspace = true +re_tuid = { workspace = true, features = ["arrow2_convert"] } # External ahash.workspace = true @@ -75,7 +68,7 @@ lazy_static.workspace = true ndarray.workspace = true nohash-hasher = "0.2" num-derive = "0.3" -num-traits = "0.2" +num-traits = "0.2" # used by num-derive smallvec = "1.10" thiserror.workspace = true time = { workspace = true, default-features = false, features = [ @@ -87,7 +80,6 @@ uuid = { version = "1.1", features = ["serde", "v4", "js"] } # Optional dependencies: -anyhow = { workspace = true, optional = true } ecolor = { workspace = true, optional = true } glam = { workspace = true, optional = true } image = { workspace = true, optional = true, default-features = false, features = [ @@ -95,32 +87,13 @@ image = { workspace = true, optional = true, default-features = false, features ] } macaw = { workspace = true, optional = true } rand = { version = "0.8", optional = true } -rmp-serde = { version = "1", optional = true } serde = { version = "1", optional = true, features = ["derive", "rc"] } serde_bytes = { version = "0.11", optional = true } # Native dependencies: [target.'cfg(not(target_arch = "wasm32"))'.dependencies] puffin.workspace = true -zstd = { version = "0.11.0", optional = true } # native only -# Web dependencies: -[target.'cfg(target_arch = "wasm32")'.dependencies] -ruzstd = { version = "0.3.0", optional = true } # works on wasm [dev-dependencies] -criterion = "0.4" -mimalloc.workspace = true -serde_test = { version = "1" } -arrow2 = { workspace = true, features = [ - "io_ipc", - "io_print", - "compute_concatenate", -] } - -[lib] -bench = false - -[[bench]] -name = "msg_encode_benchmark" -harness = false +rmp-serde = "1.1" diff --git a/crates/re_log_types/src/lib.rs b/crates/re_log_types/src/lib.rs index aa3c608b7e58..bf258e417ad1 100644 --- a/crates/re_log_types/src/lib.rs +++ b/crates/re_log_types/src/lib.rs @@ -4,9 +4,6 @@ #![doc = document_features::document_features!()] //! -#[cfg(any(feature = "save", feature = "load"))] -pub mod encoding; - #[cfg(feature = "arrow_datagen")] pub mod datagen; @@ -162,9 +159,8 @@ impl std::fmt::Display for ApplicationId { /// The most general log message sent from the SDK to the server. #[must_use] -#[derive(Clone, Debug)] +#[derive(Clone, Debug, PartialEq)] // `PartialEq` used for tests in another crate #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] -#[cfg_attr(test, derive(PartialEq))] #[allow(clippy::large_enum_variant)] pub enum LogMsg { /// A new recording has begun. 
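With `re_log_types::encoding` removed in the lib.rs diff just above, reading an `.rrd` file now goes through `re_log_encoding::decoder` instead. A matching read sketch, based on the Decoder shown earlier in this patch (again not part of the patch itself; the `read_rrd` name and `anyhow` error handling are assumptions):

    // Sketch only: read all LogMsgs back from an .rrd file.
    // Requires the "decoder" feature of re_log_encoding.
    fn read_rrd(path: &std::path::Path) -> anyhow::Result<Vec<re_log_types::LogMsg>> {
        let file = std::fs::File::open(path)?;
        // Decoder::new checks the "RRF0" magic and warns on a version mismatch;
        // the decoder then yields Result<LogMsg, DecodeError> items.
        let decoder = re_log_encoding::decoder::Decoder::new(file)?;
        Ok(decoder.collect::<Result<Vec<_>, _>>()?)
    }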
diff --git a/crates/re_query/Cargo.toml b/crates/re_query/Cargo.toml
index 4a28c1fd0f54..b09471633c7e 100644
--- a/crates/re_query/Cargo.toml
+++ b/crates/re_query/Cargo.toml
@@ -29,7 +29,6 @@ re_arrow_store.workspace = true
 re_data_store.workspace = true
 re_format.workspace = true
 re_log_types.workspace = true
-re_log.workspace = true
 
 # External dependencies:
 arrow2 = { workspace = true, features = [
diff --git a/crates/re_sdk/Cargo.toml b/crates/re_sdk/Cargo.toml
index 1eb913bfcf30..018bc298a43b 100644
--- a/crates/re_sdk/Cargo.toml
+++ b/crates/re_sdk/Cargo.toml
@@ -33,17 +33,13 @@ image = ["re_log_types/image"]
 
 [dependencies]
-re_build_info.workspace = true
-re_error.workspace = true
-re_log_types = { workspace = true, features = ["save"] }
+re_log_encoding = { workspace = true, features = ["encoder"] }
+re_log_types.workspace = true
 re_log.workspace = true
 re_memory.workspace = true
 re_sdk_comms = { workspace = true, features = ["client"] }
-re_smart_channel.workspace = true
 
-anyhow.workspace = true
 document-features = "0.2"
-nohash-hasher = "0.2"
 parking_lot.workspace = true
 thiserror.workspace = true
 
diff --git a/crates/re_sdk/src/lib.rs b/crates/re_sdk/src/lib.rs
index 03d329a6435e..f78991944b90 100644
--- a/crates/re_sdk/src/lib.rs
+++ b/crates/re_sdk/src/lib.rs
@@ -9,9 +9,6 @@
 // ----------------
 // Private modules:
 
-#[cfg(not(target_arch = "wasm32"))]
-mod file_sink;
-
 #[cfg(feature = "global_session")]
 mod global;
 
@@ -34,6 +31,13 @@ pub use re_log_types::{
     ApplicationId, Component, ComponentName, EntityPath, RecordingId, SerializableComponent,
 };
 
+#[cfg(not(target_arch = "wasm32"))]
+impl crate::sink::LogSink for re_log_encoding::FileSink {
+    fn send(&self, msg: re_log_types::LogMsg) {
+        re_log_encoding::FileSink::send(self, msg);
+    }
+}
+
 // ---------------
 // Public modules:
 
@@ -48,7 +52,7 @@ pub mod sink {
     pub use crate::log_sink::{disabled, BufferedSink, LogSink, TcpSink};
 
     #[cfg(not(target_arch = "wasm32"))]
-    pub use crate::file_sink::{FileSink, FileSinkError};
+    pub use re_log_encoding::{FileSink, FileSinkError};
 }
 
 /// Things directly related to logging.
diff --git a/crates/re_sdk/src/session.rs b/crates/re_sdk/src/session.rs
index bbf99ee93451..94d336b30fcc 100644
--- a/crates/re_sdk/src/session.rs
+++ b/crates/re_sdk/src/session.rs
@@ -133,7 +133,7 @@ impl SessionBuilder {
     pub fn save(
         self,
         path: impl Into<std::path::PathBuf>,
-    ) -> Result<Session, crate::sink::FileSinkError> {
+    ) -> Result<Session, re_log_encoding::FileSinkError> {
         let (rerun_enabled, recording_info) = self.finalize();
         if rerun_enabled {
             Ok(Session::new(
diff --git a/crates/re_tuid/Cargo.toml b/crates/re_tuid/Cargo.toml
index 4f8e66c70dd2..71312ce7becd 100644
--- a/crates/re_tuid/Cargo.toml
+++ b/crates/re_tuid/Cargo.toml
@@ -19,17 +19,20 @@ all-features = true
 [features]
 default = []
 
+## Enable converting Tuid to arrow2
+arrow2_convert = ["dep:arrow2", "dep:arrow2_convert"]
+
 ## Enable (de)serialization using serde.
 serde = ["dep:serde"]
 
 
 [dependencies]
-arrow2_convert.workspace = true
-arrow2.workspace = true
 document-features = "0.2"
 once_cell = "1.16"
 
 # Optional dependencies:
+arrow2 = { workspace = true, optional = true } # used by arrow2_convert
+arrow2_convert = { workspace = true, optional = true }
 serde = { version = "1", features = ["derive"], optional = true }
 
 # native dependencies:
diff --git a/crates/re_tuid/src/lib.rs b/crates/re_tuid/src/lib.rs
index 072b65261da5..33b8d85625c2 100644
--- a/crates/re_tuid/src/lib.rs
+++ b/crates/re_tuid/src/lib.rs
@@ -6,10 +6,11 @@
 #![doc = document_features::document_features!()]
 //!
-use arrow2::datatypes::DataType;
-use arrow2_convert::{ArrowDeserialize, ArrowSerialize};
-
-#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, ArrowSerialize, ArrowDeserialize)]
+#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+#[cfg_attr(
+    feature = "arrow2_convert",
+    derive(arrow2_convert::ArrowSerialize, arrow2_convert::ArrowDeserialize)
+)]
 #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
 pub struct Tuid {
     /// Approximate nanoseconds since epoch.
@@ -20,9 +21,11 @@ pub struct Tuid {
     inc: u64,
 }
 
+#[cfg(feature = "arrow2_convert")]
 arrow2_convert::arrow_enable_vec_for_type!(Tuid);
 
 // TODO(#1774): shouldn't have to write this manually
+#[cfg(feature = "arrow2_convert")]
 impl arrow2_convert::field::ArrowField for Tuid {
     type Type = Self;
 
@@ -31,7 +34,7 @@ impl arrow2_convert::field::ArrowField for Tuid {
             <u64 as arrow2_convert::field::ArrowField>::field("time_ns"),
             <u64 as arrow2_convert::field::ArrowField>::field("inc"),
         ]);
-        DataType::Extension("rerun.tuid".into(), Box::new(datatype), None)
+        arrow2::datatypes::DataType::Extension("rerun.tuid".into(), Box::new(datatype), None)
     }
 }
diff --git a/crates/re_viewer/Cargo.toml b/crates/re_viewer/Cargo.toml
index a929387a0603..bb849af2fde5 100644
--- a/crates/re_viewer/Cargo.toml
+++ b/crates/re_viewer/Cargo.toml
@@ -41,19 +41,13 @@ re_build_info.workspace = true
 re_data_store = { workspace = true, features = ["serde"] }
 re_error.workspace = true
 re_format.workspace = true
+re_log_encoding = { workspace = true, features = ["decoder", "encoder"] }
+re_log_types = { workspace = true, features = ["ecolor", "glam", "image"] }
 re_log.workspace = true
-re_log_types = { workspace = true, features = [
-    "ecolor",
-    "glam",
-    "image",
-    "save",
-    "load",
-] }
 re_memory.workspace = true
 re_query.workspace = true
 re_renderer = { workspace = true, features = ["arrow", "serde"] }
 re_smart_channel.workspace = true
-re_string_interner.workspace = true
 re_tensor_ops.workspace = true
 re_ui.workspace = true
 re_ws_comms = { workspace = true, features = ["client"] }
@@ -76,7 +70,6 @@ egui = { workspace = true, features = ["extra_debug_asserts", "tracing"] }
 egui_dock = { workspace = true, features = ["serde"] }
 egui_extras = { workspace = true, features = ["tracing"] }
 egui-wgpu.workspace = true
-ehttp = "0.2"
 enumset.workspace = true
 glam = { workspace = true, features = [
     "mint",
@@ -124,12 +117,6 @@ winapi = "0.3.9"
 [target.'cfg(target_arch = "wasm32")'.dependencies]
 console_error_panic_hook = "0.1.6"
 wasm-bindgen-futures = "0.4"
-js-sys = "0.3"
-wasm-bindgen = "0.2"
-
-[dependencies.web-sys]
-version = "0.3.52"
-features = ["Window"]
 
 
 [build-dependencies]
diff --git a/crates/re_viewer/src/app.rs b/crates/re_viewer/src/app.rs
index 04dd0127d173..ad5459df9dc5 100644
--- a/crates/re_viewer/src/app.rs
+++ b/crates/re_viewer/src/app.rs
@@ -1796,7 +1796,7 @@ fn save_database_to_file(
         let file = std::fs::File::create(path.as_path())
             .with_context(|| format!("Failed to create file at {path:?}"))?;
 
-        re_log_types::encoding::encode(msgs.iter(), file)
+        re_log_encoding::encoder::encode(msgs.iter(), file)
             .map(|_| path)
             .context("Message encode")
     }
@@ -1806,7 +1806,7 @@ fn save_database_to_file(
 fn load_rrd_to_log_db(mut read: impl std::io::Read) -> anyhow::Result<LogDb> {
     crate::profile_function!();
 
-    let decoder = re_log_types::encoding::Decoder::new(read)?;
+    let decoder = re_log_encoding::decoder::Decoder::new(read)?;
 
     let mut log_db = LogDb::default();
     for msg in decoder {
diff --git a/crates/re_viewer/src/lib.rs b/crates/re_viewer/src/lib.rs
index a248d82f521e..d9942fe1928c 100644
--- a/crates/re_viewer/src/lib.rs
+++ b/crates/re_viewer/src/lib.rs
@@ -8,7 +8,6 @@ pub mod env_vars;
 pub mod math;
 mod misc;
 mod remote_viewer_app;
-pub mod stream_rrd_from_http;
 mod ui;
 mod viewer_analytics;
 
diff --git a/crates/re_viewer/src/web.rs b/crates/re_viewer/src/web.rs
index eb879b32f3f7..4bf72943a05e 100644
--- a/crates/re_viewer/src/web.rs
+++ b/crates/re_viewer/src/web.rs
@@ -50,7 +50,7 @@ pub async fn start(
             url: url.clone(),
         });
         let egui_ctx = cc.egui_ctx.clone();
-        crate::stream_rrd_from_http::stream_rrd_from_http(
+        re_log_encoding::stream_rrd_from_http::stream_rrd_from_http(
             url,
             Box::new(move |msg| {
                 egui_ctx.request_repaint(); // wake up ui thread
diff --git a/crates/rerun/Cargo.toml b/crates/rerun/Cargo.toml
index f3a14600869a..643ca8b25deb 100644
--- a/crates/rerun/Cargo.toml
+++ b/crates/rerun/Cargo.toml
@@ -57,7 +57,6 @@ sdk = ["dep:re_sdk"]
 # You also need to install some additional tools, which you can do by running
 # [`scripts/setup_web.sh`](https://github.com/rerun-io/rerun/blob/main/scripts/setup_web.sh).
 web_viewer = [
-    "dep:once_cell",
     "dep:re_web_viewer_server",
     "dep:webbrowser",
     "re_ws_comms/server",
 ]
 
 [dependencies]
 re_build_info.workspace = true
-re_error.workspace = true
 re_format.workspace = true
-re_log_types = { workspace = true, features = ["load"] }
+re_log_encoding = { workspace = true, features = ["decoder", "encoder"] }
+re_log_types.workspace = true
 re_log.workspace = true
 re_memory.workspace = true
 re_smart_channel.workspace = true
 re_ws_comms = { workspace = true, features = ["client"] }
 
 anyhow.workspace = true
 document-features = "0.2"
-egui = { workspace = true, default-features = false }
 itertools = { workspace = true }
 parking_lot.workspace = true
 
@@ -86,7 +84,6 @@ re_sdk_comms = { workspace = true, optional = true }
 re_viewer = { workspace = true, optional = true }
 re_web_viewer_server = { workspace = true, optional = true }
 
-once_cell = { version = "1.17", optional = true }
 webbrowser = { version = "0.8", optional = true }
 
 # Native dependencies:
diff --git a/crates/rerun/src/run.rs b/crates/rerun/src/run.rs
index 1dfa9ee9ff59..495fd680eb7f 100644
--- a/crates/rerun/src/run.rs
+++ b/crates/rerun/src/run.rs
@@ -263,7 +263,7 @@ async fn run_impl(
     let rx = if let Some(url_or_path) = args.url_or_path.clone() {
         match categorize_argument(url_or_path) {
             ArgumentCategory::RrdHttpUrl(url) => {
-                re_viewer::stream_rrd_from_http::stream_rrd_from_http_to_channel(url)
+                re_log_encoding::stream_rrd_from_http::stream_rrd_from_http_to_channel(url)
             }
             ArgumentCategory::RrdFilePath(path) => {
                 re_log::info!("Loading {path:?}…");
@@ -281,6 +281,7 @@ async fn run_impl(
         }
         #[cfg(not(feature = "web_viewer"))]
         {
+            _ = (rerun_server_ws_url, shutdown_rx);
             panic!("Can't host web-viewer - rerun was not compiled with the 'web_viewer' feature");
         }
     } else {
@@ -295,7 +296,7 @@ async fn run_impl(
 
         #[cfg(not(feature = "native_viewer"))]
         {
-            _ = call_source;
+            _ = (call_source, rerun_server_ws_url);
             anyhow::bail!("Can't start viewer - rerun was compiled without the 'native_viewer' feature");
         }
     }
@@ -452,7 +453,7 @@ fn native_viewer_connect_to_ws_url(
 
 fn load_file_to_channel(path: &std::path::Path) -> anyhow::Result<re_smart_channel::Receiver<LogMsg>> {
     use anyhow::Context as _;
     let file = std::fs::File::open(path).context("Failed to open file")?;
-    let decoder = re_log_types::encoding::Decoder::new(file)?;
+    let decoder = re_log_encoding::decoder::Decoder::new(file)?;
 
     let (tx, rx) = re_smart_channel::smart_channel(re_smart_channel::Source::File {
         path: path.to_owned(),
@@ -482,8 +483,8 @@ fn stream_to_rrd(
     rx: &re_smart_channel::Receiver<LogMsg>,
     path: &std::path::PathBuf,
     shutdown_bool: &Arc<AtomicBool>,
-) -> Result<(), re_sdk::sink::FileSinkError> {
-    use re_sdk::sink::FileSinkError;
+) -> Result<(), re_log_encoding::FileSinkError> {
+    use re_log_encoding::FileSinkError;
     use re_smart_channel::RecvTimeoutError;
 
     if path.exists() {
@@ -494,7 +495,7 @@ fn stream_to_rrd(
     let file =
         std::fs::File::create(path).map_err(|err| FileSinkError::CreateFile(path.clone(), err))?;
 
-    let mut encoder = re_log_types::encoding::Encoder::new(file)?;
+    let mut encoder = re_log_encoding::encoder::Encoder::new(file)?;
 
     while !shutdown_bool.load(std::sync::atomic::Ordering::Relaxed) {
         // We wake up and poll shutdown_bool every now and then.
diff --git a/examples/rust/objectron/Cargo.toml b/examples/rust/objectron/Cargo.toml
index ee1fe349f747..e96c1b651a73 100644
--- a/examples/rust/objectron/Cargo.toml
+++ b/examples/rust/objectron/Cargo.toml
@@ -13,8 +13,6 @@ rerun = { workspace = true, features = ["web_viewer"] }
 anyhow.workspace = true
 clap = { workspace = true, features = ["derive"] }
 glam.workspace = true
-image = { workspace = true, default-features = false, features = ["jpeg"] }
-itertools = { workspace = true }
 prost = "0.11"
 
diff --git a/rerun_py/Cargo.toml b/rerun_py/Cargo.toml
index 50fa0ada3d14..a7b89a43f450 100644
--- a/rerun_py/Cargo.toml
+++ b/rerun_py/Cargo.toml
@@ -44,21 +44,15 @@ re_error.workspace = true
 re_log.workspace = true
 re_log_types.workspace = true
 re_memory.workspace = true
-re_tensor_ops.workspace = true
 rerun = { workspace = true, default-features = false, features = [
     "analytics",
     "server",
     "sdk",
 ] }
 
-ahash.workspace = true
-anyhow.workspace = true
 arrow2 = { workspace = true, features = ["io_ipc", "io_print"] }
-bytemuck = { version = "1.11", features = ["extern_crate_alloc"] }
-crossbeam = "0.8"
 document-features = "0.2"
 glam.workspace = true
-half.workspace = true
 image = { workspace = true, default-features = false, features = ["jpeg"] }
 itertools = { workspace = true }
 macaw.workspace = true
diff --git a/scripts/publish_crates.sh b/scripts/publish_crates.sh
index 6c5029c34a55..1c52ca6cc140 100755
--- a/scripts/publish_crates.sh
+++ b/scripts/publish_crates.sh
@@ -102,6 +102,7 @@ cargo publish $FLAGS -p re_memory
 cargo publish $FLAGS -p re_tuid
 cargo publish $FLAGS -p re_log_types
 cargo publish $FLAGS -p re_smart_channel
+cargo publish $FLAGS -p re_log_encoding
 cargo publish $FLAGS -p re_tensor_ops
 cargo publish $FLAGS -p re_ui
 cargo publish $FLAGS -p re_arrow_store

From 8ab1155f604ba6997b7c0830b8821297ebf686d5 Mon Sep 17 00:00:00 2001
From: Jeremy Leibs
Date: Thu, 6 Apr 2023 15:16:12 -0400
Subject: [PATCH 28/89] New example code for facebook research segment anything
 (#1788)

* New example code for facebook research segment anything
* Add segmentation workaround for users still on 0.4.0
* Images should use class-id as label
* Add an alternative tensor-based view
---
 crates/re_viewer/src/ui/data_ui/image.rs      |   2 +-
 examples/python/segment_anything/.gitignore   |   1 +
 examples/python/segment_anything/main.py      | 210 ++++++++++++++++++
 .../python/segment_anything/requirements.txt  |   8 +
 4 files changed, 220 insertions(+), 1 deletion(-)
 create mode 100644 examples/python/segment_anything/.gitignore
 create mode 100755 examples/python/segment_anything/main.py
 create mode 100644 examples/python/segment_anything/requirements.txt

diff --git a/crates/re_viewer/src/ui/data_ui/image.rs b/crates/re_viewer/src/ui/data_ui/image.rs
index bb3d5a483548..63d5a67130b3 100644
--- a/crates/re_viewer/src/ui/data_ui/image.rs
+++ b/crates/re_viewer/src/ui/data_ui/image.rs
@@ -366,7 +366,7 @@ pub fn show_zoomed_image_region(
                     .class_description(Some(ClassId(u16_val)))
                     .annotation_info()
                     .label(None)
-                    .unwrap_or_default(),
+                    .unwrap_or_else(|| u16_val.to_string()),
             );
             ui.end_row();
         };
diff --git a/examples/python/segment_anything/.gitignore b/examples/python/segment_anything/.gitignore
new file mode 100644
index 000000000000..0447b0d4ac3a
--- /dev/null
+++ b/examples/python/segment_anything/.gitignore
@@ -0,0 +1 @@
+model/
diff --git a/examples/python/segment_anything/main.py b/examples/python/segment_anything/main.py
new file mode 100755
index 000000000000..ad2069840f54
--- /dev/null
+++ b/examples/python/segment_anything/main.py
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+"""
+Example of using Rerun to log and visualize the output of segment-anything.
+
+See: [segment_anything](https://segment-anything.com/).
+
+Can be used to test mask-generation on one or more images. Images can be local file-paths
+or remote urls.
+
+Example:
+```
+# Run on a remote image:
+python main.py https://raw.githubusercontent.com/facebookresearch/segment-anything/main/notebooks/images/dog.jpg
+
+# Use cuda and a different model on a local image
+python main.py --device cuda --model vit_h /path/to/my_image.jpg
+```
+"""
+
+
+import argparse
+import logging
+import os
+from pathlib import Path
+from typing import Final
+from urllib.parse import urlparse
+
+import cv2
+import numpy as np
+import requests
+import rerun as rr
+import torch
+import torchvision
+from cv2 import Mat
+from segment_anything import SamAutomaticMaskGenerator, sam_model_registry
+from segment_anything.modeling import Sam
+from tqdm import tqdm
+
+MODEL_DIR: Final = Path(os.path.dirname(__file__)) / "model"
+MODEL_URLS: Final = {
+    "vit_h": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_h_4b8939.pth",
+    "vit_l": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_l_0b3195.pth",
+    "vit_b": "https://dl.fbaipublicfiles.com/segment_anything/sam_vit_b_01ec64.pth",
+}
+
+
+def download_with_progress(url: str, dest: Path) -> None:
+    """Download file with tqdm progress bar."""
+    chunk_size = 1024 * 1024
+    resp = requests.get(url, stream=True)
+    total_size = int(resp.headers.get("content-length", 0))
+    with open(dest, "wb") as dest_file:
+        with tqdm(
+            desc="Downloading model", total=total_size, unit="iB", unit_scale=True, unit_divisor=1024
+        ) as progress:
+            for data in resp.iter_content(chunk_size):
+                dest_file.write(data)
+                progress.update(len(data))
+
+
+def get_downloaded_model_path(model_name: str) -> Path:
+    """Fetch the segment-anything model to a local cache directory."""
+    model_url = MODEL_URLS[model_name]
+
+    model_location = MODEL_DIR / model_url.split("/")[-1]
+    if not model_location.exists():
+        os.makedirs(MODEL_DIR, exist_ok=True)
+        download_with_progress(model_url, model_location)
+
+    return model_location
+
+
+def create_sam(model: str, device: str) -> Sam:
+    """Load the segment-anything model, fetching the model-file as necessary."""
+    model_path = get_downloaded_model_path(model)
+
+    logging.info("PyTorch version: {}".format(torch.__version__))
+    logging.info("Torchvision version: {}".format(torchvision.__version__))
+    logging.info("CUDA is available: {}".format(torch.cuda.is_available()))
+
+    logging.info("Building sam from: {}".format(model_path))
+    sam = sam_model_registry[model](checkpoint=model_path)
+    return sam.to(device=device)
+
+
+def run_segmentation(mask_generator:
SamAutomaticMaskGenerator, image: Mat) -> None: + """Run segmentation on a single image.""" + rr.log_image("image", image) + + logging.info("Finding masks") + masks = mask_generator.generate(image) + + logging.info("Found {} masks".format(len(masks))) + + # Log all the masks stacked together as a tensor + # TODO(jleibs): Tensors with class-ids and annotation-coloring would make this much slicker + mask_tensor = ( + np.dstack([np.zeros((image.shape[0], image.shape[1]))] + [m["segmentation"] for m in masks]).astype("uint8") + * 128 + ) + rr.log_tensor("mask_tensor", mask_tensor) + + # Note: for stacking, it is important to sort these masks by area from largest to smallest + # this is because the masks are overlapping and we want smaller masks to + # be drawn on top of larger masks. + # TODO(jleibs): we could instead draw each mask as a separate image layer, but the current layer-stacking + # does not produce great results. + masks_with_ids = list(enumerate(masks, start=1)) + masks_with_ids.sort(key=(lambda x: x[1]["area"]), reverse=True) # type: ignore[no-any-return] + + # Work-around for https://github.com/rerun-io/rerun/issues/1782 + # Make sure we have an AnnotationInfo present for every class-id used in this image + # TODO(jleibs): Remove when fix is released + rr.log_annotation_context( + "image", + [rr.AnnotationInfo(id) for id, _ in masks_with_ids], + timeless=False, + ) + + # Layer all of the masks together, using the id as class-id in the segmentation + segmentation_img = np.zeros((image.shape[0], image.shape[1])) + for id, m in masks_with_ids: + segmentation_img[m["segmentation"]] = id + + rr.log_segmentation_image("image/masks", segmentation_img) + + mask_bbox = np.array([m["bbox"] for _, m in masks_with_ids]) + rr.log_rects("image/boxes", rects=mask_bbox, class_ids=[id for id, _ in masks_with_ids]) + + +def is_url(path: str) -> bool: + """Check if a path is a url or a local file.""" + try: + result = urlparse(path) + return all([result.scheme, result.netloc]) + except ValueError: + return False + + +def load_image(image_uri: str) -> Mat: + """Conditionally download an image from URL or load it from disk.""" + logging.info("Loading: {}".format(image_uri)) + if is_url(image_uri): + response = requests.get(image_uri) + response.raise_for_status() + image_data = np.asarray(bytearray(response.content), dtype="uint8") + image = cv2.imdecode(image_data, cv2.IMREAD_COLOR) + else: + image = cv2.imread(image_uri, cv2.IMREAD_COLOR) + + image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) + return image + + +def main() -> None: + parser = argparse.ArgumentParser( + description="Run the Facebook Research Segment Anything example.", + formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "--model", + action="store", + default="vit_b", + choices=MODEL_URLS.keys(), + help="Which model to use." "(See: https://github.com/facebookresearch/segment-anything#model-checkpoints)", + ) + parser.add_argument( + "--device", + action="store", + default="cpu", + help="Which torch device to use, e.g. cpu or cuda. " + "(See: https://pytorch.org/docs/stable/tensor_attributes.html#torch.device)", + ) + parser.add_argument( + "--points-per-batch", + action="store", + default=32, + type=int, + help="Points per batch. 
More points will run faster, but too many will exhaust GPU memory.", + ) + parser.add_argument("images", metavar="N", type=str, nargs="*", help="A list of images to process.") + + rr.script_add_args(parser) + args = parser.parse_args() + + rr.script_setup(args, "segment_anything") + logging.getLogger().addHandler(rr.LoggingHandler("logs")) + logging.getLogger().setLevel(logging.INFO) + + sam = create_sam(args.model, args.device) + + mask_config = {"points_per_batch": args.points_per_batch} + mask_generator = SamAutomaticMaskGenerator(sam, **mask_config) + + if len(args.images) == 0: + logging.info("No image provided. Using default.") + args.images = [ + "https://raw.githubusercontent.com/facebookresearch/segment-anything/main/notebooks/images/truck.jpg" + ] + + for n, image_uri in enumerate(args.images): + rr.set_time_sequence("image", n) + image = load_image(image_uri) + run_segmentation(mask_generator, image) + + rr.script_teardown(args) + + +if __name__ == "__main__": + main() diff --git a/examples/python/segment_anything/requirements.txt b/examples/python/segment_anything/requirements.txt new file mode 100644 index 000000000000..9c0dfad84fa8 --- /dev/null +++ b/examples/python/segment_anything/requirements.txt @@ -0,0 +1,8 @@ +-e git+https://github.com/facebookresearch/segment-anything.git#egg=segment-anything +numpy +opencv-python +requests +rerun-sdk +torch +torchvision +tqdm From 880cf8e96c1e626c7c74798632f2d2266dd418fe Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Sat, 8 Apr 2023 14:38:02 +0200 Subject: [PATCH 29/89] Implement `re_tuid::Tuid::random()` on web (#1796) * Implement `re_tuid::Tuid::random()` on web * Fix bad error message --- Cargo.lock | 1 + crates/re_tuid/Cargo.toml | 6 ++---- crates/re_tuid/src/lib.rs | 37 ++++++++++++++++++++----------------- 3 files changed, 23 insertions(+), 21 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e7986f73779c..2802735e3d49 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4117,6 +4117,7 @@ dependencies = [ "criterion", "document-features", "getrandom", + "instant", "once_cell", "serde", ] diff --git a/crates/re_tuid/Cargo.toml b/crates/re_tuid/Cargo.toml index 71312ce7becd..6dc45f5c34ff 100644 --- a/crates/re_tuid/Cargo.toml +++ b/crates/re_tuid/Cargo.toml @@ -28,6 +28,8 @@ serde = ["dep:serde"] [dependencies] document-features = "0.2" +getrandom = "0.2" +instant = "0.1" once_cell = "1.16" # Optional dependencies: @@ -35,10 +37,6 @@ arrow2 = { workspace = true, optional = true } # used by arro arrow2_convert = { workspace = true, optional = true } serde = { version = "1", features = ["derive"], optional = true } -# native dependencies: -[target.'cfg(not(target_arch = "wasm32"))'.dependencies] -getrandom = "0.2" - [dev-dependencies] criterion = "0.4" diff --git a/crates/re_tuid/src/lib.rs b/crates/re_tuid/src/lib.rs index 33b8d85625c2..b9c52ace609c 100644 --- a/crates/re_tuid/src/lib.rs +++ b/crates/re_tuid/src/lib.rs @@ -61,7 +61,6 @@ impl Tuid { }; #[inline] - #[cfg(not(target_arch = "wasm32"))] // TODO(emilk): implement for wasm32 (needs ms since epoch). pub fn random() -> Self { use std::cell::RefCell; @@ -106,35 +105,39 @@ impl Tuid { /// Returns a high-precision, monotonically increasing count that approximates nanoseconds since unix epoch. 
 #[inline]
-#[cfg(not(target_arch = "wasm32"))]
 fn monotonic_nanos_since_epoch() -> u64 {
     // This can maybe be optimized
+    use instant::Instant;
     use once_cell::sync::Lazy;
-    use std::time::Instant;
-
-    fn epoch_offset_and_start() -> (u64, Instant) {
-        if let Ok(duration_since_epoch) = std::time::UNIX_EPOCH.elapsed() {
-            let nanos_since_epoch = duration_since_epoch.as_nanos() as u64;
-            (nanos_since_epoch, Instant::now())
-        } else {
-            // system time is set before 1970. this should be quite rare.
-            (0, Instant::now())
-        }
-    }
 
-    static START_TIME: Lazy<(u64, Instant)> = Lazy::new(epoch_offset_and_start);
+    static START_TIME: Lazy<(u64, Instant)> = Lazy::new(|| (nanos_since_epoch(), Instant::now()));
     START_TIME.0 + START_TIME.1.elapsed().as_nanos() as u64
 }
 
+fn nanos_since_epoch() -> u64 {
+    if let Ok(duration_since_epoch) = instant::SystemTime::UNIX_EPOCH.elapsed() {
+        let mut nanos_since_epoch = duration_since_epoch.as_nanos() as u64;
+
+        if cfg!(target_arch = "wasm32") {
+            // Web browsers notoriously round to the nearest millisecond (because of spectre/meltdown),
+            // so we add a bit of extra randomness here to increase our entropy and reduce the chance of collisions:
+            nanos_since_epoch += random_u64() % 1_000_000;
+        }
+
+        nanos_since_epoch
+    } else {
+        // system time is set before 1970. this should be quite rare.
+        0
+    }
+}
+
 #[inline]
-#[cfg(not(target_arch = "wasm32"))]
 fn random_u64() -> u64 {
     let mut bytes = [0_u8; 8];
-    getrandom::getrandom(&mut bytes).expect("Couldn't get inc");
+    getrandom::getrandom(&mut bytes).expect("Couldn't get random bytes");
     u64::from_le_bytes(bytes)
 }
 
-#[cfg(not(target_arch = "wasm32"))]
 #[test]
 fn test_tuid() {
     use std::collections::{BTreeSet, HashSet};

From e3f572f6e148780ebd75c5ce9d32f58c4b5da2c6 Mon Sep 17 00:00:00 2001
From: Clement Rey
Date: Sat, 8 Apr 2023 20:06:54 +0200
Subject: [PATCH 30/89] ci: fix benchmarks (#1799)

* workflow: just run --all

* datastore: skip bucket permutations etc on CI

* i give up, just replace re_log_types by re_log_encoding

---
 .github/workflows/rust.yml                    |  2 +-
 crates/re_arrow_store/Cargo.toml              |  5 +--
 crates/re_arrow_store/benches/arrow2.rs       |  4 +-
 .../re_arrow_store/benches/arrow2_convert.rs  |  4 +-
 crates/re_arrow_store/benches/data_store.rs   | 44 +++++++++++++------
 crates/re_arrow_store/benches/vectors.rs      |  4 +-
 6 files changed, 40 insertions(+), 23 deletions(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 556f02977008..ad90d5219f7e 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -70,7 +70,7 @@ jobs:
             --all-features \
             -p re_arrow_store \
             -p re_data_store \
-            -p re_log_types \
+            -p re_log_encoding \
             -p re_query \
             -p re_tuid \
             -- --output-format=bencher | tee output.txt
diff --git a/crates/re_arrow_store/Cargo.toml b/crates/re_arrow_store/Cargo.toml
index af9a149db7e9..9fd217bb7904 100644
--- a/crates/re_arrow_store/Cargo.toml
+++ b/crates/re_arrow_store/Cargo.toml
@@ -25,10 +25,9 @@ deadlock_detection = ["parking_lot/deadlock_detection"]
 ## Integration with `polars`, to efficiently use the datastore with dataframes.
 polars = ["dep:polars-core", "dep:polars-ops"]
 
-## When set, disables costly benchmark suites that measure the performance of third-party
-## libraries.
+## When set, only run the core set of benchmark suites.
 ## Commonly set implicitly by --all-features, e.g. on CI. 
-dont_bench_third_party = []
+core_benchmarks_only = []
 
 
 [dependencies]
diff --git a/crates/re_arrow_store/benches/arrow2.rs b/crates/re_arrow_store/benches/arrow2.rs
index af02ac5289a6..a74d875c9945 100644
--- a/crates/re_arrow_store/benches/arrow2.rs
+++ b/crates/re_arrow_store/benches/arrow2.rs
@@ -22,11 +22,11 @@ use re_log_types::{
 
 criterion_group!(benches, erased_clone, estimated_size_bytes);
 
-#[cfg(not(feature = "dont_bench_third_party"))]
+#[cfg(not(feature = "core_benchmarks_only"))]
 criterion::criterion_main!(benches);
 
 // Don't run these benchmarks on CI: they measure the performance of third-party libraries.
-#[cfg(feature = "dont_bench_third_party")]
+#[cfg(feature = "core_benchmarks_only")]
 fn main() {}
 
 // ---
diff --git a/crates/re_arrow_store/benches/arrow2_convert.rs b/crates/re_arrow_store/benches/arrow2_convert.rs
index 92a070e49f28..db96a115c257 100644
--- a/crates/re_arrow_store/benches/arrow2_convert.rs
+++ b/crates/re_arrow_store/benches/arrow2_convert.rs
@@ -14,11 +14,11 @@ use re_log_types::{
 
 criterion_group!(benches, serialize, deserialize);
 
-#[cfg(not(feature = "dont_bench_third_party"))]
+#[cfg(not(feature = "core_benchmarks_only"))]
 criterion::criterion_main!(benches);
 
 // Don't run these benchmarks on CI: they measure the performance of third-party libraries.
-#[cfg(feature = "dont_bench_third_party")]
+#[cfg(feature = "core_benchmarks_only")]
 fn main() {}
 
 // ---
diff --git a/crates/re_arrow_store/benches/data_store.rs b/crates/re_arrow_store/benches/data_store.rs
index cb8517114743..9d9a485311ac 100644
--- a/crates/re_arrow_store/benches/data_store.rs
+++ b/crates/re_arrow_store/benches/data_store.rs
@@ -27,10 +27,32 @@ const NUM_ROWS: i64 = 1;
 #[cfg(debug_assertions)]
 const NUM_INSTANCES: i64 = 1;
 
+fn packed() -> &'static [bool] {
+    #[cfg(feature = "core_benchmarks_only")]
+    {
+        &[false]
+    }
+    #[cfg(not(feature = "core_benchmarks_only"))]
+    {
+        &[false, true]
+    }
+}
+
+fn num_rows_per_bucket() -> &'static [u64] {
+    #[cfg(feature = "core_benchmarks_only")]
+    {
+        &[]
+    }
+    #[cfg(not(feature = "core_benchmarks_only"))]
+    {
+        &[0, 2, 32, 2048]
+    }
+}
+
 // --- Benchmarks ---
 
 fn insert(c: &mut Criterion) {
-    for packed in [false, true] {
+    for &packed in packed() {
         let mut group = c.benchmark_group(format!(
             "datastore/num_rows={NUM_ROWS}/num_instances={NUM_INSTANCES}/packed={packed}/insert"
         ));
@@ -45,9 +67,8 @@ fn insert(c: &mut Criterion) {
             b.iter(|| insert_table(Default::default(), InstanceKey::name(), &table));
         });
 
-        // Emulate more or less buckets
-        let num_rows_per_bucket = [0, 2, 32, 2048];
-        for num_rows_per_bucket in num_rows_per_bucket {
+        // Emulate more or less buckets
+        for &num_rows_per_bucket in num_rows_per_bucket() {
             group.bench_function(format!("bucketsz={num_rows_per_bucket}"), |b| {
                 b.iter(|| {
                     insert_table(
@@ -68,7 +89,7 @@ fn insert(c: &mut Criterion) {
 }
 
 fn latest_at(c: &mut Criterion) {
-    for packed in [false, true] {
+    for &packed in packed() {
         let mut group = c.benchmark_group(format!(
             "datastore/num_rows={NUM_ROWS}/num_instances={NUM_INSTANCES}/packed={packed}/latest_at"
         ));
@@ -92,8 +113,7 @@ fn latest_at(c: &mut Criterion) {
         });
 
         // Emulate more or less buckets
-        let num_rows_per_bucket = [0, 2, 32, 2048];
-        for num_rows_per_bucket in num_rows_per_bucket {
+        for &num_rows_per_bucket in num_rows_per_bucket() {
             let store = insert_table(
                 DataStoreConfig {
                     index_bucket_nb_rows: num_rows_per_bucket,
@@ -122,7 +142,7 @@ fn latest_at(c: &mut Criterion) {
 }
 
 fn latest_at_missing(c: &mut Criterion) {
-    for packed in [false, true] {
+    for &packed in 
packed() { let mut group = c.benchmark_group(format!( "datastore/num_rows={NUM_ROWS}/num_instances={NUM_INSTANCES}/packed={packed}/latest_at_missing" )); @@ -157,8 +177,7 @@ fn latest_at_missing(c: &mut Criterion) { }); // Emulate more or less buckets - let num_rows_per_bucket = [0, 2, 32, 2048]; - for num_rows_per_bucket in num_rows_per_bucket { + for &num_rows_per_bucket in num_rows_per_bucket() { let store = insert_table( DataStoreConfig { index_bucket_nb_rows: num_rows_per_bucket, @@ -198,7 +217,7 @@ fn latest_at_missing(c: &mut Criterion) { } fn range(c: &mut Criterion) { - for packed in [false, true] { + for &packed in packed() { let mut group = c.benchmark_group(format!( "datastore/num_rows={NUM_ROWS}/num_instances={NUM_INSTANCES}/packed={packed}/range" )); @@ -214,8 +233,7 @@ fn range(c: &mut Criterion) { }); // Emulate more or less buckets - let num_rows_per_bucket = [0, 2, 32, 2048]; - for num_rows_per_bucket in num_rows_per_bucket { + for &num_rows_per_bucket in num_rows_per_bucket() { let store = insert_table( DataStoreConfig { index_bucket_nb_rows: num_rows_per_bucket, diff --git a/crates/re_arrow_store/benches/vectors.rs b/crates/re_arrow_store/benches/vectors.rs index 0ddd3b316e31..9bb175d6bbda 100644 --- a/crates/re_arrow_store/benches/vectors.rs +++ b/crates/re_arrow_store/benches/vectors.rs @@ -12,11 +12,11 @@ use tinyvec::TinyVec; criterion_group!(benches, sort, split, swap, swap_opt); -#[cfg(not(feature = "dont_bench_third_party"))] +#[cfg(not(feature = "core_benchmarks_only"))] criterion::criterion_main!(benches); // Don't run these benchmarks on CI: they measure the performance of third-party libraries. -#[cfg(feature = "dont_bench_third_party")] +#[cfg(feature = "core_benchmarks_only")] fn main() {} // --- From 49e260a9c7a9e5c529eb02d4e9db588e5ba733bf Mon Sep 17 00:00:00 2001 From: h3mosphere <129932586+h3mosphere@users.noreply.github.com> Date: Mon, 10 Apr 2023 19:15:10 +1000 Subject: [PATCH 31/89] Add `minimal_options` example (`RerunArgs`) (#1773) * Allows connecting to remote server through rerun's RerunArgs. 
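
  As a quick sanity check, the new example can be exercised like any other
  workspace binary. This is simply the invocation from the example's own doc
  comment; `--help` will list the flags contributed by `RerunArgs`:

  ```
  cargo run -p minimal_options -- --help
  ```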
Co-authored-by: Clement Rey
---
 Cargo.lock                                | 10 ++++
 examples/rust/minimal_options/Cargo.toml  | 14 +++++
 examples/rust/minimal_options/src/main.rs | 66 +++++++++++++++++++++++
 3 files changed, 90 insertions(+)
 create mode 100644 examples/rust/minimal_options/Cargo.toml
 create mode 100644 examples/rust/minimal_options/src/main.rs

diff --git a/Cargo.lock b/Cargo.lock
index 2802735e3d49..7ccc54616423 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2719,6 +2719,16 @@ version = "0.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
 
+[[package]]
+name = "minimal_options"
+version = "0.4.0"
+dependencies = [
+ "anyhow",
+ "clap 4.1.4",
+ "glam",
+ "rerun",
+]
+
 [[package]]
 name = "miniz_oxide"
 version = "0.5.4"
diff --git a/examples/rust/minimal_options/Cargo.toml b/examples/rust/minimal_options/Cargo.toml
new file mode 100644
index 000000000000..9f5888dd8d69
--- /dev/null
+++ b/examples/rust/minimal_options/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "minimal_options"
+version.workspace = true
+edition.workspace = true
+rust-version.workspace = true
+license.workspace = true
+publish = false
+
+[dependencies]
+rerun = { workspace = true, features = ["web_viewer"] }
+
+anyhow.workspace = true
+clap = { workspace = true, features = ["derive"] }
+glam.workspace = true
diff --git a/examples/rust/minimal_options/src/main.rs b/examples/rust/minimal_options/src/main.rs
new file mode 100644
index 000000000000..c6045998e498
--- /dev/null
+++ b/examples/rust/minimal_options/src/main.rs
@@ -0,0 +1,66 @@
+//! Demonstrates how to accept arguments and connect to running rerun servers.
+//!
+//! Usage:
+//! ```
+//! cargo run -p minimal_options -- --help
+//! ```
+
+use rerun::components::{ColorRGBA, Point3D};
+use rerun::time::{TimeType, Timeline};
+use rerun::{external::re_log, MsgSender, Session};
+
+use rerun::demo_util::grid;
+
+#[derive(Debug, clap::Parser)]
+#[clap(author, version, about)]
+struct Args {
+    #[command(flatten)]
+    rerun: rerun::clap::RerunArgs,
+
+    #[clap(long, default_value = "10")]
+    num_points_per_axis: usize,
+
+    #[clap(long, default_value = "5.0")]
+    radius: f32,
+}
+
+fn run(session: &Session, args: &Args) -> anyhow::Result<()> {
+    let timeline_keyframe = Timeline::new("keyframe", TimeType::Sequence);
+
+    let points = grid(
+        glam::Vec3::splat(-args.radius),
+        glam::Vec3::splat(args.radius),
+        args.num_points_per_axis,
+    )
+    .map(Point3D::from)
+    .collect::<Vec<_>>();
+    let colors = grid(
+        glam::Vec3::ZERO,
+        glam::Vec3::splat(255.0),
+        args.num_points_per_axis,
+    )
+    .map(|v| ColorRGBA::from_rgb(v.x as u8, v.y as u8, v.z as u8))
+    .collect::<Vec<_>>();
+
+    MsgSender::new("my_points")
+        .with_component(&points)?
        .with_component(&colors)?
+        .with_time(timeline_keyframe, 0)
+        .send(session)?;
+
+    Ok(())
+}
+
+fn main() -> anyhow::Result<()> {
+    re_log::setup_native_logging();
+
+    use clap::Parser as _;
+    let args = Args::parse();
+
+    let default_enabled = true;
+    args.rerun
+        .clone()
+        .run("minimal_options", default_enabled, move |session| {
+            run(&session, &args).unwrap();
+        })
+}

From 5c7f527f4dff6100c6d01127cbaf9b28e4f9a4db Mon Sep 17 00:00:00 2001
From: Urho Laukkarinen
Date: Mon, 10 Apr 2023 12:17:07 +0300
Subject: [PATCH 32/89] Add `pacman` support to `setup_web.sh` (#1797)

Co-authored-by: Clement Rey
---
 scripts/setup_web.sh | 1 +
 1 file changed, 1 insertion(+)

diff --git a/scripts/setup_web.sh b/scripts/setup_web.sh
index e755e07514c2..a711cd09055f 100755
--- a/scripts/setup_web.sh
+++ b/scripts/setup_web.sh
@@ -28,6 +28,7 @@
 elif [ -x "$(command -v dnf)" ];    then sudo dnf install $packagesNeeded
 elif [ -x "$(command -v zypper)" ]; then sudo zypper install $packagesNeeded
 elif [ -x "$(command -v apk)" ];    then sudo apk add --no-cache $packagesNeeded
+elif [ -x "$(command -v pacman)" ]; then sudo pacman -S $packagesNeeded
 else
     echo "FAILED TO INSTALL PACKAGE: Package manager not found. You must manually install: $packagesNeeded">&2;
     exit 1

From d8525340b50c81815b5e30d876ee68453727c00b Mon Sep 17 00:00:00 2001
From: Clement Rey
Date: Mon, 10 Apr 2023 14:21:52 +0200
Subject: [PATCH 33/89] fix ci (cargo-deny): cargo update -p crossbeam-channel
 (#1806)

---
 Cargo.lock | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 7ccc54616423..c7b167fedbe4 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -996,9 +996,9 @@ dependencies = [
 
 [[package]]
 name = "crossbeam-channel"
-version = "0.5.6"
+version = "0.5.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521"
+checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200"
 dependencies = [
 "cfg-if",
 "crossbeam-utils",

From 731d941eb776cf36d24d314d98de9f9efd69e537 Mon Sep 17 00:00:00 2001
From: Emil Ernerfeldt
Date: Tue, 11 Apr 2023 15:19:24 +0200
Subject: [PATCH 34/89] Compile with `panic = "abort"` (#1813)

* Compile with `panic = "abort"`

This PR sets `panic = "abort"` for both debug and release builds.

This cuts down the `rerun` binary size in release builds from 29.9 MB
to 22.7 MB - a 25% reduction!

## Details

The default panic behavior in Rust is to unwind the stack. This leads
to a lot of extra code bloat, and some missed opportunities for
optimization.

The benefit is that one can let a thread die without crashing the whole
application, and one can use `std::panic::catch_unwind` as a kind of
try-catch block.

We don't make use of these features at all (at least not
intentionally), and so are paying a cost for something we don't need.

I would also argue that a panic SHOULD lead to a hard crash unless you
are building an Erlang-like robust actor system where you use defensive
programming to protect against programmer errors (all panics are
programmer errors - user errors should use `Result`).
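
To make concrete what we are giving up, here is a minimal illustrative
sketch (not code from this repo) of the `catch_unwind` try-catch pattern
that only works under the default `panic = "unwind"`:

```rust
fn main() {
    // With panic = "unwind", a panic can be intercepted:
    let result = std::panic::catch_unwind(|| {
        panic!("programmer error");
    });
    assert!(result.is_err()); // the panic was caught; the process lives on

    // With panic = "abort", the closure above would instead terminate the
    // whole process immediately - no unwinding, no recovery.
    println!("still running");
}
```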
* Quiet clippy
---
 Cargo.toml                        |  4 +++-
 clippy.toml                       |  2 ++
 crates/rerun/src/crash_handler.rs |  9 +++++++--
 3 files changed, 12 insertions(+), 3 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index 76c8577f1c6a..b543d56845f1 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -89,7 +89,8 @@ wgpu-hal = { version = "0.15.4", default-features = false }
 
 [profile.dev]
-opt-level = 1 # Make debug builds run faster
+opt-level = 1    # Make debug builds run faster
+panic = "abort"  # This leads to better optimizations and smaller binaries (and is the default in Wasm anyways).
 
 # Optimize all dependencies even in debug builds (does not affect workspace packages):
 [profile.dev.package."*"]
@@ -97,6 +98,7 @@ opt-level = 2
 
 [profile.release]
 # debug = true # good for profilers
+panic = "abort" # This leads to better optimizations and smaller binaries (and is the default in Wasm anyways).
 
 [profile.bench]
 debug = true
diff --git a/clippy.toml b/clippy.toml
index c72661614556..4da41d009fbc 100644
--- a/clippy.toml
+++ b/clippy.toml
@@ -27,6 +27,8 @@ disallowed-methods = [
   "std::thread::spawn", # Use `std::thread::Builder` and name the thread
 
   "sha1::Digest::new", # SHA1 is cryptographically broken
+
+  "std::panic::catch_unwind", # We compile with `panic = "abort"`
 ]
 
 # https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_names
diff --git a/crates/rerun/src/crash_handler.rs b/crates/rerun/src/crash_handler.rs
index e73ffd62b006..b0650b3e72cb 100644
--- a/crates/rerun/src/crash_handler.rs
+++ b/crates/rerun/src/crash_handler.rs
@@ -30,8 +30,6 @@ fn install_panic_hook(_build_info: BuildInfo) {
             format!("{file}:{}", location.line())
         });
 
-        // `panic_info.message` is unstable, so this is the recommended way of getting
-        // the panic message out. We need both the `&str` and `String` variants.
         let msg = panic_info_message(panic_info);
 
         if let Some(msg) = &msg {
@@ -90,10 +88,17 @@ fn install_panic_hook(_build_info: BuildInfo) {
                 std::thread::sleep(std::time::Duration::from_secs(1)); // Give analytics time to send the event
             }
         }
+
+        // We compile with `panic = "abort"`, but we don't want to report the same problem twice, so just exit:
+        #[allow(clippy::exit)]
+        std::process::exit(102);
     }));
 }
 
 fn panic_info_message(panic_info: &std::panic::PanicInfo<'_>) -> Option<String> {
+    // `panic_info.message` is unstable, so this is the recommended way of getting
+    // the panic message out. We need both the `&str` and `String` variants.
+    #[allow(clippy::manual_map)]
     if let Some(msg) = panic_info.payload().downcast_ref::<&str>() {
         Some((*msg).to_owned())

From 7d4514e7e024a07edf56fd9d0bf8554124d2f649 Mon Sep 17 00:00:00 2001
From: Emil Ernerfeldt
Date: Tue, 11 Apr 2023 15:38:40 +0200
Subject: [PATCH 35/89] Add `rerun --strict`: crash if any warning or error is
 logged (#1812)

* Add `rerun --strict`: crash if any warning or error is logged

Part of https://github.com/rerun-io/rerun/issues/1483

* Can't doc-test private functions
---
 crates/re_log/src/lib.rs          |  5 ++++
 crates/rerun/src/crash_handler.rs | 29 +++++++++++++++++-----
 crates/rerun/src/run.rs           | 41 +++++++++++++++++++++++++++++++
 3 files changed, 69 insertions(+), 6 deletions(-)

diff --git a/crates/re_log/src/lib.rs b/crates/re_log/src/lib.rs
index 68f77b420b09..653451a37405 100644
--- a/crates/re_log/src/lib.rs
+++ b/crates/re_log/src/lib.rs
@@ -35,6 +35,11 @@ pub use {
     setup::*,
 };
 
+/// Re-exports of other crates.
+pub mod external {
+    pub use log;
+}
+
 /// Never log anything less serious than a `WARN` from these crates. 
const CRATES_AT_WARN_LEVEL: [&str; 3] = [ // wgpu crates spam a lot on info level, which is really annoying diff --git a/crates/rerun/src/crash_handler.rs b/crates/rerun/src/crash_handler.rs index b0650b3e72cb..ba1174daac72 100644 --- a/crates/rerun/src/crash_handler.rs +++ b/crates/rerun/src/crash_handler.rs @@ -23,7 +23,7 @@ fn install_panic_hook(_build_info: BuildInfo) { let previous_panic_hook = std::panic::take_hook(); std::panic::set_hook(Box::new(move |panic_info: &std::panic::PanicInfo<'_>| { - let callstack = callstack_from("panicking::panic_fmt\n"); + let callstack = callstack_from(&["panicking::panic_fmt\n"]); let file_line = panic_info.location().map(|location| { let file = anonymize_source_file_path(&std::path::PathBuf::from(location.file())); @@ -215,21 +215,38 @@ fn install_signal_handler(build_info: BuildInfo) { } fn callstack() -> String { - callstack_from("install_signal_handler::signal_handler\n") + callstack_from(&["install_signal_handler::signal_handler\n"]) } } -fn callstack_from(start_pattern: &str) -> String { +/// Get a nicely formatted callstack. +/// +/// You can give this function a list of substrings to look for, e.g. names of functions. +/// If any of these substrings matches, anything before that is removed from the callstack. +/// For example: +/// +/// ```ignore +/// fn print_callstack() { +/// eprintln!("{}", callstack_from(&["print_callstack"])); +/// } +/// ``` +pub fn callstack_from(start_patterns: &[&str]) -> String { let backtrace = backtrace::Backtrace::new(); let stack = backtrace_to_string(&backtrace); // Trim it a bit: let mut stack = stack.as_str(); + let start_patterns = start_patterns + .iter() + .chain(std::iter::once(&"callstack_from")); + // Trim the top (closest to the panic handler) to cut out some noise: - if let Some(offset) = stack.find(start_pattern) { - let prev_newline = stack[..offset].rfind('\n').map_or(0, |newline| newline + 1); - stack = &stack[prev_newline..]; + for start_pattern in start_patterns { + if let Some(offset) = stack.find(start_pattern) { + let prev_newline = stack[..offset].rfind('\n').map_or(0, |newline| newline + 1); + stack = &stack[prev_newline..]; + } } // Trim the bottom to cut out code that sets up the callstack: diff --git a/crates/rerun/src/run.rs b/crates/rerun/src/run.rs index 495fd680eb7f..691b3c359250 100644 --- a/crates/rerun/src/run.rs +++ b/crates/rerun/src/run.rs @@ -64,6 +64,10 @@ struct Args { #[clap(long)] profile: bool, + /// Exit with a non-zero exit code if any warning or error is logged. Useful for tests. + #[clap(long)] + strict: bool, + /// An upper limit on how much memory the Rerun Viewer should use. /// /// When this limit is used, Rerun will purge the oldest data. 
@@ -187,6 +191,11 @@ where return Ok(0); } + if args.strict { + re_log::add_boxed_logger(Box::new(StrictLogger {})).expect("Failed to enter --strict mode"); + re_log::info!("--strict mode: any warning or error will cause Rerun to panic."); + } + let res = if let Some(commands) = &args.commands { match commands { #[cfg(all(feature = "analytics"))] @@ -539,3 +548,35 @@ pub fn setup_ctrl_c_handler() -> (tokio::sync::broadcast::Receiver<()>, Arc) -> bool { + match metadata.level() { + log::Level::Error | log::Level::Warn => true, + log::Level::Info | log::Level::Debug | log::Level::Trace => false, + } + } + + fn log(&self, record: &log::Record<'_>) { + let level = match record.level() { + log::Level::Error => "error", + log::Level::Warn => "warning", + log::Level::Info | log::Level::Debug | log::Level::Trace => return, + }; + + eprintln!("{level} logged in --strict mode: {}", record.args()); + eprintln!( + "{}", + crate::crash_handler::callstack_from(&["log::__private_api_log"]) + ); + std::process::exit(1); + } + + fn flush(&self) {} +} From c254e2f4df1e292026860d4e105591e0767e8087 Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Tue, 11 Apr 2023 23:49:56 +0200 Subject: [PATCH 36/89] Refactor: Remove `TensorTrait` (#1819) * Refactor: Remove `TensorTrait` We don't need it anymore --- .github/workflows/labels.yml | 2 +- .../re_log_types/src/component_types/mod.rs | 2 +- .../src/component_types/tensor.rs | 32 ++++++------------- crates/re_sdk/src/lib.rs | 3 +- crates/re_tensor_ops/tests/tensor_tests.rs | 2 +- crates/re_viewer/src/misc/caches/mod.rs | 2 +- .../src/misc/caches/tensor_decode_cache.rs | 2 +- .../src/misc/caches/tensor_image_cache.rs | 2 +- crates/re_viewer/src/ui/data_ui/image.rs | 2 +- .../re_viewer/src/ui/space_view_heuristics.rs | 5 +-- .../re_viewer/src/ui/view_bar_chart/scene.rs | 2 +- crates/re_viewer/src/ui/view_category.rs | 3 +- .../view_spatial/scene/scene_part/images.rs | 2 +- crates/re_viewer/src/ui/view_spatial/ui.rs | 1 - crates/re_viewer/src/ui/view_spatial/ui_2d.rs | 1 - crates/re_viewer/src/ui/view_tensor/scene.rs | 2 +- crates/re_viewer/src/ui/view_tensor/ui.rs | 2 +- rerun_py/src/python_bridge.rs | 2 +- 18 files changed, 25 insertions(+), 44 deletions(-) diff --git a/.github/workflows/labels.yml b/.github/workflows/labels.yml index 62624d0e0bd8..286f91fc6b26 100644 --- a/.github/workflows/labels.yml +++ b/.github/workflows/labels.yml @@ -29,4 +29,4 @@ jobs: with: mode: minimum count: 1 - labels: "πŸ“Š analytics, πŸͺ³ bug, πŸ§‘β€πŸ’» dev experience, πŸ“– documentation, πŸ’¬ discussion, examples, πŸ“‰ performance, 🐍 python API, ⛃ re_datastore, πŸ“Ί re_viewer, πŸ”Ί re_renderer, β›΄ release, πŸ¦€ rust SDK, πŸ”¨ testing, ui, πŸ•ΈοΈ web" + labels: "πŸ“Š analytics, πŸͺ³ bug, πŸ§‘β€πŸ’» dev experience, πŸ“– documentation, πŸ’¬ discussion, examples, πŸ“‰ performance, 🐍 python API, ⛃ re_datastore, πŸ“Ί re_viewer, πŸ”Ί re_renderer, 🚜 refactor, β›΄ release, πŸ¦€ rust SDK, πŸ”¨ testing, ui, πŸ•ΈοΈ web" diff --git a/crates/re_log_types/src/component_types/mod.rs b/crates/re_log_types/src/component_types/mod.rs index ba4d4f112575..2a16223f9029 100644 --- a/crates/re_log_types/src/component_types/mod.rs +++ b/crates/re_log_types/src/component_types/mod.rs @@ -63,7 +63,7 @@ pub use size::Size3D; #[cfg(feature = "image")] pub use tensor::TensorImageError; pub use tensor::{ - Tensor, TensorCastError, TensorData, TensorDataMeaning, TensorDimension, TensorId, TensorTrait, + Tensor, TensorCastError, TensorData, TensorDataMeaning, TensorDimension, TensorId, }; pub use 
text_entry::TextEntry;
 pub use transform::{Pinhole, Rigid3, Transform};
diff --git a/crates/re_log_types/src/component_types/tensor.rs b/crates/re_log_types/src/component_types/tensor.rs
index 18dccea0399c..19118545eb52 100644
--- a/crates/re_log_types/src/component_types/tensor.rs
+++ b/crates/re_log_types/src/component_types/tensor.rs
@@ -9,18 +9,6 @@ use crate::{TensorDataType, TensorElement};
 
 use super::arrow_convert_shims::BinaryBuffer;
 
-pub trait TensorTrait {
-    fn id(&self) -> TensorId;
-    fn shape(&self) -> &[TensorDimension];
-    fn num_dim(&self) -> usize;
-    fn is_shaped_like_an_image(&self) -> bool;
-    fn is_vector(&self) -> bool;
-    fn meaning(&self) -> TensorDataMeaning;
-    fn get(&self, index: &[u64]) -> Option<TensorElement>;
-    fn dtype(&self) -> TensorDataType;
-    fn size_in_bytes(&self) -> usize;
-}
-
 // ----------------------------------------------------------------------------
 
 /// A unique id per [`Tensor`].
@@ -365,23 +353,23 @@ pub struct Tensor {
     pub meter: Option<f32>,
 }
 
-impl TensorTrait for Tensor {
+impl Tensor {
     #[inline]
-    fn id(&self) -> TensorId {
+    pub fn id(&self) -> TensorId {
         self.tensor_id
     }
 
     #[inline]
-    fn shape(&self) -> &[TensorDimension] {
+    pub fn shape(&self) -> &[TensorDimension] {
         self.shape.as_slice()
     }
 
     #[inline]
-    fn num_dim(&self) -> usize {
+    pub fn num_dim(&self) -> usize {
         self.shape.len()
     }
 
-    fn is_shaped_like_an_image(&self) -> bool {
+    pub fn is_shaped_like_an_image(&self) -> bool {
         self.num_dim() == 2
             || self.num_dim() == 3 && {
                 matches!(
@@ -393,17 +381,17 @@ impl TensorTrait for Tensor {
     }
 
     #[inline]
-    fn is_vector(&self) -> bool {
+    pub fn is_vector(&self) -> bool {
         let shape = &self.shape;
         shape.len() == 1 || { shape.len() == 2 && (shape[0].size == 1 || shape[1].size == 1) }
     }
 
     #[inline]
-    fn meaning(&self) -> TensorDataMeaning {
+    pub fn meaning(&self) -> TensorDataMeaning {
         self.meaning
     }
 
-    fn get(&self, index: &[u64]) -> Option<TensorElement> {
+    pub fn get(&self, index: &[u64]) -> Option<TensorElement> {
         let mut stride: usize = 1;
         let mut offset: usize = 0;
         for (TensorDimension { size, .. 
}, index) in self.shape.iter().zip(index).rev() { @@ -429,11 +417,11 @@ impl TensorTrait for Tensor { } } - fn dtype(&self) -> TensorDataType { + pub fn dtype(&self) -> TensorDataType { self.data.dtype() } - fn size_in_bytes(&self) -> usize { + pub fn size_in_bytes(&self) -> usize { self.data.size_in_bytes() } } diff --git a/crates/re_sdk/src/lib.rs b/crates/re_sdk/src/lib.rs index f78991944b90..eca93ef69f16 100644 --- a/crates/re_sdk/src/lib.rs +++ b/crates/re_sdk/src/lib.rs @@ -75,8 +75,7 @@ pub mod components { EncodedMesh3D, InstanceKey, KeypointId, Label, LineStrip2D, LineStrip3D, Mat3x3, Mesh3D, MeshFormat, MeshId, Pinhole, Point2D, Point3D, Quaternion, Radius, RawMesh3D, Rect2D, Rigid3, Scalar, ScalarPlotProps, Size3D, Tensor, TensorData, TensorDataMeaning, - TensorDimension, TensorId, TensorTrait, TextEntry, Transform, Vec2D, Vec3D, Vec4D, - ViewCoordinates, + TensorDimension, TensorId, TextEntry, Transform, Vec2D, Vec3D, Vec4D, ViewCoordinates, }; } diff --git a/crates/re_tensor_ops/tests/tensor_tests.rs b/crates/re_tensor_ops/tests/tensor_tests.rs index f13893c3f58e..c04cd4ebad98 100644 --- a/crates/re_tensor_ops/tests/tensor_tests.rs +++ b/crates/re_tensor_ops/tests/tensor_tests.rs @@ -1,5 +1,5 @@ use re_log_types::component_types::{ - Tensor, TensorCastError, TensorData, TensorDataMeaning, TensorDimension, TensorId, TensorTrait, + Tensor, TensorCastError, TensorData, TensorDataMeaning, TensorDimension, TensorId, }; #[test] diff --git a/crates/re_viewer/src/misc/caches/mod.rs b/crates/re_viewer/src/misc/caches/mod.rs index db61a56109e2..e14d6968493e 100644 --- a/crates/re_viewer/src/misc/caches/mod.rs +++ b/crates/re_viewer/src/misc/caches/mod.rs @@ -2,7 +2,7 @@ mod mesh_cache; mod tensor_decode_cache; mod tensor_image_cache; -use re_log_types::component_types::{self, TensorTrait}; +use re_log_types::component_types::{self}; pub use tensor_image_cache::ColoredTensorView; /// Does memoization of different things for the immediate mode UI. 
diff --git a/crates/re_viewer/src/misc/caches/tensor_decode_cache.rs b/crates/re_viewer/src/misc/caches/tensor_decode_cache.rs index f555ef2d8a53..e942293e5f66 100644 --- a/crates/re_viewer/src/misc/caches/tensor_decode_cache.rs +++ b/crates/re_viewer/src/misc/caches/tensor_decode_cache.rs @@ -1,4 +1,4 @@ -use re_log_types::component_types::{Tensor, TensorDimension, TensorId, TensorTrait}; +use re_log_types::component_types::{Tensor, TensorDimension, TensorId}; #[derive(thiserror::Error, Clone, Debug)] pub enum TensorDecodeError { diff --git a/crates/re_viewer/src/misc/caches/tensor_image_cache.rs b/crates/re_viewer/src/misc/caches/tensor_image_cache.rs index b346dc779d85..30fce52003e1 100644 --- a/crates/re_viewer/src/misc/caches/tensor_image_cache.rs +++ b/crates/re_viewer/src/misc/caches/tensor_image_cache.rs @@ -4,7 +4,7 @@ use egui::{Color32, ColorImage}; use egui_extras::RetainedImage; use image::DynamicImage; use re_log_types::{ - component_types::{self, ClassId, Tensor, TensorData, TensorDataMeaning, TensorTrait}, + component_types::{self, ClassId, Tensor, TensorData, TensorDataMeaning}, MsgId, }; use re_renderer::{ diff --git a/crates/re_viewer/src/ui/data_ui/image.rs b/crates/re_viewer/src/ui/data_ui/image.rs index 63d5a67130b3..59547aeb2380 100644 --- a/crates/re_viewer/src/ui/data_ui/image.rs +++ b/crates/re_viewer/src/ui/data_ui/image.rs @@ -2,7 +2,7 @@ use egui::{ColorImage, Vec2}; use itertools::Itertools as _; use re_log_types::{ - component_types::{ClassId, Tensor, TensorDataMeaning, TensorTrait}, + component_types::{ClassId, Tensor, TensorDataMeaning}, TensorElement, }; diff --git a/crates/re_viewer/src/ui/space_view_heuristics.rs b/crates/re_viewer/src/ui/space_view_heuristics.rs index 54507209e2a6..80e66e303723 100644 --- a/crates/re_viewer/src/ui/space_view_heuristics.rs +++ b/crates/re_viewer/src/ui/space_view_heuristics.rs @@ -5,10 +5,7 @@ use itertools::Itertools; use nohash_hasher::IntSet; use re_arrow_store::{DataStore, LatestAtQuery, Timeline}; use re_data_store::{log_db::EntityDb, query_latest_single, ComponentName, EntityPath}; -use re_log_types::{ - component_types::{Tensor, TensorTrait}, - Component, -}; +use re_log_types::{component_types::Tensor, Component}; use crate::{ misc::{space_info::SpaceInfoCollection, ViewerContext}, diff --git a/crates/re_viewer/src/ui/view_bar_chart/scene.rs b/crates/re_viewer/src/ui/view_bar_chart/scene.rs index c22ca0f2ced9..829d5138f1ac 100644 --- a/crates/re_viewer/src/ui/view_bar_chart/scene.rs +++ b/crates/re_viewer/src/ui/view_bar_chart/scene.rs @@ -3,7 +3,7 @@ use std::collections::BTreeMap; use re_arrow_store::LatestAtQuery; use re_data_store::EntityPath; use re_log::warn_once; -use re_log_types::component_types::{self, InstanceKey, Tensor, TensorTrait as _}; +use re_log_types::component_types::{self, InstanceKey, Tensor}; use re_query::query_entity_with_primary; use crate::{misc::ViewerContext, ui::scene::SceneQuery}; diff --git a/crates/re_viewer/src/ui/view_category.rs b/crates/re_viewer/src/ui/view_category.rs index d37be10dd089..9a23512e42af 100644 --- a/crates/re_viewer/src/ui/view_category.rs +++ b/crates/re_viewer/src/ui/view_category.rs @@ -2,8 +2,7 @@ use re_arrow_store::{LatestAtQuery, TimeInt}; use re_data_store::{EntityPath, LogDb, Timeline}; use re_log_types::{ component_types::{ - Box3D, LineStrip2D, LineStrip3D, Point2D, Point3D, Rect2D, Scalar, Tensor, TensorTrait, - TextEntry, + Box3D, LineStrip2D, LineStrip3D, Point2D, Point3D, Rect2D, Scalar, Tensor, TextEntry, }, Arrow3D, Component, Mesh3D, 
Transform, }; diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs index e62501b5e0ee..62d7cde09458 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs @@ -6,7 +6,7 @@ use itertools::Itertools; use re_data_store::{query_latest_single, EntityPath, EntityProperties, InstancePathHash}; use re_log_types::{ - component_types::{ColorRGBA, InstanceKey, Tensor, TensorData, TensorDataMeaning, TensorTrait}, + component_types::{ColorRGBA, InstanceKey, Tensor, TensorData, TensorDataMeaning}, Component, Transform, }; use re_query::{query_primary_with_history, EntityView, QueryError}; diff --git a/crates/re_viewer/src/ui/view_spatial/ui.rs b/crates/re_viewer/src/ui/view_spatial/ui.rs index 8542ecc881cf..6e5cbb887a67 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui.rs @@ -218,7 +218,6 @@ impl ViewSpatialState { if tensor.meaning == TensorDataMeaning::Depth { if properties.depth_from_world_scale.is_auto() { let auto = tensor.meter.unwrap_or_else(|| { - use re_log_types::component_types::TensorTrait as _; if tensor.dtype().is_integer() { 1000.0 } else { diff --git a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs index df3edcee23a4..bb4c37eb9eb7 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs @@ -4,7 +4,6 @@ use egui::{ }; use macaw::IsoTransform; use re_data_store::EntityPath; -use re_log_types::component_types::TensorTrait; use re_renderer::view_builder::{TargetConfiguration, ViewBuilder}; use super::{ diff --git a/crates/re_viewer/src/ui/view_tensor/scene.rs b/crates/re_viewer/src/ui/view_tensor/scene.rs index e314849ced54..3f5c90d9f741 100644 --- a/crates/re_viewer/src/ui/view_tensor/scene.rs +++ b/crates/re_viewer/src/ui/view_tensor/scene.rs @@ -1,6 +1,6 @@ use re_arrow_store::LatestAtQuery; use re_data_store::{EntityPath, EntityProperties, InstancePath}; -use re_log_types::component_types::{InstanceKey, Tensor, TensorTrait}; +use re_log_types::component_types::{InstanceKey, Tensor}; use re_query::{query_entity_with_primary, EntityView, QueryError}; use crate::{misc::ViewerContext, ui::SceneQuery}; diff --git a/crates/re_viewer/src/ui/view_tensor/ui.rs b/crates/re_viewer/src/ui/view_tensor/ui.rs index dd0ec972fd17..57cfbcb413e8 100644 --- a/crates/re_viewer/src/ui/view_tensor/ui.rs +++ b/crates/re_viewer/src/ui/view_tensor/ui.rs @@ -5,7 +5,7 @@ use egui::{epaint::TextShape, Color32, ColorImage, NumExt as _, Vec2}; use ndarray::{Axis, Ix2}; use re_log_types::{ - component_types::{self, Tensor, TensorTrait}, + component_types::{self, Tensor}, TensorDataType, }; use re_tensor_ops::dimension_mapping::{DimensionMapping, DimensionSelector}; diff --git a/rerun_py/src/python_bridge.rs b/rerun_py/src/python_bridge.rs index 02926a8b51ce..738059999692 100644 --- a/rerun_py/src/python_bridge.rs +++ b/rerun_py/src/python_bridge.rs @@ -24,7 +24,7 @@ pub use rerun::{ EncodedMesh3D, InstanceKey, KeypointId, Label, LineStrip2D, LineStrip3D, Mat3x3, Mesh3D, MeshFormat, MeshId, Pinhole, Point2D, Point3D, Quaternion, Radius, RawMesh3D, Rect2D, Rigid3, Scalar, ScalarPlotProps, Size3D, Tensor, TensorData, TensorDimension, TensorId, - TensorTrait, TextEntry, Transform, Vec2D, Vec3D, Vec4D, ViewCoordinates, + TextEntry, Transform, Vec2D, Vec3D, Vec4D, ViewCoordinates, }, 
coordinates::{Axis3, Handedness, Sign, SignedAxis3}, }; From 5da248dac8a929fb4b119625254662607655570d Mon Sep 17 00:00:00 2001 From: Emil Ernerfeldt Date: Wed, 12 Apr 2023 09:21:28 +0200 Subject: [PATCH 37/89] End-to-end testing of python logging -> store ingestion (#1817) * Sort the arguments to `rerun` * Pass on `LogMsg::Goodbye` just like any other message * Add `rerun --test-receive` * `just py-build --quiet` is now possible * Add scripts/run_python_e2e_test.py * replace `cargo r -p rerun` with `python3 -m rerun` * lint and explain choice of examples * Add to CI * check returncode --- .github/workflows/python.yml | 3 + Cargo.lock | 1 + crates/re_sdk_comms/src/server.rs | 3 +- crates/rerun/Cargo.toml | 1 + crates/rerun/src/run.rs | 103 ++++++++++++++++++++++-------- justfile | 5 +- scripts/run_python_e2e_test.py | 79 +++++++++++++++++++++++ 7 files changed, 165 insertions(+), 30 deletions(-) create mode 100755 scripts/run_python_e2e_test.py diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index 80c2f14c1005..70a92fbd78e7 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -216,6 +216,9 @@ jobs: - name: Run tests run: cd rerun_py/tests && pytest + - name: Run e2e test + run: scripts/run_python_e2e_test.py + - name: Unpack the wheel shell: bash run: | diff --git a/Cargo.lock b/Cargo.lock index c7b167fedbe4..81e7f2a90a21 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4313,6 +4313,7 @@ dependencies = [ "re_analytics", "re_build_build_info", "re_build_info", + "re_data_store", "re_format", "re_log", "re_log_encoding", diff --git a/crates/re_sdk_comms/src/server.rs b/crates/re_sdk_comms/src/server.rs index fd1ceca50ea0..75f766d40b7c 100644 --- a/crates/re_sdk_comms/src/server.rs +++ b/crates/re_sdk_comms/src/server.rs @@ -158,7 +158,8 @@ async fn run_client( let msg = crate::decode_log_msg(&packet)?; if matches!(msg, LogMsg::Goodbye(_)) { - re_log::debug!("Client sent goodbye message."); + re_log::debug!("Received goodbye message."); + tx.send(msg)?; return Ok(()); } diff --git a/crates/rerun/Cargo.toml b/crates/rerun/Cargo.toml index 643ca8b25deb..15e31264d47f 100644 --- a/crates/rerun/Cargo.toml +++ b/crates/rerun/Cargo.toml @@ -64,6 +64,7 @@ web_viewer = [ [dependencies] re_build_info.workspace = true +re_data_store.workspace = true re_format.workspace = true re_log_encoding = { workspace = true, features = ["decoder", "encoder"] } re_log_types.workspace = true diff --git a/crates/rerun/src/run.rs b/crates/rerun/src/run.rs index 691b3c359250..4dbbb2e87617 100644 --- a/crates/rerun/src/run.rs +++ b/crates/rerun/src/run.rs @@ -36,58 +36,67 @@ use crate::web_viewer::host_web_viewer; #[derive(Debug, clap::Parser)] #[clap(author, about)] struct Args { - /// Print version and quit + // Note: arguments are sorted lexicographically for nicer `--help` message: + #[command(subcommand)] + commands: Option, + + /// Set a maximum input latency, e.g. "200ms" or "10s". + /// + /// If we go over this, we start dropping packets. + /// + /// The default is no limit, which means Rerun might eat more and more memory, + /// and have longer and longer latency, if you are logging data faster + /// than Rerun can index it. #[clap(long)] - version: bool, + drop_at_latency: Option, - /// Either a path to a `.rrd` file to load, an http url to an `.rrd` file, - /// or a websocket url to a Rerun Server from which to read data + /// An upper limit on how much memory the Rerun Viewer should use. 
/// - /// If none is given, a server will be hosted which the Rerun SDK can connect to. - url_or_path: Option, + /// When this limit is used, Rerun will purge the oldest data. + /// + /// Example: `16GB` + #[clap(long)] + memory_limit: Option, /// What TCP port do we listen to (for SDK:s to connect to)? #[cfg(feature = "server")] #[clap(long, default_value_t = re_sdk_comms::DEFAULT_SERVER_PORT)] port: u16, - /// Start the viewer in the browser (instead of locally). - /// Requires Rerun to have been compiled with the 'web_viewer' feature. + /// Start with the puffin profiler running. #[clap(long)] - web_viewer: bool, + profile: bool, /// Stream incoming log events to an .rrd file at the given path. #[clap(long)] save: Option, - /// Start with the puffin profiler running. - #[clap(long)] - profile: bool, - /// Exit with a non-zero exit code if any warning or error is logged. Useful for tests. #[clap(long)] strict: bool, - /// An upper limit on how much memory the Rerun Viewer should use. + /// Ingest data and then quit once the goodbye message has been received. /// - /// When this limit is used, Rerun will purge the oldest data. + /// Used for testing together with the `--strict` argument. /// - /// Example: `16GB` + /// Fails if no messages are received, or if no messages are received within a dozen or so seconds. #[clap(long)] - memory_limit: Option, + test_receive: bool, - /// Set a maximum input latency, e.g. "200ms" or "10s". - /// - /// If we go over this, we start dropping packets. + /// Either a path to a `.rrd` file to load, an http url to an `.rrd` file, + /// or a websocket url to a Rerun Server from which to read data /// - /// The default is no limit, which means Rerun might eat more and more memory, - /// and have longer and longer latency, if you are logging data faster - /// than Rerun can index it. + /// If none is given, a server will be hosted which the Rerun SDK can connect to. + url_or_path: Option, + + /// Print version and quit #[clap(long)] - drop_at_latency: Option, + version: bool, - #[command(subcommand)] - commands: Option, + /// Start the viewer in the browser (instead of locally). + /// Requires Rerun to have been compiled with the 'web_viewer' feature. + #[clap(long)] + web_viewer: bool, } #[derive(Debug, Clone, Subcommand)] @@ -329,7 +338,9 @@ async fn run_impl( // Now what do we do with the data? - if let Some(rrd_path) = args.save { + if args.test_receive { + receive_into_log_db(&rx).map(|_db| ()) + } else if let Some(rrd_path) = args.save { Ok(stream_to_rrd(&rx, &rrd_path.into(), &shutdown_bool)?) } else if args.web_viewer { #[cfg(feature = "web_viewer")] @@ -404,6 +415,44 @@ async fn run_impl( } } +fn receive_into_log_db(rx: &Receiver) -> anyhow::Result { + use re_smart_channel::RecvTimeoutError; + + re_log::info!("Receiving messages into a LogDb…"); + + let mut db = re_data_store::LogDb::default(); + + let mut num_messages = 0; + + let timeout = std::time::Duration::from_secs(12); + + loop { + match rx.recv_timeout(timeout) { + Ok(msg) => { + re_log::info_once!("Received first message."); + let is_goodbye = matches!(msg, re_log_types::LogMsg::Goodbye(_)); + db.add(msg)?; + num_messages += 1; + if is_goodbye { + db.entity_db.data_store.sanity_check()?; + anyhow::ensure!(0 < num_messages, "No messages received"); + re_log::info!("Successfully ingested {num_messages} messages."); + return Ok(db); + } + } + Err(RecvTimeoutError::Timeout) => { + anyhow::bail!( + "Didn't receive any messages within {} seconds. 
Giving up.",
+                    timeout.as_secs()
+                );
+            }
+            Err(RecvTimeoutError::Disconnected) => {
+                anyhow::bail!("Channel disconnected without a Goodbye message.");
+            }
+        }
+    }
+}
+
 enum ArgumentCategory {
     /// A remote RRD file, served over http.
     RrdHttpUrl(String),
diff --git a/justfile b/justfile
index 8c0378b036c7..fac1282d06ce 100644
--- a/justfile
+++ b/justfile
@@ -38,14 +38,15 @@ py-run-all: py-build
     fd main.py | xargs -I _ sh -c "echo _ && python3 _"
 
 # Build and install the package into the venv
-py-build:
+py-build *ARGS:
     #!/usr/bin/env bash
     set -euo pipefail
     unset CONDA_PREFIX && \
         source venv/bin/activate && \
         maturin develop \
             -m rerun_py/Cargo.toml \
-            --extras="tests"
+            --extras="tests" \
+            {{ARGS}}
 
 # Run autoformatting
 py-format:
diff --git a/scripts/run_python_e2e_test.py b/scripts/run_python_e2e_test.py
new file mode 100755
index 000000000000..2a67f656a98e
--- /dev/null
+++ b/scripts/run_python_e2e_test.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+
+"""
+Run some of our Python examples, piping their log stream to the rerun process.
+
+This is an end-to-end test covering:
+* Our Python API
+* LogMsg encoding/decoding
+* Arrow encoding/decoding
+* TCP connection
+* Data store ingestion
+"""
+
+import os
+import subprocess
+import sys
+import time
+
+
+def main() -> None:
+    build_env = os.environ.copy()
+    if "RUST_LOG" in build_env:
+        del build_env["RUST_LOG"]  # The user likely only meant it for the actual tests; not the setup
+
+    print("----------------------------------------------------------")
+    print("Building rerun-sdk…")
+    start_time = time.time()
+    subprocess.Popen(["just", "py-build", "--quiet"], env=build_env).wait()
+    elapsed = time.time() - start_time
+    print(f"rerun-sdk built in {elapsed:.1f} seconds")
+    print("")
+
+    examples = [
+        # Trivial examples that don't require weird dependencies, or downloading data
+        "examples/python/api_demo/main.py",
+        "examples/python/car/main.py",
+        "examples/python/multithreading/main.py",
+        "examples/python/plots/main.py",
+        "examples/python/text_logging/main.py",
+    ]
+    for example in examples:
+        print("----------------------------------------------------------")
+        print(f"Testing {example}…\n")
+        start_time = time.time()
+        run_example(example)
+        elapsed = time.time() - start_time
+        print(f"{example} done in {elapsed:.1f} seconds")
+        print()
+
+    print()
+    print("All tests passed successfully!")
+
+
+def run_example(example: str) -> None:
+    port = 9752
+
+    # sys.executable: the absolute path of the executable binary for the Python interpreter
+    python_executable = sys.executable
+    if python_executable is None:
+        python_executable = "python3"
+
+    rerun_process = subprocess.Popen(
+        [python_executable, "-m", "rerun", "--port", str(port), "--strict", "--test-receive"]
+    )
+    time.sleep(0.3)  # Wait for rerun server to start to remove a logged warning
+
+    python_process = subprocess.Popen([python_executable, example, "--connect", "--addr", f"127.0.0.1:{port}"])
+
+    print("Waiting for python process to finish…")
+    returncode = python_process.wait(timeout=30)
+    assert returncode == 0, f"python process exited with error code {returncode}"
+
+    print("Waiting for rerun process to finish…")
+    returncode = rerun_process.wait(timeout=30)
+    assert returncode == 0, f"rerun process exited with error code {returncode}"
+
+
+if __name__ == "__main__":
+    main()

From c472b07edb94e28ef893f306e25b539138f7122b Mon Sep 17 00:00:00 2001
From: Emil Ernerfeldt
Date: Wed, 12 Apr 2023 10:16:50 +0200
Subject: [PATCH 38/89] Fix e2e test on CI: Don't try to re-build 
rerun-sdk (#1821)

---
 .github/workflows/python.yml   |  2 +-
 scripts/run_python_e2e_test.py | 29 ++++++++++++++++-----------
 2 files changed, 19 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
index 70a92fbd78e7..2635be7ba45b 100644
--- a/.github/workflows/python.yml
+++ b/.github/workflows/python.yml
@@ -217,7 +217,7 @@ jobs:
         run: cd rerun_py/tests && pytest
 
       - name: Run e2e test
-        run: scripts/run_python_e2e_test.py
+        run: scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed
 
       - name: Unpack the wheel
         shell: bash
         run: |
diff --git a/scripts/run_python_e2e_test.py b/scripts/run_python_e2e_test.py
index 2a67f656a98e..78c03ed9c7e4 100755
--- a/scripts/run_python_e2e_test.py
+++ b/scripts/run_python_e2e_test.py
@@ -11,6 +11,7 @@
 * Data store ingestion
 """
 
+import argparse
 import os
 import subprocess
 import sys
@@ -18,17 +19,23 @@
 
 
 def main() -> None:
-    build_env = os.environ.copy()
-    if "RUST_LOG" in build_env:
-        del build_env["RUST_LOG"]  # The user likely only meant it for the actual tests; not the setup
-
-    print("----------------------------------------------------------")
-    print("Building rerun-sdk…")
-    start_time = time.time()
-    subprocess.Popen(["just", "py-build", "--quiet"], env=build_env).wait()
-    elapsed = time.time() - start_time
-    print(f"rerun-sdk built in {elapsed:.1f} seconds")
-    print("")
+    parser = argparse.ArgumentParser(description="Runs end-to-end tests of the Rerun Python SDK.")
+    parser.add_argument("--no-build", action="store_true", help="Skip building rerun-sdk")
+
+    if parser.parse_args().no_build:
+        print("Skipping building rerun-sdk - assuming it is already built and up-to-date!")
+    else:
+        build_env = os.environ.copy()
+        if "RUST_LOG" in build_env:
+            del build_env["RUST_LOG"]  # The user likely only meant it for the actual tests; not the setup
+
+        print("----------------------------------------------------------")
+        print("Building rerun-sdk…")
+        start_time = time.time()
+        subprocess.Popen(["just", "py-build", "--quiet"], env=build_env).wait()
+        elapsed = time.time() - start_time
+        print(f"rerun-sdk built in {elapsed:.1f} seconds")
+        print("")
 
     examples = [
         # Trivial examples that don't require weird dependencies, or downloading data
         "examples/python/api_demo/main.py",
         "examples/python/car/main.py",
         "examples/python/multithreading/main.py",
         "examples/python/plots/main.py",
         "examples/python/text_logging/main.py",
     ]
     for example in examples:

From f7cdc667f5d34648abccf5ee55acdddcd0acb8bd Mon Sep 17 00:00:00 2001
From: Andreas Reich
Date: Wed, 12 Apr 2023 10:18:49 +0200
Subject: [PATCH 39/89] Use gpu picking for points, streamline/share picking
 code some more (#1814)

* use gpu picking for picking points

* gpu based picking no longer works like a fallback but integrates with other picking sources

* fix incorrect cursor rounding for picking

* refactor picking context to be a pub struct with exposed state

* unify ui picking method for 2d & 3d space views

* less indentation for picking method

* picking rect size is dynamically chosen

* fix accidental z scaling in projection correction for picking & make cropped_projection_from_projection easier to read
---
 crates/re_renderer/examples/2d.rs             |   2 +-
 crates/re_renderer/examples/depth_cloud.rs    |   2 +-
 crates/re_renderer/examples/framework.rs      |   8 +-
 crates/re_renderer/examples/multiview.rs      |   2 +-
 crates/re_renderer/examples/picking.rs        |   2 +-
 .../src/draw_phases/picking_layer.rs          |  10 +-
 crates/re_renderer/src/point_cloud_builder.rs | 125 ++----
 crates/re_renderer/src/rect.rs                |  12 +-
 .../re_renderer/src/renderer/debug_overlay.rs |   2 +-
 .../re_renderer/src/renderer/point_cloud.rs   |   4 +-
 crates/re_viewer/src/ui/view_spatial/eye.rs   |  18 +- 
.../src/ui/view_spatial/scene/mod.rs | 28 +- .../src/ui/view_spatial/scene/picking.rs | 382 ++++++++++-------- .../src/ui/view_spatial/scene/primitives.rs | 2 +- .../ui/view_spatial/scene/scene_part/mod.rs | 50 ++- .../view_spatial/scene/scene_part/points2d.rs | 29 +- .../view_spatial/scene/scene_part/points3d.rs | 58 ++- crates/re_viewer/src/ui/view_spatial/ui.rs | 203 +++++++++- crates/re_viewer/src/ui/view_spatial/ui_2d.rs | 172 +------- crates/re_viewer/src/ui/view_spatial/ui_3d.rs | 146 +------ 20 files changed, 598 insertions(+), 659 deletions(-) diff --git a/crates/re_renderer/examples/2d.rs b/crates/re_renderer/examples/2d.rs index cb4c25035645..a9ea6e7e5638 100644 --- a/crates/re_renderer/examples/2d.rs +++ b/crates/re_renderer/examples/2d.rs @@ -149,7 +149,7 @@ impl framework::Example for Render2D { // Moving the windows to a high dpi screen makes the second one bigger. // Also, it looks different under perspective projection. // The third point is automatic thickness which is determined by the point renderer implementation. - let mut point_cloud_builder = PointCloudBuilder::<()>::new(re_ctx); + let mut point_cloud_builder = PointCloudBuilder::new(re_ctx); point_cloud_builder .batch("points") .add_points_2d( diff --git a/crates/re_renderer/examples/depth_cloud.rs b/crates/re_renderer/examples/depth_cloud.rs index fb6a18821ace..ce80f17594c5 100644 --- a/crates/re_renderer/examples/depth_cloud.rs +++ b/crates/re_renderer/examples/depth_cloud.rs @@ -98,7 +98,7 @@ impl RenderDepthClouds { }) .multiunzip(); - let mut builder = PointCloudBuilder::<()>::new(re_ctx); + let mut builder = PointCloudBuilder::new(re_ctx); builder .batch("backprojected point cloud") .add_points(num_points as _, points.into_iter()) diff --git a/crates/re_renderer/examples/framework.rs b/crates/re_renderer/examples/framework.rs index 65d16fa3cf30..47e0b42a3212 100644 --- a/crates/re_renderer/examples/framework.rs +++ b/crates/re_renderer/examples/framework.rs @@ -210,10 +210,10 @@ impl Application { Event::WindowEvent { event: WindowEvent::CursorMoved { position, .. }, .. - } => self.example.on_cursor_moved(glam::uvec2( - position.x.round() as u32, - position.y.round() as u32, - )), + } => self + .example + // Don't round the position: The entire range from 0 to excluding 1 should fall into pixel coordinate 0! 
+                .on_cursor_moved(glam::uvec2(position.x as u32, position.y as u32)),
             Event::WindowEvent {
                 event:
                     WindowEvent::ScaleFactorChanged {
diff --git a/crates/re_renderer/examples/multiview.rs b/crates/re_renderer/examples/multiview.rs
index 24c76d4c9d5f..35cb61cd2369 100644
--- a/crates/re_renderer/examples/multiview.rs
+++ b/crates/re_renderer/examples/multiview.rs
@@ -316,7 +316,7 @@ impl Example for Multiview {
         let skybox = GenericSkyboxDrawData::new(re_ctx);
         let lines = build_lines(re_ctx, seconds_since_startup);
 
-        let mut builder = PointCloudBuilder::<()>::new(re_ctx);
+        let mut builder = PointCloudBuilder::new(re_ctx);
         builder
             .batch("Random Points")
             .world_from_obj(glam::Mat4::from_rotation_x(seconds_since_startup))
diff --git a/crates/re_renderer/examples/picking.rs b/crates/re_renderer/examples/picking.rs
index 5344e044d26d..f304b241ed1a 100644
--- a/crates/re_renderer/examples/picking.rs
+++ b/crates/re_renderer/examples/picking.rs
@@ -157,7 +157,7 @@ impl framework::Example for Picking {
             .schedule_picking_rect(re_ctx, picking_rect, READBACK_IDENTIFIER, (), false)
             .unwrap();
 
-        let mut point_builder = PointCloudBuilder::<()>::new(re_ctx);
+        let mut point_builder = PointCloudBuilder::new(re_ctx);
         for (i, point_set) in self.point_sets.iter().enumerate() {
             point_builder
                 .batch(format!("Random Points {i}"))
diff --git a/crates/re_renderer/src/draw_phases/picking_layer.rs b/crates/re_renderer/src/draw_phases/picking_layer.rs
index 69b125529b2d..dc5cf38f033f 100644
--- a/crates/re_renderer/src/draw_phases/picking_layer.rs
+++ b/crates/re_renderer/src/draw_phases/picking_layer.rs
@@ -224,7 +224,7 @@ impl PickingLayerProcessor {
             DepthReadbackWorkaround::new(ctx, picking_rect.extent, picking_depth_target.handle)
         });
 
-        let rect_min = picking_rect.top_left_corner.as_vec2();
+        let rect_min = picking_rect.left_top.as_vec2();
         let rect_max = rect_min + picking_rect.extent.as_vec2();
         let screen_resolution = screen_resolution.as_vec2();
         // y axis is flipped in NDC, therefore we need to flip the y axis of the rect.
         let rect_min_ndc =
             pixel_coord_to_ndc(glam::vec2(rect_min.x, rect_max.y), screen_resolution);
         let rect_max_ndc =
             pixel_coord_to_ndc(glam::vec2(rect_max.x, rect_min.y), screen_resolution);
-        let rect_center_ndc = (rect_min_ndc + rect_max_ndc) * 0.5;
-        let cropped_projection_from_projection =
-            glam::Mat4::from_scale(2.0 / (rect_max_ndc - rect_min_ndc).extend(1.0))
-                * glam::Mat4::from_translation(-rect_center_ndc.extend(0.0));
+        let scale = 2.0 / (rect_max_ndc - rect_min_ndc);
+        let translation = -0.5 * (rect_min_ndc + rect_max_ndc);
+        let cropped_projection_from_projection = glam::Mat4::from_scale(scale.extend(1.0))
+            * glam::Mat4::from_translation(translation.extend(0.0));
 
         // Setup frame uniform buffer
         let previous_projection_from_world: glam::Mat4 =
diff --git a/crates/re_renderer/src/point_cloud_builder.rs b/crates/re_renderer/src/point_cloud_builder.rs
index 84ef0e0187b9..3596f6f3d72d 100644
--- a/crates/re_renderer/src/point_cloud_builder.rs
+++ b/crates/re_renderer/src/point_cloud_builder.rs
@@ -9,23 +9,19 @@ use crate::{
 };
 
 /// Builder for point clouds, making it easy to create [`crate::renderer::PointCloudDrawData`].
-pub struct PointCloudBuilder<PerPointUserData> {
-    // Size of `point`/color`/`per_point_user_data` must be equal.
+pub struct PointCloudBuilder {
+    // Size of `point`/`color` must be equal. 
    pub vertices: Vec<PointCloudVertex>,
     pub(crate) color_buffer: CpuWriteGpuReadBuffer<Color32>,
     pub(crate) picking_instance_ids_buffer: CpuWriteGpuReadBuffer<PickingLayerInstanceId>,
-    pub user_data: Vec<PerPointUserData>,
     pub(crate) batches: Vec<PointCloudBatchInfo>,
     pub(crate) radius_boost_in_ui_points_for_outlines: f32,
 }
 
-impl<PerPointUserData> PointCloudBuilder<PerPointUserData>
-where
-    PerPointUserData: Default + Copy,
-{
+impl PointCloudBuilder {
     pub fn new(ctx: &RenderContext) -> Self {
         const RESERVE_SIZE: usize = 512;
@@ -48,7 +44,6 @@ where
             vertices: Vec::with_capacity(RESERVE_SIZE),
             color_buffer,
             picking_instance_ids_buffer,
-            user_data: Vec::with_capacity(RESERVE_SIZE),
             batches: Vec::with_capacity(16),
             radius_boost_in_ui_points_for_outlines: 0.0,
         }
@@ -65,10 +60,7 @@ where
 
     /// Start of a new batch.
     #[inline]
-    pub fn batch(
-        &mut self,
-        label: impl Into<DebugLabel>,
-    ) -> PointCloudBatchBuilder<'_, PerPointUserData> {
+    pub fn batch(&mut self, label: impl Into<DebugLabel>) -> PointCloudBatchBuilder<'_> {
         self.batches.push(PointCloudBatchInfo {
             label: label.into(),
             world_from_obj: glam::Mat4::IDENTITY,
@@ -105,30 +97,6 @@ where
         })
     }
 
-    // Iterate over all batches, yielding the batch info and a point vertex iterator zipped with its user data.
-    pub fn iter_vertices_and_userdata_by_batch(
-        &self,
-    ) -> impl Iterator<
-        Item = (
-            &PointCloudBatchInfo,
-            impl Iterator<Item = (&PointCloudVertex, &PerPointUserData)>,
-        ),
-    > {
-        let mut vertex_offset = 0;
-        self.batches.iter().map(move |batch| {
-            let out = (
-                batch,
-                self.vertices
-                    .iter()
-                    .zip(self.user_data.iter())
-                    .skip(vertex_offset)
-                    .take(batch.point_count as usize),
-            );
-            vertex_offset += batch.point_count as usize;
-            out
-        })
-    }
-
     /// Finalizes the builder and returns a point cloud draw data with all the points added so far.
     pub fn to_draw_data(
         self,
@@ -138,16 +106,9 @@ where
     }
 }
 
-pub struct PointCloudBatchBuilder<'a, PerPointUserData>(
-    &'a mut PointCloudBuilder<PerPointUserData>,
-)
-where
-    PerPointUserData: Default + Copy;
+pub struct PointCloudBatchBuilder<'a>(&'a mut PointCloudBuilder);
 
-impl<'a, PerPointUserData> Drop for PointCloudBatchBuilder<'a, PerPointUserData>
-where
-    PerPointUserData: Default + Copy,
-{
+impl<'a> Drop for PointCloudBatchBuilder<'a> {
     fn drop(&mut self) {
         // Remove batch again if it wasn't actually used.
         if self.0.batches.last().unwrap().point_count == 0 {
@@ -157,10 +118,7 @@ where
     }
 }
 
-impl<'a, PerPointUserData> PointCloudBatchBuilder<'a, PerPointUserData>
-where
-    PerPointUserData: Default + Copy,
-{
+impl<'a> PointCloudBatchBuilder<'a> {
     #[inline]
     fn batch_mut(&mut self) -> &mut PointCloudBatchInfo {
         self.0
@@ -200,13 +158,6 @@ where
                 self.0.vertices.len() - self.0.picking_instance_ids_buffer.num_written(),
             ));
         }
-
-        if self.0.user_data.len() < self.0.vertices.len() {
-            self.0.user_data.extend(
-                std::iter::repeat(PerPointUserData::default())
-                    .take(self.0.vertices.len() - self.0.user_data.len()),
-            );
-        }
     }
 
     #[inline]
@@ -222,7 +173,7 @@ where
         &mut self,
         size_hint: usize,
         positions: impl Iterator<Item = glam::Vec3>,
-    ) -> PointsBuilder<'_, PerPointUserData> {
+    ) -> PointsBuilder<'_> {
         // TODO(jleibs): Figure out if we can plumb-through proper support for `Iterator::size_hints()`
         // or potentially make `FixedSizedIterator` work correctly. 
This should be possible since the
        // underlying arrow structures are of known-size, but carries some complexity with the amount of
@@ -232,7 +183,6 @@ where
         self.extend_defaults();
 
         debug_assert_eq!(self.0.vertices.len(), self.0.color_buffer.num_written());
-        debug_assert_eq!(self.0.vertices.len(), self.0.user_data.len());
 
         let old_size = self.0.vertices.len();
 
@@ -245,8 +195,6 @@ where
         let num_points = self.0.vertices.len() - old_size;
         self.batch_mut().point_count += num_points as u32;
 
-        self.0.user_data.reserve(num_points);
-
         let new_range = old_size..self.0.vertices.len();
 
         let max_points = self.0.vertices.len();
@@ -256,7 +204,6 @@ where
             max_points,
             colors: &mut self.0.color_buffer,
             picking_instance_ids: &mut self.0.picking_instance_ids_buffer,
-            user_data: &mut self.0.user_data,
             additional_outline_mask_ids: &mut self
                 .0
                 .batches
@@ -268,24 +215,22 @@ where
     }
 
     #[inline]
-    pub fn add_point(&mut self, position: glam::Vec3) -> PointBuilder<'_, PerPointUserData> {
+    pub fn add_point(&mut self, position: glam::Vec3) -> PointBuilder<'_> {
         self.extend_defaults();
 
         debug_assert_eq!(self.0.vertices.len(), self.0.color_buffer.num_written());
-        debug_assert_eq!(self.0.vertices.len(), self.0.user_data.len());
 
         let vertex_index = self.0.vertices.len() as u32;
         self.0.vertices.push(PointCloudVertex {
             position,
             radius: Size::AUTO,
         });
-        self.0.user_data.push(Default::default());
         self.batch_mut().point_count += 1;
 
         PointBuilder {
             vertex: self.0.vertices.last_mut().unwrap(),
             color: &mut self.0.color_buffer,
-            user_data: self.0.user_data.last_mut().unwrap(),
+            picking_instance_id: &mut self.0.picking_instance_ids_buffer,
             vertex_index,
             additional_outline_mask_ids: &mut self
                 .0
@@ -308,13 +253,13 @@ where
         &mut self,
         size_hint: usize,
         positions: impl Iterator,
-    ) -> PointsBuilder<'_, PerPointUserData> {
+    ) -> PointsBuilder<'_> {
         self.add_points(size_hint, positions.map(|p| p.extend(0.0)))
     }
 
     /// Adds a single 2D point. Uses an autogenerated depth value.
     #[inline]
-    pub fn add_point_2d(&mut self, position: glam::Vec2) -> PointBuilder<'_, PerPointUserData> {
+    pub fn add_point_2d(&mut self, position: glam::Vec2) -> PointBuilder<'_> {
         self.add_point(position.extend(0.0))
     }
 
@@ -331,19 +276,17 @@ where
 }
 
 // TODO(andreas): Should remove single-point builder, practically this never makes sense as we're almost always dealing with arrays of points.
-pub struct PointBuilder<'a, PerPointUserData> {
+pub struct PointBuilder<'a> {
     vertex: &'a mut PointCloudVertex,
     color: &'a mut CpuWriteGpuReadBuffer,
-    user_data: &'a mut PerPointUserData,
+    picking_instance_id: &'a mut CpuWriteGpuReadBuffer,
     vertex_index: u32,
+
     additional_outline_mask_ids: &'a mut Vec<(std::ops::Range, OutlineMaskPreference)>,
     outline_mask_id: OutlineMaskPreference,
 }
 
-impl<'a, PerPointUserData> PointBuilder<'a, PerPointUserData>
-where
-    PerPointUserData: Clone,
-{
+impl<'a> PointBuilder<'a> {
     #[inline]
     pub fn radius(self, radius: Size) -> Self {
         self.vertex.radius = radius;
@@ -357,21 +300,24 @@ where
         self
     }
 
-    pub fn user_data(self, data: PerPointUserData) -> Self {
-        *self.user_data = data;
-        self
-    }
-
     /// Pushes additional outline mask ids for this point
     ///
     /// Prefer the `overall_outline_mask_ids` setting to set the outline mask ids for the entire batch whenever possible!
+    #[inline]
     pub fn outline_mask_id(mut self, outline_mask_id: OutlineMaskPreference) -> Self {
         self.outline_mask_id = outline_mask_id;
         self
     }
+
+    /// This must not be called more than once. 
+ #[inline] + pub fn picking_instance_id(self, picking_instance_id: PickingLayerInstanceId) -> Self { + self.picking_instance_id.push(picking_instance_id); + self + } } -impl<'a, PerPointUserData> Drop for PointBuilder<'a, PerPointUserData> { +impl<'a> Drop for PointBuilder<'a> { fn drop(&mut self) { if self.outline_mask_id.is_some() { self.additional_outline_mask_ids.push(( @@ -382,21 +328,17 @@ impl<'a, PerPointUserData> Drop for PointBuilder<'a, PerPointUserData> { } } -pub struct PointsBuilder<'a, PerPointUserData> { +pub struct PointsBuilder<'a> { // Vertices is a slice, which radii will update vertices: &'a mut [PointCloudVertex], max_points: usize, colors: &'a mut CpuWriteGpuReadBuffer, picking_instance_ids: &'a mut CpuWriteGpuReadBuffer, - user_data: &'a mut Vec, additional_outline_mask_ids: &'a mut Vec<(std::ops::Range, OutlineMaskPreference)>, start_vertex_index: u32, } -impl<'a, PerPointUserData> PointsBuilder<'a, PerPointUserData> -where - PerPointUserData: Clone, -{ +impl<'a> PointsBuilder<'a> { /// Assigns radii to all points. /// /// This mustn't call this more than once. @@ -440,19 +382,6 @@ where self } - /// Assigns user data for all points in this builder. - /// - /// This mustn't call this more than once. - /// - /// User data is currently not available on the GPU. - #[inline] - pub fn user_data(self, data: impl Iterator) -> Self { - crate::profile_function!(); - self.user_data - .extend(data.take(self.max_points - self.user_data.len())); - self - } - /// Pushes additional outline mask ids for a specific range of points. /// The range is relative to this builder's range, not the entire batch. /// diff --git a/crates/re_renderer/src/rect.rs b/crates/re_renderer/src/rect.rs index 60c48ea82ae4..8b70e81ac357 100644 --- a/crates/re_renderer/src/rect.rs +++ b/crates/re_renderer/src/rect.rs @@ -4,7 +4,7 @@ #[derive(Clone, Copy, Debug)] pub struct IntRect { /// The top left corner of the rectangle. - pub top_left_corner: glam::IVec2, + pub left_top: glam::IVec2, /// The size of the rectangle. 
pub extent: glam::UVec2, @@ -14,23 +14,23 @@ impl IntRect { #[inline] pub fn from_middle_and_extent(middle: glam::IVec2, size: glam::UVec2) -> Self { Self { - top_left_corner: middle - size.as_ivec2() / 2, + left_top: middle - size.as_ivec2() / 2, extent: size, } } #[inline] - pub fn width(&self) -> u32 { + pub fn width(self) -> u32 { self.extent.x } #[inline] - pub fn height(&self) -> u32 { - self.extent.x + pub fn height(self) -> u32 { + self.extent.y } #[inline] - pub fn wgpu_extent(&self) -> wgpu::Extent3d { + pub fn wgpu_extent(self) -> wgpu::Extent3d { wgpu::Extent3d { width: self.extent.x, height: self.extent.y, diff --git a/crates/re_renderer/src/renderer/debug_overlay.rs b/crates/re_renderer/src/renderer/debug_overlay.rs index 276f8fc413da..6e615cd4a710 100644 --- a/crates/re_renderer/src/renderer/debug_overlay.rs +++ b/crates/re_renderer/src/renderer/debug_overlay.rs @@ -93,7 +93,7 @@ impl DebugOverlayDrawData { "DebugOverlayDrawData".into(), gpu_data::DebugOverlayUniformBuffer { screen_resolution: screen_resolution.as_vec2().into(), - position_in_pixel: overlay_rect.top_left_corner.as_vec2().into(), + position_in_pixel: overlay_rect.left_top.as_vec2().into(), extent_in_pixel: overlay_rect.extent.as_vec2().into(), mode: mode as u32, _padding: 0, diff --git a/crates/re_renderer/src/renderer/point_cloud.rs b/crates/re_renderer/src/renderer/point_cloud.rs index 1e9bfb77a578..15059d82fd04 100644 --- a/crates/re_renderer/src/renderer/point_cloud.rs +++ b/crates/re_renderer/src/renderer/point_cloud.rs @@ -173,9 +173,9 @@ impl PointCloudDrawData { /// Number of vertices and colors has to be equal. /// /// If no batches are passed, all points are assumed to be in a single batch with identity transform. - pub fn new( + pub fn new( ctx: &mut RenderContext, - mut builder: PointCloudBuilder, + mut builder: PointCloudBuilder, ) -> Result { crate::profile_function!(); diff --git a/crates/re_viewer/src/ui/view_spatial/eye.rs b/crates/re_viewer/src/ui/view_spatial/eye.rs index 101da1ca6d7c..287481311101 100644 --- a/crates/re_viewer/src/ui/view_spatial/eye.rs +++ b/crates/re_viewer/src/ui/view_spatial/eye.rs @@ -48,24 +48,24 @@ impl Eye { } } - pub fn ui_from_world(&self, rect: &Rect) -> Mat4 { - let aspect_ratio = rect.width() / rect.height(); + pub fn ui_from_world(&self, space2d_rect: Rect) -> Mat4 { + let aspect_ratio = space2d_rect.width() / space2d_rect.height(); let projection = if let Some(fov_y) = self.fov_y { Mat4::perspective_infinite_rh(fov_y, aspect_ratio, self.near()) } else { Mat4::orthographic_rh( - rect.left(), - rect.right(), - rect.bottom(), - rect.top(), + space2d_rect.left(), + space2d_rect.right(), + space2d_rect.bottom(), + space2d_rect.top(), self.near(), self.far(), ) }; - Mat4::from_translation(vec3(rect.center().x, rect.center().y, 0.0)) - * Mat4::from_scale(0.5 * vec3(rect.width(), -rect.height(), 1.0)) + Mat4::from_translation(vec3(space2d_rect.center().x, space2d_rect.center().y, 0.0)) + * Mat4::from_scale(0.5 * vec3(space2d_rect.width(), -space2d_rect.height(), 1.0)) * projection * self.world_from_view.inverse() } @@ -80,7 +80,7 @@ impl Eye { /// Picking ray for a given pointer in the parent space /// (i.e. 
prior to camera transform, "world" space) - pub fn picking_ray(&self, screen_rect: &Rect, pointer: glam::Vec2) -> macaw::Ray3 { + pub fn picking_ray(&self, screen_rect: Rect, pointer: glam::Vec2) -> macaw::Ray3 { if let Some(fov_y) = self.fov_y { let (w, h) = (screen_rect.width(), screen_rect.height()); let aspect_ratio = w / h; diff --git a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs index 7f7c9cb90a7c..93dbc5845c2e 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs @@ -8,7 +8,7 @@ use re_log_types::{ }; use re_renderer::{Color32, OutlineMaskPreference, Size}; -use super::{eye::Eye, SpaceCamera3D, SpatialNavigationMode}; +use super::{SpaceCamera3D, SpatialNavigationMode}; use crate::{ misc::{mesh_loader::LoadedMesh, SpaceViewHighlights, TransformCache, ViewerContext}, ui::{ @@ -21,7 +21,7 @@ mod picking; mod primitives; mod scene_part; -pub use self::picking::{AdditionalPickingInfo, PickingRayHit, PickingResult}; +pub use self::picking::{AdditionalPickingInfo, PickingContext, PickingRayHit, PickingResult}; pub use self::primitives::SceneSpatialPrimitives; use scene_part::ScenePart; @@ -246,28 +246,4 @@ impl SceneSpatial { SpatialNavigationMode::ThreeD } - - #[allow(clippy::too_many_arguments)] - pub fn picking( - &self, - render_ctx: &re_renderer::RenderContext, - gpu_readback_identifier: re_renderer::GpuReadbackIdentifier, - previous_picking_result: &Option, - pointer_in_ui: glam::Vec2, - ui_rect: &egui::Rect, - eye: &Eye, - ui_interaction_radius: f32, - ) -> PickingResult { - picking::picking( - render_ctx, - gpu_readback_identifier, - previous_picking_result, - pointer_in_ui, - ui_rect, - eye, - &self.primitives, - &self.ui, - ui_interaction_radius, - ) - } } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs index ea813f460c91..d74d80ef1ee1 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs @@ -1,3 +1,5 @@ +//! Handles picking in 2D & 3D spaces. + use itertools::Itertools as _; use re_data_store::InstancePathHash; @@ -18,6 +20,9 @@ pub enum AdditionalPickingInfo { /// The hit was a textured rect at the given uv coordinates (ranging from 0 to 1) TexturedRect(glam::Vec2), + /// The result came from GPU based picking. + GpuPickingResult, + /// We hit a egui ui element, meaning that depth information is not usable. GuiOverlay, } @@ -36,9 +41,6 @@ pub struct PickingRayHit { /// Any additional information about the picking hit. pub info: AdditionalPickingInfo, - - /// True if this picking result came from a GPU picking pass. - pub used_gpu_picking: bool, } impl PickingRayHit { @@ -48,9 +50,12 @@ impl PickingRayHit { ray_t: t, info: AdditionalPickingInfo::None, depth_offset: 0, - used_gpu_picking: false, } } + + pub fn space_position(&self, ray_in_world: &macaw::Ray3) -> glam::Vec3 { + ray_in_world.origin + ray_in_world.dir * self.ray_t + } } #[derive(Clone)] @@ -61,33 +66,25 @@ pub struct PickingResult { /// Picking ray hits for transparent objects, sorted from far to near. /// If there is an opaque hit, all of them are in front of the opaque hit. pub transparent_hits: Vec, - - /// The picking ray used. Given in the coordinates of the space the picking is performed in. - picking_ray: macaw::Ray3, } impl PickingResult { - /// The space position of a given hit. 
- #[allow(dead_code)] - pub fn space_position(&self, hit: &PickingRayHit) -> glam::Vec3 { - self.picking_ray.origin + self.picking_ray.dir * hit.ray_t - } - /// Iterates over all hits from far to close. pub fn iter_hits(&self) -> impl Iterator { self.opaque_hit.iter().chain(self.transparent_hits.iter()) } + + pub fn space_position(&self, ray_in_world: &macaw::Ray3) -> Option { + self.opaque_hit + .as_ref() + .or_else(|| self.transparent_hits.last()) + .map(|hit| hit.space_position(ray_in_world)) + } } const RAY_T_EPSILON: f32 = f32::EPSILON; -struct PickingContext { - pointer_in_ui: glam::Vec2, - ray_in_world: macaw::Ray3, - ui_from_world: glam::Mat4, - max_side_ui_dist_sq: f32, -} - +/// State used to build up picking results. struct PickingState { closest_opaque_side_ui_dist_sq: f32, closest_opaque_pick: PickingRayHit, @@ -138,146 +135,208 @@ impl PickingState { } } -#[allow(clippy::too_many_arguments)] -pub fn picking( - render_ctx: &re_renderer::RenderContext, - gpu_readback_identifier: re_renderer::GpuReadbackIdentifier, - previous_picking_result: &Option, - pointer_in_ui: glam::Vec2, - ui_rect: &egui::Rect, - eye: &Eye, - primitives: &SceneSpatialPrimitives, - ui_data: &SceneSpatialUiData, - ui_interaction_radius: f32, -) -> PickingResult { - crate::profile_function!(); +/// Picking context in which picking is performed. +pub struct PickingContext { + /// Cursor position in the UI coordinate system. + pub pointer_in_ui: glam::Vec2, - let max_side_ui_dist_sq = ui_interaction_radius * ui_interaction_radius; - - let context = PickingContext { - pointer_in_ui, - ui_from_world: eye.ui_from_world(ui_rect), - ray_in_world: eye.picking_ray(ui_rect, pointer_in_ui), - max_side_ui_dist_sq, - }; - let mut state = PickingState { - closest_opaque_side_ui_dist_sq: max_side_ui_dist_sq, - closest_opaque_pick: PickingRayHit { - instance_path_hash: InstancePathHash::NONE, - ray_t: f32::INFINITY, - info: AdditionalPickingInfo::None, - depth_offset: 0, - used_gpu_picking: false, - }, - // Combined, sorted (and partially "hidden") by opaque results later. - transparent_hits: Vec::new(), - }; - - let SceneSpatialPrimitives { - bounding_box: _, - textured_rectangles, - textured_rectangles_ids, - line_strips, - points, - meshes, - depth_clouds: _, // no picking for depth clouds yet - any_outlines: _, - } = primitives; - - picking_points(&context, &mut state, points); - picking_lines(&context, &mut state, line_strips); - picking_meshes(&context, &mut state, meshes); - picking_textured_rects( - &context, - &mut state, - textured_rectangles, - textured_rectangles_ids, - ); - picking_ui_rects(&context, &mut state, ui_data); - - // GPU based picking. - // Only look at newest available result, discard everything else. - let mut gpu_picking_result = None; - while let Some(picking_result) = - PickingLayerProcessor::next_readback_result::<()>(render_ctx, gpu_readback_identifier) - { - gpu_picking_result = Some(picking_result); - } - // TODO(andreas): Use gpu picking as fallback for now to fix meshes. Should combine instead! - if state.closest_opaque_pick.instance_path_hash == InstancePathHash::NONE { - if let Some(gpu_picking_result) = gpu_picking_result { - // TODO(andreas): Pick middle pixel for now. But we soon want to snap to the closest object using a bigger picking rect. 
- let pos_on_picking_rect = gpu_picking_result.rect.extent / 2; - let picked_id = gpu_picking_result.picked_id(pos_on_picking_rect); - let picked_object = instance_path_hash_from_picking_layer_id(picked_id); - - // It is old data, the object might be gone by now! - if picked_object.is_some() { - // TODO(andreas): Once this is the primary path we should not awkwardly reconstruct the ray_t here. It's entirely correct either! - state.closest_opaque_pick.ray_t = gpu_picking_result - .picked_world_position(pos_on_picking_rect) - .distance(context.ray_in_world.origin); - state.closest_opaque_pick.instance_path_hash = picked_object; - state.closest_opaque_pick.used_gpu_picking = true; - } - } else { - // It is possible that some frames we don't get a picking result and the frame after we get several. - // We need to cache the last picking result and use it until we get a new one or the mouse leaves the screen. - // (Andreas: On my mac this *actually* happens in very simple scenes, I get occasional frames with 0 and then with 2 picking results!) - if let Some(PickingResult { - opaque_hit: Some(previous_opaque_hit), - .. - }) = previous_picking_result - { - if previous_opaque_hit.used_gpu_picking { - state.closest_opaque_pick = previous_opaque_hit.clone(); - } - } + /// Cursor position on the renderer canvas in pixels. + pub pointer_in_pixel: glam::Vec2, + + /// Cursor position in the 2D space coordinate system. + /// + /// For 3D spaces this is equal to the cursor position in pixel coordinate system. + pub pointer_in_space2d: glam::Vec2, + + /// The picking ray used. Given in the coordinates of the space the picking is performed in. + pub ray_in_world: macaw::Ray3, + + /// Transformation from ui coordinates to world coordinates. + ui_from_world: glam::Mat4, + + /// Multiply with this to convert to pixels from points. + pixels_from_points: f32, +} + +impl PickingContext { + /// Radius in which cursor interactions may snap to the nearest object even if the cursor + /// does not hover it directly. + /// + /// Note that this needs to be scaled when zooming is applied by the virtual->visible ui rect transform. + pub const UI_INTERACTION_RADIUS: f32 = 5.0; + + pub fn new( + pointer_in_ui: egui::Pos2, + space2d_from_ui: eframe::emath::RectTransform, + ui_clip_rect: egui::Rect, + pixels_from_points: f32, + eye: &Eye, + ) -> PickingContext { + let pointer_in_space2d = space2d_from_ui.transform_pos(pointer_in_ui); + let pointer_in_space2d = glam::vec2(pointer_in_space2d.x, pointer_in_space2d.y); + let pointer_in_pixel = (pointer_in_ui - ui_clip_rect.left_top()) * pixels_from_points; + + PickingContext { + pointer_in_space2d, + pointer_in_pixel: glam::vec2(pointer_in_pixel.x, pointer_in_pixel.y), + pointer_in_ui: glam::vec2(pointer_in_ui.x, pointer_in_ui.y), + ui_from_world: eye.ui_from_world(*space2d_from_ui.to()), + ray_in_world: eye.picking_ray(*space2d_from_ui.to(), pointer_in_space2d), + pixels_from_points, } } - state.sort_and_remove_hidden_transparent(); + /// Performs picking for a given scene. 
+ pub fn pick( + &self, + render_ctx: &re_renderer::RenderContext, + gpu_readback_identifier: re_renderer::GpuReadbackIdentifier, + previous_picking_result: &Option, + primitives: &SceneSpatialPrimitives, + ui_data: &SceneSpatialUiData, + ) -> PickingResult { + crate::profile_function!(); + + let max_side_ui_dist_sq = Self::UI_INTERACTION_RADIUS * Self::UI_INTERACTION_RADIUS; + + let mut state = PickingState { + closest_opaque_side_ui_dist_sq: max_side_ui_dist_sq, + closest_opaque_pick: PickingRayHit { + instance_path_hash: InstancePathHash::NONE, + ray_t: f32::INFINITY, + info: AdditionalPickingInfo::None, + depth_offset: 0, + }, + // Combined, sorted (and partially "hidden") by opaque results later. + transparent_hits: Vec::new(), + }; + + let SceneSpatialPrimitives { + bounding_box: _, + textured_rectangles, + textured_rectangles_ids, + line_strips, + points: _, + meshes: _, + depth_clouds: _, // no picking for depth clouds yet + any_outlines: _, + } = primitives; + + // GPU based picking. + picking_gpu( + render_ctx, + gpu_readback_identifier, + &mut state, + self, + previous_picking_result, + ); + + picking_lines(self, &mut state, line_strips); + picking_textured_rects( + self, + &mut state, + textured_rectangles, + textured_rectangles_ids, + ); + picking_ui_rects(self, &mut state, ui_data); + + state.sort_and_remove_hidden_transparent(); - PickingResult { - opaque_hit: state - .closest_opaque_pick - .instance_path_hash - .is_some() - .then_some(state.closest_opaque_pick), - transparent_hits: state.transparent_hits, - picking_ray: context.ray_in_world, + PickingResult { + opaque_hit: state + .closest_opaque_pick + .instance_path_hash + .is_some() + .then_some(state.closest_opaque_pick), + transparent_hits: state.transparent_hits, + } } } -fn picking_points( - context: &PickingContext, +fn picking_gpu( + render_ctx: &re_renderer::RenderContext, + gpu_readback_identifier: u64, state: &mut PickingState, - points: &re_renderer::PointCloudBuilder, + context: &PickingContext, + previous_picking_result: &Option, ) { crate::profile_function!(); - for (batch, vertex_iter) in points.iter_vertices_and_userdata_by_batch() { - // For getting the closest point we could transform the mouse ray into the "batch space". - // However, we want to determine the closest point in *screen space*, meaning that we need to project all points. - let ui_from_batch = context.ui_from_world * batch.world_from_obj; + // Only look at newest available result, discard everything else. + let mut gpu_picking_result = None; + while let Some(picking_result) = + PickingLayerProcessor::next_readback_result::<()>(render_ctx, gpu_readback_identifier) + { + gpu_picking_result = Some(picking_result); + } - for (point, instance_hash) in vertex_iter { - if instance_hash.is_none() { - continue; + if let Some(gpu_picking_result) = gpu_picking_result { + // First, figure out where on the rect the cursor is by now. + // (for simplicity, we assume the screen hasn't been resized) + let pointer_on_picking_rect = + context.pointer_in_pixel - gpu_picking_result.rect.left_top.as_vec2(); + // The cursor might have moved outside of the rect. Clamp it back in. + let pointer_on_picking_rect = pointer_on_picking_rect.clamp( + glam::Vec2::ZERO, + (gpu_picking_result.rect.extent - glam::UVec2::ONE).as_vec2(), + ); + + // Find closest non-zero pixel to the cursor. 
+ let mut picked_id = re_renderer::PickingLayerId::default(); + let mut picked_on_picking_rect = glam::Vec2::ZERO; + let mut closest_rect_distance_sq = f32::INFINITY; + + for (i, id) in gpu_picking_result.picking_id_data.iter().enumerate() { + if id.object.0 != 0 { + let current_pos_on_picking_rect = glam::uvec2( + i as u32 % gpu_picking_result.rect.extent.x, + i as u32 / gpu_picking_result.rect.extent.x, + ) + .as_vec2() + + glam::vec2(0.5, 0.5); // Use pixel center for distances. + let distance_sq = + current_pos_on_picking_rect.distance_squared(pointer_on_picking_rect); + if distance_sq < closest_rect_distance_sq { + picked_on_picking_rect = current_pos_on_picking_rect; + closest_rect_distance_sq = distance_sq; + picked_id = *id; + } } + } + if picked_id == re_renderer::PickingLayerId::default() { + // Nothing found. + return; + } - // TODO(emilk): take point radius into account - let pos_in_ui = ui_from_batch.project_point3(point.position); - let dist_sq = pos_in_ui.truncate().distance_squared(context.pointer_in_ui); - if dist_sq <= state.closest_opaque_side_ui_dist_sq { - let t = context - .ray_in_world - .closest_t_to_point(batch.world_from_obj.transform_point3(point.position)); - state.check_hit( - dist_sq, - PickingRayHit::from_instance_and_t(*instance_hash, t), - false, - ); + let ui_distance_sq = picked_on_picking_rect.distance_squared(pointer_on_picking_rect) + / (context.pixels_from_points * context.pixels_from_points); + let picked_world_position = + gpu_picking_result.picked_world_position(picked_on_picking_rect.as_uvec2()); + state.check_hit( + ui_distance_sq, + PickingRayHit { + instance_path_hash: instance_path_hash_from_picking_layer_id(picked_id), + // TODO(andreas): Once this is the primary path we should not awkwardly reconstruct the ray_t here. It's not entirely correct either! + ray_t: picked_world_position.distance(context.ray_in_world.origin), + depth_offset: 0, + info: AdditionalPickingInfo::GpuPickingResult, + }, + false, + ); + } else { + // It is possible that some frames we don't get a picking result and the frame after we get several. + // We need to cache the last picking result and use it until we get a new one or the mouse leaves the screen. + // (Andreas: On my mac this *actually* happens in very simple scenes, I get occasional frames with 0 and then with 2 picking results!) + if let Some(PickingResult { + opaque_hit: Some(previous_opaque_hit), + .. 
+ }) = previous_picking_result + { + if matches!( + previous_opaque_hit.info, + AdditionalPickingInfo::GpuPickingResult + ) { + state.closest_opaque_pick = previous_opaque_hit.clone(); } } } @@ -311,10 +370,10 @@ fn picking_lines( let b = ui_from_batch.project_point3(end.position); let side_ui_dist_sq = line_segment_distance_sq_to_point_2d( [a.truncate(), b.truncate()], - context.pointer_in_ui, + context.pointer_in_space2d, ); - if side_ui_dist_sq < context.max_side_ui_dist_sq { + if side_ui_dist_sq < state.closest_opaque_side_ui_dist_sq { let start_world = batch.world_from_obj.transform_point3(start.position); let end_world = batch.world_from_obj.transform_point3(end.position); let t = ray_closest_t_line_segment(&context.ray_in_world, [start_world, end_world]); @@ -329,31 +388,6 @@ fn picking_lines( } } -fn picking_meshes( - context: &PickingContext, - state: &mut PickingState, - meshes: &[super::MeshSource], -) { - crate::profile_function!(); - - for mesh in meshes { - if !mesh.picking_instance_hash.is_some() { - continue; - } - let ray_in_mesh = (mesh.world_from_mesh.inverse() * context.ray_in_world).normalize(); - let t = crate::math::ray_bbox_intersect(&ray_in_mesh, mesh.mesh.bbox()); - - if t < 0.0 { - let side_ui_dist_sq = 0.0; - state.check_hit( - side_ui_dist_sq, - PickingRayHit::from_instance_and_t(mesh.picking_instance_hash, t), - false, - ); - } - } -} - fn picking_textured_rects( context: &PickingContext, state: &mut PickingState, @@ -392,7 +426,6 @@ fn picking_textured_rects( ray_t: t, info: AdditionalPickingInfo::TexturedRect(glam::vec2(u, v)), depth_offset: rect.depth_offset, - used_gpu_picking: false, }; state.check_hit(0.0, picking_hit, rect.multiplicative_tint.a() < 1.0); } @@ -406,7 +439,7 @@ fn picking_ui_rects( ) { crate::profile_function!(); - let egui_pos = egui::pos2(context.pointer_in_ui.x, context.pointer_in_ui.y); + let egui_pos = egui::pos2(context.pointer_in_space2d.x, context.pointer_in_space2d.y); for (bbox, instance_hash) in &ui_data.pickable_ui_rects { let side_ui_dist_sq = bbox.distance_sq_to_pos(egui_pos); state.check_hit( @@ -416,7 +449,6 @@ fn picking_ui_rects( ray_t: 0.0, info: AdditionalPickingInfo::GuiOverlay, depth_offset: 0, - used_gpu_picking: false, }, false, ); diff --git a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs index e8a56e454b27..c495496b3c9a 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs @@ -25,7 +25,7 @@ pub struct SceneSpatialPrimitives { pub textured_rectangles: Vec, pub line_strips: LineStripSeriesBuilder, - pub points: PointCloudBuilder, + pub points: PointCloudBuilder, pub meshes: Vec, pub depth_clouds: DepthClouds, diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs index d4811ca9864d..a2cbc5e371e3 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs @@ -42,21 +42,55 @@ pub trait ScenePart { /// Computes the instance hash that should be used for picking (in turn for selecting/hover) /// -/// Takes into account the currently the object properties, currently highlighted objects, and number of instances. -pub fn instance_path_hash_for_picking( +/// TODO(andreas): Resolve the hash-for-picking when retrieving the picking result instead of doing it ahead of time here to speed up things. 
+/// (gpu picking would always get the "most fine grained hash" which we could then resolve to groups etc. depending on selection state) +/// Right now this is a bit hard to do since number of instances depends on the Primary. This is expected to change soon. +pub fn instance_path_hash_for_picking( ent_path: &EntityPath, instance_key: re_log_types::component_types::InstanceKey, - entity_view: &re_query::EntityView, + entity_view: &re_query::EntityView, props: &EntityProperties, any_part_selected: bool, ) -> InstancePathHash { if props.interactive { - if entity_view.num_instances() == 1 || !any_part_selected { - InstancePathHash::entity_splat(ent_path) - } else { - InstancePathHash::instance(ent_path, instance_key) - } + InstancePathHash::instance( + ent_path, + instance_key_for_picking(instance_key, entity_view, any_part_selected), + ) } else { InstancePathHash::NONE } } + +/// Computes the instance key that should be used for picking (in turn for selecting/hover) +/// +/// Assumes the entity is interactive. +/// +/// TODO(andreas): Resolve the hash-for-picking when retrieving the picking result instead of doing it ahead of time here to speed up things. +/// (gpu picking would always get the "most fine grained hash" which we could then resolve to groups etc. depending on selection state) +/// Right now this is a bit hard to do since number of instances depends on the Primary. This is expected to change soon. +pub fn instance_key_for_picking( + instance_key: re_log_types::component_types::InstanceKey, + entity_view: &re_query::EntityView, + any_part_selected: bool, +) -> re_log_types::component_types::InstanceKey { + // If no part of the entity is selected or if there is only one instance, selecting + // should select the entire entity, not the specific instance. + // (the splat key means that no particular instance is selected but all at once instead) + if entity_view.num_instances() == 1 || !any_part_selected { + re_log_types::component_types::InstanceKey::SPLAT + } else { + instance_key + } +} + +/// See [`instance_key_for_picking`] +pub fn instance_key_to_picking_id( + instance_key: re_log_types::component_types::InstanceKey, + entity_view: &re_query::EntityView, + any_part_selected: bool, +) -> re_renderer::PickingLayerInstanceId { + re_renderer::PickingLayerInstanceId( + instance_key_for_picking(instance_key, entity_view, any_part_selected).0, + ) +} diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs index d2e86093e056..3811f0cf20ef 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs @@ -17,7 +17,7 @@ use crate::{ }, }; -use super::{instance_path_hash_for_picking, ScenePart}; +use super::{instance_key_to_picking_id, instance_path_hash_for_picking, ScenePart}; pub struct Points2DPart; @@ -26,7 +26,7 @@ impl Points2DPart { fn process_entity_view( scene: &mut SceneSpatial, _query: &SceneQuery<'_>, - props: &EntityProperties, + properties: &EntityProperties, entity_view: &EntityView, ent_path: &EntityPath, world_from_obj: Mat4, @@ -50,6 +50,11 @@ impl Points2DPart { .world_from_obj(world_from_obj) .outline_mask_ids(entity_highlight.overall); + if properties.interactive { + point_batch = + point_batch.picking_object_id(re_renderer::PickingLayerObjectId(ent_path.hash64())); + } + // TODO(andreas): This should follow the same batch processing as points3d. 
let visitor = |instance_key: InstanceKey, pos: Point2D, @@ -62,7 +67,7 @@ impl Points2DPart { ent_path, instance_key, entity_view, - props, + properties, entity_highlight.any_selection_highlight, ); @@ -88,11 +93,17 @@ impl Points2DPart { let radius = radius.map_or(Size::AUTO, |r| Size::new_scene(r.0)); let label = annotation_info.label(label.map(|l| l.0).as_ref()); - let point_range_builder = point_batch - .add_point_2d(pos) - .color(color) - .radius(radius) - .user_data(picking_instance_hash); + let mut point_range_builder = point_batch.add_point_2d(pos).color(color).radius(radius); + + // Set picking instance id if interactive. + if properties.interactive { + point_range_builder = + point_range_builder.picking_instance_id(instance_key_to_picking_id( + instance_key, + entity_view, + entity_highlight.any_selection_highlight, + )); + } // Check if this point is individually highlighted. if let Some(instance_mask_ids) = entity_highlight.instances.get(&instance_key) { @@ -119,7 +130,7 @@ impl Points2DPart { } // Generate keypoint connections if any. - scene.load_keypoint_connections(ent_path, keypoints, &annotations, props.interactive); + scene.load_keypoint_connections(ent_path, keypoints, &annotations, properties.interactive); Ok(()) } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs index 3303d8da0a43..5b349c52e791 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points3d.rs @@ -17,7 +17,10 @@ use crate::{ annotations::ResolvedAnnotationInfo, scene::SceneQuery, view_spatial::{ - scene::{scene_part::instance_path_hash_for_picking, Keypoints}, + scene::{ + scene_part::{instance_key_to_picking_id, instance_path_hash_for_picking}, + Keypoints, + }, SceneSpatial, UiLabel, UiLabelTarget, }, Annotations, DefaultColor, @@ -176,30 +179,31 @@ impl Points3DPart { let (annotation_infos, keypoints) = Self::process_annotations(query, entity_view, &annotations)?; - let instance_path_hashes_for_picking = { - crate::profile_scope!("instance_hashes"); - entity_view - .iter_instance_keys()? - .map(|instance_key| { - instance_path_hash_for_picking( - ent_path, - instance_key, - entity_view, - properties, - entity_highlight.any_selection_highlight, - ) - }) - .collect::>() - }; let colors = Self::process_colors(entity_view, ent_path, &annotation_infos)?; let radii = Self::process_radii(ent_path, entity_view)?; - if show_labels && instance_path_hashes_for_picking.len() <= self.max_labels { + if show_labels && entity_view.num_instances() <= self.max_labels { // Max labels is small enough that we can afford iterating on the colors again. let colors = Self::process_colors(entity_view, ent_path, &annotation_infos)?.collect::>(); + let instance_path_hashes_for_picking = { + crate::profile_scope!("instance_hashes"); + entity_view + .iter_instance_keys()? 
+ .map(|instance_key| { + instance_path_hash_for_picking( + ent_path, + instance_key, + entity_view, + properties, + entity_highlight.any_selection_highlight, + ) + }) + .collect::>() + }; + scene.ui.labels.extend(Self::process_labels( entity_view, &instance_path_hashes_for_picking, @@ -216,13 +220,27 @@ impl Points3DPart { .batch("3d points") .world_from_obj(world_from_obj) .outline_mask_ids(entity_highlight.overall); + if properties.interactive { + point_batch = point_batch + .picking_object_id(re_renderer::PickingLayerObjectId(ent_path.hash64())); + } let mut point_range_builder = point_batch .add_points(entity_view.num_instances(), point_positions) .colors(colors) - .radii(radii) - .user_data(instance_path_hashes_for_picking.into_iter()); + .radii(radii); + if properties.interactive { + point_range_builder = point_range_builder.picking_instance_ids( + entity_view.iter_instance_keys()?.map(|instance_key| { + instance_key_to_picking_id( + instance_key, + entity_view, + entity_highlight.any_selection_highlight, + ) + }), + ); + } - // Determine if there's any subranges that need extra highlighting. + // Determine if there's any sub-ranges that need extra highlighting. { crate::profile_scope!("marking additional highlight points"); for (highlighted_key, instance_mask_ids) in &entity_highlight.instances { diff --git a/crates/re_viewer/src/ui/view_spatial/ui.rs b/crates/re_viewer/src/ui/view_spatial/ui.rs index 6e5cbb887a67..91a14e813591 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui.rs @@ -9,17 +9,21 @@ use re_renderer::OutlineConfig; use crate::{ misc::{ - space_info::query_view_coordinates, SelectionHighlight, SpaceViewHighlights, ViewerContext, + space_info::query_view_coordinates, HoveredSpace, SelectionHighlight, SpaceViewHighlights, + ViewerContext, }, ui::{ - data_blueprint::DataBlueprintTree, space_view::ScreenshotMode, view_spatial::UiLabelTarget, + data_blueprint::DataBlueprintTree, + data_ui::{self, DataUi}, + space_view::ScreenshotMode, + view_spatial::UiLabelTarget, SpaceViewId, }, }; use super::{ eye::Eye, - scene::{PickingResult, SceneSpatialUiData}, + scene::{AdditionalPickingInfo, PickingResult, SceneSpatialUiData}, ui_2d::View2DState, ui_3d::View3DState, SceneSpatial, SpaceSpecs, @@ -59,8 +63,6 @@ impl From for WidgetText { } } -pub const PICKING_RECT_SIZE: u32 = 15; - #[derive(Clone, serde::Deserialize, serde::Serialize)] pub struct ViewSpatialState { /// How the scene is navigated. 
@@ -534,7 +536,7 @@ pub fn create_labels(
 
     let mut label_shapes = Vec::with_capacity(scene_ui.labels.len() * 2);
 
-    let ui_from_world_3d = eye3d.ui_from_world(ui_from_space2d.to());
+    let ui_from_world_3d = eye3d.ui_from_world(*ui_from_space2d.to());
 
     for label in &scene_ui.labels {
         let (wrap_width, text_anchor_pos) = match label.target {
@@ -662,3 +664,192 @@ pub fn screenshot_context_menu(
         (response, None)
     }
 }
+
+#[allow(clippy::too_many_arguments)]
+pub fn picking(
+    ctx: &mut ViewerContext<'_>,
+    mut response: egui::Response,
+    space_from_ui: egui::emath::RectTransform,
+    ui_clip_rect: egui::Rect,
+    parent_ui: &mut egui::Ui,
+    eye: Eye,
+    view_builder: &mut re_renderer::view_builder::ViewBuilder,
+    space_view_id: SpaceViewId,
+    state: &mut ViewSpatialState,
+    scene: &SceneSpatial,
+    space: &EntityPath,
+) -> egui::Response {
+    crate::profile_function!();
+
+    let Some(pointer_pos_ui) = response.hover_pos() else {
+        state.previous_picking_result = None;
+        return response;
+    };
+
+    ctx.select_hovered_on_click(&response);
+
+    let picking_context = super::scene::PickingContext::new(
+        pointer_pos_ui,
+        space_from_ui,
+        ui_clip_rect,
+        parent_ui.ctx().pixels_per_point(),
+        &eye,
+    );
+
+    let picking_rect_size =
+        super::scene::PickingContext::UI_INTERACTION_RADIUS * parent_ui.ctx().pixels_per_point();
+    // Make the picking rect bigger than necessary so we can use it to counteract delays.
+    // (by the time the picking rectangle is read back, the cursor may have moved on).
+    let picking_rect_size = (picking_rect_size * 2.0)
+        .ceil()
+        .at_least(8.0)
+        .at_most(128.0) as u32;
+
+    let _ = view_builder.schedule_picking_rect(
+        ctx.render_ctx,
+        re_renderer::IntRect::from_middle_and_extent(
+            picking_context.pointer_in_pixel.as_ivec2(),
+            glam::uvec2(picking_rect_size, picking_rect_size),
+        ),
+        space_view_id.gpu_readback_id(),
+        (),
+        ctx.app_options.show_picking_debug_overlay,
+    );
+
+    let picking_result = picking_context.pick(
+        ctx.render_ctx,
+        space_view_id.gpu_readback_id(),
+        &state.previous_picking_result,
+        &scene.primitives,
+        &scene.ui,
+    );
+    state.previous_picking_result = Some(picking_result.clone());
+
+    // Depth at pointer used for projecting rays from a hovered 2D view to corresponding 3D view(s).
+    // TODO(#1818): Depth at pointer only works for depth images so far.
+    let mut depth_at_pointer = None;
+    for hit in picking_result.iter_hits() {
+        let Some(instance_path) = hit.instance_path_hash.resolve(&ctx.log_db.entity_db)
+            else { continue; };
+
+        // Special hover ui for images. 
+ let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { + scene + .ui + .images + .iter() + .find(|image| image.instance_path_hash == hit.instance_path_hash) + .map(|image| (image, uv)) + } else { + None + }; + response = if let Some((image, uv)) = picked_image_with_uv { + if let Some(meter) = image.meter { + if let Some(raw_value) = image.tensor.get(&[ + picking_context.pointer_in_space2d.y.round() as _, + picking_context.pointer_in_space2d.x.round() as _, + ]) { + let raw_value = raw_value.as_f64(); + let depth_in_meters = raw_value / meter as f64; + depth_at_pointer = Some(depth_in_meters as f32); + } + } + + response + .on_hover_cursor(egui::CursorIcon::Crosshair) + .on_hover_ui_at_pointer(|ui| { + ui.set_max_width(320.0); + + ui.vertical(|ui| { + ui.label(instance_path.to_string()); + instance_path.data_ui( + ctx, + ui, + crate::ui::UiVerbosity::Small, + &ctx.current_query(), + ); + + let tensor_view = ctx + .cache + .image + .get_colormapped_view(&image.tensor, &image.annotations); + + if let [h, w, ..] = image.tensor.shape() { + ui.separator(); + ui.horizontal(|ui| { + let (w, h) = (w.size as f32, h.size as f32); + let center = [(uv.x * w) as isize, (uv.y * h) as isize]; + if *state.nav_mode.get() == SpatialNavigationMode::TwoD { + let rect = egui::Rect::from_min_size( + egui::Pos2::ZERO, + egui::vec2(w, h), + ); + data_ui::image::show_zoomed_image_region_area_outline( + ui, + &tensor_view, + center, + space_from_ui.inverse().transform_rect(rect), + ); + } + data_ui::image::show_zoomed_image_region( + ui, + &tensor_view, + center, + image.meter, + ); + }); + } + }); + }) + } else { + // Hover ui for everything else + response.on_hover_ui_at_pointer(|ui| { + ctx.instance_path_button(ui, Some(space_view_id), &instance_path); + instance_path.data_ui( + ctx, + ui, + crate::ui::UiVerbosity::Reduced, + &ctx.current_query(), + ); + }) + }; + + ctx.set_hovered(picking_result.iter_hits().filter_map(|pick| { + pick.instance_path_hash + .resolve(&ctx.log_db.entity_db) + .map(|instance_path| { + crate::misc::Item::InstancePath(Some(space_view_id), instance_path) + }) + })); + } + + let hovered_space = match state.nav_mode.get() { + SpatialNavigationMode::TwoD => HoveredSpace::TwoD { + space_2d: space.clone(), + pos: picking_context + .pointer_in_space2d + .extend(depth_at_pointer.unwrap_or(f32::INFINITY)), + }, + SpatialNavigationMode::ThreeD => { + let hovered_point = picking_result.space_position(&picking_context.ray_in_world); + HoveredSpace::ThreeD { + space_3d: space.clone(), + pos: hovered_point, + tracked_space_camera: state.state_3d.tracked_camera.clone(), + point_in_space_cameras: scene + .space_cameras + .iter() + .map(|cam| { + ( + cam.instance_path_hash, + hovered_point.and_then(|pos| cam.project_onto_2d(pos)), + ) + }) + .collect(), + } + } + }; + ctx.selection_state_mut().set_hovered_space(hovered_space); + + response +} diff --git a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs index bb4c37eb9eb7..48b0255e8333 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs @@ -1,21 +1,17 @@ use eframe::emath::RectTransform; -use egui::{ - pos2, vec2, Align2, Color32, NumExt as _, Pos2, Rect, Response, ScrollArea, Shape, Vec2, -}; +use egui::{pos2, vec2, Align2, Color32, NumExt as _, Pos2, Rect, ScrollArea, Shape, Vec2}; use macaw::IsoTransform; use re_data_store::EntityPath; use re_renderer::view_builder::{TargetConfiguration, ViewBuilder}; use super::{ 
eye::Eye, - scene::AdditionalPickingInfo, - ui::{create_labels, screenshot_context_menu, PICKING_RECT_SIZE}, + ui::{create_labels, picking, screenshot_context_menu}, SpatialNavigationMode, ViewSpatialState, }; use crate::{ - misc::{HoveredSpace, Item, SpaceViewHighlights}, + misc::{HoveredSpace, SpaceViewHighlights}, ui::{ - data_ui::{self, DataUi}, view_spatial::{ ui::outline_config, ui_renderer_bridge::{ @@ -23,7 +19,7 @@ use crate::{ }, SceneSpatial, }, - SpaceViewId, UiVerbosity, + SpaceViewId, }, ViewerContext, }; @@ -341,134 +337,22 @@ fn view_2d_scrollable( SpatialNavigationMode::TwoD, ); - let should_do_hovering = !re_ui::egui_helpers::is_anything_being_dragged(parent_ui.ctx()); - - // Check if we're hovering any hover primitive. - let mut depth_at_pointer = None; - if let (true, Some(pointer_pos_ui)) = (should_do_hovering, response.hover_pos()) { - // Schedule GPU picking. - let pointer_in_pixel = ((pointer_pos_ui - response.rect.left_top()) - * parent_ui.ctx().pixels_per_point()) - .round(); - let _ = view_builder.schedule_picking_rect( - ctx.render_ctx, - re_renderer::IntRect::from_middle_and_extent( - glam::ivec2(pointer_in_pixel.x as i32, pointer_in_pixel.y as i32), - glam::uvec2(PICKING_RECT_SIZE, PICKING_RECT_SIZE), - ), - space_view_id.gpu_readback_id(), - (), - ctx.app_options.show_picking_debug_overlay, - ); - - let pointer_pos_space = space_from_ui.transform_pos(pointer_pos_ui); - let hover_radius = space_from_ui.scale().y * 5.0; // TODO(emilk): from egui? - let picking_result = scene.picking( - ctx.render_ctx, - space_view_id.gpu_readback_id(), - &state.previous_picking_result, - glam::vec2(pointer_pos_space.x, pointer_pos_space.y), - &scene_rect_accum, - &eye, - hover_radius, + if !re_ui::egui_helpers::is_anything_being_dragged(parent_ui.ctx()) { + response = picking( + ctx, + response, + space_from_ui, + painter.clip_rect(), + parent_ui, + eye, + &mut view_builder, + space_view_id, + state, + &scene, + space, ); - state.previous_picking_result = Some(picking_result.clone()); - - for hit in picking_result.iter_hits() { - let Some(instance_path) = hit.instance_path_hash.resolve(&ctx.log_db.entity_db) - else { continue; }; - - // Special hover ui for images. - let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { - scene - .ui - .images - .iter() - .find(|image| image.instance_path_hash == hit.instance_path_hash) - .map(|image| (image, uv)) - } else { - None - }; - response = if let Some((image, uv)) = picked_image_with_uv { - // TODO(andreas): This is different in 3d view. - if let Some(meter) = image.meter { - if let Some(raw_value) = image.tensor.get(&[ - pointer_pos_space.y.round() as _, - pointer_pos_space.x.round() as _, - ]) { - let raw_value = raw_value.as_f64(); - let depth_in_meters = raw_value / meter as f64; - depth_at_pointer = Some(depth_in_meters as f32); - } - } - - response - .on_hover_cursor(egui::CursorIcon::Crosshair) - .on_hover_ui_at_pointer(|ui| { - ui.set_max_width(320.0); - - ui.vertical(|ui| { - ui.label(instance_path.to_string()); - instance_path.data_ui( - ctx, - ui, - UiVerbosity::Small, - &ctx.current_query(), - ); - - let tensor_view = ctx - .cache - .image - .get_colormapped_view(&image.tensor, &image.annotations); - - if let [h, w, ..] = image.tensor.shape() { - ui.separator(); - ui.horizontal(|ui| { - // TODO(andreas): 3d skips the show_zoomed_image_region_rect part here. 
- let (w, h) = (w.size as f32, h.size as f32); - let center = [(uv.x * w) as isize, (uv.y * h) as isize]; - let rect = Rect::from_min_size(Pos2::ZERO, egui::vec2(w, h)); - data_ui::image::show_zoomed_image_region_area_outline( - parent_ui, - &tensor_view, - center, - ui_from_space.transform_rect(rect), - ); - data_ui::image::show_zoomed_image_region( - ui, - &tensor_view, - center, - image.meter, - ); - }); - } - }); - }) - } else { - // Hover ui for everything else - response.on_hover_ui_at_pointer(|ui| { - ctx.instance_path_button(ui, Some(space_view_id), &instance_path); - instance_path.data_ui( - ctx, - ui, - crate::ui::UiVerbosity::Reduced, - &ctx.current_query(), - ); - }) - }; - - ctx.set_hovered(picking_result.iter_hits().filter_map(|pick| { - pick.instance_path_hash - .resolve(&ctx.log_db.entity_db) - .map(|instance_path| Item::InstancePath(Some(space_view_id), instance_path)) - })); - } - } else { - state.previous_picking_result = None; } - ctx.select_hovered_on_click(&response); - // ------------------------------------------------------------------------ // Screenshot context menu. @@ -502,7 +386,6 @@ fn view_2d_scrollable( )); } - project_onto_other_spaces(ctx, space, &response, &space_from_ui, depth_at_pointer); painter.extend(show_projections_from_3d_space( ctx, parent_ui, @@ -552,27 +435,6 @@ fn setup_target_config( // ------------------------------------------------------------------------ -fn project_onto_other_spaces( - ctx: &mut ViewerContext<'_>, - space: &EntityPath, - response: &Response, - space_from_ui: &RectTransform, - z: Option, -) { - if let Some(pointer_in_screen) = response.hover_pos() { - let pointer_in_space = space_from_ui.transform_pos(pointer_in_screen); - ctx.selection_state_mut() - .set_hovered_space(HoveredSpace::TwoD { - space_2d: space.clone(), - pos: glam::vec3( - pointer_in_space.x, - pointer_in_space.y, - z.unwrap_or(f32::INFINITY), - ), - }); - } -} - fn show_projections_from_3d_space( ctx: &ViewerContext<'_>, ui: &egui::Ui, diff --git a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs index 36d7044d1408..e62782460ae3 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs @@ -13,16 +13,14 @@ use re_renderer::{ use crate::{ misc::{HoveredSpace, Item, SpaceViewHighlights}, ui::{ - data_ui::{self, DataUi}, view_spatial::{ - scene::AdditionalPickingInfo, - ui::{create_labels, outline_config, screenshot_context_menu, PICKING_RECT_SIZE}, + ui::{create_labels, outline_config, picking, screenshot_context_menu}, ui_renderer_bridge::{ fill_view_builder, get_viewport, renderer_paint_callback, ScreenBackground, }, SceneSpatial, SpaceCamera3D, SpatialNavigationMode, }, - SpaceViewId, UiVerbosity, + SpaceViewId, }, ViewerContext, }; @@ -41,7 +39,7 @@ pub struct View3DState { pub orbit_eye: Option, /// Currently tracked camera. - tracked_camera: Option, + pub tracked_camera: Option, /// Camera pose just before we took over another camera via [Self::tracked_camera]. camera_before_tracked_camera: Option, @@ -358,134 +356,22 @@ pub fn view_3d( SpatialNavigationMode::ThreeD, ); - let should_do_hovering = !re_ui::egui_helpers::is_anything_being_dragged(ui.ctx()); - - // TODO(andreas): We're very close making the hover reaction of ui2d and ui3d the same. Finish the job! - // Check if we're hovering any hover primitive. - if let (true, Some(pointer_pos)) = (should_do_hovering, response.hover_pos()) { - // Schedule GPU picking. 
- let pointer_in_pixel = - ((pointer_pos - rect.left_top()) * ui.ctx().pixels_per_point()).round(); - let _ = view_builder.schedule_picking_rect( - ctx.render_ctx, - re_renderer::IntRect::from_middle_and_extent( - glam::ivec2(pointer_in_pixel.x as i32, pointer_in_pixel.y as i32), - glam::uvec2(PICKING_RECT_SIZE, PICKING_RECT_SIZE), - ), - space_view_id.gpu_readback_id(), - (), - ctx.app_options.show_picking_debug_overlay, + if !re_ui::egui_helpers::is_anything_being_dragged(ui.ctx()) { + response = picking( + ctx, + response, + RectTransform::from_to(rect, rect), + rect, + ui, + eye, + &mut view_builder, + space_view_id, + state, + &scene, + space, ); - - let picking_result = scene.picking( - ctx.render_ctx, - space_view_id.gpu_readback_id(), - &state.previous_picking_result, - glam::vec2(pointer_pos.x, pointer_pos.y), - &rect, - &eye, - 5.0, - ); - state.previous_picking_result = Some(picking_result.clone()); - - for hit in picking_result.iter_hits() { - let Some(instance_path) = hit.instance_path_hash.resolve(&ctx.log_db.entity_db) - else { continue; }; - - // Special hover ui for images. - let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { - scene - .ui - .images - .iter() - .find(|image| image.instance_path_hash == hit.instance_path_hash) - .map(|image| (image, uv)) - } else { - None - }; - response = if let Some((image, uv)) = picked_image_with_uv { - response - .on_hover_cursor(egui::CursorIcon::Crosshair) - .on_hover_ui_at_pointer(|ui| { - ui.set_max_width(320.0); - - ui.vertical(|ui| { - ui.label(instance_path.to_string()); - instance_path.data_ui( - ctx, - ui, - UiVerbosity::Small, - &ctx.current_query(), - ); - - let tensor_view = ctx - .cache - .image - .get_colormapped_view(&image.tensor, &image.annotations); - - if let [h, w, ..] = &image.tensor.shape[..] 
{
-                            ui.separator();
-                            ui.horizontal(|ui| {
-                                let (w, h) = (w.size as f32, h.size as f32);
-                                let center = [(uv.x * w) as isize, (uv.y * h) as isize];
-                                data_ui::image::show_zoomed_image_region(
-                                    ui,
-                                    &tensor_view,
-                                    center,
-                                    image.meter,
-                                );
-                            });
-                        }
-                    });
-                })
-        } else {
-            // Hover ui for everything else
-            response.on_hover_ui_at_pointer(|ui| {
-                ctx.instance_path_button(ui, Some(space_view_id), &instance_path);
-                instance_path.data_ui(
-                    ctx,
-                    ui,
-                    crate::ui::UiVerbosity::Reduced,
-                    &ctx.current_query(),
-                );
-            })
-        };
-        }
-
-        ctx.set_hovered(picking_result.iter_hits().filter_map(|pick| {
-            pick.instance_path_hash
-                .resolve(&ctx.log_db.entity_db)
-                .map(|instance_path| Item::InstancePath(Some(space_view_id), instance_path))
-        }));
-
-        let hovered_point = picking_result
-            .opaque_hit
-            .as_ref()
-            .or_else(|| picking_result.transparent_hits.last())
-            .map(|hit| picking_result.space_position(hit));
-
-        ctx.selection_state_mut()
-            .set_hovered_space(HoveredSpace::ThreeD {
-                space_3d: space.clone(),
-                pos: hovered_point,
-                tracked_space_camera: state.state_3d.tracked_camera.clone(),
-                point_in_space_cameras: scene
-                    .space_cameras
-                    .iter()
-                    .map(|cam| {
-                        (
-                            cam.instance_path_hash,
-                            hovered_point.and_then(|pos| cam.project_onto_2d(pos)),
-                        )
-                    })
-                    .collect(),
-            });
-    } else {
-        state.previous_picking_result = None;
     }
 
-    ctx.select_hovered_on_click(&response);
-
     // Double click changes camera
     if response.double_clicked() {
         state.state_3d.tracked_camera = None;
From fef1eda2c42773fb51835fa89670e429ef2cec1b Mon Sep 17 00:00:00 2001
From: Emil Ernerfeldt 
Date: Wed, 12 Apr 2023 11:42:01 +0200
Subject: [PATCH 40/89] CI: install pip requirements for Python e2e test

---
 .github/workflows/python.yml | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml
index 2635be7ba45b..43f4bc894ba2 100644
--- a/.github/workflows/python.yml
+++ b/.github/workflows/python.yml
@@ -216,6 +216,13 @@ jobs:
       - name: Run tests
         run: cd rerun_py/tests && pytest
 
+      - name: Install requirements for e2e test
+        run: pip install -r examples/python/api_demo/requirements.txt &&
+          pip install -r examples/python/car/requirements.txt &&
+          pip install -r examples/python/multithreading/requirements.txt &&
+          pip install -r examples/python/plots/requirements.txt &&
+          pip install -r examples/python/text_logging/requirements.txt
+
       - name: Run e2e test
         run: scripts/run_python_e2e_test.py --no-build # rerun-sdk is already built and installed
From 43befc26f57197474671bf5cff7bc8ca144778bf Mon Sep 17 00:00:00 2001
From: Andreas Reich 
Date: Wed, 12 Apr 2023 12:03:18 +0200
Subject: [PATCH 41/89] Process 2d points always in batches (#1820)

---
 .../re_log_types/src/component_types/point.rs |   8 +
 crates/re_renderer/src/point_cloud_builder.rs |  87 ------
 .../src/ui/view_spatial/scene/mod.rs          |   2 +-
 .../ui/view_spatial/scene/scene_part/mod.rs   | 109 ++++++++-
 .../view_spatial/scene/scene_part/points2d.rs | 215 ++++++++++--------
 .../view_spatial/scene/scene_part/points3d.rs | 121 ++--------
 6 files changed, 257 insertions(+), 285 deletions(-)

diff --git a/crates/re_log_types/src/component_types/point.rs b/crates/re_log_types/src/component_types/point.rs
index 764aa96b8b76..934a69b076fc 100644
--- a/crates/re_log_types/src/component_types/point.rs
+++ b/crates/re_log_types/src/component_types/point.rs
@@ -64,6 +64,14 @@ impl From for glam::Vec2 {
     }
 }
 
+#[cfg(feature = "glam")]
+impl From for glam::Vec3 {
+    #[inline]
+    fn from(pt: Point2D) -> Self {
+        Self::new(pt.x, pt.y, 0.0)
+    }
+}
+
/// A point in 3D space.
 ///
 /// ```
diff --git a/crates/re_renderer/src/point_cloud_builder.rs b/crates/re_renderer/src/point_cloud_builder.rs
index 3596f6f3d72d..d32435e66e23 100644
--- a/crates/re_renderer/src/point_cloud_builder.rs
+++ b/crates/re_renderer/src/point_cloud_builder.rs
@@ -214,34 +214,6 @@ impl<'a> PointCloudBatchBuilder<'a> {
         }
     }
 
-    #[inline]
-    pub fn add_point(&mut self, position: glam::Vec3) -> PointBuilder<'_> {
-        self.extend_defaults();
-
-        debug_assert_eq!(self.0.vertices.len(), self.0.color_buffer.num_written());
-
-        let vertex_index = self.0.vertices.len() as u32;
-        self.0.vertices.push(PointCloudVertex {
-            position,
-            radius: Size::AUTO,
-        });
-        self.batch_mut().point_count += 1;
-
-        PointBuilder {
-            vertex: self.0.vertices.last_mut().unwrap(),
-            color: &mut self.0.color_buffer,
-            picking_instance_id: &mut self.0.picking_instance_ids_buffer,
-            vertex_index,
-            additional_outline_mask_ids: &mut self
-                .0
-                .batches
-                .last_mut()
-                .unwrap()
-                .additional_outline_mask_ids_vertex_ranges,
-            outline_mask_id: OutlineMaskPreference::NONE,
-        }
-    }
-
     /// Adds several 2D points. Uses an autogenerated depth value, the same for all points passed.
     ///
     /// Params:
@@ -257,12 +229,6 @@ impl<'a> PointCloudBatchBuilder<'a> {
         self.add_points(size_hint, positions.map(|p| p.extend(0.0)))
     }
 
-    /// Adds a single 2D point. Uses an autogenerated depth value.
-    #[inline]
-    pub fn add_point_2d(&mut self, position: glam::Vec2) -> PointBuilder<'_> {
-        self.add_point(position.extend(0.0))
-    }
-
     /// Set flags for this batch.
     pub fn flags(mut self, flags: PointCloudBatchFlags) -> Self {
         self.batch_mut().flags = flags;
@@ -275,59 +241,6 @@ impl<'a> PointCloudBatchBuilder<'a> {
     }
 }
 
-// TODO(andreas): Should remove single-point builder, practically this never makes sense as we're almost always dealing with arrays of points.
-pub struct PointBuilder<'a> {
-    vertex: &'a mut PointCloudVertex,
-    color: &'a mut CpuWriteGpuReadBuffer,
-    picking_instance_id: &'a mut CpuWriteGpuReadBuffer,
-    vertex_index: u32,
-
-    additional_outline_mask_ids: &'a mut Vec<(std::ops::Range, OutlineMaskPreference)>,
-    outline_mask_id: OutlineMaskPreference,
-}
-
-impl<'a> PointBuilder<'a> {
-    #[inline]
-    pub fn radius(self, radius: Size) -> Self {
-        self.vertex.radius = radius;
-        self
-    }
-
-    /// This mustn't call this more than once.
-    #[inline]
-    pub fn color(self, color: Color32) -> Self {
-        self.color.push(color);
-        self
-    }
-
-    /// Pushes additional outline mask ids for this point
-    ///
-    /// Prefer the `overall_outline_mask_ids` setting to set the outline mask ids for the entire batch whenever possible!
-    #[inline]
-    pub fn outline_mask_id(mut self, outline_mask_id: OutlineMaskPreference) -> Self {
-        self.outline_mask_id = outline_mask_id;
-        self
-    }
-
-    /// This must not be called more than once. 
-    #[inline]
-    pub fn picking_instance_id(self, picking_instance_id: PickingLayerInstanceId) -> Self {
-        self.picking_instance_id.push(picking_instance_id);
-        self
-    }
-}
-
-impl<'a> Drop for PointBuilder<'a> {
-    fn drop(&mut self) {
-        if self.outline_mask_id.is_some() {
-            self.additional_outline_mask_ids.push((
-                self.vertex_index..self.vertex_index + 1,
-                self.outline_mask_id,
-            ));
-        }
-    }
-}
-
 pub struct PointsBuilder<'a> {
     // Vertices is a slice, whose radii will be updated
     vertices: &'a mut [PointCloudVertex],
diff --git a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs
index 93dbc5845c2e..a3f9b3fc7e72 100644
--- a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs
+++ b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs
@@ -172,7 +172,7 @@ impl SceneSpatial {
         // --
         // Note: Lines2DPart handles both Segments and LinesPaths since they are unified on the logging-side.
         &scene_part::Lines2DPart,
-        &scene_part::Points2DPart,
+        &scene_part::Points2DPart { max_labels: 10 },
         // ---
         &scene_part::CamerasPart,
     ];
diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs
index a2cbc5e371e3..a76dbf050bf4 100644
--- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs
+++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/mod.rs
@@ -11,6 +11,9 @@ mod meshes;
 mod points2d;
 mod points3d;

+use std::sync::Arc;
+
+use ahash::HashMap;
 pub(crate) use arrows3d::Arrows3DPart;
 pub(crate) use boxes2d::Boxes2DPart;
 pub(crate) use boxes3d::Boxes3DPart;
@@ -21,11 +24,15 @@ pub(crate) use lines3d::Lines3DPart;
 pub(crate) use meshes::MeshPart;
 pub(crate) use points2d::Points2DPart;
 pub(crate) use points3d::Points3DPart;
+use re_log_types::component_types::{ClassId, ColorRGBA, KeypointId, Radius};

 use super::SceneSpatial;
 use crate::{
     misc::{SpaceViewHighlights, TransformCache, ViewerContext},
-    ui::scene::SceneQuery,
+    ui::{
+        annotations::ResolvedAnnotationInfo, scene::SceneQuery, view_spatial::scene::Keypoints,
+        Annotations, DefaultColor,
+    },
 };
 use re_data_store::{EntityPath, EntityProperties, InstancePathHash};
@@ -94,3 +101,103 @@ pub fn instance_key_to_picking_id(
         instance_key_for_picking(instance_key, entity_view, any_part_selected).0,
     )
 }
+
+/// Process [`ColorRGBA`] components using annotations and default colors.
+pub fn process_colors<'a, Primary>(
+    entity_view: &'a re_query::EntityView<Primary>,
+    ent_path: &'a EntityPath,
+    annotation_infos: &'a [ResolvedAnnotationInfo],
+) -> Result<impl Iterator<Item = egui::Color32> + 'a, re_query::QueryError>
+where
+    Primary: re_log_types::SerializableComponent + re_log_types::DeserializableComponent,
+    for<'b> &'b Primary::ArrayType: IntoIterator,
+{
+    crate::profile_function!();
+    let default_color = DefaultColor::EntityPath(ent_path);
+
+    Ok(itertools::izip!(
+        annotation_infos.iter(),
+        entity_view.iter_component::<ColorRGBA>()?,
+    )
+    .map(move |(annotation_info, color)| {
+        annotation_info.color(color.map(move |c| c.to_array()).as_ref(), default_color)
+    }))
+}
+
+/// Process [`Radius`] components to [`re_renderer::Size`] using auto size where no radius is specified.
+pub fn process_radii<'a, Primary>(
+    ent_path: &EntityPath,
+    entity_view: &'a re_query::EntityView<Primary>,
+) -> Result<impl Iterator<Item = re_renderer::Size> + 'a, re_query::QueryError>
+where
+    Primary: re_log_types::SerializableComponent + re_log_types::DeserializableComponent,
+    for<'b> &'b Primary::ArrayType: IntoIterator,
+{
+    crate::profile_function!();
+    let ent_path = ent_path.clone();
+    Ok(entity_view.iter_component::<Radius>()?.map(move |radius| {
+        radius.map_or(re_renderer::Size::AUTO, |r| {
+            if 0.0 <= r.0 && r.0.is_finite() {
+                re_renderer::Size::new_scene(r.0)
+            } else {
+                if r.0 < 0.0 {
+                    re_log::warn_once!("Found negative radius in entity {ent_path}");
+                } else if r.0.is_infinite() {
+                    re_log::warn_once!("Found infinite radius in entity {ent_path}");
+                } else {
+                    re_log::warn_once!("Found NaN radius in entity {ent_path}");
+                }
+                re_renderer::Size::AUTO
+            }
+        })
+    }))
+}
+
+/// Resolves all annotations and keypoints for the given entity view.
+fn process_annotations_and_keypoints<Primary>(
+    query: &SceneQuery<'_>,
+    entity_view: &re_query::EntityView<Primary>,
+    annotations: &Arc<Annotations>,
+) -> Result<(Vec<ResolvedAnnotationInfo>, super::Keypoints), re_query::QueryError>
+where
+    Primary: re_log_types::SerializableComponent + re_log_types::DeserializableComponent,
+    for<'b> &'b Primary::ArrayType: IntoIterator,
+    glam::Vec3: std::convert::From<Primary>,
+{
+    crate::profile_function!();
+
+    let mut keypoints: Keypoints = HashMap::default();
+
+    // No need to process annotations if we don't have keypoints or class-ids
+    if !entity_view.has_component::<KeypointId>() && !entity_view.has_component::<ClassId>() {
+        let resolved_annotation = annotations.class_description(None).annotation_info();
+        return Ok((
+            vec![resolved_annotation; entity_view.num_instances()],
+            keypoints,
+        ));
+    }
+
+    let annotation_info = itertools::izip!(
+        entity_view.iter_primary()?,
+        entity_view.iter_component::<KeypointId>()?,
+        entity_view.iter_component::<ClassId>()?,
+    )
+    .map(|(position, keypoint_id, class_id)| {
+        let class_description = annotations.class_description(class_id);
+
+        if let (Some(keypoint_id), Some(class_id), Some(position)) =
+            (keypoint_id, class_id, position)
+        {
+            keypoints
+                .entry((class_id, query.latest_at.as_i64()))
+                .or_insert_with(Default::default)
+                .insert(keypoint_id, position.into());
+            class_description.annotation_info_with_keypoint(keypoint_id)
+        } else {
+            class_description.annotation_info()
+        }
+    })
+    .collect();
+
+    Ok((annotation_info, keypoints))
+}
diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs
index 3811f0cf20ef..984c1568caa1 100644
--- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs
+++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/points2d.rs
@@ -1,135 +1,168 @@
 use glam::Mat4;

-use re_data_store::{EntityPath, EntityProperties};
+use re_data_store::{EntityPath, EntityProperties, InstancePathHash};
 use re_log_types::{
     component_types::{ClassId, ColorRGBA, InstanceKey, KeypointId, Label, Point2D, Radius},
     Component,
 };
 use re_query::{query_primary_with_history, EntityView, QueryError};
-use re_renderer::Size;

 use crate::{
     misc::{SpaceViewHighlights, SpaceViewOutlineMasks, TransformCache, ViewerContext},
     ui::{
+        annotations::ResolvedAnnotationInfo,
         scene::SceneQuery,
-        view_spatial::{scene::Keypoints, SceneSpatial, UiLabel, UiLabelTarget},
-        DefaultColor,
+        view_spatial::{SceneSpatial, UiLabel, UiLabelTarget},
     },
 };

-use super::{instance_key_to_picking_id, instance_path_hash_for_picking, ScenePart};
+use super::{
+    instance_key_to_picking_id, instance_path_hash_for_picking, process_annotations_and_keypoints,
+    process_colors, process_radii, ScenePart,
+};

-pub struct Points2DPart;
+pub struct Points2DPart {
+    /// If the number of points in the batch is > max_labels, don't render point labels.
+    pub(crate) max_labels: usize,
+}

 impl Points2DPart {
+    fn process_labels<'a>(
+        entity_view: &'a EntityView<Point2D>,
+        instance_path_hashes: &'a [InstancePathHash],
+        colors: &'a [egui::Color32],
+        annotation_infos: &'a [ResolvedAnnotationInfo],
+    ) -> Result<impl Iterator<Item = UiLabel> + 'a, QueryError> {
+        let labels = itertools::izip!(
+            annotation_infos.iter(),
+            entity_view.iter_primary()?,
+            entity_view.iter_component::