Skip to content

Commit

Permalink
Merge branch 'main' into jan/remove-demo-build-script
Browse files Browse the repository at this point in the history
  • Loading branch information
jprochazk authored Dec 6, 2023
2 parents a871e75 + fa25e53 commit 349f458
Show file tree
Hide file tree
Showing 76 changed files with 856 additions and 951 deletions.
12 changes: 5 additions & 7 deletions clippy.toml
Original file line number Diff line number Diff line change
Expand Up @@ -40,17 +40,15 @@ disallowed-macros = [

# https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_methods
disallowed-methods = [
"std::env::temp_dir", # Use the tempdir crate instead
{ path = "egui_extras::TableBody::row", reason = "`row` doesn't scale. Use `rows` instead." },
{ path = "sha1::Digest::new", reason = "SHA1 is cryptographically broken" },
{ path = "std::env::temp_dir", reason = "Use the tempdir crate instead" },
{ path = "std::panic::catch_unwind", reason = "We compile with `panic = 'abort'`" },
{ path = "std::thread::spawn", reason = "Use `std::thread::Builder` and name the thread" },

# There are many things that aren't allowed on wasm,
# but we cannot disable them all here (because of e.g. https://github.com/rust-lang/rust-clippy/issues/10406)
# so we do that in `clippy_wasm.toml` instead.

"std::thread::spawn", # Use `std::thread::Builder` and name the thread

"sha1::Digest::new", # SHA1 is cryptographically broken

"std::panic::catch_unwind", # We compile with `panic = "abort"`
]

# https://rust-lang.github.io/rust-clippy/master/index.html#disallowed_names
Expand Down
2 changes: 1 addition & 1 deletion crates/re_arrow_store/benches/data_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -333,7 +333,7 @@ fn build_table(n: usize, packed: bool) -> DataTable {
TableId::ZERO,
(0..NUM_ROWS).map(move |frame_idx| {
DataRow::from_cells2(
RowId::random(),
RowId::new(),
"large_structs",
[build_frame_nr(frame_idx.into())],
n as _,
Expand Down
2 changes: 1 addition & 1 deletion crates/re_arrow_store/benches/gc.rs
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ where
TableId::ZERO,
(0..NUM_ROWS_PER_ENTITY_PATH).map(move |i| {
DataRow::from_component_batches(
RowId::random(),
RowId::new(),
timegen(i),
entity_path.clone(),
datagen(i)
Expand Down
8 changes: 4 additions & 4 deletions crates/re_arrow_store/src/store_dump.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ impl DataStore {
rows.sort_by_key(|row| (row.timepoint.clone(), row.row_id));

Ok(re_log_types::DataTable::from_rows(
re_log_types::TableId::random(),
re_log_types::TableId::new(),
rows,
))
}
Expand Down Expand Up @@ -84,7 +84,7 @@ impl DataStore {
} = inner;

DataTable {
table_id: TableId::random(),
table_id: TableId::new(),
col_row_id: col_row_id.clone(),
col_timelines: Default::default(),
col_entity_path: std::iter::repeat_with(|| ent_path.clone())
Expand Down Expand Up @@ -124,7 +124,7 @@ impl DataStore {
} = &*inner.read();

DataTable {
table_id: TableId::random(),
table_id: TableId::new(),
col_row_id: col_row_id.clone(),
col_timelines: [(*timeline, col_time.iter().copied().map(Some).collect())]
.into(),
Expand Down Expand Up @@ -210,7 +210,7 @@ impl DataStore {
}

Some(DataTable {
table_id: TableId::random(),
table_id: TableId::new(),
col_row_id,
col_timelines,
col_entity_path,
Expand Down
10 changes: 5 additions & 5 deletions crates/re_arrow_store/src/store_event.rs
Original file line number Diff line number Diff line change
Expand Up @@ -295,7 +295,7 @@ mod tests {
let timeline_other = Timeline::new_temporal("other");
let timeline_yet_another = Timeline::new_sequence("yet_another");

let row_id1 = RowId::random();
let row_id1 = RowId::new();
let timepoint1 = TimePoint::from_iter([
(timeline_frame, 42.into()), //
(timeline_other, 666.into()), //
Expand Down Expand Up @@ -337,7 +337,7 @@ mod tests {
view,
);

let row_id2 = RowId::random();
let row_id2 = RowId::new();
let timepoint2 = TimePoint::from_iter([
(timeline_frame, 42.into()), //
(timeline_yet_another, 1.into()), //
Expand Down Expand Up @@ -389,7 +389,7 @@ mod tests {
view,
);

let row_id3 = RowId::random();
let row_id3 = RowId::new();
let timepoint3 = TimePoint::timeless();
let row3 = {
let num_instances = 6;
Expand Down Expand Up @@ -486,7 +486,7 @@ mod tests {
let timeline_frame = Timeline::new_sequence("frame");

let row1 = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([(timeline_frame, 42.into())]),
"entity_a".into(),
[&InstanceKey::from_iter(0..10) as _],
Expand All @@ -503,7 +503,7 @@ mod tests {
}

let row2 = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([(timeline_frame, 42.into())]),
"entity_b".into(),
[&[MyColor::from(0xAABBCCDD)] as _],
Expand Down
4 changes: 2 additions & 2 deletions crates/re_arrow_store/src/store_helpers.rs
Original file line number Diff line number Diff line change
Expand Up @@ -203,7 +203,7 @@ impl DataStore {
re_tracing::profile_function!();

let mut row = match DataRow::from_cells1(
RowId::random(),
RowId::new(),
entity_path.clone(),
timepoint.clone(),
1,
Expand Down Expand Up @@ -243,7 +243,7 @@ impl DataStore {
let cell = DataCell::from_arrow_empty(component, datatype.clone());

let mut row = match DataRow::from_cells1(
RowId::random(),
RowId::new(),
entity_path.clone(),
timepoint.clone(),
cell.num_instances(),
Expand Down
6 changes: 3 additions & 3 deletions crates/re_arrow_store/src/store_subscriber.rs
Original file line number Diff line number Diff line change
Expand Up @@ -212,7 +212,7 @@ mod tests {
let timeline_yet_another = Timeline::new_sequence("yet_another");

let row = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([
(timeline_frame, 42.into()), //
(timeline_other, 666.into()), //
Expand All @@ -231,7 +231,7 @@ mod tests {
.collect();
let colors = vec![MyColor::from(0xFF0000FF)];
DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([
(timeline_frame, 42.into()), //
(timeline_yet_another, 1.into()), //
Expand All @@ -247,7 +247,7 @@ mod tests {
let num_instances = 6;
let colors = vec![MyColor::from(0x00DD00FF); num_instances];
DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::timeless(),
"entity_b".into(),
[
Expand Down
4 changes: 2 additions & 2 deletions crates/re_arrow_store/src/test_util.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ use crate::{DataStore, DataStoreConfig};
macro_rules! test_row {
($entity:ident @ $frames:tt => $n:expr; [$c0:expr $(,)*]) => {{
::re_log_types::DataRow::from_cells1_sized(
::re_log_types::RowId::random(),
::re_log_types::RowId::new(),
$entity.clone(),
$frames,
$n,
Expand All @@ -17,7 +17,7 @@ macro_rules! test_row {
}};
($entity:ident @ $frames:tt => $n:expr; [$c0:expr, $c1:expr $(,)*]) => {{
::re_log_types::DataRow::from_cells2_sized(
::re_log_types::RowId::random(),
::re_log_types::RowId::new(),
$entity.clone(),
$frames,
$n,
Expand Down
20 changes: 10 additions & 10 deletions crates/re_arrow_store/tests/correctness.rs
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ fn row_id_ordering_semantics() -> anyhow::Result<()> {
Default::default(),
);

let row_id = RowId::random();
let row_id = RowId::new();
let row = DataRow::from_component_batches(
row_id,
timepoint.clone(),
Expand All @@ -51,7 +51,7 @@ fn row_id_ordering_semantics() -> anyhow::Result<()> {
)?;
store.insert_row(&row)?;

let row_id = RowId::random();
let row_id = RowId::new();
let row = DataRow::from_component_batches(
row_id,
timepoint.clone(),
Expand Down Expand Up @@ -83,7 +83,7 @@ fn row_id_ordering_semantics() -> anyhow::Result<()> {
Default::default(),
);

let row_id = RowId::random();
let row_id = RowId::new();

let row = DataRow::from_component_batches(
row_id,
Expand Down Expand Up @@ -114,7 +114,7 @@ fn row_id_ordering_semantics() -> anyhow::Result<()> {
Default::default(),
);

let row_id1 = RowId::random();
let row_id1 = RowId::new();
let row_id2 = row_id1.next();

let row = DataRow::from_component_batches(
Expand Down Expand Up @@ -159,7 +159,7 @@ fn row_id_ordering_semantics() -> anyhow::Result<()> {
Default::default(),
);

let row_id1 = RowId::random();
let row_id1 = RowId::new();
let row_id2 = row_id1.next();

let row = DataRow::from_component_batches(
Expand Down Expand Up @@ -266,7 +266,7 @@ fn write_errors() {
build_log_time(Time::now()),
] => 1; [ build_some_positions2d(1) ]);

row.row_id = re_log_types::RowId::random();
row.row_id = re_log_types::RowId::new();
store.insert_row(&row).unwrap();

row.row_id = row.row_id.next();
Expand Down Expand Up @@ -574,7 +574,7 @@ fn gc_metadata_size() -> anyhow::Result<()> {

for _ in 0..3 {
let row = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::timeless(),
"xxx".into(),
[&[point] as _],
Expand Down Expand Up @@ -650,7 +650,7 @@ fn entity_min_time_correct_impl(store: &mut DataStore) -> anyhow::Result<()> {
let now_minus_one = now - Duration::from_secs(1.0);

let row = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([
(timeline_log_time, now.into()),
(timeline_frame_nr, 42.into()),
Expand Down Expand Up @@ -681,7 +681,7 @@ fn entity_min_time_correct_impl(store: &mut DataStore) -> anyhow::Result<()> {

// insert row in the future, these shouldn't be visible
let row = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([
(timeline_log_time, now_plus_one.into()),
(timeline_frame_nr, 54.into()),
Expand Down Expand Up @@ -711,7 +711,7 @@ fn entity_min_time_correct_impl(store: &mut DataStore) -> anyhow::Result<()> {

// insert row in the past, these should be visible
let row = DataRow::from_component_batches(
RowId::random(),
RowId::new(),
TimePoint::from_iter([
(timeline_log_time, now_minus_one.into()),
(timeline_frame_nr, 32.into()),
Expand Down
16 changes: 6 additions & 10 deletions crates/re_arrow_store/tests/data_store.rs
Original file line number Diff line number Diff line change
Expand Up @@ -329,7 +329,7 @@ fn latest_at_impl(store: &mut DataStore) {
insert_table(
store,
&DataTable::from_rows(
TableId::random(),
TableId::new(),
[row1.clone(), row2.clone(), row3.clone(), row4.clone()],
),
);
Expand Down Expand Up @@ -1000,10 +1000,8 @@ fn protected_gc_impl(store: &mut DataStore) {
store.insert_row(&row4).unwrap();

// Re-insert row1 and row2 as timeless data as well
let mut table_timeless = DataTable::from_rows(
TableId::random(),
[row1.clone().next(), row2.clone().next()],
);
let mut table_timeless =
DataTable::from_rows(TableId::new(), [row1.clone().next(), row2.clone().next()]);
table_timeless.col_timelines = Default::default();
insert_table_with_retries(store, &table_timeless);

Expand Down Expand Up @@ -1098,10 +1096,8 @@ fn protected_gc_clear_impl(store: &mut DataStore) {
let row4 = test_row!(ent_path @ [build_frame_nr(frame4)] => 0; [points4]);

// Insert the 3 rows as timeless
let mut table_timeless = DataTable::from_rows(
TableId::random(),
[row1.clone(), row2.clone(), row3.clone()],
);
let mut table_timeless =
DataTable::from_rows(TableId::new(), [row1.clone(), row2.clone(), row3.clone()]);
table_timeless.col_timelines = Default::default();
insert_table_with_retries(store, &table_timeless);

Expand Down Expand Up @@ -1145,7 +1141,7 @@ fn protected_gc_clear_impl(store: &mut DataStore) {
assert_eq!(stats.timeless.num_rows, 2);

// Now erase points and GC again
let mut table_timeless = DataTable::from_rows(TableId::random(), [row4]);
let mut table_timeless = DataTable::from_rows(TableId::new(), [row4]);
table_timeless.col_timelines = Default::default();
insert_table_with_retries(store, &table_timeless);

Expand Down
6 changes: 3 additions & 3 deletions crates/re_arrow_store/tests/dump.rs
Original file line number Diff line number Diff line change
Expand Up @@ -270,7 +270,7 @@ fn create_insert_table(ent_path: impl Into<EntityPath>) -> DataTable {
build_frame_nr(frame4),
] => 5; [colors4]);

let mut table = DataTable::from_rows(TableId::random(), [row1, row2, row3, row4]);
let mut table = DataTable::from_rows(TableId::new(), [row1, row2, row3, row4]);
table.compute_all_size_bytes();

table
Expand Down Expand Up @@ -314,7 +314,7 @@ fn data_store_dump_empty_column_impl(store: &mut DataStore) {
let row2 = test_row!(ent_path @ [
build_frame_nr(frame2),
] => 3; [instances2, positions2]);
let mut table = DataTable::from_rows(TableId::random(), [row1, row2]);
let mut table = DataTable::from_rows(TableId::new(), [row1, row2]);
table.compute_all_size_bytes();
insert_table_with_retries(store, &table);
}
Expand All @@ -325,7 +325,7 @@ fn data_store_dump_empty_column_impl(store: &mut DataStore) {
let row3 = test_row!(ent_path @ [
build_frame_nr(frame3),
] => 3; [instances3, positions3]);
let mut table = DataTable::from_rows(TableId::random(), [row3]);
let mut table = DataTable::from_rows(TableId::new(), [row3]);
table.compute_all_size_bytes();
insert_table_with_retries(store, &table);
}
Expand Down
6 changes: 3 additions & 3 deletions crates/re_arrow_store/tests/internals.rs
Original file line number Diff line number Diff line change
Expand Up @@ -53,7 +53,7 @@ fn pathological_bucket_topology() {
let timepoint = TimePoint::from([build_frame_nr(frame_nr.into())]);
for _ in 0..num {
let row = DataRow::from_cells1_sized(
RowId::random(),
RowId::new(),
ent_path.clone(),
timepoint.clone(),
num_instances,
Expand All @@ -63,7 +63,7 @@ fn pathological_bucket_topology() {
store_forward.insert_row(&row).unwrap();

let row = DataRow::from_cells1_sized(
RowId::random(),
RowId::new(),
ent_path.clone(),
timepoint.clone(),
num_instances,
Expand All @@ -86,7 +86,7 @@ fn pathological_bucket_topology() {
.map(|frame_nr| {
let timepoint = TimePoint::from([build_frame_nr(frame_nr.into())]);
DataRow::from_cells1_sized(
RowId::random(),
RowId::new(),
ent_path.clone(),
timepoint,
num_instances,
Expand Down
7 changes: 3 additions & 4 deletions crates/re_data_source/src/load_file_contents.rs
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ fn load_and_send(

// First, set a store info since this is the first thing the application expects.
tx.send(LogMsg::SetStoreInfo(SetStoreInfo {
row_id: re_log_types::RowId::random(),
row_id: re_log_types::RowId::new(),
info: re_log_types::StoreInfo {
application_id: re_log_types::ApplicationId(file_contents.name.clone()),
store_id: store_id.clone(),
Expand Down Expand Up @@ -89,15 +89,14 @@ fn log_msg_from_file_contents(
let timepoint = re_log_types::TimePoint::default();

let data_row = re_log_types::DataRow::from_cells(
re_log_types::RowId::random(),
re_log_types::RowId::new(),
timepoint,
entity_path,
num_instances,
cells,
)?;

let data_table =
re_log_types::DataTable::from_rows(re_log_types::TableId::random(), [data_row]);
let data_table = re_log_types::DataTable::from_rows(re_log_types::TableId::new(), [data_row]);
let arrow_msg = data_table.to_arrow_msg()?;
Ok(LogMsg::ArrowMsg(store_id, arrow_msg))
}
Expand Down
Loading

0 comments on commit 349f458

Please sign in to comment.