From 76c5d4a2a2c3c468e7bd61f9b16ce296ff4a93ee Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Thu, 25 Jul 2024 17:44:33 +0200 Subject: [PATCH 1/7] introduce new chunkified time series view --- Cargo.lock | 2 +- .../re_space_view_time_series/Cargo.toml | 2 +- .../src/line_visualizer_system.rs | 298 ++++++++------- .../src/point_visualizer_system.rs | 357 +++++++++++------- 4 files changed, 382 insertions(+), 277 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8b2e73ed14a2..faa5aed94c38 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5085,7 +5085,7 @@ dependencies = [ "re_format", "re_log", "re_log_types", - "re_query", + "re_query2", "re_renderer", "re_space_view", "re_tracing", diff --git a/crates/viewer/re_space_view_time_series/Cargo.toml b/crates/viewer/re_space_view_time_series/Cargo.toml index 51a350ceabbc..630bb76d0ed4 100644 --- a/crates/viewer/re_space_view_time_series/Cargo.toml +++ b/crates/viewer/re_space_view_time_series/Cargo.toml @@ -23,7 +23,7 @@ re_chunk_store.workspace = true re_format.workspace = true re_log.workspace = true re_log_types.workspace = true -re_query.workspace = true +re_query2.workspace = true re_renderer.workspace = true re_space_view.workspace = true re_tracing.workspace = true diff --git a/crates/viewer/re_space_view_time_series/src/line_visualizer_system.rs b/crates/viewer/re_space_view_time_series/src/line_visualizer_system.rs index 75bf86bd91e8..889dbf87cb05 100644 --- a/crates/viewer/re_space_view_time_series/src/line_visualizer_system.rs +++ b/crates/viewer/re_space_view_time_series/src/line_visualizer_system.rs @@ -1,8 +1,9 @@ use itertools::Itertools as _; -use re_query::{PromiseResult, QueryError}; -use re_space_view::range_with_blueprint_resolved_data; + +use re_space_view::range_with_blueprint_resolved_data2; use re_types::archetypes; use re_types::components::AggregationPolicy; +use re_types::external::arrow2::datatypes::DataType as ArrowDatatype; use re_types::{ archetypes::SeriesLine, components::{Color, Name, Scalar, StrokeWidth}, @@ -50,10 +51,8 @@ impl VisualizerSystem for SeriesLineSystem { ) -> Result, SpaceViewSystemExecutionError> { re_tracing::profile_function!(); - match self.load_scalars(ctx, query) { - Ok(_) | Err(QueryError::PrimaryNotFound(_)) => Ok(Vec::new()), - Err(err) => Err(err.into()), - } + self.load_scalars(ctx, query); + Ok(Vec::new()) } fn as_any(&self) -> &dyn std::any::Any { @@ -89,11 +88,7 @@ impl TypedComponentFallbackProvider for SeriesLineSystem { re_viewer_context::impl_component_fallback_provider!(SeriesLineSystem => [Color, StrokeWidth, Name]); impl SeriesLineSystem { - fn load_scalars( - &mut self, - ctx: &ViewContext<'_>, - query: &ViewQuery<'_>, - ) -> Result<(), QueryError> { + fn load_scalars(&mut self, ctx: &ViewContext<'_>, query: &ViewQuery<'_>) { re_tracing::profile_function!(); let (plot_bounds, time_per_pixel) = @@ -105,10 +100,10 @@ impl SeriesLineSystem { if parallel_loading { use rayon::prelude::*; re_tracing::profile_wait!("load_series"); - for one_series in data_results + for mut one_series in data_results .collect_vec() .par_iter() - .map(|data_result| -> Result, QueryError> { + .map(|data_result| -> Vec { let mut series = vec![]; self.load_series( ctx, @@ -117,12 +112,12 @@ impl SeriesLineSystem { time_per_pixel, data_result, &mut series, - )?; - Ok(series) + ); + series }) - .collect::>>() + .collect::>() { - self.all_series.append(&mut one_series?); + self.all_series.append(&mut one_series); } } else { let mut series = vec![]; @@ -134,12 +129,10 @@ impl SeriesLineSystem { 
time_per_pixel, data_result, &mut series, - )?; + ); } self.all_series = series; } - - Ok(()) } #[allow(clippy::too_many_arguments)] @@ -151,11 +144,9 @@ impl SeriesLineSystem { time_per_pixel: f64, data_result: &re_viewer_context::DataResult, all_series: &mut Vec, - ) -> Result<(), QueryError> { + ) { re_tracing::profile_function!(); - let resolver = ctx.recording().resolver(); - let current_query = ctx.current_query(); let query_ctx = ctx.query_context(data_result, ¤t_query); @@ -183,14 +174,14 @@ impl SeriesLineSystem { ctx.viewer_ctx.app_options.experimental_plot_query_clamping, ); { - use re_space_view::RangeResultsExt as _; + use re_space_view::RangeResultsExt2 as _; re_tracing::profile_scope!("primary", &data_result.entity_path.to_string()); let entity_path = &data_result.entity_path; let query = re_chunk_store::RangeQuery::new(view_query.timeline, time_range); - let results = range_with_blueprint_resolved_data( + let results = range_with_blueprint_resolved_data2( ctx, None, &query, @@ -205,144 +196,193 @@ impl SeriesLineSystem { ); // If we have no scalars, we can't do anything. - let Some(all_scalars) = results.get_required_component_dense::(resolver) else { - return Ok(()); + let Some(all_scalar_chunks) = results.get_required_chunks(&Scalar::name()) else { + return; }; - let all_scalars = all_scalars?; - - let all_scalars_entry_range = all_scalars.entry_range(); - - if !matches!( - all_scalars.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? - } + let all_scalars_indices = || { + all_scalar_chunks + .iter() + .flat_map(|chunk| { + chunk.iter_component_indices(&query.timeline(), &Scalar::name()) + }) + .map(|index| (index, ())) + }; // Allocate all points. - points = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|(data_time, _)| PlotPoint { - time: data_time.as_i64(), - ..default_point.clone() - }) - .collect_vec(); - - if cfg!(debug_assertions) { - for ps in points.windows(2) { - assert!( - ps[0].time <= ps[1].time, - "scalars should be sorted already when extracted from the cache, got p0 at {} and p1 at {}\n{:?}", - ps[0].time, ps[1].time, - points.iter().map(|p| p.time).collect_vec(), - ); - } + { + re_tracing::profile_scope!("alloc"); + + points = all_scalar_chunks + .iter() + .flat_map(|chunk| { + chunk.iter_component_indices(&query.timeline(), &Scalar::name()) + }) + .map(|(data_time, _)| { + debug_assert_eq!(Scalar::arrow_datatype(), ArrowDatatype::Float64); + + PlotPoint { + time: data_time.as_i64(), + ..default_point.clone() + } + }) + .collect_vec(); } // Fill in values. - for (i, scalars) in all_scalars - .range_data(all_scalars_entry_range.clone()) - .enumerate() { - if scalars.len() > 1 { - re_log::warn_once!( - "found a scalar batch in {entity_path:?} -- those have no effect" - ); - } else if scalars.is_empty() { - points[i].attrs.kind = PlotSeriesKind::Clear; - } else { - points[i].value = scalars.first().map_or(0.0, |s| *s.0); - } + re_tracing::profile_scope!("fill values"); + + debug_assert_eq!(Scalar::arrow_datatype(), ArrowDatatype::Float64); + let mut i = 0; + all_scalar_chunks + .iter() + .flat_map(|chunk| chunk.iter_primitive::(&Scalar::name())) + .for_each(|values| { + if !values.is_empty() { + if values.len() > 1 { + re_log::warn_once!( + "found a scalar batch in {entity_path:?} -- those have no effect" + ); + } + + points[i].value = values[0]; + } else { + points[i].attrs.kind = PlotSeriesKind::Clear; + } + + i += 1; + }); } // Fill in colors. 
- // TODO(jleibs): Handle Err values. - if let Ok(all_colors) = results.get_or_empty_dense::(resolver) { - if !matches!( - all_colors.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? + { + re_tracing::profile_scope!("fill colors"); + + debug_assert_eq!(Color::arrow_datatype(), ArrowDatatype::UInt32); + + fn map_raw_color(raw: &[u32]) -> Option { + raw.first().map(|c| { + let [a, b, g, r] = c.to_le_bytes(); + if a == 255 { + // Common-case optimization + re_renderer::Color32::from_rgb(r, g, b) + } else { + re_renderer::Color32::from_rgba_unmultiplied(r, g, b, a) + } + }) } - let all_scalars_indexed = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|index| (index, ())); - - let all_frames = - re_query::range_zip_1x1(all_scalars_indexed, all_colors.range_indexed()) - .enumerate(); - - for (i, (_index, _scalars, colors)) in all_frames { - if let Some(color) = colors.and_then(|colors| { - colors.first().map(|c| { - let [r, g, b, a] = c.to_array(); - if a == 255 { - // Common-case optimization - re_renderer::Color32::from_rgb(r, g, b) - } else { - re_renderer::Color32::from_rgba_unmultiplied(r, g, b, a) + if let Some(all_color_chunks) = results.get_required_chunks(&Color::name()) { + if all_color_chunks.len() == 1 && all_color_chunks[0].is_static() { + re_tracing::profile_scope!("override fast path"); + + let color = all_color_chunks[0] + .iter_primitive::(&Color::name()) + .next() + .and_then(map_raw_color); + + if let Some(color) = color { + points.iter_mut().for_each(|p| p.attrs.color = color); + } + } else { + re_tracing::profile_scope!("standard path"); + + let all_colors = all_color_chunks.iter().flat_map(|chunk| { + itertools::izip!( + chunk.iter_component_indices(&query.timeline(), &Color::name()), + chunk.iter_primitive::(&Color::name()) + ) + }); + + let all_frames = + re_query2::range_zip_1x1(all_scalars_indices(), all_colors).enumerate(); + + all_frames.for_each(|(i, (_index, _scalars, colors))| { + if let Some(color) = colors.and_then(map_raw_color) { + points[i].attrs.color = color; } - }) - }) { - points[i].attrs.color = color; + }); } } } // Fill in stroke widths - // TODO(jleibs): Handle Err values. - if let Ok(all_stroke_widths) = results.get_or_empty_dense::(resolver) { - if !matches!( - all_stroke_widths.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? 
- } - - let all_scalars_indexed = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|index| (index, ())); + { + re_tracing::profile_scope!("fill stroke widths"); - let all_frames = - re_query::range_zip_1x1(all_scalars_indexed, all_stroke_widths.range_indexed()) - .enumerate(); + debug_assert_eq!(StrokeWidth::arrow_datatype(), ArrowDatatype::Float32); - for (i, (_index, _scalars, stroke_widths)) in all_frames { - if let Some(stroke_width) = - stroke_widths.and_then(|stroke_widths| stroke_widths.first().map(|r| *r.0)) + if let Some(all_stroke_width_chunks) = + results.get_required_chunks(&StrokeWidth::name()) + { + if all_stroke_width_chunks.len() == 1 && all_stroke_width_chunks[0].is_static() { - points[i].attrs.radius_ui = 0.5 * stroke_width; + re_tracing::profile_scope!("override fast path"); + + let stroke_width = all_stroke_width_chunks[0] + .iter_primitive::(&StrokeWidth::name()) + .next() + .and_then(|stroke_widths| stroke_widths.first().copied()); + + if let Some(stroke_width) = stroke_width { + points + .iter_mut() + .for_each(|p| p.attrs.radius_ui = stroke_width * 0.5); + } + } else { + re_tracing::profile_scope!("standard path"); + + let all_stroke_widths = all_stroke_width_chunks.iter().flat_map(|chunk| { + itertools::izip!( + chunk.iter_component_indices( + &query.timeline(), + &StrokeWidth::name() + ), + chunk.iter_primitive::(&StrokeWidth::name()) + ) + }); + + let all_frames = + re_query2::range_zip_1x1(all_scalars_indices(), all_stroke_widths) + .enumerate(); + + all_frames.for_each(|(i, (_index, _scalars, stroke_widths))| { + if let Some(stroke_width) = stroke_widths + .and_then(|stroke_widths| stroke_widths.first().copied()) + { + points[i].attrs.radius_ui = stroke_width * 0.5; + } + }); } } } // Extract the series name let series_name = results - .get_or_empty_dense::(resolver) - .ok() - .and_then(|all_series_name| { - all_series_name - .range_data(all_scalars_entry_range.clone()) - .next() - .and_then(|name| name.first().cloned()) - }) + .get_optional_chunks(&Name::name()) + .iter() + .find(|chunk| !chunk.is_empty()) + .and_then(|chunk| chunk.component_mono::(0)?.ok()) .unwrap_or_else(|| self.fallback_for(&query_ctx)); // Now convert the `PlotPoints` into `Vec` let aggregator = results - .get_or_empty_dense::(resolver) - .ok() - .and_then(|result| { - result - .range_data(all_scalars_entry_range.clone()) - .next() - .and_then(|aggregator| aggregator.first().copied()) - }) + .get_optional_chunks(&AggregationPolicy::name()) + .iter() + .find(|chunk| !chunk.is_empty()) + .and_then(|chunk| chunk.component_mono::(0)?.ok()) // TODO(andreas): Relying on the default==placeholder here instead of going through a fallback provider. // This is fine, because we know there's no `TypedFallbackProvider`, but wrong if one were to be added. .unwrap_or_default(); + + // This is _almost_ sorted already: all the individual chunks are sorted, but we still + // have to deal with overlap chunks. 
+ { + re_tracing::profile_scope!("sort"); + points.sort_by_key(|p| p.time); + } + points_to_series( &data_result.entity_path, time_per_pixel, @@ -354,7 +394,5 @@ impl SeriesLineSystem { all_series, ); } - - Ok(()) } } diff --git a/crates/viewer/re_space_view_time_series/src/point_visualizer_system.rs b/crates/viewer/re_space_view_time_series/src/point_visualizer_system.rs index 0f3031ce0a9d..f6eb1b62de5b 100644 --- a/crates/viewer/re_space_view_time_series/src/point_visualizer_system.rs +++ b/crates/viewer/re_space_view_time_series/src/point_visualizer_system.rs @@ -1,10 +1,10 @@ use itertools::Itertools as _; -use re_query::{PromiseResult, QueryError}; -use re_space_view::range_with_blueprint_resolved_data; +use re_space_view::range_with_blueprint_resolved_data2; use re_types::{ archetypes::{self, SeriesPoint}, components::{Color, MarkerShape, MarkerSize, Name, Scalar}, + external::arrow2::datatypes::DataType as ArrowDatatype, Archetype as _, Loggable as _, }; use re_viewer_context::{ @@ -12,11 +12,10 @@ use re_viewer_context::{ TypedComponentFallbackProvider, ViewContext, ViewQuery, VisualizerQueryInfo, VisualizerSystem, }; -use crate::util::{ - determine_plot_bounds_and_time_per_pixel, determine_time_range, points_to_series, +use crate::{ + util::{determine_plot_bounds_and_time_per_pixel, determine_time_range, points_to_series}, + ScatterAttrs, {PlotPoint, PlotPointAttrs, PlotSeries, PlotSeriesKind}, }; -use crate::ScatterAttrs; -use crate::{PlotPoint, PlotPointAttrs, PlotSeries, PlotSeriesKind}; /// The system for rendering [`SeriesPoint`] archetypes. #[derive(Default, Debug)] @@ -52,10 +51,8 @@ impl VisualizerSystem for SeriesPointSystem { ) -> Result, SpaceViewSystemExecutionError> { re_tracing::profile_function!(); - match self.load_scalars(ctx, query) { - Ok(_) | Err(QueryError::PrimaryNotFound(_)) => Ok(Vec::new()), - Err(err) => Err(err.into()), - } + self.load_scalars(ctx, query); + Ok(Vec::new()) } fn as_any(&self) -> &dyn std::any::Any { @@ -91,15 +88,9 @@ impl TypedComponentFallbackProvider for SeriesPointSystem { re_viewer_context::impl_component_fallback_provider!(SeriesPointSystem => [Color, MarkerSize, Name]); impl SeriesPointSystem { - fn load_scalars( - &mut self, - ctx: &ViewContext<'_>, - view_query: &ViewQuery<'_>, - ) -> Result<(), QueryError> { + fn load_scalars(&mut self, ctx: &ViewContext<'_>, view_query: &ViewQuery<'_>) { re_tracing::profile_function!(); - let resolver = ctx.recording().resolver(); - let (plot_bounds, time_per_pixel) = determine_plot_bounds_and_time_per_pixel(ctx.viewer_ctx, view_query); @@ -144,14 +135,14 @@ impl SeriesPointSystem { ); { - use re_space_view::RangeResultsExt as _; + use re_space_view::RangeResultsExt2 as _; re_tracing::profile_scope!("primary", &data_result.entity_path.to_string()); let entity_path = &data_result.entity_path; let query = re_chunk_store::RangeQuery::new(view_query.timeline, time_range); - let results = range_with_blueprint_resolved_data( + let results = range_with_blueprint_resolved_data2( ctx, None, &query, @@ -166,164 +157,242 @@ impl SeriesPointSystem { ); // If we have no scalars, we can't do anything. 
- let Some(all_scalars) = results.get_required_component_dense::(resolver) - else { - return Ok(()); + let Some(all_scalar_chunks) = results.get_required_chunks(&Scalar::name()) else { + return; }; - let all_scalars = all_scalars?; - - let all_scalars_entry_range = all_scalars.entry_range(); - - if !matches!( - all_scalars.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? - } + let all_scalars_indices = || { + all_scalar_chunks + .iter() + .flat_map(|chunk| { + chunk.iter_component_indices(&query.timeline(), &Scalar::name()) + }) + .map(|index| (index, ())) + }; // Allocate all points. - points = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|(data_time, _)| PlotPoint { - time: data_time.as_i64(), - ..default_point.clone() - }) - .collect_vec(); - - if cfg!(debug_assertions) { - for ps in points.windows(2) { - assert!( - ps[0].time <= ps[1].time, - "scalars should be sorted already when extracted from the cache, got p0 at {} and p1 at {}\n{:?}", - ps[0].time, ps[1].time, - points.iter().map(|p| p.time).collect_vec(), - ); - } + { + re_tracing::profile_scope!("alloc"); + + points = all_scalar_chunks + .iter() + .flat_map(|chunk| { + chunk.iter_component_indices(&query.timeline(), &Scalar::name()) + }) + .map(|(data_time, _)| { + debug_assert_eq!(Scalar::arrow_datatype(), ArrowDatatype::Float64); + + PlotPoint { + time: data_time.as_i64(), + ..default_point.clone() + } + }) + .collect_vec(); } // Fill in values. - for (i, scalars) in all_scalars - .range_data(all_scalars_entry_range.clone()) - .enumerate() { - if scalars.len() > 1 { - re_log::warn_once!( - "found a scalar batch in {entity_path:?} -- those have no effect" - ); - } else if scalars.is_empty() { - points[i].attrs.kind = PlotSeriesKind::Clear; - } else { - points[i].value = scalars.first().map_or(0.0, |s| *s.0); - } - } + re_tracing::profile_scope!("fill values"); + + debug_assert_eq!(Scalar::arrow_datatype(), ArrowDatatype::Float64); + let mut i = 0; + all_scalar_chunks + .iter() + .flat_map(|chunk| chunk.iter_primitive::(&Scalar::name())) + .for_each(|values| { + if !values.is_empty() { + if values.len() > 1 { + re_log::warn_once!( + "found a scalar batch in {entity_path:?} -- those have no effect" + ); + } - // Make it as clear as possible to the optimizer that some parameters - // go completely unused as soon as overrides have been defined. + points[i].value = values[0]; + } else { + points[i].attrs.kind = PlotSeriesKind::Clear; + } + + i += 1; + }); + } // Fill in colors. - // TODO(jleibs): Handle Err values. - if let Ok(all_colors) = results.get_or_empty_dense::(resolver) { - if !matches!( - all_colors.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? 
+ { + re_tracing::profile_scope!("fill colors"); + + debug_assert_eq!(Color::arrow_datatype(), ArrowDatatype::UInt32); + + fn map_raw_color(raw: &[u32]) -> Option { + raw.first().map(|c| { + let [a, b, g, r] = c.to_le_bytes(); + if a == 255 { + // Common-case optimization + re_renderer::Color32::from_rgb(r, g, b) + } else { + re_renderer::Color32::from_rgba_unmultiplied(r, g, b, a) + } + }) } - let all_scalars_indexed = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|index| (index, ())); - - let all_frames = - re_query::range_zip_1x1(all_scalars_indexed, all_colors.range_indexed()) - .enumerate(); - - for (i, (_index, _scalars, colors)) in all_frames { - if let Some(color) = colors.and_then(|colors| { - colors.first().map(|c| { - let [r, g, b, a] = c.to_array(); - if a == 255 { - // Common-case optimization - re_renderer::Color32::from_rgb(r, g, b) - } else { - re_renderer::Color32::from_rgba_unmultiplied(r, g, b, a) + if let Some(all_color_chunks) = results.get_required_chunks(&Color::name()) { + if all_color_chunks.len() == 1 && all_color_chunks[0].is_static() { + re_tracing::profile_scope!("override fast path"); + + let color = all_color_chunks[0] + .iter_primitive::(&Color::name()) + .next() + .and_then(map_raw_color); + + if let Some(color) = color { + points.iter_mut().for_each(|p| p.attrs.color = color); + } + } else { + re_tracing::profile_scope!("standard path"); + + let all_colors = all_color_chunks.iter().flat_map(|chunk| { + itertools::izip!( + chunk.iter_component_indices(&query.timeline(), &Color::name()), + chunk.iter_primitive::(&Color::name()) + ) + }); + + let all_frames = + re_query2::range_zip_1x1(all_scalars_indices(), all_colors) + .enumerate(); + + all_frames.for_each(|(i, (_index, _scalars, colors))| { + if let Some(color) = colors.and_then(map_raw_color) { + points[i].attrs.color = color; } - }) - }) { - points[i].attrs.color = color; + }); } } } // Fill in marker sizes - // TODO(jleibs): Handle Err values. - if let Ok(all_marker_sizes) = results.get_or_empty_dense::(resolver) { - if !matches!( - all_marker_sizes.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? 
- } - - let all_scalars_indexed = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|index| (index, ())); + { + re_tracing::profile_scope!("fill marker sizes"); - let all_frames = re_query::range_zip_1x1( - all_scalars_indexed, - all_marker_sizes.range_indexed(), - ) - .enumerate(); + debug_assert_eq!(MarkerSize::arrow_datatype(), ArrowDatatype::Float32); - for (i, (_index, _scalars, marker_sizes)) in all_frames { - if let Some(marker_size) = - marker_sizes.and_then(|marker_sizes| marker_sizes.first().copied()) + if let Some(all_marker_size_chunks) = + results.get_required_chunks(&MarkerSize::name()) + { + if all_marker_size_chunks.len() == 1 + && all_marker_size_chunks[0].is_static() { - points[i].attrs.radius_ui = *marker_size.0; + re_tracing::profile_scope!("override fast path"); + + let marker_size = all_marker_size_chunks[0] + .iter_primitive::(&MarkerSize::name()) + .next() + .and_then(|marker_sizes| marker_sizes.first().copied()); + + if let Some(marker_size) = marker_size { + points + .iter_mut() + .for_each(|p| p.attrs.radius_ui = marker_size * 0.5); + } + } else { + re_tracing::profile_scope!("standard path"); + + let all_marker_sizes = + all_marker_size_chunks.iter().flat_map(|chunk| { + itertools::izip!( + chunk.iter_component_indices( + &query.timeline(), + &MarkerSize::name() + ), + chunk.iter_primitive::(&MarkerSize::name()) + ) + }); + + let all_frames = + re_query2::range_zip_1x1(all_scalars_indices(), all_marker_sizes) + .enumerate(); + + all_frames.for_each(|(i, (_index, _scalars, marker_sizes))| { + if let Some(marker_size) = marker_sizes + .and_then(|marker_sizes| marker_sizes.first().copied()) + { + points[i].attrs.radius_ui = marker_size * 0.5; + } + }); } } } - // Fill in marker sizes - // TODO(jleibs): Handle Err values. - if let Ok(all_marker_shapes) = results.get_or_empty_dense::(resolver) { - if !matches!( - all_marker_shapes.status(), - (PromiseResult::Ready(()), PromiseResult::Ready(())) - ) { - // TODO(#5607): what should happen if the promise is still pending? 
- } - - let all_scalars_indexed = all_scalars - .range_indices(all_scalars_entry_range.clone()) - .map(|index| (index, ())); - - let all_frames = re_query::range_zip_1x1( - all_scalars_indexed, - all_marker_shapes.range_indexed(), - ) - .enumerate(); + // Fill in marker shapes + { + re_tracing::profile_scope!("fill marker shapes"); - for (i, (_index, _scalars, marker_shapes)) in all_frames { - if let Some(marker) = - marker_shapes.and_then(|marker_shapes| marker_shapes.first().copied()) + if let Some(all_marker_shapes_chunks) = + results.get_required_chunks(&MarkerShape::name()) + { + if all_marker_shapes_chunks.len() == 1 + && all_marker_shapes_chunks[0].is_static() { - points[i].attrs.kind = PlotSeriesKind::Scatter(ScatterAttrs { marker }); + re_tracing::profile_scope!("override fast path"); + + let marker_shape = all_marker_shapes_chunks[0] + .iter_component::() + .into_iter() + .next() + .and_then(|marker_shapes| marker_shapes.first().copied()); + + if let Some(marker_shape) = marker_shape { + for p in &mut points { + p.attrs.kind = PlotSeriesKind::Scatter(ScatterAttrs { + marker: marker_shape, + }); + } + } + } else { + re_tracing::profile_scope!("standard path"); + + let mut all_marker_shapes_iters = all_marker_shapes_chunks + .iter() + .map(|chunk| chunk.iter_component::()) + .collect_vec(); + let all_marker_shapes_indexed = { + let all_marker_shapes = all_marker_shapes_iters + .iter_mut() + .flat_map(|it| it.into_iter()); + let all_marker_shapes_indices = + all_marker_shapes_chunks.iter().flat_map(|chunk| { + chunk.iter_component_indices( + &query.timeline(), + &MarkerShape::name(), + ) + }); + itertools::izip!(all_marker_shapes_indices, all_marker_shapes) + }; + + let all_frames = re_query2::range_zip_1x1( + all_scalars_indices(), + all_marker_shapes_indexed, + ) + .enumerate(); + + all_frames.for_each(|(i, (_index, _scalars, marker_shapes))| { + if let Some(marker_shape) = marker_shapes + .and_then(|marker_shapes| marker_shapes.first().copied()) + { + points[i].attrs.kind = PlotSeriesKind::Scatter(ScatterAttrs { + marker: marker_shape, + }); + } + }); } } } // Extract the series name let series_name = results - .get_or_empty_dense::(resolver) - .ok() - .and_then(|all_series_name| { - all_series_name - .range_data(all_scalars_entry_range.clone()) - .next() - .and_then(|name| name.first().cloned()) - }) + .get_optional_chunks(&Name::name()) + .iter() + .find(|chunk| !chunk.is_empty()) + .and_then(|chunk| chunk.component_mono::(0)?.ok()) .unwrap_or_else(|| self.fallback_for(&query_ctx)); // Now convert the `PlotPoints` into `Vec` @@ -340,7 +409,5 @@ impl SeriesPointSystem { ); } } - - Ok(()) } } From 6bce702721c739db826b3b84e36ef1a58862394f Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Thu, 25 Jul 2024 17:49:13 +0200 Subject: [PATCH 2/7] plot_dashboard_stress: --blueprint --- tests/python/plot_dashboard_stress/main.py | 81 ++++++++++++++++++++-- 1 file changed, 74 insertions(+), 7 deletions(-) diff --git a/tests/python/plot_dashboard_stress/main.py b/tests/python/plot_dashboard_stress/main.py index 36c55fe01359..cb87596158d8 100755 --- a/tests/python/plot_dashboard_stress/main.py +++ b/tests/python/plot_dashboard_stress/main.py @@ -29,10 +29,35 @@ rr.script_add_args(parser) parser.add_argument("--num-plots", type=int, default=1, help="How many different plots?") -parser.add_argument("--num-series-per-plot", type=int, default=1, help="How many series in each single plot?") -parser.add_argument("--num-points-per-series", type=int, default=100000, help="How many points in each 
single series?") -parser.add_argument("--freq", type=float, default=1000, help="Frequency of logging (applies to all series)") -parser.add_argument("--temporal-batch-size", type=int, default=None, help="Number of rows to include in each log call") +parser.add_argument( + "--num-series-per-plot", + type=int, + default=1, + help="How many series in each single plot?", +) +parser.add_argument( + "--num-points-per-series", + type=int, + default=100000, + help="How many points in each single series?", +) +parser.add_argument( + "--freq", + type=float, + default=1000, + help="Frequency of logging (applies to all series)", +) +parser.add_argument( + "--temporal-batch-size", + type=int, + default=None, + help="Number of rows to include in each log call", +) +parser.add_argument( + "--blueprint", + action="store_true", + help="Setup a blueprint for a 5s window", +) order = [ "forwards", @@ -40,7 +65,11 @@ "random", ] parser.add_argument( - "--order", type=str, default=order[0], help="What order to log the data in (applies to all series)", choices=order + "--order", + type=str, + default=order[0], + help="What order to log the data in (applies to all series)", + choices=order, ) series_type = [ @@ -68,6 +97,37 @@ def main() -> None: plot_paths = [f"plot_{i}" for i in range(0, args.num_plots)] series_paths = [f"series_{i}" for i in range(0, args.num_series_per_plot)] + if args.blueprint: + from rerun.blueprint import ( + Blueprint, + BlueprintPanel, + Grid, + SelectionPanel, + TimeRangeBoundary, + TimeSeriesView, + VisibleTimeRange, + ) + + print("logging blueprint!") + rr.send_blueprint( + Blueprint( + Grid(*[ + TimeSeriesView( + name=p, + origin=f"/{p}", + time_ranges=VisibleTimeRange( + "sim_time", + start=TimeRangeBoundary.cursor_relative(offset=rr.TimeInt(seconds=-2.5)), + end=TimeRangeBoundary.cursor_relative(offset=rr.TimeInt(seconds=2.5)), + ), + ) + for p in plot_paths + ]), + BlueprintPanel(state="collapsed"), + SelectionPanel(state="collapsed"), + ) + ) + time_per_sim_step = 1.0 / args.freq stop_time = args.num_points_per_series * time_per_sim_step @@ -110,7 +170,10 @@ def main() -> None: ticks = enumerate(sim_times) else: offsets = range(0, len(sim_times), args.temporal_batch_size) - ticks = zip(offsets, (sim_times[offset : offset + args.temporal_batch_size] for offset in offsets)) + ticks = zip( + offsets, + (sim_times[offset : offset + args.temporal_batch_size] for offset in offsets), + ) time_batch = None @@ -129,7 +192,11 @@ def main() -> None: else: value_index = slice(index, index + args.temporal_batch_size) value_batch = rr.components.ScalarBatch(values[value_index, plot_idx, series_idx]) - rr.log_temporal_batch(f"{plot_path}/{series_path}", times=[time_batch], components=[value_batch]) + rr.log_temporal_batch( + f"{plot_path}/{series_path}", + times=[time_batch], + components=[value_batch], + ) # Progress report From 83074d41c91eb43e8c083eb7400aabbdf7232cdb Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Mon, 29 Jul 2024 14:41:28 +0200 Subject: [PATCH 3/7] review --- examples/python/blueprint/blueprint.py | 26 +++++++++++++-------- tests/python/plot_dashboard_stress/main.py | 27 ++++++++-------------- 2 files changed, 25 insertions(+), 28 deletions(-) diff --git a/examples/python/blueprint/blueprint.py b/examples/python/blueprint/blueprint.py index 62310fd2fcb0..d13f4f3f80dc 100755 --- a/examples/python/blueprint/blueprint.py +++ b/examples/python/blueprint/blueprint.py @@ -7,7 +7,7 @@ import numpy as np import rerun as rr # pip install rerun-sdk -from rerun.blueprint import 
Blueprint, BlueprintPanel, Grid, SelectionPanel, Spatial2DView, TimePanel +import rerun.blueprint as rrb def main() -> None: @@ -26,10 +26,10 @@ def main() -> None: # # If auto_space_views is True, the blueprint will automatically add one of the heuristic # space views, which will include the image and both rectangles. - blueprint = Blueprint( - Grid( - Spatial2DView(name="Rect 0", origin="/", contents=["image", "rect/0"]), - Spatial2DView( + blueprint = rrb.Blueprint( + rrb.Grid( + rrb.Spatial2DView(name="Rect 0", origin="/", contents=["image", "rect/0"]), + rrb.Spatial2DView( name="Rect 1", origin="/", contents=["/**"], @@ -37,9 +37,9 @@ def main() -> None: overrides={"rect/0": [rr.components.Radius(1)]}, # Override the radius of rect/0 to be 1 ), ), - BlueprintPanel(state="collapsed"), - SelectionPanel(state="collapsed"), - TimePanel(state="collapsed"), + rrb.BlueprintPanel(state="collapsed"), + rrb.SelectionPanel(state="collapsed"), + rrb.TimePanel(state="collapsed"), auto_space_views=args.auto_space_views, ) @@ -49,8 +49,14 @@ def main() -> None: for i in range(8): img[(i * 16) + 4 : (i * 16) + 12, :] = (0, 0, 200) rr.log("image", rr.Image(img)) - rr.log("rect/0", rr.Boxes2D(mins=[16, 16], sizes=[64, 64], labels="Rect0", colors=(255, 0, 0))) - rr.log("rect/1", rr.Boxes2D(mins=[48, 48], sizes=[64, 64], labels="Rect1", colors=(0, 255, 0))) + rr.log( + "rect/0", + rr.Boxes2D(mins=[16, 16], sizes=[64, 64], labels="Rect0", colors=(255, 0, 0)), + ) + rr.log( + "rect/1", + rr.Boxes2D(mins=[48, 48], sizes=[64, 64], labels="Rect1", colors=(0, 255, 0)), + ) if __name__ == "__main__": diff --git a/tests/python/plot_dashboard_stress/main.py b/tests/python/plot_dashboard_stress/main.py index cb87596158d8..f34c2636e1a5 100755 --- a/tests/python/plot_dashboard_stress/main.py +++ b/tests/python/plot_dashboard_stress/main.py @@ -24,6 +24,7 @@ import numpy as np import rerun as rr # pip install rerun-sdk +import rerun.blueprint as rrb parser = argparse.ArgumentParser(description="Plot dashboard stress test") rr.script_add_args(parser) @@ -98,33 +99,23 @@ def main() -> None: series_paths = [f"series_{i}" for i in range(0, args.num_series_per_plot)] if args.blueprint: - from rerun.blueprint import ( - Blueprint, - BlueprintPanel, - Grid, - SelectionPanel, - TimeRangeBoundary, - TimeSeriesView, - VisibleTimeRange, - ) - print("logging blueprint!") rr.send_blueprint( - Blueprint( - Grid(*[ - TimeSeriesView( + rrb.Blueprint( + rrb.Grid(*[ + rrb.TimeSeriesView( name=p, origin=f"/{p}", - time_ranges=VisibleTimeRange( + time_ranges=rrb.VisibleTimeRange( "sim_time", - start=TimeRangeBoundary.cursor_relative(offset=rr.TimeInt(seconds=-2.5)), - end=TimeRangeBoundary.cursor_relative(offset=rr.TimeInt(seconds=2.5)), + start=rrb.TimeRangeBoundary.cursor_relative(offset=rr.TimeInt(seconds=-2.5)), + end=rrb.TimeRangeBoundary.cursor_relative(offset=rr.TimeInt(seconds=2.5)), ), ) for p in plot_paths ]), - BlueprintPanel(state="collapsed"), - SelectionPanel(state="collapsed"), + rrb.BlueprintPanel(state="collapsed"), + rrb.SelectionPanel(state="collapsed"), ) ) From bd0269b83242bda0edb15f4518a58af902578cfc Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Thu, 25 Jul 2024 17:52:41 +0200 Subject: [PATCH 4/7] introduce transformables subscriber for faster transform-context --- .../src/contexts/transform_context.rs | 13 +- .../viewer/re_space_view_spatial/src/lib.rs | 1 + .../src/transformables.rs | 116 ++++++++++++++++++ .../re_space_view_spatial/src/view_2d.rs | 1 + .../re_space_view_spatial/src/view_3d.rs | 1 + 5 files 
changed, 131 insertions(+), 1 deletion(-) create mode 100644 crates/viewer/re_space_view_spatial/src/transformables.rs diff --git a/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs b/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs index 2fe2ba8ba73c..32eaf1639b8f 100644 --- a/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs +++ b/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs @@ -433,7 +433,18 @@ fn transform_at( } } - let transform3d = get_parent_from_child_transform(entity_path, entity_db, query); + // If this entity does not contain any `Transform3D`-related data at all, there's no + // point in running actual queries. + let is_potentially_transformed = + crate::transformables::Transformables::access(entity_db.store_id(), |transformables| { + transformables.is_potentially_transformed(entity_path) + }) + .unwrap_or(false); + let transform3d = is_potentially_transformed + .then(|| get_parent_from_child_transform(entity_path, entity_db, query)) + .flatten(); + + // let transform3d = get_parent_from_child_transform(entity_path, entity_db, query); let pinhole = pinhole.map(|(image_from_camera, camera_xyz)| { // Everything under a pinhole camera is a 2D projection, thus doesn't actually have a proper 3D representation. diff --git a/crates/viewer/re_space_view_spatial/src/lib.rs b/crates/viewer/re_space_view_spatial/src/lib.rs index b50d951dbe2a..47044ff1b949 100644 --- a/crates/viewer/re_space_view_spatial/src/lib.rs +++ b/crates/viewer/re_space_view_spatial/src/lib.rs @@ -18,6 +18,7 @@ mod proc_mesh; mod scene_bounding_boxes; mod space_camera_3d; mod spatial_topology; +mod transformables; mod ui; mod ui_2d; mod ui_3d; diff --git a/crates/viewer/re_space_view_spatial/src/transformables.rs b/crates/viewer/re_space_view_spatial/src/transformables.rs new file mode 100644 index 000000000000..6d87c264ce1d --- /dev/null +++ b/crates/viewer/re_space_view_spatial/src/transformables.rs @@ -0,0 +1,116 @@ +use ahash::HashMap; +use once_cell::sync::OnceCell; + +use nohash_hasher::IntSet; +use re_chunk_store::{ + ChunkStore, ChunkStoreDiffKind, ChunkStoreEvent, ChunkStoreSubscriber, + ChunkStoreSubscriberHandle, +}; +use re_log_types::{EntityPath, StoreId}; +use re_types::ComponentName; + +// --- + +/// Keeps track of which entities have had any `Transform3D`-related data on any timeline at any +/// point in time. +/// +/// This is used to optimize queries in the `TransformContext`, so that we don't unnecessarily pay +/// for the fixed overhead of all the query layers when we know for a fact that there won't be any +/// data there. +/// This is a huge performance improvement in practice, especially in recordings with many entities. +#[derive(Default)] +pub struct Transformables { + /// Which entities have had any of these components at any point in time. + entities: IntSet, +} + +impl Transformables { + /// Accesses the spatial topology for a given store. + #[inline] + pub fn access(store_id: &StoreId, f: impl FnOnce(&Self) -> T) -> Option { + ChunkStore::with_subscriber_once( + TransformablesStoreSubscriber::subscription_handle(), + move |susbcriber: &TransformablesStoreSubscriber| { + susbcriber.per_store.get(store_id).map(f) + }, + ) + .flatten() + } + + #[inline] + pub fn is_potentially_transformed(&self, entity_path: &EntityPath) -> bool { + self.entities.contains(entity_path) + } +} + +// --- + +pub struct TransformablesStoreSubscriber { + /// The components of interest. 
+ components: IntSet, + + per_store: HashMap, +} + +impl Default for TransformablesStoreSubscriber { + #[inline] + fn default() -> Self { + use re_types::Archetype as _; + let components = re_types::archetypes::Transform3D::all_components() + .iter() + .copied() + .collect(); + + Self { + components, + per_store: Default::default(), + } + } +} + +impl TransformablesStoreSubscriber { + /// Accesses the global store subscriber. + /// + /// Lazily registers the subscriber if it hasn't been registered yet. + pub fn subscription_handle() -> ChunkStoreSubscriberHandle { + static SUBSCRIPTION: OnceCell = OnceCell::new(); + *SUBSCRIPTION.get_or_init(|| ChunkStore::register_subscriber(Box::::default())) + } +} + +impl ChunkStoreSubscriber for TransformablesStoreSubscriber { + #[inline] + fn name(&self) -> String { + "rerun.store_subscriber.Transformables".into() + } + + #[inline] + fn as_any(&self) -> &dyn std::any::Any { + self + } + + #[inline] + fn as_any_mut(&mut self) -> &mut dyn std::any::Any { + self + } + + fn on_events(&mut self, events: &[ChunkStoreEvent]) { + re_tracing::profile_function!(); + + for event in events + .iter() + // This is only additive, don't care about removals. + .filter(|e| e.kind == ChunkStoreDiffKind::Addition) + { + let transformables = self.per_store.entry(event.store_id.clone()).or_default(); + + for component_name in event.chunk.component_names() { + if self.components.contains(&component_name) { + transformables + .entities + .insert(event.chunk.entity_path().clone()); + } + } + } + } +} diff --git a/crates/viewer/re_space_view_spatial/src/view_2d.rs b/crates/viewer/re_space_view_spatial/src/view_2d.rs index 4ae9bd70f5ee..1824e62f3fd2 100644 --- a/crates/viewer/re_space_view_spatial/src/view_2d.rs +++ b/crates/viewer/re_space_view_spatial/src/view_2d.rs @@ -68,6 +68,7 @@ impl SpaceViewClass for SpatialSpaceView2D { ) -> Result<(), SpaceViewClassRegistryError> { // Ensure spatial topology & max image dimension is registered. crate::spatial_topology::SpatialTopologyStoreSubscriber::subscription_handle(); + crate::transformables::TransformablesStoreSubscriber::subscription_handle(); crate::max_image_dimension_subscriber::MaxImageDimensionSubscriber::subscription_handle(); register_spatial_contexts(system_registry)?; diff --git a/crates/viewer/re_space_view_spatial/src/view_3d.rs b/crates/viewer/re_space_view_spatial/src/view_3d.rs index 544701366c36..80115f199de8 100644 --- a/crates/viewer/re_space_view_spatial/src/view_3d.rs +++ b/crates/viewer/re_space_view_spatial/src/view_3d.rs @@ -74,6 +74,7 @@ impl SpaceViewClass for SpatialSpaceView3D { ) -> Result<(), SpaceViewClassRegistryError> { // Ensure spatial topology is registered. 
crate::spatial_topology::SpatialTopologyStoreSubscriber::subscription_handle(); + crate::transformables::TransformablesStoreSubscriber::subscription_handle(); register_spatial_contexts(system_registry)?; register_3d_spatial_visualizers(system_registry)?; From 23e7ee1839c98b0755a4740a7d6d478ac648ed8c Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Mon, 29 Jul 2024 14:46:44 +0200 Subject: [PATCH 5/7] review --- .../src/contexts/transform_context.rs | 11 ++++---- .../viewer/re_space_view_spatial/src/lib.rs | 2 +- ...bles.rs => transform_component_tracker.rs} | 25 ++++++++++--------- .../re_space_view_spatial/src/view_2d.rs | 2 +- .../re_space_view_spatial/src/view_3d.rs | 2 +- 5 files changed, 22 insertions(+), 20 deletions(-) rename crates/viewer/re_space_view_spatial/src/{transformables.rs => transform_component_tracker.rs} (79%) diff --git a/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs b/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs index 32eaf1639b8f..f196f4e36c69 100644 --- a/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs +++ b/crates/viewer/re_space_view_spatial/src/contexts/transform_context.rs @@ -436,16 +436,17 @@ fn transform_at( // If this entity does not contain any `Transform3D`-related data at all, there's no // point in running actual queries. let is_potentially_transformed = - crate::transformables::Transformables::access(entity_db.store_id(), |transformables| { - transformables.is_potentially_transformed(entity_path) - }) + crate::transform_component_tracker::TransformComponentTracker::access( + entity_db.store_id(), + |transform_component_tracker| { + transform_component_tracker.is_potentially_transformed(entity_path) + }, + ) .unwrap_or(false); let transform3d = is_potentially_transformed .then(|| get_parent_from_child_transform(entity_path, entity_db, query)) .flatten(); - // let transform3d = get_parent_from_child_transform(entity_path, entity_db, query); - let pinhole = pinhole.map(|(image_from_camera, camera_xyz)| { // Everything under a pinhole camera is a 2D projection, thus doesn't actually have a proper 3D representation. // Our visualization interprets this as looking at a 2D image plane from a single point (the pinhole). diff --git a/crates/viewer/re_space_view_spatial/src/lib.rs b/crates/viewer/re_space_view_spatial/src/lib.rs index 47044ff1b949..66300a10b248 100644 --- a/crates/viewer/re_space_view_spatial/src/lib.rs +++ b/crates/viewer/re_space_view_spatial/src/lib.rs @@ -18,7 +18,7 @@ mod proc_mesh; mod scene_bounding_boxes; mod space_camera_3d; mod spatial_topology; -mod transformables; +mod transform_component_tracker; mod ui; mod ui_2d; mod ui_3d; diff --git a/crates/viewer/re_space_view_spatial/src/transformables.rs b/crates/viewer/re_space_view_spatial/src/transform_component_tracker.rs similarity index 79% rename from crates/viewer/re_space_view_spatial/src/transformables.rs rename to crates/viewer/re_space_view_spatial/src/transform_component_tracker.rs index 6d87c264ce1d..adb8a741b895 100644 --- a/crates/viewer/re_space_view_spatial/src/transformables.rs +++ b/crates/viewer/re_space_view_spatial/src/transform_component_tracker.rs @@ -19,18 +19,18 @@ use re_types::ComponentName; /// data there. /// This is a huge performance improvement in practice, especially in recordings with many entities. #[derive(Default)] -pub struct Transformables { +pub struct TransformComponentTracker { /// Which entities have had any of these components at any point in time. 
entities: IntSet, } -impl Transformables { +impl TransformComponentTracker { /// Accesses the spatial topology for a given store. #[inline] pub fn access(store_id: &StoreId, f: impl FnOnce(&Self) -> T) -> Option { ChunkStore::with_subscriber_once( - TransformablesStoreSubscriber::subscription_handle(), - move |susbcriber: &TransformablesStoreSubscriber| { + TransformComponentTrackerStoreSubscriber::subscription_handle(), + move |susbcriber: &TransformComponentTrackerStoreSubscriber| { susbcriber.per_store.get(store_id).map(f) }, ) @@ -45,14 +45,14 @@ impl Transformables { // --- -pub struct TransformablesStoreSubscriber { +pub struct TransformComponentTrackerStoreSubscriber { /// The components of interest. components: IntSet, - per_store: HashMap, + per_store: HashMap, } -impl Default for TransformablesStoreSubscriber { +impl Default for TransformComponentTrackerStoreSubscriber { #[inline] fn default() -> Self { use re_types::Archetype as _; @@ -68,7 +68,7 @@ impl Default for TransformablesStoreSubscriber { } } -impl TransformablesStoreSubscriber { +impl TransformComponentTrackerStoreSubscriber { /// Accesses the global store subscriber. /// /// Lazily registers the subscriber if it hasn't been registered yet. @@ -78,10 +78,10 @@ impl TransformablesStoreSubscriber { } } -impl ChunkStoreSubscriber for TransformablesStoreSubscriber { +impl ChunkStoreSubscriber for TransformComponentTrackerStoreSubscriber { #[inline] fn name(&self) -> String { - "rerun.store_subscriber.Transformables".into() + "rerun.store_subscriber.TransformComponentTracker".into() } #[inline] @@ -102,11 +102,12 @@ impl ChunkStoreSubscriber for TransformablesStoreSubscriber { // This is only additive, don't care about removals. .filter(|e| e.kind == ChunkStoreDiffKind::Addition) { - let transformables = self.per_store.entry(event.store_id.clone()).or_default(); + let transform_component_tracker = + self.per_store.entry(event.store_id.clone()).or_default(); for component_name in event.chunk.component_names() { if self.components.contains(&component_name) { - transformables + transform_component_tracker .entities .insert(event.chunk.entity_path().clone()); } diff --git a/crates/viewer/re_space_view_spatial/src/view_2d.rs b/crates/viewer/re_space_view_spatial/src/view_2d.rs index 1824e62f3fd2..404e1c8971a5 100644 --- a/crates/viewer/re_space_view_spatial/src/view_2d.rs +++ b/crates/viewer/re_space_view_spatial/src/view_2d.rs @@ -68,7 +68,7 @@ impl SpaceViewClass for SpatialSpaceView2D { ) -> Result<(), SpaceViewClassRegistryError> { // Ensure spatial topology & max image dimension is registered. crate::spatial_topology::SpatialTopologyStoreSubscriber::subscription_handle(); - crate::transformables::TransformablesStoreSubscriber::subscription_handle(); + crate::transform_component_tracker::TransformComponentTrackerStoreSubscriber::subscription_handle(); crate::max_image_dimension_subscriber::MaxImageDimensionSubscriber::subscription_handle(); register_spatial_contexts(system_registry)?; diff --git a/crates/viewer/re_space_view_spatial/src/view_3d.rs b/crates/viewer/re_space_view_spatial/src/view_3d.rs index 80115f199de8..5d8d238d5e2e 100644 --- a/crates/viewer/re_space_view_spatial/src/view_3d.rs +++ b/crates/viewer/re_space_view_spatial/src/view_3d.rs @@ -74,7 +74,7 @@ impl SpaceViewClass for SpatialSpaceView3D { ) -> Result<(), SpaceViewClassRegistryError> { // Ensure spatial topology is registered. 
crate::spatial_topology::SpatialTopologyStoreSubscriber::subscription_handle(); - crate::transformables::TransformablesStoreSubscriber::subscription_handle(); + crate::transform_component_tracker::TransformComponentTrackerStoreSubscriber::subscription_handle(); register_spatial_contexts(system_registry)?; register_3d_spatial_visualizers(system_registry)?; From 0fd7264c31adaa1c7f48d454012c16f13b154d44 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Thu, 25 Jul 2024 17:56:52 +0200 Subject: [PATCH 6/7] rerun rrd compact: make sure blueprints end up first --- crates/top/rerun/src/run.rs | 30 +++++++++++++++++++++++------- 1 file changed, 23 insertions(+), 7 deletions(-) diff --git a/crates/top/rerun/src/run.rs b/crates/top/rerun/src/run.rs index b8644448948b..02b198eaac9f 100644 --- a/crates/top/rerun/src/run.rs +++ b/crates/top/rerun/src/run.rs @@ -9,7 +9,7 @@ use itertools::{izip, Itertools}; use re_data_source::DataSource; use re_log_types::{LogMsg, SetStoreInfo}; -use re_sdk::log::Chunk; +use re_sdk::{log::Chunk, StoreKind}; use re_smart_channel::{ReceiveSet, Receiver, SmartMessagePayload}; #[cfg(feature = "web_viewer")] @@ -661,16 +661,32 @@ fn run_compact(path_to_input_rrd: &Path, path_to_output_rrd: &Path) -> anyhow::R let mut rrd_out = std::fs::File::create(path_to_output_rrd) .with_context(|| format!("{path_to_output_rrd:?}"))?; - let messages: Result>, _> = entity_dbs - .into_values() + let messages_rbl: Result>, _> = entity_dbs + .values() + .filter(|entity_db| entity_db.store_kind() == StoreKind::Blueprint) .map(|entity_db| entity_db.to_messages(None /* time selection */)) .collect(); - let messages = messages?; - let messages = messages.iter().flatten(); + let messages_rbl = messages_rbl?; + let messages_rbl = messages_rbl.iter().flatten(); + + let messages_rrd: Result>, _> = entity_dbs + .values() + .filter(|entity_db| entity_db.store_kind() == StoreKind::Recording) + .map(|entity_db| entity_db.to_messages(None /* time selection */)) + .collect(); + let messages_rrd = messages_rrd?; + let messages_rrd = messages_rrd.iter().flatten(); let encoding_options = re_log_encoding::EncodingOptions::COMPRESSED; - re_log_encoding::encoder::encode(version, encoding_options, messages, &mut rrd_out) - .context("Message encode")?; + re_log_encoding::encoder::encode( + version, + encoding_options, + // NOTE: We want to make sure all blueprints come first, so that the viewer can immediately + // set up the viewport correctly. + messages_rbl.chain(messages_rrd), + &mut rrd_out, + ) + .context("Message encode")?; let rrd_out_size = rrd_out.metadata().ok().map(|md| md.len()); From af25ad51c781becd9855950086b3631acf951ed9 Mon Sep 17 00:00:00 2001 From: Clement Rey Date: Thu, 25 Jul 2024 17:57:52 +0200 Subject: [PATCH 7/7] finally comitting these store configs ive had in my stash for 12 years --- crates/store/re_chunk_store/src/store.rs | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/crates/store/re_chunk_store/src/store.rs b/crates/store/re_chunk_store/src/store.rs index 12b444545d14..1a421f5850a7 100644 --- a/crates/store/re_chunk_store/src/store.rs +++ b/crates/store/re_chunk_store/src/store.rs @@ -115,6 +115,20 @@ impl ChunkStoreConfig { ..Self::DEFAULT }; + /// [`Self::DEFAULT`], but with changelog disabled. + pub const CHANGELOG_DISABLED: Self = Self { + enable_changelog: false, + ..Self::DEFAULT + }; + + /// All features disabled. 
+ pub const ALL_DISABLED: Self = Self { + enable_changelog: false, + chunk_max_bytes: 0, + chunk_max_rows: 0, + chunk_max_rows_if_unsorted: 0, + }; + /// Environment variable to configure [`Self::enable_changelog`]. pub const ENV_STORE_ENABLE_CHANGELOG: &'static str = "RERUN_STORE_ENABLE_CHANGELOG";
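
A minimal usage sketch for the ChunkStoreConfig constants added in PATCH 7/7, e.g. silencing compaction and the changelog during bulk ingestion in a test or benchmark. This is not part of the change itself: `ChunkStore::new` and `StoreId::random` are assumptions about the surrounding API (they are not shown in this patch), so treat the snippet as illustrative of intent rather than as a definitive call sequence.

    use re_chunk_store::{ChunkStore, ChunkStoreConfig};
    use re_log_types::{StoreId, StoreKind};

    fn quiet_test_store() -> ChunkStore {
        // Changelog off, compaction thresholds zeroed: every logged chunk is kept
        // as-is with no background work, which keeps ingestion tests deterministic.
        let config = ChunkStoreConfig::ALL_DISABLED;

        // Assumed constructor shape; the real signature may differ.
        ChunkStore::new(StoreId::random(StoreKind::Recording), config)
    }

If only the change feed is unwanted while compaction should still run, `ChunkStoreConfig::CHANGELOG_DISABLED` from the same patch is the lighter-weight option.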