diff --git a/crates/re_viewer/src/math.rs b/crates/re_viewer/src/math.rs index 4285e2be4c6b..6eb993c53f16 100644 --- a/crates/re_viewer/src/math.rs +++ b/crates/re_viewer/src/math.rs @@ -28,6 +28,21 @@ pub fn line_segment_distance_to_point_3d([a, b]: [glam::Vec3; 2], p: glam::Vec3) line_segment_distance_sq_to_point_3d([a, b], p).sqrt() } +/// Compute the distance between a ray and a line segment. +/// +/// Returns the ray offset at which the ray is closest to the line segment. +/// (i.e. the closest point then is at `ray.origin + ray.dir * ray_closest_t_line_segment(...)`) +pub fn ray_closest_t_line_segment(ray: &macaw::Ray3, [a, b]: [glam::Vec3; 2]) -> f32 { + let (t_ray, t_segment) = ray.closest_ts(&macaw::Ray3::from_origin_dir(a, b - a)); + if t_ray.is_nan() || t_segment < 0.0 { + ray.closest_t_to_point(a) + } else if t_segment > 1.0 { + ray.closest_t_to_point(b) + } else { + t_ray + } +} + /// Returns the distance the ray traveled of the first intersection or `f32::INFINITY` on miss. pub fn ray_bbox_intersect(ray: &macaw::Ray3, bbox: &macaw::BoundingBox) -> f32 { // from https://gamedev.stackexchange.com/a/18459 @@ -59,3 +74,51 @@ fn max(a: f32, b: f32) -> f32 { pub fn ease_out(t: f32) -> f32 { 1. - (1. - t) * (1. - t) } + +#[cfg(test)] +mod tests { + use crate::math::ray_closest_t_line_segment; + use cgmath::assert_ulps_eq; + use glam::vec3; + + #[test] + fn test_ray_closest_t_line_segment() { + let ray_x = macaw::Ray3::from_origin_dir(glam::Vec3::ZERO, glam::Vec3::X); + + // through origin. + assert_ulps_eq!( + ray_closest_t_line_segment(&ray_x, [vec3(0.0, 0.0, 1.0), vec3(0.0, 0.0, -1.0)]), + 0.0 + ); + + // behind origin, orthogonal to ray. + assert_ulps_eq!( + ray_closest_t_line_segment(&ray_x, [vec3(-1.0, 0.0, 1.0), vec3(-1.0, 0.0, -1.0)]), + -1.0 + ); + + // in front of origin, orthogonal to ray. + assert_ulps_eq!( + ray_closest_t_line_segment(&ray_x, [vec3(1.0, 0.0, 1.0), vec3(1.0, 0.0, -1.0)]), + 1.0 + ); + + // parallel to ray, half way in front + assert_ulps_eq!( + ray_closest_t_line_segment(&ray_x, [vec3(0.5, 1.0, 0.0), vec3(1.5, 1.0, 0.0)]), + 0.5 + ); + + // parallel to ray, half way behind + assert_ulps_eq!( + ray_closest_t_line_segment(&ray_x, [vec3(-0.5, 1.0, 0.0), vec3(-1.5, 1.0, 0.0)]), + -0.5 + ); + + // Degenerated line segment at origin. 
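        // Illustration (not part of this diff): with a zero-length segment `b - a` is zero, so
        // `closest_ts` has no well-defined answer and the NaN / out-of-range guard above falls
        // back to `closest_t_to_point(a)`. As the doc comment states, the returned offset maps
        // back to a position on the ray via:
        //     let t = ray_closest_t_line_segment(&ray_x, [glam::Vec3::ZERO, glam::Vec3::ZERO]);
        //     let closest_point_on_ray = ray_x.origin + ray_x.dir * t;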
+ assert_ulps_eq!( + ray_closest_t_line_segment(&ray_x, [vec3(0.0, 0.0, 0.0), vec3(0.0, 0.0, 0.0)]), + 0.0 + ); + } +} diff --git a/crates/re_viewer/src/misc/viewer_context.rs b/crates/re_viewer/src/misc/viewer_context.rs index 1f3260f2cb74..c378cbd55bcc 100644 --- a/crates/re_viewer/src/misc/viewer_context.rs +++ b/crates/re_viewer/src/misc/viewer_context.rs @@ -1,5 +1,3 @@ -use macaw::Ray3; - use re_data_store::{log_db::LogDb, InstanceId, ObjTypePath}; use re_log_types::{DataPath, MsgId, ObjPath, TimeInt, Timeline}; @@ -238,7 +236,7 @@ pub enum HoveredSpace { space_3d: ObjPath, /// 2D spaces and pixel coordinates (with Z=depth) - target_spaces: Vec<(ObjPath, Option, Option)>, + target_spaces: Vec<(ObjPath, Option)>, }, } diff --git a/crates/re_viewer/src/ui/data_ui/image.rs b/crates/re_viewer/src/ui/data_ui/image.rs index 860946d76d85..89e8a0c15841 100644 --- a/crates/re_viewer/src/ui/data_ui/image.rs +++ b/crates/re_viewer/src/ui/data_ui/image.rs @@ -71,79 +71,89 @@ fn show_zoomed_image_region_tooltip( .on_hover_cursor(egui::CursorIcon::ZoomIn) .on_hover_ui_at_pointer(|ui| { ui.horizontal(|ui| { - show_zoomed_image_region( - parent_ui, - ui, - tensor_view, - image_rect, - pointer_pos, - meter, - ); + let Some(dynamic_img) = tensor_view.dynamic_img else { return }; + let w = dynamic_img.width() as _; + let h = dynamic_img.height() as _; + + use egui::NumExt; + + let center = [ + (egui::remap(pointer_pos.x, image_rect.x_range(), 0.0..=w as f32) as isize) + .at_most(w), + (egui::remap(pointer_pos.y, image_rect.y_range(), 0.0..=h as f32) as isize) + .at_most(h), + ]; + show_zoomed_image_region_area_outline(parent_ui, tensor_view, center, image_rect); + show_zoomed_image_region(ui, tensor_view, center, meter); }); }) } -/// meter: iff this is a depth map, how long is one meter? 
-pub(crate) fn show_zoomed_image_region( - parent_ui: &mut egui::Ui, - tooltip_ui: &mut egui::Ui, +// Show the surrounding pixels: +const ZOOMED_IMAGE_TEXEL_RADIUS: isize = 12; + +pub fn show_zoomed_image_region_area_outline( + ui: &mut egui::Ui, tensor_view: &TensorImageView<'_, '_>, + [center_x, center_y]: [isize; 2], image_rect: egui::Rect, - pointer_pos: egui::Pos2, - meter: Option, ) { let Some(dynamic_img) = tensor_view.dynamic_img else { return }; - use egui::{color_picker, pos2, remap, Color32, Mesh, NumExt, Rect, Vec2}; + use egui::{pos2, remap, Color32, Rect}; - // Show the surrounding pixels: - let texel_radius = 12; - let size = Vec2::splat(128.0); - - let (_id, zoom_rect) = tooltip_ui.allocate_space(size); let w = dynamic_img.width() as _; let h = dynamic_img.height() as _; - let center_x = - (remap(pointer_pos.x, image_rect.x_range(), 0.0..=(w as f32)).floor() as isize).at_most(w); - let center_y = - (remap(pointer_pos.y, image_rect.y_range(), 0.0..=(h as f32)).floor() as isize).at_most(h); - { - // Show where on the original image the zoomed-in region is at: - let left = (center_x - texel_radius) as f32; - let right = (center_x + texel_radius) as f32; - let top = (center_y - texel_radius) as f32; - let bottom = (center_y + texel_radius) as f32; - - let left = remap(left, 0.0..=w as f32, image_rect.x_range()); - let right = remap(right, 0.0..=w as f32, image_rect.x_range()); - let top = remap(top, 0.0..=h as f32, image_rect.y_range()); - let bottom = remap(bottom, 0.0..=h as f32, image_rect.y_range()); - - let rect = Rect::from_min_max(pos2(left, top), pos2(right, bottom)); - // TODO(emilk): use `parent_ui.painter()` and put it in a high Z layer, when https://github.com/emilk/egui/issues/1516 is done - let painter = parent_ui.ctx().debug_painter(); - painter.rect_stroke(rect, 0.0, (2.0, Color32::BLACK)); - painter.rect_stroke(rect, 0.0, (1.0, Color32::WHITE)); - } + // Show where on the original image the zoomed-in region is at: + let left = (center_x - ZOOMED_IMAGE_TEXEL_RADIUS) as f32; + let right = (center_x + ZOOMED_IMAGE_TEXEL_RADIUS) as f32; + let top = (center_y - ZOOMED_IMAGE_TEXEL_RADIUS) as f32; + let bottom = (center_y + ZOOMED_IMAGE_TEXEL_RADIUS) as f32; + + let left = remap(left, 0.0..=w, image_rect.x_range()); + let right = remap(right, 0.0..=w, image_rect.x_range()); + let top = remap(top, 0.0..=h, image_rect.y_range()); + let bottom = remap(bottom, 0.0..=h, image_rect.y_range()); + + let rect = Rect::from_min_max(pos2(left, top), pos2(right, bottom)); + // TODO(emilk): use `parent_ui.painter()` and put it in a high Z layer, when https://github.com/emilk/egui/issues/1516 is done + let painter = ui.ctx().debug_painter(); + painter.rect_stroke(rect, 0.0, (2.0, Color32::BLACK)); + painter.rect_stroke(rect, 0.0, (1.0, Color32::WHITE)); +} +/// `meter`: iff this is a depth map, how long is one meter? 
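// (Sketch of the `meter` convention, mirroring the depth readout in `ui_2d.rs` further down:
// a raw depth sample of 10_000 with `meter = Some(1000.0)` means 10 meters, i.e.
//     let depth_in_meters = raw_value / meter as f64;
// where `raw_value` is the f64 tensor sample under the cursor.)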
+pub fn show_zoomed_image_region( + tooltip_ui: &mut egui::Ui, + tensor_view: &TensorImageView<'_, '_>, + image_position: [isize; 2], + meter: Option, +) { + let Some(dynamic_img) = tensor_view.dynamic_img else { return }; + + use egui::{color_picker, pos2, remap, Color32, Mesh, Rect, Vec2}; + + let size = Vec2::splat(128.0); + + let (_id, zoom_rect) = tooltip_ui.allocate_space(size); let painter = tooltip_ui.painter(); painter.rect_filled(zoom_rect, 0.0, tooltip_ui.visuals().extreme_bg_color); let mut mesh = Mesh::default(); let mut center_texel_rect = None; - for dx in -texel_radius..=texel_radius { - for dy in -texel_radius..=texel_radius { - let x = center_x + dx; - let y = center_y + dy; + for dx in -ZOOMED_IMAGE_TEXEL_RADIUS..=ZOOMED_IMAGE_TEXEL_RADIUS { + for dy in -ZOOMED_IMAGE_TEXEL_RADIUS..=ZOOMED_IMAGE_TEXEL_RADIUS { + let x = image_position[0] + dx; + let y = image_position[1] + dy; let color = get_pixel(dynamic_img, [x, y]); if let Some(color) = color { let image::Rgba([r, g, b, a]) = color; let color = egui::Color32::from_rgba_unmultiplied(r, g, b, a); if color != Color32::TRANSPARENT { - let tr = texel_radius as f32; + let tr = ZOOMED_IMAGE_TEXEL_RADIUS as f32; let left = remap(dx as f32, -tr..=(tr + 1.0), zoom_rect.x_range()); let right = remap((dx + 1) as f32, -tr..=(tr + 1.0), zoom_rect.x_range()); let top = remap(dy as f32, -tr..=(tr + 1.0), zoom_rect.y_range()); @@ -169,9 +179,9 @@ pub(crate) fn show_zoomed_image_region( painter.rect_stroke(center_texel_rect, 0.0, (1.0, Color32::WHITE)); } - if let Some(color) = get_pixel(dynamic_img, [center_x, center_y]) { + if let Some(color) = get_pixel(dynamic_img, image_position) { tooltip_ui.separator(); - let (x, y) = (center_x as _, center_y as _); + let (x, y) = (image_position[0] as _, image_position[1] as _); tooltip_ui.vertical(|ui| { if tensor_view.tensor.num_dim() == 2 { diff --git a/crates/re_viewer/src/ui/view_spatial/eye.rs b/crates/re_viewer/src/ui/view_spatial/eye.rs index cb5242b576d5..0d19ea150ac2 100644 --- a/crates/re_viewer/src/ui/view_spatial/eye.rs +++ b/crates/re_viewer/src/ui/view_spatial/eye.rs @@ -12,7 +12,9 @@ use super::SpaceCamera3D; #[derive(Clone, Copy, Debug, PartialEq, serde::Deserialize, serde::Serialize)] pub struct Eye { pub world_from_view: IsoTransform, - pub fov_y: f32, + + /// If no angle is present, this is an orthographic camera. 
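    // Sketch of the two modes (illustration only):
    //     perspective:  Eye { world_from_view, fov_y: Some(Eye::DEFAULT_FOV_Y) }
    //     orthographic: Eye { world_from_view: IsoTransform::IDENTITY, fov_y: None }
    // The latter is exactly what the 2D view constructs in `ui_2d.rs` below.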
+ pub fov_y: Option, } impl Eye { @@ -26,25 +28,79 @@ impl Eye { Some(Self { world_from_view: space_cameras.world_from_rub_view()?, - fov_y, + fov_y: Some(fov_y), }) } - #[allow(clippy::unused_self)] pub fn near(&self) -> f32 { - 0.01 // TODO(emilk) + if self.is_perspective() { + 0.01 // TODO(emilk) + } else { + -1000.0 // TODO(andreas) + } + } + + pub fn far(&self) -> f32 { + if self.is_perspective() { + f32::INFINITY + } else { + 1000.0 + } } pub fn ui_from_world(&self, rect: &Rect) -> Mat4 { let aspect_ratio = rect.width() / rect.height(); + + let projection = if let Some(fov_y) = self.fov_y { + Mat4::perspective_infinite_rh(fov_y, aspect_ratio, self.near()) + } else { + Mat4::orthographic_rh( + rect.left(), + rect.right(), + rect.bottom(), + rect.top(), + self.near(), + self.far(), + ) + }; + Mat4::from_translation(vec3(rect.center().x, rect.center().y, 0.0)) * Mat4::from_scale(0.5 * vec3(rect.width(), -rect.height(), 1.0)) - * Mat4::perspective_infinite_rh(self.fov_y, aspect_ratio, self.near()) + * projection * self.world_from_view.inverse() } - pub fn world_from_ui(&self, rect: &Rect) -> Mat4 { - self.ui_from_world(rect).inverse() + pub fn is_perspective(&self) -> bool { + self.fov_y.is_some() + } + + // pub fn is_orthographic(&self) -> bool { + // self.fov_y.is_none() + // } + + /// Picking ray for a given pointer in the parent space + /// (i.e. prior to camera transform, "world" space) + pub fn picking_ray(&self, screen_rect: &Rect, pointer: glam::Vec2) -> macaw::Ray3 { + if let Some(fov_y) = self.fov_y { + let (w, h) = (screen_rect.width(), screen_rect.height()); + let aspect_ratio = w / h; + let f = (fov_y * 0.5).tan(); + let px = (2.0 * (pointer.x - screen_rect.left()) / w - 1.0) * f * aspect_ratio; + let py = (1.0 - 2.0 * (pointer.y - screen_rect.top()) / h) * f; + let ray_dir = self + .world_from_view + .transform_vector3(glam::vec3(px, py, -1.0)); + macaw::Ray3::from_origin_dir(self.pos_in_world(), ray_dir.normalize()) + } else { + // The ray originates on the camera plane, not from the camera position + let ray_dir = self.world_from_view.rotation().mul_vec3(glam::Vec3::Z); + let origin = self.world_from_view.translation() + + self.world_from_view.rotation().mul_vec3(glam::Vec3::X) * pointer.x + + self.world_from_view.rotation().mul_vec3(glam::Vec3::Y) * pointer.y + + ray_dir * self.near(); + + macaw::Ray3::from_origin_dir(origin, ray_dir) + } } pub fn pos_in_world(&self) -> glam::Vec3 { @@ -64,7 +120,21 @@ impl Eye { .world_from_view .rotation() .slerp(other.world_from_view.rotation(), t); - let fov_y = egui::lerp(self.fov_y..=other.fov_y, t); + + let fov_y = if t < 0.02 { + self.fov_y + } else if t > 0.98 { + other.fov_y + } else if self.fov_y.is_none() && other.fov_y.is_none() { + None + } else { + // TODO(andreas): Interpolating between perspective and ortho is untested and likely more involved than this. 
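            // (0.01 rad is only a stand-in FOV for whichever side is orthographic, so the
            // lerp below has a concrete endpoint; the branches above already handle the
            // "both orthographic" and near-endpoint cases.)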
+ Some(egui::lerp( + self.fov_y.unwrap_or(0.01)..=other.fov_y.unwrap_or(0.01), + t, + )) + }; + Eye { world_from_view: IsoTransform::from_rotation_translation(rotation, translation), fov_y, @@ -102,7 +172,7 @@ impl OrbitEye { self.world_from_view_rot, self.position(), ), - fov_y: self.fov_y, + fov_y: Some(self.fov_y), } } @@ -115,7 +185,7 @@ impl OrbitEye { self.orbit_radius = distance.at_least(self.orbit_radius / 5.0); self.orbit_center = eye.pos_in_world() + self.orbit_radius * eye.forward_in_world(); self.world_from_view_rot = eye.world_from_view.rotation(); - self.fov_y = eye.fov_y; + self.fov_y = eye.fov_y.unwrap_or(Eye::DEFAULT_FOV_Y); self.velocity = Vec3::ZERO; } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs index 8cfc99d313fe..2b95db2e6e08 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/mod.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/mod.rs @@ -21,9 +21,11 @@ use crate::{ }, }; +mod picking; mod primitives; mod scene_part; +pub use self::picking::{AdditionalPickingInfo, PickingRayHit, PickingResult}; pub use self::primitives::SceneSpatialPrimitives; use scene_part::ScenePart; @@ -80,7 +82,7 @@ pub enum Label2DTarget { pub struct Label2D { pub text: String, pub color: Color32, - /// The shape being labled. + /// The shape being labeled. pub target: Label2DTarget, /// What is hovered if this label is hovered. pub labled_instance: InstanceIdHash, @@ -103,8 +105,9 @@ pub struct SceneSpatialUiData { pub labels_3d: Vec, pub labels_2d: Vec, - /// Cursor within any of these rects cause the referred instance to be hovered. - pub rects: Vec<(egui::Rect, InstanceIdHash)>, + /// Picking any any of these rects cause the referred instance to be hovered. + /// Only use this for 2d overlays! + pub pickable_ui_rects: Vec<(egui::Rect, InstanceIdHash)>, /// Images are a special case of rects where we're storing some extra information to allow miniature previews etc. pub images: Vec, @@ -239,7 +242,7 @@ impl SceneSpatial { crate::profile_function!(); // Size of a pixel (in meters), when projected out one meter: - let point_size_at_one_meter = eye.fov_y / viewport_size.y; + let point_size_at_one_meter = eye.fov_y.unwrap() / viewport_size.y; let eye_camera_plane = macaw::Plane3::from_normal_point(eye.forward_in_world(), eye.pos_in_world()); @@ -394,6 +397,23 @@ impl SceneSpatial { SpatialNavigationMode::ThreeD } } + + pub fn picking( + &self, + pointer_in_ui: glam::Vec2, + ui_rect: &egui::Rect, + eye: &Eye, + ui_interaction_radius: f32, + ) -> PickingResult { + picking::picking( + pointer_in_ui, + ui_rect, + eye, + &self.primitives, + &self.ui, + ui_interaction_radius, + ) + } } pub struct ObjectPaintProperties { diff --git a/crates/re_viewer/src/ui/view_spatial/scene/picking.rs b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs new file mode 100644 index 000000000000..ce5fbfd70ea2 --- /dev/null +++ b/crates/re_viewer/src/ui/view_spatial/scene/picking.rs @@ -0,0 +1,285 @@ +use itertools::Itertools as _; + +use re_data_store::InstanceIdHash; + +use super::{SceneSpatialPrimitives, SceneSpatialUiData}; +use crate::{ + math::{line_segment_distance_sq_to_point_2d, ray_closest_t_line_segment}, + ui::view_spatial::eye::Eye, +}; + +pub enum AdditionalPickingInfo { + /// No additional picking information. + None, + /// The hit was a textured rect at the given uv coordinates (ranging from 0 to 1) + TexturedRect(glam::Vec2), + /// We hit a egui ui element, meaning that depth information is not usable. 
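    // Sketch of how callers branch on the hit kind (as `ui_2d.rs` / `ui_3d.rs` do below):
    //     match hit.info {
    //         AdditionalPickingInfo::TexturedRect(uv) => { /* zoomed-in image tooltip at uv */ }
    //         AdditionalPickingInfo::GuiOverlay | AdditionalPickingInfo::None => { /* generic hover ui */ }
    //     }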
+ GuiOverlay, +} + +pub struct PickingRayHit { + /// What object got hit by the picking ray. + /// + /// The ray hit position may not actually be on this object, as we allow snapping to closest object! + pub instance_hash: InstanceIdHash, + + /// Where along the picking ray the hit occurred. + pub ray_t: f32, + + /// Any additional information about the picking hit. + pub info: AdditionalPickingInfo, +} + +impl PickingRayHit { + fn from_instance_and_t(instance_hash: InstanceIdHash, t: f32) -> Self { + Self { + instance_hash, + ray_t: t, + info: AdditionalPickingInfo::None, + } + } +} + +pub struct PickingResult { + /// Picking ray hit for an opaque object (if any). + pub opaque_hit: Option, + + /// Picking ray hits for transparent objects, sorted from far to near. + /// If there is an opaque hit, all of them are in front of the opaque hit. + pub transparent_hits: Vec, + + /// The picking ray used. Given in the coordinates of the space the picking is performed in. + picking_ray: macaw::Ray3, +} + +impl PickingResult { + /// The space position of a given hit. + pub fn space_position(&self, hit: &PickingRayHit) -> glam::Vec3 { + self.picking_ray.origin + self.picking_ray.dir * hit.ray_t + } + + /// Iterates over all hits from far to close. + pub fn iter_hits(&self) -> impl Iterator { + self.opaque_hit.iter().chain(self.transparent_hits.iter()) + } + + fn sort_and_remove_hidden_transparent(&mut self) { + // Sort from far to close + self.transparent_hits + .sort_by(|a, b| b.ray_t.partial_cmp(&a.ray_t).unwrap()); + + // Delete subset that is behind opaque hit. + if let Some(opaque_hit) = &self.opaque_hit { + let mut num_hidden = 0; + for (i, transparent_hit) in self.transparent_hits.iter().enumerate() { + if transparent_hit.ray_t <= opaque_hit.ray_t { + break; + } + num_hidden = i + 1; + } + self.transparent_hits.drain(0..num_hidden); + } + } +} + +pub fn picking( + pointer_in_ui: glam::Vec2, + ui_rect: &egui::Rect, + eye: &Eye, + primitives: &SceneSpatialPrimitives, + ui_data: &SceneSpatialUiData, + ui_interaction_radius: f32, +) -> PickingResult { + crate::profile_function!(); + + let ui_from_world = eye.ui_from_world(ui_rect); + let ray_in_world = eye.picking_ray(ui_rect, pointer_in_ui); + + let SceneSpatialPrimitives { + bounding_box: _, + textured_rectangles, + textured_rectangles_ids, + line_strips, + points, + meshes, + } = primitives; + + // in ui points + let max_side_ui_dist_sq = ui_interaction_radius * ui_interaction_radius; // TODO(emilk): interaction radius from egui + let mut closest_opaque_side_ui_dist_sq = max_side_ui_dist_sq; + let mut closest_opaque_pick = PickingRayHit { + instance_hash: InstanceIdHash::NONE, + ray_t: f32::INFINITY, + info: AdditionalPickingInfo::None, + }; + let mut transparent_hits = Vec::new(); // Combined, sorted (and partially "hidden") by opaque results later. + + let mut check_hit = |side_ui_dist_sq, ray_hit: PickingRayHit, transparent| { + if ray_hit.ray_t < closest_opaque_pick.ray_t + && side_ui_dist_sq <= closest_opaque_side_ui_dist_sq + { + if transparent { + transparent_hits.push(ray_hit); + } else { + closest_opaque_pick = ray_hit; + closest_opaque_side_ui_dist_sq = side_ui_dist_sq; + } + } + }; + + { + crate::profile_scope!("points"); + + for (batch, vertex_iter) in points.iter_vertices_and_userdata_by_batch() { + // For getting the closest point we could transform the mouse ray into the "batch space". + // However, we want to determine the closest point in *screen space*, meaning that we need to project all points. 
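                // (Composing the matrices means one `project_point3` call takes an object-space
                // position all the way to ui points, perspective divide included; the depth used
                // for `ray_t` is then computed separately against the world-space picking ray via
                // `closest_t_to_point` / `ray_closest_t_line_segment` below.)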
+ let ui_from_batch = ui_from_world * batch.world_from_obj; + + for (point, instance_hash) in vertex_iter { + if instance_hash.is_none() { + continue; + } + + // TODO(emilk): take point radius into account + let pos_in_ui = ui_from_batch.project_point3(point.position); + let dist_sq = pos_in_ui.truncate().distance_squared(pointer_in_ui); + if dist_sq <= max_side_ui_dist_sq { + let t = ray_in_world + .closest_t_to_point(batch.world_from_obj.transform_point3(point.position)); + check_hit( + dist_sq, + PickingRayHit::from_instance_and_t(*instance_hash, t), + false, + ); + } + } + } + } + + { + crate::profile_scope!("line_segments"); + + for (batch, vertices) in line_strips.iter_vertices_by_batch() { + // For getting the closest point we could transform the mouse ray into the "batch space". + // However, we want to determine the closest point in *screen space*, meaning that we need to project all points. + let ui_from_batch = ui_from_world * batch.world_from_obj; + + for (start, end) in vertices.tuple_windows() { + // Skip unconnected tuples. + if start.strip_index != end.strip_index { + continue; + } + + let instance_hash = line_strips.strip_user_data[start.strip_index as usize]; + if instance_hash.is_none() { + continue; + } + + // TODO(emilk): take line segment radius into account + let a = ui_from_batch.project_point3(start.position); + let b = ui_from_batch.project_point3(end.position); + let side_ui_dist_sq = line_segment_distance_sq_to_point_2d( + [a.truncate(), b.truncate()], + pointer_in_ui, + ); + + if side_ui_dist_sq < max_side_ui_dist_sq { + let start_world = batch.world_from_obj.transform_point3(start.position); + let end_world = batch.world_from_obj.transform_point3(end.position); + let t = ray_closest_t_line_segment(&ray_in_world, [start_world, end_world]); + + check_hit( + side_ui_dist_sq, + PickingRayHit::from_instance_and_t(instance_hash, t), + false, + ); + } + } + } + } + + { + crate::profile_scope!("meshes"); + for mesh in meshes { + if !mesh.instance_hash.is_some() { + continue; + } + let ray_in_mesh = (mesh.world_from_mesh.inverse() * ray_in_world).normalize(); + let t = crate::math::ray_bbox_intersect(&ray_in_mesh, mesh.mesh.bbox()); + + if t < 0.0 { + let side_ui_dist_sq = 0.0; + check_hit( + side_ui_dist_sq, + PickingRayHit::from_instance_and_t(mesh.instance_hash, t), + false, + ); + } + } + } + + { + crate::profile_scope!("textured rectangles"); + for (rect, id) in textured_rectangles + .iter() + .zip(textured_rectangles_ids.iter()) + { + if !id.is_some() { + continue; + } + + let rect_plane = macaw::Plane3::from_normal_point( + rect.extent_u.cross(rect.extent_v).normalize(), + rect.top_left_corner_position, + ); + + // TODO(andreas): Interaction radius is currently ignored for rects. 
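            // Sketch of the math that follows: intersect the ray with the rect's plane, then
            // project the hit point onto the rect's edge vectors to get texture-space UVs:
            //     u = dot(hit - top_left, extent_u) / |extent_u|^2    (and likewise for v)
            // The hit lies inside the rect iff both u and v land in [0, 1] (the range check below),
            // and rects with `multiplicative_tint.a() < 1.0` are queued as transparent hits.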
+ let (intersect, t) = rect_plane.intersect_ray(ray_in_world.origin, ray_in_world.dir); + if !intersect { + continue; + } + let intersection_world = ray_in_world.origin + ray_in_world.dir * t; + let dir_from_rect_top_left = intersection_world - rect.top_left_corner_position; + let u = dir_from_rect_top_left.dot(rect.extent_u) / rect.extent_u.length_squared(); + let v = dir_from_rect_top_left.dot(rect.extent_v) / rect.extent_v.length_squared(); + + if (0.0..=1.0).contains(&u) && (0.0..=1.0).contains(&v) { + let picking_hit = PickingRayHit { + instance_hash: *id, + ray_t: t, + info: AdditionalPickingInfo::TexturedRect(glam::vec2(u, v)), + }; + check_hit(0.0, picking_hit, rect.multiplicative_tint.a() < 1.0); + } + } + } + + { + crate::profile_scope!("ui rectangles"); + let egui_pos = egui::pos2(pointer_in_ui.x, pointer_in_ui.y); + + for (bbox, instance_hash) in &ui_data.pickable_ui_rects { + let side_ui_dist_sq = bbox.distance_sq_to_pos(egui_pos); + check_hit( + side_ui_dist_sq, + PickingRayHit { + instance_hash: *instance_hash, + ray_t: 0.0, + info: AdditionalPickingInfo::GuiOverlay, + }, + false, + ); + } + } + + let mut result = PickingResult { + opaque_hit: closest_opaque_pick + .instance_hash + .is_some() + .then_some(closest_opaque_pick), + transparent_hits, + picking_ray: ray_in_world, + }; + result.sort_and_remove_hidden_transparent(); + result +} diff --git a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs index b95e1f55c27a..02086b08f7b8 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/primitives.rs @@ -1,21 +1,25 @@ use egui::Color32; -use itertools::Itertools as _; use re_data_store::InstanceIdHash; use re_renderer::{renderer::MeshInstance, LineStripSeriesBuilder, PointCloudBuilder}; -use crate::{math::line_segment_distance_sq_to_point_2d, ui::view_spatial::eye::Eye}; - use super::MeshSource; /// Primitives sent off to `re_renderer`. /// (Some meta information still relevant to ui setup as well) +/// +/// TODO(andreas): Right now we're using `re_renderer` data structures for reading (bounding box & picking). +/// In the future, this will be more limited as we're going to gpu staging data as soon as possible +/// which is very slow to read. See [#594](https://github.com/rerun-io/rerun/pull/594) #[derive(Default)] pub struct SceneSpatialPrimitives { /// Estimated bounding box of all data in scene coordinates. Accumulated. - bounding_box: macaw::BoundingBox, + pub(super) bounding_box: macaw::BoundingBox, - /// TODO(andreas): Need to decide of this should be used for hovering as well. If so add another builder with meta-data? 
+ // TODO(andreas): Storing extra data like so is unsafe and not future proof either + // (see also above comment on the need to separate cpu-readable data) + pub textured_rectangles_ids: Vec, pub textured_rectangles: Vec, + pub line_strips: LineStripSeriesBuilder, pub points: PointCloudBuilder, @@ -88,142 +92,4 @@ impl SceneSpatialPrimitives { }) .collect() } - - pub fn picking( - &self, - pointer_in_ui: glam::Vec2, - rect: &egui::Rect, - eye: &Eye, - ) -> Option<(InstanceIdHash, glam::Vec3)> { - crate::profile_function!(); - - let ui_from_world = eye.ui_from_world(rect); - let world_from_ui = eye.world_from_ui(rect); - - let ray_in_world = { - let ray_dir = world_from_ui.project_point3(glam::Vec3::new( - pointer_in_ui.x, - pointer_in_ui.y, - -1.0, - )) - eye.pos_in_world(); - macaw::Ray3::from_origin_dir(eye.pos_in_world(), ray_dir.normalize()) - }; - - let Self { - bounding_box: _, - textured_rectangles: _, // TODO(andreas): Should be able to pick 2d rectangles! - line_strips, - points, - meshes, - } = &self; - - // in points - let max_side_dist_sq = 5.0 * 5.0; // TODO(emilk): interaction radius from egui - - let mut closest_z = f32::INFINITY; - // in points - let mut closest_side_dist_sq = max_side_dist_sq; - let mut closest_instance_id = None; - - { - crate::profile_scope!("points_3d"); - - for (batch, vertex_iter) in points.iter_vertices_and_userdata_by_batch() { - // For getting the closest point we could transform the mouse ray into the "batch space". - // However, we want to determine the closest point in *screen space*, meaning that we need to project all points. - let ui_from_batch = ui_from_world * batch.world_from_obj; - - for (point, instance_hash) in vertex_iter { - if instance_hash.is_none() { - continue; - } - - // TODO(emilk): take point radius into account - let pos_in_ui = ui_from_batch.project_point3(point.position); - if pos_in_ui.z < 0.0 { - continue; // TODO(emilk): don't we expect negative Z!? RHS etc - } - let dist_sq = pos_in_ui.truncate().distance_squared(pointer_in_ui); - if dist_sq < max_side_dist_sq { - let t = pos_in_ui.z.abs(); - if t < closest_z || dist_sq < closest_side_dist_sq { - closest_z = t; - closest_side_dist_sq = dist_sq; - closest_instance_id = Some(*instance_hash); - } - } - } - } - } - - { - crate::profile_scope!("line_segments_3d"); - - for (batch, vertices) in line_strips.iter_vertices_by_batch() { - // For getting the closest point we could transform the mouse ray into the "batch space". - // However, we want to determine the closest point in *screen space*, meaning that we need to project all points. - let ui_from_batch = ui_from_world * batch.world_from_obj; - - for (start, end) in vertices.tuple_windows() { - // Skip unconnected tuples. 
- if start.strip_index != end.strip_index { - continue; - } - - let instance_hash = line_strips.strip_user_data[start.strip_index as usize]; - if instance_hash.is_none() { - continue; - } - - // TODO(emilk): take line segment radius into account - let a = ui_from_batch.project_point3(start.position); - let b = ui_from_batch.project_point3(end.position); - let dist_sq = line_segment_distance_sq_to_point_2d( - [a.truncate(), b.truncate()], - pointer_in_ui, - ); - - if dist_sq < max_side_dist_sq { - let t = a.z.abs(); // not very accurate - if t < closest_z || dist_sq < closest_side_dist_sq { - closest_z = t; - closest_side_dist_sq = dist_sq; - closest_instance_id = Some(instance_hash); - } - } - } - } - } - - { - crate::profile_scope!("meshes"); - for mesh in meshes { - if !mesh.instance_hash.is_some() { - continue; - } - let ray_in_mesh = (mesh.world_from_mesh.inverse() * ray_in_world).normalize(); - let t = crate::math::ray_bbox_intersect(&ray_in_mesh, mesh.mesh.bbox()); - - if t < f32::INFINITY { - let dist_sq = 0.0; - if t < closest_z || dist_sq < closest_side_dist_sq { - closest_z = t; // TODO(emilk): I think this is wrong - closest_side_dist_sq = dist_sq; - closest_instance_id = Some(mesh.instance_hash); - } - } - } - } - - if let Some(closest_instance_id) = closest_instance_id { - let closest_point = world_from_ui.project_point3(glam::Vec3::new( - pointer_in_ui.x, - pointer_in_ui.y, - closest_z, - )); - Some((closest_instance_id, closest_point)) - } else { - None - } - } } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes2d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes2d.rs index 5be13236c682..1147e0084737 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/boxes2d.rs @@ -72,10 +72,6 @@ impl ScenePart for Boxes2DPartClassic { let color = annotation_info.color(color, DefaultColor::ObjPath(obj_path)); let label = annotation_info.label(label); - // Hovering with a rect. - let rect = egui::Rect::from_min_max(bbox.min.into(), bbox.max.into()); - scene.ui.rects.push((rect, instance_hash)); - let mut paint_props = paint_properties(color, stroke_width); if instance_hash.is_some() && hovered_instance == instance_hash { apply_hover_effect(&mut paint_props); @@ -96,7 +92,10 @@ impl ScenePart for Boxes2DPartClassic { scene.ui.labels_2d.push(Label2D { text: label, color: paint_props.fg_stroke.color, - target: Label2DTarget::Rect(rect), + target: Label2DTarget::Rect(egui::Rect::from_min_max( + bbox.min.into(), + bbox.max.into(), + )), labled_instance: instance_hash, }); } @@ -140,11 +139,6 @@ impl Boxes2DPart { let color = annotation_info.color(color.as_ref(), DefaultColor::ObjPath(obj_path)); let label = annotation_info.label(label); - // Hovering with a rect. 
- let hover_rect = - egui::Rect::from_min_size(egui::pos2(rect.x, rect.y), egui::vec2(rect.w, rect.h)); - scene.ui.rects.push((hover_rect, instance)); - let mut paint_props = paint_properties(color, stroke_width); if hovered_instance == instance { apply_hover_effect(&mut paint_props); @@ -179,7 +173,10 @@ impl Boxes2DPart { scene.ui.labels_2d.push(Label2D { text: label, color: paint_props.fg_stroke.color, - target: Label2DTarget::Rect(hover_rect), + target: Label2DTarget::Rect(egui::Rect::from_min_size( + egui::pos2(rect.x, rect.y), + egui::vec2(rect.w, rect.h), + )), labled_instance: instance, }); } diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs index 27de01327ea9..638191dcf38a 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs @@ -70,6 +70,7 @@ impl ScenePart for ImagesPart { .primitives .line_strips .batch("image outlines") + .world_from_obj(world_from_obj) .add_axis_aligned_rectangle_outline_2d(glam::Vec2::ZERO, glam::vec2(w, h)) .color(paint_props.fg_stroke.color) .radius(Size::new_points(paint_props.fg_stroke.width * 0.5)); @@ -96,6 +97,7 @@ impl ScenePart for ImagesPart { multiplicative_tint: paint_props.fg_stroke.color.into(), }, ); + scene.primitives.textured_rectangles_ids.push(instance_hash); } scene.ui.images.push(Image { @@ -116,6 +118,7 @@ impl ScenePart for ImagesPart { // Handle layered rectangles that are on (roughly) the same plane and were logged in sequence. // First, group by similar plane. + // TODO(andreas): Need planes later for picking as well! let rects_grouped_by_plane = { let mut cur_plane = macaw::Plane3::from_normal_dist(Vec3::NAN, std::f32::NAN); let mut rectangle_group = Vec::new(); @@ -127,10 +130,9 @@ impl ScenePart for ImagesPart { for rect in it.by_ref() { let prev_plane = cur_plane; cur_plane = macaw::Plane3::from_normal_point( - rect.extent_u.cross(rect.extent_v), + rect.extent_u.cross(rect.extent_v).normalize(), rect.top_left_corner_position, - ) - .normalized(); + ); // Are the image planes too unsimilar? Then this is a new group. if !rectangle_group.is_empty() diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/segments2d.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/segments2d.rs index 94862769c366..30e818834968 100644 --- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/segments2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/segments2d.rs @@ -31,6 +31,10 @@ impl ScenePart for LineSegments2DPart { ) { crate::profile_scope!("LineSegments2DPart"); + // TODO(andreas): Workaround for unstable z index when interacting on images. 
+ // See also https://github.com/rerun-io/rerun/issues/647 + scene.primitives.line_strips.next_2d_z = -0.0001; + for (_obj_type, obj_path, time_query, obj_store) in query.iter_object_stores(ctx.log_db, &[ObjectType::LineSegments2D]) { diff --git a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs index 8a2ae31f8cf7..4b3a60f6e3f5 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_2d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_2d.rs @@ -3,7 +3,7 @@ use egui::{ pos2, vec2, Align, Align2, Color32, NumExt as _, Pos2, Rect, Response, ScrollArea, Shape, TextFormat, TextStyle, Vec2, }; -use itertools::Itertools; +use macaw::IsoTransform; use re_data_store::{InstanceId, InstanceIdHash, ObjPath}; use re_renderer::view_builder::TargetConfiguration; @@ -13,13 +13,15 @@ use crate::{ data_ui::{self, DataUi}, view_spatial::{ ui_renderer_bridge::{create_scene_paint_callback, get_viewport, ScreenBackground}, - Image, Label2DTarget, SceneSpatial, + Label2DTarget, SceneSpatial, }, Preview, }, - Selection, ViewerContext, + ViewerContext, }; +use super::{eye::Eye, scene::AdditionalPickingInfo}; + // --- #[derive(Clone, Default, serde::Deserialize, serde::Serialize)] @@ -288,6 +290,34 @@ fn view_2d_scrollable( // ------------------------------------------------------------------------ + // Draw a re_renderer driven view. + // Camera & projection are configured to ingest space coordinates directly. + { + crate::profile_scope!("build command buffer for 2D view {}", space.to_string()); + + let Ok(target_config) = setup_target_config( + &painter, + space_from_ui, + space_from_pixel, + &space.to_string(), + ) else { + return response; + }; + + let Ok(callback) = create_scene_paint_callback( + ctx.render_ctx, + target_config, painter.clip_rect(), + &scene.primitives, + &ScreenBackground::ClearColor(parent_ui.visuals().extreme_bg_color.into()), + ) else { + return response; + }; + + painter.add(callback); + } + + // ------------------------------------------------------------------------ + // Add egui driven labels on top of re_renderer content. // Needs to come before hovering checks because it adds more objects for hovering. { @@ -305,133 +335,108 @@ fn view_2d_scrollable( // ------------------------------------------------------------------------ - // What tooltips we've shown so far - let mut shown_tooltips = ahash::HashSet::default(); - let mut depths_at_pointer = vec![]; - let mut closest_instance_id_hash = InstanceIdHash::NONE; - // Check if we're hovering any hover primitive. + *hovered_instance = None; + let mut depth_at_pointer = None; if let Some(pointer_pos_ui) = response.hover_pos() { - // All hover primitives have their coordinates in space units. - // Transform the pointer pos so we don't have to transform anything else! let pointer_pos_space = space_from_ui.transform_pos(pointer_pos_ui); - let pointer_pos_space_glam = glam::vec2(pointer_pos_space.x, pointer_pos_space.y); - let hover_radius = space_from_ui.scale().y * 5.0; // TODO(emilk): from egui? 
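        // (The per-primitive hover loops that used to follow are replaced by the shared
        // `scene.picking(..)` path. The 2D view hands it an orthographic identity eye, so the
        // picking "ray" passes through the pointer's space position along +Z and `hover_radius`
        // stays in space units. Sketch, matching the call further down:
        //     let eye = Eye { world_from_view: IsoTransform::IDENTITY, fov_y: None };
        //     let result = scene.picking(
        //         glam::vec2(pointer_pos_space.x, pointer_pos_space.y),
        //         &scene_rect_accum, &eye, hover_radius);)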
- let mut closest_dist = hover_radius; - - let mut check_hovering = |instance_hash, dist: f32| { - if dist <= closest_dist { - closest_dist = dist; - closest_instance_id_hash = instance_hash; - } - }; - - for (bbox, instance_hash) in &scene.ui.rects { - check_hovering(*instance_hash, bbox.distance_to_pos(pointer_pos_space)); - } - - for (point, instance_hash) in scene - .primitives - .points - .vertices - .iter() - .zip(scene.primitives.points.user_data.iter()) - { - if instance_hash.is_none() { - continue; - } - - check_hovering( - *instance_hash, - point.position.truncate().distance(pointer_pos_space_glam), - ); - } - - for ((_info, instance_hash), vertices) in - scene.primitives.line_strips.iter_strips_with_vertices() - { - if instance_hash.is_none() { - continue; - } - - let mut min_dist_sq = f32::INFINITY; - - for (a, b) in vertices.tuple_windows() { - let line_segment_distance_sq = crate::math::line_segment_distance_sq_to_point_2d( - [a.position.truncate(), b.position.truncate()], - pointer_pos_space_glam, - ); - min_dist_sq = min_dist_sq.min(line_segment_distance_sq); - } - - check_hovering(*instance_hash, min_dist_sq.sqrt()); - } - - for img in &scene.ui.images { - let Image { - instance_hash, - tensor, - meter, - annotations, - } = img; - - if instance_hash.is_none() { - continue; - } - - let (w, h) = (tensor.shape[1].size as f32, tensor.shape[0].size as f32); - let rect = Rect::from_min_size(Pos2::ZERO, vec2(w, h)); - let dist = rect.distance_sq_to_pos(pointer_pos_space).sqrt(); - let dist = dist.at_least(hover_radius); // allow stuff on top of us to "win" - check_hovering(*instance_hash, dist); + let picking_result = scene.picking( + glam::vec2(pointer_pos_space.x, pointer_pos_space.y), + &scene_rect_accum, + &Eye { + world_from_view: IsoTransform::IDENTITY, + fov_y: None, + }, + hover_radius, + ); + + for hit in picking_result.iter_hits() { + let Some(instance_id) = hit.instance_hash.resolve(&ctx.log_db.obj_db) + else { continue; }; + + // Special hover ui for images. + let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { + scene + .ui + .images + .iter() + .find(|image| image.instance_hash == hit.instance_hash) + .map(|image| (image, uv)) + } else { + None + }; + response = if let Some((image, uv)) = picked_image_with_uv { + // TODO(andreas): This is different in 3d view. + if let Some(meter) = image.meter { + if let Some(raw_value) = image.tensor.get(&[ + pointer_pos_space.y.round() as _, + pointer_pos_space.x.round() as _, + ]) { + let raw_value = raw_value.as_f64(); + let depth_in_meters = raw_value / meter as f64; + depth_at_pointer = Some(depth_in_meters as f32); + } + } - // Show tooltips for all images, not just the "most hovered" one. 
- //TODO(john) this should probably be factored out into `data_ui` - if rect.contains(pointer_pos_space) { - response = response + response .on_hover_cursor(egui::CursorIcon::ZoomIn) .on_hover_ui_at_pointer(|ui| { ui.set_max_width(400.0); ui.vertical(|ui| { - if let Some(instance_id) = instance_hash.resolve(&ctx.log_db.obj_db) { - ui.label(instance_id.to_string()); - instance_id.data_ui(ctx, ui, Preview::Small); - ui.separator(); - } + ui.label(instance_id.to_string()); + instance_id.data_ui(ctx, ui, Preview::Small); - let legend = Some(annotations.clone()); + let legend = Some(image.annotations.clone()); let tensor_view = ctx.cache.image.get_view_with_annotations( - tensor, + &image.tensor, &legend, ctx.render_ctx, ); - ui.horizontal(|ui| { - data_ui::image::show_zoomed_image_region( - parent_ui, - ui, - &tensor_view, - ui_from_space.transform_rect(rect), - pointer_pos_ui, - *meter, - ); - }); + if let [h, w, ..] = image.tensor.shape.as_slice() { + ui.separator(); + ui.horizontal(|ui| { + // TODO(andreas): 3d skips the show_zoomed_image_region_rect part here. + let (w, h) = (w.size as f32, h.size as f32); + let center = [(uv.x * w) as isize, (uv.y * h) as isize]; + let rect = Rect::from_min_size(Pos2::ZERO, egui::vec2(w, h)); + data_ui::image::show_zoomed_image_region_area_outline( + parent_ui, + &tensor_view, + center, + ui_from_space.transform_rect(rect), + ); + data_ui::image::show_zoomed_image_region( + ui, + &tensor_view, + center, + image.meter, + ); + }); + } }); - }); - - shown_tooltips.insert(*instance_hash); + }) + } else { + // Hover ui for everything else + response.on_hover_ui_at_pointer(|ui| { + ctx.instance_id_button(ui, &instance_id); + instance_id.data_ui(ctx, ui, crate::ui::Preview::Medium); + }) + }; + + if let Some(closest_pick) = picking_result.iter_hits().last() { + // Save last known hovered object. + if let Some(instance_id) = closest_pick.instance_hash.resolve(&ctx.log_db.obj_db) { + *hovered_instance = Some(instance_id); + } } - if let Some(meter) = *meter { - if let Some(raw_value) = tensor.get(&[ - pointer_pos_space.y.round() as _, - pointer_pos_space.x.round() as _, - ]) { - let raw_value = raw_value.as_f64(); - let depth_in_meters = raw_value / meter as f64; - depths_at_pointer.push(depth_in_meters); + // Clicking the last hovered object. + if let Some(instance_id) = hovered_instance { + if response.clicked() { + ctx.set_selection(crate::Selection::Instance(instance_id.clone())); } } } @@ -439,53 +444,6 @@ fn view_2d_scrollable( // ------------------------------------------------------------------------ - // Draw a re_renderer driven view. - // Camera & projection are configured to ingest space coordinates directly. 
- { - crate::profile_scope!("build command buffer for 2D view {}", space.to_string()); - - let Ok(target_config) = setup_target_config( - &painter, - space_from_ui, - space_from_pixel, - &space.to_string(), - ) else { - return response; - }; - - let Ok(callback) = create_scene_paint_callback( - ctx.render_ctx, - target_config, painter.clip_rect(), - &scene.primitives, - &ScreenBackground::ClearColor(parent_ui.visuals().extreme_bg_color.into()), - ) else { - return response; - }; - - painter.add(callback); - } - - // ------------------------------------------------------------------------ - - if let Some(instance_id) = hovered_instance { - if response.clicked() { - ctx.set_selection(Selection::Instance(instance_id.clone())); - } - if !shown_tooltips.contains(&instance_id.hash()) { - response = response.on_hover_ui_at_pointer(|ui| { - ctx.instance_id_button(ui, instance_id); - instance_id.data_ui(ctx, ui, Preview::Small); - }); - } - } - - // ------------------------------------------------------------------------ - - let depth_at_pointer = if depths_at_pointer.len() == 1 { - depths_at_pointer[0] as f32 - } else { - f32::INFINITY - }; project_onto_other_spaces(ctx, space, &response, &space_from_ui, depth_at_pointer); painter.extend(show_projections_from_3d_space( ctx, @@ -496,8 +454,6 @@ fn view_2d_scrollable( // ------------------------------------------------------------------------ - *hovered_instance = closest_instance_id_hash.resolve(&ctx.log_db.obj_db); - response } @@ -560,7 +516,7 @@ fn create_labels( scene .ui - .rects + .pickable_ui_rects .push((space_from_ui.transform_rect(bg_rect), label.labled_instance)); } @@ -595,13 +551,17 @@ fn project_onto_other_spaces( space: &ObjPath, response: &Response, space_from_ui: &RectTransform, - z: f32, + z: Option, ) { if let Some(pointer_in_screen) = response.hover_pos() { let pointer_in_space = space_from_ui.transform_pos(pointer_in_screen); ctx.rec_cfg.hovered_space_this_frame = HoveredSpace::TwoD { space_2d: space.clone(), - pos: glam::vec3(pointer_in_space.x, pointer_in_space.y, z), + pos: glam::vec3( + pointer_in_space.x, + pointer_in_space.y, + z.unwrap_or(f32::INFINITY), + ), }; } } @@ -614,7 +574,7 @@ fn show_projections_from_3d_space( ) -> Vec { let mut shapes = Vec::new(); if let HoveredSpace::ThreeD { target_spaces, .. } = &ctx.rec_cfg.hovered_space_previous_frame { - for (space_2d, ray_2d, pos_2d) in target_spaces { + for (space_2d, pos_2d) in target_spaces { if space_2d == space { if let Some(pos_2d) = pos_2d { // User is hovering a 2D point inside a 3D view. @@ -641,26 +601,6 @@ fn show_projections_from_3d_space( )); shapes.push(Shape::galley(rect.min, galley)); } - - let show_ray = false; // This visualization is mostly confusing - if show_ray { - if let Some(ray_2d) = ray_2d { - // User is hovering a 3D view with a camera in it. - // TODO(emilk): figure out a nice visualization here, or delete the code. 
- let origin = ray_2d.origin; - let end = ray_2d.point_along(10_000.0); - - let origin = pos2(origin.x / origin.z, origin.y / origin.z); - let end = pos2(end.x / end.z, end.y / end.z); - - let origin = ui_from_space.transform_pos(origin); - let end = ui_from_space.transform_pos(end); - - shapes.push(Shape::circle_filled(origin, 5.0, Color32::WHITE)); - shapes.push(Shape::line_segment([origin, end], (3.0, Color32::BLACK))); - shapes.push(Shape::line_segment([origin, end], (2.0, Color32::WHITE))); - } - } } } } diff --git a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs index 7def7278e2ee..e689154fdba8 100644 --- a/crates/re_viewer/src/ui/view_spatial/ui_3d.rs +++ b/crates/re_viewer/src/ui/view_spatial/ui_3d.rs @@ -1,6 +1,6 @@ use egui::NumExt as _; use glam::Affine3A; -use macaw::{vec3, BoundingBox, Quat, Ray3, Vec3}; +use macaw::{vec3, BoundingBox, Quat, Vec3}; use re_data_store::{InstanceId, InstanceIdHash, ObjectsProperties}; use re_log_types::{ObjPath, ViewCoordinates}; @@ -12,11 +12,13 @@ use re_renderer::{ use crate::{ misc::{HoveredSpace, Selection}, ui::{ - data_ui::DataUi, + data_ui::{self, DataUi}, view_spatial::{ + scene::AdditionalPickingInfo, ui_renderer_bridge::{create_scene_paint_callback, get_viewport, ScreenBackground}, SceneSpatial, SpaceCamera3D, AXIS_COLOR_X, AXIS_COLOR_Y, AXIS_COLOR_Z, }, + Preview, }, ViewerContext, }; @@ -373,37 +375,85 @@ pub fn view_3d( } } - if ui.input().pointer.any_click() { - if let Some(hovered_instance) = &hovered_instance { - click_object(ctx, space_cameras, state, hovered_instance); + // TODO(andreas): We're very close making the hover reaction of ui2d and ui3d the same. Finish the job! + *hovered_instance = None; + if let Some(pointer_pos) = response.hover_pos() { + let picking_result = + scene.picking(glam::vec2(pointer_pos.x, pointer_pos.y), &rect, &eye, 5.0); + + for hit in picking_result.iter_hits() { + let Some(instance_id) = hit.instance_hash.resolve(&ctx.log_db.obj_db) + else { continue; }; + + // Special hover ui for images. + let picked_image_with_uv = if let AdditionalPickingInfo::TexturedRect(uv) = hit.info { + scene + .ui + .images + .iter() + .find(|image| image.instance_hash == hit.instance_hash) + .map(|image| (image, uv)) + } else { + None + }; + response = if let Some((image, uv)) = picked_image_with_uv { + response + .on_hover_cursor(egui::CursorIcon::ZoomIn) + .on_hover_ui_at_pointer(|ui| { + ui.set_max_width(400.0); + + ui.vertical(|ui| { + ui.label(instance_id.to_string()); + instance_id.data_ui(ctx, ui, Preview::Small); + + let legend = Some(image.annotations.clone()); + let tensor_view = ctx.cache.image.get_view_with_annotations( + &image.tensor, + &legend, + ctx.render_ctx, + ); + + if let [h, w, ..] 
= image.tensor.shape.as_slice() { + ui.separator(); + ui.horizontal(|ui| { + let (w, h) = (w.size as f32, h.size as f32); + let center = [(uv.x * w) as isize, (uv.y * h) as isize]; + data_ui::image::show_zoomed_image_region( + ui, + &tensor_view, + center, + image.meter, + ); + }); + } + }); + }) + } else { + // Hover ui for everything else + response.on_hover_ui_at_pointer(|ui| { + ctx.instance_id_button(ui, &instance_id); + instance_id.data_ui(ctx, ui, crate::ui::Preview::Medium); + }) + }; } - } else if ui.input().pointer.any_down() { - *hovered_instance = None; - } - - if let Some(instance_id) = &hovered_instance { - response = response.on_hover_ui_at_pointer(|ui| { - ctx.instance_id_button(ui, instance_id); - instance_id.data_ui(ctx, ui, crate::ui::Preview::Medium); - }); - } - let hovered = response.hover_pos().and_then(|pointer_pos| { - scene - .primitives - .picking(glam::vec2(pointer_pos.x, pointer_pos.y), &rect, &eye) - }); + if let Some(closest_pick) = picking_result.iter_hits().last() { + // Save last known hovered object. + if let Some(instance_id) = closest_pick.instance_hash.resolve(&ctx.log_db.obj_db) { + state.hovered_point = Some(picking_result.space_position(closest_pick)); + *hovered_instance = Some(instance_id); + } + } - *hovered_instance = None; - state.hovered_point = None; - if let Some((instance_id, point)) = hovered { - if let Some(instance_id) = instance_id.resolve(&ctx.log_db.obj_db) { - *hovered_instance = Some(instance_id); - state.hovered_point = Some(point); + // Clicking the last hovered object. + if let Some(instance_id) = hovered_instance { + if ui.input().pointer.any_click() { + click_object(ctx, space_cameras, state, instance_id); + } } - } - project_onto_other_spaces(ctx, space_cameras, state, space, &response, orbit_eye); + project_onto_other_spaces(ctx, space_cameras, state, space); + } show_projections_from_2d_space(ctx, space_cameras, &mut scene, scene_bbox_accum); if state.show_axes { show_origin_axis(&mut scene); @@ -497,7 +547,7 @@ fn paint_view( view_from_world: eye.world_from_view.inverse(), projection_from_view: Projection::Perspective { - vertical_fov: eye.fov_y, + vertical_fov: eye.fov_y.unwrap(), near_plane_distance: eye.near(), }, @@ -573,32 +623,14 @@ fn project_onto_other_spaces( space_cameras: &[SpaceCamera3D], state: &mut View3DState, space: &ObjPath, - response: &egui::Response, - orbit_eye: OrbitEye, ) { - let Some(pos_in_ui) = response.hover_pos() else { return }; - - let ray_in_world = { - let eye = orbit_eye.to_eye(); - let world_from_ui = eye.world_from_ui(&response.rect); - let ray_origin = eye.pos_in_world(); - let ray_dir = - world_from_ui.project_point3(glam::vec3(pos_in_ui.x, pos_in_ui.y, 1.0)) - ray_origin; - Ray3::from_origin_dir(ray_origin, ray_dir.normalize()) - }; - let mut target_spaces = vec![]; for cam in space_cameras { if let Some(target_space) = cam.target_space.clone() { - let ray_in_2d = cam - .image_from_world() - .map(|image_from_world| (image_from_world * ray_in_world).normalize()); - let point_in_2d = state .hovered_point .and_then(|hovered_point| cam.project_onto_2d(hovered_point)); - - target_spaces.push((target_space, ray_in_2d, point_in_2d)); + target_spaces.push((target_space, point_in_2d)); } } ctx.rec_cfg.hovered_space_this_frame = HoveredSpace::ThreeD {
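
For reviewers, a minimal end-to-end sketch of how the pieces added in this diff compose (not part of the PR; the helper name and the example segment are made up, and `Eye` / `ray_closest_t_line_segment` are assumed to be in scope):

    use glam::{vec2, vec3};
    use macaw::IsoTransform;

    /// Illustrative only: pick against a single world-space segment from a 2D-style view.
    fn example_orthographic_pick(screen_rect: &egui::Rect, pointer_in_ui: egui::Pos2) -> glam::Vec3 {
        // The same eye the 2D view builds: identity transform + no FOV => orthographic.
        let eye = Eye {
            world_from_view: IsoTransform::IDENTITY,
            fov_y: None,
        };

        // For an orthographic eye the ray originates on the camera plane (not at the camera
        // position) and travels along the view direction.
        let ray = eye.picking_ray(screen_rect, vec2(pointer_in_ui.x, pointer_in_ui.y));

        // Ray offset at which we pass closest to the segment...
        let t = ray_closest_t_line_segment(&ray, [vec3(-1.0, 0.0, 0.0), vec3(1.0, 0.0, 0.0)]);

        // ...and the corresponding position, as documented on `ray_closest_t_line_segment`.
        ray.origin + ray.dir * t
    }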