From abdc89d277a454804f0462a6cb054dae84f2234b Mon Sep 17 00:00:00 2001 From: sherrpass Date: Fri, 22 Mar 2024 18:16:25 +0800 Subject: [PATCH 01/11] Integrate upsampling into vvplay --- Cargo.lock | 55 +++++---- src/render/wgpu/camera.rs | 10 +- src/render/wgpu/mod.rs | 1 + src/render/wgpu/render_manager.rs | 79 +++++++++++- src/render/wgpu/renderer.rs | 5 +- src/render/wgpu/upsampler.rs | 197 ++++++++++++++++++++++++++++++ 6 files changed, 312 insertions(+), 35 deletions(-) create mode 100644 src/render/wgpu/upsampler.rs diff --git a/Cargo.lock b/Cargo.lock index a9ae3cc..840fd0c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -346,7 +346,7 @@ checksum = "965ab7eb5f8f97d2a083c799f3a1b994fc397b2fe2da5d1da1626ce15a39f2b1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -546,7 +546,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -918,7 +918,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -940,7 +940,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -1266,7 +1266,7 @@ checksum = "1a5c6c585bc94aaf2c7b51dd4c2ba22680844aba4c687be581871a6f518c5742" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -1368,7 +1368,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -2381,7 +2381,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -2594,9 +2594,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.71" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75cb1540fadbd5b8fbccc4dddad2734eba435053f725621c070711a14bb5f4b8" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -2639,9 +2639,9 @@ dependencies = [ [[package]] name = "quote" -version = "1.0.33" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] @@ -2955,29 +2955,29 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.193" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.193" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = 
"c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", @@ -3022,7 +3022,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -3192,9 +3192,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.42" +version = "2.0.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b7d0a2c048d661a1a59fcd7355baa232f7ed34e0ee4df2eef3c1c1c0d3852d8" +checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032" dependencies = [ "proc-macro2", "quote", @@ -3277,7 +3277,7 @@ checksum = "01742297787513b79cf8e29d1056ede1313e2420b7b3b15d0a768b4921f549df" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -3413,7 +3413,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -3482,7 +3482,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] @@ -3643,6 +3643,7 @@ dependencies = [ "reqwest", "roxmltree", "serde", + "serde_json", "serde_with", "tempfile", "thiserror", @@ -3699,7 +3700,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", "wasm-bindgen-shared", ] @@ -3733,7 +3734,7 @@ checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -4296,7 +4297,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.42", + "syn 2.0.53", ] [[package]] diff --git a/src/render/wgpu/camera.rs b/src/render/wgpu/camera.rs index bf6b069..139a8cb 100644 --- a/src/render/wgpu/camera.rs +++ b/src/render/wgpu/camera.rs @@ -6,10 +6,10 @@ use wgpu::util::DeviceExt; use winit::dpi::PhysicalPosition; use winit::event::*; -const CAMERA_SPEED: f32 = 2.0; -const CAMERA_SENSITIVITY: f32 = 0.5; +const CAMERA_SPEED: f32 = 1.0; +const CAMERA_SENSITIVITY: f32 = 0.2; const PROJECTION_FOXY: f32 = 45.0; -const PROJECTION_ZNEAR: f32 = 0.1; +const PROJECTION_ZNEAR: f32 = 0.001; const PROJECTION_ZFAR: f32 = 100.0; #[derive(Clone)] @@ -152,8 +152,8 @@ impl CameraState { /// Create a uniform buffer: a blob of data that is available to every invocation of a set of shaders. 
/// This buffer is used to store our view projection matrix pub struct CameraUniform { - view_position: [f32; 4], - view_proj: [[f32; 4]; 4], + pub view_position: [f32; 4], + pub view_proj: [[f32; 4]; 4], } impl Default for CameraUniform { diff --git a/src/render/wgpu/mod.rs b/src/render/wgpu/mod.rs index 73fe454..96182c3 100644 --- a/src/render/wgpu/mod.rs +++ b/src/render/wgpu/mod.rs @@ -10,3 +10,4 @@ pub mod render_manager; pub mod renderable; pub mod renderer; pub mod resolution_controller; +pub mod upsampler; diff --git a/src/render/wgpu/render_manager.rs b/src/render/wgpu/render_manager.rs index 06bb818..adb47e4 100644 --- a/src/render/wgpu/render_manager.rs +++ b/src/render/wgpu/render_manager.rs @@ -1,14 +1,20 @@ +use wgpu_glyph::ab_glyph::Point; + +use cgmath::*; use crate::formats::metadata::MetaData; use crate::formats::pointxyzrgba::PointXyzRgba; use crate::formats::PointCloud; +use crate::render::wgpu::antialias; use std::marker::PhantomData; use std::path::Path; use std::process::exit; +use std::time::{Duration, Instant}; use super::camera::CameraState; use super::reader::{LODFileReader, RenderReader}; use super::renderable::Renderable; use super::resolution_controller::ResolutionController; +use super::upsampler::Upsampler; pub trait RenderManager { fn start(&mut self) -> Option; @@ -18,6 +24,7 @@ pub trait RenderManager { fn set_len(&mut self, len: usize); fn set_camera_state(&mut self, camera_state: Option); fn should_redraw(&mut self, camera_state: &CameraState) -> bool; + fn get_visible_points(&self, point_cloud: PointCloud) -> PointCloud; } pub struct AdaptiveManager { @@ -33,6 +40,13 @@ pub struct AdaptiveManager { // As the temporary cache current_index: usize, additional_points_loaded: Vec, + + // For upsampling + upsampler: Upsampler, + pc: Option>, + + total_latency: Duration, + sample_size: i32, } fn infer_format(src: &String) -> String { @@ -122,12 +136,17 @@ impl AdaptiveManager { let additional_points_loaded = vec![0; reader.len()]; Self { + pc: None, + upsampler: Upsampler { }, reader, camera_state: None, resolution_controller: Some(resolution_controller), metadata: Some(metadata), current_index: usize::MAX, // no point cloud loaded yet additional_points_loaded, + total_latency: Duration::new(0, 0), + sample_size: 0, + } } else { let reader = LODFileReader::new(base_path, None, &play_format); @@ -138,12 +157,16 @@ impl AdaptiveManager { } Self { + pc: None, + upsampler: Upsampler { }, reader, camera_state: None, resolution_controller: None, metadata: None, current_index: usize::MAX, additional_points_loaded: vec![], + total_latency: Duration::new(0, 0), + sample_size: 0, } } } @@ -237,7 +260,54 @@ impl RenderManager> for AdaptiveManager { } fn get_at(&mut self, index: usize) -> Option> { - self.get_desired_point_cloud(index) + // println!("RenderManager get_at: {:?}", index); + + if index != self.current_index || self.pc.is_none() { + // println!("Loading point cloud at index: {:?}, {:?} {:?}", index, self.current_index, self.pc.is_none()); + self.pc = Some(self.get_desired_point_cloud(index)?); + self.current_index = index; + } + + let pc = self.pc.as_ref().unwrap(); + let start = Instant::now(); + let mut visible_pc = self.get_visible_points(pc.clone()); + let visibility_elasped = start.elapsed(); + // println!("Calculated visibility in {:?}", visibility_elasped); + + let should_upsample = self.upsampler.should_upsample(&visible_pc, &self.camera_state.as_ref().unwrap()); + + if should_upsample { + let init_len = visible_pc.points.len(); + let 
upsampled_points = self.upsampler.upsample_grid(visible_pc.points.clone()); + let upsampled_pc = PointCloud::new(upsampled_points.len(), upsampled_points.clone()); + visible_pc.combine(&upsampled_pc); + self.pc.as_mut().unwrap().combine(&upsampled_pc); + let upsample_elasped: Duration = start.elapsed(); + + println!("Upsampled points from {:?} to {:?} in {:?}", init_len, visible_pc.points.len(), upsample_elasped); + } + Some(visible_pc) + // println!("Point visibility took: {:?}", start.elapsed()); + // self.total_latency += start.elapsed(); + // self.sample_size += 1; + // println!("Average Point visibility took: {:?}", self.total_latency / self.sample_size.try_into().unwrap()); + + } + + fn get_visible_points(&self, point_cloud: PointCloud) -> PointCloud { + // println!("Number of points total: {:?}", point_cloud.points.len()); + let view_proj_matrix = Matrix4::from(self.camera_state.as_ref().unwrap().camera_uniform.view_proj); + let antialias = point_cloud.antialias(); + let visible_points = point_cloud.points.into_iter().filter(|point| { + let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; + let point_in_view = view_proj_matrix.transform_point(point_vec); + + point_in_view.x.abs() <= 1.0 && + point_in_view.y.abs() <= 1.0 && + point_in_view.z.abs() <= 1.0 + }).collect::>(); + // println!("Number of points visible: {:?}", visible_points.len()); + PointCloud::new(visible_points.len(), visible_points) } fn len(&self) -> usize { @@ -257,7 +327,8 @@ impl RenderManager> for AdaptiveManager { } fn should_redraw(&mut self, camera_state: &CameraState) -> bool { - self.should_load_more_points(camera_state) + true + // self.should_load_more_points(camera_state) } } @@ -314,4 +385,8 @@ where fn should_redraw(&mut self, _camera_state: &CameraState) -> bool { false } + + fn get_visible_points(&self, point_cloud: PointCloud) -> PointCloud { + PointCloud::new(0, vec![]) + } } diff --git a/src/render/wgpu/renderer.rs b/src/render/wgpu/renderer.rs index 728ee01..c0af928 100644 --- a/src/render/wgpu/renderer.rs +++ b/src/render/wgpu/renderer.rs @@ -373,7 +373,7 @@ where } } else if self.reader.should_redraw(&self.camera_state) { self.redisplay(); - } + } let info = RenderInformation { camera: self.camera_state.camera, @@ -529,9 +529,12 @@ where pub fn update_vertices(&mut self, device: &Device, queue: &Queue, data: &T) { let vertices = data.num_vertices(); if vertices > self.num_vertices { + // print!("creating new device"); + self.vertex_buffer.destroy(); self.vertex_buffer = data.create_buffer(device); } else { + // print!("writing to buffer length: {}", data.bytes().len()); queue.write_buffer(&self.vertex_buffer, 0, data.bytes()); } self.num_vertices = vertices; diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs new file mode 100644 index 0000000..16dfef2 --- /dev/null +++ b/src/render/wgpu/upsampler.rs @@ -0,0 +1,197 @@ +use cgmath::{Matrix4, Point3, Transform}; +use kiddo::{distance::squared_euclidean, KdTree}; +use num_traits::Float; +use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; + +use crate::formats::{pointxyzrgba::PointXyzRgba, PointCloud}; +use std::{collections::{BTreeSet, HashSet}, time::Instant}; + +use super::{camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController}; + +pub struct Upsampler { + +} + +const VIEWPORT_DIST_UPSAMPLING_THRESHOLD: f32 = 3.0; + +impl Upsampler { + pub fn new() -> Self { + Self {} + } + + pub fn 
should_upsample(&self, point_cloud: &PointCloud, camera_state: &CameraState) -> bool { + /* + 1. Get points in NDC + 2. Calculate the average distance normalised by viewport + 3. If greater than **threshold**, upsample + */ + let start = Instant::now(); + let point_num = point_cloud.points.len(); + if point_num == 0 || point_num > 100_000 { + return false + } + let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); + let antialias = point_cloud.antialias(); + let width = camera_state.get_window_size().width; + let height = camera_state.get_window_size().height; + let points_viewport = point_cloud.points.par_iter().map(|point| { + let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; + let point_ndc = view_proj_matrix.transform_point(point_vec); + let x = (point_ndc.x * (width as f32)) as i32; + let y = (point_ndc.y * (height as f32)) as i32; + (x, y) + }).collect::>().par_iter().map(|coords| { + PointXyzRgba { + x: coords.0 as f32, + y: coords.1 as f32, + z: 0 as f32, + r: 0, + g: 0, + b: 0, + a: 0, + } + }).collect::>(); + + let average_spacing = Self::calculate_spacing(&points_viewport); + println!("{:?}", average_spacing); + println!("Time taken {:?}", start.elapsed()); + return average_spacing > VIEWPORT_DIST_UPSAMPLING_THRESHOLD + } + + fn calculate_spacing(points: &Vec) -> f32 { + let mut tree = KdTree::new(); + for (i, p) in points.iter().enumerate() { + tree.add(&[p.x, p.y, p.z], i).unwrap(); + } + + let mut sum = 0.0; + // The value is currently hard coded. Can potentially be improved with variance + let k_nearest = 4; + + for p in points.iter() { + let avg_spacing = tree + .nearest(&[p.x, p.y, p.z], k_nearest, &squared_euclidean) + .unwrap() + .iter() + .skip(1) // ignore the first point (same point) + .map(|(d, _)| d.sqrt()) + .sum::() + / (k_nearest - 1) as f32; + + sum += avg_spacing; + } + + sum / points.len() as f32 + } + + fn euclidean_distance_3d(&self, point1: &PointXyzRgba, point2: &PointXyzRgba) -> f32 { + let dx = point1.x - point2.x; + let dy = point1.y - point2.y; + let dz = point1.z - point2.z; + (dx.powi(2) + dy.powi(2) + dz.powi(2)).sqrt() + } + + fn get_middlepoint(&self, point1: &PointXyzRgba, point2: &PointXyzRgba) -> PointXyzRgba { + let geom_x = ((point1.x as f32) + (point2.x as f32)) / 2.0; + let geom_y = ((point1.y as f32) + (point2.y as f32)) / 2.0; + let geom_z = ((point1.z as f32) + (point2.z as f32)) / 2.0; + + let col_r = ((point1.r as f32) + (point2.r as f32)) / 2.0; + let col_g = ((point1.g as f32) + (point2.g as f32)) / 2.0; + let col_b = ((point1.b as f32) + (point2.b as f32)) / 2.0; + let col_a = ((point1.a as f32) + (point2.a as f32)) / 2.0; + PointXyzRgba { + x: geom_x, + y: geom_y, + z: geom_z, + r: col_r as u8, + g: col_g as u8, + b: col_b as u8, + a: col_a as u8, + } + } + + fn get_circumference_order(&self, neighbours: &Vec, points: &Vec) -> Vec { + let mut curr = neighbours[0]; // Assuming this is valid + let mut order = vec![curr]; + let mut seen = HashSet::new(); + seen.insert(curr); + + while order.len() < neighbours.len() { + let mut min_distance = f32::INFINITY; + let mut nearest_neighbour = None; + + for &neighbour in neighbours { + if seen.contains(&neighbour) { + continue; + } + let distance = self.euclidean_distance_3d(&points[curr], &points[neighbour]); + if distance < min_distance { + min_distance = distance; + nearest_neighbour = Some(neighbour); + } + } + + let next_point = nearest_neighbour.expect("Failed to find nearest neighbour"); + curr = 
next_point; + order.push(curr); + seen.insert(curr); + } + + order + } + + pub fn upsample_grid(&self, vertices: Vec) -> Vec { + let start = Instant::now(); + let mut kd_tree = KdTree::new(); + for (i, pt) in vertices.iter().enumerate() { + kd_tree + .add(&[pt.x, pt.y, pt.z], i) + .expect("Failed to add to kd tree"); + } + // let end_kd_init = start.elapsed(); + let mut visited: HashSet<(usize, usize)> = HashSet::new(); + let mut new_points: Vec = vec![]; + for source in 0..vertices.len() { + + let point = vertices[source]; + let x = point.x; + let y = point.y; + let z = point.z; + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + Ok(nearest) => { + let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect(); + let order = self.get_circumference_order(&neighbours, &vertices); + + for i in 0..order.len() { + let next_i = (i + 1) % order.len(); + let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; + let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; + + for &pair in &[circumference_pair, source_pair] { + if visited.contains(&pair) { + continue; + } + let middlepoint = self.get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + new_points.push(middlepoint); + } + visited.insert(source_pair); + visited.insert(circumference_pair); + + let next_next_i = (i + 2) % order.len(); + let dup_pair = if order[next_next_i] < source { (order[next_next_i], source) } else { (source, order[next_next_i]) }; + visited.insert(dup_pair); + } + } + Err(e) => { + println!("{:?}", e); + } + } + }; + new_points + } + + + + +} From ac8f52c362d6e292360ad2042d8ab1e1cc190169 Mon Sep 17 00:00:00 2001 From: sherrpass Date: Tue, 26 Mar 2024 09:34:43 +0800 Subject: [PATCH 02/11] Parallise upsampling in grids --- src/pipeline/subcommands/upsample.rs | 4 +- src/render/wgpu/upsampler.rs | 73 +++++----- src/upsample/interpolate.rs | 191 ++++++++++++++++++++++++++- 3 files changed, 228 insertions(+), 40 deletions(-) diff --git a/src/pipeline/subcommands/upsample.rs b/src/pipeline/subcommands/upsample.rs index 45e69b5..f92ede9 100644 --- a/src/pipeline/subcommands/upsample.rs +++ b/src/pipeline/subcommands/upsample.rs @@ -2,7 +2,7 @@ use clap::Parser; use crate::{ pipeline::{channel::Channel, PipelineMessage}, - upsample::interpolate::upsample, + upsample::interpolate::{upsample, upsample_grid}, }; use super::Subcommand; @@ -32,7 +32,7 @@ impl Subcommand for Upsampler { for message in messages { match message { PipelineMessage::IndexedPointCloud(pc, i) => { - let upsampled_pc = upsample(pc, self.factor); + let upsampled_pc = upsample_grid(pc, self.factor); channel.send(PipelineMessage::IndexedPointCloud(upsampled_pc, i)); } PipelineMessage::Metrics(_) diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index 16dfef2..783d8bd 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -20,42 +20,43 @@ impl Upsampler { } pub fn should_upsample(&self, point_cloud: &PointCloud, camera_state: &CameraState) -> bool { - /* - 1. Get points in NDC - 2. Calculate the average distance normalised by viewport - 3. 
If greater than **threshold**, upsample - */ - let start = Instant::now(); - let point_num = point_cloud.points.len(); - if point_num == 0 || point_num > 100_000 { - return false - } - let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); - let antialias = point_cloud.antialias(); - let width = camera_state.get_window_size().width; - let height = camera_state.get_window_size().height; - let points_viewport = point_cloud.points.par_iter().map(|point| { - let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; - let point_ndc = view_proj_matrix.transform_point(point_vec); - let x = (point_ndc.x * (width as f32)) as i32; - let y = (point_ndc.y * (height as f32)) as i32; - (x, y) - }).collect::>().par_iter().map(|coords| { - PointXyzRgba { - x: coords.0 as f32, - y: coords.1 as f32, - z: 0 as f32, - r: 0, - g: 0, - b: 0, - a: 0, - } - }).collect::>(); - - let average_spacing = Self::calculate_spacing(&points_viewport); - println!("{:?}", average_spacing); - println!("Time taken {:?}", start.elapsed()); - return average_spacing > VIEWPORT_DIST_UPSAMPLING_THRESHOLD + false + // /* + // 1. Get points in NDC + // 2. Calculate the average distance normalised by viewport + // 3. If greater than **threshold**, upsample + // */ + // let start = Instant::now(); + // let point_num = point_cloud.points.len(); + // if point_num == 0 || point_num > 100_000 { + // return false + // } + // let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); + // let antialias = point_cloud.antialias(); + // let width = camera_state.get_window_size().width; + // let height = camera_state.get_window_size().height; + // let points_viewport = point_cloud.points.par_iter().map(|point| { + // let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; + // let point_ndc = view_proj_matrix.transform_point(point_vec); + // let x = (point_ndc.x * (width as f32)) as i32; + // let y = (point_ndc.y * (height as f32)) as i32; + // (x, y) + // }).collect::>().par_iter().map(|coords| { + // PointXyzRgba { + // x: coords.0 as f32, + // y: coords.1 as f32, + // z: 0 as f32, + // r: 0, + // g: 0, + // b: 0, + // a: 0, + // } + // }).collect::>(); + + // let average_spacing = Self::calculate_spacing(&points_viewport); + // println!("{:?}", average_spacing); + // println!("Time taken {:?}", start.elapsed()); + // return average_spacing > VIEWPORT_DIST_UPSAMPLING_THRESHOLD } fn calculate_spacing(points: &Vec) -> f32 { diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index dc7043b..5d353f8 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -1,9 +1,10 @@ -use std::collections::HashSet; +use std::{collections::HashSet, time::Instant}; use kiddo::{distance::squared_euclidean, KdTree}; use log::warn; +use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; -use crate::formats::{pointxyzrgba::PointXyzRgba, PointCloud}; +use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound}; pub fn upsample(point_cloud: PointCloud, factor: usize) -> PointCloud { if factor <= 1 { @@ -85,6 +86,192 @@ pub fn upsample(point_cloud: PointCloud, factor: usize) -> PointCl } } + +fn calculate_spacing(points: &Vec) -> f32 { + let mut tree = KdTree::new(); + for (i, p) in points.iter().enumerate() { + tree.add(&[p.x, p.y, p.z], i).unwrap(); + } + + let mut sum = 0.0; + // The value is currently hard coded. 
Can potentially be improved with variance + let k_nearest = 4; + + for p in points.iter() { + let avg_spacing = tree + .nearest(&[p.x, p.y, p.z], k_nearest, &squared_euclidean) + .unwrap() + .iter() + .skip(1) // ignore the first point (same point) + .map(|(d, _)| d.sqrt()) + .sum::() + / (k_nearest - 1) as f32; + + sum += avg_spacing; + } + + sum / points.len() as f32 +} + + +pub fn contains(bound: &Bounds, point: &PointXyzRgba) -> bool { + const ERROR_MARGIN_PERCENTAGE: f32 = 1.01; + point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x + && point.x <= bound.max_x * ERROR_MARGIN_PERCENTAGE + && point.y >= bound.min_y + && point.y <= bound.max_y * ERROR_MARGIN_PERCENTAGE + && point.z >= bound.min_z + && point.z <= bound.max_z * ERROR_MARGIN_PERCENTAGE +} + +fn partition( + pc: &PointCloud, + partitions: (usize, usize, usize), +) -> Vec> { + let pc_bound = get_pc_bound(&pc); + let child_bounds = pc_bound.partition(partitions); + + let num_segments = child_bounds.len(); + let mut partitioned_points = vec![vec![]; num_segments]; + + for point in &pc.points { + for (index, bound) in child_bounds.iter().enumerate() { + if contains(&bound, &point) { + partitioned_points[index].push(point.clone()); + } + } + } + + partitioned_points +} + + +fn euclidean_distance_3d(point1: &PointXyzRgba, point2: &PointXyzRgba) -> f32 { + let dx = point1.x - point2.x; + let dy = point1.y - point2.y; + let dz = point1.z - point2.z; + (dx.powi(2) + dy.powi(2) + dz.powi(2)).sqrt() +} + +fn get_middlepoint(point1: &PointXyzRgba, point2: &PointXyzRgba) -> PointXyzRgba { + let geom_x = ((point1.x as f32) + (point2.x as f32)) / 2.0; + let geom_y = ((point1.y as f32) + (point2.y as f32)) / 2.0; + let geom_z = ((point1.z as f32) + (point2.z as f32)) / 2.0; + + let col_r = ((point1.r as f32) + (point2.r as f32)) / 2.0; + let col_g = ((point1.g as f32) + (point2.g as f32)) / 2.0; + let col_b = ((point1.b as f32) + (point2.b as f32)) / 2.0; + let col_a = ((point1.a as f32) + (point2.a as f32)) / 2.0; + PointXyzRgba { + x: geom_x, + y: geom_y, + z: geom_z, + r: col_r as u8, + g: col_g as u8, + b: col_b as u8, + a: col_a as u8, + } +} + +fn get_circumference_order(neighbours: &Vec, points: &Vec) -> Vec { + let mut curr = neighbours[0]; // Assuming this is valid + let mut order = vec![curr]; + let mut seen = HashSet::new(); + seen.insert(curr); + + while order.len() < neighbours.len() { + let mut min_distance = f32::INFINITY; + let mut nearest_neighbour = None; + + for &neighbour in neighbours { + if seen.contains(&neighbour) { + continue; + } + let distance = euclidean_distance_3d(&points[curr], &points[neighbour]); + if distance < min_distance { + min_distance = distance; + nearest_neighbour = Some(neighbour); + } + } + + let next_point = nearest_neighbour.expect("Failed to find nearest neighbour"); + curr = next_point; + order.push(curr); + seen.insert(curr); + } + + order +} + +pub fn upsample_grid(point_cloud: PointCloud, partition_k: usize) -> PointCloud { + /* + 1. Partition the vertices + 2. Parallel iter upsampling each segment + 3. 
combining into a single point cloud + */ + let start = Instant::now(); + let partitions = partition(&point_cloud, (partition_k, partition_k, partition_k)); + println!("Time taken for partition: {:?}", start.elapsed()); + let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| upsample_grid_vertices(vertices)).collect::>(); + println!("Time taken for grid upsample: {:?}", start.elapsed()); + PointCloud::new(new_points.len(), new_points) +} + + +fn upsample_grid_vertices(vertices: &Vec) -> Vec { + let mut kd_tree = KdTree::new(); + for (i, pt) in vertices.iter().enumerate() { + kd_tree + .add(&[pt.x, pt.y, pt.z], i) + .expect("Failed to add to kd tree"); + } + // let end_kd_init = start.elapsed(); + let mut visited: HashSet<(usize, usize)> = HashSet::new(); + let mut new_points: Vec = vec![]; + for source in 0..vertices.len() { + + let point = vertices[source]; + let x = point.x; + let y = point.y; + let z = point.z; + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + Ok(nearest) => { + let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + if neighbours.len() != 8 { + continue; + } + let order = get_circumference_order(&neighbours, &vertices); + + for i in 0..order.len() { + let next_i = (i + 1) % order.len(); + let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; + let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; + + for &pair in &[circumference_pair, source_pair] { + if visited.contains(&pair) { + continue; + } + let middlepoint = get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + new_points.push(middlepoint); + } + visited.insert(source_pair); + visited.insert(circumference_pair); + + let next_next_i = (i + 2) % order.len(); + let dup_pair = if order[next_next_i] < source { (order[next_next_i], source) } else { (source, order[next_next_i]) }; + visited.insert(dup_pair); + } + } + Err(e) => { + println!("{:?}", e); + } + } + }; + new_points.extend(vertices); + new_points +} + + #[cfg(test)] mod test { use crate::{ From 2cf15462ba1b441e71693872292f3de3ad2b50a7 Mon Sep 17 00:00:00 2001 From: sherrpass Date: Tue, 26 Mar 2024 13:38:10 +0800 Subject: [PATCH 03/11] parallelise partitioning --- src/upsample/interpolate.rs | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index 5d353f8..7695347 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -131,18 +131,9 @@ fn partition( let pc_bound = get_pc_bound(&pc); let child_bounds = pc_bound.partition(partitions); - let num_segments = child_bounds.len(); - let mut partitioned_points = vec![vec![]; num_segments]; - - for point in &pc.points { - for (index, bound) in child_bounds.iter().enumerate() { - if contains(&bound, &point) { - partitioned_points[index].push(point.clone()); - } - } - } - - partitioned_points + child_bounds.par_iter().map(|bound| { + pc.points.iter().map(|point| point.clone()).filter(|point| contains(bound, point)).collect::>() + }).collect::>() } @@ -217,7 +208,6 @@ pub fn upsample_grid(point_cloud: PointCloud, partition_k: usize) PointCloud::new(new_points.len(), new_points) } - fn upsample_grid_vertices(vertices: &Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { From 284bc467e7c9436c29d7b2eda2df5299fad17d53 Mon Sep 17 00:00:00 2001 From: sherrpass Date: Wed, 27 Mar 2024 
07:55:13 +0800 Subject: [PATCH 04/11] Add Dedup optimisation to interpolate --- src/formats/pointxyzrgba.rs | 18 +++++++ src/pipeline/subcommands/read.rs | 1 - src/upsample/interpolate.rs | 93 +++++++++++++++++++++----------- 3 files changed, 80 insertions(+), 32 deletions(-) diff --git a/src/formats/pointxyzrgba.rs b/src/formats/pointxyzrgba.rs index 7ff5bd9..6dbe69f 100644 --- a/src/formats/pointxyzrgba.rs +++ b/src/formats/pointxyzrgba.rs @@ -1,3 +1,5 @@ +use std::cmp::Ordering; + use serde::ser::{Serialize, SerializeStruct, Serializer}; #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, bytemuck::Pod, bytemuck::Zeroable)] @@ -11,6 +13,22 @@ pub struct PointXyzRgba { pub a: u8, } +impl PartialOrd for PointXyzRgba { + fn partial_cmp(&self, other: &Self) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for PointXyzRgba { + fn cmp(&self, other: &Self) -> Ordering { + self.x.total_cmp(&other.x) + .then_with(|| self.y.total_cmp(&other.y)) + .then_with(|| self.z.total_cmp(&other.z)) + } +} + +impl Eq for PointXyzRgba {} + impl Serialize for PointXyzRgba { fn serialize(&self, serializer: S) -> Result where diff --git a/src/pipeline/subcommands/read.rs b/src/pipeline/subcommands/read.rs index ef0bc28..bdbe0ca 100644 --- a/src/pipeline/subcommands/read.rs +++ b/src/pipeline/subcommands/read.rs @@ -44,7 +44,6 @@ impl Read { impl Subcommand for Read { fn handle(&mut self, messages: Vec, channel: &Channel) { - println!("Reading files"); if messages.is_empty() { let mut files = find_all_files(&self.args.files); files.sort(); diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index 7695347..e2f21a0 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -87,33 +87,6 @@ pub fn upsample(point_cloud: PointCloud, factor: usize) -> PointCl } -fn calculate_spacing(points: &Vec) -> f32 { - let mut tree = KdTree::new(); - for (i, p) in points.iter().enumerate() { - tree.add(&[p.x, p.y, p.z], i).unwrap(); - } - - let mut sum = 0.0; - // The value is currently hard coded. 
Can potentially be improved with variance - let k_nearest = 4; - - for p in points.iter() { - let avg_spacing = tree - .nearest(&[p.x, p.y, p.z], k_nearest, &squared_euclidean) - .unwrap() - .iter() - .skip(1) // ignore the first point (same point) - .map(|(d, _)| d.sqrt()) - .sum::() - / (k_nearest - 1) as f32; - - sum += avg_spacing; - } - - sum / points.len() as f32 -} - - pub fn contains(bound: &Bounds, point: &PointXyzRgba) -> bool { const ERROR_MARGIN_PERCENTAGE: f32 = 1.01; point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x @@ -202,13 +175,71 @@ pub fn upsample_grid(point_cloud: PointCloud, partition_k: usize) */ let start = Instant::now(); let partitions = partition(&point_cloud, (partition_k, partition_k, partition_k)); - println!("Time taken for partition: {:?}", start.elapsed()); - let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| upsample_grid_vertices(vertices)).collect::>(); - println!("Time taken for grid upsample: {:?}", start.elapsed()); + let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| upsample_grid_vertices_dedup(vertices.clone())).collect::>(); + println!("{:?}", start.elapsed().as_micros()); PointCloud::new(new_points.len(), new_points) } -fn upsample_grid_vertices(vertices: &Vec) -> Vec { +fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec { + let mut vertices = vertices; + vertices.sort_unstable(); + let mut kd_tree = KdTree::new(); + for (i, pt) in vertices.iter().enumerate() { + kd_tree + .add(&[pt.x, pt.y, pt.z], i) + .expect("Failed to add to kd tree"); + } + // let end_kd_init = start.elapsed(); + let mut visited: HashSet<(usize, usize)> = HashSet::new(); + let mut new_points: Vec = vec![]; + let mut visited_points: HashSet = HashSet::new(); + for source in 0..vertices.len() { + if visited_points.contains(&source){ + continue; + } + let point = vertices[source]; + let x = point.x; + let y = point.y; + let z = point.z; + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + Ok(nearest) => { + let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + if neighbours.len() != 8 { + continue; + } + visited_points.extend(&neighbours); + + let order = get_circumference_order(&neighbours, &vertices); + + for i in 0..order.len() { + let next_i = (i + 1) % order.len(); + let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; + let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; + + for &pair in &[circumference_pair, source_pair] { + if visited.contains(&pair) { + continue; + } + let middlepoint = get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + new_points.push(middlepoint); + } + visited.insert(source_pair); + visited.insert(circumference_pair); + + let next_next_i = (i + 2) % order.len(); + let dup_pair = if order[next_next_i] < source { (order[next_next_i], source) } else { (source, order[next_next_i]) }; + visited.insert(dup_pair); + } + } + Err(e) => { + println!("{:?}", e); + } + } + }; + new_points.extend(vertices); + new_points +} +fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { kd_tree From 7cde50663b5c88468045350cd96c8f024198c892 Mon Sep 17 00:00:00 2001 From: sherrpass Date: Thu, 28 Mar 2024 14:22:14 +0800 Subject: [PATCH 05/11] Fix interpolate dup pair bug and reduce redup --- src/render/wgpu/render_manager.rs | 9 +- 
src/render/wgpu/upsampler.rs | 132 ++++++++++++++++++++++++++---- src/upsample/interpolate.rs | 21 +++-- 3 files changed, 138 insertions(+), 24 deletions(-) diff --git a/src/render/wgpu/render_manager.rs b/src/render/wgpu/render_manager.rs index adb47e4..4933784 100644 --- a/src/render/wgpu/render_manager.rs +++ b/src/render/wgpu/render_manager.rs @@ -272,16 +272,19 @@ impl RenderManager> for AdaptiveManager { let start = Instant::now(); let mut visible_pc = self.get_visible_points(pc.clone()); let visibility_elasped = start.elapsed(); - // println!("Calculated visibility in {:?}", visibility_elasped); + println!("Total points {:?}, Visible points {:?}, took {:?}", pc.points.len(), visible_pc.points.len(), visibility_elasped); let should_upsample = self.upsampler.should_upsample(&visible_pc, &self.camera_state.as_ref().unwrap()); if should_upsample { let init_len = visible_pc.points.len(); - let upsampled_points = self.upsampler.upsample_grid(visible_pc.points.clone()); + + let upsampled_points = self.upsampler.upsample_grid(&visible_pc, 3); let upsampled_pc = PointCloud::new(upsampled_points.len(), upsampled_points.clone()); - visible_pc.combine(&upsampled_pc); self.pc.as_mut().unwrap().combine(&upsampled_pc); + + visible_pc.combine(&upsampled_pc); + let upsample_elasped: Duration = start.elapsed(); println!("Upsampled points from {:?} to {:?} in {:?}", init_len, visible_pc.points.len(), upsample_elasped); diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index 783d8bd..5c44d08 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -3,7 +3,7 @@ use kiddo::{distance::squared_euclidean, KdTree}; use num_traits::Float; use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; -use crate::formats::{pointxyzrgba::PointXyzRgba, PointCloud}; +use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound}; use std::{collections::{BTreeSet, HashSet}, time::Instant}; use super::{camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController}; @@ -20,7 +20,7 @@ impl Upsampler { } pub fn should_upsample(&self, point_cloud: &PointCloud, camera_state: &CameraState) -> bool { - false + point_cloud.points.len() < 100_000 // /* // 1. Get points in NDC // 2. 
Calculate the average distance normalised by viewport @@ -85,18 +85,41 @@ impl Upsampler { sum / points.len() as f32 } - fn euclidean_distance_3d(&self, point1: &PointXyzRgba, point2: &PointXyzRgba) -> f32 { + pub fn contains(bound: &Bounds, point: &PointXyzRgba) -> bool { + const ERROR_MARGIN_PERCENTAGE: f32 = 1.01; + point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x + && point.x <= bound.max_x * ERROR_MARGIN_PERCENTAGE + && point.y >= bound.min_y + && point.y <= bound.max_y * ERROR_MARGIN_PERCENTAGE + && point.z >= bound.min_z + && point.z <= bound.max_z * ERROR_MARGIN_PERCENTAGE + } + + fn partition( + pc: &PointCloud, + partitions: (usize, usize, usize), + ) -> Vec> { + let pc_bound = get_pc_bound(&pc); + let child_bounds = pc_bound.partition(partitions); + + child_bounds.par_iter().map(|bound| { + pc.points.iter().map(|point| point.clone()).filter(|point| Self::contains(bound, point)).collect::>() + }).collect::>() + } + + + fn euclidean_distance_3d(point1: &PointXyzRgba, point2: &PointXyzRgba) -> f32 { let dx = point1.x - point2.x; let dy = point1.y - point2.y; let dz = point1.z - point2.z; (dx.powi(2) + dy.powi(2) + dz.powi(2)).sqrt() } - - fn get_middlepoint(&self, point1: &PointXyzRgba, point2: &PointXyzRgba) -> PointXyzRgba { + + fn get_middlepoint(point1: &PointXyzRgba, point2: &PointXyzRgba) -> PointXyzRgba { let geom_x = ((point1.x as f32) + (point2.x as f32)) / 2.0; let geom_y = ((point1.y as f32) + (point2.y as f32)) / 2.0; let geom_z = ((point1.z as f32) + (point2.z as f32)) / 2.0; - + let col_r = ((point1.r as f32) + (point2.r as f32)) / 2.0; let col_g = ((point1.g as f32) + (point2.g as f32)) / 2.0; let col_b = ((point1.b as f32) + (point2.b as f32)) / 2.0; @@ -111,8 +134,8 @@ impl Upsampler { a: col_a as u8, } } - - fn get_circumference_order(&self, neighbours: &Vec, points: &Vec) -> Vec { + + fn get_circumference_order(neighbours: &Vec, points: &Vec) -> Vec { let mut curr = neighbours[0]; // Assuming this is valid let mut order = vec![curr]; let mut seen = HashSet::new(); @@ -126,7 +149,7 @@ impl Upsampler { if seen.contains(&neighbour) { continue; } - let distance = self.euclidean_distance_3d(&points[curr], &points[neighbour]); + let distance = Self::euclidean_distance_3d(&points[curr], &points[neighbour]); if distance < min_distance { min_distance = distance; nearest_neighbour = Some(neighbour); @@ -141,9 +164,82 @@ impl Upsampler { order } - - pub fn upsample_grid(&self, vertices: Vec) -> Vec { + + pub fn upsample_grid(&self, point_cloud: &PointCloud, partition_k: usize) -> Vec { + /* + 1. Partition the vertices + 2. Parallel iter upsampling each segment + 3. 
combining into a single point cloud + */ let start = Instant::now(); + let partitions = Self::partition(&point_cloud, (partition_k, partition_k, partition_k)); + let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| Self::upsample_grid_vertices(vertices.clone())).collect::>(); + println!("{:?}", start.elapsed().as_micros()); + new_points + } + + fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec { + /* + Upsamples vertices and returns only the upsampled points + */ + let mut vertices = vertices; + vertices.sort_unstable(); + let mut kd_tree = KdTree::new(); + for (i, pt) in vertices.iter().enumerate() { + kd_tree + .add(&[pt.x, pt.y, pt.z], i) + .expect("Failed to add to kd tree"); + } + // let end_kd_init = start.elapsed(); + let mut visited: HashSet<(usize, usize)> = HashSet::new(); + let mut new_points: Vec = vec![]; + let mut visited_points: HashSet = HashSet::new(); + for source in 0..vertices.len() { + if visited_points.contains(&source){ + continue; + } + let point = vertices[source]; + let x = point.x; + let y = point.y; + let z = point.z; + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + Ok(nearest) => { + let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + if neighbours.len() != 8 { + continue; + } + visited_points.extend(&neighbours); + + let order = Self::get_circumference_order(&neighbours, &vertices); + + for i in 0..order.len() { + let next_i = (i + 1) % order.len(); + let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; + let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; + + for &pair in &[circumference_pair, source_pair] { + if visited.contains(&pair) { + continue; + } + let middlepoint = Self::get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + new_points.push(middlepoint); + } + visited.insert(source_pair); + visited.insert(circumference_pair); + + let next_next_i = (i + 2) % order.len(); + let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], order[next_next_i]) }; + visited.insert(dup_pair); + } + } + Err(e) => { + println!("{:?}", e); + } + } + }; + new_points + } + fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { kd_tree @@ -161,8 +257,11 @@ impl Upsampler { let z = point.z; match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ Ok(nearest) => { - let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect(); - let order = self.get_circumference_order(&neighbours, &vertices); + let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + if neighbours.len() != 8 { + continue; + } + let order = Self::get_circumference_order(&neighbours, &vertices); for i in 0..order.len() { let next_i = (i + 1) % order.len(); @@ -173,14 +272,14 @@ impl Upsampler { if visited.contains(&pair) { continue; } - let middlepoint = self.get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + let middlepoint = Self::get_middlepoint(&vertices[pair.0], &vertices[pair.1]); new_points.push(middlepoint); } visited.insert(source_pair); visited.insert(circumference_pair); let next_next_i = (i + 2) % order.len(); - let dup_pair = if order[next_next_i] < source { (order[next_next_i], source) } else { (source, order[next_next_i]) }; + let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], 
order[next_next_i]) }; visited.insert(dup_pair); } } @@ -189,9 +288,10 @@ impl Upsampler { } } }; + new_points.extend(vertices); new_points } - + diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index e2f21a0..6129857 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -207,10 +207,13 @@ fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec) -> Vec { println!("{:?}", e); @@ -237,8 +241,11 @@ fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { @@ -257,12 +264,13 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { let z = point.z; match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ Ok(nearest) => { + let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); if neighbours.len() != 8 { continue; } let order = get_circumference_order(&neighbours, &vertices); - + for i in 0..order.len() { let next_i = (i + 1) % order.len(); let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; @@ -279,7 +287,7 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { visited.insert(circumference_pair); let next_next_i = (i + 2) % order.len(); - let dup_pair = if order[next_next_i] < source { (order[next_next_i], source) } else { (source, order[next_next_i]) }; + let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], order[next_next_i]) }; visited.insert(dup_pair); } } @@ -288,7 +296,10 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { } } }; + println!("Original count: {:?}", vertices.len()); new_points.extend(vertices); + println!("Upsampled count: {:?}", new_points.len()); + println!("Visited pairs count: {:?}", visited.len()); new_points } From 8fd2c735c2320461bafe929819425f8e3a5f3942 Mon Sep 17 00:00:00 2001 From: sherrpass Date: Sat, 30 Mar 2024 15:49:56 +0800 Subject: [PATCH 06/11] implement should_upsample optimised --- src/render/wgpu/render_manager.rs | 9 +-- src/render/wgpu/upsampler.rs | 114 +++++++++++++++++++++--------- src/upsample/interpolate.rs | 4 +- 3 files changed, 87 insertions(+), 40 deletions(-) diff --git a/src/render/wgpu/render_manager.rs b/src/render/wgpu/render_manager.rs index 4933784..0461274 100644 --- a/src/render/wgpu/render_manager.rs +++ b/src/render/wgpu/render_manager.rs @@ -1,3 +1,4 @@ +use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use wgpu_glyph::ab_glyph::Point; use cgmath::*; @@ -271,15 +272,15 @@ impl RenderManager> for AdaptiveManager { let pc = self.pc.as_ref().unwrap(); let start = Instant::now(); let mut visible_pc = self.get_visible_points(pc.clone()); - let visibility_elasped = start.elapsed(); - println!("Total points {:?}, Visible points {:?}, took {:?}", pc.points.len(), visible_pc.points.len(), visibility_elasped); + // let visibility_elasped = start.elapsed(); + // println!("Total points {:?}, Visible points {:?}, took {:?}", pc.points.len(), visible_pc.points.len(), visibility_elasped); let should_upsample = self.upsampler.should_upsample(&visible_pc, &self.camera_state.as_ref().unwrap()); if should_upsample { let init_len = visible_pc.points.len(); - let upsampled_points = self.upsampler.upsample_grid(&visible_pc, 3); + let upsampled_points = self.upsampler.upsample_grid(&visible_pc, 7); let upsampled_pc = PointCloud::new(upsampled_points.len(), upsampled_points.clone()); self.pc.as_mut().unwrap().combine(&upsampled_pc); @@ -301,7 +302,7 @@ impl 
RenderManager> for AdaptiveManager { // println!("Number of points total: {:?}", point_cloud.points.len()); let view_proj_matrix = Matrix4::from(self.camera_state.as_ref().unwrap().camera_uniform.view_proj); let antialias = point_cloud.antialias(); - let visible_points = point_cloud.points.into_iter().filter(|point| { + let visible_points = point_cloud.points.into_par_iter().filter(|point| { let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; let point_in_view = view_proj_matrix.transform_point(point_vec); diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index 5c44d08..b0ba8fb 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -4,7 +4,7 @@ use num_traits::Float; use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound}; -use std::{collections::{BTreeSet, HashSet}, time::Instant}; +use std::{cmp::{max, min}, collections::{BTreeSet, HashSet}, time::Instant}; use super::{camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController}; @@ -12,36 +12,75 @@ pub struct Upsampler { } -const VIEWPORT_DIST_UPSAMPLING_THRESHOLD: f32 = 3.0; - impl Upsampler { pub fn new() -> Self { Self {} } pub fn should_upsample(&self, point_cloud: &PointCloud, camera_state: &CameraState) -> bool { - point_cloud.points.len() < 100_000 - // /* - // 1. Get points in NDC - // 2. Calculate the average distance normalised by viewport - // 3. If greater than **threshold**, upsample - // */ + if point_cloud.points.is_empty() || point_cloud.points.len() > 300_000 { + return false + } + const RANGE_PIXEL_THRESHOLD: i32 = 2; + const PERCENTAGE_THRESHOLD: f32 = 0.6; + // let start = Instant::now(); - // let point_num = point_cloud.points.len(); - // if point_num == 0 || point_num > 100_000 { - // return false - // } - // let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); - // let antialias = point_cloud.antialias(); - // let width = camera_state.get_window_size().width; - // let height = camera_state.get_window_size().height; - // let points_viewport = point_cloud.points.par_iter().map(|point| { - // let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; - // let point_ndc = view_proj_matrix.transform_point(point_vec); - // let x = (point_ndc.x * (width as f32)) as i32; - // let y = (point_ndc.y * (height as f32)) as i32; - // (x, y) - // }).collect::>().par_iter().map(|coords| { + let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); + let antialias = point_cloud.antialias(); + let width = camera_state.get_window_size().width as usize; + let height = camera_state.get_window_size().height as usize; + let points_viewport = point_cloud.points.par_iter().map(|point| { + let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; + let point_ndc = view_proj_matrix.transform_point(point_vec); + let x = min(((point_ndc.x + 1.0) * (width as f32) / 2.0) as usize, width); + let y = min(((point_ndc.y + 1.0) * (height as f32) / 2.0) as usize, height); + (x, y) + }).collect::>(); + // println!("viewport coords processing duration: {:?}", start.elapsed()); + // row: height, y; + // col: width, x + // let dedup_start = Instant::now(); + + let mut viewport_is_filled = vec![false; (height + 1) * (width + 1)]; + + 
points_viewport.iter().for_each(|&coords| { + let (x, y) = coords; + viewport_is_filled[y * (width + 1) + x] = true; + }); + // println!("viewport coords dedup duration: {:?}", dedup_start.elapsed()); + // let calculate_space_start = Instant::now(); + + + let number_pixels_with_close_neighbours = (0..(viewport_is_filled.len())).into_par_iter() + .filter(|&index| viewport_is_filled[index]) + .map(|val| (val % (width + 1), val / (width + 1))) + .filter(|(x, y)| { + let x = *x; + let y = *y; + for x_curr in ((x as i32) - RANGE_PIXEL_THRESHOLD)..((x as i32) + RANGE_PIXEL_THRESHOLD + 1) { + for y_curr in ((y as i32) - RANGE_PIXEL_THRESHOLD)..((y as i32) + RANGE_PIXEL_THRESHOLD + 1) { + if 0 > x_curr || x_curr > (width as i32) || 0 > y_curr || y_curr > (height as i32) || (x_curr, y_curr) == (x as i32, y as i32) { + continue; + } + let x_curr = x_curr as usize; + let y_curr = y_curr as usize; + if viewport_is_filled[y_curr * (width + 1) + x_curr] { + return true; + } + } + } + return false + }).count(); + let filled_pixels: usize = viewport_is_filled.par_iter().filter(|&&is_filled| is_filled).count(); + let percentage_pixels_close_enough = (number_pixels_with_close_neighbours as f32) / (filled_pixels as f32); + // println!("Number of pixels with close neighbours: {:?}/{:?}={:?}", number_pixels_with_close_neighbours, filled_pixels, percentage_pixels_close_enough); + // println!("Calculate space duration {:?}", calculate_space_start.elapsed()); + // println!("Total should_upsample duration {:?}", start.elapsed()); + percentage_pixels_close_enough < PERCENTAGE_THRESHOLD + // let deduped_viewport_pixels = points_viewport.into_par_iter().collect::>(); + + // let calculate_space_start = Instant::now(); + // let deduped_viewport_points = deduped_viewport_pixels.par_iter().map(|coords| { // PointXyzRgba { // x: coords.0 as f32, // y: coords.1 as f32, @@ -53,9 +92,12 @@ impl Upsampler { // } // }).collect::>(); - // let average_spacing = Self::calculate_spacing(&points_viewport); - // println!("{:?}", average_spacing); - // println!("Time taken {:?}", start.elapsed()); + // println!("deduped viewport points {:?}", deduped_viewport_points.len()); + + // let average_spacing = Self::calculate_spacing(&deduped_viewport_points); + // println!("Calculate space duration {:?}", calculate_space_start.elapsed()); + // println!("Total should_upsample duration {:?}", start.elapsed()); + // return average_spacing > VIEWPORT_DIST_UPSAMPLING_THRESHOLD } @@ -173,15 +215,12 @@ impl Upsampler { */ let start = Instant::now(); let partitions = Self::partition(&point_cloud, (partition_k, partition_k, partition_k)); - let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| Self::upsample_grid_vertices(vertices.clone())).collect::>(); - println!("{:?}", start.elapsed().as_micros()); + let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| Self::upsample_grid_vertices_dedup(vertices.clone())).collect::>(); + println!("Upsample took: {:?}", start.elapsed()); new_points } fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec { - /* - Upsamples vertices and returns only the upsampled points - */ let mut vertices = vertices; vertices.sort_unstable(); let mut kd_tree = KdTree::new(); @@ -208,10 +247,13 @@ impl Upsampler { if neighbours.len() != 8 { continue; } - visited_points.extend(&neighbours); let order = Self::get_circumference_order(&neighbours, &vertices); - + for (index, value) in order.iter().enumerate() { + if index % 2 == 0 { + 
visited_points.insert(*value); + } + } for i in 0..order.len() { let next_i = (i + 1) % order.len(); let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; @@ -231,14 +273,18 @@ impl Upsampler { let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], order[next_next_i]) }; visited.insert(dup_pair); } + } Err(e) => { println!("{:?}", e); } } }; + new_points.extend(vertices); + new_points } + fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index 6129857..4175510 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -1,10 +1,11 @@ use std::{collections::HashSet, time::Instant}; +use cgmath::Matrix4; use kiddo::{distance::squared_euclidean, KdTree}; use log::warn; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; -use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound}; +use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, render::wgpu::upsampler::Upsampler, utils::get_pc_bound}; pub fn upsample(point_cloud: PointCloud, factor: usize) -> PointCloud { if factor <= 1 { @@ -303,7 +304,6 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { new_points } - #[cfg(test)] mod test { use crate::{ From 352e0b95845c7d70a89e5cd5d1f630c9cf9d9893 Mon Sep 17 00:00:00 2001 From: sherrpass Date: Tue, 7 May 2024 10:25:53 +0200 Subject: [PATCH 07/11] Integrate upsampling into render_manager --- README.md | 2 +- src/bin/vvplay.rs | 6 +- src/render/wgpu/render_manager.rs | 239 ++++++++++++++++-------------- src/render/wgpu/renderer.rs | 4 +- src/render/wgpu/upsampler.rs | 166 +++++---------------- 5 files changed, 174 insertions(+), 243 deletions(-) diff --git a/README.md b/README.md index fcdda19..9d65733 100644 --- a/README.md +++ b/README.md @@ -504,7 +504,7 @@ Options: --decoder [default: noop] [possible values: noop, draco] --decoder-path --bg-color [default: rgb(255,255,255)] - --lodify [default: False] + --adaptive-upsampling [default: False] -h, --help Print help ``` diff --git a/src/bin/vvplay.rs b/src/bin/vvplay.rs index 05d0fbd..0622c49 100644 --- a/src/bin/vvplay.rs +++ b/src/bin/vvplay.rs @@ -4,7 +4,7 @@ use std::path::Path; use vivotk::render::wgpu::{ builder::RenderBuilder, camera::Camera, controls::Controller, metrics_reader::MetricsReader, - render_manager::AdaptiveManager, renderer::Renderer, + render_manager::AdaptiveUpsamplingManager, renderer::Renderer, }; /// Plays a folder of pcd files in lexicographical order @@ -60,7 +60,7 @@ struct Args { #[clap(long, default_value = "rgb(255,255,255)")] bg_color: OsString, #[clap(long, default_value = "false")] - lod: bool, + adaptive_upsampling: bool, } #[derive(clap::ValueEnum, Clone, Copy)] @@ -71,7 +71,7 @@ enum DecoderType { fn main() { let args: Args = Args::parse(); - let adaptive_manager = AdaptiveManager::new(&args.src, args.lod); + let adaptive_manager = AdaptiveUpsamplingManager::new(&args.src, args.adaptive_upsampling); let camera = Camera::new( (args.camera_x, args.camera_y, args.camera_z), diff --git a/src/render/wgpu/render_manager.rs b/src/render/wgpu/render_manager.rs index 0461274..9219298 100644 --- a/src/render/wgpu/render_manager.rs +++ b/src/render/wgpu/render_manager.rs @@ -1,18 +1,13 @@ -use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; 
-use wgpu_glyph::ab_glyph::Point; -use cgmath::*; use crate::formats::metadata::MetaData; use crate::formats::pointxyzrgba::PointXyzRgba; use crate::formats::PointCloud; -use crate::render::wgpu::antialias; use std::marker::PhantomData; use std::path::Path; use std::process::exit; -use std::time::{Duration, Instant}; use super::camera::CameraState; -use super::reader::{LODFileReader, RenderReader}; +use super::reader::{LODFileReader, PointCloudFileReader, RenderReader}; use super::renderable::Renderable; use super::resolution_controller::ResolutionController; use super::upsampler::Upsampler; @@ -25,10 +20,9 @@ pub trait RenderManager { fn set_len(&mut self, len: usize); fn set_camera_state(&mut self, camera_state: Option); fn should_redraw(&mut self, camera_state: &CameraState) -> bool; - fn get_visible_points(&self, point_cloud: PointCloud) -> PointCloud; } -pub struct AdaptiveManager { +pub struct AdaptiveLODManager { reader: LODFileReader, // For adaptive loading @@ -41,57 +35,9 @@ pub struct AdaptiveManager { // As the temporary cache current_index: usize, additional_points_loaded: Vec, - - // For upsampling - upsampler: Upsampler, - pc: Option>, - - total_latency: Duration, - sample_size: i32, } -fn infer_format(src: &String) -> String { - let choices = ["pcd", "ply", "bin", "http"]; - const PCD: usize = 0; - const PLY: usize = 1; - const BIN: usize = 2; - - if choices.contains(&src.as_str()) { - return src.clone(); - } - - let path = Path::new(src); - // infer by counting extension numbers (pcd ply and bin) - - let mut choice_count = [0, 0, 0]; - for file_entry in path.read_dir().unwrap() { - match file_entry { - Ok(entry) => { - if let Some(ext) = entry.path().extension() { - if ext.eq("pcd") { - choice_count[PCD] += 1; - } else if ext.eq("ply") { - choice_count[PLY] += 1; - } else if ext.eq("bin") { - choice_count[BIN] += 1; - } - } - } - Err(e) => { - eprintln!("{e}") - } - } - } - - let max_index = choice_count - .iter() - .enumerate() - .max_by_key(|(_, &item)| item) - .map(|(index, _)| index); - choices[max_index.unwrap()].to_string() -} - -impl AdaptiveManager { +impl AdaptiveLODManager { pub fn new(src: &String, lod: bool) -> Self { let base_path = if lod { src.clone() + "/base" @@ -137,17 +83,12 @@ impl AdaptiveManager { let additional_points_loaded = vec![0; reader.len()]; Self { - pc: None, - upsampler: Upsampler { }, reader, camera_state: None, resolution_controller: Some(resolution_controller), metadata: Some(metadata), current_index: usize::MAX, // no point cloud loaded yet additional_points_loaded, - total_latency: Duration::new(0, 0), - sample_size: 0, - } } else { let reader = LODFileReader::new(base_path, None, &play_format); @@ -158,25 +99,19 @@ impl AdaptiveManager { } Self { - pc: None, - upsampler: Upsampler { }, reader, camera_state: None, resolution_controller: None, metadata: None, current_index: usize::MAX, additional_points_loaded: vec![], - total_latency: Duration::new(0, 0), - sample_size: 0, } } } pub fn get_desired_point_cloud(&mut self, index: usize) -> Option> { - // let now = std::time::Instant::now(); if self.metadata.is_none() { - // println!("get base pc: {:?}", now.elapsed()); let pc = self.reader.get_at(index).unwrap(); return Some(pc); } @@ -255,63 +190,109 @@ impl AdaptiveManager { } } -impl RenderManager> for AdaptiveManager { + +impl RenderManager> for AdaptiveLODManager { fn start(&mut self) -> Option> { self.get_desired_point_cloud(0) } fn get_at(&mut self, index: usize) -> Option> { - // println!("RenderManager get_at: {:?}", index); + 
self.get_desired_point_cloud(index) + } - if index != self.current_index || self.pc.is_none() { - // println!("Loading point cloud at index: {:?}, {:?} {:?}", index, self.current_index, self.pc.is_none()); - self.pc = Some(self.get_desired_point_cloud(index)?); - self.current_index = index; - } + fn len(&self) -> usize { + self.reader.len() + } - let pc = self.pc.as_ref().unwrap(); - let start = Instant::now(); - let mut visible_pc = self.get_visible_points(pc.clone()); - // let visibility_elasped = start.elapsed(); - // println!("Total points {:?}, Visible points {:?}, took {:?}", pc.points.len(), visible_pc.points.len(), visibility_elasped); + fn is_empty(&self) -> bool { + self.reader.is_empty() + } - let should_upsample = self.upsampler.should_upsample(&visible_pc, &self.camera_state.as_ref().unwrap()); + fn set_len(&mut self, len: usize) { + self.reader.set_len(len); + } - if should_upsample { - let init_len = visible_pc.points.len(); + fn set_camera_state(&mut self, camera_state: Option) { + self.camera_state = camera_state; + } - let upsampled_points = self.upsampler.upsample_grid(&visible_pc, 7); - let upsampled_pc = PointCloud::new(upsampled_points.len(), upsampled_points.clone()); - self.pc.as_mut().unwrap().combine(&upsampled_pc); + fn should_redraw(&mut self, camera_state: &CameraState) -> bool { + self.should_load_more_points(camera_state) + } +} - visible_pc.combine(&upsampled_pc); +pub struct AdaptiveUpsamplingManager { + reader: PointCloudFileReader, - let upsample_elasped: Duration = start.elapsed(); + // For adaptive loading + camera_state: Option, - println!("Upsampled points from {:?} to {:?} in {:?}", init_len, visible_pc.points.len(), upsample_elasped); + // As the temporary cache + current_index: usize, + pc: Option>, + + should_adaptive_upsample: bool +} + +impl AdaptiveUpsamplingManager { + pub fn new(src: &String, should_adaptive_upsample: bool) -> Self { + let play_format = infer_format(src); + let base_path = Path::new(src); + + let reader = PointCloudFileReader::from_directory(base_path, &play_format); + + if reader.is_empty() { + eprintln!("Must provide at least one file!"); + exit(1); } - Some(visible_pc) - // println!("Point visibility took: {:?}", start.elapsed()); - // self.total_latency += start.elapsed(); - // self.sample_size += 1; - // println!("Average Point visibility took: {:?}", self.total_latency / self.sample_size.try_into().unwrap()); + Self { + pc: None, + reader, + camera_state: None, + current_index: usize::MAX, + should_adaptive_upsample + } + } + + pub fn len(&self) -> usize { + self.reader.len() + } +} + +impl RenderManager> for AdaptiveUpsamplingManager { + fn start(&mut self) -> Option> { + self.reader.get_at(0) } - fn get_visible_points(&self, point_cloud: PointCloud) -> PointCloud { - // println!("Number of points total: {:?}", point_cloud.points.len()); - let view_proj_matrix = Matrix4::from(self.camera_state.as_ref().unwrap().camera_uniform.view_proj); - let antialias = point_cloud.antialias(); - let visible_points = point_cloud.points.into_par_iter().filter(|point| { - let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; - let point_in_view = view_proj_matrix.transform_point(point_vec); - - point_in_view.x.abs() <= 1.0 && - point_in_view.y.abs() <= 1.0 && - point_in_view.z.abs() <= 1.0 - }).collect::>(); - // println!("Number of points visible: {:?}", visible_points.len()); - PointCloud::new(visible_points.len(), visible_points) + fn get_at(&mut self, index: usize) -> 
Option> { + if !self.should_adaptive_upsample { + return self.reader.get_at(index); + } + + const PARTITION_SIZE: usize = 6; + if index != self.current_index || self.pc.is_none() { + let pc = self.reader.get_at(index)?; + self.pc = Some(pc); + self.current_index = index; + } + + let camera_state = self.camera_state.as_ref().unwrap(); + + let mut visible_pc = Upsampler::get_visible_points(self.pc.as_ref().unwrap().clone(), camera_state); + let mut needs_upsampling = true; + while needs_upsampling { + let upsampled_points = Upsampler::upsample(&visible_pc, camera_state, PARTITION_SIZE); + + if let Some(upsampled_points) = upsampled_points { + self.pc.as_mut().unwrap().combine(&upsampled_points); + visible_pc.combine(&upsampled_points); + } else { + needs_upsampling = false; + } + } + + Some(visible_pc) } fn len(&self) -> usize { @@ -332,7 +313,6 @@ impl RenderManager> for AdaptiveManager { fn should_redraw(&mut self, camera_state: &CameraState) -> bool { true - // self.should_load_more_points(camera_state) } } @@ -389,8 +369,45 @@ where fn should_redraw(&mut self, _camera_state: &CameraState) -> bool { false } +} + +fn infer_format(src: &String) -> String { + let choices = ["pcd", "ply", "bin", "http"]; + const PCD: usize = 0; + const PLY: usize = 1; + const BIN: usize = 2; + + if choices.contains(&src.as_str()) { + return src.clone(); + } + + let path = Path::new(src); + // infer by counting extension numbers (pcd ply and bin) - fn get_visible_points(&self, point_cloud: PointCloud) -> PointCloud { - PointCloud::new(0, vec![]) + let mut choice_count = [0, 0, 0]; + for file_entry in path.read_dir().unwrap() { + match file_entry { + Ok(entry) => { + if let Some(ext) = entry.path().extension() { + if ext.eq("pcd") { + choice_count[PCD] += 1; + } else if ext.eq("ply") { + choice_count[PLY] += 1; + } else if ext.eq("bin") { + choice_count[BIN] += 1; + } + } + } + Err(e) => { + eprintln!("{e}") + } + } } + + let max_index = choice_count + .iter() + .enumerate() + .max_by_key(|(_, &item)| item) + .map(|(index, _)| index); + choices[max_index.unwrap()].to_string() } diff --git a/src/render/wgpu/renderer.rs b/src/render/wgpu/renderer.rs index c0af928..aa41409 100644 --- a/src/render/wgpu/renderer.rs +++ b/src/render/wgpu/renderer.rs @@ -373,8 +373,8 @@ where } } else if self.reader.should_redraw(&self.camera_state) { self.redisplay(); - } - + } + let info = RenderInformation { camera: self.camera_state.camera, current_position: self.current_position, diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index b0ba8fb..0e674fd 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -1,10 +1,11 @@ use cgmath::{Matrix4, Point3, Transform}; use kiddo::{distance::squared_euclidean, KdTree}; +use nalgebra::ComplexField; use num_traits::Float; use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound}; -use std::{cmp::{max, min}, collections::{BTreeSet, HashSet}, time::Instant}; +use std::{cmp::{max, min}, collections::{BTreeSet, HashSet}, time::{Duration, Instant}}; use super::{camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController}; @@ -17,14 +18,38 @@ impl Upsampler { Self {} } - pub fn should_upsample(&self, point_cloud: &PointCloud, camera_state: &CameraState) -> bool { + pub fn upsample(pc: &PointCloud, camera_state: &CameraState, partition_k: usize) -> Option> { + let should_upsample = 
Self::should_upsample(pc, camera_state); + + if should_upsample { + let upsampled_points = Self::upsample_grid(pc, partition_k); + Some(PointCloud::new(upsampled_points.len(), upsampled_points.clone())) + } else { + None + } + } + + pub fn get_visible_points(point_cloud: PointCloud, camera_state: &CameraState) -> PointCloud { + let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); + let antialias = point_cloud.antialias(); + + let visible_points = point_cloud.points.into_par_iter().filter(|point| { + let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; + let point_in_view = view_proj_matrix * point_vec.to_homogeneous(); + (point_in_view.x / point_in_view.w).abs() <= 1.0 && + (point_in_view.y / point_in_view.w).abs() <= 1.0 && + (point_in_view.z / point_in_view.w).abs() <= 1.0 + }).collect::>(); + PointCloud::new(visible_points.len(), visible_points) + } + + pub fn should_upsample(point_cloud: &PointCloud, camera_state: &CameraState) -> bool { if point_cloud.points.is_empty() || point_cloud.points.len() > 300_000 { return false } - const RANGE_PIXEL_THRESHOLD: i32 = 2; - const PERCENTAGE_THRESHOLD: f32 = 0.6; + const RANGE_PIXEL_THRESHOLD: i32 = 1; + const PERCENTAGE_THRESHOLD: f32 = 0.8; - // let start = Instant::now(); let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); let antialias = point_cloud.antialias(); let width = camera_state.get_window_size().width as usize; @@ -36,10 +61,6 @@ impl Upsampler { let y = min(((point_ndc.y + 1.0) * (height as f32) / 2.0) as usize, height); (x, y) }).collect::>(); - // println!("viewport coords processing duration: {:?}", start.elapsed()); - // row: height, y; - // col: width, x - // let dedup_start = Instant::now(); let mut viewport_is_filled = vec![false; (height + 1) * (width + 1)]; @@ -47,8 +68,6 @@ impl Upsampler { let (x, y) = coords; viewport_is_filled[y * (width + 1) + x] = true; }); - // println!("viewport coords dedup duration: {:?}", dedup_start.elapsed()); - // let calculate_space_start = Instant::now(); let number_pixels_with_close_neighbours = (0..(viewport_is_filled.len())).into_par_iter() @@ -57,6 +76,7 @@ impl Upsampler { .filter(|(x, y)| { let x = *x; let y = *y; + let mut filled_neighbours_count = 0; for x_curr in ((x as i32) - RANGE_PIXEL_THRESHOLD)..((x as i32) + RANGE_PIXEL_THRESHOLD + 1) { for y_curr in ((y as i32) - RANGE_PIXEL_THRESHOLD)..((y as i32) + RANGE_PIXEL_THRESHOLD + 1) { if 0 > x_curr || x_curr > (width as i32) || 0 > y_curr || y_curr > (height as i32) || (x_curr, y_curr) == (x as i32, y as i32) { @@ -65,66 +85,16 @@ impl Upsampler { let x_curr = x_curr as usize; let y_curr = y_curr as usize; if viewport_is_filled[y_curr * (width + 1) + x_curr] { - return true; + filled_neighbours_count += 1; } } } - return false + return filled_neighbours_count >= 4 }).count(); let filled_pixels: usize = viewport_is_filled.par_iter().filter(|&&is_filled| is_filled).count(); let percentage_pixels_close_enough = (number_pixels_with_close_neighbours as f32) / (filled_pixels as f32); - // println!("Number of pixels with close neighbours: {:?}/{:?}={:?}", number_pixels_with_close_neighbours, filled_pixels, percentage_pixels_close_enough); - // println!("Calculate space duration {:?}", calculate_space_start.elapsed()); - // println!("Total should_upsample duration {:?}", start.elapsed()); - percentage_pixels_close_enough < PERCENTAGE_THRESHOLD - // let deduped_viewport_pixels = points_viewport.into_par_iter().collect::>(); - - 
// let calculate_space_start = Instant::now(); - // let deduped_viewport_points = deduped_viewport_pixels.par_iter().map(|coords| { - // PointXyzRgba { - // x: coords.0 as f32, - // y: coords.1 as f32, - // z: 0 as f32, - // r: 0, - // g: 0, - // b: 0, - // a: 0, - // } - // }).collect::>(); - - // println!("deduped viewport points {:?}", deduped_viewport_points.len()); - - // let average_spacing = Self::calculate_spacing(&deduped_viewport_points); - // println!("Calculate space duration {:?}", calculate_space_start.elapsed()); - // println!("Total should_upsample duration {:?}", start.elapsed()); - - // return average_spacing > VIEWPORT_DIST_UPSAMPLING_THRESHOLD - } - - fn calculate_spacing(points: &Vec) -> f32 { - let mut tree = KdTree::new(); - for (i, p) in points.iter().enumerate() { - tree.add(&[p.x, p.y, p.z], i).unwrap(); - } - - let mut sum = 0.0; - // The value is currently hard coded. Can potentially be improved with variance - let k_nearest = 4; - - for p in points.iter() { - let avg_spacing = tree - .nearest(&[p.x, p.y, p.z], k_nearest, &squared_euclidean) - .unwrap() - .iter() - .skip(1) // ignore the first point (same point) - .map(|(d, _)| d.sqrt()) - .sum::() - / (k_nearest - 1) as f32; - sum += avg_spacing; - } - - sum / points.len() as f32 + percentage_pixels_close_enough < PERCENTAGE_THRESHOLD } pub fn contains(bound: &Bounds, point: &PointXyzRgba) -> bool { @@ -207,20 +177,19 @@ impl Upsampler { order } - pub fn upsample_grid(&self, point_cloud: &PointCloud, partition_k: usize) -> Vec { + pub fn upsample_grid(point_cloud: &PointCloud, partition_k: usize) -> Vec { /* 1. Partition the vertices 2. Parallel iter upsampling each segment 3. combining into a single point cloud */ - let start = Instant::now(); + // let start = Instant::now(); let partitions = Self::partition(&point_cloud, (partition_k, partition_k, partition_k)); - let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| Self::upsample_grid_vertices_dedup(vertices.clone())).collect::>(); - println!("Upsample took: {:?}", start.elapsed()); + let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| Self::upsample_grid_vertices(vertices.clone())).collect::>(); new_points } - fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec { + fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut vertices = vertices; vertices.sort_unstable(); let mut kd_tree = KdTree::new(); @@ -244,6 +213,8 @@ impl Upsampler { match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ Ok(nearest) => { let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + // visited_points.extend(neighbours.clone()); + if neighbours.len() != 8 { continue; } @@ -284,61 +255,4 @@ impl Upsampler { new_points } - - fn upsample_grid_vertices(vertices: Vec) -> Vec { - let mut kd_tree = KdTree::new(); - for (i, pt) in vertices.iter().enumerate() { - kd_tree - .add(&[pt.x, pt.y, pt.z], i) - .expect("Failed to add to kd tree"); - } - // let end_kd_init = start.elapsed(); - let mut visited: HashSet<(usize, usize)> = HashSet::new(); - let mut new_points: Vec = vec![]; - for source in 0..vertices.len() { - - let point = vertices[source]; - let x = point.x; - let y = point.y; - let z = point.z; - match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ - Ok(nearest) => { - let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); - if neighbours.len() != 8 { - continue; - } - let order = Self::get_circumference_order(&neighbours, 
&vertices); - - for i in 0..order.len() { - let next_i = (i + 1) % order.len(); - let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; - let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; - - for &pair in &[circumference_pair, source_pair] { - if visited.contains(&pair) { - continue; - } - let middlepoint = Self::get_middlepoint(&vertices[pair.0], &vertices[pair.1]); - new_points.push(middlepoint); - } - visited.insert(source_pair); - visited.insert(circumference_pair); - - let next_next_i = (i + 2) % order.len(); - let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], order[next_next_i]) }; - visited.insert(dup_pair); - } - } - Err(e) => { - println!("{:?}", e); - } - } - }; - new_points.extend(vertices); - new_points - } - - - - } From aa1e563dd84ecd8188c09d3fc3eb6acca5fd5292 Mon Sep 17 00:00:00 2001 From: Wei Tsang Ooi Date: Tue, 28 May 2024 05:05:27 +0800 Subject: [PATCH 08/11] Fix cargo fmt --- src/formats/pointxyzrgba.rs | 3 +- src/render/wgpu/render_manager.rs | 14 +- src/render/wgpu/renderer.rs | 2 +- src/render/wgpu/upsampler.rs | 216 +++++++++++++++++++++--------- src/upsample/interpolate.rs | 118 +++++++++++----- 5 files changed, 239 insertions(+), 114 deletions(-) diff --git a/src/formats/pointxyzrgba.rs b/src/formats/pointxyzrgba.rs index 6dbe69f..8c472b8 100644 --- a/src/formats/pointxyzrgba.rs +++ b/src/formats/pointxyzrgba.rs @@ -21,7 +21,8 @@ impl PartialOrd for PointXyzRgba { impl Ord for PointXyzRgba { fn cmp(&self, other: &Self) -> Ordering { - self.x.total_cmp(&other.x) + self.x + .total_cmp(&other.x) .then_with(|| self.y.total_cmp(&other.y)) .then_with(|| self.z.total_cmp(&other.z)) } diff --git a/src/render/wgpu/render_manager.rs b/src/render/wgpu/render_manager.rs index 9219298..d8cfee0 100644 --- a/src/render/wgpu/render_manager.rs +++ b/src/render/wgpu/render_manager.rs @@ -1,4 +1,3 @@ - use crate::formats::metadata::MetaData; use crate::formats::pointxyzrgba::PointXyzRgba; use crate::formats::PointCloud; @@ -110,7 +109,6 @@ impl AdaptiveLODManager { } pub fn get_desired_point_cloud(&mut self, index: usize) -> Option> { - if self.metadata.is_none() { let pc = self.reader.get_at(index).unwrap(); return Some(pc); @@ -190,7 +188,6 @@ impl AdaptiveLODManager { } } - impl RenderManager> for AdaptiveLODManager { fn start(&mut self) -> Option> { self.get_desired_point_cloud(0) @@ -231,7 +228,7 @@ pub struct AdaptiveUpsamplingManager { current_index: usize, pc: Option>, - should_adaptive_upsample: bool + should_adaptive_upsample: bool, } impl AdaptiveUpsamplingManager { @@ -251,7 +248,7 @@ impl AdaptiveUpsamplingManager { reader, camera_state: None, current_index: usize::MAX, - should_adaptive_upsample + should_adaptive_upsample, } } @@ -279,11 +276,12 @@ impl RenderManager> for AdaptiveUpsamplingManager { let camera_state = self.camera_state.as_ref().unwrap(); - let mut visible_pc = Upsampler::get_visible_points(self.pc.as_ref().unwrap().clone(), camera_state); + let mut visible_pc = + Upsampler::get_visible_points(self.pc.as_ref().unwrap().clone(), camera_state); let mut needs_upsampling = true; while needs_upsampling { let upsampled_points = Upsampler::upsample(&visible_pc, camera_state, PARTITION_SIZE); - + if let Some(upsampled_points) = upsampled_points { self.pc.as_mut().unwrap().combine(&upsampled_points); visible_pc.combine(&upsampled_points); @@ -291,7 +289,7 @@ impl RenderManager> for AdaptiveUpsamplingManager 
{ needs_upsampling = false; } } - + Some(visible_pc) } diff --git a/src/render/wgpu/renderer.rs b/src/render/wgpu/renderer.rs index aa41409..a322c57 100644 --- a/src/render/wgpu/renderer.rs +++ b/src/render/wgpu/renderer.rs @@ -374,7 +374,7 @@ where } else if self.reader.should_redraw(&self.camera_state) { self.redisplay(); } - + let info = RenderInformation { camera: self.camera_state.camera, current_position: self.current_position, diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index 0e674fd..9ea96a3 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -4,48 +4,76 @@ use nalgebra::ComplexField; use num_traits::Float; use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; -use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound}; -use std::{cmp::{max, min}, collections::{BTreeSet, HashSet}, time::{Duration, Instant}}; +use crate::{ + formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, + utils::get_pc_bound, +}; +use std::{ + cmp::{max, min}, + collections::{BTreeSet, HashSet}, + time::{Duration, Instant}, +}; -use super::{camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController}; +use super::{ + camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController, +}; -pub struct Upsampler { - -} +pub struct Upsampler {} impl Upsampler { pub fn new() -> Self { Self {} } - pub fn upsample(pc: &PointCloud, camera_state: &CameraState, partition_k: usize) -> Option> { + pub fn upsample( + pc: &PointCloud, + camera_state: &CameraState, + partition_k: usize, + ) -> Option> { let should_upsample = Self::should_upsample(pc, camera_state); if should_upsample { let upsampled_points = Self::upsample_grid(pc, partition_k); - Some(PointCloud::new(upsampled_points.len(), upsampled_points.clone())) + Some(PointCloud::new( + upsampled_points.len(), + upsampled_points.clone(), + )) } else { None } } - pub fn get_visible_points(point_cloud: PointCloud, camera_state: &CameraState) -> PointCloud { + pub fn get_visible_points( + point_cloud: PointCloud, + camera_state: &CameraState, + ) -> PointCloud { let view_proj_matrix = Matrix4::from(camera_state.camera_uniform.view_proj); let antialias = point_cloud.antialias(); - let visible_points = point_cloud.points.into_par_iter().filter(|point| { - let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; - let point_in_view = view_proj_matrix * point_vec.to_homogeneous(); - (point_in_view.x / point_in_view.w).abs() <= 1.0 && - (point_in_view.y / point_in_view.w).abs() <= 1.0 && - (point_in_view.z / point_in_view.w).abs() <= 1.0 - }).collect::>(); + let visible_points = point_cloud + .points + .into_par_iter() + .filter(|point| { + let point_vec = Point3::new( + point.x - antialias.x, + point.y - antialias.y, + point.z - antialias.z, + ) / antialias.scale; + let point_in_view = view_proj_matrix * point_vec.to_homogeneous(); + (point_in_view.x / point_in_view.w).abs() <= 1.0 + && (point_in_view.y / point_in_view.w).abs() <= 1.0 + && (point_in_view.z / point_in_view.w).abs() <= 1.0 + }) + .collect::>(); PointCloud::new(visible_points.len(), visible_points) } - pub fn should_upsample(point_cloud: &PointCloud, camera_state: &CameraState) -> bool { + pub fn should_upsample( + point_cloud: &PointCloud, + camera_state: &CameraState, + ) -> bool { if point_cloud.points.is_empty() || point_cloud.points.len() > 300_000 { - 
return false + return false; } const RANGE_PIXEL_THRESHOLD: i32 = 1; const PERCENTAGE_THRESHOLD: f32 = 0.8; @@ -54,13 +82,24 @@ impl Upsampler { let antialias = point_cloud.antialias(); let width = camera_state.get_window_size().width as usize; let height = camera_state.get_window_size().height as usize; - let points_viewport = point_cloud.points.par_iter().map(|point| { - let point_vec = Point3::new(point.x - antialias.x, point.y - antialias.y, point.z - antialias.z) / antialias.scale; - let point_ndc = view_proj_matrix.transform_point(point_vec); - let x = min(((point_ndc.x + 1.0) * (width as f32) / 2.0) as usize, width); - let y = min(((point_ndc.y + 1.0) * (height as f32) / 2.0) as usize, height); - (x, y) - }).collect::>(); + let points_viewport = point_cloud + .points + .par_iter() + .map(|point| { + let point_vec = Point3::new( + point.x - antialias.x, + point.y - antialias.y, + point.z - antialias.z, + ) / antialias.scale; + let point_ndc = view_proj_matrix.transform_point(point_vec); + let x = min(((point_ndc.x + 1.0) * (width as f32) / 2.0) as usize, width); + let y = min( + ((point_ndc.y + 1.0) * (height as f32) / 2.0) as usize, + height, + ); + (x, y) + }) + .collect::>(); let mut viewport_is_filled = vec![false; (height + 1) * (width + 1)]; @@ -69,17 +108,26 @@ impl Upsampler { viewport_is_filled[y * (width + 1) + x] = true; }); - - let number_pixels_with_close_neighbours = (0..(viewport_is_filled.len())).into_par_iter() + let number_pixels_with_close_neighbours = (0..(viewport_is_filled.len())) + .into_par_iter() .filter(|&index| viewport_is_filled[index]) .map(|val| (val % (width + 1), val / (width + 1))) .filter(|(x, y)| { let x = *x; let y = *y; let mut filled_neighbours_count = 0; - for x_curr in ((x as i32) - RANGE_PIXEL_THRESHOLD)..((x as i32) + RANGE_PIXEL_THRESHOLD + 1) { - for y_curr in ((y as i32) - RANGE_PIXEL_THRESHOLD)..((y as i32) + RANGE_PIXEL_THRESHOLD + 1) { - if 0 > x_curr || x_curr > (width as i32) || 0 > y_curr || y_curr > (height as i32) || (x_curr, y_curr) == (x as i32, y as i32) { + for x_curr in + ((x as i32) - RANGE_PIXEL_THRESHOLD)..((x as i32) + RANGE_PIXEL_THRESHOLD + 1) + { + for y_curr in ((y as i32) - RANGE_PIXEL_THRESHOLD) + ..((y as i32) + RANGE_PIXEL_THRESHOLD + 1) + { + if 0 > x_curr + || x_curr > (width as i32) + || 0 > y_curr + || y_curr > (height as i32) + || (x_curr, y_curr) == (x as i32, y as i32) + { continue; } let x_curr = x_curr as usize; @@ -89,49 +137,60 @@ impl Upsampler { } } } - return filled_neighbours_count >= 4 - }).count(); - let filled_pixels: usize = viewport_is_filled.par_iter().filter(|&&is_filled| is_filled).count(); - let percentage_pixels_close_enough = (number_pixels_with_close_neighbours as f32) / (filled_pixels as f32); + return filled_neighbours_count >= 4; + }) + .count(); + let filled_pixels: usize = viewport_is_filled + .par_iter() + .filter(|&&is_filled| is_filled) + .count(); + let percentage_pixels_close_enough = + (number_pixels_with_close_neighbours as f32) / (filled_pixels as f32); percentage_pixels_close_enough < PERCENTAGE_THRESHOLD } pub fn contains(bound: &Bounds, point: &PointXyzRgba) -> bool { const ERROR_MARGIN_PERCENTAGE: f32 = 1.01; - point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x + point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x && point.x <= bound.max_x * ERROR_MARGIN_PERCENTAGE && point.y >= bound.min_y && point.y <= bound.max_y * ERROR_MARGIN_PERCENTAGE && point.z >= bound.min_z && point.z <= bound.max_z * ERROR_MARGIN_PERCENTAGE } - + fn partition( pc: &PointCloud, partitions: (usize, 
usize, usize), ) -> Vec> { let pc_bound = get_pc_bound(&pc); let child_bounds = pc_bound.partition(partitions); - - child_bounds.par_iter().map(|bound| { - pc.points.iter().map(|point| point.clone()).filter(|point| Self::contains(bound, point)).collect::>() - }).collect::>() + + child_bounds + .par_iter() + .map(|bound| { + pc.points + .iter() + .map(|point| point.clone()) + .filter(|point| Self::contains(bound, point)) + .collect::>() + }) + .collect::>() } - - + fn euclidean_distance_3d(point1: &PointXyzRgba, point2: &PointXyzRgba) -> f32 { let dx = point1.x - point2.x; let dy = point1.y - point2.y; let dz = point1.z - point2.z; (dx.powi(2) + dy.powi(2) + dz.powi(2)).sqrt() } - + fn get_middlepoint(point1: &PointXyzRgba, point2: &PointXyzRgba) -> PointXyzRgba { let geom_x = ((point1.x as f32) + (point2.x as f32)) / 2.0; let geom_y = ((point1.y as f32) + (point2.y as f32)) / 2.0; let geom_z = ((point1.z as f32) + (point2.z as f32)) / 2.0; - + let col_r = ((point1.r as f32) + (point2.r as f32)) / 2.0; let col_g = ((point1.g as f32) + (point2.g as f32)) / 2.0; let col_b = ((point1.b as f32) + (point2.b as f32)) / 2.0; @@ -146,17 +205,17 @@ impl Upsampler { a: col_a as u8, } } - + fn get_circumference_order(neighbours: &Vec, points: &Vec) -> Vec { let mut curr = neighbours[0]; // Assuming this is valid let mut order = vec![curr]; let mut seen = HashSet::new(); seen.insert(curr); - + while order.len() < neighbours.len() { let mut min_distance = f32::INFINITY; let mut nearest_neighbour = None; - + for &neighbour in neighbours { if seen.contains(&neighbour) { continue; @@ -167,17 +226,20 @@ impl Upsampler { nearest_neighbour = Some(neighbour); } } - + let next_point = nearest_neighbour.expect("Failed to find nearest neighbour"); curr = next_point; order.push(curr); seen.insert(curr); } - + order } - - pub fn upsample_grid(point_cloud: &PointCloud, partition_k: usize) -> Vec { + + pub fn upsample_grid( + point_cloud: &PointCloud, + partition_k: usize, + ) -> Vec { /* 1. Partition the vertices 2. 
Parallel iter upsampling each segment @@ -185,10 +247,14 @@ impl Upsampler { */ // let start = Instant::now(); let partitions = Self::partition(&point_cloud, (partition_k, partition_k, partition_k)); - let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| Self::upsample_grid_vertices(vertices.clone())).collect::>(); + let new_points = partitions + .par_iter() + .filter(|vertices| !vertices.is_empty()) + .flat_map(|vertices| Self::upsample_grid_vertices(vertices.clone())) + .collect::>(); new_points } - + fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut vertices = vertices; vertices.sort_unstable(); @@ -203,22 +269,26 @@ impl Upsampler { let mut new_points: Vec = vec![]; let mut visited_points: HashSet = HashSet::new(); for source in 0..vertices.len() { - if visited_points.contains(&source){ + if visited_points.contains(&source) { continue; } let point = vertices[source]; let x = point.x; let y = point.y; let z = point.z; - match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean) { Ok(nearest) => { - let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + let neighbours = nearest + .iter() + .map(|(_, second)| **second) + .skip(1) + .collect::>(); // visited_points.extend(neighbours.clone()); - + if neighbours.len() != 8 { continue; } - + let order = Self::get_circumference_order(&neighbours, &vertices); for (index, value) in order.iter().enumerate() { if index % 2 == 0 { @@ -227,32 +297,44 @@ impl Upsampler { } for i in 0..order.len() { let next_i = (i + 1) % order.len(); - let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; - let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; - + let circumference_pair = if order[i] < order[next_i] { + (order[i], order[next_i]) + } else { + (order[next_i], order[i]) + }; + let source_pair = if order[i] < source { + (order[i], source) + } else { + (source, order[i]) + }; + for &pair in &[circumference_pair, source_pair] { if visited.contains(&pair) { continue; } - let middlepoint = Self::get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + let middlepoint = + Self::get_middlepoint(&vertices[pair.0], &vertices[pair.1]); new_points.push(middlepoint); } visited.insert(source_pair); visited.insert(circumference_pair); - + let next_next_i = (i + 2) % order.len(); - let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], order[next_next_i]) }; + let dup_pair = if order[next_next_i] < order[i] { + (order[next_next_i], order[i]) + } else { + (order[i], order[next_next_i]) + }; visited.insert(dup_pair); } - } Err(e) => { println!("{:?}", e); } } - }; + } new_points.extend(vertices); - + new_points } } diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index 4175510..8256f8a 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -5,7 +5,11 @@ use kiddo::{distance::squared_euclidean, KdTree}; use log::warn; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; -use crate::{formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, render::wgpu::upsampler::Upsampler, utils::get_pc_bound}; +use crate::{ + formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, + render::wgpu::upsampler::Upsampler, + utils::get_pc_bound, +}; pub fn upsample(point_cloud: PointCloud, factor: usize) -> PointCloud { if factor <= 
1 { @@ -87,10 +91,9 @@ pub fn upsample(point_cloud: PointCloud, factor: usize) -> PointCl } } - pub fn contains(bound: &Bounds, point: &PointXyzRgba) -> bool { const ERROR_MARGIN_PERCENTAGE: f32 = 1.01; - point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x + point.x * ERROR_MARGIN_PERCENTAGE >= bound.min_x && point.x <= bound.max_x * ERROR_MARGIN_PERCENTAGE && point.y >= bound.min_y && point.y <= bound.max_y * ERROR_MARGIN_PERCENTAGE @@ -105,12 +108,18 @@ fn partition( let pc_bound = get_pc_bound(&pc); let child_bounds = pc_bound.partition(partitions); - child_bounds.par_iter().map(|bound| { - pc.points.iter().map(|point| point.clone()).filter(|point| contains(bound, point)).collect::>() - }).collect::>() + child_bounds + .par_iter() + .map(|bound| { + pc.points + .iter() + .map(|point| point.clone()) + .filter(|point| contains(bound, point)) + .collect::>() + }) + .collect::>() } - fn euclidean_distance_3d(point1: &PointXyzRgba, point2: &PointXyzRgba) -> f32 { let dx = point1.x - point2.x; let dy = point1.y - point2.y; @@ -143,11 +152,11 @@ fn get_circumference_order(neighbours: &Vec, points: &Vec) let mut order = vec![curr]; let mut seen = HashSet::new(); seen.insert(curr); - + while order.len() < neighbours.len() { let mut min_distance = f32::INFINITY; let mut nearest_neighbour = None; - + for &neighbour in neighbours { if seen.contains(&neighbour) { continue; @@ -158,17 +167,20 @@ fn get_circumference_order(neighbours: &Vec, points: &Vec) nearest_neighbour = Some(neighbour); } } - + let next_point = nearest_neighbour.expect("Failed to find nearest neighbour"); curr = next_point; order.push(curr); seen.insert(curr); } - + order } -pub fn upsample_grid(point_cloud: PointCloud, partition_k: usize) -> PointCloud { +pub fn upsample_grid( + point_cloud: PointCloud, + partition_k: usize, +) -> PointCloud { /* 1. Partition the vertices 2. 
Parallel iter upsampling each segment @@ -176,7 +188,11 @@ pub fn upsample_grid(point_cloud: PointCloud, partition_k: usize) */ let start = Instant::now(); let partitions = partition(&point_cloud, (partition_k, partition_k, partition_k)); - let new_points = partitions.par_iter().filter(|vertices| !vertices.is_empty()).flat_map(|vertices| upsample_grid_vertices_dedup(vertices.clone())).collect::>(); + let new_points = partitions + .par_iter() + .filter(|vertices| !vertices.is_empty()) + .flat_map(|vertices| upsample_grid_vertices_dedup(vertices.clone())) + .collect::>(); println!("{:?}", start.elapsed().as_micros()); PointCloud::new(new_points.len(), new_points) } @@ -195,20 +211,24 @@ fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec = vec![]; let mut visited_points: HashSet = HashSet::new(); for source in 0..vertices.len() { - if visited_points.contains(&source){ + if visited_points.contains(&source) { continue; } let point = vertices[source]; let x = point.x; let y = point.y; let z = point.z; - match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean) { Ok(nearest) => { - let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + let neighbours = nearest + .iter() + .map(|(_, second)| **second) + .skip(1) + .collect::>(); if neighbours.len() != 8 { continue; } - + let order = get_circumference_order(&neighbours, &vertices); for (index, value) in order.iter().enumerate() { if index % 2 == 0 { @@ -217,36 +237,46 @@ fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec { println!("{:?}", e); } } - }; + } new_points.extend(vertices); new_points } - fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { @@ -258,15 +288,17 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { let mut visited: HashSet<(usize, usize)> = HashSet::new(); let mut new_points: Vec = vec![]; for source in 0..vertices.len() { - let point = vertices[source]; let x = point.x; let y = point.y; let z = point.z; - match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean){ + match kd_tree.nearest(&[x, y, z], 9, &squared_euclidean) { Ok(nearest) => { - - let neighbours = nearest.iter().map(|(_, second)| **second).skip(1).collect::>(); + let neighbours = nearest + .iter() + .map(|(_, second)| **second) + .skip(1) + .collect::>(); if neighbours.len() != 8 { continue; } @@ -274,21 +306,33 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { for i in 0..order.len() { let next_i = (i + 1) % order.len(); - let circumference_pair = if order[i] < order[next_i] { (order[i], order[next_i]) } else { (order[next_i], order[i]) }; - let source_pair = if order[i] < source { (order[i], source) } else { (source, order[i]) }; - + let circumference_pair = if order[i] < order[next_i] { + (order[i], order[next_i]) + } else { + (order[next_i], order[i]) + }; + let source_pair = if order[i] < source { + (order[i], source) + } else { + (source, order[i]) + }; + for &pair in &[circumference_pair, source_pair] { if visited.contains(&pair) { continue; } - let middlepoint = get_middlepoint(&vertices[pair.0], &vertices[pair.1]); + let middlepoint = get_middlepoint(&vertices[pair.0], &vertices[pair.1]); new_points.push(middlepoint); } visited.insert(source_pair); visited.insert(circumference_pair); - + let next_next_i = (i + 2) % order.len(); - let dup_pair = if order[next_next_i] < order[i] { (order[next_next_i], order[i]) } else { (order[i], order[next_next_i]) }; + let dup_pair = if 
order[next_next_i] < order[i] { + (order[next_next_i], order[i]) + } else { + (order[i], order[next_next_i]) + }; visited.insert(dup_pair); } } @@ -296,7 +340,7 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { println!("{:?}", e); } } - }; + } println!("Original count: {:?}", vertices.len()); new_points.extend(vertices); println!("Upsampled count: {:?}", new_points.len()); From a976729804a137a69700f1dd476d6f10cc3d949e Mon Sep 17 00:00:00 2001 From: Wei Tsang Ooi Date: Tue, 28 May 2024 05:16:05 +0800 Subject: [PATCH 09/11] Fix cargo fmt --- src/formats/pointxyzrgba.rs | 3 +-- src/pipeline/subcommands/upsample.rs | 4 +++- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/formats/pointxyzrgba.rs b/src/formats/pointxyzrgba.rs index 7f51a84..24e710f 100644 --- a/src/formats/pointxyzrgba.rs +++ b/src/formats/pointxyzrgba.rs @@ -1,8 +1,8 @@ -use std::cmp::Ordering; use serde::{ ser::{Serialize, SerializeStruct, Serializer}, Deserialize, }; +use std::cmp::Ordering; #[repr(C)] #[derive(Debug, Copy, Clone, PartialEq, bytemuck::Pod, bytemuck::Zeroable, Deserialize)] pub struct PointXyzRgba { @@ -32,7 +32,6 @@ impl Ord for PointXyzRgba { impl Eq for PointXyzRgba {} - impl Serialize for PointXyzRgba { fn serialize(&self, serializer: S) -> Result where diff --git a/src/pipeline/subcommands/upsample.rs b/src/pipeline/subcommands/upsample.rs index 1b93cd1..95bbeea 100644 --- a/src/pipeline/subcommands/upsample.rs +++ b/src/pipeline/subcommands/upsample.rs @@ -5,7 +5,9 @@ use std::time::Instant; use crate::{ pipeline::{channel::Channel, PipelineMessage}, reconstruct::poisson_reconstruct::reconstruct, - upsample::{interpolate::upsample, interpolate::upsample_grid, upsample_methods::UpsampleMethod}, + upsample::{ + interpolate::upsample, interpolate::upsample_grid, upsample_methods::UpsampleMethod, + }, }; use super::Subcommand; From 3fdc5fefd345b47a3f9118b6e8306f41c808ecb5 Mon Sep 17 00:00:00 2001 From: Wei Tsang Ooi Date: Tue, 28 May 2024 05:32:43 +0800 Subject: [PATCH 10/11] Remove unused packages warnings --- src/render/wgpu/render_manager.rs | 2 +- src/render/wgpu/upsampler.rs | 16 +++++++++------- src/upsample/interpolate.rs | 4 ++-- 3 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/render/wgpu/render_manager.rs b/src/render/wgpu/render_manager.rs index d8cfee0..f157b08 100644 --- a/src/render/wgpu/render_manager.rs +++ b/src/render/wgpu/render_manager.rs @@ -309,7 +309,7 @@ impl RenderManager> for AdaptiveUpsamplingManager { self.camera_state = camera_state; } - fn should_redraw(&mut self, camera_state: &CameraState) -> bool { + fn should_redraw(&mut self, _camera_state: &CameraState) -> bool { true } } diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index 9ea96a3..7b88c4e 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -1,21 +1,23 @@ use cgmath::{Matrix4, Point3, Transform}; use kiddo::{distance::squared_euclidean, KdTree}; -use nalgebra::ComplexField; -use num_traits::Float; use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use crate::{ - formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, + formats::{ + bounds::Bounds, + pointxyzrgba::PointXyzRgba, + PointCloud + }, utils::get_pc_bound, }; use std::{ - cmp::{max, min}, - collections::{BTreeSet, HashSet}, - time::{Duration, Instant}, + cmp::min, + collections::HashSet, }; use super::{ - camera::CameraState, renderable::Renderable, resolution_controller::ResolutionController, + camera::CameraState, + 
renderable::Renderable }; pub struct Upsampler {} diff --git a/src/upsample/interpolate.rs b/src/upsample/interpolate.rs index 8256f8a..5636ea5 100644 --- a/src/upsample/interpolate.rs +++ b/src/upsample/interpolate.rs @@ -1,13 +1,11 @@ use std::{collections::HashSet, time::Instant}; -use cgmath::Matrix4; use kiddo::{distance::squared_euclidean, KdTree}; use log::warn; use rayon::iter::{IntoParallelRefIterator, ParallelIterator}; use crate::{ formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, - render::wgpu::upsampler::Upsampler, utils::get_pc_bound, }; @@ -277,6 +275,7 @@ fn upsample_grid_vertices_dedup(vertices: Vec) -> Vec) -> Vec { let mut kd_tree = KdTree::new(); for (i, pt) in vertices.iter().enumerate() { @@ -347,6 +346,7 @@ fn upsample_grid_vertices(vertices: Vec) -> Vec { println!("Visited pairs count: {:?}", visited.len()); new_points } +*/ #[cfg(test)] mod test { From 5642d35dc75ee475de405f0f60ec309f56f9b924 Mon Sep 17 00:00:00 2001 From: Wei Tsang Ooi Date: Tue, 28 May 2024 05:36:40 +0800 Subject: [PATCH 11/11] Fix cargo fmt --- src/render/wgpu/upsampler.rs | 16 +++------------- 1 file changed, 3 insertions(+), 13 deletions(-) diff --git a/src/render/wgpu/upsampler.rs b/src/render/wgpu/upsampler.rs index 7b88c4e..d9c914c 100644 --- a/src/render/wgpu/upsampler.rs +++ b/src/render/wgpu/upsampler.rs @@ -3,22 +3,12 @@ use kiddo::{distance::squared_euclidean, KdTree}; use rayon::iter::{IntoParallelIterator, IntoParallelRefIterator, ParallelIterator}; use crate::{ - formats::{ - bounds::Bounds, - pointxyzrgba::PointXyzRgba, - PointCloud - }, + formats::{bounds::Bounds, pointxyzrgba::PointXyzRgba, PointCloud}, utils::get_pc_bound, }; -use std::{ - cmp::min, - collections::HashSet, -}; +use std::{cmp::min, collections::HashSet}; -use super::{ - camera::CameraState, - renderable::Renderable -}; +use super::{camera::CameraState, renderable::Renderable}; pub struct Upsampler {}
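
Note on the adaptive check introduced in this series: the decision to upsample boils down to a screen-space occupancy test. Visible points are projected into the viewport, each covered pixel is marked, and the cloud is considered dense enough only if a large enough fraction of covered pixels have several covered neighbours nearby. The standalone sketch below is not part of the patches; should_upsample_toy, the toy grid in main, and the helper names are illustrative only. It mirrors the constants used by should_upsample in src/render/wgpu/upsampler.rs (a 1-pixel neighbourhood, at least 4 filled neighbours, a 0.8 density threshold) on a plain boolean grid, skipping the view-projection step that the real code performs with the camera's view_proj matrix and window size.

    // Toy version of the screen-space density heuristic from `should_upsample`.
    // `filled[y * width + x]` marks pixels covered by projected points.
    fn should_upsample_toy(filled: &[bool], width: usize, height: usize) -> bool {
        const RANGE: i32 = 1;                   // neighbourhood radius in pixels
        const MIN_NEIGHBOURS: usize = 4;        // filled neighbours needed to call a pixel "dense"
        const PERCENTAGE_THRESHOLD: f32 = 0.8;  // below this fraction of dense pixels, upsample

        let mut covered = 0usize;
        let mut dense = 0usize;
        for y in 0..height {
            for x in 0..width {
                if !filled[y * width + x] {
                    continue;
                }
                covered += 1;
                // Count covered neighbours within the 1-pixel range, excluding the pixel itself.
                let mut neighbours = 0usize;
                for dy in -RANGE..=RANGE {
                    for dx in -RANGE..=RANGE {
                        if dx == 0 && dy == 0 {
                            continue;
                        }
                        let (nx, ny) = (x as i32 + dx, y as i32 + dy);
                        if nx < 0 || ny < 0 || nx >= width as i32 || ny >= height as i32 {
                            continue;
                        }
                        if filled[ny as usize * width + nx as usize] {
                            neighbours += 1;
                        }
                    }
                }
                if neighbours >= MIN_NEIGHBOURS {
                    dense += 1;
                }
            }
        }
        if covered == 0 {
            return false; // nothing on screen, nothing to upsample
        }
        (dense as f32) / (covered as f32) < PERCENTAGE_THRESHOLD
    }

    fn main() {
        // A 4x4 grid with only a sparse diagonal covered: no pixel has 4 covered
        // neighbours, so the heuristic reports that upsampling is needed.
        let (w, h) = (4, 4);
        let mut filled = vec![false; w * h];
        for i in 0..4 {
            filled[i * w + i] = true;
        }
        assert!(should_upsample_toy(&filled, w, h));
    }

In the patches themselves this grid is produced per frame: AdaptiveUpsamplingManager culls to the visible points, asks Upsampler::upsample (which runs should_upsample first), and keeps combining the returned points into the cached cloud until the density test passes.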