From fab8e8e6de8aa2412754b5330cb0a74835c9ad51 Mon Sep 17 00:00:00 2001
From: Parric
Date: Mon, 5 May 2025 21:59:35 +0200
Subject: [PATCH] Add projection of the extracted point cloud from image to
 road coordinates

---
 Cargo.toml  |   4 +-
 src/main.rs | 179 ++++++++++++++++++++++++++++++++++++++++++++++------
 2 files changed, 161 insertions(+), 22 deletions(-)

diff --git a/Cargo.toml b/Cargo.toml
index eb198de..7544a88 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -11,4 +11,6 @@ image = "0.25.6"
 show-image = "0.14.1"
 rand = "0.9.1"
 rayon = "1.10.0"
-hdbscan = "0.10.0"
\ No newline at end of file
+hdbscan = "0.10.0"
+pyo3 = { version = "0.21.2", features = ["auto-initialize"] }
+numpy = "0.21.0"
\ No newline at end of file
diff --git a/src/main.rs b/src/main.rs
index 95c584e..668db93 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -6,6 +6,10 @@ use std::thread;
 use std::time::{Duration, Instant};
 use hdbscan::{Hdbscan, HdbscanHyperParams};
 use image::{DynamicImage, ImageReader, Rgb, RgbImage};
+use numpy::PyArray2;
+use pyo3::prelude::PyModule;
+use pyo3::Python;
+use pyo3::types::PyList;
 use rand::{Rng};
 use rayon::prelude::*;
 use show_image::{create_window, ImageInfo, ImageView, PixelFormat};
@@ -26,7 +30,7 @@ fn create_extract_thread(image_rx: Receiver<Arc<Vec<u8>>>, width: u32, threshold: u8, max_rng: f64) -> Receiver<Arc<Vec<Vec<f64>>>> {
                 rng.random::<f64>() < max_rng
             })
             .map(|(i, _)| {
-                vec![ i as f64 % width as f64 - width_2, i as f64 / width as f64 ]
+                vec![ i as f64 % width as f64, i as f64 / width as f64 ]
             });

     let mut vec = Vec::new();
@@ -43,7 +47,7 @@ fn create_pointcloud_labels_thread(pointcloud_rx: Receiver<Arc<Vec<Vec<f64>>>>)
     thread::spawn(move || {
         for pointcloud in pointcloud_rx {
             let params = HdbscanHyperParams::builder()
-                .epsilon(1.0)
+                .epsilon(5.0)
                 .min_samples(3)
                 .allow_single_cluster(false)
                 .min_cluster_size(pointcloud.len() / 8)
@@ -97,12 +101,137 @@ fn create_isolate_lane_thread(clusters_rx: Receiver<Arc<Vec<Vec<Vec<f64>>>>>)
-//let pts: Vec<Vec<f64>> = Vec::new(x_points, y_points, 1-array)
-//let X = H * pts
+// Projects the extracted 2D image points onto the road plane by calling the
+// Python SimulationImageHelper (homography from calib_image.py) through pyo3.
+fn create_transform_thread(pointcloud_rx: Receiver<Arc<Vec<Vec<f64>>>>) -> Receiver<Arc<Vec<Vec<f64>>>> {
+    let (projection_tx, projection_rx) = mpsc::sync_channel::<Arc<Vec<Vec<f64>>>>(1);
+    thread::spawn(move || {
+        Python::with_gil(|py| {
+            let simulation_image_helper = PyModule::from_code(py, r#"
+import numpy as np
+
+
+class SimulationImageHelper:
+    """
+    This class can be used to project points on the road surface onto the image
+    and vice versa. It may be used for detecting lanes in simulation.
+
+    Attributes:
+        yHorizon: The y-coordinate of the horizon in the image
+
+    Methods:
+        image2road: project 2d image points to 3d road coords
+        road2image: project 3d road coords to 2d image points
+        distance2imagerow: return which y-coord in the image corresponds to a
+            given distance on the 3d road plane
+    """
+
+    def __init__(self):
+        """
+        The class constructor.
+        """
+        # from calib_image.py
+        self.H_I = np.matrix(
+            [
+                [-7.74914499e-03, 3.95733793e-18, 3.10353257e00],
+                [8.56519716e-18, 9.42313768e-05, -1.86052093e00],
+                [2.57498016e-18, -2.73825295e-03, 1.00000000e00],
+            ]
+        )
+
+        self.H_I = self.H_I.reshape(3, 3)
+        self.H = self.H_I.I
+
+        X_infinity = np.matrix([0, 10000, 1]).reshape(3, 1)
+        self.yHorizon = self.H * X_infinity
+        self.yHorizon = int((self.yHorizon[1] / self.yHorizon[2]).item())
+
+    def image2road(self, pts2d):
+        """
+        Transform a point on the road surface from the image to 3D coordinates
+        (in DIN70000, i.e. X to front, Y to left, origin in center of vehicle at
+        height 0).
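+
+        Example (illustrative sketch; the pixel values below are assumed to
+        lie on the road surface, i.e. below yHorizon):
+
+            helper = SimulationImageHelper()
+            XY = helper.image2road(np.array([[400.0, 600.0]]))
+            # XY has shape (1, 2): [X to front, Y to left] in road coordinates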
+ """ + pts2d = np.array(pts2d) + N, cols = pts2d.shape + if (cols == 1) and (N == 2): + # if just two coordinates are given, we make sure we have a (2x1) vector + pts2d = pts2d.T + assert ( + cols == 2 + ), "pts2d should be a Nx2 numpy array, but we obtained (%d, %d)" % (N, cols) + + X = self.H_I * np.hstack((pts2d, np.ones((pts2d.shape[0], 1)))).T + X = X / X[2, :] + X = X.T + + # the camera is "self.C[2]" in front of the vehicle's center + return np.hstack((X[:, 1], -X[:, 0])) + + def road2image(self, ptsRd, imSize=None): + """ + Transform a 3d point on the road surface (in DIN70000, i.e. X to front, + Y to left) to image coordinates. + """ + ptsRd = np.array(ptsRd) + N, cols = ptsRd.shape + if (cols == 1) and (N == 2): + # if just two coordinates are given, we make sure we have a (2x1) vector + ptsRd = ptsRd.T + N = cols + assert ( + cols == 2 + ), "ptsRd should be a Nx2 numpy array, but we obtained (%d, %d)" % (N, cols) + + # go back from DIN70000 to our image coordinate system + pts = np.hstack( + ( + -ptsRd[:, 1].reshape((-1, 1)), + ptsRd[:, 0].reshape((-1, 1)), + np.ones((N, 1)), + ) + ) + x = self.H * pts.T + x = x / x[2, :] + x = x.T + + if imSize is not None: + valid = np.logical_and((x[:, 0] >= 0), (x[:, 1] >= 0)) + valid = np.logical_and(valid, (x[:, 0] < imSize[1])) + valid = np.logical_and(valid, (x[:, 1] < imSize[0])) + valid = np.nonzero(valid)[0].tolist() + x = x[valid, :] + + return np.hstack((x[:, 0], x[:, 1])) + + def distance2imagerow(self, distance): + """ + Compute which row in the image corresponds to a distance (in DIN70000). + """ + p = self.road2image(np.array([[distance, 0]])) + return p[0, 1] + + "#, + "simulation_image_helper.py", + "simImgHelper", + ).unwrap(); + + let image_helper_class = simulation_image_helper + .getattr("SimulationImageHelper") + .unwrap(); + let house = image_helper_class.call1(("",)).unwrap(); + + for pointcloud in pointcloud_rx { + let pylist = PyList::new(py, pointcloud.iter().map(|inner| PyList::new(py, inner))); + let result : &PyArray2 = house.call_method1("image2road", (pylist,)).unwrap().extract().unwrap(); + let arr = result.readonly().as_array().outer_iter().map(|row| { + let vec = row.to_vec(); + vec![-vec[1], vec[0]] + }).collect::>(); + projection_tx.send(Arc::new(arr)).unwrap(); + } + }); }); + projection_rx } fn create_filter_thread() { @@ -128,15 +257,17 @@ fn dummy_image_spawner_thread() -> Receiver>> { fn main() { let image_rx = dummy_image_spawner_thread(); let pointcloud_rx = create_extract_thread(image_rx, 800, 196, 0.05); - let cluster_labels_rx = create_pointcloud_labels_thread(pointcloud_rx); + let projection_rx = create_transform_thread(pointcloud_rx); + let cluster_labels_rx = create_pointcloud_labels_thread(projection_rx); let clusters_rx = create_cluster_separator_thread(cluster_labels_rx); let lanes = create_isolate_lane_thread(clusters_rx); + let window = create_window("image", Default::default()).unwrap(); let mut t: VecDeque = VecDeque::with_capacity(25); let mut now: Instant = Instant::now(); - for lanes in lanes.iter() { + for lanes in lanes { let mut rgb_image = RgbImage::new(800, 800); @@ -144,20 +275,26 @@ fn main() { for (i, lane) in lanes.iter().enumerate() { let color = colors[i]; let _ = lane.iter().for_each(|(pixel)| { - let x = (pixel[0] + 400f64) as u32; - rgb_image.put_pixel(x, pixel[1] as u32, color); - if pixel[0] > 0.0 { - rgb_image.put_pixel(x - 1, pixel[1] as u32, color); - } - if pixel[0] < 399f64 { - rgb_image.put_pixel(x + 1, pixel[1] as u32, color); - } - if pixel[1] > 0.0 { - 
-                    rgb_image.put_pixel(x, pixel[1] as u32 - 1, color);
-                }
-                if pixel[1] < 399f64 {
-                    rgb_image.put_pixel(x, pixel[1] as u32 + 1, color);
+                // road coordinates (metres) -> debug-image pixels: 10 px per metre,
+                // origin at the bottom centre of the 800x800 image
+                let x = (pixel[0] * 10.0 + rgb_image.width() as f64 / 2.0) as i64;
+                let y = rgb_image.height() as i64 - (pixel[1] * 10.0) as i64;
+
+                if x < 0 || x > 799 || y < 0 || y > 799 {}
+                else {
+                    let (x, y) = (x as u32, y as u32);
+                    rgb_image.put_pixel(x, y, color);
+                    if x > 0 {
+                        rgb_image.put_pixel(x - 1, y, color);
+                    }
+                    if x < 799 {
+                        rgb_image.put_pixel(x + 1, y, color);
+                    }
+                    if y > 0 {
+                        rgb_image.put_pixel(x, y - 1, color);
+                    }
+                    if y < 799 {
+                        rgb_image.put_pixel(x, y + 1, color);
+                    }
                 }
+
             });
         }
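+
+        // Illustrative example of the mapping above (values assumed for
+        // demonstration): a road point 20 m straight ahead, i.e. pixel[0] = 0.0 and
+        // pixel[1] = 20.0, lands at x = 0 * 10 + 400 = 400 and y = 800 - 20 * 10 = 600,
+        // near the bottom centre of the debug image; farther points move towards the top.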