use std::collections::{HashMap, VecDeque};
use std::iter::zip;
use std::sync::mpsc::Receiver;
use std::sync::{mpsc, Arc};
use std::thread;
use std::time::{Duration, Instant};

use hdbscan::{Hdbscan, HdbscanHyperParams};
use image::{DynamicImage, ImageReader, Rgb, RgbImage};
use numpy::PyArray2;
use pyo3::prelude::PyModule;
use pyo3::types::PyList;
use pyo3::Python;
use rand::Rng;
use rayon::prelude::*;
use show_image::{create_window, ImageInfo, ImageView, PixelFormat};

/// Extracts bright pixels (value above `threshold`) from the raw image bytes,
/// randomly keeps roughly a `downsample` fraction of them, and emits them as
/// (x, y) coordinates derived from the byte index and `width`.
fn create_extract_thread(
    image_rx: Receiver<Arc<Vec<u8>>>,
    width: u32,
    threshold: u8,
    downsample: f64,
) -> Receiver<Arc<Vec<Vec<f64>>>> {
    let (tx, rx) = mpsc::sync_channel::<Arc<Vec<Vec<f64>>>>(1);
    thread::spawn(move || {
        // Probability threshold for the random downsampling, scaled to the u32 range.
        let max_rng = (u32::MAX as f64 * downsample) as u32;
        for image in image_rx {
            let poi = image
                .par_iter()
                .enumerate()
                .filter(|(_, pixel)| **pixel > threshold)
                .filter(|_| {
                    let mut rng = rand::rng();
                    rng.random::<u32>() < max_rng
                })
                .map(|(i, _)| vec![i as f64 % width as f64, i as f64 / width as f64]);
            let mut vec = Vec::new();
            vec.par_extend(poi);
            tx.send(Arc::new(vec)).expect("extract: receiver dropped");
        }
    });
    rx
}

/// Clusters each incoming point cloud with HDBSCAN and forwards the points
/// together with their cluster labels (-1 marks noise).
fn create_pointcloud_labels_thread(
    pointcloud_rx: Receiver<Arc<Vec<Vec<f64>>>>,
) -> Receiver<(Arc<Vec<Vec<f64>>>, Arc<Vec<i32>>)> {
    let (pointcloud_labels_tx, pointcloud_labels_rx) =
        mpsc::sync_channel::<(Arc<Vec<Vec<f64>>>, Arc<Vec<i32>>)>(1);
    thread::spawn(move || {
        for pointcloud in pointcloud_rx {
            let params = HdbscanHyperParams::builder()
                .epsilon(5.0)
                .min_samples(3)
                .allow_single_cluster(false)
                .min_cluster_size(pointcloud.len() / 8)
                .build();
            let labels = Arc::new(Hdbscan::new(&pointcloud, params).cluster().unwrap());
            pointcloud_labels_tx.send((pointcloud, labels)).unwrap();
        }
    });
    pointcloud_labels_rx
}

/// Groups the labelled points into a HashMap with one entry per cluster label.
fn create_cluster_separator_thread(
    cluster_labels: Receiver<(Arc<Vec<Vec<f64>>>, Arc<Vec<i32>>)>,
) -> Receiver<Arc<HashMap<i32, Vec<Vec<f64>>>>> {
    let (clusters_tx, clusters_rx) = mpsc::sync_channel::<Arc<HashMap<i32, Vec<Vec<f64>>>>>(1);
    thread::spawn(move || {
        for (pointcloud, labels) in cluster_labels {
            let mut clusters = HashMap::<i32, Vec<Vec<f64>>>::with_capacity(5);
            zip(pointcloud.iter(), labels.iter()).for_each(|(point, label)| {
                clusters
                    .entry(*label)
                    .or_insert_with(Vec::new)
                    .push(point.clone());
            });
            clusters_tx.send(Arc::new(clusters)).unwrap();
        }
    });
    clusters_rx
}

/// Sorts the non-noise clusters by their mean first coordinate (lateral offset
/// after projection) and forwards the two lowest as the lane pair.
fn create_isolate_lane_thread(
    clusters_rx: Receiver<Arc<HashMap<i32, Vec<Vec<f64>>>>>,
) -> Receiver<Arc<[Vec<Vec<f64>>; 2]>> {
    let (lanes_tx, lanes_rx) = mpsc::sync_channel::<Arc<[Vec<Vec<f64>>; 2]>>(1);
    thread::spawn(move || {
        for clusters in clusters_rx {
            let mut averages = clusters
                .iter()
                .filter(|(label, _)| **label != -1)
                .map(|(label, cluster)| {
                    (
                        *label,
                        cluster.iter().map(|point| point[0]).sum::<f64>() / cluster.len() as f64,
                    )
                })
                .collect::<Vec<(i32, f64)>>();
            averages.sort_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap());
            if averages.len() < 2 {
                continue;
            }
            lanes_tx
                .send(Arc::new([
                    clusters.get(&averages[0].0).unwrap().clone(),
                    clusters.get(&averages[1].0).unwrap().clone(),
                ]))
                .unwrap();
        }
    });
    lanes_rx
}

/// Projects image-space points onto the road plane via the embedded Python
/// SimulationImageHelper (homography taken from calib_image.py).
fn create_transform_thread(
    pointcloud_rx: Receiver<Arc<Vec<Vec<f64>>>>,
) -> Receiver<Arc<Vec<Vec<f64>>>> {
    let (projection_tx, projection_rx) = mpsc::sync_channel::<Arc<Vec<Vec<f64>>>>(1);
    thread::spawn(move || {
        Python::with_gil(|py| {
            let simulation_image_helper = PyModule::from_code(
                py,
                r#"
import numpy as np


class SimulationImageHelper:
    """
    This class can be used to project points on the road surface onto the image
    and vice versa. It may be used for detecting lanes in simulation.

    Attributes:
        yHorizon: The y-coordinate of the horizon in the image

    Methods:
        image2road: project 2d image points to 3d road coords
        road2image: project 3d road coords to 2d image points
        distance2imagerow: return which y-coord in the image corresponds to a
            given distance on the 3d road plane
    """

    def __init__(self, notsurewhatthisis):
        """
        The class constructor.
        """
        # from calib_image.py
        self.H_I = np.matrix(
            [
                [-7.74914499e-03, 3.95733793e-18, 3.10353257e00],
                [8.56519716e-18, 9.42313768e-05, -1.86052093e00],
                [2.57498016e-18, -2.73825295e-03, 1.00000000e00],
            ]
        )
        self.H_I = self.H_I.reshape(3, 3)
        self.H = self.H_I.I

        X_infinity = np.matrix([0, 10000, 1]).reshape(3, 1)
        self.yHorizon = self.H * X_infinity
        self.yHorizon = int((self.yHorizon[1] / self.yHorizon[2]).item())

    def image2road(self, pts2d):
        """
        Transform a point on the road surface from the image to 3D coordinates
        (in DIN70000, i.e. X to front, Y to left, origin in center of vehicle
        at height 0).
        """
        pts2d = np.array(pts2d)
        N, cols = pts2d.shape
        if (cols == 1) and (N == 2):
            # if just two coordinates are given, we make sure we have a (2x1) vector
            pts2d = pts2d.T
        assert (
            cols == 2
        ), "pts2d should be a Nx2 numpy array, but we obtained (%d, %d)" % (N, cols)

        X = self.H_I * np.hstack((pts2d, np.ones((pts2d.shape[0], 1)))).T
        X = X / X[2, :]
        X = X.T
        # the camera is "self.C[2]" in front of the vehicle's center
        return np.hstack((X[:, 1], -X[:, 0]))

    def road2image(self, ptsRd, imSize=None):
        """
        Transform a 3d point on the road surface (in DIN70000, i.e. X to front,
        Y to left) to image coordinates.
        """
        ptsRd = np.array(ptsRd)
        N, cols = ptsRd.shape
        if (cols == 1) and (N == 2):
            # if just two coordinates are given, we make sure we have a (2x1) vector
            ptsRd = ptsRd.T
            N = cols
        assert (
            cols == 2
        ), "ptsRd should be a Nx2 numpy array, but we obtained (%d, %d)" % (N, cols)

        # go back from DIN70000 to our image coordinate system
        pts = np.hstack(
            (
                -ptsRd[:, 1].reshape((-1, 1)),
                ptsRd[:, 0].reshape((-1, 1)),
                np.ones((N, 1)),
            )
        )
        x = self.H * pts.T
        x = x / x[2, :]
        x = x.T

        if imSize is not None:
            valid = np.logical_and((x[:, 0] >= 0), (x[:, 1] >= 0))
            valid = np.logical_and(valid, (x[:, 0] < imSize[1]))
            valid = np.logical_and(valid, (x[:, 1] < imSize[0]))
            valid = np.nonzero(valid)[0].tolist()
            x = x[valid, :]

        return np.hstack((x[:, 0], x[:, 1]))

    def distance2imagerow(self, distance):
        """
        Compute which row in the image corresponds to a distance (in DIN70000).
""" p = self.road2image(np.array([[distance, 0]])) return p[0, 1] "#, "simulation_image_helper.py", "simImgHelper", ).unwrap(); let image_helper_class = simulation_image_helper .getattr("SimulationImageHelper") .unwrap(); let house = image_helper_class.call1(("",)).unwrap(); for pointcloud in pointcloud_rx { let pylist = PyList::new(py, pointcloud.iter().map(|inner| PyList::new(py, inner))); let result : &PyArray2 = house.call_method1("image2road", (pylist,)).unwrap().extract().unwrap(); let arr = result.readonly().as_array().outer_iter().map(|row| { let vec = row.to_vec(); vec![-vec[1], vec[0]] }).collect::>(); projection_tx.send(Arc::new(arr)).unwrap(); } }); }); projection_rx } fn create_filter_thread() { } fn dummy_image_spawner_thread() -> Receiver>> { let img: DynamicImage = ImageReader::open("./images/lane_detection_loop_60.png").unwrap().decode().unwrap(); let img_data: Vec = img.clone().into_bytes(); let (img_tx, img_rx) = mpsc::sync_channel::>>(1); thread::spawn(move || { loop { img_tx.send(Arc::new(img_data.clone())).unwrap(); } }); img_rx } #[show_image::main] fn main() { let image_rx = dummy_image_spawner_thread(); let pointcloud_rx = create_extract_thread(image_rx, 800, 196, 0.05); let projection_rx = create_transform_thread(pointcloud_rx); let cluster_labels_rx = create_pointcloud_labels_thread(projection_rx); let clusters_rx = create_cluster_separator_thread(cluster_labels_rx); let lanes = create_isolate_lane_thread(clusters_rx); let window = create_window("image", Default::default()).unwrap(); let mut t: VecDeque = VecDeque::with_capacity(25); let mut now: Instant = Instant::now(); for lanes in lanes { let mut rgb_image = RgbImage::new(800, 800); let colors = vec![Rgb([0, 255, 0]), Rgb([255, 0, 0])]; for (i, lane) in lanes.iter().enumerate() { let color = colors[i]; let _ = lane.iter().for_each(|(pixel)| { let x = (pixel[0] * 10.0 + rgb_image.width() as f64 / 2.0) as u32; let y = rgb_image.height() - (pixel[1] * 10.0) as u32; if x < 0 || x > 799 || y < 0 || y > 799 {} else { rgb_image.put_pixel(x, y, color); if x > 0 { rgb_image.put_pixel(x - 1, y, color); } if x < 399 { rgb_image.put_pixel(x + 1, y, color); } if y > 0 { rgb_image.put_pixel(x, y - 1, color); } if y < 399 { rgb_image.put_pixel(x, y + 1, color); } } }); } let image = ImageView::new(ImageInfo::new(PixelFormat::Rgb8, 800, 800), rgb_image.iter().as_slice()); window.set_image("camera", image).unwrap(); if t.len() == 25 { t.pop_front(); } t.push_back(now.elapsed()); println!("Avg: {:.6} s", (t.iter().map(|d| d.as_secs_f64()).sum::() / t.len() as f64)); now = Instant::now(); } }