From 902de6a12bee9f6024b1f7756f34d728c880bb33 Mon Sep 17 00:00:00 2001
From: Liam
Date: Sun, 24 Aug 2025 15:24:28 +0200
Subject: [PATCH] Add target_tracking package with preprocessing and clustering nodes

---
 src/target_tracking/CMakeLists.txt            |  69 ++
 src/target_tracking/README.md                 |   3 +
 .../launch/target_tracking_launch.py          |  69 ++
 src/target_tracking/package.xml               |  27 +
 src/target_tracking/src/cloud_clustering.cpp  | 664 ++++++++++++++++++
 .../src/cloud_preprocessing.cpp               | 166 +++++
 6 files changed, 998 insertions(+)
 create mode 100644 src/target_tracking/CMakeLists.txt
 create mode 100644 src/target_tracking/README.md
 create mode 100644 src/target_tracking/launch/target_tracking_launch.py
 create mode 100644 src/target_tracking/package.xml
 create mode 100644 src/target_tracking/src/cloud_clustering.cpp
 create mode 100644 src/target_tracking/src/cloud_preprocessing.cpp

diff --git a/src/target_tracking/CMakeLists.txt b/src/target_tracking/CMakeLists.txt
new file mode 100644
index 0000000..751b007
--- /dev/null
+++ b/src/target_tracking/CMakeLists.txt
@@ -0,0 +1,69 @@
+cmake_minimum_required(VERSION 3.8)
+project(target_tracking)
+
+# Required packages
+find_package(ament_cmake REQUIRED)
+find_package(rclcpp REQUIRED)
+find_package(sensor_msgs REQUIRED)
+find_package(pcl_conversions REQUIRED)
+find_package(PCL REQUIRED)
+find_package(tf2 REQUIRED)
+find_package(tf2_ros REQUIRED)
+find_package(tf2_sensor_msgs REQUIRED)
+find_package(OpenCV REQUIRED)
+find_package(visualization_msgs REQUIRED)
+
+
+# Include paths
+include_directories(
+  include
+  ${PCL_INCLUDE_DIRS}
+  ${OpenCV_INCLUDE_DIRS}
+)
+
+# Node for preprocessing
+add_executable(cloud_preprocessing_node
+  src/cloud_preprocessing.cpp
+)
+
+# Dependencies for preprocessing
+ament_target_dependencies(cloud_preprocessing_node
+  rclcpp
+  sensor_msgs
+  pcl_conversions
+  tf2
+  tf2_ros
+  tf2_sensor_msgs
+)
+
+# Node for clustering
+add_executable(cloud_clustering_node
+  src/cloud_clustering.cpp
+)
+
+# Dependencies for clustering
+ament_target_dependencies(cloud_clustering_node
+  rclcpp
+  sensor_msgs
+  visualization_msgs
+  pcl_conversions
+)
+
+# PCL and OpenCV are plain CMake packages, so link them directly
+target_link_libraries(cloud_preprocessing_node ${PCL_LIBRARIES})
+target_link_libraries(cloud_clustering_node ${PCL_LIBRARIES} ${OpenCV_LIBS})
+
+# Install targets
+install(TARGETS
+  cloud_preprocessing_node
+  cloud_clustering_node
+  DESTINATION lib/${PROJECT_NAME}
+)
+
+# Install launch files
+install(
+  DIRECTORY launch/
+  DESTINATION share/${PROJECT_NAME}/launch
+)
+
+ament_package()
diff --git a/src/target_tracking/README.md b/src/target_tracking/README.md
new file mode 100644
index 0000000..852ff9a
--- /dev/null
+++ b/src/target_tracking/README.md
@@ -0,0 +1,3 @@
+# Target tracking
+
+ROS 2 implementation of a point cloud filtering node combined with a clustering node to find a target in LiDAR data.
\ No newline at end of file
diff --git a/src/target_tracking/launch/target_tracking_launch.py b/src/target_tracking/launch/target_tracking_launch.py
new file mode 100644
index 0000000..887cf11
--- /dev/null
+++ b/src/target_tracking/launch/target_tracking_launch.py
@@ -0,0 +1,69 @@
+from launch import LaunchDescription
+from launch_ros.actions import Node
+import math
+
+################### user configure parameters for ros2 start ###################
+# Topics/Frames
+frame_id = 'velodyne'
+topic_preprocessing_in = 'velodyne_points'
+topic_preprocessing_out = 'filtered_points'
+
+# Preprocessing
+x_min = 0.0
+x_max = 20.0
+z_min = -0.5
+z_max = 1.5
+tan_h_fov = math.tan(math.pi / 4)  # tangent of the ±45° horizontal half-angle
+tan_v_fov = math.tan(math.pi / 6)  # tangent of the ±30° vertical half-angle
+
+# Clustering
+z_dim_scale = 0.2
+cluster_tolerance = 0.2
+min_cluster_size = 10
+max_cluster_size = 1000
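+# z_dim_scale down-weights the z difference inside the clustering node's
+# euclidean_distance(), so the Kalman-filter data association is driven mainly
+# by x/y; cluster_tolerance is the Euclidean clustering radius in metres, and
+# min/max_cluster_size bound how many points an accepted cluster may contain.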
+################### user configure parameters for ros2 end ##################### + +cloud_preprocessing_params = [ + {"topic_in": topic_preprocessing_in}, + {"topic_out": topic_preprocessing_out}, + {"x_min": x_min}, + {"x_max": x_max}, + {"z_min": z_min}, + {"z_max": z_max}, + {"tan_h_fov": tan_h_fov}, + {"tan_v_fov": tan_v_fov} +] + +cloud_clustering_params = [ + {"topic_in": topic_preprocessing_out}, + {"frame_id": frame_id}, + {"z_dim_scale": z_dim_scale}, + {"cluster_tolerance": cluster_tolerance}, + {"min_cluster_size": min_cluster_size}, + {"max_cluster_size": max_cluster_size} +] + +def generate_launch_description(): + return LaunchDescription([ + Node( + package='target_tracking', + executable='cloud_preprocessing_node', + name='cloud_preprocessing', + parameters=cloud_preprocessing_params, + output={ + 'stdout': 'screen', + 'stderr': 'screen', + } + ), + Node( + package='target_tracking', + executable='cloud_clustering_node', + name='cloud_clustering', + parameters=cloud_clustering_params, + output={ + 'stdout': 'screen', + 'stderr': 'screen', + } + ) + ]) \ No newline at end of file diff --git a/src/target_tracking/package.xml b/src/target_tracking/package.xml new file mode 100644 index 0000000..8149adb --- /dev/null +++ b/src/target_tracking/package.xml @@ -0,0 +1,27 @@ + + + + target_tracking + 1.0.1 + Studienprojekt + Liam + Apache 2.0 + + ament_cmake + + ament_lint_auto + ament_lint_common + + + rclcpp + sensor_msgs + visualization_msgs + pcl_conversions + PCL + cv_bridge + OpenCV + + + ament_cmake + + diff --git a/src/target_tracking/src/cloud_clustering.cpp b/src/target_tracking/src/cloud_clustering.cpp new file mode 100644 index 0000000..3ebce4a --- /dev/null +++ b/src/target_tracking/src/cloud_clustering.cpp @@ -0,0 +1,664 @@ +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +using namespace std; +using namespace cv; + +rclcpp::Publisher::SharedPtr objID_pub; + +// KF init +int stateDim = 4; // [x,y,v_x,v_y]//,w,h] +int measDim = 2; // [z_x,z_y,z_w,z_h] +int ctrlDim = 0; + +std::string frame_id; +std::string topic_in; + +float z_dim_scale; +float cluster_tolerance; +int min_cluster_size; +int max_cluster_size; + +cv::KalmanFilter KF0(stateDim, measDim, ctrlDim, CV_32F); +cv::KalmanFilter KF1(stateDim, measDim, ctrlDim, CV_32F); +cv::KalmanFilter KF2(stateDim, measDim, ctrlDim, CV_32F); +cv::KalmanFilter KF3(stateDim, measDim, ctrlDim, CV_32F); +cv::KalmanFilter KF4(stateDim, measDim, ctrlDim, CV_32F); +cv::KalmanFilter KF5(stateDim, measDim, ctrlDim, CV_32F); + +rclcpp::Publisher::SharedPtr pub_cluster0; +rclcpp::Publisher::SharedPtr pub_cluster1; +rclcpp::Publisher::SharedPtr pub_cluster2; +rclcpp::Publisher::SharedPtr pub_cluster3; +rclcpp::Publisher::SharedPtr pub_cluster4; +rclcpp::Publisher::SharedPtr pub_cluster5; + +rclcpp::Publisher::SharedPtr markerPub; + +std::vector prevClusterCenters; + +cv::Mat state(stateDim, 1, CV_32F); +cv::Mat_ measurement(2, 1); + +std::vector objID; // Output of the data association using KF + // measurement.setTo(Scalar(0)); + +bool firstFrame = true; + +// calculate euclidean distance of two points +double euclidean_distance(geometry_msgs::msg::Point &p1, geometry_msgs::msg::Point &p2) { + return sqrt((p1.x - p2.x) * (p1.x - p2.x) + (p1.y - 
p2.y) * (p1.y - p2.y) + + (p1.z - p2.z) * (p1.z - p2.z) * z_dim_scale); +} +/* +//Count unique object IDs. just to make sure same ID has not been assigned to +two KF_Trackers. int countIDs(vector v) +{ + transform(v.begin(), v.end(), v.begin(), abs); // O(n) where n = +distance(v.end(), v.begin()) sort(v.begin(), v.end()); // Average case O(n log +n), worst case O(n^2) (usually implemented as quicksort. + // To guarantee worst case O(n log n) replace with make_heap, then +sort_heap. + + // Unique will take a sorted range, and move things around to get duplicated + // items to the back and returns an iterator to the end of the unique +section of the range auto unique_end = unique(v.begin(), v.end()); // Again n +comparisons return distance(unique_end, v.begin()); // Constant time for random +access iterators (like vector's) +} +*/ + +/* + +objID: vector containing the IDs of the clusters that should be associated with +each KF_Tracker objID[0] corresponds to KFT0, objID[1] corresponds to KFT1 etc. +*/ + +std::pair findIndexOfMin(std::vector> distMat) { + std::pair minIndex; + float minEl = std::numeric_limits::max(); + for (int i = 0; i < distMat.size(); i++) + for (int j = 0; j < distMat.at(0).size(); j++) { + if (distMat[i][j] < minEl) { + minEl = distMat[i][j]; + minIndex = std::make_pair(i, j); + } + } + return minIndex; +} +void KFT(const std_msgs::msg::Float32MultiArray ccs) { + + // First predict, to update the internal statePre variable + + std::vector pred{KF0.predict(), KF1.predict(), KF2.predict(), + KF3.predict(), KF4.predict(), KF5.predict()}; + + // cv::Point predictPt(prediction.at(0),prediction.at(1)); + // cout<<"Prediction 1 + // ="<(0)<<","<(1)<<"\n"; + + // Get measurements + // Extract the position of the clusters forom the multiArray. To check if the + // data coming in, check the .z (every third) coordinate and that will be 0.0 + std::vector clusterCenters; // clusterCenters + + int i = 0; + for (std::vector::const_iterator it = ccs.data.begin(); + it != ccs.data.end(); it += 3) { + geometry_msgs::msg::Point pt; + pt.x = *it; + pt.y = *(it + 1); + pt.z = *(it + 2); + + clusterCenters.push_back(pt); + } + + std::vector KFpredictions; + i = 0; + for (auto it = pred.begin(); it != pred.end(); it++) { + geometry_msgs::msg::Point pt; + pt.x = (*it).at(0); + pt.y = (*it).at(1); + pt.z = (*it).at(2); + + KFpredictions.push_back(pt); + } + + // Find the cluster that is more probable to be belonging to a given KF. + objID.clear(); // Clear the objID vector + objID.resize(6); // Allocate default elements so that [i] doesnt segfault. + // Should be done better + // Copy clusterCentres for modifying it and preventing multiple assignments of + // the same ID + std::vector copyOfClusterCenters(clusterCenters); + std::vector> distMat; + + for (int filterN = 0; filterN < 6; filterN++) { + std::vector distVec; + for (int n = 0; n < 6; n++) { + distVec.push_back( + euclidean_distance(KFpredictions[filterN], copyOfClusterCenters[n])); + } + + distMat.push_back(distVec); + /*// Based on distVec instead of distMat (global min). Has problems with the + person's leg going out of scope int + ID=std::distance(distVec.begin(),min_element(distVec.begin(),distVec.end())); + //cout<<"finterlN="< (i,j); + std::pair minIndex(findIndexOfMin(distMat)); + // 2. objID[i]=clusterCenters[j]; counter++ + objID[minIndex.first] = minIndex.second; + + // 3. distMat[i,:]=10000; distMat[:,j]=10000 + distMat[minIndex.first] = + std::vector(6, 10000.0); // Set the row to a high number. 
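+    // Blocking out the chosen row (filter) and column (cluster) before the next
+    // iteration makes this a greedy minimum-distance assignment: each filter and
+    // each cluster can be matched at most once per frame.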
+ for (int row = 0; row < distMat.size(); + row++) // set the column to a high number + { + distMat[row][minIndex.second] = 10000.0; + } + // 4. if(counter<6) got to 1. + //cout << "clusterCount=" << clusterCount << "\n"; + } + + // cout<<"Got object IDs"<<"\n"; + // countIDs(objID);// for verif/corner cases + + // display objIDs + /* DEBUG + cout<<"objID= "; + for(auto it=objID.begin();it!=objID.end();it++) + cout<<*it<<" ,"; + cout<<"\n"; + */ + + visualization_msgs::msg::MarkerArray clusterMarkers; + + for (int i = 0; i < 6; i++) { + visualization_msgs::msg::Marker m; + + m.id = i; + m.type = visualization_msgs::msg::Marker::CUBE; + m.header.frame_id = frame_id; + m.scale.x = 0.3; + m.scale.y = 0.3; + m.scale.z = 0.3; + m.action = visualization_msgs::msg::Marker::ADD; + m.color.a = 1.0; + m.color.r = i % 2 ? 1 : 0; + m.color.g = i % 3 ? 1 : 0; + m.color.b = i % 4 ? 1 : 0; + + // geometry_msgs::msg::Point clusterC(clusterCenters.at(objID[i])); + geometry_msgs::msg::Point clusterC(KFpredictions[i]); + m.pose.position.x = clusterC.x; + m.pose.position.y = clusterC.y; + m.pose.position.z = clusterC.z; + + clusterMarkers.markers.push_back(m); + } + + prevClusterCenters = clusterCenters; + + markerPub->publish(clusterMarkers); + + std_msgs::msg::Int32MultiArray obj_id; + for (auto it = objID.begin(); it != objID.end(); it++) + obj_id.data.push_back(*it); + // Publish the object IDs + objID_pub->publish(obj_id); + // convert clusterCenters from geometry_msgs::msg::Point to floats + std::vector> cc; + for (int i = 0; i < 6; i++) { + vector pt; + pt.push_back(clusterCenters[objID[i]].x); + pt.push_back(clusterCenters[objID[i]].y); + pt.push_back(clusterCenters[objID[i]].z); + + cc.push_back(pt); + } + // cout<<"cc[5][0]="<(0, 0) == 0.0f || meas0Mat.at(1, 0) == 0.0f)) + Mat estimated0 = KF0.correct(meas0Mat); + if (!(meas1[0] == 0.0f || meas1[1] == 0.0f)) + Mat estimated1 = KF1.correct(meas1Mat); + if (!(meas2[0] == 0.0f || meas2[1] == 0.0f)) + Mat estimated2 = KF2.correct(meas2Mat); + if (!(meas3[0] == 0.0f || meas3[1] == 0.0f)) + Mat estimated3 = KF3.correct(meas3Mat); + if (!(meas4[0] == 0.0f || meas4[1] == 0.0f)) + Mat estimated4 = KF4.correct(meas4Mat); + if (!(meas5[0] == 0.0f || meas5[1] == 0.0f)) + Mat estimated5 = KF5.correct(meas5Mat); + + // Publish the point clouds belonging to each clusters + + // cout<<"estimate="<(0)<<","<(1)<<"\n"; + // Point statePt(estimated.at(0),estimated.at(1)); + // cout<<"DONE KF_TRACKER\n"; +} +void publish_cloud( + rclcpp::Publisher::SharedPtr pub, + pcl::PointCloud::Ptr cluster, + rclcpp::Node::SharedPtr node) +{ + // Create a PointCloud2 message + auto clustermsg = std::make_shared(); + + // Convert PCL cloud to ROS 2 message + pcl::toROSMsg(*cluster, *clustermsg); + + // Set header info + clustermsg->header.frame_id = frame_id; + clustermsg->header.stamp = node->now(); + + // Publish the message + pub->publish(*clustermsg); +} + +void parse_cloud(const sensor_msgs::msg::PointCloud2::SharedPtr input, + std::vector::Ptr + ,float>>& cluster_vec, + std::vector& clusterCentroids + ) { + pcl::PointCloud::Ptr input_a(new pcl::PointCloud); + pcl::fromROSMsg(*input, *input_a); + + // Apply filters + pcl::PointCloud::Ptr fov_filtered(new pcl::PointCloud); + fov_filtered->reserve(input_a->size()); + + for (const auto& point : input_a->points) { + if (point.x < 0 || point.x > 40) continue; + if (isnan(point.x) || isnan(point.y) || isnan(point.z)) continue; + if (point.z < -0.5 || point.z > 1.5) continue; + fov_filtered->points.push_back(point); // emplace_back 
also works + } + + if (fov_filtered->empty()) return; // nothing to cluster + + pcl::PointCloud::Ptr input_cloud = fov_filtered; + + // Setup KdTree + pcl::search::KdTree::Ptr tree(new pcl::search::KdTree); + tree->setInputCloud(input_cloud); + + std::vector cluster_indices; + pcl::EuclideanClusterExtraction ec; + ec.setClusterTolerance(cluster_tolerance); + ec.setMinClusterSize(min_cluster_size); + ec.setMaxClusterSize(max_cluster_size); + ec.setSearchMethod(tree); + ec.setInputCloud(input_cloud); + /* Extract the clusters out of pc and save indices in cluster_indices.*/ + ec.extract(cluster_indices); + + std::vector::const_iterator it; + std::vector::const_iterator pit; + + for (it = cluster_indices.begin(); it != cluster_indices.end(); ++it) { + + pcl::PointCloud::Ptr cloud_cluster( + new pcl::PointCloud); + float x = 0.0; + float y = 0.0; + int numPts = 0; + float intensity_sum = 0.0; + + for (pit = it->indices.begin(); pit != it->indices.end(); pit++) { + + cloud_cluster->points.push_back(input_cloud->points[*pit]); + x += input_cloud->points[*pit].x; + y += input_cloud->points[*pit].y; + numPts++; + + intensity_sum += input_cloud->points[*pit].intensity; + // dist_this_point = pcl::geometry::distance(input_cloud->points[*pit], + // origin); + // mindist_this_cluster = std::min(dist_this_point, + // mindist_this_cluster); + } + + if (numPts == 0) continue; + + pcl::PointXYZI centroid; + centroid.x = x / numPts; + centroid.y = y / numPts; + centroid.z = 0.0; + + cluster_vec.push_back(std::make_pair(cloud_cluster, intensity_sum / numPts)); + + // Get the centroid of the cluster + clusterCentroids.push_back(centroid); + } + + // Ensure at least 6 clusters exist to publish (later clusters may be empty) + while (cluster_vec.size() < 6) { + pcl::PointCloud::Ptr empty_cluster( + new pcl::PointCloud); + + pcl::PointXYZI pt; + pt.x = pt.y = pt.z = pt.intensity = 0.0f; + + empty_cluster->points.push_back(pt); + + pcl::PointXYZI centroid; + centroid.x = 0.0; + centroid.y = 0.0; + centroid.z = 0.0; + centroid.intensity = 0.0; + + cluster_vec.push_back(std::make_pair(empty_cluster, 0.0f)); + + } + + while (clusterCentroids.size() < 6) { + pcl::PointXYZI centroid; + centroid.x = 0.0; + centroid.y = 0.0; + centroid.z = 0.0; + centroid.intensity = 0.0; + + clusterCentroids.push_back(centroid); + } +} + +void cloud_cb(const sensor_msgs::msg::PointCloud2::SharedPtr input, rclcpp::Node::SharedPtr node) + +{ + // Vector of cluster pointclouds + std::vector::Ptr,float>> cluster_vec; + // Cluster centroids + std::vector clusterCentroids; + + if (firstFrame) { + // If this is the first frame, initialize kalman filters for the clustered objects + // Initialize 6 Kalman Filters; Assuming 6 max objects in the dataset. 
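+    // The six-filter limit matches parse_cloud(), which pads the cluster list to
+    // six entries, so every filter always has a (possibly empty) candidate measurement.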
+ // Could be made generic by creating a Kalman Filter only when a new object + // is detected + + float dvx = 0.01f; // 1.0 + float dvy = 0.01f; // 1.0 + float dx = 1.0f; + float dy = 1.0f; + KF0.transitionMatrix = (Mat_(4, 4) << dx, 0, 1, 0, 0, dy, 0, 1, 0, 0, + dvx, 0, 0, 0, 0, dvy); + KF1.transitionMatrix = (Mat_(4, 4) << dx, 0, 1, 0, 0, dy, 0, 1, 0, 0, + dvx, 0, 0, 0, 0, dvy); + KF2.transitionMatrix = (Mat_(4, 4) << dx, 0, 1, 0, 0, dy, 0, 1, 0, 0, + dvx, 0, 0, 0, 0, dvy); + KF3.transitionMatrix = (Mat_(4, 4) << dx, 0, 1, 0, 0, dy, 0, 1, 0, 0, + dvx, 0, 0, 0, 0, dvy); + KF4.transitionMatrix = (Mat_(4, 4) << dx, 0, 1, 0, 0, dy, 0, 1, 0, 0, + dvx, 0, 0, 0, 0, dvy); + KF5.transitionMatrix = (Mat_(4, 4) << dx, 0, 1, 0, 0, dy, 0, 1, 0, 0, + dvx, 0, 0, 0, 0, dvy); + + cv::setIdentity(KF0.measurementMatrix); + cv::setIdentity(KF1.measurementMatrix); + cv::setIdentity(KF2.measurementMatrix); + cv::setIdentity(KF3.measurementMatrix); + cv::setIdentity(KF4.measurementMatrix); + cv::setIdentity(KF5.measurementMatrix); + // Process Noise Covariance Matrix Q + // [ Ex 0 0 0 0 0 ] + // [ 0 Ey 0 0 0 0 ] + // [ 0 0 Ev_x 0 0 0 ] + // [ 0 0 0 1 Ev_y 0 ] + //// [ 0 0 0 0 1 Ew ] + //// [ 0 0 0 0 0 Eh ] + float sigmaP = 0.01; + float sigmaQ = 0.1; + setIdentity(KF0.processNoiseCov, Scalar::all(sigmaP)); + setIdentity(KF1.processNoiseCov, Scalar::all(sigmaP)); + setIdentity(KF2.processNoiseCov, Scalar::all(sigmaP)); + setIdentity(KF3.processNoiseCov, Scalar::all(sigmaP)); + setIdentity(KF4.processNoiseCov, Scalar::all(sigmaP)); + setIdentity(KF5.processNoiseCov, Scalar::all(sigmaP)); + // Meas noise cov matrix R + cv::setIdentity(KF0.measurementNoiseCov, cv::Scalar(sigmaQ)); // 1e-1 + cv::setIdentity(KF1.measurementNoiseCov, cv::Scalar(sigmaQ)); + cv::setIdentity(KF2.measurementNoiseCov, cv::Scalar(sigmaQ)); + cv::setIdentity(KF3.measurementNoiseCov, cv::Scalar(sigmaQ)); + cv::setIdentity(KF4.measurementNoiseCov, cv::Scalar(sigmaQ)); + cv::setIdentity(KF5.measurementNoiseCov, cv::Scalar(sigmaQ)); + + // Process the point cloud + parse_cloud(input, cluster_vec, clusterCentroids); + + // Set initial state + KF0.statePre.at(0) = clusterCentroids.at(0).x; + KF0.statePre.at(1) = clusterCentroids.at(0).y; + KF0.statePre.at(2) = 0; // initial v_x + KF0.statePre.at(3) = 0; // initial v_y + + // Set initial state + KF1.statePre.at(0) = clusterCentroids.at(1).x; + KF1.statePre.at(1) = clusterCentroids.at(1).y; + KF1.statePre.at(2) = 0; // initial v_x + KF1.statePre.at(3) = 0; // initial v_y + + // Set initial state + KF2.statePre.at(0) = clusterCentroids.at(2).x; + KF2.statePre.at(1) = clusterCentroids.at(2).y; + KF2.statePre.at(2) = 0; // initial v_x + KF2.statePre.at(3) = 0; // initial v_y + + // Set initial state + KF3.statePre.at(0) = clusterCentroids.at(3).x; + KF3.statePre.at(1) = clusterCentroids.at(3).y; + KF3.statePre.at(2) = 0; // initial v_x + KF3.statePre.at(3) = 0; // initial v_y + + // Set initial state + KF4.statePre.at(0) = clusterCentroids.at(4).x; + KF4.statePre.at(1) = clusterCentroids.at(4).y; + KF4.statePre.at(2) = 0; // initial v_x + KF4.statePre.at(3) = 0; // initial v_y + + // Set initial state + KF5.statePre.at(0) = clusterCentroids.at(5).x; + KF5.statePre.at(1) = clusterCentroids.at(5).y; + KF5.statePre.at(2) = 0; // initial v_x + KF5.statePre.at(3) = 0; // initial v_y + + firstFrame = false; + + for (int i = 0; i < 6; i++) { + geometry_msgs::msg::Point pt; + pt.x = clusterCentroids.at(i).x; + pt.y = clusterCentroids.at(i).y; + prevClusterCenters.push_back(pt); + } + } + + else { + 
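+    // Subsequent frames: extract the clusters, pack their centroids into a
+    // Float32MultiArray, run the Kalman-filter data association (KFT), then
+    // publish each associated cluster on its own cluster0..cluster5 topic.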
parse_cloud(input, cluster_vec, clusterCentroids); + + std_msgs::msg::Float32MultiArray cc; + for (int i = 0; i < 6; i++) { + cc.data.push_back(clusterCentroids.at(i).x); + cc.data.push_back(clusterCentroids.at(i).y); + cc.data.push_back(clusterCentroids.at(i).z); + } + + // cc_pos->publish(cc);// Publish cluster mid-points. + KFT(cc); + int i = 0; + + std::sort(objID.begin(), objID.end(), [cluster_vec](const int &a, const int &b) { + return cluster_vec[a].second < cluster_vec[b].second; + }); + + for (auto it = objID.begin(); it != objID.end(); it++) { + switch (i) { + case 0: { + + publish_cloud(pub_cluster0, cluster_vec[*it].first, node); + i++; + break; + } + case 1: { + publish_cloud(pub_cluster1, cluster_vec[*it].first, node); + i++; + break; + } + case 2: { + publish_cloud(pub_cluster2, cluster_vec[*it].first, node); + i++; + break; + } + case 3: { + publish_cloud(pub_cluster3, cluster_vec[*it].first, node); + i++; + break; + } + case 4: { + publish_cloud(pub_cluster4, cluster_vec[*it].first, node); + i++; + break; + } + + case 5: { + publish_cloud(pub_cluster5, cluster_vec[*it].first, node); + i++; + break; + } + default: + break; + } + } + } +} + +int main(int argc, char **argv) { + // ROS init + rclcpp::init(argc, argv); + auto node = rclcpp::Node::make_shared("cloud_clustering"); + + //Topic parameters + node->declare_parameter("topic_in", "!defaultTopic!"); + node->declare_parameter("frame_id", "!defaultFrameId!"); + + //Cluster parameters + node->declare_parameter("z_dim_scale", 0); + node->declare_parameter("cluster_tolerance", 0.0f); + node->declare_parameter("min_cluster_size", 0); + node->declare_parameter("max_cluster_size", 0); + + node->get_parameter("topic_in", topic_in); + node->get_parameter("frame_id", frame_id); + node->get_parameter("z_dim_scale", z_dim_scale); + node->get_parameter("cluster_tolerance", cluster_tolerance); + node->get_parameter("min_cluster_size", min_cluster_size); + node->get_parameter("max_cluster_size", max_cluster_size); + + // Initialize publishers + objID_pub = node->create_publisher("object_ids", 10); + pub_cluster0 = node->create_publisher("cluster0", 10); + pub_cluster1 = node->create_publisher("cluster1", 10); + pub_cluster2 = node->create_publisher("cluster2", 10); + pub_cluster3 = node->create_publisher("cluster3", 10); + pub_cluster4 = node->create_publisher("cluster4", 10); + pub_cluster5 = node->create_publisher("cluster5", 10); + markerPub = node->create_publisher("markers", 10); + + // Publishers to publish the state of the objects (pos and vel) + // objState1=nh.advertise ("obj_1",1); + + cout << "About to setup callback\n"; + + // Create a ROS subscriber for the input point cloud + auto sub = node->create_subscription( + topic_in, + 10, // queue size + [node](sensor_msgs::msg::PointCloud2::SharedPtr msg) { + cloud_cb(msg, node); + }); + + // Publishers + objID_pub = node->create_publisher("obj_id", 10); + markerPub = node->create_publisher("viz", 10); + + cout << "Started clustering node with parameters:\n" + << "topic_in: " << topic_in << "\n" + << "frame_id: " << frame_id << "\n" + << "z_dim_scale: " << z_dim_scale << "\n" + << "cluster_tolerance: " << cluster_tolerance << "\n" + << "min_cluster_size: " << min_cluster_size << "\n" + << "max_cluster_size: " << max_cluster_size << "\n"; + + // Example if you still need cluster centers publisher: + // cc_pos = node->create_publisher("ccs", 10); + + /* Point cloud clustering + */ + + rclcpp::spin(node); + rclcpp::shutdown(); + return 0; +} diff --git 
a/src/target_tracking/src/cloud_preprocessing.cpp b/src/target_tracking/src/cloud_preprocessing.cpp new file mode 100644 index 0000000..821adff --- /dev/null +++ b/src/target_tracking/src/cloud_preprocessing.cpp @@ -0,0 +1,166 @@ +#include +#include + +#include +#include +#include +#include + +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +class CloudFilterNode : public rclcpp::Node +{ + //Topic parameters + std::string topic_in; + std::string topic_out; + + // Filter parameters + float x_min; + float x_max; + float z_min; + float z_max; + float tan_h_fov; + float tan_v_fov; + +public: + CloudFilterNode() + : Node("cloud_preprocessing"), tf_buffer_(this->get_clock()), tf_listener_(tf_buffer_) + { + + //Topic parameters + this->declare_parameter("topic_in", "test"); + this->declare_parameter("topic_out", "test2"); + + //Filter parameters + this->declare_parameter("x_min", 0.0f); + this->declare_parameter("x_max", 0.0f); + this->declare_parameter("z_min", 0.0f); + this->declare_parameter("z_max", 0.0f); + this->declare_parameter("tan_h_fov", 0.0f); // ±45° + this->declare_parameter("tan_v_fov", 0.0f); // ±30° + + this->get_parameter("topic_in", topic_in); + this->get_parameter("topic_out", topic_out); + + this->get_parameter("x_min", x_min); + this->get_parameter("x_max", x_max); + this->get_parameter("z_min", z_min); + this->get_parameter("z_max", z_max); + this->get_parameter("tan_h_fov", tan_h_fov); + this->get_parameter("tan_v_fov", tan_v_fov); + + RCLCPP_INFO(this->get_logger(), "Starting filter node with parameters:\n" + "topic_in: %s\n" + "topic_out: %s\n" + "x_min: %f\n" + "x_max: %f\n" + "z_min: %f\n" + "z_max: %f\n" + "tan_h_fov: %f\n" + "tan_v_fov: %f" + , topic_in.c_str(), topic_out.c_str(), x_min, x_max, z_min, z_max, tan_h_fov, tan_v_fov + ); + + subscription_ = this->create_subscription( + topic_in, rclcpp::SensorDataQoS(), + std::bind(&CloudFilterNode::cloud_callback, this, std::placeholders::_1)); + + filtered_publisher_ = this->create_publisher(topic_out, 10); + } + +private: + void cloud_callback(const sensor_msgs::msg::PointCloud2::SharedPtr msg) + { + // Convert to PCL + pcl::PointCloud::Ptr input(new pcl::PointCloud); + pcl::fromROSMsg(*msg, *input); + + pcl::PointCloud::Ptr fov_filtered(new pcl::PointCloud); + fov_filtered->reserve(input->size()); + + for (const auto& point : input->points) + { + if (point.x < x_min || point.x > x_max) continue; + if (isnan(point.x) || isnan(point.y) || isnan(point.z)) continue; + + float inv_x = 1.0f / point.x; + if (std::abs(point.y * inv_x) > tan_h_fov) continue; + if (std::abs(point.z * inv_x) > tan_v_fov) continue; + if (point.z < z_min || point.z > z_max) continue; + fov_filtered->emplace_back(point); + } + + // Apply voxel grid filter + pcl::VoxelGrid voxel_filter; + voxel_filter.setInputCloud(fov_filtered); + voxel_filter.setLeafSize(0.03f, 0.03f, 0.03f); // Adjust as needed + + pcl::PointCloud::Ptr voxel_filtered(new pcl::PointCloud); + voxel_filter.filter(*voxel_filtered); + + //Apply ransac + pcl::ModelCoefficients::Ptr coefficients (new pcl::ModelCoefficients); + pcl::PointIndices::Ptr inliers (new pcl::PointIndices); + // Create the segmentation object + pcl::SACSegmentation seg; + // Optional + seg.setOptimizeCoefficients(true); + // Mandatory + seg.setModelType(pcl::SACMODEL_PLANE); + seg.setMethodType(pcl::SAC_RANSAC); + seg.setDistanceThreshold(0.01); + + seg.setInputCloud(voxel_filtered); + seg.segment (*inliers, *coefficients); + + if 
(inliers->indices.size () == 0) + { + RCLCPP_INFO(this->get_logger(), "Oh oh. No inliers"); + return; + } + + //Select remaining points + pcl::ExtractIndices extract; + extract.setInputCloud(voxel_filtered); + extract.setIndices(inliers); + extract.setNegative(true); + extract.filter(*voxel_filtered); + + // Convert and publish + sensor_msgs::msg::PointCloud2 out_msg; + pcl::toROSMsg(*voxel_filtered, out_msg); + out_msg.header.stamp = this->get_clock()->now(); + out_msg.header.frame_id = msg->header.frame_id; + filtered_publisher_->publish(out_msg); + + RCLCPP_INFO(this->get_logger(), "Filtered %zu -> %zu points", input->points.size(), voxel_filtered->points.size()); + } + + tf2_ros::Buffer tf_buffer_; + tf2_ros::TransformListener tf_listener_; + + rclcpp::Subscription::SharedPtr subscription_; + + rclcpp::Publisher::SharedPtr filtered_publisher_; + rclcpp::Publisher::SharedPtr original_publisher_; +}; + +int main(int argc, char * argv[]) +{ + srand((unsigned int) time (NULL)); + rclcpp::init(argc, argv); + rclcpp::spin(std::make_shared()); + rclcpp::shutdown(); + return 0; +} \ No newline at end of file
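cloud_clustering.cpp notes that the fixed bank of six Kalman filters "could be made generic by creating a Kalman Filter only when a new object is detected". The following is a minimal sketch of that idea, not part of the patch: the Tracker struct, makeTracker()/spawnTrackers() names and the 1 m gating radius are illustrative assumptions. The transition matrix and noise settings mirror the values used in cloud_clustering.cpp, and the existing association/correction step (KFT) would still have to run over the resulting filter list.

#include <opencv2/video/tracking.hpp>
#include <vector>

struct Tracker {
  cv::KalmanFilter kf;
};

// Create a constant-velocity filter initialised at (x, y); the transition
// matrix (dx = dy = 1, dvx = dvy = 0.01) and the process/measurement noise
// follow the settings in cloud_clustering.cpp.
inline Tracker makeTracker(float x, float y) {
  Tracker t;
  t.kf = cv::KalmanFilter(4, 2, 0, CV_32F);  // state [x y vx vy], measurement [x y]
  t.kf.transitionMatrix = (cv::Mat_<float>(4, 4) << 1, 0, 1,     0,
                                                    0, 1, 0,     1,
                                                    0, 0, 0.01f, 0,
                                                    0, 0, 0,     0.01f);
  cv::setIdentity(t.kf.measurementMatrix);
  cv::setIdentity(t.kf.processNoiseCov, cv::Scalar::all(0.01));
  cv::setIdentity(t.kf.measurementNoiseCov, cv::Scalar::all(0.1));
  t.kf.statePre.at<float>(0) = x;
  t.kf.statePre.at<float>(1) = y;
  t.kf.statePre.at<float>(2) = 0.f;  // initial v_x
  t.kf.statePre.at<float>(3) = 0.f;  // initial v_y
  return t;
}

// Spawn a new tracker for every centroid that no existing prediction gates in;
// existing filters keep being corrected by the association step as before.
inline void spawnTrackers(std::vector<Tracker>& trackers,
                          const std::vector<cv::Point2f>& centroids,
                          float gate = 1.0f) {
  // Predict once per tracker for this frame.
  std::vector<cv::Point2f> predictions;
  predictions.reserve(trackers.size());
  for (auto& t : trackers) {
    const cv::Mat p = t.kf.predict();
    predictions.emplace_back(p.at<float>(0), p.at<float>(1));
  }
  for (const auto& c : centroids) {
    bool matched = false;
    for (const auto& p : predictions) {
      const float dx = p.x - c.x;
      const float dy = p.y - c.y;
      if (dx * dx + dy * dy < gate * gate) { matched = true; break; }
    }
    if (!matched) {
      trackers.push_back(makeTracker(c.x, c.y));
      predictions.push_back(c);  // a second nearby centroid should not spawn a duplicate
    }
  }
}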