Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add lidar centerpoint package #71

Closed
wants to merge 9 commits into from
Closed
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
port lidar_centerpoint (#527)
* delete COLCON_IGNORE

* change autoware_perception_msgs to autoware_auto_perception_msgs

* use existence_probability

* use orientation_availability

* add a note about existence_probability
  • Loading branch information
yukke42 authored and YoheiMishina committed Dec 1, 2021
commit d1c45c5a14fe50e109c3b701d88ac39a4aa4af56
Empty file.
2 changes: 2 additions & 0 deletions perception/lidar_centerpoint/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ This is a 3D object detection implementation of CenterPoint supporting TensorRT

The models are trained with [OpenPCDet](https://github.com/tier4/OpenPCDet)

The `object.existence_probability` field stores the classification confidence value from the DNN, not a calibrated probability.

## Parameters

### Input Topics
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,10 @@
#include <pointcloud_densification.hpp>
#include <rclcpp/rclcpp.hpp>

#include <autoware_perception_msgs/msg/dynamic_object_with_feature_array.hpp>
#include <autoware_auto_perception_msgs/msg/detected_object_kinematics.hpp>
#include <autoware_auto_perception_msgs/msg/detected_objects.hpp>
#include <autoware_auto_perception_msgs/msg/object_classification.hpp>
#include <autoware_auto_perception_msgs/msg/shape.hpp>
#include <sensor_msgs/msg/point_cloud2.hpp>

#include <memory>
Expand All @@ -37,8 +40,7 @@ class LidarCenterPointNode : public rclcpp::Node
void pointCloudCallback(const sensor_msgs::msg::PointCloud2::ConstSharedPtr input_pointcloud_msg);

rclcpp::Subscription<sensor_msgs::msg::PointCloud2>::SharedPtr pointcloud_sub_;
rclcpp::Publisher<autoware_perception_msgs::msg::DynamicObjectWithFeatureArray>::SharedPtr
objects_pub_;
rclcpp::Publisher<autoware_auto_perception_msgs::msg::DetectedObjects>::SharedPtr objects_pub_;
rclcpp::Publisher<sensor_msgs::msg::PointCloud2>::SharedPtr pointcloud_pub_;

float score_threshold_{0.0};
Expand Down
2 changes: 1 addition & 1 deletion perception/lidar_centerpoint/package.xml
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@

<buildtool_depend>ament_cmake_auto</buildtool_depend>

<depend>autoware_perception_msgs</depend>
<depend>autoware_auto_perception_msgs</depend>
<depend>autoware_utils</depend>
<depend>pcl_ros</depend>
<depend>rclcpp</depend>
Expand Down
71 changes: 37 additions & 34 deletions perception/lidar_centerpoint/src/node.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -52,9 +52,8 @@ LidarCenterPointNode::LidarCenterPointNode(const rclcpp::NodeOptions & node_opti
pointcloud_sub_ = this->create_subscription<sensor_msgs::msg::PointCloud2>(
"~/input/pointcloud", rclcpp::SensorDataQoS{}.keep_last(1),
std::bind(&LidarCenterPointNode::pointCloudCallback, this, std::placeholders::_1));
objects_pub_ =
this->create_publisher<autoware_perception_msgs::msg::DynamicObjectWithFeatureArray>(
"~/output/objects", rclcpp::QoS{1});
objects_pub_ = this->create_publisher<autoware_auto_perception_msgs::msg::DetectedObjects>(
"~/output/objects", rclcpp::QoS{1});
pointcloud_pub_ = this->create_publisher<sensor_msgs::msg::PointCloud2>(
"~/debug/pointcloud_densification", rclcpp::SensorDataQoS{}.keep_last(1));
}
Expand All @@ -73,7 +72,7 @@ void LidarCenterPointNode::pointCloudCallback(
auto stacked_pointcloud_msg = densification_ptr_->stackPointCloud(*input_pointcloud_msg);
std::vector<float> boxes3d_vec = detector_ptr_->detect(stacked_pointcloud_msg);

autoware_perception_msgs::msg::DynamicObjectWithFeatureArray output_msg;
autoware_auto_perception_msgs::msg::DetectedObjects output_msg;
output_msg.header = input_pointcloud_msg->header;
for (size_t obj_i = 0; obj_i < boxes3d_vec.size() / Config::num_box_features; obj_i++) {
float score = boxes3d_vec[obj_i * Config::num_box_features + 0];
Expand All @@ -90,46 +89,50 @@ void LidarCenterPointNode::pointCloudCallback(
float h = boxes3d_vec[obj_i * Config::num_box_features + 7];
float yaw = boxes3d_vec[obj_i * Config::num_box_features + 8];

autoware_perception_msgs::msg::DynamicObjectWithFeature feature_obj;
autoware_perception_msgs::msg::Semantic semantic;
autoware_auto_perception_msgs::msg::DetectedObject obj;
// TODO(yukke42): the value of classification confidence of DNN, not probability.
obj.existence_probability = score;

autoware_auto_perception_msgs::msg::ObjectClassification classification;
classification.probability = 1.0f;
switch (class_id) {
case 0:
semantic.type = autoware_perception_msgs::msg::Semantic::CAR;
classification.label = autoware_auto_perception_msgs::msg::ObjectClassification::CAR;

// Note: object size is referred from multi_object_tracker
if ((w * l > 2.2 * 5.5) && (w * l <= 2.5 * 7.9)) {
classification.label = autoware_auto_perception_msgs::msg::ObjectClassification::TRUCK;
} else if (w * l > 2.5 * 7.9) {
classification.label = autoware_auto_perception_msgs::msg::ObjectClassification::BUS;
}
obj.kinematics.orientation_availability =
autoware_auto_perception_msgs::msg::DetectedObjectKinematics::SIGN_UNKNOWN;
break;
case 1:
semantic.type = autoware_perception_msgs::msg::Semantic::PEDESTRIAN;
classification.label = autoware_auto_perception_msgs::msg::ObjectClassification::PEDESTRIAN;
obj.kinematics.orientation_availability =
autoware_auto_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE;
break;
case 2:
semantic.type = autoware_perception_msgs::msg::Semantic::BICYCLE;
classification.label = autoware_auto_perception_msgs::msg::ObjectClassification::BICYCLE;
obj.kinematics.orientation_availability =
autoware_auto_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE;
break;
default:
semantic.type = autoware_perception_msgs::msg::Semantic::UNKNOWN;
}
semantic.confidence = score;
feature_obj.object.semantic = semantic;

geometry_msgs::msg::Pose pose;
pose.position = autoware_utils::createPoint(x, y, z);
pose.orientation = autoware_utils::createQuaternionFromYaw(yaw);
feature_obj.object.state.pose_covariance.pose = pose;

autoware_perception_msgs::msg::Shape shape;
shape.type = autoware_perception_msgs::msg::Shape::BOUNDING_BOX;
shape.dimensions.x = l;
shape.dimensions.y = w;
shape.dimensions.z = h;
feature_obj.object.shape = shape;

// Note: object size is referred from multi_object_tracker
if ((w * l > 2.2 * 5.5) && (w * l <= 2.5 * 7.9)) {
feature_obj.object.semantic.type = semantic.type =
autoware_perception_msgs::msg::Semantic::TRUCK;
} else if (w * l > 2.5 * 7.9) {
feature_obj.object.semantic.type = semantic.type =
autoware_perception_msgs::msg::Semantic::BUS;
classification.label = autoware_auto_perception_msgs::msg::ObjectClassification::UNKNOWN;
obj.kinematics.orientation_availability =
autoware_auto_perception_msgs::msg::DetectedObjectKinematics::UNAVAILABLE;
}
classification.probability = score;
obj.classification.emplace_back(classification);

obj.kinematics.pose_with_covariance.pose.position = autoware_utils::createPoint(x, y, z);
obj.kinematics.pose_with_covariance.pose.orientation =
autoware_utils::createQuaternionFromYaw(yaw);
obj.shape.type = autoware_auto_perception_msgs::msg::Shape::BOUNDING_BOX;
obj.shape.dimensions = autoware_utils::createTranslation(l, w, h);

output_msg.feature_objects.emplace_back(feature_obj);
output_msg.objects.emplace_back(obj);
}

if (objects_sub_count > 0) {
Expand Down