Add body tracking data publishing (#50)

Stephan Mävers 2019-09-10 23:40:30 +02:00 committed by Stuart Alldritt
Parent 1a106d466b
Commit f06d9377dd
6 changed files with 140 additions and 7 deletions

View file

@@ -21,7 +21,6 @@ The camera is fully configurable using a variety of options which can be specifi
However, this node does ***not*** expose all the sensor data from the Azure Kinect Developer Kit hardware. It does not provide access to:
- Body tracking data
- Microphone array
For more information about how to use the node, please see the [usage guide](docs/usage.md).

View file

@@ -22,6 +22,7 @@ The node accepts the following parameters:
- `rgb_point_cloud` (bool) : Defaults to `false`. If this parameter is set to `true`, the node will generate a sensor_msgs::PointCloud2 message from the depth camera data and colorize it using the color camera data. This requires that the `point_cloud` parameter be `true`, and the `color_enabled` parameter be `true`.
- `recording_file` (string) : No default value. If this parameter contains a valid absolute path to a k4a recording file, the node will use the playback API with this file instead of opening a device.
- `recording_loop_enabled` (bool) : Defaults to `false`. If this parameter is set to `true`, the node will rewind the recording file to the beginning after reaching the last frame. Otherwise, the node will stop after reaching the end of the recording file.
- `body_tracking_enabled` (bool) : Defaults to `false`. If this parameter is set to `true`, the node will generate visualization_msgs::MarkerArray messages for the body tracking data. This requires that the `depth_enabled` parameter be set to `true` and that the Azure Kinect Body Tracking SDK be installed.
#### Parameter Restrictions
@@ -49,6 +50,7 @@ The node emits a variety of topics into its namespace.
- `ir/image_raw` (`sensor_msgs::Image`) : The raw infrared image from the depth camera sensor. In most depth modes, this image will be illuminated by the infrared illuminator built into the Azure Kinect DK. In `PASSIVE_IR` mode, this image will not be illuminated by the Azure Kinect DK.
- `ir/camera_info` (`sensor_msgs::CameraInfo`) : Calibration information for the infrared camera, converted from the Azure Kinect Sensor SDK format. Since the depth camera and infrared camera are physically the same camera, this `camera_info` is a copy of the camera info published for the depth camera.
- `imu` (`sensor_msgs::Imu`) : The intrinsics-corrected IMU sensor stream, provided by the Azure Kinect Sensor SDK. The sensor SDK automatically corrects for IMU intrinsics before sensor data is emitted.
- `body_tracking_data` (`visualization_msgs::MarkerArray`) : Topic for receiving body tracking data. Each message contains all joints for all bodies detected in the most recent depth image. The markers are grouped by [body id](https://microsoft.github.io/Azure-Kinect-Body-Tracking/release/0.9.x/structk4abt__body__t.html#a38fed6c7125f92b41165ffe2e1da9dd4), and the joints appear in the same order as the [joint enum of the Body Tracking SDK](https://microsoft.github.io/Azure-Kinect-Body-Tracking/release/0.9.x/group__btenums.html#ga5fe6fa921525a37dec7175c91c473781). The `id` field of a marker is computed as `body_id * 100 + joint_index`, where `body_id` is the body id reported by the Body Tracking SDK and `joint_index` is the joint's enum value. Subscribers can recover the body id as `floor(marker.id / 100)` and the joint index as `marker.id % 100`, as shown in the sketch below.
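A minimal sketch of a standalone subscriber applying this decoding (hypothetical listener node, assuming ROS 1 and the topic name above):

```cpp
#include <ros/ros.h>
#include <visualization_msgs/MarkerArray.h>

// Decode body id and joint index from each marker id published by the driver.
void bodyTrackingCallback(const visualization_msgs::MarkerArrayConstPtr& msg)
{
  for (const visualization_msgs::Marker& marker : msg->markers)
  {
    const int body_id = marker.id / 100;      // floor(marker.id / 100)
    const int joint_index = marker.id % 100;  // joint enum value from the Body Tracking SDK
    ROS_INFO("body %d, joint %d at (%.2f, %.2f, %.2f)", body_id, joint_index,
             marker.pose.position.x, marker.pose.position.y, marker.pose.position.z);
  }
}

int main(int argc, char** argv)
{
  ros::init(argc, argv, "body_tracking_listener");
  ros::NodeHandle node;
  ros::Subscriber subscriber = node.subscribe("body_tracking_data", 1, bodyTrackingCallback);
  ros::spin();
  return 0;
}
```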
## Azure Kinect Developer Kit Calibration

View file

@@ -23,6 +23,11 @@
#include <sensor_msgs/Temperature.h>
#include <image_transport/image_transport.h>
#if defined(K4A_BODY_TRACKING)
#include <k4abt.hpp>
#include <visualization_msgs/MarkerArray.h>
#endif
// Project headers
//
#include "azure_kinect_ros_driver/k4a_ros_device_params.h"
@@ -58,6 +63,10 @@ class K4AROSDevice
k4a_result_t getIrFrame(const k4a::capture &capture, sensor_msgs::ImagePtr ir_image);
#if defined(K4A_BODY_TRACKING)
k4a_result_t getBodyMarker(const k4abt_body_t& body, visualization_msgs::MarkerPtr marker_msg, int jointType, ros::Time capture_time);
#endif
private:
k4a_result_t renderBGRA32ToROS(sensor_msgs::ImagePtr rgb_frame, k4a::image& k4a_bgra_frame);
k4a_result_t renderDepthToROS(sensor_msgs::ImagePtr depth_image, k4a::image& k4a_depth_frame);
@@ -100,6 +109,10 @@ class K4AROSDevice
ros::Publisher pointcloud_publisher_;
#if defined(K4A_BODY_TRACKING)
ros::Publisher body_marker_publisher_;
#endif
// Parameters
K4AROSDeviceParams params_;
@@ -110,6 +123,11 @@ class K4AROSDevice
// K4A Recording
k4a_playback_t k4a_playback_handle_;
std::mutex k4a_playback_handle_mutex_;
#if defined(K4A_BODY_TRACKING)
// Body tracker
k4abt::tracker k4abt_tracker_;
#endif
ros::Time start_time_;

View file

@@ -39,6 +39,7 @@
LIST_ENTRY(tf_prefix, "The prefix prepended to tf frame IDs", std::string, std::string()) \
LIST_ENTRY(recording_file, "Path to a recording file to open instead of opening a device", std::string, std::string("")) \
LIST_ENTRY(recording_loop_enabled, "True if the recording should be rewound at EOF", bool, false) \
LIST_ENTRY(body_tracking_enabled, "True if body joints should be published as a marker array message", bool, false) \
class K4AROSDeviceParams
{

View file

@@ -23,6 +23,7 @@ Licensed under the MIT License.
<arg name="sensor_sn" default="" /> <!-- Sensor serial number. If none provided, the first sensor will be selected -->
<arg name="recording_file" default="" /> <!-- Absolute path to a mkv recording file which will be used with the playback api instead of opening a device -->
<arg name="recording_loop_enabled" default="false" /> <!-- If set to true the recording file will rewind the beginning once end of file is reached -->
<arg name="body_tracking_enabled" default="false" /> <!-- If set to true the joint positions will be published as marker arrays -->
<node pkg="azure_kinect_ros_driver" type="node" name="node" output="screen" required="$(arg required)">
<param name="depth_enabled" type="bool" value="$(arg depth_enabled)" />
@@ -36,5 +37,6 @@ Licensed under the MIT License.
<param name="tf_prefix" type="string" value="$(arg tf_prefix)" />
<param name="recording_file" type="string" value="$(arg recording_file)" />
<param name="recording_loop_enabled" type="bool" value="$(arg recording_loop_enabled)" />
<param name="body_tracking_enabled" type="bool" value="$(arg body_tracking_enabled)" />
</node>
</launch>

View file

@@ -27,8 +27,15 @@ using namespace sensor_msgs;
using namespace image_transport;
using namespace std;
#if defined(K4A_BODY_TRACKING)
using namespace visualization_msgs;
#endif
K4AROSDevice::K4AROSDevice(const NodeHandle &n, const NodeHandle &p) : k4a_device_(nullptr),
k4a_playback_handle_(nullptr),
#if defined(K4A_BODY_TRACKING)
k4abt_tracker_(nullptr),
#endif
node_(n),
private_node_(p),
image_transport_(n),
@@ -223,6 +230,10 @@ K4AROSDevice::K4AROSDevice(const NodeHandle &n, const NodeHandle &p) : k4a_devic
imu_orientation_publisher_ = node_.advertise<Imu>("imu", 200);
pointcloud_publisher_ = node_.advertise<PointCloud2>("points2", 1);
#if defined(K4A_BODY_TRACKING)
body_marker_publisher_ = node_.advertise<MarkerArray>("body_tracking_data", 1);
#endif
}
K4AROSDevice::~K4AROSDevice()
@@ -247,16 +258,23 @@ K4AROSDevice::~K4AROSDevice()
{
k4a_playback_close(k4a_playback_handle_);
}
#if defined(K4A_BODY_TRACKING)
if (k4abt_tracker_)
{
k4abt_tracker_.shutdown();
}
#endif
}
k4a_result_t K4AROSDevice::startCameras()
{
k4a_device_configuration_t k4a_configuration = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
k4a_result_t result = params_.GetDeviceConfig(&k4a_configuration);
if (k4a_device_)
{
k4a_device_configuration_t k4a_configuration = K4A_DEVICE_CONFIG_INIT_DISABLE_ALL;
k4a_result_t result = params_.GetDeviceConfig(&k4a_configuration);
if (result != K4A_RESULT_SUCCEEDED)
{
ROS_ERROR("Failed to generate a device configuration. Not starting camera!");
@@ -266,16 +284,28 @@ k4a_result_t K4AROSDevice::startCameras()
// Now that we have a proposed camera configuration, we can
// initialize the class which will take care of device calibration information
calibration_data_.initialize(k4a_device_, k4a_configuration.depth_mode, k4a_configuration.color_resolution, params_);
ROS_INFO_STREAM("STARTING CAMERAS");
k4a_device_.start_cameras(&k4a_configuration);
}
else if (k4a_playback_handle_)
{
// initialize the class which will take care of device calibration information from the playback_handle
calibration_data_.initialize(k4a_playback_handle_, params_);
}
#if defined(K4A_BODY_TRACKING)
// When calibration is initialized the body tracker can be created with the device calibration
if (params_.body_tracking_enabled)
{
k4abt_tracker_ = k4abt::tracker::create(calibration_data_.k4a_calibration_);
}
#endif
if (k4a_device_)
{
ROS_INFO_STREAM("STARTING CAMERAS");
k4a_device_.start_cameras(&k4a_configuration);
}
// Cannot assume the device timestamp begins increasing upon starting the cameras.
// If we set the time base here, depending on the machine performance, the new timestamp
@@ -742,6 +772,42 @@ k4a_result_t K4AROSDevice::getImuFrame(const k4a_imu_sample_t& sample, sensor_ms
return K4A_RESULT_SUCCEEDED;
}
#if defined(K4A_BODY_TRACKING)
k4a_result_t K4AROSDevice::getBodyMarker(const k4abt_body_t& body, MarkerPtr marker_msg, int jointType, ros::Time capture_time)
{
k4a_float3_t position = body.skeleton.joints[jointType].position;
k4a_quaternion_t orientation = body.skeleton.joints[jointType].orientation;
marker_msg->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.depth_camera_frame_;
marker_msg->header.stamp = capture_time;
// Set the lifetime to 0.25 s to prevent flickering even for 5 fps configurations.
// New markers with the same ID will replace old markers as soon as they arrive.
marker_msg->lifetime = ros::Duration(0.25);
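// Encode the body id and joint type into a single marker id (body_id * 100 + joint_index).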
marker_msg->id = body.id * 100 + jointType;
marker_msg->type = Marker::SPHERE;
marker_msg->color.a = 1.0;
// ColorRGBA channels are floats in the range [0.0, 1.0]
marker_msg->color.r = 1.0;
marker_msg->color.g = 0.0;
marker_msg->color.b = 0.0;
marker_msg->scale.x = 0.05;
marker_msg->scale.y = 0.05;
marker_msg->scale.z = 0.05;
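// Joint positions are reported by the Body Tracking SDK in millimeters; convert to meters for ROS.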
marker_msg->pose.position.x = position.v[0] / 1000.0f;
marker_msg->pose.position.y = position.v[1] / 1000.0f;
marker_msg->pose.position.z = position.v[2] / 1000.0f;
marker_msg->pose.orientation.x = orientation.v[0];
marker_msg->pose.orientation.y = orientation.v[1];
marker_msg->pose.orientation.z = orientation.v[2];
marker_msg->pose.orientation.w = orientation.v[3];
return K4A_RESULT_SUCCEEDED;
}
#endif
void K4AROSDevice::framePublisherThread()
{
ros::Rate loop_rate(params_.fps);
@@ -916,6 +982,51 @@ void K4AROSDevice::framePublisherThread()
depth_rect_camerainfo_publisher_.publish(depth_rect_camera_info);
}
}
#if defined(K4A_BODY_TRACKING)
// Publish body markers when body tracking is enabled and a depth image is available
if (params_.body_tracking_enabled && body_marker_publisher_.getNumSubscribers() > 0)
{
capture_time = timestampToROS(capture.get_depth_image().get_device_timestamp());
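// Add the capture to the tracker's input queue; the processed body frame is popped synchronously below.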
if (!k4abt_tracker_.enqueue_capture(capture))
{
ROS_ERROR("Error! Add capture to tracker process queue failed!");
ros::shutdown();
return;
}
else
{
k4abt::frame body_frame = k4abt_tracker_.pop_result();
if (body_frame == nullptr)
{
ROS_ERROR_STREAM("Pop body frame result failed!");
ros::shutdown();
return;
}
else
{
auto num_bodies = body_frame.get_num_bodies();
if (num_bodies > 0)
{
MarkerArrayPtr markerArrayPtr(new MarkerArray);
for (size_t i = 0; i < num_bodies; ++i)
{
k4abt_body_t body = body_frame.get_body(i);
for (int j = 0; j < (int) K4ABT_JOINT_COUNT; ++j)
{
MarkerPtr markerPtr(new Marker);
getBodyMarker(body, markerPtr, j, capture_time);
markerArrayPtr->markers.push_back(*markerPtr);
}
}
body_marker_publisher_.publish(markerArrayPtr);
}
}
}
}
#endif
}
}