Merge branch 'melodic' into fix_intrinsic_frame_id

This commit is contained in:
Lou Amadio 2022-04-19 10:11:28 -07:00 committed by GitHub
Parents f75e0a2833 044693ff14
Commit db53201eb3
No known key found for this signature
GPG key ID: 4AEE18F83AFDEB23
6 changed files with 243 additions and 10 deletions

View File

@@ -6,6 +6,8 @@ project(azure_kinect_ros_driver LANGUAGES C CXX)
list(INSERT CMAKE_MODULE_PATH 0 ${CMAKE_CURRENT_LIST_DIR}/cmake)
option(CUDA_SUPPORT "use CUDA support onnxruntime library" ON)
## Find catkin macros and libraries
## if COMPONENTS list like find_package(catkin REQUIRED COMPONENTS xyz)
## is used, also find other catkin packages
@@ -14,6 +16,7 @@ find_package(catkin REQUIRED COMPONENTS
std_msgs
sensor_msgs
image_transport
image_geometry
nodelet
tf2
tf2_ros
@@ -24,6 +27,7 @@ find_package(catkin REQUIRED COMPONENTS
camera_info_manager
)
find_package(OpenCV REQUIRED)
###################################
## catkin specific configuration ##
###################################
@@ -77,6 +81,7 @@ set_target_properties(${PROJECT_NAME}_node PROPERTIES OUTPUT_NAME node PREFIX ""
############################
message("Finding K4A SDK binaries")
message("--------------------------------------------${CMAKE_MODULE_PATH}-----------------------------------------")
# Disable cached locations for K4A SDK binaries.
# Do this to force the search logic to happen correctly.
@@ -92,20 +97,121 @@ find_package(k4a 1.3.0 QUIET MODULE REQUIRED)
set(K4A_LIBS k4a::k4a;k4a::k4arecord)
# Try to find and enable the body tracking SDK
-find_package(k4abt 1.0.0 QUIET MODULE)
+find_package(k4abt 1.0.1 QUIET MODULE)
if (k4abt_FOUND)
list(APPEND K4A_LIBS k4abt::k4abt)
message(STATUS "Body Tracking SDK found: compiling support for Body Tracking")
target_compile_definitions(${PROJECT_NAME}_node PUBLIC K4A_BODY_TRACKING)
target_compile_definitions(${PROJECT_NAME}_nodelet PUBLIC K4A_BODY_TRACKING)
message("!!! Body Tracking SDK found: body tracking features will be available !!!")
else()
message("!!! Body Tracking SDK not found: body tracking features will not be available !!!")
endif()
if(MSVC AND NOT k4abt_FOUND)
if(CUDA_SUPPORT)
set(KBT "Microsoft.Azure.Kinect.BodyTracking.1.0.1")
set(KBT_URL "https://www.nuget.org/api/v2/package/Microsoft.Azure.Kinect.BodyTracking/1.0.1")
set(KBT_SHA512 "546ebf82551ca809213c7540c4a7c3ccdd14d1ae0ac5d8475b3a98439c4a68cf3d5b4e3fa70676d524ac820815f77611bf2d8ffba96933ad5b317db392d34f20")
file(DOWNLOAD
${KBT_URL}
${CMAKE_CURRENT_BINARY_DIR}/kbt.nuget
EXPECTED_HASH SHA512=${KBT_SHA512}
SHOW_PROGRESS
)
file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${KBT}")
execute_process(COMMAND tar xvzf "${CMAKE_CURRENT_BINARY_DIR}/kbt.nuget"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${KBT}"
)
file(GLOB KBT_ONNX_DLL
"${CMAKE_CURRENT_BINARY_DIR}/${KBT}/lib/native/amd64/release/onnxruntime.dll"
)
file(GLOB KBT_DLL
"${CMAKE_CURRENT_BINARY_DIR}/${KBT}/lib/native/amd64/release/k4abt.dll"
)
file(GLOB KBT_LIB
"${CMAKE_CURRENT_BINARY_DIR}/${KBT}/lib/native/amd64/release/k4abt.lib"
)
file(GLOB KBT_DNN_MODEL
"${CMAKE_CURRENT_BINARY_DIR}/${KBT}/content/dnn_model_2_0.onnx"
)
add_library(k4abt::k4abt SHARED IMPORTED GLOBAL)
target_include_directories(
k4abt::k4abt
INTERFACE
${CMAKE_CURRENT_BINARY_DIR}/${KBT}/build/native/include
)
set_property(TARGET k4abt::k4abt PROPERTY IMPORTED_CONFIGURATIONS "")
set_property(TARGET k4abt::k4abt PROPERTY IMPORTED_LOCATION "${KBT_DLL}")
set_property(TARGET k4abt::k4abt PROPERTY IMPORTED_IMPLIB "${KBT_LIB}")
set_property(TARGET k4abt::k4abt PROPERTY IMPORTED_LINK_DEPENDENT_LIBRARIES "${KBT_DNN_MODEL};${KBT_ONNX_DLL}")
set(k4abt_FOUND TRUE)
set(k4abt_VERSION "1.0.1")
list(APPEND K4A_LIBS k4abt::k4abt)
target_compile_definitions(${PROJECT_NAME}_node PUBLIC K4A_BODY_TRACKING)
target_compile_definitions(${PROJECT_NAME}_nodelet PUBLIC K4A_BODY_TRACKING)
set(KBT_DEPENDENCIES "Microsoft.Azure.Kinect.BodyTracking.Dependencies.0.9.1")
set(KBTD_URL "https://www.nuget.org/api/v2/package/Microsoft.Azure.Kinect.BodyTracking.Dependencies/0.9.1")
set(KBTD_SHA512 "5df5ceb2f7905a3e208a085a29ef02feb1820ffe819563af77c272ad7e068cf3a158c0ce610e421829b5f7ebbb628c45f56617344d6d1ef0a31d719253cf881f")
file(DOWNLOAD
${KBTD_URL}
${CMAKE_CURRENT_BINARY_DIR}/kbtd.nuget
EXPECTED_HASH SHA512=${KBTD_SHA512}
SHOW_PROGRESS
)
file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${KBT_DEPENDENCIES}")
execute_process(COMMAND tar xvzf "${CMAKE_CURRENT_BINARY_DIR}/kbtd.nuget"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${KBT_DEPENDENCIES}"
)
file(GLOB KBTD_DLLS
"${CMAKE_CURRENT_BINARY_DIR}/${KBT_DEPENDENCIES}/lib/native/amd64/release/*.dll"
)
file(COPY ${KBTD_DLLS} DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_BIN_DESTINATION})
file(COPY ${KBTD_DLLS} DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_LIB_DESTINATION})
set(CUDNN_DEPENDENCIES "Microsoft.Azure.Kinect.BodyTracking.Dependencies.cuDNN.0.9.1")
set(CUDNN_URL "https://www.nuget.org/api/v2/package/Microsoft.Azure.Kinect.BodyTracking.Dependencies.cuDNN/0.9.1")
set(CUDNN_SHA512 "a9986d5d7f06e0445cb65dfe1363c3e827df2c87b80ecfa05894c062714bf2092b3f87defd136cd2a89b9e588e773080ecf7079e8ce7e428426c4a00259c5085")
file(DOWNLOAD
${CUDNN_URL}
${CMAKE_CURRENT_BINARY_DIR}/cudnn.nuget
EXPECTED_HASH SHA512=${CUDNN_SHA512}
SHOW_PROGRESS
)
file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${CUDNN_DEPENDENCIES}")
execute_process(COMMAND tar xvzf "${CMAKE_CURRENT_BINARY_DIR}/cudnn.nuget"
WORKING_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${CUDNN_DEPENDENCIES}"
)
file(GLOB CUDNN_DLLS
"${CMAKE_CURRENT_BINARY_DIR}/${CUDNN_DEPENDENCIES}/lib/native/amd64/release/*.dll"
)
file(COPY ${CUDNN_DLLS} DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_BIN_DESTINATION})
file(COPY ${CUDNN_DLLS} DESTINATION ${CATKIN_DEVEL_PREFIX}/${CATKIN_PACKAGE_LIB_DESTINATION})
else()
message(FATAL_ERROR "Body Tracking SDK needs an NVIDIA GeForce GTX 1070 or better")
endif()
endif()
# This reads the K4A_LIBS and K4A_INSTALL_REQUIRED variables and decides how to install
# the various shared objects / DLLs
include(Installk4a)
##################################
###### END AZURE KINECT SDK ######
##################################
@@ -118,11 +224,13 @@ include_directories(
target_link_libraries(${PROJECT_NAME}_node
${K4A_LIBS}
${catkin_LIBRARIES}
${OpenCV_LIBS}
)
target_link_libraries(${PROJECT_NAME}_nodelet
${K4A_LIBS}
${catkin_LIBRARIES}
${OpenCV_LIBS}
)
#############
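The image_geometry and OpenCV dependencies added above support projecting the body-joint TF frames into the RGB image (see the imageCallback added further down). Below is a minimal sketch of that projection path, assuming a CameraInfo message and a point already expressed in the camera's optical frame are at hand; the helper name drawProjectedPoint is illustrative only, not part of the driver.

// Illustrative sketch (not driver code): project a 3D point, given in the
// camera's optical frame, into pixel coordinates and mark it on the image.
#include <image_geometry/pinhole_camera_model.h>
#include <opencv2/opencv.hpp>
#include <sensor_msgs/CameraInfo.h>

void drawProjectedPoint(const sensor_msgs::CameraInfoConstPtr& info_msg,
                        const cv::Point3d& point_in_camera_frame,
                        cv::Mat& image)
{
  image_geometry::PinholeCameraModel model;
  model.fromCameraInfo(info_msg);                                  // intrinsics from CameraInfo
  cv::Point2d uv = model.project3dToPixel(point_in_camera_frame);  // 3D point -> pixel
  cv::circle(image, uv, 10, cv::Scalar(0, 0, 255), -1);            // filled red dot (BGR)
}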

View File

@@ -6,7 +6,7 @@ set(RELATIVE_WIN_BIN_DIR "sdk/windows-desktop/amd64/release/bin")
set(RELATIVE_WIN_K4ABT_LIB_PATH "${RELATIVE_WIN_LIB_DIR}/k4abt.lib")
set(RELATIVE_WIN_K4ABT_DLL_PATH "${RELATIVE_WIN_BIN_DIR}/k4abt.dll")
-set(RELATIVE_WIN_DNN_MODEL_PATH "${RELATIVE_WIN_BIN_DIR}/dnn_model.onnx")
+set(RELATIVE_WIN_DNN_MODEL_PATH "${RELATIVE_WIN_BIN_DIR}/dnn_model_2_0.onnx")
set(RELATIVE_WIN_ONNX_RUNTIME_DLL_PATH "${RELATIVE_WIN_BIN_DIR}/onnxruntime.dll")
# K4A BT versions have exactly 3 components: major.minor.rev
@@ -59,7 +59,7 @@ elseif(CMAKE_HOST_SYSTEM_NAME STREQUAL "Windows")
set(_dnn_model_path "${_sdk_dir}/${RELATIVE_WIN_DNN_MODEL_PATH}")
if(NOT EXISTS "${_dnn_model_path}")
-quiet_message(WARNING "Rejecting SDK located at ${_sdk_dir}: Could not find dnn_model.onnx at ${_dnn_model_path}")
+quiet_message(WARNING "Rejecting SDK located at ${_sdk_dir}: Could not find dnn_model_2_0.onnx at ${_dnn_model_path}")
return()
endif()

View File

@@ -28,6 +28,13 @@
#if defined(K4A_BODY_TRACKING)
#include <visualization_msgs/MarkerArray.h>
#include <k4abt.hpp>
#include <tf2_ros/transform_broadcaster.h>
#include <geometry_msgs/TransformStamped.h>
#include <geometry_msgs/Pose.h>
#include <opencv2/opencv.hpp>
#include <image_geometry/pinhole_camera_model.h>
#include <tf2_ros/transform_listener.h>
#include <tf2_geometry_msgs/tf2_geometry_msgs.h>
#endif
// Project headers
@@ -63,13 +70,15 @@ class K4AROSDevice
k4a_result_t getIrFrame(const k4a::capture& capture, sensor_msgs::ImagePtr& ir_image);
#if defined(K4A_BODY_TRACKING)
-k4a_result_t getBodyMarker(const k4abt_body_t& body, visualization_msgs::MarkerPtr marker_msg, int jointType,
+k4a_result_t getBodyMarker(const k4abt_body_t& body, visualization_msgs::MarkerPtr marker_msg, geometry_msgs::TransformStamped& transform_msg, int bodyNum, int jointType,
ros::Time capture_time);
k4a_result_t getBodyIndexMap(const k4abt::frame& body_frame, sensor_msgs::ImagePtr body_index_map_image);
k4a_result_t renderBodyIndexMapToROS(sensor_msgs::ImagePtr body_index_map_image, k4a::image& k4a_body_index_map,
const k4abt::frame& body_frame);
void imageCallback(const sensor_msgs::ImageConstPtr& image_msg, const sensor_msgs::CameraInfoConstPtr& info_msg);
#endif
private:
@@ -140,8 +149,14 @@ class K4AROSDevice
#if defined(K4A_BODY_TRACKING)
ros::Publisher body_marker_publisher_;
tf2_ros::TransformBroadcaster br;
image_transport::Publisher body_index_map_publisher_;
image_transport::Publisher image_tf_publisher_;
image_transport::CameraSubscriber image_subscriber_;
image_geometry::PinholeCameraModel cam_model_;
tf2_ros::Buffer tfBuffer;
tf2_ros::TransformListener* tfListener;
#endif
// Parameters
@@ -162,6 +177,9 @@ class K4AROSDevice
k4abt::tracker k4abt_tracker_;
std::atomic_int16_t k4abt_tracker_queue_size_;
std::thread body_publisher_thread_;
std::vector<std::string> joint_names_{
  "Pelvis", "Spine_Naval", "Spine_Chest", "Neck",
  "Clavicle_left", "Shoulder_left", "Elbow_left", "Wrist_left", "Hand_left", "Handtip_left", "thumb_left",
  "Clavicle_right", "Shoulder_right", "Elbow_right", "Wrist_right", "Hand_right", "Handtip_right", "Thumb_right",
  "Hip_left", "Knee_left", "Ankle_left", "Foot_left",
  "Hip_right", "Knee_right", "Ankle_right", "Foot_right",
  "Head", "Nose", "Eye_Left", "Ear_Left", "Eye_Right", "Ear_Right"};
size_t num_bodies = 0;  // written by bodyPublisherThread, read by imageCallback
#endif
std::chrono::nanoseconds device_to_realtime_offset_{0};

View File

@@ -0,0 +1,10 @@
<!--
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License.
-->
<launch>
<include file="$(find azure_kinect_ros_driver)/launch/driver.launch">
<arg name="body_tracking_enabled" value="true" />
</include>
</launch>

View File

@@ -18,6 +18,7 @@
<depend>std_msgs</depend>
<depend>sensor_msgs</depend>
<depend>image_transport</depend>
<depend>image_geometry</depend>
<depend>tf2</depend>
<depend>tf2_ros</depend>
<depend>tf2_geometry_msgs</depend>
@@ -30,6 +31,7 @@
<depend>K4A</depend>
<exec_depend>rgbd_launch</exec_depend>
<depend>OpenCV</depend>
<export>
<nodelet plugin="${prefix}/nodelet_plugins.xml" />

View File

@@ -300,10 +300,15 @@ K4AROSDevice::K4AROSDevice(const NodeHandle& n, const NodeHandle& p)
ci_mngr_ir_ = std::make_shared<camera_info_manager::CameraInfoManager>(node_ir_, calibration_file_name_ir, calibration_url_ir);
#if defined(K4A_BODY_TRACKING)
if (params_.body_tracking_enabled) {
tfListener = new tf2_ros::TransformListener(tfBuffer);
body_marker_publisher_ = node_.advertise<MarkerArray>("body_tracking_data", 1);
body_index_map_publisher_ = image_transport_.advertise("body_index_map/image_raw", 1);
image_subscriber_ = image_transport_.subscribeCamera("rgb/image_raw", 1, &K4AROSDevice::imageCallback, this);
image_tf_publisher_ = image_transport_.advertise("image_tf", 1);
}
#endif
}
@@ -784,13 +789,15 @@ k4a_result_t K4AROSDevice::getImuFrame(const k4a_imu_sample_t& sample, sensor_ms
}
#if defined(K4A_BODY_TRACKING)
-k4a_result_t K4AROSDevice::getBodyMarker(const k4abt_body_t& body, MarkerPtr marker_msg, int jointType,
+k4a_result_t K4AROSDevice::getBodyMarker(const k4abt_body_t& body, MarkerPtr marker_msg, geometry_msgs::TransformStamped& transform_msg, int bodyNum, int jointType,
ros::Time capture_time)
{
k4a_float3_t position = body.skeleton.joints[jointType].position;
k4a_quaternion_t orientation = body.skeleton.joints[jointType].orientation;
std::string depth_frame = calibration_data_.tf_prefix_ + calibration_data_.depth_camera_frame_;
std::string rgb_frame = calibration_data_.tf_prefix_ + calibration_data_.rgb_camera_frame_;
-marker_msg->header.frame_id = calibration_data_.tf_prefix_ + calibration_data_.depth_camera_frame_;
+marker_msg->header.frame_id = depth_frame;
marker_msg->header.stamp = capture_time;
// Set the lifetime to 0.25 to prevent flickering for even 5fps configurations.
@@ -818,9 +825,91 @@ k4a_result_t K4AROSDevice::getBodyMarker(const k4abt_body_t& body, MarkerPtr mar
marker_msg->pose.orientation.y = orientation.wxyz.y;
marker_msg->pose.orientation.z = orientation.wxyz.z;
// Look up the latest transform from the depth camera frame to the rgb camera frame; on failure, warn and back off for one second.
geometry_msgs::TransformStamped depth_link_to_rgb_link;
try{
depth_link_to_rgb_link = tfBuffer.lookupTransform(rgb_frame , depth_frame,
ros::Time(0));
}
catch (tf2::TransformException &ex) {
ROS_WARN("%s",ex.what());
ros::Duration(1.0).sleep();
}
// Pose message holding the joint pose (in meters) so it can be transformed into the rgb camera frame.
geometry_msgs::Pose pose_msg;
pose_msg.position.x = position.v[0] / 1000.0f;
pose_msg.position.y = position.v[1] / 1000.0f;
pose_msg.position.z = position.v[2] / 1000.0f;
pose_msg.orientation.w = orientation.wxyz.w;
pose_msg.orientation.x = orientation.wxyz.x;
pose_msg.orientation.y = orientation.wxyz.y;
pose_msg.orientation.z = orientation.wxyz.z;
tf2::doTransform(pose_msg, pose_msg, depth_link_to_rgb_link);
transform_msg.header.stamp = capture_time;
transform_msg.header.frame_id = rgb_frame;
transform_msg.child_frame_id = joint_names_[jointType] + std::to_string(bodyNum);
transform_msg.transform.translation.x = pose_msg.position.x;
transform_msg.transform.translation.y = pose_msg.position.y;
transform_msg.transform.translation.z = pose_msg.position.z;
transform_msg.transform.rotation.w = pose_msg.orientation.w;
transform_msg.transform.rotation.x = pose_msg.orientation.x;
transform_msg.transform.rotation.y = pose_msg.orientation.y;
transform_msg.transform.rotation.z = pose_msg.orientation.z;
return K4A_RESULT_SUCCEEDED;
}
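For reference, here is a minimal standalone listener sketching how the joint transforms broadcast above could be consumed. Child frames are named joint_names_[jointType] + bodyNum (e.g. "Pelvis0" for the first tracked body) and are parented to the rgb camera frame; the node name body_joint_listener and the parent frame name rgb_camera_link (i.e. an empty tf_prefix) are assumptions made for this sketch, not part of the driver.

// Hypothetical consumer: print the pelvis position of the first tracked body,
// expressed in the rgb camera frame, at 10 Hz.
#include <ros/ros.h>
#include <geometry_msgs/TransformStamped.h>
#include <tf2/exceptions.h>
#include <tf2_ros/transform_listener.h>

int main(int argc, char** argv)
{
  ros::init(argc, argv, "body_joint_listener");
  ros::NodeHandle nh;

  tf2_ros::Buffer tf_buffer;
  tf2_ros::TransformListener tf_listener(tf_buffer);

  ros::Rate rate(10.0);
  while (ros::ok())
  {
    try
    {
      // Latest available transform; frame names assume the default (empty) tf_prefix.
      geometry_msgs::TransformStamped tf_msg =
          tf_buffer.lookupTransform("rgb_camera_link", "Pelvis0", ros::Time(0));
      ROS_INFO("Pelvis0 at (%.2f, %.2f, %.2f) m in the rgb camera frame",
               tf_msg.transform.translation.x,
               tf_msg.transform.translation.y,
               tf_msg.transform.translation.z);
    }
    catch (tf2::TransformException& ex)
    {
      ROS_WARN_THROTTLE(5.0, "%s", ex.what());
    }
    rate.sleep();
  }
  return 0;
}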
// Image callback: projects each broadcast body-joint TF frame into the RGB image, draws a marker at the projected pixel, and republishes the annotated image on the image_tf topic.
void K4AROSDevice::imageCallback(const sensor_msgs::ImageConstPtr& image_msg, const sensor_msgs::CameraInfoConstPtr& info_msg)
{
cv::Mat image;
cv_bridge::CvImagePtr input_bridge;
try {
input_bridge = cv_bridge::toCvCopy(image_msg, sensor_msgs::image_encodings::BGR8);
image = input_bridge->image;
}
catch (cv_bridge::Exception& ex){
ROS_ERROR("Failed to convert image to BGR8: %s", ex.what());
return;
}
cam_model_.fromCameraInfo(info_msg);
std::vector<std::string> frame_ids_;
for(int i = 0; i < num_bodies; ++i){
std::transform(joint_names_.begin(), joint_names_.end(), back_inserter(frame_ids_), [&i](std::string j){return j + std::to_string(i);});
}
for(const std::string frame_id: frame_ids_) {
geometry_msgs::TransformStamped transform_msg;
try{
transform_msg = tfBuffer.lookupTransform(cam_model_.tfFrame(), frame_id,
ros::Time(0));
}
catch (tf2::TransformException &ex) {
ROS_WARN("Unable to look up the transform between the frames, %s",ex.what());
return;
}
cv::Point3d pt_cv(transform_msg.transform.translation.x, transform_msg.transform.translation.y, transform_msg.transform.translation.z);
cv::Point2d uv;
uv = cam_model_.project3dToPixel(pt_cv);
static const int RADIUS = 10;
cv::circle(image, uv, RADIUS, CV_RGB(255,0,0), -1);
}
image_tf_publisher_.publish(input_bridge->toImageMsg());
}
k4a_result_t K4AROSDevice::getBodyIndexMap(const k4abt::frame& body_frame, sensor_msgs::ImagePtr body_index_map_image)
{
k4a::image k4a_body_index_map = body_frame.get_body_index_map();
@@ -1253,19 +1342,25 @@ void K4AROSDevice::bodyPublisherThread()
if (body_marker_publisher_.getNumSubscribers() > 0)
{
// Joint marker array
MarkerArrayPtr markerArrayPtr(new MarkerArray);
auto num_bodies = body_frame.get_num_bodies();
std::vector<geometry_msgs::TransformStamped> transformArray;
num_bodies = body_frame.get_num_bodies();
for (size_t i = 0; i < num_bodies; ++i)
{
//tf2_ros::TransformListener tfListener(tfBuffer);
k4abt_body_t body = body_frame.get_body(i);
for (int j = 0; j < (int) K4ABT_JOINT_COUNT; ++j)
{
MarkerPtr markerPtr(new Marker);
getBodyMarker(body, markerPtr, j, capture_time);
geometry_msgs::TransformStamped transform_msg;
getBodyMarker(body, markerPtr, transform_msg, i, j, capture_time);
markerArrayPtr->markers.push_back(*markerPtr);
transformArray.push_back(std::move(transform_msg));
}
}
body_marker_publisher_.publish(markerArrayPtr);
br.sendTransform(transformArray);
}
if (body_index_map_publisher_.getNumSubscribers() > 0)