Commit f8cb5783 authored by ARMAR-6 User

Merge branch 'feature/ak-body-tracking-sdk' of git.h2t.iar.kit.edu:sw/armarx/visionx into feature/ak-body-tracking-sdk
parents 127ebdd2 cdae5324
1 merge request: !141 feature: Azure Kinect Body Tracking SDK
@@ -33,6 +33,7 @@
#include <Calibration/Calibration.h>
#include <Calibration/StereoCalibration.h>
#include <VisionX/libraries/human/pose/model/openpose_body_25.h>
#include <VisionX/core/ImageProcessor.h>
#include <VisionX/interface/components/OpenPoseEstimationInterface.h>
#include <VisionX/interface/components/PointCloudAndImageAndCalibrationProviderInterface.h>
@@ -103,7 +104,7 @@ namespace armarx
"If you want to change the initial calib->get scale, you actually want to multiply the "
"`net_resolution` by your desired initial scale.");
defineOptionalProperty<int>("OP_scale_number", 1, "Number of scales to average.");
defineOptionalProperty<std::string>("OP_model_pose", "BODY_25", "Model to be used. E.g. `BODY_25` (25 keypoints, best model), `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), "
defineOptionalProperty<std::string>("OP_model_pose", human::pose::model::openpose_body_25::ModelId, "Model to be used. E.g. `BODY_25` (25 keypoints, best model), `COCO` (18 keypoints), `MPI` (15 keypoints, ~10% faster), "
"MPI_4_layers` (15 keypoints, even faster but less accurate).");
defineOptionalProperty<std::string>("OP_model_folder", "models/", "Folder path (absolute or relative) where the models (pose, face, ...) are located.");
defineOptionalProperty<int>("OP_num_gpu_start", 0, "GPU device start number.");
......
@@ -36,6 +36,7 @@
#include <Calibration/Calibration.h>
// ArmarX
#include "ArmarXCore/core/time/Metronome.h"
#include "ArmarXCore/core/time/forward_declarations.h"
#include <ArmarXCore/core/time/TimeUtil.h>
#include <ArmarXCore/core/time/ScopedStopWatch.h>
@@ -52,8 +53,8 @@
#include <VisionX/libraries/imrec/helper.h>
#include <opencv2/core/hal/interface.h>
#include <mutex>
#include <utility>
#include <Eigen/src/Geometry/Quaternion.h>
// OpenCV
#include <opencv2/imgproc/imgproc.hpp>
@@ -101,17 +102,7 @@ namespace
}
}
std::function<void(armarx::Duration)>
createSwCallback(
armarx::ManagedIceObject* obj,
const std::string& description)
{
return [ = ](armarx::Duration duration)
{
std::string name = "duration " + description + " in [ms]";
obj->setMetaInfo(name, new armarx::Variant{duration.toMilliSecondsDouble()});
};
}
#ifdef INCLUDE_BODY_TRACKING
void
@@ -138,6 +129,19 @@ namespace
namespace visionx
{
std::function<void(armarx::Duration)>
AzureKinectPointCloudProvider::createSwCallback(
const std::string& description)
{
// Capture `description` by value: the returned callback outlives this call,
// so a by-reference capture would dangle.
return [this, description](armarx::Duration duration)
{
std::string name = "duration " + description + " in [ms]";
setMetaInfo(name, new armarx::Variant{duration.toMilliSecondsDouble()});
setDebugObserverDatafield(name, duration.toMilliSecondsDouble());
};
}
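As a side note on the capture above: the returned std::function is typically stored in a ScopedStopWatch and only invoked when that watch is destroyed, so any by-reference capture of `description` would dangle. A minimal, self-contained sketch of the pattern (a stand-in stopwatch, not the ArmarX implementation):

#include <chrono>
#include <functional>
#include <iostream>
#include <string>

// Stand-in for armarx::core::time::ScopedStopWatch: invokes the callback
// with the elapsed time when the scope ends.
class ScopedStopWatch
{
public:
    explicit ScopedStopWatch(std::function<void(std::chrono::nanoseconds)> callback) :
        callback_(std::move(callback)), start_(std::chrono::steady_clock::now())
    {
    }
    ~ScopedStopWatch()
    {
        callback_(std::chrono::duration_cast<std::chrono::nanoseconds>(
            std::chrono::steady_clock::now() - start_));
    }

private:
    std::function<void(std::chrono::nanoseconds)> callback_;
    std::chrono::steady_clock::time_point start_;
};

// Like createSwCallback above: `description` must be captured by value,
// because the returned function outlives this call.
std::function<void(std::chrono::nanoseconds)> makeSwCallback(const std::string& description)
{
    return [description](std::chrono::nanoseconds duration)
    {
        std::cout << "duration " << description << ": " << duration.count() << " ns\n";
    };
}

int main()
{
    ScopedStopWatch sw{makeSwCallback("doCapture")}; // callback fires at end of main
}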
AzureKinectPointCloudProviderPropertyDefinitions::AzureKinectPointCloudProviderPropertyDefinitions(
std::string prefix) :
visionx::CapturingPointCloudProviderPropertyDefinitions(std::move(prefix))
@@ -267,6 +271,8 @@ namespace visionx
{
ARMARX_DEBUG << "Connecting " << getName();
setDebugObserverBatchModeEnabled(true);
pointcloudTask = armarx::RunningTask<AzureKinectPointCloudProvider>::pointer_type(
new armarx::RunningTask<AzureKinectPointCloudProvider>(
this,
@@ -275,6 +281,10 @@
#ifdef INCLUDE_BODY_TRACKING
humanPoseWriter.emplace(memoryNameSystem(), humanMemoryServer);
bodyTrackingPublishTask = new armarx::RunningTask<AzureKinectPointCloudProvider>(this, &AzureKinectPointCloudProvider::runPublishBodyTrackingResults);
bodyTrackingPublishTask->start();
#endif
ARMARX_VERBOSE << "Connected " << getName();
@@ -297,6 +307,11 @@
ARMARX_DEBUG << "Pointcloud processing thread stopped";
}
#ifdef INCLUDE_BODY_TRACKING
bodyTrackingPublishTask->stop();
#endif
ARMARX_DEBUG << "Disconnected " << getName();
}
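Note that stop() can only end the publishing loop if the loop cooperates by polling its stop flag (see the while condition in runPublishBodyTrackingResults further down). A generic, self-contained sketch of this cooperative-stop pattern, using std::thread rather than armarx::RunningTask:

#include <atomic>
#include <chrono>
#include <thread>

std::atomic<bool> stopRequested{false};

void publishLoop()
{
    while (!stopRequested.load())
    {
        // ... pop tracker results and publish them ...
        std::this_thread::sleep_for(std::chrono::milliseconds{10}); // metronome stand-in
    }
}

int main()
{
    std::thread worker{publishLoop};
    std::this_thread::sleep_for(std::chrono::milliseconds{100});
    stopRequested = true; // cooperative stop: the loop exits at its next check
    worker.join();
}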
@@ -446,7 +461,7 @@
using armarx::core::time::ScopedStopWatch;
using armarx::core::time::StopWatch;
ScopedStopWatch sw_total{::createSwCallback(this, "doCapture")};
ScopedStopWatch sw_total{createSwCallback("doCapture")};
k4a::capture capture;
const std::chrono::milliseconds TIMEOUT{1000};
@@ -471,12 +486,16 @@
if (status)
{
::createSwCallback(this, "waiting for get_capture")(sw_get_capture.stop());
createSwCallback("waiting for get_capture")(sw_get_capture.stop());
setMetaInfo("temperature", new armarx::Variant(capture.get_temperature_c()));
// see ROS: This will give INCORRECT timestamps until the first image.
if(device_to_realtime_offset_.count() == 0)
const bool mustInitializeTimestampOffset = [&](){
std::lock_guard g{deviceToRealtimeOffsetMtx};
return device_to_realtime_offset_.count() == 0;
}();
if (mustInitializeTimestampOffset)
{
initializeTimestampOffset(capture.get_depth_image().get_device_timestamp());
}
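The immediately-invoked lambda above is a scoped-locking idiom: the mutex is held only for the duration of the read, and the result lands in a const local. A generic sketch (names are illustrative, not from the codebase):

#include <mutex>

std::mutex offsetMutex;
long long offsetNs = 0; // shared state, guarded by offsetMutex

bool mustInitialize()
{
    // The lambda's scope bounds the lock_guard's lifetime, so the mutex is
    // released before any follow-up work.
    const bool result = []
    {
        std::lock_guard g{offsetMutex};
        return offsetNs == 0;
    }();
    return result;
}

int main()
{
    return mustInitialize() ? 0 : 1;
}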
@@ -490,64 +509,6 @@
ARMARX_WARNING << "Add capture to tracker process queue timeout";
}
k4abt::frame body_frame = bodyTracker.pop_result();
if (body_frame != nullptr)
{
// see https://github.com/microsoft/Azure_Kinect_ROS_Driver/blob/melodic/src/k4a_ros_device.cpp
const armarx::DateTime timestamp = timestampToArmarX(body_frame.get_device_timestamp());
std::uint32_t num_bodies = body_frame.get_num_bodies();
ARMARX_INFO << deactivateSpam(1) << num_bodies << " bodies are detected!";
std::vector<armarx::armem::human::HumanPose> humanPoses;
for (std::uint32_t i = 0; i < num_bodies; i++)
{
k4abt_body_t body = body_frame.get_body(i);
printBodyInformation(body);
armarx::armem::human::HumanPose humanPose;
humanPose.humanTrackingId = std::to_string(body.id);
humanPose.poseModelId = "K4ABT_BODY_32";
for (int i = 0; i < static_cast<int>(K4ABT_JOINT_COUNT); i++)
{
const auto joint = static_cast<armarx::human::pose::model::k4a_bt_body_32::Joints>(i);
const auto name = armarx::human::pose::model::k4a_bt_body_32::JointNames.to_name(joint);
k4a_float3_t position = body.skeleton.joints[i].position;
k4a_quaternion_t orientation = body.skeleton.joints[i].orientation;
k4abt_joint_confidence_level_t confidence_level =
body.skeleton.joints[i].confidence_level;
humanPose.keypoints[name] = armarx::armem::human::PoseKeypoint{
.label = name,
.confidence = static_cast<float>(static_cast<int>(confidence_level)),
.positionCamera = armarx::FramedPosition(Eigen::Vector3f{position.v[0], position.v[1], position.v[2]}, bodyCameraFrameName, robotName),
.orientationCamera = armarx::FramedOrientation(Eigen::Quaternionf(orientation.wxyz.w, orientation.wxyz.x, orientation.wxyz.y, orientation.wxyz.z).toRotationMatrix(), bodyCameraFrameName, robotName)
};
}
humanPoses.push_back(humanPose);
}
humanPoseWriter->commitHumanPosesInCameraFrame(humanPoses, getName(), timestamp);
k4a::image body_index_map = body_frame.get_body_index_map();
if (body_index_map != nullptr)
{
//print_body_index_map_middle_line(body_index_map);
}
else
{
ARMARX_WARNING << "Error: Failed to generate bodyindex map!";
}
}
else
{
// It should never hit timeout when K4A_WAIT_INFINITE is set.
ARMARX_WARNING << "Error! Pop body frame result time out!";
}
#endif
const k4a::image DEPTH_IMAGE = capture.get_depth_image();
@@ -596,14 +557,14 @@
else
{
// Convert the k4a image to an IVT image.
ScopedStopWatch sw{::createSwCallback(this, "convert k4a image to IVT")};
ScopedStopWatch sw{createSwCallback("convert k4a image to IVT")};
::k4aToIvtImage(COLOR_IMAGE, *resultColorImage);
}
// Provide data for pointcloud processing thread and signal to start processing.
{
ScopedStopWatch sw{::createSwCallback(this, "transform depth image to camera")};
ScopedStopWatch sw{createSwCallback("transform depth image to camera")};
// Acquire lock and write data needed by pointcloud thread (i.e.,
// alignedDepthImageScaled and depthImageReady).
{
@@ -621,7 +582,7 @@
// Prepare result depth image.
{
ScopedStopWatch sw{::createSwCallback(this, "prepare result depth image")};
ScopedStopWatch sw{createSwCallback("prepare result depth image")};
const int DW = alignedDepthImage.get_width_pixels();
const int DH = alignedDepthImage.get_height_pixels();
@@ -650,11 +611,12 @@
index_2 += 1;
}
}
}
// Broadcast RGB-D image.
{
ScopedStopWatch sw{::createSwCallback(this, "broadcast RGB-D image")};
ScopedStopWatch sw{createSwCallback("broadcast RGB-D image")};
CByteImage* images[2] = {resultColorImage.get(), resultDepthImage.get()};
provideImages(images, imagesTime);
}
@@ -668,17 +630,98 @@
ARMARX_DEBUG << "Capturing thread received signal.";
}
sendDebugObserverBatch();
return true;
}
else
{
ARMARX_INFO << deactivateSpam(30) << "Did not get frame until timeout of " << TIMEOUT;
sendDebugObserverBatch();
return false;
}
}
#ifdef INCLUDE_BODY_TRACKING
void AzureKinectPointCloudProvider::runPublishBodyTrackingResults()
{
armarx::Metronome metronome(armarx::Frequency::Hertz(100));
// Poll the task's stop flag so that stop() (called on disconnect) can end this loop.
while (!bodyTrackingPublishTask->isStopped())
{
k4abt::frame body_frame = bodyTracker.pop_result();
if (body_frame != nullptr)
{
armarx::core::time::ScopedStopWatch sw{createSwCallback("publish body tracking result")};
// see https://github.com/microsoft/Azure_Kinect_ROS_Driver/blob/melodic/src/k4a_ros_device.cpp
const armarx::DateTime timestamp = timestampToArmarX(body_frame.get_device_timestamp());
std::uint32_t num_bodies = body_frame.get_num_bodies();
setDebugObserverDatafield("n_bodies_detected", num_bodies);
std::vector<armarx::armem::human::HumanPose> humanPoses;
humanPoses.reserve(num_bodies);
for (std::uint32_t i = 0; i < num_bodies; i++)
{
k4abt_body_t body = body_frame.get_body(i);
printBodyInformation(body);
armarx::armem::human::HumanPose humanPose;
humanPose.humanTrackingId = std::to_string(body.id);
humanPose.poseModelId = armarx::human::pose::model::k4a_bt_body_32::ModelId;
// Use a distinct loop variable: the outer body loop already uses `i`.
for (int j = 0; j < static_cast<int>(K4ABT_JOINT_COUNT); j++)
{
const auto joint = static_cast<armarx::human::pose::model::k4a_bt_body_32::Joints>(j);
const auto name = armarx::human::pose::model::k4a_bt_body_32::JointNames.to_name(joint);
k4a_float3_t position = body.skeleton.joints[j].position;
k4a_quaternion_t orientation = body.skeleton.joints[j].orientation;
k4abt_joint_confidence_level_t confidence_level =
body.skeleton.joints[j].confidence_level;
humanPose.keypoints[name] = armarx::armem::human::PoseKeypoint{
.label = name,
.confidence = static_cast<float>(static_cast<int>(confidence_level)),
.positionCamera = armarx::FramedPosition(Eigen::Vector3f{position.v[0], position.v[1], position.v[2]}, bodyCameraFrameName, robotName),
.orientationCamera = armarx::FramedOrientation(Eigen::Quaternionf(orientation.wxyz.w, orientation.wxyz.x, orientation.wxyz.y, orientation.wxyz.z).toRotationMatrix(), bodyCameraFrameName, robotName)
};
}
humanPoses.push_back(humanPose);
}
setDebugObserverDatafield("bodyTrackingCaptureUntilPublish [ms]", (armarx::Clock::Now() - timestamp).toMilliSeconds());
humanPoseWriter->commitHumanPosesInCameraFrame(humanPoses, getName(), timestamp);
// k4a::image body_index_map = body_frame.get_body_index_map();
// if (body_index_map != nullptr)
// {
// //print_body_index_map_middle_line(body_index_map);
// }
// else
// {
// ARMARX_WARNING << "Error: Failed to generate bodyindex map!";
// }
}
else
{
// It should never hit timeout when K4A_WAIT_INFINITE is set.
ARMARX_WARNING << "Error! Pop body frame result time out!";
}
metronome.waitForNextTick();
}
}
#endif
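A detail worth flagging in the conversion above: k4a_quaternion_t exposes its components as wxyz, and Eigen::Quaternionf's scalar constructor also takes (w, x, y, z), while Eigen's internal coefficient order is (x, y, z, w). A minimal sketch of the order-preserving construction (Eigen only, made-up values):

#include <Eigen/Geometry>
#include <iostream>

int main()
{
    // Stand-in for k4a_quaternion_t.wxyz: the identity rotation.
    const float w = 1.f, x = 0.f, y = 0.f, z = 0.f;

    // Eigen's scalar constructor takes (w, x, y, z) too, so the conversion
    // above is order-preserving. Beware: coeffs() stores (x, y, z, w).
    const Eigen::Quaternionf q(w, x, y, z);
    std::cout << q.toRotationMatrix() << std::endl; // prints the identity matrix
}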
void
AzureKinectPointCloudProvider::runPointcloudPublishing()
{
@@ -922,40 +965,56 @@ namespace visionx
void AzureKinectPointCloudProvider::updateTimestampOffset(const std::chrono::microseconds& k4a_device_timestamp_us,
const std::chrono::nanoseconds& k4a_system_timestamp_ns)
{
// System timestamp is on monotonic system clock.
// Device time is on AKDK hardware clock.
// We want to continuously estimate diff between realtime and AKDK hardware clock as low-pass offset.
// This consists of two parts: device to monotonic, and monotonic to realtime.
// First figure out realtime to monotonic offset. This will change to keep updating it.
std::chrono::nanoseconds realtime_clock = std::chrono::system_clock::now().time_since_epoch();
std::chrono::nanoseconds monotonic_clock = std::chrono::steady_clock::now().time_since_epoch();
std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;
// Next figure out the other part (combined).
std::chrono::nanoseconds device_to_realtime =
k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
// If we're over a second off, just snap into place.
if (device_to_realtime_offset_.count() == 0 ||
std::abs((device_to_realtime_offset_ - device_to_realtime).count()) > 1e7)
{
ARMARX_WARNING << "Initializing or re-initializing the device to realtime offset: " << device_to_realtime.count()
<< " ns";
device_to_realtime_offset_ = device_to_realtime;
}
else
{
// Low-pass filter!
constexpr double alpha = 0.10;
device_to_realtime_offset_ = device_to_realtime_offset_ +
std::chrono::nanoseconds(static_cast<int64_t>(
std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
}
// System timestamp is on monotonic system clock.
// Device time is on AKDK hardware clock.
// We want to continuously estimate diff between realtime and AKDK hardware clock as low-pass offset.
// This consists of two parts: device to monotonic, and monotonic to realtime.
// First figure out realtime to monotonic offset. This will change to keep updating it.
std::chrono::nanoseconds realtime_clock = std::chrono::system_clock::now().time_since_epoch();
std::chrono::nanoseconds monotonic_clock = std::chrono::steady_clock::now().time_since_epoch();
std::chrono::nanoseconds monotonic_to_realtime = realtime_clock - monotonic_clock;
// Next figure out the other part (combined).
std::chrono::nanoseconds device_to_realtime =
k4a_system_timestamp_ns - k4a_device_timestamp_us + monotonic_to_realtime;
{
std::lock_guard g{deviceToRealtimeOffsetMtx};
setDebugObserverDatafield("device_to_realtime_offset [ms]", device_to_realtime_offset_.count() / 1'000'000.f); // [ns] -> [ms]
setDebugObserverDatafield("clock_error", std::abs<float>((device_to_realtime_offset_ - device_to_realtime).count()) / 1'000.f); // [ns] -> [µs]
const std::int64_t timeOffsetThreshold = 10'000'000; // 10 ms (in ns)
// If we're over a certain time off, just snap into place.
if (device_to_realtime_offset_.count() == 0 ||
std::abs((device_to_realtime_offset_ - device_to_realtime).count()) > timeOffsetThreshold)
{
ARMARX_WARNING << "Initializing or re-initializing the device to realtime offset: " << device_to_realtime.count()
<< " ns";
device_to_realtime_offset_ = device_to_realtime;
}
else
{
// Low-pass filter!
constexpr double alpha = 0.10;
const std::chrono::nanoseconds timeCorrection(static_cast<int64_t>(
std::floor(alpha * (device_to_realtime - device_to_realtime_offset_).count())));
device_to_realtime_offset_ = device_to_realtime_offset_ + timeCorrection;
setDebugObserverDatafield("timeCorrection [µs]", timeCorrection.count() / 1'000); // [ns] -> [µs]
}
}
}
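The low-pass update above is an exponential moving average, offset ← offset + α·(measured − offset) with α = 0.10; a step change in the measured offset is therefore absorbed to about 65% after 10 updates and 96% after 30. A tiny numeric sketch:

#include <cstdio>

int main()
{
    // Exponential moving average, as used for the device-to-realtime offset.
    constexpr double alpha = 0.10;
    double estimate = 0.0;          // current offset estimate [ns]
    const double measurement = 1e6; // measured offset steps to 1 ms

    for (int k = 1; k <= 30; ++k)
    {
        estimate += alpha * (measurement - estimate);
        if (k % 10 == 0)
        {
            // ~65% absorbed after 10 updates, ~88% after 20, ~96% after 30.
            std::printf("update %2d: estimate = %.0f ns\n", k, estimate);
        }
    }
}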
armarx::DateTime AzureKinectPointCloudProvider::timestampToArmarX(const std::chrono::microseconds& k4a_timestamp_us)
{
std::lock_guard g{deviceToRealtimeOffsetMtx};
// must be initialized beforehand
ARMARX_CHECK(device_to_realtime_offset_.count() != 0);
@@ -967,6 +1026,8 @@
void AzureKinectPointCloudProvider::initializeTimestampOffset(const std::chrono::microseconds& k4a_device_timestamp_us)
{
std::lock_guard g{deviceToRealtimeOffsetMtx};
// We have no better guess than "now".
std::chrono::nanoseconds realtime_clock = std::chrono::system_clock::now().time_since_epoch();
......
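For reference, given how device_to_realtime is defined above, timestampToArmarX() amounts to adding the filtered offset to the device timestamp; a sketch with hypothetical values (the real method returns an armarx::DateTime):

#include <chrono>
#include <cstdio>

int main()
{
    using namespace std::chrono;

    // Hypothetical filtered offset, as maintained by updateTimestampOffset().
    const nanoseconds deviceToRealtimeOffset{1'650'000'000'000'000'000LL};
    // Hypothetical device (AKDK hardware clock) timestamp.
    const microseconds k4aDeviceTimestamp{123'456};

    // Device clock + filtered offset = realtime (epoch) timestamp.
    const nanoseconds realtime = nanoseconds{k4aDeviceTimestamp} + deviceToRealtimeOffset;
    std::printf("realtime since epoch: %lld ns\n",
                static_cast<long long>(realtime.count()));
}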
@@ -41,6 +41,7 @@
#include <ArmarXCore/core/services/tasks/RunningTask.h>
#include <ArmarXCore/core/time/TimeUtil.h>
#include <ArmarXCore/interface/observers/ObserverInterface.h>
#include <ArmarXCore/libraries/ArmarXCoreComponentPlugins/DebugObserverComponentPlugin.h>
// RobotAPI
#include <RobotAPI/interface/visualization/DebugDrawerInterface.h>
@@ -87,7 +88,8 @@ namespace visionx
class AzureKinectPointCloudProvider :
virtual public visionx::RGBDPointCloudProviderInterface,
virtual public visionx::CapturingPointCloudProvider,
virtual public visionx::ImageProvider
virtual public visionx::ImageProvider,
virtual public armarx::DebugObserverComponentPluginUser
#ifdef INCLUDE_BODY_TRACKING
,virtual public armarx::armem::ClientPluginUser
#endif
@@ -224,6 +226,9 @@
return s.str();
}
std::function<void(armarx::Duration)>
createSwCallback(const std::string& description);
void updateTimestampOffset(const std::chrono::microseconds& k4a_device_timestamp_us,
const std::chrono::nanoseconds& k4a_system_timestamp_ns);
@@ -283,6 +288,10 @@
std::optional<armarx::armem::human::client::Writer> humanPoseWriter;
armarx::RunningTask<AzureKinectPointCloudProvider>::pointer_type bodyTrackingPublishTask;
void runPublishBodyTrackingResults();
#endif
int mDeviceId = K4A_DEVICE_DEFAULT;
@@ -299,6 +308,7 @@
std::string bodyCameraFrameName = "AzureKinectDepthCamera";
std::string robotName = "Armar6";
std::mutex deviceToRealtimeOffsetMtx;
std::chrono::nanoseconds device_to_realtime_offset_{0};
};
}
@@ -5,7 +5,7 @@ find_package(k4abt 1.1 QUIET)
message(STATUS "k4a found: ${k4a_FOUND} k4a version: ${k4a_VERSION}")
armarx_build_if(k4a_FOUND "Azure Kinect SDK not available")
set(COMPONENT_LIBS ArmarXCore ArmarXCoreObservers RobotAPIInterfaces VisionXInterfaces VisionXTools VisionXCore
set(COMPONENT_LIBS ArmarXCore ArmarXCoreObservers RobotAPIInterfaces VisionXInterfaces VisionXTools VisionXCore ArmarXCoreComponentPlugins
VisionXPointCloud ImageToPointCloud human armem_human
k4a::k4a
)
......
@@ -4,7 +4,8 @@
#include <VisionX/libraries/ArViz/HumanPoseK4ABTBody32.h>
#include <VisionX/libraries/ArViz/HumanPoseBody25.h>
#include <VisionX/libraries/human/pose/model/k4a_bt_body_32.h>
#include <VisionX/libraries/human/pose/model/openpose_body_25.h>
namespace armarx
{
@@ -14,11 +15,14 @@ namespace armarx
viz::Layer& layer,
const std::string& prefix)
{
if (pose.poseModelId == "BODY_25")
namespace k4a_bt_body_32 = human::pose::model::k4a_bt_body_32;
namespace openpose_body_25 = human::pose::model::openpose_body_25;
if (pose.poseModelId == openpose_body_25::ModelId)
{
body_25::addPoseToLayer(pose, layer, prefix);
}
else if (pose.poseModelId == "K4ABT_BODY_32")
else if (pose.poseModelId == k4a_bt_body_32::ModelId)
{
k4abt_body_32::addPoseToLayer(pose, layer, prefix);
}
......
@@ -53,6 +53,7 @@
#include <ArmarXCore/core/logging/Logging.h>
#include <RobotAPI/interface/visualization/DebugDrawerInterface.h>
#include <VisionX/interface/components/OpenPoseEstimationInterface.h>
#include <VisionX/libraries/human/pose/model/openpose_body_25.h>
namespace armarx
{
@@ -66,7 +67,7 @@ namespace armarx
{
std::string net_resolution = "-1x368";
std::string output_resolution = "-1x-1";
std::string model_pose = "BODY_25";
std::string model_pose = human::pose::model::openpose_body_25::ModelId;
std::string model_folder = "models/";
double scale_gap = 0.3;
int scale_number = 1;
......