K4AViewer: Move video playback to a separate thread (#191)

Move video playback to a separate thread to fix timing, so that playback speed approximately matches the speed at which events in the recording took place.

Also cleans up much of the 'frame' naming, which predated the concept of an 'image' object in the K4A API, and cleans up the image conversion logic: the converters now convert from one k4a::image to another k4a::image rather than using custom intermediate formats. This will eventually help support RGB point cloud visualization in arbitrary recordings.
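
For context, the timing fix amounts to pacing the playback thread against the recording's frame period: each iteration reads and publishes one capture, then sleeps off whatever is left of the frame interval before reading the next one. A minimal, self-contained sketch of that pacing (PlayOneFrame and pollFn are illustrative names, not identifiers from this change; the real logic lives at the end of PlaybackThreadFn below):

#include <chrono>
#include <functional>
#include <thread>

// One iteration of a paced playback loop: run pollFn (which is expected to read
// the next capture and notify observers), then sleep off the rest of the frame
// period so wall-clock playback speed tracks the recording's frame rate.
inline bool PlayOneFrame(const std::function<bool()> &pollFn, std::chrono::microseconds timePerFrame)
{
    const auto startTime = std::chrono::high_resolution_clock::now();

    if (!pollFn())
    {
        return false; // poll function reported end of data
    }

    const auto processingTime = std::chrono::duration_cast<std::chrono::microseconds>(
        std::chrono::high_resolution_clock::now() - startTime);
    if (timePerFrame > processingTime)
    {
        std::this_thread::sleep_for(timePerFrame - processingTime);
    }
    return true;
}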
Billy Price 2019-03-27 16:50:41 -07:00 committed by GitHub
Parent ff070018a7
Commit dc578399bb
No key matching this signature was found
GPG key ID: 4AEE18F83AFDEB23
25 changed files: 883 additions and 871 deletions

View file

@ -7,7 +7,7 @@ set(SOURCE_FILES
k4aaudiochanneldatagraph.cpp
k4aaudiomanager.cpp
k4aaudiowindow.cpp
k4acolorframevisualizer.cpp
k4acolorimageconverter.cpp
k4adevicedockcontrol.cpp
k4afilepicker.cpp
k4aimguiextensions.cpp

View file

@ -1,75 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef IK4AFRAMEVISUALIZER_H
#define IK4AFRAMEVISUALIZER_H
// System headers
//
#include <memory>
#include <vector>
// Library headers
//
#include <k4a/k4a.hpp>
// Project headers
//
#include "k4aviewerimage.h"
namespace k4aviewer
{
enum class ImageVisualizationResult
{
Success,
OpenGLError,
InvalidBufferSizeError,
InvalidImageDataError,
NoDataError
};
inline ImageVisualizationResult GLEnumToImageVisualizationResult(GLenum error)
{
return error == GL_NO_ERROR ? ImageVisualizationResult::Success : ImageVisualizationResult::OpenGLError;
}
template<k4a_image_format_t ImageFormat> struct K4ATextureBuffer
{
std::vector<uint8_t> Data;
k4a::image SourceImage;
};
template<k4a_image_format_t ImageFormat> class IK4AFrameVisualizer
{
public:
// Creates a new OpenGL texture from the video source and stores it in texture.
//
virtual GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture) = 0;
// Initializes buffer such that it can hold intermediate images of the type
// the visualizer handles
//
virtual void InitializeBuffer(K4ATextureBuffer<ImageFormat> *buffer) = 0;
// Converts image and stores the result in buffer in a format that is appropriate for
// a future call to UpdateTexture
//
virtual ImageVisualizationResult ConvertImage(const k4a::image &image, K4ATextureBuffer<ImageFormat> *buffer) = 0;
// Updates texture in-place with the image stored in buffer.
// UpdateTexture expects to get a texture that was previously initialized by InitializeTexture.
//
virtual ImageVisualizationResult UpdateTexture(const K4ATextureBuffer<ImageFormat> &buffer,
K4AViewerImage *texture) = 0;
virtual ~IK4AFrameVisualizer() = default;
IK4AFrameVisualizer() = default;
IK4AFrameVisualizer(const IK4AFrameVisualizer &) = delete;
IK4AFrameVisualizer(const IK4AFrameVisualizer &&) = delete;
IK4AFrameVisualizer &operator=(const IK4AFrameVisualizer &) = delete;
IK4AFrameVisualizer &operator=(const IK4AFrameVisualizer &&) = delete;
};
} // namespace k4aviewer
#endif

View file

@ -0,0 +1,59 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef IK4AIMAGECONVERTER_H
#define IK4AIMAGECONVERTER_H
// System headers
//
#include <memory>
#include <vector>
// Library headers
//
#include <k4a/k4a.hpp>
// Project headers
//
#include "k4aviewerimage.h"
namespace k4aviewer
{
enum class ImageConversionResult
{
Success,
OpenGLError,
InvalidBufferSizeError,
InvalidImageDataError,
NoDataError
};
inline ImageConversionResult GLEnumToImageConversionResult(GLenum error)
{
return error == GL_NO_ERROR ? ImageConversionResult::Success : ImageConversionResult::OpenGLError;
}
template<k4a_image_format_t ImageFormat> class IK4AImageConverter
{
public:
// Gets the dimensions in pixels of images that this converter expects to receive and produce
//
virtual ImageDimensions GetImageDimensions() const = 0;
// Converts srcImage into a BGRA32-formatted image and stores the result in bgraImage.
// bgraImage must already be allocated and must be an image with the same dimensions that
// GetImageDimensions() returns.
//
virtual ImageConversionResult ConvertImage(const k4a::image &srcImage, k4a::image *bgraImage) = 0;
virtual ~IK4AImageConverter() = default;
IK4AImageConverter() = default;
IK4AImageConverter(const IK4AImageConverter &) = delete;
IK4AImageConverter(const IK4AImageConverter &&) = delete;
IK4AImageConverter &operator=(const IK4AImageConverter &) = delete;
IK4AImageConverter &operator=(const IK4AImageConverter &&) = delete;
};
} // namespace k4aviewer
#endif
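
To drive one of these converters, a caller allocates a BGRA32 destination image that matches GetImageDimensions() and then calls ConvertImage with it. A rough sketch of that calling pattern (ConvertToBgra is an illustrative helper, not part of this header; it assumes ImageDimensions is reachable through this include, as the interface signature implies):

#include <k4a/k4a.hpp>

#include "ik4aimageconverter.h"

// Sketch: allocate a BGRA32 destination of the size the converter expects,
// then convert a single source image into it.
template<k4a_image_format_t ImageFormat>
k4aviewer::ImageConversionResult ConvertToBgra(k4aviewer::IK4AImageConverter<ImageFormat> &converter,
                                               const k4a::image &srcImage,
                                               k4a::image *bgraOut)
{
    const k4aviewer::ImageDimensions dims = converter.GetImageDimensions();

    // BGRA32 is 4 bytes per pixel; the stride is one full row.
    *bgraOut = k4a::image::create(K4A_IMAGE_FORMAT_COLOR_BGRA32,
                                  dims.Width,
                                  dims.Height,
                                  dims.Width * 4);

    return converter.ConvertImage(srcImage, bgraOut);
}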

View file

@ -1,319 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Associated header
//
#include "k4acolorframevisualizer.h"
// System headers
//
#include <algorithm>
#include <string>
// Library headers
//
#include "libyuv.h"
// Clang parses doxygen-style comments in your source and checks for doxygen syntax errors.
// Unfortunately, some of our external dependencies have doxygen syntax errors in them, so
// we need to shut off that warning.
//
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
#pragma clang diagnostic ignored "-Wdocumentation-unknown-command"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#endif
#include "turbojpeg.h"
#ifdef __clang__
#pragma clang diagnostic pop
#endif
// Project headers
//
#include "k4astaticimageproperties.h"
#include "k4aviewererrormanager.h"
#include "perfcounter.h"
using namespace k4aviewer;
class K4AColorFrameVisualizerBase
{
public:
virtual ~K4AColorFrameVisualizerBase() = default;
protected:
explicit K4AColorFrameVisualizerBase(k4a_color_resolution_t colorResolution) :
m_dimensions(GetColorDimensions(colorResolution))
{
m_expectedBufferSize = sizeof(BgraPixel) * static_cast<size_t>(m_dimensions.Width * m_dimensions.Height);
}
size_t m_expectedBufferSize;
ImageDimensions m_dimensions;
};
class K4AYUY2FrameVisualizer : public K4AColorFrameVisualizerBase,
public IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_YUY2>
{
public:
GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture) override
{
// libyuv does not have a function that directly converts from YUY2 to RGBA,
// so we either have to have libyuv convert from YUY2 -> BGRA and then again
// from BGRA -> ARGB, or we have to tell OpenGL to do the conversion as part
// of texture upload. Either way, we incur a performance hit by doing this
// extra conversion.
//
// It looks like OpenGL's conversion is slightly faster than libyuv's, so
// we're using that.
//
return K4AViewerImage::Create(texture, nullptr, m_dimensions, GL_BGRA);
}
void InitializeBuffer(K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_YUY2> *buffer) override
{
buffer->Data.resize(m_expectedBufferSize);
}
ImageVisualizationResult ConvertImage(const k4a::image &image,
K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_YUY2> *buffer) override
{
// YUY2 is a 4:2:2 format, so there are 4 bytes per 'chunk' of data, and each 'chunk' represents 2 pixels.
//
const int stride = m_dimensions.Width * 4 / 2;
const auto expectedBufferSize = static_cast<size_t>(stride * m_dimensions.Height);
if (image.get_size() != expectedBufferSize)
{
return ImageVisualizationResult::InvalidBufferSizeError;
}
static PerfCounter decode("YUY2 decode");
PerfSample decodeSample(&decode);
int result = libyuv::YUY2ToARGB(image.get_buffer(), // src_yuy2,
stride, // src_stride_yuy2,
&buffer->Data[0], // dst_argb,
m_dimensions.Width * static_cast<int>(sizeof(BgraPixel)), // dst_stride_argb,
m_dimensions.Width, // width,
m_dimensions.Height // height
);
decodeSample.End();
if (result != 0)
{
return ImageVisualizationResult::InvalidImageDataError;
}
buffer->SourceImage = image;
return ImageVisualizationResult::Success;
}
ImageVisualizationResult UpdateTexture(const K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_YUY2> &buffer,
K4AViewerImage *texture) override
{
static PerfCounter upload("YUY2 upload");
PerfSample uploadSample(&upload);
return GLEnumToImageVisualizationResult(texture->UpdateTexture(&buffer.Data[0]));
}
K4AYUY2FrameVisualizer(k4a_color_resolution_t resolution) : K4AColorFrameVisualizerBase(resolution) {}
};
class K4ANV12FrameVisualizer : public K4AColorFrameVisualizerBase,
public IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_NV12>
{
public:
GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture) override
{
return K4AViewerImage::Create(texture, nullptr, m_dimensions, GL_RGBA);
}
void InitializeBuffer(K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_NV12> *buffer) override
{
buffer->Data.resize(m_expectedBufferSize);
}
ImageVisualizationResult ConvertImage(const k4a::image &image,
K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_NV12> *buffer) override
{
const int luminanceStride = m_dimensions.Width;
const int hueSatStride = m_dimensions.Width;
const uint8_t *hueSatStart = image.get_buffer() + luminanceStride * m_dimensions.Height;
// NV12 is a 4:2:0 format, so there are half as many hue/sat pixels as luminance pixels
//
const auto expectedBufferSize = static_cast<size_t>(m_dimensions.Height * (luminanceStride + hueSatStride / 2));
if (image.get_size() != expectedBufferSize)
{
return ImageVisualizationResult::InvalidBufferSizeError;
}
// libyuv refers to pixel order in system-endian order but OpenGL refers to
// pixel order in big-endian order, which is why we create the OpenGL texture
// as "RGBA" but then use the "ABGR" libyuv function here.
//
static PerfCounter decode("NV12 decode");
PerfSample decodeSample(&decode);
int result = libyuv::NV12ToABGR(image.get_buffer(), // src_y
luminanceStride, // src_stride_y
hueSatStart, // src_vu
hueSatStride, // src_stride_vu
&buffer->Data[0], // dst_argb
m_dimensions.Width * static_cast<int>(sizeof(BgraPixel)), // dst_stride_argb
m_dimensions.Width, // width
m_dimensions.Height // height
);
decodeSample.End();
if (result != 0)
{
return ImageVisualizationResult::InvalidImageDataError;
}
buffer->SourceImage = image;
return ImageVisualizationResult::Success;
}
ImageVisualizationResult UpdateTexture(const K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_NV12> &buffer,
K4AViewerImage *texture) override
{
static PerfCounter upload("NV12 upload");
PerfSample uploadSample(&upload);
return GLEnumToImageVisualizationResult(texture->UpdateTexture(&buffer.Data[0]));
}
K4ANV12FrameVisualizer(k4a_color_resolution_t resolution) : K4AColorFrameVisualizerBase(resolution) {}
};
class K4ABGRA32FrameVisualizer : public K4AColorFrameVisualizerBase,
public IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_BGRA32>
{
public:
GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture) override
{
return K4AViewerImage::Create(texture, nullptr, m_dimensions, GL_BGRA);
}
void InitializeBuffer(K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_BGRA32> *buffer) override
{
buffer->Data.resize(m_expectedBufferSize);
}
ImageVisualizationResult ConvertImage(const k4a::image &image,
K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_BGRA32> *buffer) override
{
if (image.get_size() != static_cast<size_t>(m_dimensions.Height * m_dimensions.Width) * sizeof(BgraPixel))
{
return ImageVisualizationResult::InvalidBufferSizeError;
}
std::copy(image.get_buffer(), image.get_buffer() + image.get_size(), buffer->Data.begin());
buffer->SourceImage = image;
return ImageVisualizationResult::Success;
}
ImageVisualizationResult UpdateTexture(const K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_BGRA32> &buffer,
K4AViewerImage *texture) override
{
static PerfCounter upload("BGRA32 upload");
PerfSample uploadSample(&upload);
return GLEnumToImageVisualizationResult(texture->UpdateTexture(&buffer.Data[0]));
}
K4ABGRA32FrameVisualizer(k4a_color_resolution_t resolution) : K4AColorFrameVisualizerBase(resolution) {}
};
class K4AMJPGFrameVisualizer : public K4AColorFrameVisualizerBase,
public IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_MJPG>
{
public:
GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture) override
{
return K4AViewerImage::Create(texture, nullptr, m_dimensions, GL_RGBA);
}
void InitializeBuffer(K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_MJPG> *buffer) override
{
buffer->Data.resize(m_expectedBufferSize);
}
ImageVisualizationResult ConvertImage(const k4a::image &image,
K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_MJPG> *buffer) override
{
static PerfCounter mjpgDecode("MJPG decode");
PerfSample decodeSample(&mjpgDecode);
const int decompressStatus = tjDecompress2(m_decompressor,
image.get_buffer(),
static_cast<unsigned long>(image.get_size()),
&buffer->Data[0],
m_dimensions.Width,
0, // pitch
m_dimensions.Height,
TJPF_RGBA,
TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE);
if (decompressStatus != 0)
{
return ImageVisualizationResult::InvalidImageDataError;
}
buffer->SourceImage = image;
return ImageVisualizationResult::Success;
}
ImageVisualizationResult UpdateTexture(const K4ATextureBuffer<K4A_IMAGE_FORMAT_COLOR_MJPG> &buffer,
K4AViewerImage *texture) override
{
static PerfCounter mjpgUpload("MJPG upload");
PerfSample uploadSample(&mjpgUpload);
return GLEnumToImageVisualizationResult(texture->UpdateTexture(&buffer.Data[0]));
}
K4AMJPGFrameVisualizer(k4a_color_resolution_t resolution) :
K4AColorFrameVisualizerBase(resolution),
m_decompressor(tjInitDecompress())
{
}
~K4AMJPGFrameVisualizer() override
{
(void)tjDestroy(m_decompressor);
}
private:
tjhandle m_decompressor;
};
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_YUY2>>
K4AColorFrameVisualizerFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4AYUY2FrameVisualizer>(resolution);
}
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_NV12>>
K4AColorFrameVisualizerFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4ANV12FrameVisualizer>(resolution);
}
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_BGRA32>>
K4AColorFrameVisualizerFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4ABGRA32FrameVisualizer>(resolution);
}
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_MJPG>>
K4AColorFrameVisualizerFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4AMJPGFrameVisualizer>(resolution);
}

View file

@ -1,48 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4ACOLORFRAMEVISUALIZER_H
#define K4ACOLORFRAMEVISUALIZER_H
// System headers
//
#include <memory>
#include <vector>
// Library headers
//
#include "k4aimgui_all.h"
// Project headers
//
#include "ik4aframevisualizer.h"
#include "k4aviewerutil.h"
namespace k4aviewer
{
class K4AColorFrameVisualizerFactory
{
public:
template<k4a_image_format_t ImageFormat>
static std::unique_ptr<IK4AFrameVisualizer<ImageFormat>> Create(k4a_color_resolution_t resolution);
};
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_YUY2>>
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_YUY2>(k4a_color_resolution_t resolution);
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_NV12>>
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_NV12>(k4a_color_resolution_t resolution);
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_BGRA32>>
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_BGRA32>(k4a_color_resolution_t resolution);
template<>
std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_COLOR_MJPG>>
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_MJPG>(k4a_color_resolution_t resolution);
} // namespace k4aviewer
#endif

View file

@ -0,0 +1,265 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Associated header
//
#include "k4acolorimageconverter.h"
// System headers
//
#include <algorithm>
#include <string>
// Library headers
//
#include "libyuv.h"
// Clang parses doxygen-style comments in your source and checks for doxygen syntax errors.
// Unfortunately, some of our external dependencies have doxygen syntax errors in them, so
// we need to shut off that warning.
//
#ifdef __clang__
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wdocumentation"
#pragma clang diagnostic ignored "-Wdocumentation-unknown-command"
#pragma clang diagnostic ignored "-Wdouble-promotion"
#endif
#include "turbojpeg.h"
#ifdef __clang__
#pragma clang diagnostic pop
#endif
// Project headers
//
#include "k4astaticimageproperties.h"
#include "k4aviewererrormanager.h"
#include "perfcounter.h"
using namespace k4aviewer;
template<k4a_image_format_t ImageFormat> class K4AColorImageConverterBase : public IK4AImageConverter<ImageFormat>
{
public:
virtual ~K4AColorImageConverterBase() override = default;
ImageDimensions GetImageDimensions() const override
{
return m_dimensions;
}
protected:
explicit K4AColorImageConverterBase(k4a_color_resolution_t colorResolution) :
m_dimensions(GetColorDimensions(colorResolution))
{
m_expectedBufferSize = sizeof(BgraPixel) * static_cast<size_t>(m_dimensions.Width * m_dimensions.Height);
}
bool ImagesAreCorrectlySized(const k4a::image &srcImage,
const k4a::image &dstImage,
const size_t *srcImageExpectedSize)
{
if (srcImageExpectedSize)
{
if (srcImage.get_size() != *srcImageExpectedSize)
{
return false;
}
}
if (srcImage.get_width_pixels() != dstImage.get_width_pixels())
{
return false;
}
if (srcImage.get_height_pixels() != dstImage.get_height_pixels())
{
return false;
}
return true;
}
size_t m_expectedBufferSize;
ImageDimensions m_dimensions;
};
class K4AYUY2ImageConverter : public K4AColorImageConverterBase<K4A_IMAGE_FORMAT_COLOR_YUY2>
{
public:
ImageConversionResult ConvertImage(const k4a::image &srcImage, k4a::image *bgraImage) override
{
// YUY2 is a 4:2:2 format, so there are 4 bytes per 'chunk' of data, and each 'chunk' represents 2 pixels.
//
const int stride = m_dimensions.Width * 4 / 2;
const size_t expectedBufferSize = static_cast<size_t>(stride * m_dimensions.Height);
if (!ImagesAreCorrectlySized(srcImage, *bgraImage, &expectedBufferSize))
{
return ImageConversionResult::InvalidBufferSizeError;
}
static PerfCounter decode("YUY2 decode");
PerfSample decodeSample(&decode);
int result = libyuv::YUY2ToARGB(srcImage.get_buffer(), // src_yuy2,
stride, // src_stride_yuy2,
bgraImage->get_buffer(), // dst_argb,
m_dimensions.Width * static_cast<int>(sizeof(BgraPixel)), // dst_stride_argb,
m_dimensions.Width, // width,
m_dimensions.Height // height
);
decodeSample.End();
if (result != 0)
{
return ImageConversionResult::InvalidImageDataError;
}
return ImageConversionResult::Success;
}
K4AYUY2ImageConverter(k4a_color_resolution_t resolution) : K4AColorImageConverterBase(resolution) {}
};
class K4ANV12ImageConverter : public K4AColorImageConverterBase<K4A_IMAGE_FORMAT_COLOR_NV12>
{
public:
ImageConversionResult ConvertImage(const k4a::image &srcImage, k4a::image *bgraImage) override
{
const int luminanceStride = m_dimensions.Width;
const int hueSatStride = m_dimensions.Width;
const uint8_t *hueSatStart = srcImage.get_buffer() + luminanceStride * m_dimensions.Height;
// NV12 is a 4:2:0 format, so there are half as many hue/sat pixels as luminance pixels
//
const auto expectedBufferSize = static_cast<size_t>(m_dimensions.Height * (luminanceStride + hueSatStride / 2));
if (!ImagesAreCorrectlySized(srcImage, *bgraImage, &expectedBufferSize))
{
return ImageConversionResult::InvalidBufferSizeError;
}
// libyuv refers to pixel order in system-endian order but OpenGL refers to
// pixel order in big-endian order, which is why we create the OpenGL texture
// as "RGBA" but then use the "ABGR" libyuv function here.
//
static PerfCounter decode("NV12 decode");
PerfSample decodeSample(&decode);
int result = libyuv::NV12ToARGB(srcImage.get_buffer(), // src_y
luminanceStride, // src_stride_y
hueSatStart, // src_vu
hueSatStride, // src_stride_vu
bgraImage->get_buffer(), // dst_argb
m_dimensions.Width * static_cast<int>(sizeof(BgraPixel)), // dst_stride_argb
m_dimensions.Width, // width
m_dimensions.Height // height
);
if (result != 0)
{
return ImageConversionResult::InvalidImageDataError;
}
decodeSample.End();
return ImageConversionResult::Success;
}
K4ANV12ImageConverter(k4a_color_resolution_t resolution) : K4AColorImageConverterBase(resolution) {}
};
class K4ABGRA32ImageConverter : public K4AColorImageConverterBase<K4A_IMAGE_FORMAT_COLOR_BGRA32>
{
public:
ImageConversionResult ConvertImage(const k4a::image &srcImage, k4a::image *bgraImage) override
{
const size_t expectedBufferSize = static_cast<size_t>(m_dimensions.Height * m_dimensions.Width) *
sizeof(BgraPixel);
if (!ImagesAreCorrectlySized(srcImage, *bgraImage, &expectedBufferSize))
{
return ImageConversionResult::InvalidBufferSizeError;
}
std::copy(srcImage.get_buffer(), srcImage.get_buffer() + srcImage.get_size(), bgraImage->get_buffer());
return ImageConversionResult::Success;
}
K4ABGRA32ImageConverter(k4a_color_resolution_t resolution) : K4AColorImageConverterBase(resolution) {}
};
class K4AMJPGImageConverter : public K4AColorImageConverterBase<K4A_IMAGE_FORMAT_COLOR_MJPG>
{
public:
ImageConversionResult ConvertImage(const k4a::image &srcImage, k4a::image *bgraImage) override
{
// MJPG images are not of a consistent size, so we can't compute an expected size
//
if (!ImagesAreCorrectlySized(srcImage, *bgraImage, nullptr))
{
return ImageConversionResult::InvalidBufferSizeError;
}
static PerfCounter mjpgDecode("MJPG decode");
PerfSample decodeSample(&mjpgDecode);
const int decompressStatus = tjDecompress2(m_decompressor,
srcImage.get_buffer(),
static_cast<unsigned long>(srcImage.get_size()),
bgraImage->get_buffer(),
m_dimensions.Width,
0, // pitch
m_dimensions.Height,
TJPF_BGRA,
TJFLAG_FASTDCT | TJFLAG_FASTUPSAMPLE);
if (decompressStatus != 0)
{
return ImageConversionResult::InvalidImageDataError;
}
return ImageConversionResult::Success;
}
K4AMJPGImageConverter(k4a_color_resolution_t resolution) :
K4AColorImageConverterBase(resolution),
m_decompressor(tjInitDecompress())
{
}
~K4AMJPGImageConverter() override
{
(void)tjDestroy(m_decompressor);
}
private:
tjhandle m_decompressor;
};
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_YUY2>>
K4AColorImageConverterFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4AYUY2ImageConverter>(resolution);
}
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_NV12>>
K4AColorImageConverterFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4ANV12ImageConverter>(resolution);
}
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_BGRA32>>
K4AColorImageConverterFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4ABGRA32ImageConverter>(resolution);
}
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_MJPG>>
K4AColorImageConverterFactory::Create(k4a_color_resolution_t resolution)
{
return std14::make_unique<K4AMJPGImageConverter>(resolution);
}
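
As a worked example of the buffer-size checks above, at 720p (1280x720) the YUY2 path expects 1280 * 4 / 2 * 720 = 1,843,200 bytes and the NV12 path expects 720 * (1280 + 1280 / 2) = 1,382,400 bytes. The standalone snippet below just evaluates those two expressions (illustration only, not part of the diff):

#include <cstddef>
#include <cstdio>

int main()
{
    const int width = 1280;  // K4A_COLOR_RESOLUTION_720P
    const int height = 720;

    // YUY2: 4 bytes per 2-pixel chunk, so the stride is width * 4 / 2.
    const std::size_t yuy2Size = static_cast<std::size_t>(width * 4 / 2) * height;

    // NV12: full-resolution luma plane plus a half-height interleaved chroma plane.
    const std::size_t nv12Size = static_cast<std::size_t>(height) * (width + width / 2);

    std::printf("YUY2 720p: %zu bytes\n", yuy2Size); // 1843200
    std::printf("NV12 720p: %zu bytes\n", nv12Size); // 1382400
    return 0;
}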

View file

@ -0,0 +1,48 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4ACOLORIMAGECONVERTER_H
#define K4ACOLORIMAGECONVERTER_H
// System headers
//
#include <memory>
#include <vector>
// Library headers
//
#include "k4aimgui_all.h"
// Project headers
//
#include "ik4aimageconverter.h"
#include "k4aviewerutil.h"
namespace k4aviewer
{
class K4AColorImageConverterFactory
{
public:
template<k4a_image_format_t ImageFormat>
static std::unique_ptr<IK4AImageConverter<ImageFormat>> Create(k4a_color_resolution_t resolution);
};
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_YUY2>>
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_YUY2>(k4a_color_resolution_t resolution);
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_NV12>>
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_NV12>(k4a_color_resolution_t resolution);
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_BGRA32>>
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_BGRA32>(k4a_color_resolution_t resolution);
template<>
std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_COLOR_MJPG>>
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_MJPG>(k4a_color_resolution_t resolution);
} // namespace k4aviewer
#endif

View file

@ -19,7 +19,7 @@
// Project headers
//
#include "ik4aobserver.h"
#include "ik4aframevisualizer.h"
#include "ik4aimageconverter.h"
#include "k4aimageextractor.h"
#include "k4aframeratetracker.h"
#include "k4aringbuffer.h"
@ -27,10 +27,10 @@
namespace k4aviewer
{
template<k4a_image_format_t ImageFormat> class K4AConvertingFrameSourceImpl : public IK4ACaptureObserver
template<k4a_image_format_t ImageFormat> class K4AConvertingImageSourceImpl : public IK4ACaptureObserver
{
public:
inline ImageVisualizationResult GetNextFrame(K4AViewerImage *textureToUpdate, k4a::image *sourceImage)
inline ImageConversionResult GetNextImage(K4AViewerImage *textureToUpdate, k4a::image *sourceImage)
{
if (IsFailed())
{
@ -38,19 +38,19 @@ public:
}
if (!HasData())
{
return ImageVisualizationResult::NoDataError;
return ImageConversionResult::NoDataError;
}
ImageVisualizationResult result = m_frameVisualizer->UpdateTexture(*m_textureBuffers.CurrentItem(),
textureToUpdate);
*sourceImage = m_textureBuffers.CurrentItem()->SourceImage;
GLenum result = textureToUpdate->UpdateTexture(m_textureBuffers.CurrentItem()->Bgra.get_buffer());
*sourceImage = m_textureBuffers.CurrentItem()->Source;
m_textureBuffers.AdvanceRead();
return result;
return GLEnumToImageConversionResult(result);
}
inline GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture)
{
return m_frameVisualizer->InitializeTexture(texture);
return K4AViewerImage::Create(texture, nullptr, m_imageConverter->GetImageDimensions());
}
double GetFrameRate() const
@ -79,7 +79,7 @@ public:
m_failed = true;
}
~K4AConvertingFrameSourceImpl() override
~K4AConvertingImageSourceImpl() override
{
m_workerThreadShouldExit = true;
if (m_workerThread.joinable())
@ -88,19 +88,24 @@ public:
}
};
K4AConvertingFrameSourceImpl(std::unique_ptr<IK4AFrameVisualizer<ImageFormat>> &&frameVisualizer) :
m_frameVisualizer(std::move(frameVisualizer))
K4AConvertingImageSourceImpl(std::unique_ptr<IK4AImageConverter<ImageFormat>> &&imageConverter) :
m_imageConverter(std::move(imageConverter))
{
m_textureBuffers.Initialize(
[this](K4ATextureBuffer<ImageFormat> *buffer) { this->m_frameVisualizer->InitializeBuffer(buffer); });
ImageDimensions dimensions = m_imageConverter->GetImageDimensions();
m_textureBuffers.Initialize([dimensions](ConvertedImagePair *bufferItem) {
bufferItem->Bgra = k4a::image::create(K4A_IMAGE_FORMAT_COLOR_BGRA32,
dimensions.Width,
dimensions.Height,
dimensions.Width * static_cast<int>(sizeof(BgraPixel)));
});
m_workerThread = std::thread(&K4AConvertingFrameSourceImpl::WorkerThread, this);
m_workerThread = std::thread(&K4AConvertingImageSourceImpl::WorkerThread, this);
}
K4AConvertingFrameSourceImpl(K4AConvertingFrameSourceImpl &) = delete;
K4AConvertingFrameSourceImpl(K4AConvertingFrameSourceImpl &&) = delete;
K4AConvertingFrameSourceImpl operator=(K4AConvertingFrameSourceImpl &) = delete;
K4AConvertingFrameSourceImpl operator=(K4AConvertingFrameSourceImpl &&) = delete;
K4AConvertingImageSourceImpl(K4AConvertingImageSourceImpl &) = delete;
K4AConvertingImageSourceImpl(K4AConvertingImageSourceImpl &&) = delete;
K4AConvertingImageSourceImpl operator=(K4AConvertingImageSourceImpl &) = delete;
K4AConvertingImageSourceImpl operator=(K4AConvertingImageSourceImpl &&) = delete;
protected:
void NotifyDataImpl(const k4a::capture &data)
@ -118,7 +123,7 @@ protected:
if (!m_inputImageBuffer.BeginInsert())
{
// Worker thread is backed up. drop the frame
// Worker thread is backed up. drop the image
//
return;
}
@ -129,14 +134,14 @@ protected:
}
private:
static void WorkerThread(K4AConvertingFrameSourceImpl *fs)
static void WorkerThread(K4AConvertingImageSourceImpl *fs)
{
while (!fs->m_workerThreadShouldExit)
{
std::unique_lock<std::mutex> lock(fs->m_mutex);
if (!fs->m_inputImageBuffer.Empty())
{
// Take the image from the frame source
// Take the image from the image source
//
k4a::image imageToConvert = std::move(*fs->m_inputImageBuffer.CurrentItem());
fs->m_inputImageBuffer.AdvanceRead();
@ -144,15 +149,15 @@ private:
if (!fs->m_textureBuffers.BeginInsert())
{
// Our buffer has overflowed. Drop the frame.
// Our buffer has overflowed. Drop the image.
//
continue;
}
ImageVisualizationResult result =
fs->m_frameVisualizer->ConvertImage(imageToConvert, fs->m_textureBuffers.InsertionItem());
ImageConversionResult result =
fs->m_imageConverter->ConvertImage(imageToConvert, &fs->m_textureBuffers.InsertionItem()->Bgra);
if (result != ImageVisualizationResult::Success)
if (result != ImageConversionResult::Success)
{
// We treat visualization failures as fatal. Stop the thread.
//
@ -162,19 +167,30 @@ private:
return;
}
// Save off the source image so the viewer can show things like pixel values
//
fs->m_textureBuffers.InsertionItem()->Source = std::move(imageToConvert);
fs->m_textureBuffers.EndInsert();
fs->m_framerateTracker.NotifyFrame();
}
}
}
ImageVisualizationResult m_failureCode = ImageVisualizationResult::Success;
ImageConversionResult m_failureCode = ImageConversionResult::Success;
bool m_failed = false;
std::unique_ptr<IK4AFrameVisualizer<ImageFormat>> m_frameVisualizer;
std::unique_ptr<IK4AImageConverter<ImageFormat>> m_imageConverter;
static constexpr size_t BufferSize = 2;
K4ARingBuffer<K4ATextureBuffer<ImageFormat>, BufferSize> m_textureBuffers;
struct ConvertedImagePair
{
k4a::image Source;
k4a::image Bgra;
};
K4ARingBuffer<ConvertedImagePair, BufferSize> m_textureBuffers;
K4ARingBuffer<k4a::image, BufferSize> m_inputImageBuffer;
K4AFramerateTracker m_framerateTracker;
@ -186,11 +202,11 @@ private:
};
template<k4a_image_format_t ImageFormat>
class K4AConvertingFrameSource : public K4AConvertingFrameSourceImpl<ImageFormat>
class K4AConvertingImageSource : public K4AConvertingImageSourceImpl<ImageFormat>
{
public:
K4AConvertingFrameSource(std::unique_ptr<IK4AFrameVisualizer<ImageFormat>> &&frameVisualizer) :
K4AConvertingFrameSourceImpl<ImageFormat>(std::move(frameVisualizer))
K4AConvertingImageSource(std::unique_ptr<IK4AImageConverter<ImageFormat>> &&imageConverter) :
K4AConvertingImageSourceImpl<ImageFormat>(std::move(imageConverter))
{
}
};
@ -198,11 +214,11 @@ public:
// On depth captures, we also want to track the temperature
//
template<>
class K4AConvertingFrameSource<K4A_IMAGE_FORMAT_DEPTH16> : public K4AConvertingFrameSourceImpl<K4A_IMAGE_FORMAT_DEPTH16>
class K4AConvertingImageSource<K4A_IMAGE_FORMAT_DEPTH16> : public K4AConvertingImageSourceImpl<K4A_IMAGE_FORMAT_DEPTH16>
{
public:
K4AConvertingFrameSource(std::unique_ptr<IK4AFrameVisualizer<K4A_IMAGE_FORMAT_DEPTH16>> &&frameVisualizer) :
K4AConvertingFrameSourceImpl<K4A_IMAGE_FORMAT_DEPTH16>(std::move(frameVisualizer))
K4AConvertingImageSource(std::unique_ptr<IK4AImageConverter<K4A_IMAGE_FORMAT_DEPTH16>> &&imageConverter) :
K4AConvertingImageSourceImpl<K4A_IMAGE_FORMAT_DEPTH16>(std::move(imageConverter))
{
}
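
The hand-off inside K4AConvertingImageSourceImpl is a small producer/consumer pipeline: the capture observer queues raw k4a::images, the worker thread converts each one into a BGRA32 image while keeping the source alongside it, and the UI thread uploads the BGRA buffer to a texture. A simplified sketch of one worker pass, using std::queue and a mutex as stand-ins for K4ARingBuffer (names and structure are illustrative; the viewer pre-allocates the BGRA buffers instead of creating one per call):

#include <mutex>
#include <queue>
#include <utility>

#include <k4a/k4a.hpp>

#include "ik4aimageconverter.h"

struct ConvertedPairSketch
{
    k4a::image Source; // kept so the UI can still inspect original pixel values
    k4a::image Bgra;   // BGRA32 conversion, ready for texture upload
};

// One pass of the conversion worker: pop a raw image, convert it, push the pair.
template<k4a_image_format_t ImageFormat>
void ConvertPending(k4aviewer::IK4AImageConverter<ImageFormat> &converter,
                    std::mutex &mutex,
                    std::queue<k4a::image> &input,
                    std::queue<ConvertedPairSketch> &output)
{
    k4a::image src;
    {
        std::lock_guard<std::mutex> lock(mutex);
        if (input.empty())
        {
            return;
        }
        src = std::move(input.front());
        input.pop();
    }

    ConvertedPairSketch pair;
    const k4aviewer::ImageDimensions dims = converter.GetImageDimensions();
    pair.Bgra = k4a::image::create(K4A_IMAGE_FORMAT_COLOR_BGRA32,
                                   dims.Width,
                                   dims.Height,
                                   dims.Width * 4);

    if (converter.ConvertImage(src, &pair.Bgra) != k4aviewer::ImageConversionResult::Success)
    {
        return; // conversion failures are treated as fatal in the real source
    }

    pair.Source = std::move(src);

    std::lock_guard<std::mutex> lock(mutex);
    output.push(std::move(pair));
}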

View file

@ -1,40 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4ADEPTHFRAMEVISUALIZER_H
#define K4ADEPTHFRAMEVISUALIZER_H
// System headers
//
// Library headers
//
// Project headers
//
#include "k4adepthsensorframebasevisualizer.h"
#include "k4adepthpixelcolorizer.h"
#include "k4astaticimageproperties.h"
namespace k4aviewer
{
class K4ADepthFrameVisualizer
: public K4ADepthSensorFrameBaseVisualizer<K4A_IMAGE_FORMAT_DEPTH16, K4ADepthPixelColorizer::ColorizeBlueToRed>
{
public:
explicit K4ADepthFrameVisualizer(k4a_depth_mode_t depthMode) :
K4ADepthSensorFrameBaseVisualizer(depthMode, GetDepthModeRange(depthMode))
{
}
~K4ADepthFrameVisualizer() override = default;
K4ADepthFrameVisualizer(const K4ADepthFrameVisualizer &) = delete;
K4ADepthFrameVisualizer(const K4ADepthFrameVisualizer &&) = delete;
K4ADepthFrameVisualizer &operator=(const K4ADepthFrameVisualizer &) = delete;
K4ADepthFrameVisualizer &operator=(const K4ADepthFrameVisualizer &&) = delete;
};
} // namespace k4aviewer
#endif

View file

@ -0,0 +1,40 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4ADEPTHIMAGECONVERTER_H
#define K4ADEPTHIMAGECONVERTER_H
// System headers
//
// Library headers
//
// Project headers
//
#include "k4adepthimageconverterbase.h"
#include "k4adepthpixelcolorizer.h"
#include "k4astaticimageproperties.h"
namespace k4aviewer
{
class K4ADepthImageConverter
: public K4ADepthImageConverterBase<K4A_IMAGE_FORMAT_DEPTH16, K4ADepthPixelColorizer::ColorizeBlueToRed>
{
public:
explicit K4ADepthImageConverter(k4a_depth_mode_t depthMode) :
K4ADepthImageConverterBase(depthMode, GetDepthModeRange(depthMode))
{
}
~K4ADepthImageConverter() override = default;
K4ADepthImageConverter(const K4ADepthImageConverter &) = delete;
K4ADepthImageConverter(const K4ADepthImageConverter &&) = delete;
K4ADepthImageConverter &operator=(const K4ADepthImageConverter &) = delete;
K4ADepthImageConverter &operator=(const K4ADepthImageConverter &&) = delete;
};
} // namespace k4aviewer
#endif

View file

@ -0,0 +1,100 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4ADEPTHIMAGECONVERTERBASE_H
#define K4ADEPTHIMAGECONVERTERBASE_H
// System headers
//
#include <memory>
#include <vector>
// Library headers
//
// Project headers
//
#include "ik4aimageconverter.h"
#include "k4adepthpixelcolorizer.h"
#include "k4astaticimageproperties.h"
#include "k4aviewerutil.h"
#include "perfcounter.h"
namespace k4aviewer
{
template<k4a_image_format_t ImageFormat, DepthPixelVisualizationFunction VisualizationFunction>
class K4ADepthImageConverterBase : public IK4AImageConverter<ImageFormat>
{
public:
explicit K4ADepthImageConverterBase(const k4a_depth_mode_t depthMode,
const std::pair<DepthPixel, DepthPixel> expectedValueRange) :
m_dimensions(GetDepthDimensions(depthMode)),
m_expectedValueRange(expectedValueRange),
m_expectedBufferSize(static_cast<size_t>(m_dimensions.Width * m_dimensions.Height) * sizeof(BgraPixel))
{
}
ImageConversionResult ConvertImage(const k4a::image &srcImage, k4a::image *bgraImage) override
{
const size_t srcImageSize = static_cast<size_t>(m_dimensions.Width * m_dimensions.Height) * sizeof(DepthPixel);
if (srcImage.get_size() != srcImageSize)
{
return ImageConversionResult::InvalidBufferSizeError;
}
if (bgraImage->get_width_pixels() != srcImage.get_width_pixels() ||
bgraImage->get_height_pixels() != srcImage.get_height_pixels())
{
return ImageConversionResult::InvalidBufferSizeError;
}
const uint8_t *src = srcImage.get_buffer();
static PerfCounter render(std::string("Depth sensor<T") + std::to_string(int(ImageFormat)) + "> render");
PerfSample renderSample(&render);
RenderImage(src,
static_cast<size_t>(m_dimensions.Width * m_dimensions.Height) * sizeof(DepthPixel),
bgraImage->get_buffer());
renderSample.End();
return ImageConversionResult::Success;
}
ImageDimensions GetImageDimensions() const override
{
return m_dimensions;
}
~K4ADepthImageConverterBase() override = default;
K4ADepthImageConverterBase(const K4ADepthImageConverterBase &) = delete;
K4ADepthImageConverterBase(const K4ADepthImageConverterBase &&) = delete;
K4ADepthImageConverterBase &operator=(const K4ADepthImageConverterBase &) = delete;
K4ADepthImageConverterBase &operator=(const K4ADepthImageConverterBase &&) = delete;
private:
void RenderImage(const uint8_t *src, const size_t srcSize, uint8_t *dst)
{
const uint8_t *currentSrc = src;
const uint8_t *srcEnd = src + srcSize;
while (currentSrc < srcEnd)
{
const DepthPixel pixelValue = *reinterpret_cast<const DepthPixel *>(currentSrc);
BgraPixel *outputPixel = reinterpret_cast<BgraPixel *>(dst);
*outputPixel = VisualizationFunction(pixelValue, m_expectedValueRange.first, m_expectedValueRange.second);
dst += sizeof(BgraPixel);
currentSrc += sizeof(DepthPixel);
}
}
const ImageDimensions m_dimensions;
const std::pair<DepthPixel, DepthPixel> m_expectedValueRange;
const size_t m_expectedBufferSize;
};
} // namespace k4aviewer
#endif
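
Each 16-bit DepthPixel is mapped through the template's VisualizationFunction parameter to a single BGRA pixel, scaled against the expected value range. A hypothetical greyscale colorizer with that shape is sketched below; the pixel struct and function names are stand-ins, since the viewer's actual BgraPixel and colorizers live in k4aviewerutil.h and k4adepthpixelcolorizer.h:

#include <algorithm>
#include <cstdint>

using DepthPixelSketch = uint16_t;

struct BgraPixelSketch // stand-in for the viewer's BgraPixel
{
    uint8_t Blue;
    uint8_t Green;
    uint8_t Red;
    uint8_t Alpha;
};

// Scale [min, max] to [0, 255], clamping out-of-range depth values, and emit
// the result as an opaque grey pixel.
inline BgraPixelSketch ColorizeGreyscaleSketch(DepthPixelSketch value, DepthPixelSketch min, DepthPixelSketch max)
{
    const int range = std::max(1, static_cast<int>(max) - static_cast<int>(min));
    const int clamped = std::min(std::max(static_cast<int>(value), static_cast<int>(min)), static_cast<int>(max));
    const uint8_t grey = static_cast<uint8_t>((clamped - static_cast<int>(min)) * 255 / range);
    return BgraPixelSketch{ grey, grey, grey, 0xFF };
}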

View file

@ -1,116 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4ADEPTHSENSORFRAMEBASEVISUALIZER_H
#define K4ADEPTHSENSORFRAMEBASEVISUALIZER_H
// System headers
//
#include <memory>
#include <vector>
// Library headers
//
// Project headers
//
#include "ik4aframevisualizer.h"
#include "k4adepthpixelcolorizer.h"
#include "k4astaticimageproperties.h"
#include "k4aviewerutil.h"
#include "perfcounter.h"
namespace k4aviewer
{
template<k4a_image_format_t ImageFormat, DepthPixelVisualizationFunction VisualizationFunction>
class K4ADepthSensorFrameBaseVisualizer : public IK4AFrameVisualizer<ImageFormat>
{
public:
explicit K4ADepthSensorFrameBaseVisualizer(const k4a_depth_mode_t depthMode,
const std::pair<DepthPixel, DepthPixel> expectedValueRange) :
m_dimensions(GetDepthDimensions(depthMode)),
m_expectedValueRange(expectedValueRange),
m_expectedBufferSize(static_cast<size_t>(m_dimensions.Width * m_dimensions.Height) * sizeof(BgraPixel))
{
}
GLenum InitializeTexture(std::shared_ptr<K4AViewerImage> *texture) override
{
return K4AViewerImage::Create(texture, nullptr, m_dimensions, GL_BGRA);
}
void InitializeBuffer(K4ATextureBuffer<ImageFormat> *buffer) override
{
buffer->Data.resize(m_expectedBufferSize);
}
ImageVisualizationResult ConvertImage(const k4a::image &image, K4ATextureBuffer<ImageFormat> *buffer) override
{
const size_t srcImageSize = static_cast<size_t>(m_dimensions.Width * m_dimensions.Height) * sizeof(DepthPixel);
if (image.get_size() != srcImageSize)
{
return ImageVisualizationResult::InvalidBufferSizeError;
}
// Convert the image into an OpenGL texture
//
const uint8_t *src = image.get_buffer();
static PerfCounter render(std::string("Depth sensor<T") + std::to_string(int(ImageFormat)) + "> render");
PerfSample renderSample(&render);
RenderImage(src,
static_cast<size_t>(m_dimensions.Width * m_dimensions.Height) * sizeof(DepthPixel),
&buffer->Data[0]);
renderSample.End();
buffer->SourceImage = image;
return ImageVisualizationResult::Success;
}
ImageVisualizationResult UpdateTexture(const K4ATextureBuffer<ImageFormat> &buffer,
K4AViewerImage *texture) override
{
static PerfCounter upload(std::string("Depth sensor<T") + std::to_string(int(ImageFormat)) + "> upload");
PerfSample uploadSample(&upload);
return GLEnumToImageVisualizationResult(texture->UpdateTexture(&buffer.Data[0]));
}
~K4ADepthSensorFrameBaseVisualizer() override = default;
K4ADepthSensorFrameBaseVisualizer(const K4ADepthSensorFrameBaseVisualizer &) = delete;
K4ADepthSensorFrameBaseVisualizer(const K4ADepthSensorFrameBaseVisualizer &&) = delete;
K4ADepthSensorFrameBaseVisualizer &operator=(const K4ADepthSensorFrameBaseVisualizer &) = delete;
K4ADepthSensorFrameBaseVisualizer &operator=(const K4ADepthSensorFrameBaseVisualizer &&) = delete;
protected:
const ImageDimensions &GetDimensions() const
{
return m_dimensions;
}
private:
void RenderImage(const uint8_t *src, const size_t srcSize, uint8_t *dst)
{
const uint8_t *currentSrc = src;
const uint8_t *srcEnd = src + srcSize;
while (currentSrc < srcEnd)
{
const DepthPixel pixelValue = *reinterpret_cast<const DepthPixel *>(currentSrc);
BgraPixel *outputPixel = reinterpret_cast<BgraPixel *>(dst);
*outputPixel = VisualizationFunction(pixelValue, m_expectedValueRange.first, m_expectedValueRange.second);
dst += sizeof(BgraPixel);
currentSrc += sizeof(DepthPixel);
}
}
const ImageDimensions m_dimensions;
const std::pair<DepthPixel, DepthPixel> m_expectedValueRange;
const size_t m_expectedBufferSize;
};
} // namespace k4aviewer
#endif

View file

@ -863,7 +863,7 @@ bool K4ADeviceDockControl::StartCameras()
bool *pPaused = &m_paused;
bool *pCamerasStarted = &m_camerasStarted;
m_cameraPollingThread = std14::make_unique<K4APollingThread<k4a::capture>>(
m_cameraPollingThread = std14::make_unique<K4APollingThread>(
[pDevice, pCameraDataSource, pPaused, pCamerasStarted]() {
return PollSensor<k4a::capture>("Cameras",
pDevice,
@ -951,20 +951,19 @@ bool K4ADeviceDockControl::StartImu()
bool *pPaused = &m_paused;
bool *pImuStarted = &m_imuStarted;
m_imuPollingThread = std14::make_unique<K4APollingThread<k4a_imu_sample_t>>(
[pDevice, pImuDataSource, pPaused, pImuStarted]() {
return PollSensor<k4a_imu_sample_t>("IMU",
pDevice,
pImuDataSource,
pPaused,
[](k4a::device *device, k4a_imu_sample_t *sample) {
return device->get_imu_sample(sample, ImuPollingTimeout);
},
[pImuStarted](k4a::device *device) {
device->stop_imu();
*pImuStarted = false;
});
});
m_imuPollingThread = std14::make_unique<K4APollingThread>([pDevice, pImuDataSource, pPaused, pImuStarted]() {
return PollSensor<k4a_imu_sample_t>("IMU",
pDevice,
pImuDataSource,
pPaused,
[](k4a::device *device, k4a_imu_sample_t *sample) {
return device->get_imu_sample(sample, ImuPollingTimeout);
},
[pImuStarted](k4a::device *device) {
device->stop_imu();
*pImuStarted = false;
});
});
return true;
}

View file

@ -122,8 +122,8 @@ private:
std::string m_windowTitle;
std::unique_ptr<K4APollingThread<k4a::capture>> m_cameraPollingThread;
std::unique_ptr<K4APollingThread<k4a_imu_sample_t>> m_imuPollingThread;
std::unique_ptr<K4APollingThread> m_cameraPollingThread;
std::unique_ptr<K4APollingThread> m_imuPollingThread;
};
} // namespace k4aviewer

View file

@ -1,38 +0,0 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4AINFRAREDFRAMEVISUALIZER_H
#define K4AINFRAREDFRAMEVISUALIZER_H
// System headers
//
// Library headers
//
// Project headers
//
#include "k4adepthsensorframebasevisualizer.h"
#include "k4adepthpixelcolorizer.h"
namespace k4aviewer
{
class K4AInfraredFrameVisualizer
: public K4ADepthSensorFrameBaseVisualizer<K4A_IMAGE_FORMAT_IR16, K4ADepthPixelColorizer::ColorizeGreyscale>
{
public:
explicit K4AInfraredFrameVisualizer(k4a_depth_mode_t depthMode) :
K4ADepthSensorFrameBaseVisualizer<K4A_IMAGE_FORMAT_IR16, K4ADepthPixelColorizer::ColorizeGreyscale>(
depthMode,
GetIrLevels(depthMode)){};
~K4AInfraredFrameVisualizer() override = default;
K4AInfraredFrameVisualizer(const K4AInfraredFrameVisualizer &) = delete;
K4AInfraredFrameVisualizer(const K4AInfraredFrameVisualizer &&) = delete;
K4AInfraredFrameVisualizer &operator=(const K4AInfraredFrameVisualizer &) = delete;
K4AInfraredFrameVisualizer &operator=(const K4AInfraredFrameVisualizer &&) = delete;
};
} // namespace k4aviewer
#endif

View file

@ -0,0 +1,38 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
#ifndef K4AINFRAREDIMAGECONVERTER_H
#define K4AINFRAREDIMAGECONVERTER_H
// System headers
//
// Library headers
//
// Project headers
//
#include "k4adepthimageconverterbase.h"
#include "k4adepthpixelcolorizer.h"
namespace k4aviewer
{
class K4AInfraredImageConverter
: public K4ADepthImageConverterBase<K4A_IMAGE_FORMAT_IR16, K4ADepthPixelColorizer::ColorizeGreyscale>
{
public:
explicit K4AInfraredImageConverter(k4a_depth_mode_t depthMode) :
K4ADepthImageConverterBase<K4A_IMAGE_FORMAT_IR16, K4ADepthPixelColorizer::ColorizeGreyscale>(depthMode,
GetIrLevels(
depthMode)){};
~K4AInfraredImageConverter() override = default;
K4AInfraredImageConverter(const K4AInfraredImageConverter &) = delete;
K4AInfraredImageConverter(const K4AInfraredImageConverter &&) = delete;
K4AInfraredImageConverter &operator=(const K4AInfraredImageConverter &) = delete;
K4AInfraredImageConverter &operator=(const K4AInfraredImageConverter &&) = delete;
};
} // namespace k4aviewer
#endif

View file

@ -14,7 +14,7 @@
// Project headers
//
#include "k4acolorframevisualizer.h"
#include "k4acolorimageconverter.h"
#include "k4adepthpixelcolorizer.h"
#include "k4astaticimageproperties.h"
#include "k4aviewerutil.h"

View file

@ -31,13 +31,13 @@ void K4APointCloudWindow::Show(K4AWindowPlacementInfo placementInfo)
{
if (m_failed)
{
ImGui::Text("Frame source failed!");
ImGui::Text("Data source failed!");
return;
}
if (m_captureSource->IsFailed())
{
K4AViewerErrorManager::Instance().SetErrorStatus(m_title + ": frame source failed!");
K4AViewerErrorManager::Instance().SetErrorStatus(m_title + ": image source failed!");
m_failed = true;
return;
}

View file

@ -17,12 +17,12 @@
namespace k4aviewer
{
template<typename T> class K4APollingThread
class K4APollingThread
{
public:
K4APollingThread(std::function<bool()> &&pollFn) : m_pollFn(std::move(pollFn))
{
m_thread = std::thread(&K4APollingThread<T>::Run, this);
m_thread = std::thread(&K4APollingThread::Run, this);
}
void Stop()
@ -52,7 +52,7 @@ private:
}
std::thread m_thread;
bool m_shouldExit = false;
volatile bool m_shouldExit = false;
std::function<bool()> m_pollFn;
};
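
After this change, K4APollingThread just runs a bool-returning poll function until it reports failure or Stop() is called, so one class serves both the camera and IMU polling loops. A condensed, self-contained sketch of that shape (the loop body is inferred from the surrounding diff, and std::atomic replaces the volatile flag purely for the sketch):

#include <atomic>
#include <functional>
#include <thread>
#include <utility>

class PollingThreadSketch
{
public:
    explicit PollingThreadSketch(std::function<bool()> &&pollFn) : m_pollFn(std::move(pollFn))
    {
        m_thread = std::thread(&PollingThreadSketch::Run, this);
    }

    void Stop()
    {
        m_shouldExit = true;
        if (m_thread.joinable())
        {
            m_thread.join();
        }
    }

    ~PollingThreadSketch()
    {
        Stop();
    }

private:
    // Keep polling until the poll function fails or we're asked to stop.
    void Run()
    {
        while (!m_shouldExit && m_pollFn())
        {
        }
    }

    std::thread m_thread;
    std::atomic<bool> m_shouldExit{ false };
    std::function<bool()> m_pollFn;
};

// Usage mirrors the diff: e.g. the recording dock control constructs one with a
// lambda that calls PlaybackThreadFn once per frame.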

View file

@ -19,35 +19,41 @@
//
#include "k4aimguiextensions.h"
#include "k4atypeoperators.h"
#include "k4aviewerutil.h"
#include "k4awindowmanager.h"
using namespace k4aviewer;
K4ARecordingDockControl::K4ARecordingDockControl(std::unique_ptr<K4ARecording> &&recording) :
m_recording(std::move(recording))
namespace
{
m_filenameLabel = m_recording->GetPath().filename().c_str();
static constexpr std::chrono::microseconds InvalidSeekTime = std::chrono::microseconds(-1);
}
K4ARecordingDockControl::K4ARecordingDockControl(std::unique_ptr<K4ARecording> &&recording)
{
m_playbackThreadState.SeekTimestamp = InvalidSeekTime;
m_filenameLabel = recording->GetPath().filename().c_str();
// Recording config
//
k4a_record_configuration_t recordConfig = m_recording->GetRecordConfiguation();
m_recordConfiguration = recording->GetRecordConfiguation();
std::stringstream fpsSS;
fpsSS << recordConfig.camera_fps;
fpsSS << m_recordConfiguration.camera_fps;
m_fpsLabel = fpsSS.str();
switch (recordConfig.camera_fps)
switch (m_recordConfiguration.camera_fps)
{
case K4A_FRAMES_PER_SECOND_5:
m_timePerFrame = std::chrono::microseconds(std::micro::den / (std::micro::num * 5));
m_playbackThreadState.TimePerFrame = std::chrono::microseconds(std::micro::den / (std::micro::num * 5));
break;
case K4A_FRAMES_PER_SECOND_15:
m_timePerFrame = std::chrono::microseconds(std::micro::den / (std::micro::num * 15));
m_playbackThreadState.TimePerFrame = std::chrono::microseconds(std::micro::den / (std::micro::num * 15));
break;
case K4A_FRAMES_PER_SECOND_30:
default:
m_timePerFrame = std::chrono::microseconds(std::micro::den / (std::micro::num * 30));
m_playbackThreadState.TimePerFrame = std::chrono::microseconds(std::micro::den / (std::micro::num * 30));
break;
}
@ -55,11 +61,11 @@ K4ARecordingDockControl::K4ARecordingDockControl(std::unique_ptr<K4ARecording> &
// We don't record a depth track if the camera is started in passive IR mode
//
m_recordingHasDepth = recordConfig.depth_track_enabled;
m_recordingHasDepth = m_recordConfiguration.depth_track_enabled;
std::stringstream depthSS;
if (m_recordingHasDepth)
{
depthSS << recordConfig.depth_mode;
depthSS << m_recordConfiguration.depth_mode;
}
else
{
@ -67,13 +73,13 @@ K4ARecordingDockControl::K4ARecordingDockControl(std::unique_ptr<K4ARecording> &
}
m_depthModeLabel = depthSS.str();
m_recordingHasColor = recordConfig.color_track_enabled;
m_recordingHasColor = m_recordConfiguration.color_track_enabled;
std::stringstream colorResolutionSS;
std::stringstream colorFormatSS;
if (m_recordingHasColor)
{
colorFormatSS << recordConfig.color_format;
colorResolutionSS << recordConfig.color_resolution;
colorFormatSS << m_recordConfiguration.color_format;
colorResolutionSS << m_recordConfiguration.color_resolution;
}
else
{
@ -86,30 +92,37 @@ K4ARecordingDockControl::K4ARecordingDockControl(std::unique_ptr<K4ARecording> &
// Sync info
//
m_depthDelayOffColorUsec = recordConfig.depth_delay_off_color_usec;
m_depthDelayOffColorUsec = m_recordConfiguration.depth_delay_off_color_usec;
std::stringstream syncModeSS;
syncModeSS << recordConfig.wired_sync_mode;
syncModeSS << m_recordConfiguration.wired_sync_mode;
m_wiredSyncModeLabel = syncModeSS.str();
m_subordinateDelayOffMasterUsec = recordConfig.subordinate_delay_off_master_usec;
m_startTimestampOffsetUsec = recordConfig.start_timestamp_offset_usec;
m_subordinateDelayOffMasterUsec = m_recordConfiguration.subordinate_delay_off_master_usec;
m_startTimestampOffsetUsec = m_recordConfiguration.start_timestamp_offset_usec;
m_playbackThreadState.TimestampOffset = std::chrono::microseconds(m_startTimestampOffsetUsec);
m_recordingLengthUsec = recording->GetRecordingLength();
// Device info
//
if (K4A_BUFFER_RESULT_SUCCEEDED != m_recording->GetTag("K4A_DEVICE_SERIAL_NUMBER", &m_deviceSerialNumber))
if (K4A_BUFFER_RESULT_SUCCEEDED != recording->GetTag("K4A_DEVICE_SERIAL_NUMBER", &m_deviceSerialNumber))
{
m_deviceSerialNumber = noneStr;
}
if (K4A_BUFFER_RESULT_SUCCEEDED != m_recording->GetTag("K4A_COLOR_FIRMWARE_VERSION", &m_colorFirmwareVersion))
if (K4A_BUFFER_RESULT_SUCCEEDED != recording->GetTag("K4A_COLOR_FIRMWARE_VERSION", &m_colorFirmwareVersion))
{
m_colorFirmwareVersion = noneStr;
}
if (K4A_BUFFER_RESULT_SUCCEEDED != m_recording->GetTag("K4A_DEPTH_FIRMWARE_VERSION", &m_depthFirmwareVersion))
if (K4A_BUFFER_RESULT_SUCCEEDED != recording->GetTag("K4A_DEPTH_FIRMWARE_VERSION", &m_depthFirmwareVersion))
{
m_depthFirmwareVersion = noneStr;
}
m_playbackThreadState.Recording = std::move(recording);
PlaybackThreadState *pThreadState = &m_playbackThreadState;
m_playbackThread = std::unique_ptr<K4APollingThread>(
new K4APollingThread([pThreadState]() { return PlaybackThreadFn(pThreadState); }));
SetViewType(K4AWindowSet::ViewType::Normal);
}
@ -139,6 +152,7 @@ void K4ARecordingDockControl::Show()
ImGui::Text("Sync mode: %s", m_wiredSyncModeLabel.c_str());
ImGui::Text("Subordinate delay (us): %d", m_subordinateDelayOffMasterUsec);
ImGui::Text("Start timestamp offset: %d", m_startTimestampOffsetUsec);
ImGui::Text("Recording Length (us): %lu", m_recordingLengthUsec);
ImGui::Separator();
ImGui::Text("%s", "Device info");
@ -149,123 +163,172 @@ void K4ARecordingDockControl::Show()
if (ImGui::Button("<|"))
{
Step(true);
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
m_playbackThreadState.Step = StepDirection::Backward;
m_playbackThreadState.Paused = true;
}
ImGui::SameLine();
const int64_t seekMin = 0;
const int64_t seekMax = static_cast<int64_t>(m_recording->GetRecordingLength());
if (ImGui::SliderScalar("##seek", ImGuiDataType_S64, &m_currentTimestamp, &seekMin, &seekMax, ""))
const uint64_t seekMin = 0;
const uint64_t seekMax = m_recordingLengthUsec;
uint64_t currentTimestampUs;
bool paused;
{
m_recording->SeekTimestamp(static_cast<int64_t>(m_currentTimestamp.count()));
// The seek timestamp may end up in the middle of a capture, read backwards and forwards again to get a full
// capture.
(void)m_recording->GetPreviousCapture();
Step(false);
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
currentTimestampUs = static_cast<uint64_t>(m_playbackThreadState.CurrentCaptureTimestamp.count());
paused = m_playbackThreadState.Paused;
}
if (ImGui::SliderScalar("##seek", ImGuiDataType_U64, &currentTimestampUs, &seekMin, &seekMax, ""))
{
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
m_playbackThreadState.SeekTimestamp = std::chrono::microseconds(currentTimestampUs);
m_playbackThreadState.Paused = true;
}
ImGui::SameLine();
if (ImGui::Button("|>"))
{
Step(false);
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
m_playbackThreadState.Step = StepDirection::Forward;
m_playbackThreadState.Paused = true;
}
if (ImGui::Button("<<"))
{
m_recording->SeekTimestamp(0);
Step(false);
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
m_playbackThreadState.SeekTimestamp = std::chrono::microseconds(0);
m_playbackThreadState.Step = StepDirection::Forward;
m_playbackThreadState.Paused = true;
}
ImGui::SameLine();
if (ImGui::Button(m_paused ? ">" : "||"))
if (ImGui::Button(paused ? ">" : "||"))
{
m_paused = !m_paused;
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
m_playbackThreadState.Paused = !m_playbackThreadState.Paused;
}
ImGui::SameLine();
if (ImGui::Button(">>"))
{
m_recording->SeekTimestamp(static_cast<int64_t>(m_recording->GetRecordingLength() + 1));
Step(true);
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
m_playbackThreadState.SeekTimestamp = std::chrono::microseconds(m_recordingLengthUsec + 1);
m_playbackThreadState.Step = StepDirection::Forward;
m_playbackThreadState.Paused = true;
}
K4AWindowSet::ShowModeSelector(&m_viewType, true, m_recordingHasDepth, [this](K4AWindowSet::ViewType t) {
return this->SetViewType(t);
});
ReadNext();
}
void K4ARecordingDockControl::ReadNext()
bool K4ARecordingDockControl::PlaybackThreadFn(PlaybackThreadState *state)
{
if (m_paused)
std::chrono::high_resolution_clock::time_point startTime = std::chrono::high_resolution_clock::now();
std::unique_lock<std::mutex> lock(state->Mutex);
k4a::capture backseekCapture;
if (state->SeekTimestamp != InvalidSeekTime)
{
return;
state->Recording->SeekTimestamp(state->SeekTimestamp.count());
// The seek timestamp may end up in the middle of a capture, read backwards and forwards
// again to get a full capture.
//
// If the read-forward fails after this, it means we seeked to the end of the file, and
// this capture is the last capture in the file, so we actually do want to use this
// capture, so we need to keep it until we determine if that happened.
//
backseekCapture = state->Recording->GetPreviousCapture();
state->SeekTimestamp = InvalidSeekTime;
// Force-read the next frame
//
state->Step = StepDirection::Forward;
}
// Only show the next frame if enough time has elapsed since we showed the last one
//
auto now = std::chrono::high_resolution_clock::now();
std::chrono::high_resolution_clock::duration timeSinceLastFrame = now - m_lastFrameShownTime;
if (!m_currentCapture || timeSinceLastFrame >= m_timePerFrame)
bool backward = false;
if (state->Step != StepDirection::None)
{
k4a::capture nextCapture(m_recording->GetNextCapture());
if (nextCapture == nullptr)
backward = state->Step == StepDirection::Backward;
state->Step = StepDirection::None;
// We don't want to restart from the beginning after stepping
// under most circumstances. If the user stepped to the last
// capture, we'll pick up on that when we try to read it and
// re-set this flag.
//
state->RecordingAtEnd = false;
}
else if (state->Paused)
{
return true;
}
else if (!state->Paused && state->RecordingAtEnd)
{
// Someone hit 'play' after the recording ended, so we need
// to restart from the beginning
//
state->Recording->SeekTimestamp(0);
state->RecordingAtEnd = false;
}
k4a::capture nextCapture = backward ? state->Recording->GetPreviousCapture() : state->Recording->GetNextCapture();
if (nextCapture == nullptr)
{
// We're at the end of the file.
//
state->RecordingAtEnd = true;
state->Paused = true;
if (backseekCapture != nullptr)
{
// Recording ended
// We reached EOF as a result of an explicit seek operation, and
// the backseek capture is the last capture in the file.
//
m_paused = true;
m_recording->SeekTimestamp(0);
return;
nextCapture = std::move(backseekCapture);
}
else
{
SetCurrentCapture(std::move(nextCapture));
// Recording ended
//
return true;
}
}
}
void K4ARecordingDockControl::Step(bool backward)
{
m_paused = true;
k4a::capture capture = backward ? m_recording->GetPreviousCapture() : m_recording->GetNextCapture();
if (capture)
{
SetCurrentCapture(std::move(capture));
}
else
{
// End of recording, keep displaying the last capture.
//
capture = backward ? m_recording->GetNextCapture() : m_recording->GetPreviousCapture();
SetCurrentCapture(std::move(capture));
}
}
void K4ARecordingDockControl::SetCurrentCapture(k4a::capture &&capture)
{
m_currentCapture = std::move(capture);
m_lastFrameShownTime = std::chrono::high_resolution_clock::now();
m_currentTimestamp = GetCaptureTimestamp(m_currentCapture);
state->CurrentCaptureTimestamp = GetCaptureTimestamp(nextCapture);
// Update the images' timestamps using the timing data embedded in the recording
// so we show comparable timestamps when playing back synchronized recordings
//
std::chrono::microseconds offset(m_startTimestampOffsetUsec);
k4a::image images[] = { m_currentCapture.get_color_image(),
m_currentCapture.get_depth_image(),
m_currentCapture.get_ir_image() };
k4a::image images[] = { nextCapture.get_color_image(), nextCapture.get_depth_image(), nextCapture.get_ir_image() };
for (k4a::image &image : images)
{
if (image)
{
image.set_timestamp(image.get_timestamp() + offset);
image.set_timestamp(image.get_timestamp() + state->TimestampOffset);
}
}
m_cameraDataSource.NotifyObservers(m_currentCapture);
state->DataSource.NotifyObservers(nextCapture);
lock.unlock();
// Account for the time we spent getting captures and such when figuring out how long to wait
// before starting the next frame
//
std::chrono::high_resolution_clock::time_point endTime = std::chrono::high_resolution_clock::now();
std::chrono::high_resolution_clock::duration processingTime = endTime - startTime;
auto processingTimeUs = std::chrono::duration_cast<std::chrono::microseconds>(processingTime);
std::chrono::microseconds sleepTime = state->TimePerFrame - processingTimeUs;
if (sleepTime > std::chrono::microseconds(0))
{
std::this_thread::sleep_for(sleepTime);
}
return true;
}
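The playback thread and the UI communicate only through PlaybackThreadState under its mutex: the UI sets Paused, Step, or SeekTimestamp, and the thread consumes them on its next pass. As a rough sketch of the UI side of that handshake (RequestSeek is a hypothetical helper, not part of this commit, and it assumes the PlaybackThreadState layout shown in the header diff below):
// Hypothetical UI-side helper: ask the playback thread to jump to a new position.
// On its next iteration the thread sees SeekTimestamp != InvalidSeekTime, back-seeks
// one capture, then force-reads forward so a full capture exists at the target.
void RequestSeek(PlaybackThreadState *state, std::chrono::microseconds target)
{
    std::lock_guard<std::mutex> lock(state->Mutex);
    state->SeekTimestamp = target;
}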
std::chrono::microseconds K4ARecordingDockControl::GetCaptureTimestamp(const k4a::capture &capture)
@ -303,36 +366,35 @@ std::chrono::microseconds K4ARecordingDockControl::GetCaptureTimestamp(const k4a
void K4ARecordingDockControl::SetViewType(K4AWindowSet::ViewType viewType)
{
K4AWindowManager::Instance().ClearWindows();
k4a_record_configuration_t recordConfig = m_recording->GetRecordConfiguation();
std::lock_guard<std::mutex> lock(m_playbackThreadState.Mutex);
switch (viewType)
{
case K4AWindowSet::ViewType::Normal:
K4AWindowSet::StartNormalWindows(m_filenameLabel.c_str(),
&m_cameraDataSource,
&m_playbackThreadState.DataSource,
nullptr, // IMU playback not supported yet
nullptr, // Audio source - sound is not supported in recordings
m_recordingHasDepth,
recordConfig.depth_mode,
m_recordConfiguration.depth_mode,
m_recordingHasColor,
recordConfig.color_format,
recordConfig.color_resolution);
m_recordConfiguration.color_format,
m_recordConfiguration.color_resolution);
break;
case K4AWindowSet::ViewType::PointCloudViewer:
k4a::calibration calibration;
const k4a_result_t result = m_recording->GetCalibration(&calibration);
const k4a_result_t result = m_playbackThreadState.Recording->GetCalibration(&calibration);
if (result != K4A_RESULT_SUCCEEDED)
{
return;
}
bool colorPointCloudAvailable = m_recording->GetRecordConfiguation().color_track_enabled &&
m_recording->GetRecordConfiguation().color_format ==
K4A_IMAGE_FORMAT_COLOR_BGRA32;
bool colorPointCloudAvailable = m_recordConfiguration.color_track_enabled &&
m_recordConfiguration.color_format == K4A_IMAGE_FORMAT_COLOR_BGRA32;
K4AWindowSet::StartPointCloudWindow(m_filenameLabel.c_str(),
std::move(calibration),
&m_cameraDataSource,
&m_playbackThreadState.DataSource,
colorPointCloudAvailable);
break;
}


@ -16,6 +16,7 @@
//
#include "ik4adockcontrol.h"
#include "k4adatasource.h"
#include "k4apollingthread.h"
#include "k4arecording.h"
#include "k4awindowset.h"
@ -30,16 +31,45 @@ public:
void Show() override;
private:
std::chrono::microseconds GetCaptureTimestamp(const k4a::capture &capture);
enum class StepDirection
{
None,
Forward,
Backward
};
struct PlaybackThreadState
{
std::mutex Mutex;
// UI control signals
//
bool Paused = false;
bool RecordingAtEnd = false;
StepDirection Step = StepDirection::None;
std::chrono::microseconds SeekTimestamp;
std::chrono::microseconds CurrentCaptureTimestamp;
// Constant state (expected to be set once, accessible without synchronization)
//
std::chrono::microseconds TimePerFrame;
std::chrono::microseconds TimestampOffset = std::chrono::microseconds(0);
// Recording state
//
std::shared_ptr<K4ARecording> Recording;
K4ADataSource<k4a::capture> DataSource;
} m_playbackThreadState;
static bool PlaybackThreadFn(PlaybackThreadState *state);
static std::chrono::microseconds GetCaptureTimestamp(const k4a::capture &capture);
void SetViewType(K4AWindowSet::ViewType viewType);
void ReadNext();
void Step(bool backward);
void SetCurrentCapture(k4a::capture &&capture);
std::unique_ptr<K4ARecordingDockControl> m_dockControl;
std::shared_ptr<K4ARecording> m_recording;
// Labels / static UI state
//
k4a_record_configuration_t m_recordConfiguration;
std::string m_filenameLabel;
std::string m_fpsLabel;
@ -51,6 +81,7 @@ private:
std::string m_wiredSyncModeLabel;
uint32_t m_subordinateDelayOffMasterUsec;
uint32_t m_startTimestampOffsetUsec;
uint64_t m_recordingLengthUsec;
std::string m_deviceSerialNumber;
std::string m_colorFirmwareVersion;
@ -59,16 +90,9 @@ private:
bool m_recordingHasColor = false;
bool m_recordingHasDepth = false;
std::chrono::microseconds m_currentTimestamp = std::chrono::microseconds::zero();
K4ADataSource<k4a::capture> m_cameraDataSource;
std::chrono::high_resolution_clock::time_point m_lastFrameShownTime;
std::chrono::microseconds m_timePerFrame;
k4a::capture m_currentCapture;
bool m_paused = false;
K4AWindowSet::ViewType m_viewType = K4AWindowSet::ViewType::Normal;
std::unique_ptr<K4APollingThread> m_playbackThread;
};
} // namespace k4aviewer
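The header now includes k4apollingthread.h and owns a std::unique_ptr<K4APollingThread>, so the dock control is expected to start the loop itself, presumably in its constructor. A minimal sketch of that start-up, assuming K4APollingThread takes a callable that is polled until it returns false (the exact constructor is not shown in this diff):
// Sketch: run PlaybackThreadFn on the polling thread.  Returning true keeps the
// loop alive, so playback continues until the dock control tears the thread down.
m_playbackThread = std14::make_unique<K4APollingThread>(
    [this]() { return PlaybackThreadFn(&m_playbackThreadState); });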


@ -24,24 +24,26 @@ using namespace k4aviewer;
// Template specialization for RenderInfoPane. Lets us show pixel value for the depth viewer.
//
template<>
void K4AVideoWindow<K4A_IMAGE_FORMAT_DEPTH16>::RenderInfoPane(const k4a::image &frame, const ImVec2 hoveredPixel)
void K4AVideoWindow<K4A_IMAGE_FORMAT_DEPTH16>::RenderInfoPane(const k4a::image &image, const ImVec2 hoveredPixel)
{
RenderBasicInfoPane(frame);
RenderHoveredDepthPixelValue(frame, hoveredPixel, "mm");
RenderBasicInfoPane(image);
RenderHoveredDepthPixelValue(image, hoveredPixel, "mm");
const float sensorTemp = m_imageSource->GetLastSensorTemperature();
// In recordings, there is no sensor temperature, so it's set to NaN.
//
if (!std::isnan(m_frameSource->GetLastSensorTemperature()))
if (!std::isnan(sensorTemp))
{
ImGui::Text("Sensor temperature: %.2f C", double(m_frameSource->GetLastSensorTemperature()));
ImGui::Text("Sensor temperature: %.2f C", double(sensorTemp));
}
}
template<>
void K4AVideoWindow<K4A_IMAGE_FORMAT_IR16>::RenderInfoPane(const k4a::image &frame, const ImVec2 hoveredPixel)
void K4AVideoWindow<K4A_IMAGE_FORMAT_IR16>::RenderInfoPane(const k4a::image &image, const ImVec2 hoveredPixel)
{
RenderBasicInfoPane(frame);
RenderHoveredDepthPixelValue(frame, hoveredPixel, "");
RenderBasicInfoPane(image);
RenderHoveredDepthPixelValue(image, hoveredPixel, "");
}
#ifdef __clang__


@ -16,7 +16,7 @@
// Project headers
//
#include "ik4avisualizationwindow.h"
#include "k4aconvertingframesource.h"
#include "k4aconvertingimagesource.h"
#include "k4aviewererrormanager.h"
#include "k4aviewersettingsmanager.h"
#include "k4awindowsizehelpers.h"
@ -27,13 +27,13 @@ namespace k4aviewer
template<k4a_image_format_t ImageFormat> class K4AVideoWindow : public IK4AVisualizationWindow
{
public:
K4AVideoWindow(std::string &&title, std::shared_ptr<K4AConvertingFrameSource<ImageFormat>> frameSource) :
m_frameSource(std::move(frameSource)),
K4AVideoWindow(std::string &&title, std::shared_ptr<K4AConvertingImageSource<ImageFormat>> imageSource) :
m_imageSource(std::move(imageSource)),
m_title(std::move(title))
{
const GLenum initResult = m_frameSource->InitializeTexture(&m_currentTexture);
const ImageVisualizationResult ivr = GLEnumToImageVisualizationResult(initResult);
if (ivr != ImageVisualizationResult::Success)
const GLenum initResult = m_imageSource->InitializeTexture(&m_currentTexture);
const ImageConversionResult ivr = GLEnumToImageConversionResult(initResult);
if (ivr != ImageConversionResult::Success)
{
SetFailed(ivr);
}
@ -49,39 +49,21 @@ public:
return;
}
RenderVideoFrame(placementInfo.Size);
}
const char *GetTitle() const override
{
return m_title.c_str();
}
K4AVideoWindow(const K4AVideoWindow &) = delete;
K4AVideoWindow &operator=(const K4AVideoWindow &) = delete;
K4AVideoWindow(const K4AVideoWindow &&) = delete;
K4AVideoWindow &operator=(const K4AVideoWindow &&) = delete;
private:
const ImVec2 InvalidHoveredPixel = ImVec2(-1, -1);
void RenderVideoFrame(ImVec2 maxSize)
{
if (m_failed)
{
return;
}
const ImageVisualizationResult nextFrameResult = m_frameSource->GetNextFrame(m_currentTexture.get(),
&m_currentImage);
if (nextFrameResult == ImageVisualizationResult::NoDataError)
const ImageConversionResult nextImageResult = m_imageSource->GetNextImage(m_currentTexture.get(),
&m_currentImage);
if (nextImageResult == ImageConversionResult::NoDataError)
{
// We don't have data from the camera yet; show the window with the default black texture.
// Continue rendering the window.
}
else if (nextFrameResult != ImageVisualizationResult::Success)
else if (nextImageResult != ImageConversionResult::Success)
{
SetFailed(nextFrameResult);
SetFailed(nextImageResult);
return;
}
@ -97,7 +79,7 @@ private:
// Compute how big we can make the image
//
const ImVec2 displayDimensions = GetMaxImageSize(sourceImageDimensions, maxSize);
const ImVec2 displayDimensions = GetMaxImageSize(sourceImageDimensions, placementInfo.Size);
ImGui::Image(static_cast<ImTextureID>(*m_currentTexture), displayDimensions);
@ -145,6 +127,19 @@ private:
}
}
const char *GetTitle() const override
{
return m_title.c_str();
}
K4AVideoWindow(const K4AVideoWindow &) = delete;
K4AVideoWindow &operator=(const K4AVideoWindow &) = delete;
K4AVideoWindow(const K4AVideoWindow &&) = delete;
K4AVideoWindow &operator=(const K4AVideoWindow &&) = delete;
private:
const ImVec2 InvalidHoveredPixel = ImVec2(-1, -1);
void RenderInfoPane(const k4a::image &image, ImVec2 hoveredPixel)
{
(void)hoveredPixel;
@ -155,7 +150,7 @@ private:
{
if (K4AViewerSettingsManager::Instance().GetShowFrameRateInfo())
{
ImGui::Text("Average frame rate: %.2f fps", m_frameSource->GetFrameRate());
ImGui::Text("Average frame rate: %.2f fps", m_imageSource->GetFrameRate());
}
ImGui::Text("Timestamp: %llu", static_cast<long long int>(image.get_timestamp().count()));
@ -183,21 +178,21 @@ private:
// Sets the window failed with an error message derived from errorCode
//
void SetFailed(const ImageVisualizationResult errorCode)
void SetFailed(const ImageConversionResult errorCode)
{
std::stringstream errorBuilder;
errorBuilder << m_title << ": ";
switch (errorCode)
{
case ImageVisualizationResult::InvalidBufferSizeError:
case ImageConversionResult::InvalidBufferSizeError:
errorBuilder << "received an unexpected amount of data!";
break;
case ImageVisualizationResult::InvalidImageDataError:
case ImageConversionResult::InvalidImageDataError:
errorBuilder << "received malformed image data!";
break;
case ImageVisualizationResult::OpenGLError:
case ImageConversionResult::OpenGLError:
errorBuilder << "failed to upload image to OpenGL!";
break;
@ -211,7 +206,7 @@ private:
m_failed = true;
}
std::shared_ptr<K4AConvertingFrameSource<ImageFormat>> m_frameSource;
std::shared_ptr<K4AConvertingImageSource<ImageFormat>> m_imageSource;
std::string m_title;
bool m_failed = false;


@ -39,7 +39,7 @@ public:
static GLenum Create(std::shared_ptr<K4AViewerImage> *out,
const uint8_t *data,
ImageDimensions dimensions,
GLenum format,
GLenum format = GL_BGRA,
GLenum internalFormat = GL_RGBA8);
explicit operator ImTextureID() const;
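With these defaults, callers only need to pass a destination, a pixel buffer, and dimensions for the common BGRA32 case. A minimal usage sketch, assuming ImageDimensions exposes Width and Height and using placeholder buffer contents:
// Sketch: upload a solid white BGRA image using the new GL_BGRA / GL_RGBA8 defaults.
std::shared_ptr<K4AViewerImage> texture;
const ImageDimensions dimensions{ 640, 576 };
std::vector<uint8_t> bgra(static_cast<size_t>(dimensions.Width) * dimensions.Height * 4, 0xFF);
const GLenum status = K4AViewerImage::Create(&texture, bgra.data(), dimensions);
if (status != GL_NO_ERROR)
{
    // Handle the upload failure, e.g. by reporting through K4AViewerErrorManager.
}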


@ -14,12 +14,12 @@
// Project headers
//
#include "k4aaudiowindow.h"
#include "k4acolorframevisualizer.h"
#include "k4adepthframevisualizer.h"
#include "k4acolorimageconverter.h"
#include "k4adepthimageconverter.h"
#include "k4aimguiextensions.h"
#include "k4aimusamplesource.h"
#include "k4aimuwindow.h"
#include "k4ainfraredframevisualizer.h"
#include "k4ainfraredimageconverter.h"
#include "k4apointcloudwindow.h"
#include "k4awindowmanager.h"
@ -32,14 +32,14 @@ template<k4a_image_format_t ImageFormat>
void CreateVideoWindow(const char *sourceIdentifier,
const char *windowTitle,
K4ADataSource<k4a::capture> &cameraDataSource,
std::shared_ptr<K4AConvertingFrameSource<ImageFormat>> &&frameSource)
std::shared_ptr<K4AConvertingImageSource<ImageFormat>> &&imageSource)
{
std::string title = std::string(sourceIdentifier) + ": " + windowTitle;
cameraDataSource.RegisterObserver(frameSource);
cameraDataSource.RegisterObserver(imageSource);
std::unique_ptr<IK4AVisualizationWindow> window(
std14::make_unique<K4AVideoWindow<ImageFormat>>(std::move(title), frameSource));
std14::make_unique<K4AVideoWindow<ImageFormat>>(std::move(title), imageSource));
K4AWindowManager::Instance().AddWindow(std::move(window));
}
@ -86,8 +86,8 @@ void K4AWindowSet::StartNormalWindows(const char *sourceIdentifier,
CreateVideoWindow(sourceIdentifier,
"Infrared Camera",
*cameraDataSource,
std::make_shared<K4AConvertingFrameSource<K4A_IMAGE_FORMAT_IR16>>(
std14::make_unique<K4AInfraredFrameVisualizer>(depthMode)));
std::make_shared<K4AConvertingImageSource<K4A_IMAGE_FORMAT_IR16>>(
std14::make_unique<K4AInfraredImageConverter>(depthMode)));
// K4A_DEPTH_MODE_PASSIVE_IR doesn't support actual depth
//
@ -96,8 +96,8 @@ void K4AWindowSet::StartNormalWindows(const char *sourceIdentifier,
CreateVideoWindow(sourceIdentifier,
"Depth Camera",
*cameraDataSource,
std::make_shared<K4AConvertingFrameSource<K4A_IMAGE_FORMAT_DEPTH16>>(
std14::make_unique<K4ADepthFrameVisualizer>(depthMode)));
std::make_shared<K4AConvertingImageSource<K4A_IMAGE_FORMAT_DEPTH16>>(
std14::make_unique<K4ADepthImageConverter>(depthMode)));
}
}
@ -112,8 +112,8 @@ void K4AWindowSet::StartNormalWindows(const char *sourceIdentifier,
sourceIdentifier,
colorWindowTitle,
*cameraDataSource,
std::make_shared<K4AConvertingFrameSource<K4A_IMAGE_FORMAT_COLOR_YUY2>>(
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_YUY2>(colorResolution)));
std::make_shared<K4AConvertingImageSource<K4A_IMAGE_FORMAT_COLOR_YUY2>>(
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_YUY2>(colorResolution)));
break;
case K4A_IMAGE_FORMAT_COLOR_MJPG:
@ -121,8 +121,8 @@ void K4AWindowSet::StartNormalWindows(const char *sourceIdentifier,
sourceIdentifier,
colorWindowTitle,
*cameraDataSource,
std::make_shared<K4AConvertingFrameSource<K4A_IMAGE_FORMAT_COLOR_MJPG>>(
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_MJPG>(colorResolution)));
std::make_shared<K4AConvertingImageSource<K4A_IMAGE_FORMAT_COLOR_MJPG>>(
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_MJPG>(colorResolution)));
break;
case K4A_IMAGE_FORMAT_COLOR_BGRA32:
@ -130,8 +130,8 @@ void K4AWindowSet::StartNormalWindows(const char *sourceIdentifier,
sourceIdentifier,
colorWindowTitle,
*cameraDataSource,
std::make_shared<K4AConvertingFrameSource<K4A_IMAGE_FORMAT_COLOR_BGRA32>>(
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_BGRA32>(colorResolution)));
std::make_shared<K4AConvertingImageSource<K4A_IMAGE_FORMAT_COLOR_BGRA32>>(
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_BGRA32>(colorResolution)));
break;
case K4A_IMAGE_FORMAT_COLOR_NV12:
@ -139,8 +139,8 @@ void K4AWindowSet::StartNormalWindows(const char *sourceIdentifier,
sourceIdentifier,
colorWindowTitle,
*cameraDataSource,
std::make_shared<K4AConvertingFrameSource<K4A_IMAGE_FORMAT_COLOR_NV12>>(
K4AColorFrameVisualizerFactory::Create<K4A_IMAGE_FORMAT_COLOR_NV12>(colorResolution)));
std::make_shared<K4AConvertingImageSource<K4A_IMAGE_FORMAT_COLOR_NV12>>(
K4AColorImageConverterFactory::Create<K4A_IMAGE_FORMAT_COLOR_NV12>(colorResolution)));
break;
default: