Mirror of https://github.com/microsoft/AirSim.git

refactor image capture APIs

Parent: 547dff43c8
Commit: 87184da778
@@ -25,10 +25,10 @@ public: //types
     };

     struct ImageRequest {
-        uint8_t camera_id;
-        ImageCaptureBase::ImageType image_type;
-        bool pixels_as_float;
-        bool compress;
+        uint8_t camera_id = 0;
+        ImageCaptureBase::ImageType image_type = ImageCaptureBase::ImageType::Scene;
+        bool pixels_as_float = false;
+        bool compress = true;

         ImageRequest()
         {}
@@ -57,7 +57,7 @@ public: //types
     };

 public: //methods
-    virtual ImageResponse getImage(ImageType image_type, bool pixels_as_float, bool compress) = 0;
+    virtual void getImages(const std::vector<ImageRequest>& requests, std::vector<ImageResponse>& responses) = 0;
 };

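Note (not part of the commit): with the defaulted ImageRequest fields and the batched getImages() signature above, a caller fills a vector of requests and gets one response per request from a single call. A minimal sketch, assuming some concrete ImageCaptureBase implementation behind an image_capture pointer (that name is a placeholder, not defined in this hunk):

    using namespace msr::airlib;

    // image_capture: ImageCaptureBase* obtained elsewhere (placeholder for this sketch)
    std::vector<ImageCaptureBase::ImageRequest> requests;
    std::vector<ImageCaptureBase::ImageResponse> responses;

    // Defaults now mean: camera 0, Scene image, uint8 pixels, compressed.
    requests.push_back(ImageCaptureBase::ImageRequest());

    // A second request overriding a few fields (camera 1 is hypothetical).
    ImageCaptureBase::ImageRequest custom;
    custom.camera_id = 1;
    custom.pixels_as_float = true;
    custom.compress = false;
    requests.push_back(custom);

    image_capture->getImages(requests, responses);  // fills responses, one per request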
@@ -25,7 +25,7 @@ public:
     virtual void stopApiServer() = 0;
     virtual bool isApiServerStarted() = 0;
     virtual VehicleControllerBase* getController() = 0;
-    virtual ImageCaptureBase* getCamera(unsigned int index) = 0;
+    virtual ImageCaptureBase* getImageCapture() = 0;
     virtual void setPose(const Pose& pose, bool ignore_collision) = 0;
     virtual Pose getPose() = 0;
     virtual bool setSegmentationObjectID(const std::string& mesh_name, int object_id,
@@ -251,17 +251,12 @@ public:
         controller_->enableApiControl(is_enabled);
     }

-    virtual vector<ImageCaptureBase::ImageResponse> simGetImages(const vector<ImageCaptureBase::ImageRequest>& request) override
+    virtual vector<ImageCaptureBase::ImageResponse> simGetImages(const vector<ImageCaptureBase::ImageRequest>& requests) override
     {
-        vector<ImageCaptureBase::ImageResponse> response;
-
-        for (const auto& item : request) {
-            ImageCaptureBase* camera = vehicle_->getCamera(item.camera_id);
-            const auto& item_response = camera->getImage(item.image_type, item.pixels_as_float, item.compress);
-            response.push_back(item_response);
-        }
-
-        return response;
+        vector<ImageCaptureBase::ImageResponse> responses;
+        ImageCaptureBase* image_capture = vehicle_->getImageCapture();
+        image_capture->getImages(requests, responses);
+        return responses;
     }
     virtual vector<uint8_t> simGetImage(uint8_t camera_id, ImageCaptureBase::ImageType image_type) override
     {
@@ -44,10 +44,10 @@ public:
         return controller_;
     }

-    virtual ImageCaptureBase* getCamera(unsigned int index) override
+    virtual ImageCaptureBase* getImageCapture() override
     {
         //TODO: we need to support this but with only scene image type
-        throw std::logic_error("getCamera() call is only supported for simulation");
+        throw std::logic_error("getImageCapture() call is only supported for simulation");
     }

     virtual void setPose(const Pose& pose, bool ignore_collision) override
@@ -10,17 +10,14 @@ CarPawnApi::CarPawnApi(VehiclePawnWrapper* pawn, UWheeledVehicleMovementComponen
 }

 std::vector<ImageCaptureBase::ImageResponse> CarPawnApi::simGetImages(
-    const std::vector<ImageCaptureBase::ImageRequest>& request)
+    const std::vector<ImageCaptureBase::ImageRequest>& requests)
 {
-    std::vector<ImageCaptureBase::ImageResponse> response;
+    std::vector<ImageCaptureBase::ImageResponse> responses;

-    for (const auto& item : request) {
-        ImageCaptureBase* camera = pawn_->getCameraConnector(item.camera_id);
-        const auto& item_response = camera->getImage(item.image_type, item.pixels_as_float, item.compress);
-        response.push_back(item_response);
-    }
+    ImageCaptureBase* camera = pawn_->getImageCapture();
+    camera->getImages(requests, responses);

-    return response;
+    return responses;
 }

 bool CarPawnApi::simSetSegmentationObjectID(const std::string& mesh_name, int object_id,
@@ -13,8 +13,7 @@ public:

     CarPawnApi(VehiclePawnWrapper* pawn, UWheeledVehicleMovementComponent* movement);

-    virtual std::vector<ImageCaptureBase::ImageResponse> simGetImages(
-        const std::vector<ImageCaptureBase::ImageRequest>& request) override;
+

     virtual bool simSetSegmentationObjectID(const std::string& mesh_name, int object_id,
         bool is_name_regex = false) override;
@@ -26,6 +25,8 @@ public:
     virtual msr::airlib::CollisionInfo getCollisionInfo() override;

     virtual std::vector<uint8_t> simGetImage(uint8_t camera_id, ImageCaptureBase::ImageType image_type) override;
+    virtual std::vector<ImageCaptureBase::ImageResponse> simGetImages(
+        const std::vector<ImageCaptureBase::ImageRequest>& requests) override;

     virtual void setCarControls(const CarApiBase::CarControls& controls) override;

@@ -63,9 +63,9 @@ MultiRotorConnector::MultiRotorConnector(VehiclePawnWrapper* vehicle_pawn_wrappe
     }
 }

-msr::airlib::ImageCaptureBase* MultiRotorConnector::getCamera(unsigned int index)
+msr::airlib::ImageCaptureBase* MultiRotorConnector::getImageCapture()
 {
-    return vehicle_pawn_wrapper_->getCameraConnector(index);
+    return vehicle_pawn_wrapper_->getImageCapture();
 }

 MultiRotorConnector::~MultiRotorConnector()
@@ -60,7 +60,7 @@ public:
         bool is_name_regex = false) override;
     virtual int getSegmentationObjectID(const std::string& mesh_name) override;

-    virtual msr::airlib::ImageCaptureBase* getCamera(unsigned int index = 0) override;
+    virtual msr::airlib::ImageCaptureBase* getImageCapture() override;

     virtual void printLogMessage(const std::string& message, std::string message_param = "", unsigned char severity = 0) override;

@@ -11,20 +11,20 @@ std::unique_ptr<FRecordingThread> FRecordingThread::instance_;


 FRecordingThread::FRecordingThread()
-    : stop_task_counter_(0), camera_(nullptr), recording_file_(nullptr), kinematics_(nullptr), is_ready_(false)
+    : stop_task_counter_(0), recording_file_(nullptr), kinematics_(nullptr), is_ready_(false)
 {
     thread_.reset(FRunnableThread::Create(this, TEXT("FRecordingThread"), 0, TPri_BelowNormal)); // Windows default, possible to specify more priority
 }


-void FRecordingThread::startRecording(msr::airlib::ImageCaptureBase* camera, const msr::airlib::Kinematics::State* kinematics, const RecordingSettings& settings, std::vector <std::string> columns, VehiclePawnWrapper* wrapper)
+void FRecordingThread::startRecording(msr::airlib::ImageCaptureBase* image_capture, const msr::airlib::Kinematics::State* kinematics, const RecordingSettings& settings, std::vector <std::string> columns, VehiclePawnWrapper* wrapper)
 {
     stopRecording();

     //TODO: check FPlatformProcess::SupportsMultithreading()?

     instance_.reset(new FRecordingThread());
-    instance_->camera_ = camera;
+    instance_->image_capture_ = image_capture;
     instance_->kinematics_ = kinematics;
     instance_->settings_ = settings;
     instance_->wrapper_ = wrapper;
@@ -61,7 +61,7 @@ void FRecordingThread::stopRecording()

 bool FRecordingThread::Init()
 {
-    if (camera_ && recording_file_)
+    if (image_capture_ && recording_file_)
     {
         UAirBlueprintLib::LogMessage(TEXT("Initiated recording thread"), TEXT(""), LogDebugLevel::Success);
     }
@@ -83,9 +83,13 @@ uint32 FRecordingThread::Run()

         // todo: should we go as fast as possible, or should we limit this to a particular number of
         // frames per second?
-        auto response = camera_->getImage(msr::airlib::ImageCaptureBase::ImageType::Scene, false, true);
+        std::vector<msr::airlib::ImageCaptureBase::ImageRequest> requests;
+        std::vector<msr::airlib::ImageCaptureBase::ImageResponse> responses;
+
+        requests.push_back(msr::airlib::ImageCaptureBase::ImageRequest(0, msr::airlib::ImageCaptureBase::ImageType::Scene, false, true));
+        image_capture_->getImages(requests, responses);
         TArray<uint8_t> image_data;
-        image_data.Append(response.image_data_uint8.data(), response.image_data_uint8.size());
+        image_data.Append(responses[0].image_data_uint8.data(), responses[0].image_data_uint8.size());
         recording_file_->appendRecord(image_data, wrapper_);
     }
 }
@@ -3,7 +3,7 @@
 #include "CoreMinimal.h"
 #include "AirBlueprintLib.h"
 #include "HAL/Runnable.h"
-#include "VehicleCameraConnector.h"
+#include "UnrealImageCapture.h"
 #include "VehiclePawnWrapper.h"
 #include "Recording/RecordingFile.h"
 #include "physics/Kinematics.hpp"
@@ -38,7 +38,7 @@ private:
     std::unique_ptr<FRunnableThread> thread_;

     RecordingSettings settings_;
-    msr::airlib::ImageCaptureBase* camera_;
+    msr::airlib::ImageCaptureBase* image_capture_;
     std::unique_ptr<RecordingFile> recording_file_;
     const msr::airlib::Kinematics::State* kinematics_;
     VehiclePawnWrapper* wrapper_;
@@ -214,7 +214,7 @@ ECameraDirectorMode ASimModeBase::getInitialViewMode()

 void ASimModeBase::startRecording()
 {
-    FRecordingThread::startRecording(getFpvVehiclePawnWrapper()->getCameraConnector(0),
+    FRecordingThread::startRecording(getFpvVehiclePawnWrapper()->getImageCapture(),
         getFpvVehiclePawnWrapper()->getKinematics(), recording_settings, columns, getFpvVehiclePawnWrapper());
 }

@@ -0,0 +1,130 @@
+#include "UnrealImageCapture.h"
+#include "RenderRequest.h"
+#include "ImageUtils.h"
+#include "common/ClockFactory.hpp"
+#include "NedTransform.h"
+
+
+UnrealImageCapture::UnrealImageCapture(APIPCamera* cameras[])
+    : cameras_(cameras)
+{
+    //TODO: explore screenshot option
+    //addScreenCaptureHandler(camera->GetWorld());
+}
+
+UnrealImageCapture::~UnrealImageCapture()
+{}
+
+void UnrealImageCapture::getImages(const std::vector<msr::airlib::ImageCaptureBase::ImageRequest>& requests,
+    std::vector<msr::airlib::ImageCaptureBase::ImageResponse>& responses)
+{
+    if (cameras_== nullptr) {
+        for (unsigned int i = 0; i < requests.size(); ++i) {
+            responses.push_back(ImageResponse());
+            responses[responses.size() - 1].message = "camera is not set";
+        }
+    }
+    else
+        getSceneCaptureImage(requests, responses, false);
+}
+
+
+void UnrealImageCapture::getSceneCaptureImage(const std::vector<msr::airlib::ImageCaptureBase::ImageRequest>& requests,
+    std::vector<msr::airlib::ImageCaptureBase::ImageResponse>& responses, bool use_safe_method)
+{
+    std::vector<std::shared_ptr<RenderRequest::RenderParams>> render_params;
+    std::vector<std::shared_ptr<RenderRequest::RenderResult>> render_results;
+
+    for (unsigned int i = 0; i < requests.size(); ++i) {
+        APIPCamera* camera = cameras_[i];
+        responses.push_back(ImageResponse());
+        ImageResponse& response = responses[i];
+
+        updateCameraVisibility(camera, requests[i]);
+
+        UTextureRenderTarget2D* textureTarget = nullptr;
+        USceneCaptureComponent2D* capture = camera->getCaptureComponent(requests[i].image_type, false);
+        if (capture == nullptr) {
+            response.message = "Can't take screenshot because none camera type is not active";
+        }
+        else if (capture->TextureTarget == nullptr) {
+            response.message = "Can't take screenshot because texture target is null";
+        }
+        else
+            textureTarget = capture->TextureTarget;
+
+        render_params.push_back(std::make_shared<RenderRequest::RenderParams>(textureTarget, requests[i].pixels_as_float, requests[i].compress));
+    }
+
+    RenderRequest render_request(use_safe_method);
+    render_request.getScreenshot(render_params.data(), render_results, render_params.size());
+
+    for (unsigned int i = 0; i < requests.size(); ++i) {
+        const ImageRequest& request = requests[i];
+        ImageResponse& response = responses[i];
+        APIPCamera* camera = cameras_[i];
+
+        response.time_stamp = msr::airlib::ClockFactory::get()->nowNanos();
+        response.image_data_uint8 = std::vector<uint8_t>(render_results[i]->image_data_uint8.GetData(), render_results[i]->image_data_uint8.GetData() + render_results[i]->image_data_uint8.Num());
+        response.image_data_float = std::vector<float>(render_results[i]->image_data_float.GetData(), render_results[i]->image_data_float.GetData() + render_results[i]->image_data_float.Num());
+
+        response.camera_position = NedTransform::toNedMeters(camera->GetActorLocation());
+        response.camera_orientation = NedTransform::toQuaternionr(camera->GetActorRotation().Quaternion(), true);
+        response.pixels_as_float = request.pixels_as_float;
+        response.compress = request.compress;
+        response.width = render_results[i]->width;
+        response.height = render_results[i]->height;
+        response.image_type = request.image_type;
+    }
+
+}
+
+
+void UnrealImageCapture::updateCameraVisibility(APIPCamera* camera, const msr::airlib::ImageCaptureBase::ImageRequest& request)
+{
+    bool visibilityChanged = false;
+    if (! camera->getCameraTypeEnabled(request.image_type)) {
+        camera->setCameraTypeEnabled(request.image_type, true);
+        visibilityChanged = true;
+    }
+
+    if (visibilityChanged) {
+        // Wait for render so that view is ready for capture
+        std::this_thread::sleep_for(std::chrono::duration<double>(0.2));
+        // not sure why this doesn't work.
+        //DECLARE_CYCLE_STAT(TEXT("FNullGraphTask.CheckRenderStatus"), STAT_FNullGraphTask_CheckRenderStatus, STATGROUP_TaskGraphTasks);
+        //auto renderStatus = TGraphTask<FNullGraphTask>::CreateTask(NULL).ConstructAndDispatchWhenReady(GET_STATID(STAT_FNullGraphTask_CheckRenderStatus), ENamedThreads::RenderThread);
+        //FTaskGraphInterface::Get().WaitUntilTaskCompletes(renderStatus);
+    }
+}
+
+bool UnrealImageCapture::getScreenshotScreen(ImageType image_type, std::vector<uint8_t>& compressedPng)
+{
+    FScreenshotRequest::RequestScreenshot(false); // This is an async operation
+    return true;
+}
+
+void UnrealImageCapture::addScreenCaptureHandler(UWorld *world)
+{
+    static bool is_installed = false;
+
+    if (!is_installed) {
+        UGameViewportClient* ViewportClient = world->GetGameViewport();
+        ViewportClient->OnScreenshotCaptured().Clear();
+        ViewportClient->OnScreenshotCaptured().AddLambda(
+            [this](int32 SizeX, int32 SizeY, const TArray<FColor>& Bitmap)
+        {
+            // Make sure that all alpha values are opaque.
+            TArray<FColor>& RefBitmap = const_cast<TArray<FColor>&>(Bitmap);
+            for(auto& Color : RefBitmap)
+                Color.A = 255;
+
+            TArray<uint8_t> last_compressed_png;
+            FImageUtils::CompressImageArray(SizeX, SizeY, RefBitmap, last_compressed_png);
+            last_compressed_png_ = std::vector<uint8_t>(last_compressed_png.GetData(), last_compressed_png.GetData() + last_compressed_png.Num());
+        });
+
+        is_installed = true;
+    }
+}
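Note (not part of the commit): in the new UnrealImageCapture, request i is served by cameras_[i], so a request's position in the vector, rather than its camera_id field, selects the camera in this implementation. A rough usage sketch under that assumption, with the APIPCamera* array owned elsewhere (as VehiclePawnWrapper sets it up further below); captureSceneFrame is a hypothetical helper, not part of this change:

    // Hypothetical helper showing the intended call pattern of the new capture path.
    void captureSceneFrame(APIPCamera* cameras[], std::vector<uint8_t>& image_out)
    {
        UnrealImageCapture image_capture(cameras);

        std::vector<msr::airlib::ImageCaptureBase::ImageRequest> requests;
        std::vector<msr::airlib::ImageCaptureBase::ImageResponse> responses;

        // Default-constructed request: camera 0, Scene, uint8 pixels, compressed.
        requests.push_back(msr::airlib::ImageCaptureBase::ImageRequest());

        image_capture.getImages(requests, responses);

        image_out = responses[0].image_data_uint8;  // compressed image bytes
    }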
@@ -0,0 +1,32 @@
+#pragma once
+
+#include "CoreMinimal.h"
+#include "PIPCamera.h"
+#include "controllers/ImageCaptureBase.hpp"
+
+
+class UnrealImageCapture : public msr::airlib::ImageCaptureBase
+{
+public:
+    typedef msr::airlib::ImageCaptureBase::ImageType ImageType;
+
+    UnrealImageCapture(APIPCamera* cameras[]);
+    virtual ~UnrealImageCapture();
+
+    virtual void getImages(const std::vector<ImageRequest>& requests, std::vector<ImageResponse>& responses) override;
+
+
+private:
+    void getSceneCaptureImage(const std::vector<msr::airlib::ImageCaptureBase::ImageRequest>& requests,
+        std::vector<msr::airlib::ImageCaptureBase::ImageResponse>& responses, bool use_safe_method);
+
+    void addScreenCaptureHandler(UWorld *world);
+    bool getScreenshotScreen(ImageType image_type, std::vector<uint8_t>& compressedPng);
+
+    void updateCameraVisibility(APIPCamera* camera, const msr::airlib::ImageCaptureBase::ImageRequest& request);
+
+
+private:
+    APIPCamera** cameras_;
+    std::vector<uint8_t> last_compressed_png_;
+};
@@ -1,116 +0,0 @@
-#include "VehicleCameraConnector.h"
-#include "RenderRequest.h"
-#include "ImageUtils.h"
-#include "common/ClockFactory.hpp"
-#include "NedTransform.h"
-
-
-VehicleCameraConnector::VehicleCameraConnector(APIPCamera* camera)
-    : camera_(camera)
-{
-    //TODO: explore screenshot option
-    //addScreenCaptureHandler(camera->GetWorld());
-}
-
-VehicleCameraConnector::~VehicleCameraConnector()
-{}
-
-msr::airlib::ImageCaptureBase::ImageResponse VehicleCameraConnector::getImage(VehicleCameraConnector::ImageType image_type, bool pixels_as_float, bool compress)
-{
-
-    if (camera_== nullptr) {
-        ImageResponse response;
-        response.message = "camera is not set";
-        return response;
-    }
-
-    return getSceneCaptureImage(image_type, pixels_as_float, compress, false);
-}
-
-msr::airlib::ImageCaptureBase::ImageResponse VehicleCameraConnector::getSceneCaptureImage(VehicleCameraConnector::ImageType image_type,
-    bool pixels_as_float, bool compress, bool use_safe_method)
-{
-    bool visibilityChanged = false;
-    if (! camera_->getCameraTypeEnabled(image_type)) {
-        camera_->setCameraTypeEnabled(image_type, true);
-        visibilityChanged = true;
-    }
-
-    if (visibilityChanged) {
-        // Wait for render so that view is ready for capture
-        std::this_thread::sleep_for(std::chrono::duration<double>(0.2));
-        // not sure why this doesn't work.
-        //DECLARE_CYCLE_STAT(TEXT("FNullGraphTask.CheckRenderStatus"), STAT_FNullGraphTask_CheckRenderStatus, STATGROUP_TaskGraphTasks);
-        //auto renderStatus = TGraphTask<FNullGraphTask>::CreateTask(NULL).ConstructAndDispatchWhenReady(GET_STATID(STAT_FNullGraphTask_CheckRenderStatus), ENamedThreads::RenderThread);
-        //FTaskGraphInterface::Get().WaitUntilTaskCompletes(renderStatus);
-    }
-
-    using namespace msr::airlib;
-    USceneCaptureComponent2D* capture = camera_->getCaptureComponent(image_type, false);
-    if (capture == nullptr) {
-        ImageResponse response;
-        response.message = "Can't take screenshot because none camera type is not active";
-        return response;
-    }
-
-    if (capture->TextureTarget == nullptr) {
-        ImageResponse response;
-        response.message = "Can't take screenshot because texture target is null";
-        return response;
-    }
-
-    UTextureRenderTarget2D* textureTarget = capture->TextureTarget;
-
-    RenderRequest request(use_safe_method);
-    std::vector<std::shared_ptr<RenderRequest::RenderParams>> render_params;
-    std::vector<std::shared_ptr<RenderRequest::RenderResult>> render_results;
-
-    render_params.push_back(std::make_shared<RenderRequest::RenderParams>(textureTarget, pixels_as_float, compress));
-
-    request.getScreenshot(render_params.data(), render_results, render_params.size());
-
-    ImageResponse response;
-    response.time_stamp = msr::airlib::ClockFactory::get()->nowNanos();
-    response.image_data_uint8 = std::vector<uint8_t>(render_results[0]->image_data_uint8.GetData(), render_results[0]->image_data_uint8.GetData() + render_results[0]->image_data_uint8.Num());
-    response.image_data_float = std::vector<float>(render_results[0]->image_data_float.GetData(), render_results[0]->image_data_float.GetData() + render_results[0]->image_data_float.Num());
-
-    response.camera_position = NedTransform::toNedMeters(capture->GetComponentLocation());
-    response.camera_orientation = NedTransform::toQuaternionr(capture->GetComponentRotation().Quaternion(), true);
-    response.pixels_as_float = pixels_as_float;
-    response.compress = compress;
-    response.width = render_results[0]->width;
-    response.height = render_results[0]->height;
-    response.image_type = image_type;
-    return response;
-}
-
-
-bool VehicleCameraConnector::getScreenshotScreen(ImageType image_type, std::vector<uint8_t>& compressedPng)
-{
-    FScreenshotRequest::RequestScreenshot(false); // This is an async operation
-    return true;
-}
-
-void VehicleCameraConnector::addScreenCaptureHandler(UWorld *world)
-{
-    static bool is_installed = false;
-
-    if (!is_installed) {
-        UGameViewportClient* ViewportClient = world->GetGameViewport();
-        ViewportClient->OnScreenshotCaptured().Clear();
-        ViewportClient->OnScreenshotCaptured().AddLambda(
-            [this](int32 SizeX, int32 SizeY, const TArray<FColor>& Bitmap)
-        {
-            // Make sure that all alpha values are opaque.
-            TArray<FColor>& RefBitmap = const_cast<TArray<FColor>&>(Bitmap);
-            for(auto& Color : RefBitmap)
-                Color.A = 255;
-
-            TArray<uint8_t> last_compressed_png;
-            FImageUtils::CompressImageArray(SizeX, SizeY, RefBitmap, last_compressed_png);
-            last_compressed_png_ = std::vector<uint8_t>(last_compressed_png.GetData(), last_compressed_png.GetData() + last_compressed_png.Num());
-        });
-
-        is_installed = true;
-    }
-}
@@ -1,27 +0,0 @@
-#pragma once
-
-#include "CoreMinimal.h"
-#include "PIPCamera.h"
-#include "controllers/ImageCaptureBase.hpp"
-
-
-class VehicleCameraConnector : public msr::airlib::ImageCaptureBase
-{
-public:
-    typedef msr::airlib::ImageCaptureBase::ImageType ImageType;
-
-    VehicleCameraConnector(APIPCamera* camera);
-    virtual ~VehicleCameraConnector();
-    virtual ImageResponse getImage(ImageType image_type, bool pixels_as_float, bool compress) override;
-
-private:
-    msr::airlib::ImageCaptureBase::ImageResponse getSceneCaptureImage(ImageType image_type,
-        bool pixels_as_float, bool compress, bool use_safe_method);
-
-    void addScreenCaptureHandler(UWorld *world);
-    bool getScreenshotScreen(ImageType image_type, std::vector<uint8_t>& compressedPng);
-
-private:
-    APIPCamera* camera_;
-    std::vector<uint8_t> last_compressed_png_;
-};
@@ -120,9 +120,7 @@ void VehiclePawnWrapper::initialize(APawn* pawn, const std::vector<APIPCamera*>&
     cameras_ = cameras;
     config_ = config;

-    for (auto camera : cameras_) {
-        camera_connectors_.push_back(std::unique_ptr<VehicleCameraConnector>(new VehicleCameraConnector(camera)));
-    }
+    image_capture_.reset(new UnrealImageCapture(cameras_.data()));

     if (!NedTransform::isInitialized())
         NedTransform::initialize(pawn_);
@@ -172,10 +170,9 @@ APIPCamera* VehiclePawnWrapper::getCamera(int index)
     return cameras_.at(index);
 }

-VehicleCameraConnector* VehiclePawnWrapper::getCameraConnector(int index)
+UnrealImageCapture* VehiclePawnWrapper::getImageCapture()
 {
-    //should be overridden in derived class
-    return camera_connectors_.at(index).get();
+    return image_capture_.get();
 }

 int VehiclePawnWrapper::getCameraCount()
@@ -3,7 +3,7 @@
 #include "CoreMinimal.h"
 #include <vector>
 #include <memory>
-#include "VehicleCameraConnector.h"
+#include "UnrealImageCapture.h"
 #include "common/Common.hpp"
 #include "common/CommonStructs.hpp"
 #include "PIPCamera.h"
@@ -46,7 +46,7 @@ public: //interface
     bool bSelfMoved, FVector HitLocation, FVector HitNormal, FVector NormalImpulse, const FHitResult& Hit);

     APIPCamera* getCamera(int index = 0);
-    VehicleCameraConnector* getCameraConnector(int index = 0);
+    UnrealImageCapture* getImageCapture();
     int getCameraCount();
     void displayCollisionEffect(FVector hit_location, const FHitResult& hit);
     APawn* getPawn();
@@ -94,7 +94,7 @@ private: //vars
     GeoPoint home_point_;
     APawn* pawn_;
     std::vector<APIPCamera*> cameras_;
-    std::vector<std::unique_ptr<VehicleCameraConnector>> camera_connectors_;
+    std::unique_ptr<UnrealImageCapture> image_capture_;
     const msr::airlib::Kinematics::State* kinematics_;
     std::string log_line_;
     WrapperConfig config_;