v1.0.7 update OpenCVForUnity version to 2.5.1.
This commit is contained in:
Parent 29c79cd0f1
Commit f37498e849
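At its core, the commit migrates the examples from the ArUco API that was removed in OpenCVForUnity 2.5.x (the static `Aruco.*` calls in ArucoModule) to the `ArucoDetector` class in ObjdetectModule, and replaces `Aruco.estimatePoseSingleMarkers` with a per-marker `Calib3d.solvePnP`. The sketch below condenses that pattern from the hunks that follow; the class name and method signature are illustrative only, and the example's rvecs/tvecs bookkeeping, Unity pose conversion, and Mat disposal are omitted.

```csharp
using System.Collections.Generic;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;

// Hypothetical helper, condensed from the HLArUcoExample.cs hunks below.
public static class ArUco251MigrationSketch
{
    // img: input Mat; camMatrix/distCoeffs: camera calibration; markerLength: marker side length (meters).
    public static void DetectAndEstimatePose(Mat img, Mat camMatrix, MatOfDouble distCoeffs, float markerLength)
    {
        // 2.5.x replaces the static Aruco.* calls with an ArucoDetector instance (ObjdetectModule).
        Dictionary dictionary = Objdetect.getPredefinedDictionary(Objdetect.DICT_6X6_250);
        DetectorParameters detectorParams = new DetectorParameters();
        detectorParams.set_useAruco3Detection(true);
        RefineParameters refineParameters = new RefineParameters(10f, 3f, true);
        ArucoDetector arucoDetector = new ArucoDetector(dictionary, detectorParams, refineParameters);

        Mat ids = new Mat();
        List<Mat> corners = new List<Mat>();
        List<Mat> rejectedCorners = new List<Mat>();
        arucoDetector.detectMarkers(img, corners, ids, rejectedCorners); // was Aruco.detectMarkers(...)

        // Aruco.estimatePoseSingleMarkers(...) is gone in 2.5.x; solve a PnP problem per marker instead.
        for (int i = 0; i < ids.total(); i++)
        {
            using (MatOfPoint3f objPoints = new MatOfPoint3f(
                new Point3(-markerLength / 2f, markerLength / 2f, 0),
                new Point3(markerLength / 2f, markerLength / 2f, 0),
                new Point3(markerLength / 2f, -markerLength / 2f, 0),
                new Point3(-markerLength / 2f, -markerLength / 2f, 0)))
            using (Mat corner_4x1 = corners[i].reshape(2, 4)) // 1*4*CV_32FC2 => 4*1*CV_32FC2
            using (MatOfPoint2f imagePoints = new MatOfPoint2f(corner_4x1))
            using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1))
            using (Mat tvec = new Mat(3, 1, CvType.CV_64FC1))
            {
                // rvec/tvec now hold the marker pose in OpenCV coordinates.
                Calib3d.solvePnP(objPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
            }
        }
    }
}
```

Drawing likewise moves from `Aruco.drawDetectedMarkers` to `Objdetect.drawDetectedMarkers`, as the preview hunks show.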
(The diff for this file is not shown because it is too large.)
@@ -1,12 +0,0 @@
-fileFormatVersion: 2
-guid: f9a1576f5a2e5884ab5de1d7a3ef98de
-timeCreated: 1520109162
-licenseType: Free
-MonoImporter:
-  serializedVersion: 2
-  defaultReferences: []
-  executionOrder: 0
-  icon: {instanceID: 0}
-  userData:
-  assetBundleName:
-  assetBundleVariant:
(The diff for this file is not shown because it is too large.)
@@ -1,8 +0,0 @@
-fileFormatVersion: 2
-guid: f0c9209848567b44396a236e8f2e6c9a
-timeCreated: 1520109162
-licenseType: Free
-DefaultImporter:
-  userData:
-  assetBundleName:
-  assetBundleVariant:
@@ -6,7 +6,7 @@ using System.Collections.Generic;
 using System.Threading;
 using System.IO;
 using System.Xml.Serialization;
-using OpenCVForUnity.ArucoModule;
+using OpenCVForUnity.ObjdetectModule;
 using OpenCVForUnity.CoreModule;
 using OpenCVForUnity.UnityUtils.Helper;
 using OpenCVForUnity.Calib3dModule;
@@ -82,7 +82,7 @@ namespace HoloLensWithOpenCVForUnityExample
         /// <summary>
         /// The dictionary identifier.
         /// </summary>
-        public int dictionaryId = Aruco.DICT_6X6_250;
+        public int dictionaryId = Objdetect.DICT_6X6_250;

         /// <summary>
         /// The length of the markers' side. Normally, unit is meters.
@@ -147,46 +147,6 @@ namespace HoloLensWithOpenCVForUnityExample
         /// </summary>
         Matrix4x4 ARM;

-        /// <summary>
-        /// The identifiers.
-        /// </summary>
-        Mat ids;
-
-        /// <summary>
-        /// The corners.
-        /// </summary>
-        List<Mat> corners;
-
-        /// <summary>
-        /// The rejected corners.
-        /// </summary>
-        List<Mat> rejectedCorners;
-
-        /// <summary>
-        /// The rvecs.
-        /// </summary>
-        Mat rvecs;
-
-        /// <summary>
-        /// The tvecs.
-        /// </summary>
-        Mat tvecs;
-
-        /// <summary>
-        /// The rot mat.
-        /// </summary>
-        Mat rotMat;
-
-        /// <summary>
-        /// The detector parameters.
-        /// </summary>
-        DetectorParameters detectorParams;
-
-        /// <summary>
-        /// The dictionary.
-        /// </summary>
-        Dictionary dictionary;
-
         /// <summary>
         /// The webcam texture to mat helper.
         /// </summary>
@@ -200,6 +160,16 @@ namespace HoloLensWithOpenCVForUnityExample
         Mat rgbMat4preview;
         Texture2D texture;

+        // for CanonicalMarker.
+        Mat ids;
+        List<Mat> corners;
+        List<Mat> rejectedCorners;
+        Dictionary dictionary;
+        ArucoDetector arucoDetector;
+
+        Mat rvecs;
+        Mat tvecs;
+

         readonly static Queue<Action> ExecuteOnMainThread = new Queue<Action>();
         System.Object sync = new System.Object();
@@ -439,9 +409,15 @@ namespace HoloLensWithOpenCVForUnityExample
             ids = new Mat();
             corners = new List<Mat>();
             rejectedCorners = new List<Mat>();
-            rvecs = new Mat();
-            tvecs = new Mat();
-            rotMat = new Mat(3, 3, CvType.CV_64FC1);
+            rvecs = new Mat(1, 10, CvType.CV_64FC3);
+            tvecs = new Mat(1, 10, CvType.CV_64FC3);
+            dictionary = Objdetect.getPredefinedDictionary(dictionaryId);
+
+            DetectorParameters detectorParams = new DetectorParameters();
+            detectorParams.set_useAruco3Detection(true);
+            RefineParameters refineParameters = new RefineParameters(10f, 3f, true);
+            arucoDetector = new ArucoDetector(dictionary, detectorParams, refineParameters);
+


             transformationM = new Matrix4x4();
@@ -452,12 +428,17 @@ namespace HoloLensWithOpenCVForUnityExample
             invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
             Debug.Log("invertZM " + invertZM.ToString());

-            detectorParams = DetectorParameters.create();
-            dictionary = Aruco.getPredefinedDictionary(dictionaryId);


-            //If WebCamera is frontFaceing, flip Mat.
-            webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.IsFrontFacing();
+            // If the WebCam is front facing, flip the Mat horizontally. Required for successful detection of AR markers.
+            if (webCamTextureToMatHelper.IsFrontFacing() && !webCamTextureToMatHelper.flipHorizontal)
+            {
+                webCamTextureToMatHelper.flipHorizontal = true;
+            }
+            else if (!webCamTextureToMatHelper.IsFrontFacing() && webCamTextureToMatHelper.flipHorizontal)
+            {
+                webCamTextureToMatHelper.flipHorizontal = false;
+            }

             rgbMat4preview = new Mat();
         }
@@ -494,6 +475,9 @@ namespace HoloLensWithOpenCVForUnityExample

             hasUpdatedARTransformMatrix = false;

+            if (arucoDetector != null)
+                arucoDetector.Dispose();
+
             if (ids != null)
                 ids.Dispose();
             foreach (var item in corners)
@@ -510,8 +494,6 @@ namespace HoloLensWithOpenCVForUnityExample
                 rvecs.Dispose();
             if (tvecs != null)
                 tvecs.Dispose();
-            if (rotMat != null)
-                rotMat.Dispose();

             if (rgbMat4preview != null)
                 rgbMat4preview.Dispose();
@@ -533,6 +515,7 @@ namespace HoloLensWithOpenCVForUnityExample
         }
+
 #if WINDOWS_UWP && !DISABLE_HOLOLENSCAMSTREAM_API

         public void OnFrameMatAcquired(Mat grayMat, Matrix4x4 projectionMatrix, Matrix4x4 cameraToWorldMatrix, CameraIntrinsics cameraIntrinsics)
         {
             isDetectingInFrameArrivedThread = true;
@@ -568,45 +551,63 @@ namespace HoloLensWithOpenCVForUnityExample
                 if (enableDetection)
                 {
                     // Detect markers and estimate Pose

                     // undistort image.
                     Calib3d.undistort(downScaleMat, downScaleMat, camMatrix, distCoeffs);
                     // detect markers.
-                    Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners);
+                    arucoDetector.detectMarkers(downScaleMat, corners, ids, rejectedCorners);

                     if (applyEstimationPose && ids.total() > 0)
                     {
-                        Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
+                        if (rvecs.cols() < ids.total())
+                            rvecs.create(1, (int)ids.total(), CvType.CV_64FC3);
+                        if (tvecs.cols() < ids.total())
+                            tvecs.create(1, (int)ids.total(), CvType.CV_64FC3);

-                        for (int i = 0; i < ids.total(); i++)
+                        using (MatOfPoint3f objPoints = new MatOfPoint3f(
+                            new Point3(-markerLength / 2f, markerLength / 2f, 0),
+                            new Point3(markerLength / 2f, markerLength / 2f, 0),
+                            new Point3(markerLength / 2f, -markerLength / 2f, 0),
+                            new Point3(-markerLength / 2f, -markerLength / 2f, 0)
+                            ))
                         {
-                            //This example can display ARObject on only first detected marker.
-                            if (i == 0)
+                            for (int i = 0; i < ids.total(); i++)
                             {
-                                // Convert to unity pose data.
-                                double[] rvecArr = new double[3];
-                                rvecs.get(0, 0, rvecArr);
-                                double[] tvecArr = new double[3];
-                                tvecs.get(0, 0, tvecArr);
-                                tvecArr[2] /= DOWNSCALE_RATIO;
-                                PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);
-
-                                // Create transform matrix.
-                                transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
-
-                                lock (sync)
+                                using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1))
+                                using (Mat tvec = new Mat(3, 1, CvType.CV_64FC1))
+                                using (Mat corner_4x1 = corners[i].reshape(2, 4)) // 1*4*CV_32FC2 => 4*1*CV_32FC2
+                                using (MatOfPoint2f imagePoints = new MatOfPoint2f(corner_4x1))
                                 {
-                                    // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
-                                    // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
-                                    ARM = invertYM * transformationM * invertYM;
+                                    // Calculate pose for each marker
+                                    Calib3d.solvePnP(objPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

-                                    // Apply Y-axis and Z-axis refletion matrix. (Adjust the posture of the AR object)
-                                    ARM = ARM * invertYM * invertZM;
+                                    rvec.reshape(3, 1).copyTo(new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)));
+                                    tvec.reshape(3, 1).copyTo(new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)));
+
+                                    // This example can display the ARObject on only first detected marker.
+                                    if (i == 0)
+                                    {
+                                        // Convert to unity pose data.
+                                        double[] rvecArr = new double[3];
+                                        rvec.get(0, 0, rvecArr);
+                                        double[] tvecArr = new double[3];
+                                        tvec.get(0, 0, tvecArr);
+                                        tvecArr[2] /= DOWNSCALE_RATIO;
+                                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);
+
+                                        // Create transform matrix.
+                                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
+
+                                        lock (sync)
+                                        {
+                                            // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
+                                            // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
+                                            ARM = invertYM * transformationM * invertYM;
+
+                                            // Apply Y-axis and Z-axis refletion matrix. (Adjust the posture of the AR object)
+                                            ARM = ARM * invertYM * invertZM;
+                                        }
+
+                                        hasUpdatedARTransformMatrix = true;
+                                    }
                                 }
-
-                                hasUpdatedARTransformMatrix = true;
-
-                                break;
                             }
                         }
                     }
                 }
@@ -620,7 +621,7 @@ namespace HoloLensWithOpenCVForUnityExample

                 if (ids.total() > 0)
                 {
-                    Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));
+                    Objdetect.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                     if (applyEstimationPose)
                     {
@@ -785,44 +786,63 @@ namespace HoloLensWithOpenCVForUnityExample
         private void DetectARUcoMarker()
         {
             // Detect markers and estimate Pose

             // undistort image.
             Calib3d.undistort(downScaleMat, downScaleMat, camMatrix, distCoeffs);
             // detect markers.
-            Aruco.detectMarkers(downScaleMat, dictionary, corners, ids, detectorParams, rejectedCorners);
+            arucoDetector.detectMarkers(downScaleMat, corners, ids, rejectedCorners);

             if (applyEstimationPose && ids.total() > 0)
             {
-                Aruco.estimatePoseSingleMarkers(corners, markerLength, camMatrix, distCoeffs, rvecs, tvecs);
+                if (rvecs.cols() < ids.total())
+                    rvecs.create(1, (int)ids.total(), CvType.CV_64FC3);
+                if (tvecs.cols() < ids.total())
+                    tvecs.create(1, (int)ids.total(), CvType.CV_64FC3);

-                for (int i = 0; i < ids.total(); i++)
+                using (MatOfPoint3f objPoints = new MatOfPoint3f(
+                    new Point3(-markerLength / 2f, markerLength / 2f, 0),
+                    new Point3(markerLength / 2f, markerLength / 2f, 0),
+                    new Point3(markerLength / 2f, -markerLength / 2f, 0),
+                    new Point3(-markerLength / 2f, -markerLength / 2f, 0)
+                    ))
                 {
-                    //This example can display ARObject on only first detected marker.
-                    if (i == 0)
+                    for (int i = 0; i < ids.total(); i++)
                     {
-                        // Convert to unity pose data.
-                        double[] rvecArr = new double[3];
-                        rvecs.get(0, 0, rvecArr);
-                        double[] tvecArr = new double[3];
-                        tvecs.get(0, 0, tvecArr);
-                        tvecArr[2] /= DOWNSCALE_RATIO;
-                        PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);
+                        using (Mat rvec = new Mat(3, 1, CvType.CV_64FC1))
+                        using (Mat tvec = new Mat(3, 1, CvType.CV_64FC1))
+                        using (Mat corner_4x1 = corners[i].reshape(2, 4)) // 1*4*CV_32FC2 => 4*1*CV_32FC2
+                        using (MatOfPoint2f imagePoints = new MatOfPoint2f(corner_4x1))
+                        {
+                            // Calculate pose for each marker
+                            Calib3d.solvePnP(objPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

-                        // Create transform matrix.
-                        transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);
+                            rvec.reshape(3, 1).copyTo(new Mat(rvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)));
+                            tvec.reshape(3, 1).copyTo(new Mat(tvecs, new OpenCVForUnity.CoreModule.Rect(0, i, 1, 1)));

-                        // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
-                        // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
-                        ARM = invertYM * transformationM * invertYM;
+                            // This example can display the ARObject on only first detected marker.
+                            if (i == 0)
+                            {
+                                // Convert to unity pose data.
+                                double[] rvecArr = new double[3];
+                                rvec.get(0, 0, rvecArr);
+                                double[] tvecArr = new double[3];
+                                tvec.get(0, 0, tvecArr);
+                                tvecArr[2] /= DOWNSCALE_RATIO;
+                                PoseData poseData = ARUtils.ConvertRvecTvecToPoseData(rvecArr, tvecArr);

-                        // Apply Y-axis and Z-axis refletion matrix. (Adjust the posture of the AR object)
-                        ARM = ARM * invertYM * invertZM;
+                                // Create transform matrix.
+                                transformationM = Matrix4x4.TRS(poseData.pos, poseData.rot, Vector3.one);

-                        hasUpdatedARTransformMatrix = true;
+                                // Right-handed coordinates system (OpenCV) to left-handed one (Unity)
+                                // https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
+                                ARM = invertYM * transformationM * invertYM;

-                        break;
+                                // Apply Y-axis and Z-axis refletion matrix. (Adjust the posture of the AR object)
+                                ARM = ARM * invertYM * invertZM;
+
+                                hasUpdatedARTransformMatrix = true;
+                            }
+                        }
                     }
                 }
             }

         }
@@ -836,7 +856,8 @@ namespace HoloLensWithOpenCVForUnityExample

             if (ids.total() > 0)
             {
-                Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));
+                //Aruco.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));
+                Objdetect.drawDetectedMarkers(rgbMat4preview, corners, ids, new Scalar(0, 255, 0));

                 if (applyEstimationPose)
                 {
@@ -103,10 +103,6 @@ namespace HoloLensWithOpenCVForUnityExample
             SceneManager.LoadScene("HLArUcoExample");
         }

-        public void OnHLArUcoCameraCalibrationExampleButtonClick()
-        {
-            SceneManager.LoadScene("HLArUcoCameraCalibrationExample");
-        }
         public void OnHLCameraIntrinsicsCheckerButtonClick()
         {
             SceneManager.LoadScene("HLCameraIntrinsicsChecker");
@@ -4,11 +4,8 @@
 #if !OPENCV_DONT_USE_WEBCAMTEXTURE_API
 #if !(PLATFORM_LUMIN && !UNITY_EDITOR)

-#if !DISABLE_HOLOLENSCAMSTREAM_API
 using HoloLensCameraStream;
-using OpenCVForUnity.UnityUtils;
-#endif

 using OpenCVForUnity.CoreModule;
 using OpenCVForUnity.UnityUtils.Helper;
 using OpenCVForUnity.UtilsModule;
@@ -36,7 +33,7 @@ namespace HoloLensWithOpenCVForUnity.UnityUtils.Helper

     /// <summary>
     /// Hololens camera stream to mat helper.
-    /// v 1.0.6
+    /// v 1.0.7
     /// Depends on EnoxSoftware/HoloLensCameraStream (https://github.com/EnoxSoftware/HoloLensCameraStream).
     /// Depends on OpenCVForUnity version 2.4.1 (WebCamTextureToMatHelper v 1.1.2) or later.
     ///
Binary data
ChArUcoBoard-mx5-my7-d10-os1000-bb1.pdf
Binary file not shown.
@@ -15,7 +15,7 @@
 * Visual Studio 2019
 * Unity 2019.4.31f1 / 2020.3.38f1
 * [Microsoft Mixed Reality Toolkit](https://github.com/Microsoft/MixedRealityToolkit-Unity/releases) v2.8.2
-* [OpenCV for Unity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR) 2.5.0+
+* [OpenCV for Unity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR) 2.5.1+
 * [EnoxSoftware/HoloLensCameraStream](https://github.com/EnoxSoftware/HoloLensCameraStream)

@@ -41,7 +41,7 @@
 1. **Open the VS Solution:** When the solution is built, a Windows explorer folder will open. Open the newly-built VS solution, which lives in `App/HoloLensWithOpenCVForUnityExample.sln`. This is the solution that ultimately gets deployed to your HoloLens.
 1. **Configure the deploy settings:** In the Visual Studio toolbar, change the solution platform from `ARM` to `x86` if you are building for Hololens1 or to `ARM64` if you are building for Hololens2; Change the deploy target (the green play button) to `Device` (if your HoloLens is plugged into your computer), or `Remote Machine` (if your HoloLens is connected via WiFi).
 1. **Run the app:** Go to **Debug > Start Debugging**. Once the app is deployed to the HoloLens, you should see some confirmation output in the Output window.
-    * (Print the AR marker "CanonicalMarker-d10-i1-sp500-bb1.pdf" and "ChArUcoBoard-mx5-my7-d10-os1000-bb1.pdf" on an A4 size paper)
+    * (Print the AR marker "CanonicalMarker-d10-i1-sp500-bb1.pdf" on an A4 size paper)

 |Project Assets|Build Settings|
 |---|---|