Change the line feed code [utf-8/lf]

EnoxSoftware 2022-06-28 23:35:03 +09:00
Parent 62789b2751
Commit bc9c86a73e
13 changed files with 2029 additions and 2028 deletions


@@ -1,280 +1,280 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Features2dModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace MarkerLessARExample
{
/// <summary>
/// Pattern capture.
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class CapturePattern : MonoBehaviour
{
/// <summary>
/// The pattern raw image.
/// </summary>
public RawImage patternRawImage;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The pattern rect.
/// </summary>
OpenCVForUnity.CoreModule.Rect patternRect;
/// <summary>
/// The rgb mat.
/// </summary>
Mat rgbMat;
/// <summary>
/// The output mat.
/// </summary>
Mat outputMat;
/// <summary>
/// The detector.
/// </summary>
ORB detector;
/// <summary>
/// The keypoints.
/// </summary>
MatOfKeyPoint keypoints;
// Use this for initialization
void Start()
{
//Utils.setDebugMode(true);
using (Mat patternMat = Imgcodecs.imread(Application.persistentDataPath + "/patternImg.jpg"))
{
if (patternMat.total() == 0)
{
patternRawImage.gameObject.SetActive(false);
}
else
{
Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_BGR2RGB);
Texture2D patternTexture = new Texture2D(patternMat.width(), patternMat.height(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(patternMat, patternTexture);
patternRawImage.texture = patternTexture;
patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);
patternRawImage.gameObject.SetActive(true);
}
}
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
webCamTextureToMatHelper.Initialize();
detector = ORB.create();
detector.setMaxFeatures(1000);
keypoints = new MatOfKeyPoint();
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.width(), webCamTextureMat.height(), TextureFormat.RGB24, false);
rgbMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
outputMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC3);
gameObject.transform.localScale = new Vector3(webCamTextureMat.width(), webCamTextureMat.height(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
// If the webcam is front-facing, flip the Mat horizontally.
if (webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing)
{
webCamTextureToMatHelper.flipHorizontal = true;
}
int patternWidth = (int)(Mathf.Min(webCamTextureMat.width(), webCamTextureMat.height()) * 0.8f);
patternRect = new OpenCVForUnity.CoreModule.Rect(webCamTextureMat.width() / 2 - patternWidth / 2, webCamTextureMat.height() / 2 - patternWidth / 2, patternWidth, patternWidth);
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
if (rgbMat != null)
{
rgbMat.Dispose();
}
if (outputMat != null)
{
outputMat.Dispose();
}
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
Imgproc.cvtColor(rgbaMat, rgbMat, Imgproc.COLOR_RGBA2RGB);
Imgproc.cvtColor(rgbaMat, outputMat, Imgproc.COLOR_RGBA2RGB);
detector.detect(rgbMat, keypoints);
//Debug.Log ("keypoints.ToString() " + keypoints.ToString());
Features2d.drawKeypoints(rgbMat, keypoints, rgbMat, Scalar.all(-1));
Imgproc.rectangle(rgbMat, patternRect.tl(), patternRect.br(), new Scalar(255, 0, 0, 255), 5);
Utils.fastMatToTexture2D(rgbMat, texture);
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
webCamTextureToMatHelper.Dispose();
detector.Dispose();
if (keypoints != null)
keypoints.Dispose();
//Utils.setDebugMode(false);
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick()
{
SceneManager.LoadScene("MarkerLessARExample");
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
}
/// <summary>
/// Raises the capture button click event.
/// </summary>
public void OnCaptureButtonClick()
{
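// Crop the pattern region; new Mat(outputMat, patternRect) is a ROI header that shares pixel data with outputMat.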
Mat patternMat = new Mat(outputMat, patternRect);
Texture2D patternTexture = new Texture2D(patternMat.width(), patternMat.height(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(patternMat, patternTexture);
patternRawImage.texture = patternTexture;
patternRawImage.gameObject.SetActive(true);
}
/// <summary>
/// Raises the save button click event.
/// </summary>
public void OnSaveButtonClick()
{
if (patternRawImage.texture != null)
{
Texture2D patternTexture = (Texture2D)patternRawImage.texture;
Mat patternMat = new Mat(patternRect.size(), CvType.CV_8UC3);
Utils.texture2DToMat(patternTexture, patternMat);
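// imwrite expects BGR channel order, so convert back from RGB before saving.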
Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_RGB2BGR);
string savePath = Application.persistentDataPath;
Debug.Log("savePath " + savePath);
Imgcodecs.imwrite(savePath + "/patternImg.jpg", patternMat);
SceneManager.LoadScene("WebCamTextureMarkerLessARExample");
}
}
}
}


@@ -1,68 +1,68 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Features2dModule;
using OpenCVForUnity.UnityUtils;
using System.Collections.Generic;
using UnityEngine;
namespace OpenCVMarkerLessAR
{
/// <summary>
/// Debug helpers.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter3_MarkerlessAR using "OpenCV for Unity".
/// </summary>
public class DebugHelpers : MonoBehaviour
{
// Use this for initialization
void Start()
{
}
// Update is called once per frame
void Update()
{
}
public void showMat(Mat mat)
{
Texture2D texture = new Texture2D(mat.cols(), mat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(mat, texture);
gameObject.transform.localScale = new Vector3(texture.width, texture.height, 1);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Draw matches between two images
public static Mat getMatchesImage(Mat query, Mat pattern, MatOfKeyPoint queryKp, MatOfKeyPoint trainKp, MatOfDMatch matches, int maxMatchesDrawn)
{
Mat outImg = new Mat();
List<DMatch> matchesList = matches.toList();
if (matchesList.Count > maxMatchesDrawn)
{
matchesList.RemoveRange(maxMatchesDrawn, matchesList.Count - maxMatchesDrawn);
}
MatOfDMatch tmpMatches = new MatOfDMatch();
tmpMatches.fromList(matchesList);
Features2d.drawMatches
(
query,
queryKp,
pattern,
trainKp,
tmpMatches,
outImg,
new Scalar(0, 200, 0, 255),
Scalar.all(-1),
new MatOfByte()
);
return outImg;
}
}
}


@@ -1,60 +1,60 @@
using OpenCVForUnity.CoreModule;
namespace OpenCVMarkerLessAR
{
/// <summary>
/// Pattern.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter3_MarkerlessAR using "OpenCV for Unity".
/// </summary>
public class Pattern
{
/// <summary>
/// The size.
/// </summary>
public Size size;
/// <summary>
/// The frame.
/// </summary>
public Mat frame;
/// <summary>
/// The gray image.
/// </summary>
public Mat grayImg;
/// <summary>
/// The keypoints.
/// </summary>
public MatOfKeyPoint keypoints;
/// <summary>
/// The descriptors.
/// </summary>
public Mat descriptors;
/// <summary>
/// The points2d.
/// </summary>
public MatOfPoint2f points2d;
/// <summary>
/// The points3d.
/// </summary>
public MatOfPoint3f points3d;
/// <summary>
/// Initializes a new instance of the <see cref="Pattern"/> class.
/// </summary>
public Pattern()
{
size = new Size();
frame = new Mat();
grayImg = new Mat();
keypoints = new MatOfKeyPoint();
descriptors = new Mat();
points2d = new MatOfPoint2f();
points3d = new MatOfPoint3f();
}
}
}


@@ -1,487 +1,487 @@
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.Features2dModule;
using OpenCVForUnity.ImgprocModule;
using System.Collections.Generic;
namespace OpenCVMarkerLessAR
{
/// <summary>
/// Pattern detector.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter3_MarkerlessAR using "OpenCV for Unity".
/// </summary>
public class PatternDetector
{
/// <summary>
/// The enable ratio test.
/// </summary>
public bool enableRatioTest;
/// <summary>
/// The enable homography refinement.
/// </summary>
public bool enableHomographyRefinement;
/// <summary>
/// The homography reprojection threshold.
/// </summary>
public float homographyReprojectionThreshold;
/// <summary>
/// The m_query keypoints.
/// </summary>
MatOfKeyPoint m_queryKeypoints;
/// <summary>
/// The m_query descriptors.
/// </summary>
Mat m_queryDescriptors;
/// <summary>
/// The m_matches.
/// </summary>
MatOfDMatch m_matches;
/// <summary>
/// The m_knn matches.
/// </summary>
List<MatOfDMatch> m_knnMatches;
/// <summary>
/// The m_gray image.
/// </summary>
Mat m_grayImg;
/// <summary>
/// The m_warped image.
/// </summary>
Mat m_warpedImg;
/// <summary>
/// The m_rough homography.
/// </summary>
Mat m_roughHomography;
/// <summary>
/// The m_refined homography.
/// </summary>
Mat m_refinedHomography;
/// <summary>
/// The m_pattern.
/// </summary>
Pattern m_pattern;
/// <summary>
/// The m_detector.
/// </summary>
ORB m_detector;
/// <summary>
/// The m_extractor.
/// </summary>
ORB m_extractor;
/// <summary>
/// The m_matcher.
/// </summary>
DescriptorMatcher m_matcher;
/// <summary>
/// Initializes a new instance of the <see cref="PatternDetector"/> class.
/// </summary>
/// <param name="detector">Detector.</param>
/// <param name="extractor">Extractor.</param>
/// <param name="matcher">Matcher.</param>
/// <param name="ratioTest">If set to <c>true</c> ratio test.</param>
public PatternDetector(ORB detector, ORB extractor, DescriptorMatcher matcher, bool ratioTest = false)
{
if (detector == null)
{
detector = ORB.create();
detector.setMaxFeatures(1000);
}
if (extractor == null)
{
extractor = ORB.create();
extractor.setMaxFeatures(1000);
}
if (matcher == null)
{
matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
}
m_detector = detector;
m_extractor = extractor;
m_matcher = matcher;
enableRatioTest = ratioTest;
enableHomographyRefinement = true;
homographyReprojectionThreshold = 3;
m_queryKeypoints = new MatOfKeyPoint();
m_queryDescriptors = new Mat();
m_matches = new MatOfDMatch();
m_knnMatches = new List<MatOfDMatch>();
m_grayImg = new Mat();
m_warpedImg = new Mat();
m_roughHomography = new Mat();
m_refinedHomography = new Mat();
}
/// <summary>
/// Train the specified pattern.
/// </summary>
/// <param name="pattern">Pattern.</param>
public void train(Pattern pattern)
{
// Store the pattern object
m_pattern = pattern;
// API of cv::DescriptorMatcher is somewhat tricky
// First we clear old train data:
m_matcher.clear();
// Then we add the vector of descriptors (each descriptor matrix describes one image).
// This allows us to perform a search across multiple images:
List<Mat> descriptors = new List<Mat>(1);
descriptors.Add(pattern.descriptors.clone());
m_matcher.add(descriptors);
// After adding the train data, perform the actual training:
m_matcher.train();
}
/// <summary>
/// Builds the pattern from image.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="pattern">Pattern.</param>
public void buildPatternFromImage(Mat image, Pattern pattern)
{
//int numImages = 4;
//float step = Mathf.Sqrt (2.0f);
// Store original image in pattern structure
pattern.size = new Size(image.cols(), image.rows());
pattern.frame = image.clone();
getGray(image, pattern.grayImg);
// Build 2d and 3d contours (the 3d contour lies in the XY plane since the pattern is planar)
List<Point> points2dList = new List<Point>(4);
List<Point3> points3dList = new List<Point3>(4);
// Image dimensions
float w = image.cols();
float h = image.rows();
// Normalized dimensions:
//float maxSize = Mathf.Max (w, h);
//float unitW = w / maxSize;
//float unitH = h / maxSize;
points2dList.Add(new Point(0, 0));
points2dList.Add(new Point(w, 0));
points2dList.Add(new Point(w, h));
points2dList.Add(new Point(0, h));
pattern.points2d.fromList(points2dList);
//points3dList.Add (new Point3 (-unitW, -unitH, 0));
//points3dList.Add (new Point3 (unitW, -unitH, 0));
//points3dList.Add (new Point3 (unitW, unitH, 0));
//points3dList.Add (new Point3 (-unitW, unitH, 0));
points3dList.Add(new Point3(-0.5f, -0.5f, 0));
points3dList.Add(new Point3(+0.5f, -0.5f, 0));
points3dList.Add(new Point3(+0.5f, +0.5f, 0));
points3dList.Add(new Point3(-0.5f, +0.5f, 0));
pattern.points3d.fromList(points3dList);
extractFeatures(pattern.grayImg, pattern.keypoints, pattern.descriptors);
}
/// <summary>
/// Finds the pattern.
/// </summary>
/// <returns><c>true</c>, if pattern was found, <c>false</c> otherwise.</returns>
/// <param name="image">Image.</param>
/// <param name="info">Info.</param>
public bool findPattern(Mat image, PatternTrackingInfo info)
{
// Convert input image to gray
getGray(image, m_grayImg);
// Extract feature points from input gray image
extractFeatures(m_grayImg, m_queryKeypoints, m_queryDescriptors);
// Get matches with current pattern
getMatches(m_queryDescriptors, m_matches);
//(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));
// Find homography transformation and detect good matches
bool homographyFound = refineMatchesWithHomography(
m_queryKeypoints,
m_pattern.keypoints,
homographyReprojectionThreshold,
m_matches,
m_roughHomography);
if (homographyFound)
{
//(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));
// If homography refinement is enabled, improve the found transformation
if (enableHomographyRefinement)
{
// Warp image using found homography
Imgproc.warpPerspective(m_grayImg, m_warpedImg, m_roughHomography, m_pattern.size, Imgproc.WARP_INVERSE_MAP | Imgproc.INTER_CUBIC);
//(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(m_warpedImg);
// Get refined matches:
using (MatOfKeyPoint warpedKeypoints = new MatOfKeyPoint())
using (MatOfDMatch refinedMatches = new MatOfDMatch())
{
// Detect features on warped image
extractFeatures(m_warpedImg, warpedKeypoints, m_queryDescriptors);
// Match with pattern
getMatches(m_queryDescriptors, refinedMatches);
// Estimate new refinement homography
homographyFound = refineMatchesWithHomography(
warpedKeypoints,
m_pattern.keypoints,
homographyReprojectionThreshold,
refinedMatches,
m_refinedHomography);
}
//(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat(DebugHelpers.getMatchesImage(m_warpedImg, m_pattern.grayImg, warpedKeypoints, m_pattern.keypoints, refinedMatches, 100));
// Compute the final homography as the matrix product of the rough and refined homographies:
// info.homography = m_roughHomography * m_refinedHomography;
Core.gemm(m_roughHomography, m_refinedHomography, 1, new Mat(), 0, info.homography);
//Debug.Log ("info.homography " + info.homography.ToString ());
// Transform contour with rough homography
//Core.perspectiveTransform (m_pattern.points2d, info.points2d, m_roughHomography);
//info.draw2dContour (image, new Scalar (200, 0, 0, 255));
// Transform contour with precise homography
Core.perspectiveTransform(m_pattern.points2d, info.points2d, info.homography);
//info.draw2dContour (image, new Scalar (0, 200, 0, 255));
}
else
{
info.homography = m_roughHomography;
//Debug.Log ("m_roughHomography " + m_roughHomography.ToString ());
//Debug.Log ("info.homography " + info.homography.ToString ());
// Transform contour with rough homography
Core.perspectiveTransform(m_pattern.points2d, info.points2d, m_roughHomography);
//info.draw2dContour (image, new Scalar (0, 200, 0, 255));
}
}
//(GameObject.Find ("DebugHelpers").GetComponent<DebugHelpers> ()).showMat (DebugHelpers.getMatchesImage (m_grayImg, m_pattern.grayImg, m_queryKeypoints, m_pattern.keypoints, m_matches, 100));
//Debug.Log ("Features:" + m_queryKeypoints.ToString () + " Matches: " + m_matches.ToString ());
return homographyFound;
}
/// <summary>
/// Gets the gray.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="gray">Gray.</param>
static void getGray(Mat image, Mat gray)
{
if (image.channels() == 3)
Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGB2GRAY);
else if (image.channels() == 4)
Imgproc.cvtColor(image, gray, Imgproc.COLOR_RGBA2GRAY);
else if (image.channels() == 1)
image.copyTo(gray);
}
/// <summary>
/// Extracts the features.
/// </summary>
/// <returns><c>true</c>, if features was extracted, <c>false</c> otherwise.</returns>
/// <param name="image">Image.</param>
/// <param name="keypoints">Keypoints.</param>
/// <param name="descriptors">Descriptors.</param>
bool extractFeatures(Mat image, MatOfKeyPoint keypoints, Mat descriptors)
{
if (image.total() == 0)
{
return false;
}
if (image.channels() != 1)
{
return false;
}
m_detector.detect(image, keypoints);
if (keypoints.total() == 0)
return false;
m_extractor.compute(image, keypoints, descriptors);
if (keypoints.total() == 0)
return false;
//Debug.Log ("extractFeatures true");
//Mat tmpImage = new Mat();
//
//Features2d.drawKeypoints(image, keypoints, tmpImage);
//
//DebugHelpers.showMat(tmpImage);
return true;
}
/// <summary>
/// Gets the matches.
/// </summary>
/// <param name="queryDescriptors">Query descriptors.</param>
/// <param name="matches">Matches.</param>
void getMatches(Mat queryDescriptors, MatOfDMatch matches)
{
List<DMatch> matchesList = new List<DMatch>();
//matches.clear();
if (enableRatioTest)
{
// To avoid NaNs when the best match has zero distance, use the inverse ratio.
float minRatio = 1.0f / 1.5f;
// KNN match will return 2 nearest matches for each query descriptor
m_matcher.knnMatch(queryDescriptors, m_knnMatches, 2);
for (int i = 0; i < m_knnMatches.Count; i++)
{
List<DMatch> m_knnMatchesList = m_knnMatches[i].toList();
DMatch bestMatch = m_knnMatchesList[0];
DMatch betterMatch = m_knnMatchesList[1];
float distanceRatio = bestMatch.distance / betterMatch.distance;
// Keep a match only if its best distance is at least 1.5x smaller
// than the second-best distance (Lowe's ratio test).
if (distanceRatio < minRatio)
{
matchesList.Add(bestMatch);
}
}
matches.fromList(matchesList);
}
else
{
matches.fromList(matchesList);
// Perform regular match
m_matcher.match(queryDescriptors, matches);
}
//Debug.Log ("getMatches " + matches.ToString ());
}
/// <summary>
/// Refines the matches with homography.
/// </summary>
/// <returns><c>true</c>, if matches with homography was refined, <c>false</c> otherwise.</returns>
/// <param name="queryKeypoints">Query keypoints.</param>
/// <param name="trainKeypoints">Train keypoints.</param>
/// <param name="reprojectionThreshold">Reprojection threshold.</param>
/// <param name="matches">Matches.</param>
/// <param name="homography">Homography.</param>
static bool refineMatchesWithHomography
(
MatOfKeyPoint queryKeypoints,
MatOfKeyPoint trainKeypoints,
float reprojectionThreshold,
MatOfDMatch matches,
Mat homography
)
{
//Debug.Log ("matches " + matches.ToString ());
int minNumberMatchesAllowed = 8;
List<KeyPoint> queryKeypointsList = queryKeypoints.toList();
List<KeyPoint> trainKeypointsList = trainKeypoints.toList();
List<DMatch> matchesList = matches.toList();
if (matchesList.Count < minNumberMatchesAllowed)
return false;
// Prepare data for cv::findHomography
List<Point> srcPointsList = new List<Point>(matchesList.Count);
List<Point> dstPointsList = new List<Point>(matchesList.Count);
for (int i = 0; i < matchesList.Count; i++)
{
srcPointsList.Add(trainKeypointsList[matchesList[i].trainIdx].pt);
dstPointsList.Add(queryKeypointsList[matchesList[i].queryIdx].pt);
}
// Find homography matrix and get inliers mask
using (MatOfPoint2f srcPoints = new MatOfPoint2f())
using (MatOfPoint2f dstPoints = new MatOfPoint2f())
using (MatOfByte inliersMask = new MatOfByte(new byte[srcPointsList.Count]))
{
srcPoints.fromList(srcPointsList);
dstPoints.fromList(dstPointsList);
//Debug.Log ("srcPoints " + srcPoints.ToString ());
//Debug.Log ("dstPoints " + dstPoints.ToString ());
Calib3d.findHomography(srcPoints,
dstPoints,
Calib3d.FM_RANSAC,
reprojectionThreshold,
inliersMask, 2000, 0.955).copyTo(homography);
if (homography.rows() != 3 || homography.cols() != 3)
return false;
//Debug.Log ("homography " + homography.ToString ());
//Debug.Log ("inliersMask " + inliersMask.dump ());
List<byte> inliersMaskList = inliersMask.toList();
List<DMatch> inliers = new List<DMatch>();
for (int i = 0; i < inliersMaskList.Count; i++)
{
if (inliersMaskList[i] == 1)
inliers.Add(matchesList[i]);
}
matches.fromList(inliers);
//Debug.Log ("matches " + matches.ToString ());
}
// Count the inliers that survived the RANSAC filter, not the pre-filter matches.
return matches.total() > minNumberMatchesAllowed;
}
}
}
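For orientation, here is a minimal usage sketch of how Pattern, PatternDetector, and PatternTrackingInfo fit together. It assumes a pattern image and a camera frame Mat are already available; patternImage, frame, camMatrix, and distCoeffs are illustrative placeholders, not part of this commit.

// Illustrative sketch: patternImage, frame, camMatrix, and distCoeffs are assumed to exist.
Pattern pattern = new Pattern();
PatternTrackingInfo info = new PatternTrackingInfo();
PatternDetector detector = new PatternDetector(null, null, null, true); // defaults: ORB features, BRUTEFORCE_HAMMING matcher

// One-time setup: extract features from the pattern image and train the matcher.
detector.buildPatternFromImage(patternImage, pattern);
detector.train(pattern);

// Per frame: look for the pattern and, if found, estimate its pose.
if (detector.findPattern(frame, info))
{
    info.computePose(pattern, camMatrix, distCoeffs);
    info.draw2dContour(frame, new Scalar(0, 255, 0, 255));
}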


@@ -1,87 +1,87 @@
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using System.Collections.Generic;
using UnityEngine;
namespace OpenCVMarkerLessAR
{
/// <summary>
/// Pattern tracking info.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter3_MarkerlessAR using "OpenCV for Unity".
/// </summary>
public class PatternTrackingInfo
{
/// <summary>
/// The homography.
/// </summary>
public Mat homography;
/// <summary>
/// The points2d.
/// </summary>
public MatOfPoint2f points2d;
/// <summary>
/// The pose3d.
/// </summary>
public Matrix4x4 pose3d;
/// <summary>
/// Initializes a new instance of the <see cref="PatternTrackingInfo"/> class.
/// </summary>
public PatternTrackingInfo()
{
homography = new Mat();
points2d = new MatOfPoint2f();
pose3d = new Matrix4x4();
}
/// <summary>
/// Computes the pose.
/// </summary>
/// <param name="pattern">Pattern.</param>
/// <param name="camMatrix">Cam matrix.</param>
/// <param name="distCoeff">Dist coeff.</param>
public void computePose(Pattern pattern, Mat camMatrix, MatOfDouble distCoeff)
{
Mat Rvec = new Mat();
Mat Tvec = new Mat();
Mat raux = new Mat();
Mat taux = new Mat();
Calib3d.solvePnP(pattern.points3d, points2d, camMatrix, distCoeff, raux, taux);
raux.convertTo(Rvec, CvType.CV_32F);
taux.convertTo(Tvec, CvType.CV_32F);
Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
Calib3d.Rodrigues(Rvec, rotMat);
pose3d.SetRow(0, new Vector4((float)rotMat.get(0, 0)[0], (float)rotMat.get(0, 1)[0], (float)rotMat.get(0, 2)[0], (float)Tvec.get(0, 0)[0]));
pose3d.SetRow(1, new Vector4((float)rotMat.get(1, 0)[0], (float)rotMat.get(1, 1)[0], (float)rotMat.get(1, 2)[0], (float)Tvec.get(1, 0)[0]));
pose3d.SetRow(2, new Vector4((float)rotMat.get(2, 0)[0], (float)rotMat.get(2, 1)[0], (float)rotMat.get(2, 2)[0], (float)Tvec.get(2, 0)[0]));
pose3d.SetRow(3, new Vector4(0, 0, 0, 1));
Rvec.Dispose();
Tvec.Dispose();
raux.Dispose();
taux.Dispose();
rotMat.Dispose();
}
/// <summary>
/// Draw2ds the contour.
/// </summary>
/// <param name="image">Image.</param>
/// <param name="color">Color.</param>
public void draw2dContour(Mat image, Scalar color)
{
List<Point> points2dList = points2d.toList();
for (int i = 0; i < points2dList.Count; i++)
{
Imgproc.line(image, points2dList[i], points2dList[(i + 1) % points2dList.Count], color, 2, Imgproc.LINE_AA, 0);
}
}
}
}
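As a hedged sketch (not part of this commit): once computePose has filled pose3d, the matrix can be applied to a Unity Transform roughly as below. The handedness conversion between OpenCV's right-handed and Unity's left-handed coordinate systems is assumed to have been applied already, and ApplyPose is a hypothetical helper name.

// Hypothetical helper; assumes 'pose' is already in Unity's left-handed convention.
static void ApplyPose(Transform target, Matrix4x4 pose)
{
    target.position = pose.GetColumn(3); // translation column
    target.rotation = Quaternion.LookRotation(pose.GetColumn(2), pose.GetColumn(1)); // forward (Z), up (Y)
}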


@@ -1,83 +1,83 @@
using OpenCVForUnity.CoreModule;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace MarkerLessARExample
{
/// <summary>
/// MarkerLessAR Example
/// </summary>
public class MarkerLessARExample : MonoBehaviour
{
public Text exampleTitle;
public Text versionInfo;
public ScrollRect scrollRect;
static float verticalNormalizedPosition = 1f;
// Use this for initialization
void Start()
{
exampleTitle.text = "MarkerLessAR Example " + Application.version;
versionInfo.text = Core.NATIVE_LIBRARY_NAME + " " + OpenCVForUnity.UnityUtils.Utils.getVersion() + " (" + Core.VERSION + ")";
versionInfo.text += " / UnityEditor " + Application.unityVersion;
versionInfo.text += " / ";
#if UNITY_EDITOR
versionInfo.text += "Editor";
#elif UNITY_STANDALONE_WIN
versionInfo.text += "Windows";
#elif UNITY_STANDALONE_OSX
versionInfo.text += "Mac OSX";
#elif UNITY_STANDALONE_LINUX
versionInfo.text += "Linux";
#elif UNITY_ANDROID
versionInfo.text += "Android";
#elif UNITY_IOS
versionInfo.text += "iOS";
#elif UNITY_WSA
versionInfo.text += "WSA";
#elif UNITY_WEBGL
versionInfo.text += "WebGL";
#endif
versionInfo.text += " ";
#if ENABLE_MONO
versionInfo.text += "Mono";
#elif ENABLE_IL2CPP
versionInfo.text += "IL2CPP";
#elif ENABLE_DOTNET
versionInfo.text += ".NET";
#endif
scrollRect.verticalNormalizedPosition = verticalNormalizedPosition;
}
// Update is called once per frame
void Update()
{
}
public void OnScrollRectValueChanged()
{
verticalNormalizedPosition = scrollRect.verticalNormalizedPosition;
}
public void OnShowLicenseButtonClick()
{
SceneManager.LoadScene("ShowLicense");
}
public void OnTexture2DMarkerLessARExampleButtonClick()
{
SceneManager.LoadScene("Texture2DMarkerLessARExample");
}
public void OnWebCamTextureMarkerLessARExampleButtonClick()
{
SceneManager.LoadScene("WebCamTextureMarkerLessARExample");
}
}
}


@@ -1749,7 +1749,7 @@ RectTransform:
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
- m_AnchoredPosition: {x: 0, y: -0.000001295756}
+ m_AnchoredPosition: {x: 0, y: 0.000021296615}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0.5, y: 1}
--- !u!114 &2020746268


@@ -1,48 +1,48 @@
using System.Collections;
using UnityEngine;
namespace MarkerLessARExample
{
public class DelayableSetActive : MonoBehaviour
{
private Coroutine deactivateCoroutine;
/// <summary>
/// Activates/Deactivates the GameObject.
/// </summary>
/// <param name="value">If set to <c>true</c> value.</param>
/// <param name="delayTime">Delay time.</param>
public void SetActive(bool value, float delayTime = 0.0f)
{
if (value)
{
if (deactivateCoroutine != null)
{
StopCoroutine(deactivateCoroutine);
deactivateCoroutine = null;
}
gameObject.SetActive(value);
}
else
{
if (delayTime == 0.0f)
{
gameObject.SetActive(value);
return;
}
if (gameObject.activeSelf && deactivateCoroutine == null)
deactivateCoroutine = StartCoroutine(DeactivateGameObject(delayTime));
}
}
private IEnumerator DeactivateGameObject(float delayTime)
{
yield return new WaitForSeconds(delayTime);
gameObject.SetActive(false);
deactivateCoroutine = null;
}
}
}
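A brief usage sketch (illustrative, not from this commit): SetActive(true) shows the object and cancels any pending deactivation, while SetActive(false, delay) schedules one.

// 'statusPanel' is a hypothetical GameObject that carries a DelayableSetActive component.
DelayableSetActive panel = statusPanel.GetComponent<DelayableSetActive>();
panel.SetActive(true); // show immediately and cancel any scheduled hide
panel.SetActive(false, 2.0f); // hide two seconds from now, unless re-shown first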


@@ -1,155 +1,156 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.VideoioModule;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using VideoCapture = OpenCVForUnity.VideoioModule.VideoCapture;
namespace MarkerLessARExample
{
/// <summary>
/// Display video.
/// </summary>
public class DisplayVideo : MonoBehaviour
{
/// <summary>
/// The name of the file.
/// </summary>
public string fileName;
/// <summary>
/// The video capture.
/// </summary>
VideoCapture capture;
/// <summary>
/// The rgb mat.
/// </summary>
Mat rgbMat;
/// <summary>
/// The colors.
/// </summary>
Color32[] colors;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// Indicates whether the video is playing.
/// </summary>
bool isPlaying = false;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
capture = new VideoCapture();
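// On WebGL, files under StreamingAssets cannot be read synchronously, so the file path is resolved via a coroutine first.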
#if UNITY_WEBGL
getFilePath_Coroutine = Utils.getFilePathAsync(fileName, (result) =>
{
getFilePath_Coroutine = null;
Debug.Log("result "+ result);
capture.open(result);
Init();
});
StartCoroutine(getFilePath_Coroutine);
#else
capture.open(Utils.getFilePath(fileName));
Init();
#endif
}
private void Init()
{
rgbMat = new Mat();
if (capture.isOpened())
{
Debug.Log("capture.isOpened() true");
Debug.Log("CAP_PROP_FORMAT: " + capture.get(Videoio.CAP_PROP_FORMAT));
Debug.Log("CAP_PROP_POS_MSEC: " + capture.get(Videoio.CAP_PROP_POS_MSEC));
Debug.Log("CAP_PROP_POS_FRAMES: " + capture.get(Videoio.CAP_PROP_POS_FRAMES));
Debug.Log("CAP_PROP_POS_AVI_RATIO: " + capture.get(Videoio.CAP_PROP_POS_AVI_RATIO));
Debug.Log("CAP_PROP_FRAME_COUNT: " + capture.get(Videoio.CAP_PROP_FRAME_COUNT));
Debug.Log("CAP_PROP_FPS: " + capture.get(Videoio.CAP_PROP_FPS));
Debug.Log("CAP_PROP_FRAME_WIDTH: " + capture.get(Videoio.CAP_PROP_FRAME_WIDTH));
Debug.Log("CAP_PROP_FRAME_HEIGHT: " + capture.get(Videoio.CAP_PROP_FRAME_HEIGHT));
capture.grab();
capture.retrieve(rgbMat, 0);
colors = new Color32[rgbMat.cols() * rgbMat.rows()];
texture = new Texture2D(rgbMat.cols(), rgbMat.rows(), TextureFormat.RGBA32, false);
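// The negative scale mirrors the quad on each axis, which appears to compensate for the Mat's top-left origin so the video displays upright.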
gameObject.transform.localScale = new Vector3(-((float)rgbMat.cols() / (float)rgbMat.rows()), -1, -1);
capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
isPlaying = true;
}
else
{
Debug.Log("capture.isOpened() false");
}
}
// Update is called once per frame
void Update()
{
if (isPlaying)
{
//Loop playback: rewind to the first frame when the end is reached.
if (capture.get(Videoio.CAP_PROP_POS_FRAMES) >= capture.get(Videoio.CAP_PROP_FRAME_COUNT))
capture.set(Videoio.CAP_PROP_POS_FRAMES, 0);
if (capture.grab())
{
capture.retrieve(rgbMat, 0);
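// OpenCV decodes video frames in BGR order; convert to RGB before uploading to the Unity texture.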
Imgproc.cvtColor(rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
Utils.matToTexture2D(rgbMat, texture, colors);
}
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
if (capture != null)
capture.release();
if (rgbMat != null)
rgbMat.Dispose();
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
}
}

View file

@@ -1,28 +1,28 @@
using UnityEngine;
using UnityEngine.SceneManagement;
namespace MarkerLessARSample
{
/// <summary>
/// Show License
/// </summary>
public class ShowLicense : MonoBehaviour
{
// Use this for initialization
void Start()
{
}
// Update is called once per frame
void Update()
{
}
public void OnBackButtonButtonClick()
{
SceneManager.LoadScene("MarkerLessARExample");
}
}
}

View file

@@ -1,225 +1,225 @@
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVMarkerLessAR;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace MarkerLessARExample
{
/// <summary>
/// Texture2D Markerless AR Example
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter3_MarkerlessAR using "OpenCV for Unity".
/// </summary>
public class Texture2DMarkerLessARExample : MonoBehaviour
{
/// <summary>
/// The pattern texture.
/// </summary>
public Texture2D patternTexture;
/// <summary>
/// The pattern raw image.
/// </summary>
public RawImage patternRawImage;
/// <summary>
/// The image texture.
/// </summary>
public Texture2D imgTexture;
/// <summary>
/// The AR camera.
/// </summary>
public Camera ARCamera;
/// <summary>
/// Determines if should move AR camera.
/// </summary>
public bool shouldMoveARCamera;
/// <summary>
/// The AR game object.
/// </summary>
public GameObject ARGameObject;
// Use this for initialization
void Start()
{
Mat patternMat = new Mat(patternTexture.height, patternTexture.width, CvType.CV_8UC4);
Utils.texture2DToMat(patternTexture, patternMat);
Debug.Log("patternMat dst ToString " + patternMat.ToString());
patternRawImage.texture = patternTexture;
patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);
Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
Utils.texture2DToMat(imgTexture, imgMat);
Debug.Log("imgMat dst ToString " + imgMat.ToString());
gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = imgMat.width();
float height = imgMat.height();
float imageSizeScale = 1.0f;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
}
else
{
Camera.main.orthographicSize = height / 2;
}
//Set camera parameters (approximate pinhole model).
int max_d = (int)Mathf.Max(width, height);
double fx = max_d;
double fy = max_d;
double cx = width / 2.0f;
double cy = height / 2.0f;
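// Intrinsic matrix layout:
// | fx  0  cx |
// |  0  fy cy |
// |  0   0  1 |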
Mat camMatrix = new Mat(3, 3, CvType.CV_64FC1);
camMatrix.put(0, 0, fx);
camMatrix.put(0, 1, 0);
camMatrix.put(0, 2, cx);
camMatrix.put(1, 0, 0);
camMatrix.put(1, 1, fy);
camMatrix.put(1, 2, cy);
camMatrix.put(2, 0, 0);
camMatrix.put(2, 1, 0);
camMatrix.put(2, 2, 1.0f);
Debug.Log("camMatrix " + camMatrix.dump());
MatOfDouble distCoeffs = new MatOfDouble(0, 0, 0, 0);
Debug.Log("distCoeffs " + distCoeffs.dump());
//Compute camera calibration values.
Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
double apertureWidth = 0;
double apertureHeight = 0;
double[] fovx = new double[1];
double[] fovy = new double[1];
double[] focalLength = new double[1];
Point principalPoint = new Point(0, 0);
double[] aspectratio = new double[1];
Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
Debug.Log("imageSize " + imageSize.ToString());
Debug.Log("apertureWidth " + apertureWidth);
Debug.Log("apertureHeight " + apertureHeight);
Debug.Log("fovx " + fovx[0]);
Debug.Log("fovy " + fovy[0]);
Debug.Log("focalLength " + focalLength[0]);
Debug.Log("principalPoint " + principalPoint.ToString());
Debug.Log("aspectratio " + aspectratio[0]);
//Convert between the FOV definitions of OpenCV and Unity (the scale relates the symmetric FOV to the FOV measured about the principal point).
double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
Debug.Log("fovXScale " + fovXScale);
Debug.Log("fovYScale " + fovYScale);
//Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
if (widthScale < heightScale)
{
ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
}
else
{
ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
}
//Learn the feature points of the pattern image.
Pattern pattern = new Pattern();
PatternTrackingInfo patternTrackingInfo = new PatternTrackingInfo();
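// Passing nulls presumably lets PatternDetector create its default feature detector, descriptor extractor, and matcher; the final boolean likely enables the ratio test when matching (assumption based on the original MasteringOpenCV code).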
PatternDetector patternDetector = new PatternDetector(null, null, null, true);
patternDetector.buildPatternFromImage(patternMat, pattern);
patternDetector.train(pattern);
bool patternFound = patternDetector.findPattern(imgMat, patternTrackingInfo);
Debug.Log("patternFound " + patternFound);
if (patternFound)
{
patternTrackingInfo.computePose(pattern, camMatrix, distCoeffs);
Matrix4x4 transformationM = patternTrackingInfo.pose3d;
Debug.Log("transformationM " + transformationM.ToString());
Matrix4x4 invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
Debug.Log("invertZM " + invertZM.ToString());
Matrix4x4 invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
Debug.Log("invertYM " + invertYM.ToString());
// Convert the right-handed coordinate system (OpenCV) to the left-handed one (Unity).
// https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
Matrix4x4 ARM = invertYM * transformationM * invertYM;
// Apply the Y-axis and Z-axis reflection matrices to adjust the posture of the AR object.
ARM = ARM * invertYM * invertZM;
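// Either move the AR camera relative to a fixed scene (inverse of the pose) or move the AR object relative to a fixed camera.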
if (shouldMoveARCamera)
{
ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
Debug.Log("ARM " + ARM.ToString());
ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
}
else
{
ARM = ARCamera.transform.localToWorldMatrix * ARM;
Debug.Log("ARM " + ARM.ToString());
ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
}
}
Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(imgMat, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Update is called once per frame
void Update()
{
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick()
{
SceneManager.LoadScene("MarkerLessARExample");
}
}
}

View file

@@ -1,484 +1,484 @@
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgcodecsModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVMarkerLessAR;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace MarkerLessARExample
{
/// <summary>
/// WebCamTexture Markerless AR Example
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter3_MarkerlessAR using "OpenCV for Unity".
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureMarkerLessARExample : MonoBehaviour
{
/// <summary>
/// The pattern raw image.
/// </summary>
public RawImage patternRawImage;
/// <summary>
/// The AR game object.
/// </summary>
public GameObject ARGameObject;
/// <summary>
/// The AR camera.
/// </summary>
public Camera ARCamera;
/// <summary>
/// Determines if should move AR camera.
/// </summary>
public bool shouldMoveARCamera;
/// <summary>
/// Determines if displays axes.
/// </summary>
public bool displayAxes = false;
/// <summary>
/// The display axes toggle.
/// </summary>
public Toggle displayAxesToggle;
/// <summary>
/// The axes.
/// </summary>
public GameObject axes;
/// <summary>
/// Determines if displays cube.
/// </summary>
public bool displayCube = false;
/// <summary>
/// The display cube toggle.
/// </summary>
public Toggle displayCubeToggle;
/// <summary>
/// The cube.
/// </summary>
public GameObject cube;
/// <summary>
/// Determines if displays video.
/// </summary>
public bool displayVideo = false;
/// <summary>
/// The display video toggle.
/// </summary>
public Toggle displayVideoToggle;
/// <summary>
/// The video.
/// </summary>
public GameObject video;
/// <summary>
/// The pattern mat.
/// </summary>
Mat patternMat;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
/// <summary>
/// The cameraparam matrix.
/// </summary>
Mat camMatrix;
/// <summary>
/// The dist coeffs.
/// </summary>
MatOfDouble distCoeffs;
/// <summary>
/// The matrix that inverts the Y axis.
/// </summary>
Matrix4x4 invertYM;
/// <summary>
/// The matrix that inverts the Z axis.
/// </summary>
Matrix4x4 invertZM;
/// <summary>
/// The pattern.
/// </summary>
Pattern pattern;
/// <summary>
/// The pattern tracking info.
/// </summary>
PatternTrackingInfo patternTrackingInfo;
/// <summary>
/// The pattern detector.
/// </summary>
PatternDetector patternDetector;
// Use this for initialization
void Start()
{
displayAxesToggle.isOn = displayAxes;
axes.SetActive(displayAxes);
displayCubeToggle.isOn = displayCube;
cube.SetActive(displayCube);
displayVideoToggle.isOn = displayVideo;
video.SetActive(displayVideo);
ARGameObject.gameObject.SetActive(false);
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
patternMat = Imgcodecs.imread(Application.persistentDataPath + "/patternImg.jpg");
if (patternMat.total() == 0)
{
OnCapturePatternButtonClick();
}
else
{
Imgproc.cvtColor(patternMat, patternMat, Imgproc.COLOR_BGR2RGB);
Texture2D patternTexture = new Texture2D(patternMat.width(), patternMat.height(), TextureFormat.RGBA32, false);
//To reuse the Mat, set the flipAfter flag to true.
Utils.matToTexture2D(patternMat, patternTexture, true, 0, true);
Debug.Log("patternMat dst ToString " + patternMat.ToString());
patternRawImage.texture = patternTexture;
patternRawImage.rectTransform.localScale = new Vector3(1.0f, (float)patternMat.height() / (float)patternMat.width(), 1.0f);
pattern = new Pattern();
patternTrackingInfo = new PatternTrackingInfo();
patternDetector = new PatternDetector(null, null, null, true);
patternDetector.buildPatternFromImage(patternMat, pattern);
patternDetector.train(pattern);
webCamTextureToMatHelper.Initialize();
}
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.width(), webCamTextureMat.height(), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
gameObject.transform.localScale = new Vector3(webCamTextureMat.width(), webCamTextureMat.height(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float imageSizeScale = 1.0f;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
}
else
{
Camera.main.orthographicSize = height / 2;
}
//Set camera parameters (approximate pinhole model).
int max_d = (int)Mathf.Max(width, height);
double fx = max_d;
double fy = max_d;
double cx = width / 2.0f;
double cy = height / 2.0f;
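// Same approximate pinhole intrinsics as in Texture2DMarkerLessARExample: focal length taken as the larger image dimension, principal point at the image center.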
camMatrix = new Mat(3, 3, CvType.CV_64FC1);
camMatrix.put(0, 0, fx);
camMatrix.put(0, 1, 0);
camMatrix.put(0, 2, cx);
camMatrix.put(1, 0, 0);
camMatrix.put(1, 1, fy);
camMatrix.put(1, 2, cy);
camMatrix.put(2, 0, 0);
camMatrix.put(2, 1, 0);
camMatrix.put(2, 2, 1.0f);
Debug.Log("camMatrix " + camMatrix.dump());
distCoeffs = new MatOfDouble(0, 0, 0, 0);
Debug.Log("distCoeffs " + distCoeffs.dump());
//Compute camera calibration values.
Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
double apertureWidth = 0;
double apertureHeight = 0;
double[] fovx = new double[1];
double[] fovy = new double[1];
double[] focalLength = new double[1];
Point principalPoint = new Point(0, 0);
double[] aspectratio = new double[1];
Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
Debug.Log("imageSize " + imageSize.ToString());
Debug.Log("apertureWidth " + apertureWidth);
Debug.Log("apertureHeight " + apertureHeight);
Debug.Log("fovx " + fovx[0]);
Debug.Log("fovy " + fovy[0]);
Debug.Log("focalLength " + focalLength[0]);
Debug.Log("principalPoint " + principalPoint.ToString());
Debug.Log("aspectratio " + aspectratio[0]);
//Convert between the FOV definitions of OpenCV and Unity (the scale relates the symmetric FOV to the FOV measured about the principal point).
double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
Debug.Log("fovXScale " + fovXScale);
Debug.Log("fovYScale " + fovYScale);
//Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
if (widthScale < heightScale)
{
ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
}
else
{
ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
}
invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
Debug.Log("invertYM " + invertYM.ToString());
invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
Debug.Log("invertZM " + invertZM.ToString());
//If the webcam is front-facing, flip the Mat horizontally.
webCamTextureToMatHelper.flipHorizontal = webCamTextureToMatHelper.GetWebCamDevice().isFrontFacing;
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
if (grayMat != null)
grayMat.Dispose();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
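// Pattern detection runs on a grayscale copy of the camera frame.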
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
bool patternFound = patternDetector.findPattern(grayMat, patternTrackingInfo);
//Debug.Log ("patternFound " + patternFound);
if (patternFound)
{
patternTrackingInfo.computePose(pattern, camMatrix, distCoeffs);
//Matrix converting marker coordinates to the camera coordinate system.
Matrix4x4 transformationM = patternTrackingInfo.pose3d;
// Convert the right-handed coordinate system (OpenCV) to the left-handed one (Unity).
// https://stackoverflow.com/questions/30234945/change-handedness-of-a-row-major-4x4-transformation-matrix
Matrix4x4 ARM = invertYM * transformationM * invertYM;
// Apply the Y-axis and Z-axis reflection matrices to adjust the posture of the AR object.
ARM = ARM * invertYM * invertZM;
if (shouldMoveARCamera)
{
ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
//Debug.Log("ARM " + ARM.ToString());
ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
}
else
{
ARM = ARCamera.transform.localToWorldMatrix * ARM;
//Debug.Log("ARM " + ARM.ToString());
ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
}
ARGameObject.GetComponent<DelayableSetActive>().SetActive(true);
}
else
{
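// Hide with a 0.5 s delay (see DelayableSetActive) so single-frame detection dropouts do not make the AR object flicker.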
ARGameObject.GetComponent<DelayableSetActive>().SetActive(false, 0.5f);
}
Utils.fastMatToTexture2D(rgbaMat, texture);
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
webCamTextureToMatHelper.Dispose();
if (patternMat != null)
patternMat.Dispose();
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick()
{
SceneManager.LoadScene("MarkerLessARExample");
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
}
/// <summary>
/// Raises the display axes toggle value changed event.
/// </summary>
public void OnDisplayAxesToggleValueChanged()
{
if (displayAxesToggle.isOn)
{
displayAxes = true;
}
else
{
displayAxes = false;
}
axes.SetActive(displayAxes);
}
/// <summary>
/// Raises the display cube toggle value changed event.
/// </summary>
public void OnDisplayCubeToggleValueChanged()
{
if (displayCubeToggle.isOn)
{
displayCube = true;
}
else
{
displayCube = false;
}
cube.SetActive(displayCube);
}
/// <summary>
/// Raises the display video toggle value changed event.
/// </summary>
public void OnDisplayVideoToggleValueChanged()
{
if (displayVideoToggle.isOn)
{
displayVideo = true;
}
else
{
displayVideo = false;
}
video.SetActive(displayVideo);
}
/// <summary>
/// Raises the capture pattern button click event.
/// </summary>
public void OnCapturePatternButtonClick()
{
SceneManager.LoadScene("CapturePattern");
}
}
}

View file

@@ -1,28 +1,28 @@
MarkerLess AR Example
====================
Overview
-----
[https://assetstore.unity.com/packages/templates/tutorials/markerless-ar-example-77560](https://assetstore.unity.com/packages/templates/tutorials/markerless-ar-example-77560?aid=1011l4ehR)
Environment
-----
[OpenCVForUnity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR)
Setup Tutorial & Demo Video
-----
[![](http://img.youtube.com/vi/B4pc_e8mdcs/0.jpg)](https://www.youtube.com/watch?v=B4pc_e8mdcs)
Demo
-----
- WebGL
<https://enoxsoftware.github.io/MarkerLessARExample/webgl_example/index.html>
- Android
<https://play.google.com/store/apps/details?id=com.enoxsoftware.markerlessarexample>
Manual
-----
[ReadMe.pdf](/Assets/MarkerLessARExample/ReadMe.pdf)