1
0
Форкнуть 0
This commit is contained in:
EnoxSoftware 2020-03-28 07:05:59 +09:00
Коммит 995cf9f352
71 изменённых файлов: 31969 добавлений и 0 удалений

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 66597862d10556b4096fef8f3680a3ab
folderAsset: yes
timeCreated: 1521821567
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: a394dc9ddb1871f46991ea89223c9b72
folderAsset: yes
timeCreated: 1524389028
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,176 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System;
namespace NatDeviceWithOpenCVForUnityExample
{
public class ComicFilter
{
    // Working buffers, lazily (re)allocated in Process() to match the source size.
    Mat grayMat;
    Mat maskMat;
    Mat screentoneMat;
    Mat grayDstMat;

    // 256-entry lookup tables: grayLUT selects the mid-gray band as a screentone mask;
    // contrastAdjustmentsLUT applies a 1.5x contrast boost used for main-line extraction.
    Mat grayLUT;
    Mat contrastAdjustmentsLUT;

    // Morphology kernels used only when drawing the main line.
    Mat kernel_dilate;
    Mat kernel_erode;

    Size blurSize;
    int blackThresh;
    bool drawMainLine;
    bool useNoiseFilter;

    /// <summary>
    /// Initializes the filter and precomputes the lookup tables and kernels.
    /// </summary>
    /// <param name="blackThresh">Pixels darker than this become solid black.</param>
    /// <param name="grayThresh">Pixels in [blackThresh, grayThresh) are filled with the striped screentone.</param>
    /// <param name="thickness">Main-line thickness; 0 disables main-line drawing.</param>
    /// <param name="useNoiseFilter">Whether to blur/erode to suppress noise in the main line.</param>
    public ComicFilter(int blackThresh = 60, int grayThresh = 120, int thickness = 5, bool useNoiseFilter = true)
    {
        this.blackThresh = blackThresh;
        this.drawMainLine = (thickness != 0);
        this.useNoiseFilter = useNoiseFilter;

        // LUT that maps the mid-gray band [blackThresh, grayThresh) to 255 (screentone mask).
        grayLUT = new Mat(1, 256, CvType.CV_8UC1);
        byte[] lutArray = new byte[256];
        for (int i = 0; i < lutArray.Length; i++)
        {
            if (blackThresh <= i && i < grayThresh)
                lutArray[i] = 255;
        }
        MatUtils.copyToMat(lutArray, grayLUT);

        if (drawMainLine)
        {
            kernel_dilate = new Mat(thickness, thickness, CvType.CV_8UC1, new Scalar(1));

            int erode = (thickness >= 5) ? 2 : 1;
            kernel_erode = new Mat(erode, erode, CvType.CV_8UC1, new Scalar(1));

            int blur = (thickness >= 4) ? thickness - 1 : 3;
            blurSize = new Size(blur, blur);

            // LUT equivalent of convertTo(dst, -1, 1.5, 0): scale by 1.5 and clamp to 255.
            contrastAdjustmentsLUT = new Mat(1, 256, CvType.CV_8UC1);
            byte[] contrastAdjustmentsLUTArray = new byte[256];
            for (int i = 0; i < contrastAdjustmentsLUTArray.Length; i++)
            {
                int a = (int)(i * 1.5f);
                contrastAdjustmentsLUTArray[i] = (a > byte.MaxValue) ? (byte)255 : (byte)a;
            }
            MatUtils.copyToMat(contrastAdjustmentsLUTArray, contrastAdjustmentsLUT);
        }
    }

    /// <summary>
    /// Applies the comic-style filter to <paramref name="src"/> and writes the result to <paramref name="dst"/>.
    /// </summary>
    /// <param name="src">Input image (CV_8UC1, CV_8UC3 or CV_8UC4).</param>
    /// <param name="dst">Output image; its type selects the output gray-to-color conversion.</param>
    /// <param name="isBGR">True if the color images use BGR(A) channel order instead of RGB(A).</param>
    /// <exception cref="ArgumentNullException">Thrown when src or dst is null.</exception>
    public void Process(Mat src, Mat dst, bool isBGR = false)
    {
        if (src == null)
            throw new ArgumentNullException(nameof(src));
        if (dst == null)
            throw new ArgumentNullException(nameof(dst));

        // Reallocate the working buffers when the input size changes.
        if (grayMat != null && (grayMat.width() != src.width() || grayMat.height() != src.height()))
        {
            grayMat.Dispose();
            grayMat = null;
            maskMat.Dispose();
            maskMat = null;
            screentoneMat.Dispose();
            screentoneMat = null;
            grayDstMat.Dispose();
            grayDstMat = null;
        }
        grayMat = grayMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);
        maskMat = maskMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);
        grayDstMat = grayDstMat ?? new Mat(src.height(), src.width(), CvType.CV_8UC1);

        if (screentoneMat == null)
        {
            // create a striped screentone (diagonal black lines every 4 rows on white).
            screentoneMat = new Mat(src.height(), src.width(), CvType.CV_8UC1, new Scalar(255));
            for (int i = 0; i < screentoneMat.rows() * 2.5f; i = i + 4)
            {
                Imgproc.line(screentoneMat, new Point(0, 0 + i), new Point(screentoneMat.cols(), -screentoneMat.cols() + i), new Scalar(0), 1);
            }
        }

        // Convert the input to grayscale. The channel count must be decided from the
        // SOURCE type: the original code inspected dst here, mis-converting a 4-channel
        // src whenever dst happened to be CV_8UC3.
        if (src.type() == CvType.CV_8UC1)
        {
            src.copyTo(grayMat);
        }
        else if (src.type() == CvType.CV_8UC3)
        {
            Imgproc.cvtColor(src, grayMat, (isBGR) ? Imgproc.COLOR_BGR2GRAY : Imgproc.COLOR_RGB2GRAY);
        }
        else
        {
            Imgproc.cvtColor(src, grayMat, (isBGR) ? Imgproc.COLOR_BGRA2GRAY : Imgproc.COLOR_RGBA2GRAY);
        }

        // binarize.
        Imgproc.threshold(grayMat, grayDstMat, blackThresh, 255.0, Imgproc.THRESH_BINARY);

        // draw striped screentone where the mid-gray mask is set.
        Core.LUT(grayMat, grayLUT, maskMat);
        screentoneMat.copyTo(grayDstMat, maskMat);

        // draw main line.
        if (drawMainLine)
        {
            Core.LUT(grayMat, contrastAdjustmentsLUT, maskMat); // = grayMat.convertTo(maskMat, -1, 1.5, 0);

            if (useNoiseFilter)
            {
                Imgproc.blur(maskMat, grayMat, blurSize);
                Imgproc.dilate(grayMat, maskMat, kernel_dilate);
            }
            else
            {
                Imgproc.dilate(maskMat, grayMat, kernel_dilate);
            }

            // Edges = |original - dilated|, then thresholded into a line mask.
            Core.absdiff(grayMat, maskMat, grayMat);
            Imgproc.threshold(grayMat, maskMat, 25, 255.0, Imgproc.THRESH_BINARY);

            if (useNoiseFilter)
            {
                Imgproc.erode(maskMat, grayMat, kernel_erode);
                Core.bitwise_not(grayMat, maskMat);
                maskMat.copyTo(grayDstMat, grayMat);
            }
            else
            {
                Core.bitwise_not(maskMat, grayMat);
                grayMat.copyTo(grayDstMat, maskMat);
            }
        }

        // Convert the grayscale result back to the channel layout dst expects.
        if (dst.type() == CvType.CV_8UC1)
        {
            grayDstMat.copyTo(dst);
        }
        else if (dst.type() == CvType.CV_8UC3)
        {
            Imgproc.cvtColor(grayDstMat, dst, (isBGR) ? Imgproc.COLOR_GRAY2BGR : Imgproc.COLOR_GRAY2RGB);
        }
        else
        {
            Imgproc.cvtColor(grayDstMat, dst, (isBGR) ? Imgproc.COLOR_GRAY2BGRA : Imgproc.COLOR_GRAY2RGBA);
        }
    }

    /// <summary>
    /// Releases every Mat this filter owns; safe to call more than once.
    /// </summary>
    public void Dispose()
    {
        foreach (var mat in new[] { grayMat, maskMat, screentoneMat, grayDstMat, grayLUT, kernel_dilate, kernel_erode, contrastAdjustmentsLUT })
            if (mat != null) mat.Dispose();

        grayDstMat =
        screentoneMat =
        maskMat =
        grayMat =
        grayLUT =
        kernel_dilate =
        kernel_erode =
        contrastAdjustmentsLUT = null;
    }
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 08d77cc1cacdd0f42b35833496299c75
timeCreated: 1524599498
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,206 @@
using NatSuite.Sharing;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using System;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.UI;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// Integration With NatShare Example
/// An example of the native sharing and save to the camera roll using NatShare.
/// </summary>
public class IntegrationWithNatShareExample : ExampleBase<NatDeviceCamSource>
{
    public Toggle applyComicFilterToggle;

    Mat frameMatrix;         // RGBA frame buffer (CV_8UC4) refilled every draw.
    Texture2D texture;       // Preview texture the processed frame is uploaded to.
    ComicFilter comicFilter; // Optional comic-style post filter; created in Start().
    FpsMonitor fpsMonitor;

    string exampleTitle = "";
    string exampleSceneTitle = "";
    string settingInfo1 = "";
    Scalar textColor = new Scalar(255, 255, 255, 255);
    Point textPos = new Point(); // reused to avoid per-frame allocations.

    // Starts the camera source and wires up the stats overlay.
    protected override async void Start()
    {
        base.Start();

        // Load global camera benchmark settings.
        int width, height, framerate;
        NatDeviceWithOpenCVForUnityExample.CameraConfiguration(out width, out height, out framerate);

        // Create camera source; fall back to the opposite facing if the preferred one is absent.
        cameraSource = new NatDeviceCamSource(width, height, framerate, useFrontCamera);
        if (cameraSource.activeCamera == null)
            cameraSource = new NatDeviceCamSource(width, height, framerate, !useFrontCamera);
        await cameraSource.StartRunning(OnStart, OnFrame);

        // Create comic filter
        comicFilter = new ComicFilter();

        exampleTitle = "[NatDeviceWithOpenCVForUnity Example] (" + NatDeviceWithOpenCVForUnityExample.GetNatDeviceVersion() + ")";
        exampleSceneTitle = "- Integration With NatShare Example";

        fpsMonitor = GetComponent<FpsMonitor>();
        if (fpsMonitor != null)
        {
            fpsMonitor.Add("Name", "IntegrationWithNatShareExample");
            fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
            fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
            fpsMonitor.Add("width", "");
            fpsMonitor.Add("height", "");
            fpsMonitor.Add("isFrontFacing", "");
            fpsMonitor.Add("orientation", "");
        }
    }

    // Called when the camera source is running: (re)allocates the frame Mat and texture.
    protected override void OnStart()
    {
        settingInfo1 = "- resolution: " + cameraSource.width + "x" + cameraSource.height;

        // Create matrix
        if (frameMatrix != null)
            frameMatrix.Dispose();
        frameMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC4);

        // Create texture
        if (texture != null)
            Texture2D.Destroy(texture);
        texture = new Texture2D(
            cameraSource.width,
            cameraSource.height,
            TextureFormat.RGBA32,
            false,
            false
        );

        // Display preview
        rawImage.texture = texture;
        aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;

        Debug.Log("NatDevice camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);

        if (fpsMonitor != null)
        {
            fpsMonitor.Add("width", cameraSource.width.ToString());
            fpsMonitor.Add("height", cameraSource.height.ToString());
            fpsMonitor.Add("isFrontFacing", cameraSource.isFrontFacing.ToString());
            fpsMonitor.Add("orientation", Screen.orientation.ToString());
        }
    }

    // Refreshes the overlay stats once per stats interval (updateCount wraps to 0).
    protected override void Update()
    {
        base.Update();

        if (updateCount == 0)
        {
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
    }

    // Captures a frame, optionally applies the comic filter, stamps the captions and uploads to the texture.
    protected override void UpdateTexture()
    {
        cameraSource.CaptureFrame(frameMatrix);

        if (applyComicFilterToggle.isOn)
            comicFilter.Process(frameMatrix, frameMatrix);

        // Caption rows are anchored to the bottom edge of the frame.
        textPos.x = 5;
        textPos.y = frameMatrix.rows() - 50;
        Imgproc.putText(frameMatrix, exampleTitle, textPos, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, textColor, 1, Imgproc.LINE_AA, false);
        textPos.y = frameMatrix.rows() - 30;
        Imgproc.putText(frameMatrix, exampleSceneTitle, textPos, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, textColor, 1, Imgproc.LINE_AA, false);
        textPos.y = frameMatrix.rows() - 10;
        Imgproc.putText(frameMatrix, settingInfo1, textPos, Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, textColor, 1, Imgproc.LINE_AA, false);

        // Convert to Texture2D
        Utils.fastMatToTexture2D(frameMatrix, texture, true, 0, false);
    }

    // Null-guarded teardown: Start() is async, so any of these may still be null
    // if initialization failed or the object was destroyed early.
    protected override void OnDestroy()
    {
        base.OnDestroy();

        if (frameMatrix != null)
        {
            frameMatrix.Dispose();
            frameMatrix = null;
        }
        if (texture != null)
        {
            Texture2D.Destroy(texture);
            texture = null;
        }
        if (comicFilter != null)
        {
            comicFilter.Dispose();
            comicFilter = null;
        }
    }

    // UI callback: shares the current preview texture via the native share sheet.
    public async void OnShareButtonClick()
    {
        var mes = "";

#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR
        try
        {
            var success = await new SharePayload()
                .AddText("User shared image! [NatDeviceWithOpenCVForUnity Example](" + NatDeviceWithOpenCVForUnityExample.GetNatDeviceVersion() + ")")
                .AddImage(texture)
                .Commit();

            mes = $"Successfully shared items: {success}";
        }
        catch (ApplicationException e)
        {
            mes = e.Message;
        }
#else
        mes = "NatShare Error: SharePayload is not supported on this platform";
#endif

        Debug.Log(mes);
        if (fpsMonitor != null)
        {
            // Flash the message in the overlay console for two seconds.
            fpsMonitor.consoleText = mes;
            await Task.Delay(2000);
            fpsMonitor.consoleText = "";
        }
    }

    // UI callback: saves the current preview texture to the camera roll.
    public async void OnSaveToCameraRollButtonClick()
    {
        var mes = "";

#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR
        try
        {
            var success = await new SavePayload("NatDeviceWithOpenCVForUnityExample")
                .AddImage(texture)
                .Commit();

            mes = $"Successfully saved items: {success}";
        }
        catch (ApplicationException e)
        {
            mes = e.Message;
        }
#else
        mes = "NatShare Error: SavePayload is not supported on this platform";
#endif

        Debug.Log(mes);
        if (fpsMonitor != null)
        {
            // Flash the message in the overlay console for two seconds.
            fpsMonitor.consoleText = mes;
            await Task.Delay(2000);
            fpsMonitor.consoleText = "";
        }
    }
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: f5ca190fc54d25e4a80601d2e5f9c573
timeCreated: 1524389028
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: f78ae37dff926a641be7ee414fcc2798
timeCreated: 1524389028
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 7a91cc898012624429966da3089ef8fb
folderAsset: yes
timeCreated: 1522155379
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,152 @@
using NatSuite.Devices;
using System.Collections.Generic;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// NatDeviceCamPreview Only Example
/// An example of displaying the preview frame of camera only using NatDevice.
/// </summary>
public class NatDeviceCamPreviewOnlyExample : ExampleBase<NatDeviceCamSource>
{
    Texture2D texture;     // Preview texture the raw frame is loaded into each draw.
    byte[] pixelBuffer;    // Raw RGBA frame buffer (width * height * 4 bytes).
    FpsMonitor fpsMonitor; // Optional on-screen stats overlay.

    // Requests camera permission, reads benchmark settings, and starts the camera source.
    protected override async void Start()
    {
        base.Start();

        // Request camera permissions
        if (!await MediaDeviceQuery.RequestPermissions<CameraDevice>())
        {
            Debug.LogError("User did not grant camera permissions");
            return;
        }

        // Load global camera benchmark settings.
        int width, height, framerate;
        NatDeviceWithOpenCVForUnityExample.CameraConfiguration(out width, out height, out framerate);

        // Create camera source; retry with the opposite facing if the preferred one is unavailable.
        cameraSource = new NatDeviceCamSource(width, height, framerate, useFrontCamera);
        if (cameraSource.activeCamera == null)
            cameraSource = new NatDeviceCamSource(width, height, framerate, !useFrontCamera);
        await cameraSource.StartRunning(OnStart, OnFrame);

        fpsMonitor = GetComponent<FpsMonitor>();
        if (fpsMonitor != null)
        {
            fpsMonitor.Add("Name", "NatDeviceCamPreviewOnlyExample");
            fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
            fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
            fpsMonitor.Add("width", "");
            fpsMonitor.Add("height", "");
            fpsMonitor.Add("orientation", "");
        }
    }

    // Camera-source-started callback: allocates the pixel buffer and texture,
    // wires the preview UI, and logs the active camera's properties.
    protected override void OnStart()
    {
        base.OnStart();

        // Create pixel buffer
        pixelBuffer = new byte[cameraSource.width * cameraSource.height * 4];

        // Create texture
        if (texture != null)
            Texture2D.Destroy(texture);
        texture = new Texture2D(
            cameraSource.width,
            cameraSource.height,
            TextureFormat.RGBA32,
            false,
            false
        );

        // Display preview
        rawImage.texture = texture;
        aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;

        Debug.Log("NatDevice camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);

        // Log camera properties (only available when the active camera is a CameraDevice).
        var cameraProps = new Dictionary<string, string>();
        var camera = cameraSource.activeCamera as CameraDevice;
        if (camera != null)
        {
            cameraProps.Add("exposureBias", camera.exposureBias.ToString());
            cameraProps.Add("exposureLock", camera.exposureLock.ToString());
            cameraProps.Add("exposureLockSupported", camera.exposureLockSupported.ToString());
            cameraProps.Add("exposureRange", camera.exposureRange.max + "x" + camera.exposureRange.min);
            cameraProps.Add("fieldOfView", camera.fieldOfView.width + "x" + camera.fieldOfView.height);
            cameraProps.Add("flashMode", camera.flashMode.ToString());
            cameraProps.Add("flashSupported", camera.flashSupported.ToString());
            cameraProps.Add("focusLock", camera.focusLock.ToString());
            cameraProps.Add("focusLockSupported", camera.focusLockSupported.ToString());
            cameraProps.Add("frameRate", camera.frameRate.ToString());
            cameraProps.Add("frontFacing", camera.frontFacing.ToString());
            cameraProps.Add("photoResolution", camera.photoResolution.width + "x" + camera.photoResolution.height);
            cameraProps.Add("previewResolution", camera.previewResolution.width + "x" + camera.previewResolution.height);
            cameraProps.Add("running", camera.running.ToString());
            cameraProps.Add("torchEnabled", camera.torchEnabled.ToString());
            cameraProps.Add("torchSupported", camera.torchSupported.ToString());
            cameraProps.Add("uniqueID", camera.uniqueID.ToString());
            cameraProps.Add("whiteBalanceLock", camera.whiteBalanceLock.ToString());
            cameraProps.Add("whiteBalanceLockSupported", camera.whiteBalanceLockSupported.ToString());
            cameraProps.Add("zoomRange", camera.zoomRange.max + "x" + camera.zoomRange.min);
            cameraProps.Add("zoomRatio", camera.zoomRatio.ToString());
        }
        Debug.Log("# Active Camera Properties #####################");
        foreach (string key in cameraProps.Keys)
            Debug.Log(key + ": " + cameraProps[key]);
        Debug.Log("#######################################");

        if (fpsMonitor != null)
        {
            fpsMonitor.Add("width", cameraSource.width.ToString());
            fpsMonitor.Add("height", cameraSource.height.ToString());
            fpsMonitor.Add("orientation", Screen.orientation.ToString());

            // Enlarge the overlay so the full property list fits.
            fpsMonitor.boxWidth = 240;
            fpsMonitor.boxHeight = 800;
            fpsMonitor.LocateGUI();
            foreach (string key in cameraProps.Keys)
                fpsMonitor.Add(key, cameraProps[key]);
        }
    }

    // Refreshes the overlay stats once per stats interval (updateCount wraps to 0).
    protected override void Update()
    {
        base.Update();

        if (updateCount == 0)
        {
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
    }

    // Captures a raw frame into pixelBuffer, runs the selected image processing,
    // and uploads the bytes straight into the preview texture.
    protected override void UpdateTexture()
    {
        cameraSource.CaptureFrame(pixelBuffer);

        ProcessImage(pixelBuffer, texture.width, texture.height, imageProcessingType);

        texture.LoadRawTextureData(pixelBuffer);
        texture.Apply();
    }

    // Teardown: releases the texture and drops the buffer reference.
    protected override void OnDestroy()
    {
        base.OnDestroy();
        Texture2D.Destroy(texture);
        texture = null;
        pixelBuffer = null;
    }
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 7de64702992c67842b18b623410d3b42
timeCreated: 1484195860
licenseType: Store
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 45d5c81453669024799a34ff7a3d5b51
timeCreated: 1477101412
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 96db6872c06c0dd43b5374295f5c6c53
folderAsset: yes
timeCreated: 1522155392
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,225 @@
using NatSuite.Devices;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UtilsModule;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
#if OPENCV_USE_UNSAFE_CODE && UNITY_2018_2_OR_NEWER
using System;
using Unity.Collections.LowLevel.Unsafe;
using Unity.Collections;
#endif
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// NatDeviceCamPreview To Mat Example
/// An example of converting a NatDevice camera preview image to OpenCV's Mat format.
/// </summary>
public class NatDeviceCamPreviewToMatExample : ExampleBase<NatDeviceCamSource>
{
    // How the preview Texture2D pixels are copied into the OpenCV Mat.
    public enum MatCaptureMethod
    {
        GetRawTextureData_ByteArray,   // managed byte[] copy (allocates every frame)
        GetRawTextureData_NativeArray, // NativeArray pointer copy (requires unsafe code)
    }

    [Header("OpenCV")]
    public MatCaptureMethod matCaptureMethod = MatCaptureMethod.GetRawTextureData_ByteArray;
    public Dropdown matCaptureMethodDropdown;

    Mat frameMatrix;       // RGBA frame (CV_8UC4), sized to the camera preview.
    Mat grayMatrix;        // Grayscale scratch buffer (CV_8UC1) for image processing.
    Texture2D texture;     // Preview texture shown in the UI.
    FpsMonitor fpsMonitor; // Optional on-screen stats overlay.

    #region --ExampleBase--

    // Requests camera permission, reads benchmark settings, and starts the camera source.
    protected override async void Start()
    {
        base.Start();

        // Request camera permissions
        if (!await MediaDeviceQuery.RequestPermissions<CameraDevice>())
        {
            Debug.LogError("User did not grant camera permissions");
            return;
        }

        // Load global camera benchmark settings.
        int width, height, framerate;
        NatDeviceWithOpenCVForUnityExample.CameraConfiguration(out width, out height, out framerate);

        // Create camera source; retry with the opposite facing if the preferred one is unavailable.
        cameraSource = new NatDeviceCamSource(width, height, framerate, useFrontCamera);
        if (cameraSource.activeCamera == null)
            cameraSource = new NatDeviceCamSource(width, height, framerate, !useFrontCamera);
        await cameraSource.StartRunning(OnStart, OnFrame);

        // Update UI
        matCaptureMethodDropdown.value = (int)matCaptureMethod;

        fpsMonitor = GetComponent<FpsMonitor>();
        if (fpsMonitor != null)
        {
            fpsMonitor.Add("Name", "NatDeviceCamPreviewToMatExample");
            fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
            fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
            fpsMonitor.Add("width", "");
            fpsMonitor.Add("height", "");
            fpsMonitor.Add("orientation", "");
        }
    }

    // Camera-source-started callback: allocates the matrices and texture,
    // wires the preview UI, and logs the active camera's properties.
    protected override void OnStart()
    {
        base.OnStart();

        // Create matrices
        if (frameMatrix != null)
            frameMatrix.Dispose();
        frameMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC4);
        if (grayMatrix != null)
            grayMatrix.Dispose();
        grayMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC1);

        // Create texture
        if (texture != null)
            Texture2D.Destroy(texture);
        texture = new Texture2D(
            cameraSource.width,
            cameraSource.height,
            TextureFormat.RGBA32,
            false,
            false
        );

        // Display preview
        rawImage.texture = texture;
        aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;

        Debug.Log("NatDevice camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);

        // Log camera properties (only available when the active camera is a CameraDevice).
        var cameraProps = new Dictionary<string, string>();
        var camera = cameraSource.activeCamera as CameraDevice;
        if (camera != null)
        {
            cameraProps.Add("exposureBias", camera.exposureBias.ToString());
            cameraProps.Add("exposureLock", camera.exposureLock.ToString());
            cameraProps.Add("exposureLockSupported", camera.exposureLockSupported.ToString());
            cameraProps.Add("exposureRange", camera.exposureRange.max + "x" + camera.exposureRange.min);
            cameraProps.Add("fieldOfView", camera.fieldOfView.width + "x" + camera.fieldOfView.height);
            cameraProps.Add("flashMode", camera.flashMode.ToString());
            cameraProps.Add("flashSupported", camera.flashSupported.ToString());
            cameraProps.Add("focusLock", camera.focusLock.ToString());
            cameraProps.Add("focusLockSupported", camera.focusLockSupported.ToString());
            cameraProps.Add("frameRate", camera.frameRate.ToString());
            cameraProps.Add("frontFacing", camera.frontFacing.ToString());
            cameraProps.Add("photoResolution", camera.photoResolution.width + "x" + camera.photoResolution.height);
            cameraProps.Add("previewResolution", camera.previewResolution.width + "x" + camera.previewResolution.height);
            cameraProps.Add("running", camera.running.ToString());
            cameraProps.Add("torchEnabled", camera.torchEnabled.ToString());
            cameraProps.Add("torchSupported", camera.torchSupported.ToString());
            cameraProps.Add("uniqueID", camera.uniqueID.ToString());
            cameraProps.Add("whiteBalanceLock", camera.whiteBalanceLock.ToString());
            cameraProps.Add("whiteBalanceLockSupported", camera.whiteBalanceLockSupported.ToString());
            cameraProps.Add("zoomRange", camera.zoomRange.max + "x" + camera.zoomRange.min);
            cameraProps.Add("zoomRatio", camera.zoomRatio.ToString());
        }
        Debug.Log("# Active Camera Properties #####################");
        foreach (string key in cameraProps.Keys)
            Debug.Log(key + ": " + cameraProps[key]);
        Debug.Log("#######################################");

        if (fpsMonitor != null)
        {
            fpsMonitor.Add("width", cameraSource.width.ToString());
            fpsMonitor.Add("height", cameraSource.height.ToString());
            fpsMonitor.Add("orientation", Screen.orientation.ToString());

            // Enlarge the overlay so the full property list fits.
            fpsMonitor.boxWidth = 240;
            fpsMonitor.boxHeight = 800;
            fpsMonitor.LocateGUI();
            foreach (string key in cameraProps.Keys)
                fpsMonitor.Add(key, cameraProps[key]);
        }
    }

    // Refreshes the overlay stats once per stats interval (updateCount wraps to 0).
    protected override void Update()
    {
        base.Update();

        if (updateCount == 0)
        {
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }
    }

    // Copies the preview pixels into frameMatrix via the selected capture method,
    // flips vertically (texture rows are bottom-up relative to Mat rows),
    // runs the selected image processing, and uploads the result to the texture.
    protected override void UpdateTexture()
    {
        // Get the matrix
        switch (matCaptureMethod)
        {
            case MatCaptureMethod.GetRawTextureData_ByteArray:
                MatUtils.copyToMat(cameraSource.preview.GetRawTextureData(), frameMatrix);
                Core.flip(frameMatrix, frameMatrix, 0);
                break;
            case MatCaptureMethod.GetRawTextureData_NativeArray:
#if OPENCV_USE_UNSAFE_CODE && UNITY_2018_2_OR_NEWER
                // non-memory allocation.
                unsafe
                {
                    var ptr = (IntPtr)NativeArrayUnsafeUtility.GetUnsafeReadOnlyPtr(cameraSource.preview.GetRawTextureData<byte>());
                    MatUtils.copyToMat(ptr, frameMatrix);
                }
                Core.flip(frameMatrix, frameMatrix, 0);
#else
                // Fallback: managed copy, plus an on-frame notice about the requirement.
                MatUtils.copyToMat(cameraSource.preview.GetRawTextureData(), frameMatrix);
                Core.flip(frameMatrix, frameMatrix, 0);
                Imgproc.putText(frameMatrix, "NativeArray<T> GetRawTextureData() method can be used from Unity 2018.2 or later.", new Point(5, frameMatrix.rows() - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
#endif
                break;
        }

        ProcessImage(frameMatrix, grayMatrix, imageProcessingType);

        // Convert to Texture2D
        Utils.fastMatToTexture2D(frameMatrix, texture);
    }

    // Teardown: disposes both matrices and releases the texture.
    protected override void OnDestroy()
    {
        base.OnDestroy();
        if (frameMatrix != null)
            frameMatrix.Dispose();
        if (grayMatrix != null)
            grayMatrix.Dispose();
        frameMatrix =
        grayMatrix = null;
        Texture2D.Destroy(texture);
        texture = null;
    }

    #endregion

    #region --UI Callbacks--

    // Dropdown callback: switches the Mat capture method.
    public void OnMatCaptureMethodDropdownValueChanged(int result)
    {
        matCaptureMethod = (MatCaptureMethod)result;
    }

    #endregion
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 8ee1737701661f640b54a64e60ee8de5
timeCreated: 1522098564
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: b7a31f20bafa7a14997597b75d05ea78
timeCreated: 1482683808
licenseType: Store
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: fc933a03de5fd314bac0848335d7ca1c
folderAsset: yes
timeCreated: 1524603639
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,26 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
serializedVersion: 6
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
m_Name: ExampleMaterial
m_Shader: {fileID: 10750, guid: 0000000000000000f000000000000000, type: 0}
m_ShaderKeywords:
m_LightmapFlags: 5
m_EnableInstancingVariants: 0
m_CustomRenderQueue: -1
stringTagMap: {}
disabledShaderPasses: []
m_SavedProperties:
serializedVersion: 3
m_TexEnvs:
- _MainTex:
m_Texture: {fileID: 2800000, guid: 9a89f0244015e104fb922e309d53caf1, type: 3}
m_Scale: {x: 1, y: 1}
m_Offset: {x: 0, y: 0}
m_Floats: []
m_Colors:
- _Color: {r: 1, g: 1, b: 1, a: 1}

Просмотреть файл

@ -0,0 +1,6 @@
fileFormatVersion: 2
guid: 6c6c3f29578c5fc438256e239d0e7273
NativeFormatImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,382 @@
using NatDeviceWithOpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// NatDeviceCamPreviewToMatHelper Example
/// </summary>
[RequireComponent(typeof(NatDeviceCamPreviewToMatHelper))]
public class NatDeviceCamPreviewToMatHelperExample : MonoBehaviour
{
/// <summary>
/// The requested resolution dropdown.
/// </summary>
public Dropdown requestedResolutionDropdown;
/// <summary>
/// The requested resolution.
/// </summary>
public ResolutionPreset requestedResolution = ResolutionPreset._1280x720;
/// <summary>
/// The requestedFPS dropdown.
/// </summary>
public Dropdown requestedFPSDropdown;
/// <summary>
/// The requestedFPS.
/// </summary>
public FPSPreset requestedFPS = FPSPreset._30;
/// <summary>
/// The rotate 90 degree toggle.
/// </summary>
public Toggle rotate90DegreeToggle;
/// <summary>
/// The flip vertical toggle.
/// </summary>
public Toggle flipVerticalToggle;
/// <summary>
/// The flip horizontal toggle.
/// </summary>
public Toggle flipHorizontalToggle;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
NatDeviceCamPreviewToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The FPS monitor.
/// </summary>
FpsMonitor fpsMonitor;
// Use this for initialization
void Start()
{
    fpsMonitor = GetComponent<FpsMonitor>();

    webCamTextureToMatHelper = gameObject.GetComponent<NatDeviceCamPreviewToMatHelper>();

    // Apply the requested capture settings before initializing the helper.
    int width, height;
    Dimensions(requestedResolution, out width, out height);
    webCamTextureToMatHelper.requestedWidth = width;
    webCamTextureToMatHelper.requestedHeight = height;
    webCamTextureToMatHelper.requestedFPS = (int)requestedFPS;
    webCamTextureToMatHelper.Initialize();

    // Update GUI state
    requestedResolutionDropdown.value = (int)requestedResolution;
    // FPSPreset enum member names are "_30", "_15", ...; match by name to find the dropdown index.
    string[] enumNames = System.Enum.GetNames(typeof(FPSPreset));
    int index = Array.IndexOf(enumNames, requestedFPS.ToString());
    requestedFPSDropdown.value = index;
    rotate90DegreeToggle.isOn = webCamTextureToMatHelper.rotate90Degree;
    flipVerticalToggle.isOn = webCamTextureToMatHelper.flipVertical;
    flipHorizontalToggle.isOn = webCamTextureToMatHelper.flipHorizontal;
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
    Debug.Log("OnWebCamTextureToMatHelperInitialized");

    // Create a texture matching the helper's Mat and show it on this object's renderer.
    Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

    texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
    Utils.fastMatToTexture2D(webCamTextureMat, texture);

    gameObject.GetComponent<Renderer>().material.mainTexture = texture;

    // Scale the quad to the frame size so one world unit equals one pixel.
    gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
    Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

    if (fpsMonitor != null)
    {
        fpsMonitor.Add("width", webCamTextureToMatHelper.GetWidth().ToString());
        fpsMonitor.Add("height", webCamTextureToMatHelper.GetHeight().ToString());
        fpsMonitor.Add("isFrontFacing", webCamTextureToMatHelper.IsFrontFacing().ToString());
        fpsMonitor.Add("rotate90Degree", webCamTextureToMatHelper.rotate90Degree.ToString());
        fpsMonitor.Add("flipVertical", webCamTextureToMatHelper.flipVertical.ToString());
        fpsMonitor.Add("flipHorizontal", webCamTextureToMatHelper.flipHorizontal.ToString());
        fpsMonitor.Add("orientation", Screen.orientation.ToString());
    }

    // Fit the orthographic camera to the frame: letterbox on whichever axis is tighter.
    float width = webCamTextureMat.width();
    float height = webCamTextureMat.height();

    float widthScale = (float)Screen.width / width;
    float heightScale = (float)Screen.height / height;
    if (widthScale < heightScale)
    {
        Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
    }
    else
    {
        Camera.main.orthographicSize = height / 2;
    }

    UpdateNatDeviceCameraProps(webCamTextureToMatHelper);
}
/// <summary>
/// Raises the webcam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
    Debug.Log("OnWebCamTextureToMatHelperDisposed");

    // Nothing to release if the preview texture was never created.
    if (texture == null)
        return;

    Texture2D.Destroy(texture);
    texture = null;
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
    // Surface helper failures (e.g. camera unavailable) in the console.
    Debug.Log($"OnWebCamTextureToMatHelperErrorOccurred {errorCode}");
}
// Update is called once per frame
void Update()
{
    // Only refresh the preview when the helper is running and a new frame arrived.
    if (!webCamTextureToMatHelper.IsPlaying() || !webCamTextureToMatHelper.DidUpdateThisFrame())
        return;

    Mat rgbaMat = webCamTextureToMatHelper.GetMat();

    // Restore the coordinate system of the image by OpenCV's Flip function.
    Utils.fastMatToTexture2D(rgbaMat, texture);
}
// Snapshots the NatDevice camera properties exposed by the helper and, when an
// FpsMonitor is attached, enlarges its box and appends every property to it.
private void UpdateNatDeviceCameraProps(NatDeviceCamPreviewToMatHelper helper)
{
    var cameraProps = new Dictionary<string, string>
    {
        ["exposureBias"] = helper.exposureBias.ToString(),
        ["exposureLock"] = helper.exposureLock.ToString(),
        ["exposureLockSupported"] = helper.exposureLockSupported.ToString(),
        ["exposureRange"] = helper.exposureRange.max + "x" + helper.exposureRange.min,
        ["fieldOfView"] = helper.fieldOfView.width + "x" + helper.fieldOfView.height,
        ["flashMode"] = helper.flashMode.ToString(),
        ["flashSupported"] = helper.flashSupported.ToString(),
        ["focusLock"] = helper.focusLock.ToString(),
        ["focusLockSupported"] = helper.focusLockSupported.ToString(),
        ["frameRate"] = helper.frameRate.ToString(),
        ["frontFacing"] = helper.frontFacing.ToString(),
        ["photoResolution"] = helper.photoResolution.width + "x" + helper.photoResolution.height,
        ["previewResolution"] = helper.previewResolution.width + "x" + helper.previewResolution.height,
        ["running"] = helper.running.ToString(),
        ["torchEnabled"] = helper.torchEnabled.ToString(),
        ["torchSupported"] = helper.torchSupported.ToString(),
        ["uniqueID"] = helper.uniqueID.ToString(),
        ["whiteBalanceLock"] = helper.whiteBalanceLock.ToString(),
        ["whiteBalanceLockSupported"] = helper.whiteBalanceLockSupported.ToString(),
        ["zoomRange"] = helper.zoomRange.max + "x" + helper.zoomRange.min,
        ["zoomRatio"] = helper.zoomRatio.ToString(),
    };

    if (fpsMonitor == null)
        return;

    // Enlarge the overlay so the full property list fits, then append it.
    fpsMonitor.boxWidth = 240;
    fpsMonitor.boxHeight = 800;
    fpsMonitor.LocateGUI();
    foreach (var prop in cameraProps)
        fpsMonitor.Add(prop.Key, prop.Value);
}
/// <summary>
/// Raises the destroy event; releases the webcam helper's resources.
/// </summary>
void OnDestroy() => webCamTextureToMatHelper.Dispose();
/// <summary>
/// Raises the back button click event; returns to the example menu scene.
/// </summary>
public void OnBackButtonClick() => SceneManager.LoadScene("NatDeviceWithOpenCVForUnityExample");
/// <summary>
/// Raises the play button click event; starts the camera preview.
/// </summary>
public void OnPlayButtonClick() => webCamTextureToMatHelper.Play();
/// <summary>
/// Raises the pause button click event; pauses the camera preview.
/// </summary>
public void OnPauseButtonClick() => webCamTextureToMatHelper.Pause();
/// <summary>
/// Raises the stop button click event; stops the camera preview.
/// </summary>
public void OnStopButtonClick() => webCamTextureToMatHelper.Stop();
/// <summary>
/// Raises the change camera button click event; toggles between the
/// front- and rear-facing camera.
/// </summary>
public void OnChangeCameraButtonClick() => webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
/// <summary>
/// Raises the requested resolution dropdown value changed event.
/// Re-initializes the helper only when the selection actually changed.
/// </summary>
public void OnRequestedResolutionDropdownValueChanged(int result)
{
    // Dropdown option order mirrors the ResolutionPreset member order.
    if ((int)requestedResolution == result) return;

    requestedResolution = (ResolutionPreset)result;

    int width, height;
    Dimensions(requestedResolution, out width, out height);
    webCamTextureToMatHelper.Initialize(width, height);
}
/// <summary>
/// Raises the requestedFPS dropdown value changed event.
/// Maps the dropdown index to the FPSPreset declared at that position,
/// then applies the new frame rate only if it differs.
/// </summary>
public void OnRequestedFPSDropdownValueChanged(int result)
{
    string[] enumNames = Enum.GetNames(typeof(FPSPreset));
    int selectedValue = (int)System.Enum.Parse(typeof(FPSPreset), enumNames[result], true);

    if ((int)requestedFPS == selectedValue) return;

    requestedFPS = (FPSPreset)selectedValue;
    webCamTextureToMatHelper.requestedFPS = (int)requestedFPS;
}
/// <summary>
/// Raises the rotate 90 degree toggle value changed event and mirrors
/// the new state onto the fps monitor overlay.
/// </summary>
public void OnRotate90DegreeToggleValueChanged()
{
    bool isOn = rotate90DegreeToggle.isOn;
    if (webCamTextureToMatHelper.rotate90Degree != isOn)
        webCamTextureToMatHelper.rotate90Degree = isOn;

    if (fpsMonitor != null)
        fpsMonitor.Add("rotate90Degree", webCamTextureToMatHelper.rotate90Degree.ToString());
}
/// <summary>
/// Raises the flip vertical toggle value changed event and mirrors
/// the new state onto the fps monitor overlay.
/// </summary>
public void OnFlipVerticalToggleValueChanged()
{
    bool isOn = flipVerticalToggle.isOn;
    if (webCamTextureToMatHelper.flipVertical != isOn)
        webCamTextureToMatHelper.flipVertical = isOn;

    if (fpsMonitor != null)
        fpsMonitor.Add("flipVertical", webCamTextureToMatHelper.flipVertical.ToString());
}
/// <summary>
/// Raises the flip horizontal toggle value changed event and mirrors
/// the new state onto the fps monitor overlay.
/// </summary>
public void OnFlipHorizontalToggleValueChanged()
{
    bool isOn = flipHorizontalToggle.isOn;
    if (webCamTextureToMatHelper.flipHorizontal != isOn)
        webCamTextureToMatHelper.flipHorizontal = isOn;

    if (fpsMonitor != null)
        fpsMonitor.Add("flipHorizontal", webCamTextureToMatHelper.flipHorizontal.ToString());
}
/// <summary>
/// Selectable frame-rate presets for the requestedFPS dropdown.
/// Each member's underlying value is the frame rate it represents.
/// </summary>
public enum FPSPreset : int
{
    _0 = 0,
    _1 = 1,
    _5 = 5,
    _10 = 10,
    _15 = 15,
    _30 = 30,
    _60 = 60,
}
/// <summary>
/// Selectable resolution presets for the requested resolution dropdown.
/// Member order must match the dropdown's option order. Concrete pixel
/// sizes are resolved by <see cref="Dimensions"/>.
/// </summary>
public enum ResolutionPreset
{
    Lowest,
    _640x480,
    _1280x720,
    _1920x1080,
    Highest,
}
/// <summary>
/// Resolves a resolution preset into concrete pixel dimensions.
/// "Lowest"/"Highest" request extreme sizes so the device clamps them to
/// its minimum/maximum supported resolution.
/// </summary>
private void Dimensions(ResolutionPreset preset, out int width, out int height)
{
    switch (preset)
    {
        case ResolutionPreset.Lowest:
            width = 50; height = 50;
            break;
        case ResolutionPreset._640x480:
            width = 640; height = 480;
            break;
        case ResolutionPreset._1920x1080:
            width = 1920; height = 1080;
            break;
        case ResolutionPreset.Highest:
            width = 9999; height = 9999;
            break;
        case ResolutionPreset._1280x720:
        default:
            width = 1280; height = 720;
            break;
    }
}
}
}

Просмотреть файл

@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: 7a2cd8859f80e4b4188b88f2442b1d3b
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,6 @@
fileFormatVersion: 2
guid: d6973b7cefe924f48a9b2f3e00c22c2a
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,206 @@
using System.Collections;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// Menu scene controller for the NatDeviceWithOpenCVForUnity examples.
/// Shows version information, holds the benchmark camera settings shared by
/// the example scenes (static fields so they survive scene loads), and routes
/// menu buttons to scene loads.
/// </summary>
public class NatDeviceWithOpenCVForUnityExample : MonoBehaviour
{
    /// <summary>
    /// Returns the NatDevice plugin version this project targets.
    /// </summary>
    public static string GetNatDeviceVersion()
    {
        return "1.0.1";
    }

    /// <summary>Selectable camera frame-rate presets for the benchmark dropdown.</summary>
    public enum FrameratePreset
    {
        _10,
        _15,
        _30,
        _60
    }

    /// <summary>Selectable camera resolution presets for the benchmark dropdown.</summary>
    public enum ResolutionPreset
    {
        Lowest,
        _640x480,
        _1280x720,
        _1920x1080,
        Highest,
    }

    [HeaderAttribute("Benchmark")]
    public Dropdown cameraResolutionDropdown;
    public Dropdown cameraFPSDropdown;
    // Static so the chosen benchmark settings persist across scene loads.
    private static ResolutionPreset cameraResolution = ResolutionPreset._1280x720;
    private static FrameratePreset cameraFramerate = FrameratePreset._30;
    private static bool performImageProcessingEachTime = false;

    [Header("UI")]
    public Text exampleTitle;
    public Text versionInfo;
    public ScrollRect scrollRect;
    // Remembers the menu scroll position across scene loads.
    private static float verticalNormalizedPosition = 1f;

    /// <summary>
    /// Resolves the currently selected presets into concrete width/height/framerate
    /// values for the example scenes. "Lowest"/"Highest" request extreme sizes so
    /// the device clamps them to its supported minimum/maximum.
    /// </summary>
    public static void CameraConfiguration(out int width, out int height, out int framerate)
    {
        switch (cameraResolution)
        {
            case ResolutionPreset.Lowest:
                width = height = 50;
                break;
            case ResolutionPreset._640x480:
                width = 640;
                height = 480;
                break;
            case ResolutionPreset._1920x1080:
                width = 1920;
                height = 1080;
                break;
            case ResolutionPreset.Highest:
                width = height = 9999;
                break;
            case ResolutionPreset._1280x720:
            default:
                width = 1280;
                height = 720;
                break;
        }
        switch (cameraFramerate)
        {
            case FrameratePreset._10:
                framerate = 10;
                break;
            case FrameratePreset._15:
                framerate = 15;
                break;
            case FrameratePreset._60:
                framerate = 60;
                break;
            case FrameratePreset._30:
            default:
                framerate = 30;
                break;
        }
    }

    /// <summary>
    /// Exposes the static "process every frame" flag to the example scenes.
    /// </summary>
    public static void ExampleSceneConfiguration(out bool performImageProcessingEachTime)
    {
        performImageProcessingEachTime = NatDeviceWithOpenCVForUnityExample.performImageProcessingEachTime;
    }

    void Awake()
    {
        // Disable vsync so targetFrameRate takes effect.
        QualitySettings.vSyncCount = 0;
        Application.targetFrameRate = 60;
    }

    // Builds the title/version labels, restores UI state, and (on mobile)
    // waits for the runtime camera/storage permission flow before finishing.
    IEnumerator Start()
    {
        exampleTitle.text = "NatDeviceWithOpenCVForUnity Example " + Application.version;

        versionInfo.text = "NatDevice " + GetNatDeviceVersion();
        versionInfo.text += " / " + OpenCVForUnity.CoreModule.Core.NATIVE_LIBRARY_NAME + " " + OpenCVForUnity.UnityUtils.Utils.getVersion() + " (" + OpenCVForUnity.CoreModule.Core.VERSION + ")";
        versionInfo.text += " / UnityEditor " + Application.unityVersion;
        versionInfo.text += " / ";
#if UNITY_EDITOR
        versionInfo.text += "Editor";
#elif UNITY_STANDALONE_WIN
        versionInfo.text += "Windows";
#elif UNITY_STANDALONE_OSX
        versionInfo.text += "Mac OSX";
#elif UNITY_STANDALONE_LINUX
        versionInfo.text += "Linux";
#elif UNITY_ANDROID
        versionInfo.text += "Android";
#elif UNITY_IOS
        versionInfo.text += "iOS";
#elif UNITY_WSA
        versionInfo.text += "WSA";
#elif UNITY_WEBGL
        versionInfo.text += "WebGL";
#endif
        versionInfo.text += " ";
#if ENABLE_MONO
        versionInfo.text += "Mono";
#elif ENABLE_IL2CPP
        versionInfo.text += "IL2CPP";
#elif ENABLE_DOTNET
        versionInfo.text += ".NET";
#endif

        scrollRect.verticalNormalizedPosition = verticalNormalizedPosition;

        // Update GUI state
        cameraResolutionDropdown.value = (int)cameraResolution;
        cameraFPSDropdown.value = (int)cameraFramerate;

#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR
        RuntimePermissionHelper runtimePermissionHelper = GetComponent<RuntimePermissionHelper>();
        yield return runtimePermissionHelper.hasUserAuthorizedCameraPermission();
        yield return runtimePermissionHelper.hasUserAuthorizedExternalStorageWritePermission(); // It works only with Android devices.
#endif
        yield break;
    }

    /// <summary>Stores the resolution preset selected in the dropdown.</summary>
    public void OnCameraResolutionDropdownValueChanged(int result)
    {
        cameraResolution = (ResolutionPreset)result;
    }

    /// <summary>Stores the frame-rate preset selected in the dropdown.</summary>
    public void OnCameraFPSDropdownValueChanged(int result)
    {
        cameraFramerate = (FrameratePreset)result;
    }

    /// <summary>Persists the scroll position so it is restored after a scene change.</summary>
    public void OnScrollRectValueChanged()
    {
        verticalNormalizedPosition = scrollRect.verticalNormalizedPosition;
    }

    public void OnShowSystemInfoButtonClick()
    {
        SceneManager.LoadScene("ShowSystemInfo");
    }

    public void OnShowLicenseButtonClick()
    {
        SceneManager.LoadScene("ShowLicense");
    }

    public void OnNatDeviceCamPreviewOnlyExampleButtonClick()
    {
        SceneManager.LoadScene("NatDeviceCamPreviewOnlyExample");
    }

    public void OnWebCamTextureOnlyExampleButtonClick()
    {
        SceneManager.LoadScene("WebCamTextureOnlyExample");
    }

    public void OnNatDeviceCamPreviewToMatExampleButtonClick()
    {
        SceneManager.LoadScene("NatDeviceCamPreviewToMatExample");
    }

    public void OnWebCamTextureToMatExampleButtonClick()
    {
        SceneManager.LoadScene("WebCamTextureToMatExample");
    }

    public void OnNatDeviceCamPreviewToMatHelperExampleButtonClick()
    {
        SceneManager.LoadScene("NatDeviceCamPreviewToMatHelperExample");
    }

    public void OnIntegrationWithNatShareExampleButtonClick()
    {
        SceneManager.LoadScene("IntegrationWithNatShareExample");
    }
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 3fc005588c60a0a4c827e3838c5f0e0d
timeCreated: 1521862404
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 70b98c90e8c1b7e40862f0229eb0d567
timeCreated: 1521862404
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 21e9747b132986c4b8c0ebb383902e9f
folderAsset: yes
timeCreated: 1522155797
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,251 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using System;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// Shared scaffolding for the example scenes. Owns a camera source of type
/// <typeparamref name="T"/>, tracks update/frame/draw FPS counters, wires the
/// common UI callbacks, and provides pixel-processing helpers used by the
/// concrete examples.
/// </summary>
public abstract class ExampleBase<T> : MonoBehaviour where T : ICameraSource
{
    [Header("Preview")]
    public RawImage rawImage;
    public AspectRatioFitter aspectFitter;

    [Header("Camera")]
    public bool useFrontCamera;

    [Header("Processing")]
    public ImageProcessingType imageProcessingType = ImageProcessingType.None;
    public Dropdown imageProcessingTypeDropdown;

    protected T cameraSource;
    // Per-second counters, reset each time `elapsed` crosses one second.
    protected int updateCount, onFrameCount, drawCount;
    protected float elapsed, updateFPS, onFrameFPS, drawFPS;

    #region --Lifecycle--

    protected virtual void Start()
    {
        // Sync the dropdown with the serialized processing type.
        if (imageProcessingTypeDropdown != null)
            imageProcessingTypeDropdown.value = (int)imageProcessingType;
    }

    protected virtual void Update()
    {
        updateCount++;
        elapsed += Time.deltaTime;
        if (elapsed >= 1f)
        {
            // Recompute the FPS figures once per second, then restart the window.
            updateFPS = updateCount / elapsed;
            onFrameFPS = onFrameCount / elapsed;
            drawFPS = drawCount / elapsed;
            updateCount = onFrameCount = drawCount = 0;
            elapsed = 0f;
        }
    }

    /// <summary>Called once when the camera source has started running.</summary>
    protected virtual void OnStart()
    {
        useFrontCamera = cameraSource.isFrontFacing;
    }

    /// <summary>Called for every new camera frame; pushes it to the display.</summary>
    protected virtual void OnFrame()
    {
        onFrameCount++;
        UpdateTexture();
        drawCount++;
    }

    protected virtual void OnDestroy()
    {
        if (cameraSource != null)
        {
            cameraSource.Dispose();
            cameraSource = default(T);
        }
    }

    #endregion

    #region --UI Callbacks--

    public void OnBackButtonClick()
    {
        SceneManager.LoadScene("NatDeviceWithOpenCVForUnityExample");
    }

    public async void OnPlayButtonClick()
    {
        if (cameraSource == null) return;
        try
        {
            await cameraSource.StartRunning(OnStart, OnFrame);
        }
        catch (ApplicationException e)
        {
            Debug.LogError(e.Message);
        }
    }

    public void OnStopButtonClick()
    {
        if (cameraSource == null) return;
        cameraSource.StopRunning();
    }

    public async void OnChangeCameraButtonClick()
    {
        if (cameraSource == null) return;
        try
        {
            await cameraSource.SwitchCamera();
        }
        catch (ApplicationException e)
        {
            Debug.LogError(e.Message);
        }
    }

    public void OnImageProcessingTypeDropdownValueChanged(int result)
    {
        imageProcessingType = (ImageProcessingType)result;
    }

    #endregion

    #region --Operations--

    /// <summary>Pushes the latest (optionally processed) frame to the display texture.</summary>
    protected abstract void UpdateTexture();

    /// <summary>
    /// Applies the selected processing to an RGBA32 pixel buffer in place.
    /// </summary>
    protected void ProcessImage(Color32[] buffer, int width, int height, ImageProcessingType imageProcessingType)
    {
        switch (imageProcessingType)
        {
            case ImageProcessingType.DrawLine:
                // Draw a 4px-thick diagonal line across the image.
                float inclination = height / (float)width;
                for (int i = 0; i < 4; i++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        int y = (int)(-inclination * x) + height - 2 + i;
                        y = Mathf.Clamp(y, 0, height - 1);
                        int p = (x) + (y * width);
                        // Direct element assignment instead of Array.SetValue,
                        // which boxes the Color32 struct on every call.
                        buffer[p] = new Color32(255, 0, 0, 255);
                    }
                }
                break;
            case ImageProcessingType.ConvertToGray:
                // Convert the four-channel buffer to greyscale in place.
                for (int i = 0; i < buffer.Length; i++)
                {
                    var p = buffer[i];
                    // Quick integer luminance approximation: (3R + 4G + B) / 8.
                    byte l = (byte)((p.r + p.r + p.r + p.b + p.g + p.g + p.g + p.g) >> 3);
                    buffer[i] = new Color32(l, l, l, p.a);
                }
                break;
        }
    }

    /// <summary>
    /// Applies the selected processing to a raw RGBA byte buffer (4 bytes per
    /// pixel) in place.
    /// </summary>
    protected void ProcessImage(byte[] buffer, int width, int height, ImageProcessingType imageProcessingType)
    {
        switch (imageProcessingType)
        {
            case ImageProcessingType.DrawLine:
                // Draw a 4px-thick diagonal line across the image.
                float inclination = height / (float)width;
                for (int i = 0; i < 4; i++)
                {
                    for (int x = 0; x < width; x++)
                    {
                        int y = (int)(-inclination * x) + height - 2 + i;
                        y = Mathf.Clamp(y, 0, height - 1);
                        int p = (x * 4) + (y * width * 4);
                        buffer[p] = 255;
                        buffer[p + 1] = 0;
                        buffer[p + 2] = 0;
                        buffer[p + 3] = 255;
                    }
                }
                break;
            case ImageProcessingType.ConvertToGray:
                // Convert the four-channel buffer to greyscale in place.
                for (int i = 0; i < buffer.Length; i = i + 4)
                {
                    byte r = buffer[i];
                    byte g = buffer[i + 1];
                    byte b = buffer[i + 2];
                    byte a = buffer[i + 3];
                    // Quick integer luminance approximation: (3R + 4G + B) / 8.
                    byte l = (byte)((r + r + r + b + g + g + g + g) >> 3);
                    buffer[i] = buffer[i + 1] = buffer[i + 2] = l;
                    buffer[i + 3] = a;
                }
                break;
        }
    }

    /// <summary>
    /// Applies the selected processing to an RGBA Mat in place; grayMatrix is
    /// used as scratch space for the grayscale conversion.
    /// </summary>
    protected void ProcessImage(Mat frameMatrix, Mat grayMatrix, ImageProcessingType imageProcessingType)
    {
        switch (imageProcessingType)
        {
            case ImageProcessingType.DrawLine:
                Imgproc.line(
                    frameMatrix,
                    new Point(0, 0),
                    new Point(frameMatrix.cols(), frameMatrix.rows()),
                    new Scalar(255, 0, 0, 255),
                    4
                );
                break;
            case ImageProcessingType.ConvertToGray:
                Imgproc.cvtColor(frameMatrix, grayMatrix, Imgproc.COLOR_RGBA2GRAY);
                Imgproc.cvtColor(grayMatrix, frameMatrix, Imgproc.COLOR_GRAY2RGBA);
                break;
        }
    }

    #endregion
}
/// <summary>
/// Processing applied to each camera frame by the example scenes:
/// nothing, a diagonal line overlay, or a grayscale conversion.
/// </summary>
public enum ImageProcessingType
{
    None,
    DrawLine,
    ConvertToGray,
}
}

Просмотреть файл

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 7876b09c1405d4835a53efb593d78029
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,43 @@
using OpenCVForUnity.CoreModule;
using System;
using System.Threading.Tasks;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// Common abstraction over a camera preview source (NatDevice- or
/// WebCamTexture-backed) used by the example scenes.
/// </summary>
public interface ICameraSource
{
    #region --Properties--
    // Preview width in pixels.
    int width { get; }
    // Preview height in pixels.
    int height { get; }
    // True while the preview is delivering frames.
    bool isRunning { get; }
    // True if the active camera is front-facing.
    bool isFrontFacing { get; }
    #endregion

    #region --Operations--
    // Starts the preview; startCallback fires once on the first frame,
    // frameCallback on every subsequent frame.
    Task StartRunning(Action startCallback, Action frameCallback);
    // Stops the preview.
    void StopRunning();
    // Copies the current frame into the given RGBA Mat.
    void CaptureFrame(Mat matrix);
    // Copies the current frame into a Color32 pixel buffer.
    void CaptureFrame(Color32[] pixelBuffer);
    // Copies the current frame into a raw byte buffer.
    void CaptureFrame(byte[] pixelBuffer);
    // Switches to the next available camera device.
    Task SwitchCamera();
    // Releases the camera and any textures owned by the source.
    void Dispose();
    #endregion
}
}

Просмотреть файл

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: d7f3d8ae972b34e85823d6cd3323bfe7
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,228 @@
using NatSuite.Devices;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using UnityEngine;
#if OPENCV_USE_UNSAFE_CODE && UNITY_2018_2_OR_NEWER
using Unity.Collections;
using Unity.Collections.LowLevel.Unsafe;
#endif
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// ICameraSource implementation backed by the NatDevice camera API.
/// Drives start/stop of a CameraDevice preview and copies frames from the
/// preview Texture2D into Mats or pixel buffers.
/// </summary>
public class NatDeviceCamSource : ICameraSource
{
    #region --Op vars--
    private Action startCallback, frameCallback;
    private int requestedWidth, requestedHeight, requestedFramerate;
    // True until the first preview frame has been dispatched to startCallback.
    private bool firstFrame;
    private MediaDeviceQuery deviceQuery;
    private NatDeviceCamSourceAttachment attachment;

    // Hidden helper MonoBehaviour: its Update() pumps the frame callbacks,
    // since this class is not itself a component.
    private class NatDeviceCamSourceAttachment : MonoBehaviour
    {
        public Action @delegate;
        void Awake() => DontDestroyOnLoad(this.gameObject);
        void Update() => @delegate?.Invoke();
    }
    #endregion

    #region --Client API--
    public int width { get; private set; }
    public int height { get; private set; }
    public bool isRunning { get; private set; }
    public bool isFrontFacing { get { return activeCamera != null ? activeCamera.frontFacing : false; } }
    public Texture2D preview { get; private set; }
    public ICameraDevice activeCamera { get; private set; }

    /// <summary>
    /// Picks a camera matching the requested facing and configures its
    /// preview resolution and frame rate. On failure activeCamera stays null
    /// and an error is logged.
    /// </summary>
    public NatDeviceCamSource(int width, int height, int framerate = 30, bool front = false)
    {
        requestedWidth = width;
        requestedHeight = height;
        requestedFramerate = framerate;
        // Create a device query for device cameras
        // Use `GenericCameraDevice` so we also capture WebCamTexture cameras
        deviceQuery = new MediaDeviceQuery(MediaDeviceQuery.Criteria.GenericCameraDevice);
        // Pick camera
        if (deviceQuery.count == 0)
        {
            Debug.LogError("Camera device does not exist.");
            return;
        }
        // Walk the query until a camera with the requested facing is found.
        for (var i = 0; i < deviceQuery.count; i++)
        {
            activeCamera = deviceQuery.currentDevice as ICameraDevice;
            if (activeCamera.frontFacing == front) break;
            deviceQuery.Advance();
        }
        if (activeCamera == null || activeCamera.frontFacing != front)
        {
            Debug.LogError("Camera is null. Consider using " + (front ? "rear" : "front") + " camera.");
            activeCamera = null;
            return;
        }
        activeCamera.previewResolution = (width: requestedWidth, height: requestedHeight);
        activeCamera.frameRate = requestedFramerate;
    }

    /// <summary>Stops the preview, releases the camera and the preview texture.</summary>
    public void Dispose()
    {
        StopRunning();
        activeCamera = null;
        if (preview != null)
        {
            Texture2D.Destroy(preview);
            preview = null;
        }
    }

    /// <summary>
    /// Starts the preview. startCallback fires on the first frame, frameCallback
    /// on every later frame (pumped by the helper component's Update).
    /// </summary>
    public async Task StartRunning(Action startCallback, Action frameCallback)
    {
        if (activeCamera == null || isRunning)
            return;

        this.startCallback = startCallback;
        this.frameCallback = frameCallback;

        preview = await activeCamera.StartRunning();
        width = preview.width;
        height = preview.height;
        isRunning = true;
        firstFrame = true;
        attachment = new GameObject("NatDeviceWithOpenCVForUnityExample NatDeviceCamSource Helper").AddComponent<NatDeviceCamSourceAttachment>();
        attachment.@delegate = () => {
            if (firstFrame)
            {
                startCallback();
                firstFrame = false;
            }
            else
            {
                frameCallback();
            }
        };
    }

    /// <summary>Stops the preview and tears down the callback pump.</summary>
    public void StopRunning()
    {
        if (activeCamera == null || !isRunning)
            return;

        if (attachment != null)
        {
            attachment.@delegate = default;
            NatDeviceCamSourceAttachment.Destroy(attachment.gameObject);
            attachment = default;
        }

        activeCamera.StopRunning();
        isRunning = false;
    }

    /// <summary>
    /// Copies the current preview frame into the given Mat, flipping vertically
    /// (flip code 0) to convert between texture and Mat row order.
    /// </summary>
    public void CaptureFrame(Mat matrix)
    {
        if (preview == null) return;

#if OPENCV_USE_UNSAFE_CODE && UNITY_2018_2_OR_NEWER
        unsafe
        {
            // Zero-copy read of the texture data via its native pointer.
            var ptr = (IntPtr)NativeArrayUnsafeUtility.GetUnsafeReadOnlyPtr(preview.GetRawTextureData<byte>());
            MatUtils.copyToMat(ptr, matrix);
        }
        Core.flip(matrix, matrix, 0);
#else
        MatUtils.copyToMat(preview.GetRawTextureData(), matrix);
        Core.flip(matrix, matrix, 0);
#endif
    }

    /// <summary>Copies the current preview frame into a Color32 buffer (no flip).</summary>
    public void CaptureFrame(Color32[] pixelBuffer)
    {
        if (preview == null) return;

#if OPENCV_USE_UNSAFE_CODE && UNITY_2018_2_OR_NEWER
        unsafe
        {
            NativeArray<Color32> rawTextureData = preview.GetRawTextureData<Color32>();
            int size = UnsafeUtility.SizeOf<Color32>() * rawTextureData.Length;
            Color32* srcAddr = (Color32*)NativeArrayUnsafeUtility.GetUnsafeReadOnlyPtr(rawTextureData);
            fixed (Color32* dstAddr = pixelBuffer)
            {
                UnsafeUtility.MemCpy(dstAddr, srcAddr, size);
            }
        }
#else
        // NOTE(review): GCHandle/Marshal live in System.Runtime.InteropServices;
        // verify that using directive is present for builds without
        // OPENCV_USE_UNSAFE_CODE, or this branch will not compile.
        byte[] rawTextureData = preview.GetRawTextureData();
        GCHandle pin = GCHandle.Alloc(pixelBuffer, GCHandleType.Pinned);
        Marshal.Copy(rawTextureData, 0, pin.AddrOfPinnedObject(), rawTextureData.Length);
        pin.Free();
#endif
    }

    /// <summary>Copies the current preview frame into a raw byte buffer (no flip).</summary>
    public void CaptureFrame(byte[] pixelBuffer)
    {
        if (preview == null) return;

#if OPENCV_USE_UNSAFE_CODE && UNITY_2018_2_OR_NEWER
        unsafe
        {
            NativeArray<byte> rawTextureData = preview.GetRawTextureData<byte>();
            int size = UnsafeUtility.SizeOf<byte>() * rawTextureData.Length;
            byte* srcAddr = (byte*)NativeArrayUnsafeUtility.GetUnsafeReadOnlyPtr(rawTextureData);
            fixed (byte* dstAddr = pixelBuffer)
            {
                UnsafeUtility.MemCpy(dstAddr, srcAddr, size);
            }
        }
#else
        byte[] rawTextureData = preview.GetRawTextureData();
        Buffer.BlockCopy(rawTextureData, 0, pixelBuffer, 0, rawTextureData.Length);
#endif
    }

    /// <summary>
    /// Switches to the next camera in the query, restarting the preview and
    /// restoring the previous running state. Dispose() must run before
    /// Advance() so the old camera is fully released first.
    /// </summary>
    public async Task SwitchCamera()
    {
        if (activeCamera == null)
            return;

        var _isRunning = isRunning;
        Dispose();
        deviceQuery.Advance();
        activeCamera = deviceQuery.currentDevice as ICameraDevice;
        activeCamera.previewResolution = (width: requestedWidth, height: requestedHeight);
        activeCamera.frameRate = requestedFramerate;
        await StartRunning(startCallback, frameCallback);
        if (!_isRunning)
            StopRunning();
    }
    #endregion
}
}

Просмотреть файл

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 84e02125237fe4d6f865b3862bfd52c3
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: a69a2a8276734bb4eb9e353b5b07354a
folderAsset: yes
timeCreated: 1522155806
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,188 @@
using System.Collections.Generic;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// On-screen IMGUI overlay that shows the measured FPS, a set of key/value
/// diagnostic rows, and an optional console text strip at the bottom.
/// </summary>
public class FpsMonitor : MonoBehaviour
{
    // FPS measurement state: frames and time accumulated in the current window.
    int tick = 0;
    float elapsed = 0;
    float fps = 0;

    // Screen corner the stats box is anchored to.
    public enum Alignment
    {
        LeftTop,
        RightTop,
        LeftBottom,
        RightBottom,
    }

    public Alignment alignment = Alignment.RightTop;

    // Default layout constants (pixels).
    const float GUI_WIDTH = 75f;
    const float GUI_HEIGHT = 30f;
    const float MARGIN_X = 10f;
    const float MARGIN_Y = 10f;
    const float INNER_X = 8f;
    const float INNER_Y = 5f;
    const float GUI_CONSOLE_HEIGHT = 50f;

    // Distance of the box from the screen edge.
    public Vector2 offset = new Vector2(MARGIN_X, MARGIN_Y);
    public bool boxVisible = true;
    public float boxWidth = GUI_WIDTH;
    public float boxHeight = GUI_HEIGHT;
    // Inset of the label area inside the box.
    public Vector2 padding = new Vector2(INNER_X, INNER_Y);
    public float consoleHeight = GUI_CONSOLE_HEIGHT;

    GUIStyle console_labelStyle;

    // Cached layout rects, recomputed by LocateGUI().
    float x, y;
    Rect outer;
    Rect inner;

    float console_x, console_y;
    Rect console_outer;
    Rect console_inner;

    // Last seen screen size, used to detect resolution changes in OnGUI.
    int oldScrWidth;
    int oldScrHeight;

    // Key/value rows shown under the fps line.
    Dictionary<string, string> outputDict = new Dictionary<string, string>();

    // When non-empty, rendered as a console strip at the bottom of the screen.
    public string consoleText;

    // Use this for initialization
    void Start()
    {
        console_labelStyle = new GUIStyle();
        console_labelStyle.fontSize = 32;
        console_labelStyle.fontStyle = FontStyle.Normal;
        console_labelStyle.wordWrap = true;
        console_labelStyle.normal.textColor = Color.white;

        oldScrWidth = Screen.width;
        oldScrHeight = Screen.height;
        LocateGUI();
    }

    // Update is called once per frame
    void Update()
    {
        // Accumulate frames/time and refresh the fps figure once per second.
        tick++;
        elapsed += Time.deltaTime;
        if (elapsed >= 1f)
        {
            fps = tick / elapsed;
            tick = 0;
            elapsed = 0;
        }
    }

    void OnGUI()
    {
        // Re-run the layout when the screen size changed (rotation, resize).
        if (oldScrWidth != Screen.width || oldScrHeight != Screen.height)
        {
            LocateGUI();
        }
        oldScrWidth = Screen.width;
        oldScrHeight = Screen.height;

        if (boxVisible)
        {
            GUI.Box(outer, "");
        }

        GUILayout.BeginArea(inner);
        {
            GUILayout.BeginVertical();
            GUILayout.Label("fps : " + fps.ToString("F1"));
            foreach (KeyValuePair<string, string> pair in outputDict)
            {
                GUILayout.Label(pair.Key + " : " + pair.Value);
            }
            GUILayout.EndVertical();
        }
        GUILayout.EndArea();

        if (!string.IsNullOrEmpty(consoleText))
        {
            if (boxVisible)
            {
                GUI.Box(console_outer, "");
            }

            GUILayout.BeginArea(console_inner);
            {
                GUILayout.BeginVertical();
                GUILayout.Label(consoleText, console_labelStyle);
                GUILayout.EndVertical();
            }
            GUILayout.EndArea();
        }
    }

    /// <summary>Adds or updates a key/value diagnostic row.</summary>
    public void Add(string key, string value)
    {
        if (outputDict.ContainsKey(key))
        {
            outputDict[key] = value;
        }
        else
        {
            outputDict.Add(key, value);
        }
    }

    /// <summary>Removes a diagnostic row; no-op if the key is absent.</summary>
    public void Remove(string key)
    {
        outputDict.Remove(key);
    }

    /// <summary>Removes all diagnostic rows.</summary>
    public void Clear()
    {
        outputDict.Clear();
    }

    /// <summary>
    /// Recomputes the stats-box and console rects from the current screen size,
    /// alignment, offset and padding.
    /// </summary>
    public void LocateGUI()
    {
        x = GetAlignedX(alignment, boxWidth);
        y = GetAlignedY(alignment, boxHeight);
        outer = new Rect(x, y, boxWidth, boxHeight);
        inner = new Rect(x + padding.x, y + padding.y, boxWidth, boxHeight);

        // Console strip is always anchored to the bottom-left, spanning the screen.
        console_x = GetAlignedX(Alignment.LeftBottom, Screen.width);
        console_y = GetAlignedY(Alignment.LeftBottom, consoleHeight);
        console_outer = new Rect(console_x, console_y, Screen.width - offset.x * 2, consoleHeight);
        console_inner = new Rect(console_x + padding.x, console_y + padding.y, Screen.width - offset.x * 2 - padding.x, consoleHeight);
    }

    // X coordinate of a box of width w for the given anchor.
    float GetAlignedX(Alignment anchor, float w)
    {
        switch (anchor)
        {
            default:
            case Alignment.LeftTop:
            case Alignment.LeftBottom:
                return offset.x;
            case Alignment.RightTop:
            case Alignment.RightBottom:
                return Screen.width - w - offset.x;
        }
    }

    // Y coordinate of a box of height h for the given anchor.
    float GetAlignedY(Alignment anchor, float h)
    {
        switch (anchor)
        {
            default:
            case Alignment.LeftTop:
            case Alignment.RightTop:
                return offset.y;
            case Alignment.LeftBottom:
            case Alignment.RightBottom:
                return Screen.height - h - offset.y;
        }
    }
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 9730a00114a61104b87dc34be31889c0
timeCreated: 1521859037
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,684 @@
using NatSuite.Devices;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections;
using System.Threading.Tasks;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnity.UnityUtils.Helper
{
/// <summary>
/// NatDeviceCamPreview to mat helper.
/// v 1.0.0
/// Depends on NatDevice version 1.0.0 or later.
/// Depends on OpenCVForUnity version 2.3.7 or later.
/// </summary>
public class NatDeviceCamPreviewToMatHelper : WebCamTextureToMatHelper
{
#region --NatDevice CameraDevice Properties--
public virtual bool running => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().running : default;
public virtual string uniqueID => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().uniqueID : default;
public virtual bool frontFacing => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().frontFacing : default;
public virtual bool flashSupported => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().flashSupported : default;
public virtual bool torchSupported => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().torchSupported : default;
public virtual bool exposureLockSupported => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().exposureLockSupported : default;
public virtual bool focusLockSupported => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().focusLockSupported : default;
public virtual bool whiteBalanceLockSupported => GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().whiteBalanceLockSupported : default;
public virtual (float width, float height) fieldOfView
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().fieldOfView : default; }
}
public virtual (float min, float max) exposureRange
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().exposureRange : default; }
}
public virtual (float min, float max) zoomRange
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().zoomRange : default; }
}
public virtual (int width, int height) previewResolution
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().previewResolution : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().previewResolution = (width: value.width, height: value.height); }
}
public virtual (int width, int height) photoResolution
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().photoResolution : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().photoResolution = (width: value.width, height: value.height); }
}
public virtual int frameRate
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().frameRate : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().frameRate = value; }
}
public virtual float exposureBias
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().exposureBias : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().exposureBias = value; }
}
public virtual bool exposureLock
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().exposureLock : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().exposureLock = value; }
}
public virtual (float x, float y) exposurePoint
{
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().exposurePoint = (x: value.x, y: value.y); }
}
public virtual FlashMode flashMode
{
get { return (GetNatDeviceCameraDevice() != null && GetNatDeviceCameraDevice().running) ? GetNatDeviceCameraDevice().flashMode : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().flashMode = value; }
}
public virtual bool focusLock
{
get { return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().focusLock : default; }
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().focusLock = value; }
}
public virtual (float x, float y) focusPoint
{
set { if (GetNatDeviceCameraDevice() != null) GetNatDeviceCameraDevice().exposurePoint = (x: value.x, y: value.y); }
}
/// <summary>Torch (flashlight) state of the active camera.</summary>
public virtual bool torchEnabled
{
    get
    {
        return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().torchEnabled : default;
    }
    set
    {
        if (GetNatDeviceCameraDevice() != null)
            GetNatDeviceCameraDevice().torchEnabled = value;
    }
}
/// <summary>White balance lock state of the active camera.</summary>
public virtual bool whiteBalanceLock
{
    get
    {
        return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().whiteBalanceLock : default;
    }
    set
    {
        if (GetNatDeviceCameraDevice() != null)
            GetNatDeviceCameraDevice().whiteBalanceLock = value;
    }
}
/// <summary>Zoom ratio of the active camera.</summary>
public virtual float zoomRatio
{
    get
    {
        return GetNatDeviceCameraDevice() != null ? GetNatDeviceCameraDevice().zoomRatio : default;
    }
    set
    {
        if (GetNatDeviceCameraDevice() != null)
            GetNatDeviceCameraDevice().zoomRatio = value;
    }
}
/// <summary>Sets the frame orientation of the active camera. Write-only.</summary>
public virtual FrameOrientation orientation
{
    set
    {
        if (GetNatDeviceCameraDevice() != null)
            GetNatDeviceCameraDevice().orientation = value;
    }
}
#endregion
#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR && !DISABLE_NATDEVICE_API
/// <summary>
/// Requested frame rate. The stored value is clamped to [-1, float.MaxValue];
/// assigning it after initialization has completed re-initializes the helper.
/// </summary>
public override float requestedFPS
{
    get => _requestedFPS;
    set
    {
        _requestedFPS = Mathf.Clamp(value, -1f, float.MaxValue);
        if (hasInitDone)
            Initialize();
    }
}
// True while an asynchronous StartRunning request is in flight.
protected bool isStartWaiting = false;
// True once at least one preview frame has been delivered since the last start/stop.
protected bool didUpdateThisFrame = false;
// Query over the available NatDevice media devices (created lazily in _Initialize).
protected MediaDeviceQuery deviceQuery;
// The currently selected NatDevice camera, or null when none is active.
protected CameraDevice cameraDevice;
// Texture that receives the camera preview frames.
protected Texture2D preview;
// Update is called once per frame
protected override void Update()
{
    if (hasInitDone)
    {
        // Catch the orientation change of the screen and correct the mat image to the correct direction.
        // NOTE(review): re-initialization only triggers when the screen SIZE changed in addition
        // to the orientation — confirm this compound condition is intended.
        if (screenOrientation != Screen.orientation && (screenWidth != Screen.width || screenHeight != Screen.height))
        {
            // Propagate the new orientation to the NatDevice camera before re-initializing.
            switch (Screen.orientation)
            {
                case ScreenOrientation.LandscapeLeft: cameraDevice.orientation = FrameOrientation.LandscapeLeft; break;
                case ScreenOrientation.Portrait: cameraDevice.orientation = FrameOrientation.Portrait; break;
                case ScreenOrientation.LandscapeRight: cameraDevice.orientation = FrameOrientation.LandscapeRight; break;
                case ScreenOrientation.PortraitUpsideDown: cameraDevice.orientation = FrameOrientation.PortraitUpsideDown; break;
            }
            Initialize();
        }
        else
        {
            // Track the current screen size for the next comparison.
            screenWidth = Screen.width;
            screenHeight = Screen.height;
        }
    }
}
/// <summary>
/// Raises the destroy event: disposes helper state and shuts down the camera device.
/// </summary>
protected override void OnDestroy()
{
    Dispose();
    var device = cameraDevice;
    if (device != null && device.running)
        device.StopRunning();
    cameraDevice = null;
}
/// <summary>
/// Initializes this instance by coroutine: checks camera permission, selects a
/// NatDevice camera, starts it, and waits (bounded by timeoutFrameCount) for the
/// first preview frame before declaring success.
/// </summary>
protected override IEnumerator _Initialize()
{
    // Re-initialization: tear down the previous session first.
    if (hasInitDone)
    {
        ReleaseResources();
        if (onDisposed != null)
            onDisposed.Invoke();
    }
    isInitWaiting = true;
    // Wait for any in-flight StartRunning request before touching the device.
    while (isStartWaiting)
    {
        yield return null;
    }
    if (cameraDevice != null && cameraDevice.running)
        cameraDevice.StopRunning();
    cameraDevice = null;
    // Checks camera permission state.
    IEnumerator coroutine = hasUserAuthorizedCameraPermission();
    yield return coroutine;
    if (!(bool)coroutine.Current)
    {
        isInitWaiting = false;
        initCoroutine = null;
        if (onErrorOccurred != null)
            onErrorOccurred.Invoke(ErrorCode.CAMERA_PERMISSION_DENIED);
        yield break;
    }
    // Creates the camera
    deviceQuery = deviceQuery ?? new MediaDeviceQuery(MediaDeviceQuery.Criteria.CameraDevice);
    //var devices = deviceQuery.devices;
    if (!String.IsNullOrEmpty(requestedDeviceName))
    {
        int requestedDeviceIndex = -1;
        if (Int32.TryParse(requestedDeviceName, out requestedDeviceIndex))
        {
            if (requestedDeviceIndex >= 0 && requestedDeviceIndex < deviceQuery.count)
            {
                // NOTE(review): the query is never advanced to requestedDeviceIndex here, so
                // whatever device the query currently points at is selected regardless of the
                // parsed index — confirm against the MediaDeviceQuery API whether this is intended.
                cameraDevice = deviceQuery.currentDevice as CameraDevice;
            }
        }
        else
        {
            // Look the device up by its unique ID, advancing the query as we go.
            for (int cameraIndex = 0; cameraIndex < deviceQuery.count; cameraIndex++)
            {
                if (deviceQuery.currentDevice.uniqueID == requestedDeviceName)
                {
                    cameraDevice = deviceQuery.currentDevice as CameraDevice;
                    break;
                }
                deviceQuery.Advance();
            }
        }
        if (cameraDevice == null)
            Debug.Log("Cannot find camera device " + requestedDeviceName + ".");
    }
    if (cameraDevice == null)
    {
        // Fall back to the first device matching the requested facing direction.
        // Checks how many and which cameras are available on the device
        for (int cameraIndex = 0; cameraIndex < deviceQuery.count; cameraIndex++)
        {
            cameraDevice = deviceQuery.currentDevice as CameraDevice;
            if (cameraDevice != null && cameraDevice.frontFacing == requestedIsFrontFacing)
            {
                break;
            }
            deviceQuery.Advance();
        }
    }
    if (cameraDevice == null)
    {
        // Last resort: take whatever device the query currently points at, else fail.
        if (deviceQuery.count > 0)
        {
            cameraDevice = deviceQuery.currentDevice as CameraDevice;
        }
        if (cameraDevice == null)
        {
            isInitWaiting = false;
            initCoroutine = null;
            if (onErrorOccurred != null)
                onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
            yield break;
        }
    }
    // Set the camera's preview resolution and frameRate
    cameraDevice.previewResolution = (width: requestedWidth, height: requestedHeight);
    cameraDevice.frameRate = (int)requestedFPS;
    // Starts the camera; the continuation publishes the preview texture (or the error)
    // and clears isStartWaiting when the async start settles.
    isStartWaiting = true;
    didUpdateThisFrame = false;
    bool isError = false;
    cameraDevice.StartRunning().ContinueWith(
        task => {
            if (task.Exception == null)
            {
                preview = task.Result;
                didUpdateThisFrame = true;
            }
            else
            {
                isError = true;
                Debug.LogError(task.Exception.Message);
            }
            isStartWaiting = false;
        },
        TaskContinuationOptions.ExecuteSynchronously
    );
    // Poll once per frame until the first preview frame, an error, or a timeout.
    int initFrameCount = 0;
    bool isTimeout = false;
    while (true)
    {
        if (isError)
        {
            cameraDevice = null;
            isInitWaiting = false;
            initCoroutine = null;
            if (onErrorOccurred != null)
                onErrorOccurred.Invoke(ErrorCode.UNKNOWN);
            break;
        }
        if (initFrameCount > timeoutFrameCount)
        {
            isTimeout = true;
            break;
        }
        else if (didUpdateThisFrame)
        {
            // First frame arrived: allocate the frame mats and mark initialization done.
            Debug.Log("NatDeviceCamPreviewToMatHelper:: " + "UniqueID:" + cameraDevice.uniqueID + " width:" + preview.width + " height:" + preview.height + " fps:" + cameraDevice.frameRate
            + " isFrongFacing:" + cameraDevice.frontFacing);
            frameMat = new Mat(preview.height, preview.width, CvType.CV_8UC4);
            screenOrientation = Screen.orientation;
            screenWidth = Screen.width;
            screenHeight = Screen.height;
            if (rotate90Degree)
                rotatedFrameMat = new Mat(preview.width, preview.height, CvType.CV_8UC4);
            isInitWaiting = false;
            hasInitDone = true;
            initCoroutine = null;
            if (onInitialized != null)
                onInitialized.Invoke();
            break;
        }
        else
        {
            initFrameCount++;
            yield return null;
        }
    }
    if (isTimeout)
    {
        // Timed out waiting for the first frame: shut the camera back down and report.
        if (cameraDevice.running)
            cameraDevice.StopRunning();
        cameraDevice = null;
        isInitWaiting = false;
        initCoroutine = null;
        if (onErrorOccurred != null)
            onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
    }
}
/// <summary>
/// Starts the camera.
/// </summary>
public override void Play()
{
    if (!hasInitDone)
        return;
    StartCoroutine(_Play());
}
/// <summary>
/// Coroutine that starts the camera once any pending start request has settled.
/// </summary>
protected virtual IEnumerator _Play()
{
    // Let a previous start request finish before issuing a new one.
    while (isStartWaiting)
        yield return null;
    if (!hasInitDone || cameraDevice.running)
        yield break;
    isStartWaiting = true;
    cameraDevice.StartRunning().ContinueWith(
        t =>
        {
            if (t.Exception != null)
            {
                Debug.LogError(t.Exception.Message);
            }
            else
            {
                preview = t.Result;
                didUpdateThisFrame = true;
            }
            isStartWaiting = false;
        },
        TaskContinuationOptions.ExecuteSynchronously
    );
}
/// <summary>
/// Pauses the active camera. (Internally this performs the same stop as Stop().)
/// </summary>
public override void Pause()
{
    if (!hasInitDone)
        return;
    StartCoroutine(_Stop());
}
/// <summary>
/// Stops the active camera.
/// </summary>
public override void Stop()
{
    if (!hasInitDone)
        return;
    StartCoroutine(_Stop());
}
/// <summary>
/// Coroutine that stops the camera once any pending start request has settled.
/// </summary>
protected virtual IEnumerator _Stop()
{
    // Never stop while a start request is still in flight.
    while (isStartWaiting)
        yield return null;
    if (!hasInitDone || !cameraDevice.running)
        yield break;
    cameraDevice.StopRunning();
    didUpdateThisFrame = false;
}
/// <summary>
/// Indicates whether the active camera is currently playing.
/// </summary>
/// <returns><c>true</c>, if the active camera is playing, <c>false</c> otherwise.</returns>
public override bool IsPlaying()
{
    return hasInitDone && cameraDevice.running;
}
/// <summary>
/// Indicates whether the active camera device is currently front facing.
/// </summary>
/// <returns><c>true</c>, if the active camera device is front facing, <c>false</c> otherwise.</returns>
public override bool IsFrontFacing()
{
    return hasInitDone && cameraDevice.frontFacing;
}
/// <summary>
/// Returns the active camera device name (the device's unique ID).
/// </summary>
/// <returns>The active camera device name, or an empty string before initialization.</returns>
public override string GetDeviceName()
{
    if (!hasInitDone)
        return "";
    return cameraDevice.uniqueID;
}
/// <summary>
/// Returns the active camera framerate.
/// </summary>
/// <returns>The active camera framerate, or -1 before initialization.</returns>
public override float GetFPS()
{
    if (!hasInitDone)
        return -1f;
    return cameraDevice.frameRate;
}
/// <summary>
/// Returns the active WebCamTexture. Always <c>null</c> here: this helper delivers
/// frames through a NatDevice preview Texture2D rather than a WebCamTexture.
/// </summary>
/// <returns>Always <c>null</c>.</returns>
public override WebCamTexture GetWebCamTexture()
{
    return null;
}
/// <summary>
/// Indicates whether the video buffer of the frame has been updated.
/// </summary>
/// <returns><c>true</c>, if the video buffer has been updated <c>false</c> otherwise.</returns>
public override bool DidUpdateThisFrame()
{
    return hasInitDone && didUpdateThisFrame;
}
#endif
/// <summary>
/// Returns the NatDevice camera device.
/// </summary>
/// <returns>The NatDevice camera device, or null on platforms where the NatDevice API is unavailable.</returns>
public virtual CameraDevice GetNatDeviceCameraDevice()
{
#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR && !DISABLE_NATDEVICE_API
    return cameraDevice;
#else
    // NatDevice is only supported on iOS/Android device builds.
    return null;
#endif
}
#if (UNITY_IOS || UNITY_ANDROID) && !UNITY_EDITOR && !DISABLE_NATDEVICE_API
/// <summary>
/// Gets the mat of the current frame.
/// The Mat object's type is 'CV_8UC4' (RGBA).
/// </summary>
/// <returns>The mat of the current frame.</returns>
public override Mat GetMat()
{
    // Not running: hand back the last frame without refreshing it.
    if (!hasInitDone || !cameraDevice.running)
        return (rotatedFrameMat != null) ? rotatedFrameMat : frameMat;
    // Copy the preview texture into the frame mat, then apply the configured flips.
    Utils.fastTexture2DToMat(preview, frameMat, false);
    FlipMat(frameMat, flipVertical, flipHorizontal);
    if (rotatedFrameMat == null)
        return frameMat;
    Core.rotate(frameMat, rotatedFrameMat, Core.ROTATE_90_CLOCKWISE);
    return rotatedFrameMat;
}
/// <summary>
/// Flips the mat in place. The frame carries a vertical flip by default; the
/// flipVertical / flipHorizontal flags each toggle their axis on top of that.
/// </summary>
/// <param name="mat">Mat.</param>
protected override void FlipMat(Mat mat, bool flipVertical, bool flipHorizontal)
{
    // Default state: flip about the x-axis (vertical flip) only.
    bool flipAroundX = true;
    bool flipAroundY = false;
    if (flipVertical)
        flipAroundX = !flipAroundX;
    if (flipHorizontal)
        flipAroundY = !flipAroundY;
    // Map the axis pair onto OpenCV's flip code: 0 = x-axis, 1 = y-axis, -1 = both.
    if (flipAroundX && flipAroundY)
    {
        Core.flip(mat, mat, -1);
    }
    else if (flipAroundX)
    {
        Core.flip(mat, mat, 0);
    }
    else if (flipAroundY)
    {
        Core.flip(mat, mat, 1);
    }
}
/// <summary>
/// To release the resources: resets state flags, drops the preview texture and
/// disposes the frame mats.
/// </summary>
protected override void ReleaseResources()
{
    isInitWaiting = false;
    hasInitDone = false;
    didUpdateThisFrame = false;
    preview = null;
    frameMat?.Dispose();
    frameMat = null;
    rotatedFrameMat?.Dispose();
    rotatedFrameMat = null;
}
/// <summary>
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
public override void Dispose()
{
    // Drop the cached pixel buffer.
    if (colors != null)
        colors = null;
    if (isInitWaiting)
    {
        // Initialization still in progress: cancel it, then stop the camera
        // asynchronously if one was already created.
        CancelInitCoroutine();
        if (cameraDevice != null)
        {
            // Guard: StartCoroutine only works while this behaviour is alive and enabled.
            if (this != null && this.isActiveAndEnabled)
                StartCoroutine(_Dispose());
        }
        ReleaseResources();
    }
    else if (hasInitDone)
    {
        // Fully initialized: stop the camera asynchronously, release the mats,
        // and notify listeners.
        if (this != null && this.isActiveAndEnabled)
            StartCoroutine(_Dispose());
        ReleaseResources();
        if (onDisposed != null)
            onDisposed.Invoke();
    }
}
/// <summary>
/// Coroutine that waits for any in-flight start request to settle, then stops
/// and releases the camera device.
/// </summary>
protected virtual IEnumerator _Dispose()
{
    while (isStartWaiting)
    {
        yield return null;
    }
    // Null-guard added: cameraDevice can be released elsewhere while this coroutine
    // waits (Dispose() also starts it without checking the device in one branch);
    // this matches the guard already used in OnDestroy().
    if (cameraDevice != null && cameraDevice.running)
        cameraDevice.StopRunning();
    cameraDevice = null;
}
#endif
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: af1f30f564c42fd4a992788aa16ace23
timeCreated: 1524605908
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,119 @@
using System.Collections;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// MonoBehaviour helper that checks and, when necessary, requests runtime
/// permissions (camera, microphone, external storage write) on iOS and Android.
/// Each hasUserAuthorized* coroutine's final yielded value is a bool result.
/// </summary>
public class RuntimePermissionHelper : MonoBehaviour
{
    /// <summary>
    /// Checks (and requests, if needed) camera permission. The last yielded value
    /// is true when granted; on platforms without runtime permissions it is always true.
    /// </summary>
    public virtual IEnumerator hasUserAuthorizedCameraPermission()
    {
#if UNITY_IOS && UNITY_2018_1_OR_NEWER
        UserAuthorization mode = UserAuthorization.WebCam;
        if (!Application.HasUserAuthorization(mode))
        {
            yield return RequestUserAuthorization(mode);
        }
        yield return Application.HasUserAuthorization(mode);
#elif UNITY_ANDROID && UNITY_2018_3_OR_NEWER
        string permission = UnityEngine.Android.Permission.Camera;
        if (!UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission))
        {
            yield return RequestUserPermission(permission);
        }
        yield return UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission);
#else
        yield return true;
#endif
    }
    /// <summary>
    /// Checks (and requests, if needed) microphone permission; same yield protocol
    /// as hasUserAuthorizedCameraPermission.
    /// </summary>
    public virtual IEnumerator hasUserAuthorizedMicrophonePermission()
    {
#if UNITY_IOS && UNITY_2018_1_OR_NEWER
        UserAuthorization mode = UserAuthorization.Microphone;
        if (!Application.HasUserAuthorization(mode))
        {
            yield return RequestUserAuthorization(mode);
        }
        yield return Application.HasUserAuthorization(mode);
#elif UNITY_ANDROID && UNITY_2018_3_OR_NEWER
        string permission = UnityEngine.Android.Permission.Microphone;
        if (!UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission))
        {
            yield return RequestUserPermission(permission);
        }
        yield return UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission);
#else
        yield return true;
#endif
    }
    /// <summary>
    /// Checks (and requests, if needed) external storage write permission
    /// (Android runtime permissions only; true everywhere else).
    /// </summary>
    public virtual IEnumerator hasUserAuthorizedExternalStorageWritePermission()
    {
#if UNITY_ANDROID && UNITY_2018_3_OR_NEWER
        string permission = UnityEngine.Android.Permission.ExternalStorageWrite;
        if (!UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission))
        {
            yield return RequestUserPermission(permission);
        }
        yield return UnityEngine.Android.Permission.HasUserAuthorizedPermission(permission);
#else
        yield return true;
#endif
    }
#if (UNITY_IOS && UNITY_2018_1_OR_NEWER) || (UNITY_ANDROID && UNITY_2018_3_OR_NEWER)
    // True while a permission dialog is assumed to be open.
    protected bool isUserRequestingPermission;
    // Coroutine message handler: the app regaining focus is taken to mean the
    // permission dialog was dismissed.
    protected virtual IEnumerator OnApplicationFocus(bool hasFocus)
    {
        yield return null;
        if (isUserRequestingPermission && hasFocus)
            isUserRequestingPermission = false;
    }
#if UNITY_IOS
    // Requests iOS user authorization, then waits for the focus-based dismissal
    // signal, bailing out after ~0.25s if no dialog ever took focus.
    protected virtual IEnumerator RequestUserAuthorization(UserAuthorization mode)
    {
        isUserRequestingPermission = true;
        yield return Application.RequestUserAuthorization(mode);
        float timeElapsed = 0;
        while (isUserRequestingPermission)
        {
            if (timeElapsed > 0.25f)
            {
                isUserRequestingPermission = false;
                yield break;
            }
            timeElapsed += Time.deltaTime;
            yield return null;
        }
        yield break;
    }
#elif UNITY_ANDROID
    // Requests an Android runtime permission, then waits for the focus-based
    // dismissal signal, bailing out after ~0.25s if no dialog ever took focus.
    protected virtual IEnumerator RequestUserPermission(string permission)
    {
        isUserRequestingPermission = true;
        UnityEngine.Android.Permission.RequestUserPermission(permission);
        float timeElapsed = 0;
        while (isUserRequestingPermission)
        {
            if (timeElapsed > 0.25f)
            {
                isUserRequestingPermission = false;
                yield break;
            }
            timeElapsed += Time.deltaTime;
            yield return null;
        }
        yield break;
    }
#endif
#endif
}
}

Просмотреть файл

@ -0,0 +1,13 @@
fileFormatVersion: 2
guid: 0e036a041bcf1924b912512e8bdf9bd2
timeCreated: 1553402189
licenseType: Free
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,289 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UtilsModule;
using System;
using System.Threading.Tasks;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// ICameraSource implementation backed by a WebCamTexture whose frames are
/// converted into an upright (rotation/flip-corrected) OpenCV Mat.
/// </summary>
public class WebCamMatSource : ICameraSource
{
    #region --Op vars--
    // Currently selected webcam device.
    private WebCamDevice cameraDevice;
    // Client callbacks: startCallback fires once on the first frame, frameCallback on every frame.
    private Action startCallback, frameCallback;
    // Raw frame as read from the WebCamTexture, and the orientation-corrected output frame.
    private Mat sourceMatrix, uprightMatrix;
    // Scratch buffer used by Utils.webCamTextureToMat.
    private Color32[] bufferColors;
    private int requestedWidth, requestedHeight, requestedFramerate;
    // Index of the selected device within WebCamTexture.devices.
    private int cameraIndex;
    // True from StartRunning until the first frame has been processed.
    private bool firstFrame;
    // Screen orientation captured when this source was constructed.
    private DeviceOrientation orientation;
    // Whether frames must be rotated 90 degrees to appear upright.
    private bool rotate90Degree;
    private WebCamMatSourceAttachment attachment;
    // Hidden, scene-persistent MonoBehaviour that forwards Unity's Update into OnFrame.
    private class WebCamMatSourceAttachment : MonoBehaviour
    {
        public Action @delegate;
        void Awake() => DontDestroyOnLoad(this.gameObject);
        void Update() => @delegate?.Invoke();
    }
    #endregion
    #region --Client API--
    // Dimensions of the upright output frame.
    public int width { get { return uprightMatrix.width(); } }
    public int height { get { return uprightMatrix.height(); } }
    // Reports false until the first frame has been processed.
    public bool isRunning { get { return (activeCamera && !firstFrame) ? activeCamera.isPlaying : false; } }
    public bool isFrontFacing { get { return activeCamera ? cameraDevice.isFrontFacing : false; } }
    public WebCamTexture activeCamera { get; private set; }
    /// <summary>
    /// Selects the first device whose facing matches <paramref name="front"/> and
    /// prepares (but does not start) a WebCamTexture for it.
    /// </summary>
    public WebCamMatSource(int width, int height, int framerate = 30, bool front = false)
    {
        requestedWidth = width;
        requestedHeight = height;
        requestedFramerate = framerate;
#if UNITY_ANDROID && !UNITY_EDITOR
        // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, Pixel 2)
        // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
        framerate = front ? 15 : framerate;
#endif
        orientation = (DeviceOrientation)(int)Screen.orientation;
        // Pick camera
        var devices = WebCamTexture.devices;
        if (devices.Length == 0)
        {
            Debug.LogError("Camera device does not exist.");
            return;
        }
        for (; cameraIndex < devices.Length; cameraIndex++)
            if (devices[cameraIndex].isFrontFacing == front)
                break;
        if (cameraIndex == devices.Length)
        {
            Debug.LogError("Camera is null. Consider using " + (front ? "rear" : "front") + " camera.");
            return;
        }
        cameraDevice = devices[cameraIndex];
        activeCamera = new WebCamTexture(cameraDevice.name, requestedWidth, requestedHeight, framerate);
    }
    /// <summary>Stops the camera and releases the texture, mats and buffers.</summary>
    public void Dispose()
    {
        StopRunning();
        if (activeCamera != null)
        {
            WebCamTexture.Destroy(activeCamera);
            activeCamera = null;
        }
        if (sourceMatrix != null)
            sourceMatrix.Dispose();
        if (uprightMatrix != null)
            uprightMatrix.Dispose();
        sourceMatrix =
        uprightMatrix = null;
        bufferColors = null;
    }
    /// <summary>
    /// Starts the camera and the per-frame pump.
    /// NOTE(review): when the camera is null or already playing, the returned task
    /// never completes — confirm callers never await it in that state.
    /// </summary>
    public Task StartRunning(Action startCallback, Action frameCallback)
    {
        var startTask = new TaskCompletionSource<bool>();
        if(activeCamera == null || (activeCamera != null && activeCamera.isPlaying))
            return startTask.Task;
        this.startCallback = startCallback;
        this.frameCallback = frameCallback;
        firstFrame = true;
        activeCamera.Play();
        // Drive OnFrame from a hidden MonoBehaviour's Update.
        attachment = new GameObject("NatDeviceWithOpenCVForUnityExample WebCamMatSource Helper").AddComponent<WebCamMatSourceAttachment>();
        attachment.@delegate = () => {
            OnFrame();
        };
        startTask.SetResult(true);
        return startTask.Task;
    }
    /// <summary>Stops the per-frame pump and the camera.</summary>
    public void StopRunning()
    {
        if (activeCamera == null)
            return;
        if (attachment != null)
        {
            attachment.@delegate = default;
            WebCamMatSourceAttachment.Destroy(attachment.gameObject);
            attachment = default;
        }
        activeCamera.Stop();
    }
    /// <summary>Copies the latest upright frame into <paramref name="matrix"/>.</summary>
    public void CaptureFrame(Mat matrix)
    {
        if (uprightMatrix == null) return;
        uprightMatrix.copyTo(matrix);
    }
    /// <summary>Copies the latest upright frame into a Color32 pixel buffer.</summary>
    public void CaptureFrame(Color32[] pixelBuffer)
    {
        if (uprightMatrix == null) return;
        MatUtils.copyFromMat(uprightMatrix, pixelBuffer);
    }
    /// <summary>Copies the latest upright frame into a raw byte pixel buffer.</summary>
    public void CaptureFrame(byte[] pixelBuffer)
    {
        if (uprightMatrix == null) return;
        MatUtils.copyFromMat(uprightMatrix, pixelBuffer);
    }
    /// <summary>
    /// Switches to the next device in WebCamTexture.devices, restarting (or leaving
    /// stopped) according to the previous running state.
    /// </summary>
    public async Task SwitchCamera()
    {
        if (activeCamera == null)
            return;
        var _isRunning = activeCamera.isPlaying;
        Dispose();
        var devices = WebCamTexture.devices;
        cameraIndex = ++cameraIndex % devices.Length;
        cameraDevice = devices[cameraIndex];
        bool front = cameraDevice.isFrontFacing;
        int framerate = requestedFramerate;
#if UNITY_ANDROID && !UNITY_EDITOR
        // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, Pixel 2)
        // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
        framerate = front ? 15 : framerate;
#endif
        activeCamera = new WebCamTexture(cameraDevice.name, requestedWidth, requestedHeight, framerate);
        await StartRunning(startCallback, frameCallback);
        if (!_isRunning)
            StopRunning();
    }
    #endregion
    #region --Operations--
    // Per-frame worker: converts the newest WebCamTexture frame into uprightMatrix
    // and fires the client callbacks.
    private void OnFrame()
    {
        if (!activeCamera.isPlaying || !activeCamera.didUpdateThisFrame)
            return;
        // Check matrix
        sourceMatrix = sourceMatrix ?? new Mat(activeCamera.height, activeCamera.width, CvType.CV_8UC4);
        uprightMatrix = uprightMatrix ?? new Mat();
        bufferColors = bufferColors ?? new Color32[activeCamera.width * activeCamera.height];
        // Update matrix
        Utils.webCamTextureToMat(activeCamera, sourceMatrix, bufferColors, false);
        if (firstFrame)
        {
            // Decide once whether a 90-degree rotation is needed (portrait orientations,
            // mobile platforms only).
            rotate90Degree = false;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            switch (orientation)
            {
                case DeviceOrientation.LandscapeLeft:
                case DeviceOrientation.LandscapeRight:
                    break;
                case DeviceOrientation.Portrait:
                case DeviceOrientation.PortraitUpsideDown:
                    rotate90Degree = true;
                    break;
            }
#endif
        }
        // Derive a flip from the camera facing and its reported rotation angle.
        // Flip codes follow OpenCV's Core.flip: 0 = x-axis, 1 = y-axis, -1 = both,
        // int.MinValue = no flip.
        int flipCode = 0;
        if (cameraDevice.isFrontFacing)
        {
            if (activeCamera.videoRotationAngle == 0 || activeCamera.videoRotationAngle == 90)
            {
                flipCode = -1;
            }
            else if (activeCamera.videoRotationAngle == 180 || activeCamera.videoRotationAngle == 270)
            {
                flipCode = int.MinValue;
            }
        }
        else
        {
            if (activeCamera.videoRotationAngle == 180 || activeCamera.videoRotationAngle == 270)
            {
                flipCode = 1;
            }
        }
        // Front-facing portrait frames need their flip remapped before the 90-degree rotation.
        if (rotate90Degree && cameraDevice.isFrontFacing)
        {
            if (flipCode == int.MinValue)
            {
                flipCode = -1;
            }
            else if (flipCode == 0)
            {
                flipCode = 1;
            }
            else if (flipCode == 1)
            {
                flipCode = 0;
            }
            else if (flipCode == -1)
            {
                flipCode = int.MinValue;
            }
        }
        if (flipCode > int.MinValue)
        {
            Core.flip(sourceMatrix, sourceMatrix, flipCode);
        }
        if (rotate90Degree)
        {
            Core.rotate(sourceMatrix, uprightMatrix, Core.ROTATE_90_CLOCKWISE);
        }
        else
        {
            sourceMatrix.copyTo(uprightMatrix);
        }
        // Invoke client callbacks
        if (firstFrame)
        {
            startCallback();
            firstFrame = false;
        }
        frameCallback();
    }
    #endregion
}
}

Просмотреть файл

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: c02e9458fdffa45a6a262ed1919b780e
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,385 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Runtime.InteropServices;
using System.Threading.Tasks;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// ICameraSource implementation backed by a WebCamTexture that exposes frames as
/// upright (rotation/flip-corrected) Color32 pixel buffers.
/// </summary>
public class WebCamSource : ICameraSource
{
    #region --Op vars--
    // Currently selected webcam device.
    private WebCamDevice cameraDevice;
    // Client callbacks: startCallback fires once on the first frame, frameCallback on every frame.
    private Action startCallback, frameCallback;
    // Raw pixels as read from the WebCamTexture, and the orientation-corrected output buffer.
    private Color32[] sourceBuffer, uprightBuffer;
    private int requestedWidth, requestedHeight, requestedFramerate;
    // Index of the selected device within WebCamTexture.devices.
    private int cameraIndex;
    // True from StartRunning until the first frame has been processed.
    private bool firstFrame;
    // Screen orientation captured when this source was constructed.
    private DeviceOrientation orientation;
    // Whether frames must be rotated 90 degrees to appear upright.
    private bool rotate90Degree;
    private WebCamSourceAttachment attachment;
    // Hidden, scene-persistent MonoBehaviour that forwards Unity's Update into OnFrame.
    private class WebCamSourceAttachment : MonoBehaviour
    {
        public Action @delegate;
        void Awake() => DontDestroyOnLoad(this.gameObject);
        void Update() => @delegate?.Invoke();
    }
    #endregion
    #region --Client API--
    // Dimensions of the upright output frame (set on the first processed frame).
    public int width { get; private set; }
    public int height { get; private set; }
    // Reports false until the first frame has been processed.
    public bool isRunning { get { return (activeCamera && !firstFrame) ? activeCamera.isPlaying : false; } }
    public bool isFrontFacing { get { return activeCamera ? cameraDevice.isFrontFacing : false; } }
    public WebCamTexture activeCamera { get; private set; }
    /// <summary>
    /// Selects the first device whose facing matches <paramref name="front"/> and
    /// prepares (but does not start) a WebCamTexture for it.
    /// </summary>
    public WebCamSource(int width, int height, int framerate = 30, bool front = false)
    {
        requestedWidth = width;
        requestedHeight = height;
        requestedFramerate = framerate;
#if UNITY_ANDROID && !UNITY_EDITOR
        // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, Pixel 2)
        // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
        framerate = front ? 15 : framerate;
#endif
        orientation = (DeviceOrientation)(int)Screen.orientation;
        // Pick camera
        var devices = WebCamTexture.devices;
        if (devices.Length == 0)
        {
            Debug.LogError("Camera device does not exist.");
            return;
        }
        for (; cameraIndex < devices.Length; cameraIndex++)
            if (devices[cameraIndex].isFrontFacing == front)
                break;
        if (cameraIndex == devices.Length)
        {
            Debug.LogError("Camera is null. Consider using " + (front ? "rear" : "front") + " camera.");
            return;
        }
        cameraDevice = devices[cameraIndex];
        activeCamera = new WebCamTexture(cameraDevice.name, requestedWidth, requestedHeight, framerate);
    }
    /// <summary>Stops the camera and releases the texture and pixel buffers.</summary>
    public void Dispose()
    {
        StopRunning();
        if (activeCamera != null)
        {
            WebCamTexture.Destroy(activeCamera);
            activeCamera = null;
        }
        sourceBuffer = null;
        uprightBuffer = null;
    }
    /// <summary>
    /// Starts the camera and the per-frame pump.
    /// NOTE(review): when the camera is null or already playing, the returned task
    /// never completes — confirm callers never await it in that state.
    /// </summary>
    public Task StartRunning(Action startCallback, Action frameCallback)
    {
        var startTask = new TaskCompletionSource<bool>();
        if (activeCamera == null || (activeCamera != null && activeCamera.isPlaying))
            return startTask.Task;
        this.startCallback = startCallback;
        this.frameCallback = frameCallback;
        firstFrame = true;
        activeCamera.Play();
        // Drive OnFrame from a hidden MonoBehaviour's Update.
        attachment = new GameObject("NatDeviceWithOpenCVForUnityExample WebCamSource Helper").AddComponent<WebCamSourceAttachment>();
        attachment.@delegate = () => {
            OnFrame();
        };
        startTask.SetResult(true);
        return startTask.Task;
    }
    /// <summary>Stops the per-frame pump and the camera.</summary>
    public void StopRunning()
    {
        if (activeCamera == null)
            return;
        if (attachment != null)
        {
            attachment.@delegate = default;
            WebCamSourceAttachment.Destroy(attachment.gameObject);
            attachment = default;
        }
        activeCamera.Stop();
    }
    /// <summary>
    /// Copies the latest upright frame into <paramref name="matrix"/>, flipping it
    /// vertically after the copy (presumably because the Color32 rows are ordered
    /// bottom-up relative to the Mat layout — TODO confirm).
    /// </summary>
    public void CaptureFrame(Mat matrix)
    {
        if (uprightBuffer == null) return;
        MatUtils.copyToMat(uprightBuffer, matrix);
        Core.flip(matrix, matrix, 0);
    }
    /// <summary>Copies the latest upright frame into a Color32 pixel buffer.</summary>
    public void CaptureFrame(Color32[] pixelBuffer)
    {
        if (uprightBuffer == null) return;
        Array.Copy(uprightBuffer, pixelBuffer, uprightBuffer.Length);
    }
    /// <summary>Copies the latest upright frame into a raw byte pixel buffer.</summary>
    public void CaptureFrame(byte[] pixelBuffer)
    {
        if (uprightBuffer == null) return;
        // Pin the Color32 array so its raw bytes can be copied directly.
        GCHandle pin = GCHandle.Alloc(uprightBuffer, GCHandleType.Pinned);
        Marshal.Copy(pin.AddrOfPinnedObject(), pixelBuffer, 0, pixelBuffer.Length);
        pin.Free();
    }
    /// <summary>
    /// Switches to the next device in WebCamTexture.devices, restarting (or leaving
    /// stopped) according to the previous running state.
    /// </summary>
    public async Task SwitchCamera()
    {
        if (activeCamera == null)
            return;
        var _isRunning = activeCamera.isPlaying;
        Dispose();
        var devices = WebCamTexture.devices;
        cameraIndex = ++cameraIndex % devices.Length;
        cameraDevice = devices[cameraIndex];
        bool front = cameraDevice.isFrontFacing;
        int framerate = requestedFramerate;
#if UNITY_ANDROID && !UNITY_EDITOR
        // Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, Pixel 2)
        // https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
        framerate = front ? 15 : framerate;
#endif
        activeCamera = new WebCamTexture(cameraDevice.name, requestedWidth, requestedHeight, framerate);
        await StartRunning(startCallback, frameCallback);
        if (!_isRunning)
            StopRunning();
    }
    #endregion
    #region --Operations--
    // Per-frame worker: converts the newest WebCamTexture frame into uprightBuffer
    // and fires the client callbacks.
    private void OnFrame()
    {
        if (!activeCamera.isPlaying || !activeCamera.didUpdateThisFrame)
            return;
        // Check buffers
        sourceBuffer = sourceBuffer ?? activeCamera.GetPixels32();
        uprightBuffer = uprightBuffer ?? new Color32[sourceBuffer.Length];
        // Update buffers
        activeCamera.GetPixels32(sourceBuffer);
        if (firstFrame)
        {
            // Decide once whether a 90-degree rotation is needed and fix the output size.
            rotate90Degree = false;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
            switch (orientation)
            {
                case DeviceOrientation.LandscapeLeft:
                case DeviceOrientation.LandscapeRight:
                    width = activeCamera.width;
                    height = activeCamera.height;
                    break;
                case DeviceOrientation.Portrait:
                case DeviceOrientation.PortraitUpsideDown:
                    width = activeCamera.height;
                    height = activeCamera.width;
                    rotate90Degree = true;
                    break;
            }
#else
            width = activeCamera.width;
            height = activeCamera.height;
#endif
        }
        // Derive a flip from the camera facing and its reported rotation angle
        // (0 = vertical flip, 1 = horizontal, -1 = both, int.MinValue = none).
        int flipCode = int.MinValue;
        if (cameraDevice.isFrontFacing)
        {
            if (activeCamera.videoRotationAngle == 0 || activeCamera.videoRotationAngle == 90)
            {
                flipCode = 1;
            }
            else if (activeCamera.videoRotationAngle == 180 || activeCamera.videoRotationAngle == 270)
            {
                flipCode = 0;
            }
        }
        else
        {
            if (activeCamera.videoRotationAngle == 180 || activeCamera.videoRotationAngle == 270)
            {
                flipCode = -1;
            }
        }
        // Front-facing portrait frames need their flip remapped before the 90-degree rotation.
        if (rotate90Degree && cameraDevice.isFrontFacing)
        {
            if (flipCode == int.MinValue)
            {
                flipCode = -1;
            }
            else if (flipCode == 0)
            {
                flipCode = 1;
            }
            else if (flipCode == 1)
            {
                flipCode = 0;
            }
            else if (flipCode == -1)
            {
                flipCode = int.MinValue;
            }
        }
        if (flipCode > int.MinValue)
        {
            // These flips operate on sourceBuffer in place (the swap-based helpers
            // below tolerate dst aliasing src).
            if (flipCode == 0)
            {
                FlipVertical(sourceBuffer, activeCamera.width, activeCamera.height, sourceBuffer);
            }
            else if (flipCode == 1)
            {
                FlipHorizontal(sourceBuffer, activeCamera.width, activeCamera.height, sourceBuffer);
            }
            else if (flipCode == -1)
            {
                // Flipping both axes equals a 180-degree rotation.
                Rotate(sourceBuffer, activeCamera.width, activeCamera.height, sourceBuffer, 2);
            }
        }
        if (rotate90Degree)
        {
            Rotate(sourceBuffer, activeCamera.width, activeCamera.height, uprightBuffer, 1);
        }
        else
        {
            Array.Copy(sourceBuffer, uprightBuffer, sourceBuffer.Length);
        }
        // Invoke client callbacks
        if (firstFrame)
        {
            startCallback();
            firstFrame = false;
        }
        frameCallback();
    }
    #endregion
    #region --Utilities--
    // Swap-based vertical flip (top/bottom row exchange); safe when dst aliases src.
    private static void FlipVertical(Color32[] src, int width, int height, Color32[] dst)
    {
        for (var i = 0; i < height / 2; i++)
        {
            var y = i * width;
            var x = (height - i - 1) * width;
            for (var j = 0; j < width; j++)
            {
                int s = y + j;
                int t = x + j;
                Color32 c = src[s];
                dst[s] = src[t];
                dst[t] = c;
            }
        }
    }
    // Swap-based horizontal flip (left/right pixel exchange per row); safe when dst aliases src.
    private static void FlipHorizontal(Color32[] src, int width, int height, Color32[] dst)
    {
        for (int i = 0; i < height; i++)
        {
            int y = i * width;
            int x = y + width - 1;
            for (var j = 0; j < width / 2; j++)
            {
                int s = y + j;
                int t = x - j;
                Color32 c = src[s];
                dst[s] = src[t];
                dst[t] = c;
            }
        }
    }
    // Rotates src into dst by rotation * 90 degrees clockwise. Case 2 (180) is
    // swap-based and alias-safe; cases 1 and 3 overwrite dst wholesale and expect
    // dst to be a distinct buffer (callers here pass uprightBuffer for case 1).
    private static void Rotate(Color32[] src, int srcWidth, int srcHeight, Color32[] dst, int rotation)
    {
        int i;
        switch (rotation)
        {
            case 0:
                Array.Copy(src, dst, src.Length);
                break;
            case 1:
                // Rotate 90 degrees (CLOCKWISE)
                i = 0;
                for (int x = srcWidth - 1; x >= 0; x--)
                {
                    for (int y = 0; y < srcHeight; y++)
                    {
                        dst[i] = src[x + y * srcWidth];
                        i++;
                    }
                }
                break;
            case 2:
                // Rotate 180 degrees
                i = src.Length;
                for (int x = 0; x < i / 2; x++)
                {
                    Color32 t = src[x];
                    dst[x] = src[i - x - 1];
                    dst[i - x - 1] = t;
                }
                break;
            case 3:
                // Rotate 90 degrees (COUNTERCLOCKWISE)
                i = 0;
                for (int x = 0; x < srcWidth; x++)
                {
                    for (int y = srcHeight - 1; y >= 0; y--)
                    {
                        dst[i] = src[x + y * srcWidth];
                        i++;
                    }
                }
                break;
        }
    }
    #endregion
}
}

Просмотреть файл

@ -0,0 +1,11 @@
fileFormatVersion: 2
guid: 486b80aae3ddd4e14bb0406376991414
MonoImporter:
externalObjects: {}
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,15 @@
using UnityEngine;
using UnityEngine.SceneManagement;
namespace NatDeviceWithOpenCVForUnityExample
{
/// <summary>
/// License screen controller.
/// </summary>
public class ShowLicense : MonoBehaviour
{
    /// <summary>
    /// Back button handler: loads the example top scene.
    /// </summary>
    public void OnBackButtonClick() => SceneManager.LoadScene("NatDeviceWithOpenCVForUnityExample");
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: 599febf5582a8084bbb793443573466c
timeCreated: 1521862659
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 8d1000b636372dc4e8ce1be55a9f0e58
timeCreated: 1521862659
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,181 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using System;
using System.Collections.Generic;
using System.Reflection;
using System.Text;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace NatDeviceWithOpenCVForUnityExample
{
public class ShowSystemInfo : MonoBehaviour
{
// UI text element that displays the collected info report.
public Text systemInfoText;
// Input field that also receives the same report text.
public InputField systemInfoInputField;
// Asset name used as the key prefix for the version entry in the build info.
private const string ASSET_NAME = "OpenCVForUnity";
// Use this for initialization
void Start()
{
    // Assemble build, (mobile-only) device, and system info into one report.
    StringBuilder sb = new StringBuilder();
    sb.Append("###### Build Info ######\n");
    IDictionary<string, string> buildInfo = GetBuildInfo();
    foreach (string key in buildInfo.Keys)
    {
        sb.Append(key).Append(" = ").Append(buildInfo[key]).Append("\n");
    }
    sb.Append("\n");
#if UNITY_IOS || (UNITY_ANDROID && UNITY_2018_3_OR_NEWER)
    sb.Append("###### Device Info ######\n");
    IDictionary<string, string> deviceInfo = GetDeviceInfo();
    foreach (string key in deviceInfo.Keys)
    {
        sb.Append(key).Append(" = ").Append(deviceInfo[key]).Append("\n");
    }
    sb.Append("\n");
#endif
    sb.Append("###### System Info ######\n");
    IDictionary<string, string> systemInfo = GetSystemInfo();
    foreach (string key in systemInfo.Keys)
    {
        sb.Append(key).Append(" = ").Append(systemInfo[key]).Append("\n");
    }
    sb.Append("#########################\n");
    // Mirror the report into both UI widgets and the log.
    systemInfoText.text = systemInfoInputField.text = sb.ToString();
    Debug.Log(sb.ToString());
}
// Update is called once per frame
void Update()
{
    // No per-frame work: the report is built once in Start().
}
/// <summary>
/// Collects asset version, Unity version, build target, scripting backend and
/// the 'unsafe' code setting into a dictionary for display.
/// </summary>
public Dictionary<string, string> GetBuildInfo()
{
    Dictionary<string, string> dict = new Dictionary<string, string>();
    dict.Add(ASSET_NAME + " version", Core.NATIVE_LIBRARY_NAME + " " + Utils.getVersion() + " (" + Core.VERSION + ")");
    dict.Add("Build Unity version", Application.unityVersion);
    // The build target is resolved at compile time from platform defines.
#if UNITY_EDITOR
    dict.Add("Build target", "Editor");
#elif UNITY_STANDALONE_WIN
    dict.Add("Build target", "Windows");
#elif UNITY_STANDALONE_OSX
    dict.Add("Build target", "Mac OSX");
#elif UNITY_STANDALONE_LINUX
    dict.Add("Build target", "Linux");
#elif UNITY_ANDROID
    dict.Add("Build target", "Android");
#elif UNITY_IOS
    dict.Add("Build target", "iOS");
#elif UNITY_WSA
    dict.Add("Build target", "WSA");
#elif UNITY_WEBGL
    dict.Add("Build target", "WebGL");
#elif PLATFORM_LUMIN
    dict.Add("Build target", "LUMIN");
#else
    dict.Add("Build target", "");
#endif
#if ENABLE_MONO
    dict.Add("Scripting backend", "Mono");
#elif ENABLE_IL2CPP
    dict.Add("Scripting backend", "IL2CPP");
#elif ENABLE_DOTNET
    dict.Add("Scripting backend", ".NET");
#else
    dict.Add("Scripting backend", "");
#endif
#if OPENCV_USE_UNSAFE_CODE
    dict.Add("Allow 'unsafe' Code", "Enabled");
#else
    dict.Add("Allow 'unsafe' Code", "Disabled");
#endif
    return dict;
}
public Dictionary<string, string> GetDeviceInfo()
{
Dictionary<string, string> dict = new Dictionary<string, string>();
#if UNITY_IOS
dict.Add("iOS.Device.generation", UnityEngine.iOS.Device.generation.ToString());
dict.Add("iOS.Device.systemVersion", UnityEngine.iOS.Device.systemVersion.ToString());
#endif
#if UNITY_IOS && UNITY_2018_1_OR_NEWER
dict.Add("UserAuthorization.WebCam", Application.HasUserAuthorization(UserAuthorization.WebCam).ToString());
dict.Add("UserAuthorization.Microphone", Application.HasUserAuthorization(UserAuthorization.Microphone).ToString());
#endif
#if UNITY_ANDROID && UNITY_2018_3_OR_NEWER
dict.Add("Android.Permission.Camera", UnityEngine.Android.Permission.HasUserAuthorizedPermission(UnityEngine.Android.Permission.Camera).ToString());
dict.Add("Android.Permission.CoarseLocation", UnityEngine.Android.Permission.HasUserAuthorizedPermission(UnityEngine.Android.Permission.CoarseLocation).ToString());
dict.Add("Android.Permission.ExternalStorageRead", UnityEngine.Android.Permission.HasUserAuthorizedPermission(UnityEngine.Android.Permission.ExternalStorageRead).ToString());
dict.Add("Android.Permission.ExternalStorageWrite", UnityEngine.Android.Permission.HasUserAuthorizedPermission(UnityEngine.Android.Permission.ExternalStorageWrite).ToString());
dict.Add("Android.Permission.FineLocation", UnityEngine.Android.Permission.HasUserAuthorizedPermission(UnityEngine.Android.Permission.FineLocation).ToString());
dict.Add("Android.Permission.Microphone", UnityEngine.Android.Permission.HasUserAuthorizedPermission(UnityEngine.Android.Permission.Microphone).ToString());
#endif
return dict;
}
/// SystemInfo Class Propertys
public SortedDictionary<string, string> GetSystemInfo()
{
SortedDictionary<string, string> dict = new SortedDictionary<string, string>();
Type type = typeof(SystemInfo);
MemberInfo[] members = type.GetMembers(
BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Static);
foreach (MemberInfo mb in members)
{
try
{
if (mb.MemberType == MemberTypes.Property)
{
if (mb.Name == "deviceUniqueIdentifier")
{
dict.Add(mb.Name, "xxxxxxxxxxxxxxxxxxxxxxxx");
continue;
}
PropertyInfo pr = type.GetProperty(mb.Name);
if (pr != null)
{
object resobj = pr.GetValue(type, null);
dict.Add(mb.Name, resobj.ToString());
}
else
{
dict.Add(mb.Name, "");
}
}
}
catch (Exception e)
{
Debug.Log("Exception: " + e);
}
}
return dict;
}
public void OnBackButtonClick()
{
SceneManager.LoadScene("NatDeviceWithOpenCVForUnityExample");
}
}
}

Просмотреть файл

@ -0,0 +1,12 @@
fileFormatVersion: 2
guid: ebfaa02d802aac44399fcf98a93f24f0
timeCreated: 1522792936
licenseType: Free
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,8 @@
fileFormatVersion: 2
guid: 9ce31c29464a23a4380beca317fc5b01
timeCreated: 1522792936
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 8522a0c763b132a49bbb1568e9f6fcae
folderAsset: yes
timeCreated: 1522155404
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,106 @@
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
    /// <summary>
    /// WebCamTexture Only Example
    /// An example of displaying the preview frame of camera only using WebCamTexture API.
    /// </summary>
    public class WebCamTextureOnlyExample : ExampleBase<WebCamSource>
    {
        Texture2D texture;
        byte[] pixelBuffer;
        FpsMonitor fpsMonitor;

        protected override async void Start()
        {
            base.Start();

            // Pull the shared camera benchmark settings configured for all examples.
            int camWidth, camHeight, camFramerate;
            NatDeviceWithOpenCVForUnityExample.CameraConfiguration(out camWidth, out camHeight, out camFramerate);

            // Open the requested camera; if it is unavailable, retry with the opposite facing.
            cameraSource = new WebCamSource(camWidth, camHeight, camFramerate, useFrontCamera);
            if (!cameraSource.activeCamera)
                cameraSource = new WebCamSource(camWidth, camHeight, camFramerate, !useFrontCamera);
            await cameraSource.StartRunning(OnStart, OnFrame);

            // Register the stats rows shown by the on-screen FPS monitor (if one is attached).
            fpsMonitor = GetComponent<FpsMonitor>();
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("Name", "WebCamTextureOnlyExample");
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("width", "");
                fpsMonitor.Add("height", "");
                fpsMonitor.Add("isFrontFacing", "");
                fpsMonitor.Add("orientation", "");
            }
        }

        protected override void OnStart()
        {
            base.OnStart();

            // Allocate an RGBA32 buffer sized to the actual preview resolution.
            pixelBuffer = new byte[cameraSource.width * cameraSource.height * 4];

            // (Re)create the preview texture, discarding any previous one.
            if (texture != null)
                Texture2D.Destroy(texture);
            texture = new Texture2D(
                cameraSource.width,
                cameraSource.height,
                TextureFormat.RGBA32,
                false,
                false
            );

            // Hook the texture up to the UI and keep the aspect ratio correct.
            rawImage.texture = texture;
            aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;
            Debug.Log($"WebCam camera source started with resolution: {cameraSource.width}x{cameraSource.height} isFrontFacing: {cameraSource.isFrontFacing}");

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", cameraSource.width.ToString());
                fpsMonitor.Add("height", cameraSource.height.ToString());
                fpsMonitor.Add("isFrontFacing", cameraSource.isFrontFacing.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }

        protected override void Update()
        {
            base.Update();

            // Refresh the monitor rows once per measurement window (counter just wrapped).
            if (updateCount == 0)
            {
                if (fpsMonitor != null)
                {
                    fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                    fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                    fpsMonitor.Add("orientation", Screen.orientation.ToString());
                }
            }
        }

        protected override void UpdateTexture()
        {
            // Grab the latest frame, run the selected image effect, and upload to the GPU.
            cameraSource.CaptureFrame(pixelBuffer);
            ProcessImage(pixelBuffer, texture.width, texture.height, imageProcessingType);
            texture.LoadRawTextureData(pixelBuffer);
            texture.Apply();
        }

        protected override void OnDestroy()
        {
            base.OnDestroy();
            Texture2D.Destroy(texture);
            texture = null;
            pixelBuffer = null;
        }
    }
}

Просмотреть файл

@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: 39f2790264240404281d4d8069864b53
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,6 @@
fileFormatVersion: 2
guid: 247e68aaae4c3a4478a32b7849213734
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: fb415b3de5b7e55478861734060ef255
folderAsset: yes
timeCreated: 1522155404
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,116 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using UnityEngine;
namespace NatDeviceWithOpenCVForUnityExample
{
    /// <summary>
    /// WebCamTexture To Mat Example
    /// An example of converting a WebCamTexture image to OpenCV's Mat format.
    /// </summary>
    public class WebCamTextureToMatExample : ExampleBase<WebCamMatSource>
    {
        Mat frameMatrix, grayMatrix;
        Texture2D texture;
        FpsMonitor fpsMonitor;

        protected override async void Start()
        {
            base.Start();

            // Load global camera benchmark settings.
            int width, height, framerate;
            NatDeviceWithOpenCVForUnityExample.CameraConfiguration(out width, out height, out framerate);

            // Create camera source; fall back to the opposite facing if unavailable.
            cameraSource = new WebCamMatSource(width, height, framerate, useFrontCamera);
            if (!cameraSource.activeCamera)
                cameraSource = new WebCamMatSource(width, height, framerate, !useFrontCamera);
            await cameraSource.StartRunning(OnStart, OnFrame);

            // Register the stats rows shown by the on-screen FPS monitor (if attached).
            fpsMonitor = GetComponent<FpsMonitor>();
            if (fpsMonitor != null)
            {
                fpsMonitor.Add("Name", "WebCamTextureToMatExample");
                fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                fpsMonitor.Add("width", "");
                fpsMonitor.Add("height", "");
                fpsMonitor.Add("isFrontFacing", "");
                fpsMonitor.Add("orientation", "");
            }
        }

        protected override void OnStart()
        {
            base.OnStart();

            // (Re)create matrices sized to the actual preview resolution.
            if (frameMatrix != null)
                frameMatrix.Dispose();
            frameMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC4);
            if (grayMatrix != null)
                grayMatrix.Dispose();
            grayMatrix = new Mat(cameraSource.height, cameraSource.width, CvType.CV_8UC1);

            // (Re)create the preview texture, discarding any previous one.
            if (texture != null)
                Texture2D.Destroy(texture);
            texture = new Texture2D(
                cameraSource.width,
                cameraSource.height,
                TextureFormat.RGBA32,
                false,
                false
            );

            // Display texture.
            rawImage.texture = texture;
            aspectFitter.aspectRatio = cameraSource.width / (float)cameraSource.height;
            Debug.Log("WebCam camera source started with resolution: " + cameraSource.width + "x" + cameraSource.height + " isFrontFacing: " + cameraSource.isFrontFacing);

            if (fpsMonitor != null)
            {
                fpsMonitor.Add("width", cameraSource.width.ToString());
                fpsMonitor.Add("height", cameraSource.height.ToString());
                fpsMonitor.Add("isFrontFacing", cameraSource.isFrontFacing.ToString());
                fpsMonitor.Add("orientation", Screen.orientation.ToString());
            }
        }

        protected override void Update()
        {
            base.Update();

            // Refresh the monitor rows once per measurement window (counter just wrapped).
            if (updateCount == 0)
            {
                if (fpsMonitor != null)
                {
                    fpsMonitor.Add("onFrameFPS", onFrameFPS.ToString("F1"));
                    fpsMonitor.Add("drawFPS", drawFPS.ToString("F1"));
                    fpsMonitor.Add("orientation", Screen.orientation.ToString());
                }
            }
        }

        protected override void UpdateTexture()
        {
            // Capture, process in-place, then convert Mat -> Texture2D for display.
            cameraSource.CaptureFrame(frameMatrix);
            ProcessImage(frameMatrix, grayMatrix, imageProcessingType);
            Utils.fastMatToTexture2D(frameMatrix, texture);
        }

        protected override void OnDestroy()
        {
            base.OnDestroy();
            // Guard against teardown before OnStart ever ran (e.g. the camera failed
            // to open): the original dereferenced the matrices unconditionally and
            // threw NullReferenceException in that case.
            if (grayMatrix != null)
                grayMatrix.Dispose();
            if (frameMatrix != null)
                frameMatrix.Dispose();
            Texture2D.Destroy(texture);
            texture = null;
            grayMatrix =
            frameMatrix = null;
        }
    }
}

Просмотреть файл

@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: f2163b2acf1d92b4591f93c90a5e82f2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -0,0 +1,6 @@
fileFormatVersion: 2
guid: b90a2bbbb939a5a45aab414e3f5a7a56
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,5 @@
<linker>
<assembly fullname="UnityEngine">
<type fullname="UnityEngine.SystemInfo" preserve="all"/>
</assembly>
</linker>

Просмотреть файл

@ -0,0 +1,7 @@
fileFormatVersion: 2
guid: d11dbcae93faacf48a73bef9bee50163
TextScriptImporter:
externalObjects: {}
userData:
assetBundleName:
assetBundleVariant:

48
README.md Normal file
Просмотреть файл

@ -0,0 +1,48 @@
# NatDevice With OpenCVForUnity Example
- An example of a benchmark test integrating NatDevice and OpenCVForUnity. (Comparison between WebCamTexture and NatDevice API)
- An example of replacing WebCamTextureToMatHelper with NatDeviceCamPreviewToMatHelper.
- An example of native sharing and save to the camera roll using NatShare API.
## Environment
- Android (Pixel, Nexus 7) / iOS (iPhone8, iPhone6s)
- Unity >= 2018.3+
- Scripting backend MONO / IL2CPP
- [NatDevice - Media Device API](https://assetstore.unity.com/packages/tools/integration/natdevice-media-device-api-162053?aid=1011l4ehR) 1.0.1+
- [NatShare - Mobile Sharing API](https://assetstore.unity.com/packages/tools/integration/natshare-mobile-sharing-api-117705?aid=1011l4ehR) 1.2.2+
- [OpenCV for Unity](https://assetstore.unity.com/packages/tools/integration/opencv-for-unity-21088?aid=1011l4ehR) 2.3.8+
## Demo
- Android [NatDeviceWithOpenCVForUnityExample.apk](https://github.com/EnoxSoftware/NatDeviceWithOpenCVForUnityExample/releases)
## Setup
1. Download the latest release unitypackage. [NatDeviceWithOpenCVForUnityExample.unitypackage](https://github.com/EnoxSoftware/NatDeviceWithOpenCVForUnityExample/releases)
1. Create a new project. (NatDeviceWithOpenCVForUnityExample)
1. Import NatDevice.
1. Import NatShare.
1. Import OpenCVForUnity.
* Setup the OpenCVForUnity. (Tools > OpenCV for Unity > Set Plugin Import Settings)
1. Import the NatDeviceWithOpenCVForUnityExample.unitypackage.
1. Change the "Minimum API Level" to 22 or higher in the "Player Settings (Android)" Inspector.
1. Change the "Target minimum iOS Version" to 11 or higher in the "Player Settings (iOS)" Inspector.
* Set the reason for accessing the camera in "cameraUsageDescription".
1. Add the "Assets/NatDeviceWithOpenCVForUnityExample/*.unity" files to the "Scenes In Build" list in the "Build Settings" window.
1. Build and Deploy to Android and iOS.
## Android Instructions
Build requires Android SDK Platform 29 or higher.
## iOS Instructions
After building an Xcode project from Unity, add the following keys to the `Info.plist` file with a good description:
- `NSPhotoLibraryUsageDescription`
- `NSPhotoLibraryAddUsageDescription`
## ScreenShot
![screenshot01.jpg](screenshot01.jpg)
![screenshot02.jpg](screenshot02.jpg)
![screenshot03.jpg](screenshot03.jpg)

Двоичные данные
screenshot01.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 100 KiB

Двоичные данные
screenshot02.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 106 KiB

Двоичные данные
screenshot03.jpg Normal file

Двоичный файл не отображается.

После

Ширина:  |  Высота:  |  Размер: 117 KiB