This commit is contained in:
EnoxSoftware 2019-01-18 05:03:17 +09:00
Родитель 58c9a7cee8
Коммит b756c15b8b
92 изменённых файлов: 2786 добавлений и 3130 удалений

Просмотреть файл

@ -1,94 +1,91 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
namespace OpenCVFaceTracker
{
/// <summary>
/// Face detector.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
/// <summary>
/// Face detector.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
public class FaceDetector
{
string detector_fname; //file containing cascade classifier
Vector3 detector_offset; //offset from center of detection
Mat reference; //reference shape
string detector_fname;
//file containing cascade classifier
Vector3 detector_offset;
//offset from center of detection
Mat reference;
//reference shape
CascadeClassifier detector;
public FaceDetector()
public FaceDetector ()
{
}
public List<Point[]> detect(Mat im, float scaleFactor, int minNeighbours, OpenCVForUnity.Size minSize)
public List<Point[]> detect (Mat im, float scaleFactor, int minNeighbours, Size minSize)
{
//convert image to greyscale
Mat gray = null;
if (im.channels() == 1)
{
if (im.channels () == 1) {
gray = im;
} else
{
gray = new Mat();
Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
} else {
gray = new Mat ();
Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY);
}
using (Mat equalizeHistMat = new Mat ())
using (MatOfRect faces = new MatOfRect ())
{
using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) {
Imgproc.equalizeHist(gray, equalizeHistMat);
Imgproc.equalizeHist (gray, equalizeHistMat);
detector.detectMultiScale(equalizeHistMat, faces, scaleFactor, minNeighbours, 0
| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size());
detector.detectMultiScale (equalizeHistMat, faces, scaleFactor, minNeighbours, 0
| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size ());
if (faces.rows() < 1)
{
return new List<Point[]>();
if (faces.rows () < 1) {
return new List<Point[]> ();
}
return convertMatOfRectToPoints(faces);
return convertMatOfRectToPoints (faces);
}
}
public List<Point[]> convertMatOfRectToPoints(MatOfRect rects)
public List<Point[]> convertMatOfRectToPoints (MatOfRect rects)
{
List<OpenCVForUnity.Rect> R = rects.toList();
List<OpenCVForUnity.CoreModule.Rect> R = rects.toList ();
List<Point[]> points = new List<Point[]>(R.Count);
List<Point[]> points = new List<Point[]> (R.Count);
int n = reference.rows() / 2;
float[] reference_float = new float[reference.total()];
Utils.copyFromMat<float>(reference, reference_float);
int n = reference.rows () / 2;
float[] reference_float = new float[reference.total ()];
Utils.copyFromMat<float> (reference, reference_float);
foreach (var r in R)
{
foreach (var r in R) {
Vector3 scale = detector_offset * r.width;
Point[] p = new Point[n];
for (int i = 0; i < n; i++)
{
p [i] = new Point();
for (int i = 0; i < n; i++) {
p [i] = new Point ();
p [i].x = scale.z * reference_float [2 * i] + r.x + 0.5 * r.width + scale.x;
p [i].y = scale.z * reference_float [(2 * i) + 1] + r.y + 0.5 * r.height + scale.y;
}
points.Add(p);
points.Add (p);
}
return points;
}
public void read(object root_json)
public void read (object root_json)
{
IDictionary detector_json = (IDictionary)root_json;
@ -96,26 +93,25 @@ namespace OpenCVFaceTracker
// Debug.Log ("detector_fname " + detector_fname);
detector_offset = new Vector3((float)(double)detector_json ["x offset"], (float)(double)detector_json ["y offset"], (float)(double)detector_json ["z offset"]);
detector_offset = new Vector3 ((float)(double)detector_json ["x offset"], (float)(double)detector_json ["y offset"], (float)(double)detector_json ["z offset"]);
// Debug.Log ("detector_offset " + detector_offset.ToString ());
IDictionary reference_json = (IDictionary)detector_json ["reference"];
reference = new Mat((int)(long)reference_json ["rows"], (int)(long)reference_json ["cols"], CvType.CV_32F);
reference = new Mat ((int)(long)reference_json ["rows"], (int)(long)reference_json ["cols"], CvType.CV_32F);
// Debug.Log ("reference " + reference.ToString ());
IList data_json = (IList)reference_json ["data"];
float[] data = new float[reference.rows() * reference.cols()];
for (int i = 0; i < data_json.Count; i++)
{
float[] data = new float[reference.rows () * reference.cols ()];
for (int i = 0; i < data_json.Count; i++) {
data [i] = (float)(double)data_json [i];
}
Utils.copyToMat(data, reference);
Utils.copyToMat (data, reference);
// Debug.Log ("reference dump " + reference.dump ());
detector = new CascadeClassifier(Utils.getFilePath(detector_fname));
detector = new CascadeClassifier (Utils.getFilePath (detector_fname));
// detector = new CascadeClassifier (System.IO.Path.Combine (Application.streamingAssetsPath, detector_fname));
}
}

Просмотреть файл

@ -1,12 +1,15 @@
using UnityEngine;
using System.Collections;
using OpenCVForUnity;
using System.Collections.Generic;
using MiniJSON;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.ImgprocModule;
#if UNITY_WSA
using UnityEngine.Windows;
using System.Text;
#else
using System.IO;
#endif
@ -14,249 +17,229 @@ using System.IO;
namespace OpenCVFaceTracker
{
/// <summary>
/// Face tracker.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
/// <summary>
/// Face tracker.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
public class FaceTracker
{
List<Point[]> points; //current tracked points
FaceDetector detector; //detector for initialisation
ShapeModel smodel; //shape model
PatchModels pmodel; //feature detectors
List<Point[]> points;
//current tracked points
FaceDetector detector;
//detector for initialisation
ShapeModel smodel;
//shape model
PatchModels pmodel;
//feature detectors
public FaceTracker(string filepath)
public FaceTracker (string filepath)
{
if (filepath == null)
{
Debug.LogError("tracker_model file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
if (filepath == null) {
Debug.LogError ("tracker_model file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
points = new List<Point[]>();
points = new List<Point[]> ();
string jsonText = null;
#if UNITY_WSA
var data = File.ReadAllBytes(filepath);
jsonText = Encoding.UTF8.GetString(data, 0, data.Length);
#else
jsonText = File.ReadAllText(filepath);
#endif
#if UNITY_WSA
var data = File.ReadAllBytes(filepath);
jsonText = Encoding.UTF8.GetString(data, 0, data.Length);
#else
jsonText = File.ReadAllText (filepath);
#endif
// TextAsset textAsset = Resources.Load (filename) as TextAsset;
// string jsonText = textAsset.text;
IDictionary json = (IDictionary)Json.Deserialize(jsonText);
IDictionary json = (IDictionary)Json.Deserialize (jsonText);
IDictionary ft = (IDictionary)json ["ft object"];
detector = new FaceDetector();
detector.read(ft ["detector"]);
detector = new FaceDetector ();
detector.read (ft ["detector"]);
smodel = new ShapeModel();
smodel.read(ft ["smodel"]);
smodel = new ShapeModel ();
smodel.read (ft ["smodel"]);
pmodel = new PatchModels();
pmodel.read(ft ["pmodel"]);
pmodel = new PatchModels ();
pmodel.read (ft ["pmodel"]);
}
public List<Point[]> getPoints()
public List<Point[]> getPoints ()
{
return points;
}
public void addPoints(List<Point[]> points)
public void addPoints (List<Point[]> points)
{
points.AddRange(points);
points.AddRange (points);
}
public void addPoints(MatOfRect rects)
public void addPoints (MatOfRect rects)
{
points.AddRange(detector.convertMatOfRectToPoints(rects));
points.AddRange (detector.convertMatOfRectToPoints (rects));
}
public Point[] getConnections()
public Point[] getConnections ()
{
Point[] c = new Point[smodel.C.rows()];
Point[] c = new Point[smodel.C.rows ()];
int[] data = new int[c.Length * 2];
Utils.copyFromMat<int>(smodel.C, data);
Utils.copyFromMat<int> (smodel.C, data);
int len = c.Length;
for (int i = 0; i < len; i++)
{
c [i] = new Point(data [i * 2], data [(i * 2) + 1]);
for (int i = 0; i < len; i++) {
c [i] = new Point (data [i * 2], data [(i * 2) + 1]);
}
return c;
}
public void reset()
public void reset ()
{
//reset tracker
points.Clear();
points.Clear ();
}
public bool track(Mat im, FaceTrackerParams p)
public bool track (Mat im, FaceTrackerParams p)
{
if (points.Count <= 0)
return false;
if (points.Count <= 0) return false;
//convert image to greyscale
Mat gray = null;
if (im.channels() == 1)
{
if (im.channels () == 1) {
gray = im;
} else
{
gray = new Mat();
Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
} else {
gray = new Mat ();
Imgproc.cvtColor (im, gray, Imgproc.COLOR_RGBA2GRAY);
}
//initialise
// if (!tracking)
// points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);
int count = points.Count;
for (int i = 0; i < count; i++)
{
if (points [i].Length != smodel.npts())
return false;
for (int i = 0; i < count; i++) {
if (points [i].Length != smodel.npts ()) return false;
//fit
int size_count = p.ssize.Count;
for (int level = 0; level < size_count; level++)
{
points [i] = fit(gray, points [i], p.ssize [level], p.robust, p.itol, p.ftol);
for (int level = 0; level < size_count; level++) {
points [i] = fit (gray, points [i], p.ssize [level], p.robust, p.itol, p.ftol);
}
}
return true;
}
public void draw(Mat im, Scalar pts_color, Scalar con_color)
public void draw (Mat im, Scalar pts_color, Scalar con_color)
{
int[] smodel_C_int = new int[smodel.C.total()];
Utils.copyFromMat<int>(smodel.C, smodel_C_int);
int[] smodel_C_int = new int[smodel.C.total ()];
Utils.copyFromMat<int> (smodel.C, smodel_C_int);
foreach (var point in points)
{
foreach (var point in points) {
int n = point.Length;
if (n == 0)
return;
if (n == 0) return;
int rows = smodel.C.rows();
int cols = smodel.C.cols();
for (int i = 0; i < rows; i++)
{
int rows = smodel.C.rows ();
int cols = smodel.C.cols ();
for (int i = 0; i < rows; i++) {
int j = smodel_C_int [i * cols], k = smodel_C_int [(i * cols) + 1];
#if OPENCV_2
Core.line(im, point[j], point[k], con_color, 1);
#else
Imgproc.line(im, point [j], point [k], con_color, 1);
#endif
#if OPENCV_2
Core.line(im, point[j], point[k], con_color, 1);
#else
Imgproc.line (im, point [j], point [k], con_color, 1);
#endif
}
for (int i = 0; i < n; i++)
{
#if OPENCV_2
Core.circle (im, point [i], 1, pts_color, 2, Core.LINE_AA, 0);
#else
Imgproc.circle(im, point [i], 1, pts_color, 2, Core.LINE_AA, 0);
#endif
for (int i = 0; i < n; i++) {
#if OPENCV_2
Core.circle (im, point [i], 1, pts_color, 2, Core.LINE_AA, 0);
#else
Imgproc.circle (im, point [i], 1, pts_color, 2, Imgproc.LINE_AA, 0);
#endif
}
}
}
private Point[] fit(Mat image,
Point[] init,
OpenCVForUnity.Size ssize,
bool robust,
int itol,
double ftol)
private Point[] fit (Mat image,
Point[] init,
Size ssize,
bool robust,
int itol,
double ftol)
{
int n = smodel.npts();
int n = smodel.npts ();
// assert((int(init.size())==n) && (pmodel.n_patches()==n));
// Debug.Log ("init.size())==n " + init.Length + " " + n);
// Debug.Log ("pmodel.n_patches()==n " + pmodel.n_patches () + " " + n);
smodel.calc_params(init, new Mat(), 3.0f);
Point[] pts = smodel.calc_shape();
smodel.calc_params (init, new Mat (), 3.0f);
Point[] pts = smodel.calc_shape ();
//find facial features in image around current estimates
Point[] peaks = pmodel.calc_peaks(image, pts, ssize);
Point[] peaks = pmodel.calc_peaks (image, pts, ssize);
//optimise
if (!robust)
{
smodel.calc_params(peaks, new Mat(), 3.0f); //compute shape model parameters
pts = smodel.calc_shape(); //update shape
} else
{
using (Mat weight = new Mat (n, 1, CvType.CV_32F))
using (Mat weight_sort = new Mat (n, 1, CvType.CV_32F))
{
if (!robust) {
smodel.calc_params (peaks, new Mat (), 3.0f); //compute shape model parameters
pts = smodel.calc_shape (); //update shape
} else {
using (Mat weight = new Mat (n, 1, CvType.CV_32F)) using (Mat weight_sort = new Mat (n, 1, CvType.CV_32F)) {
float[] weight_float = new float[weight.total()];
Utils.copyFromMat<float>(weight, weight_float);
float[] weight_sort_float = new float[weight_sort.total()];
float[] weight_float = new float[weight.total ()];
Utils.copyFromMat<float> (weight, weight_float);
float[] weight_sort_float = new float[weight_sort.total ()];
Point[] pts_old = pts;
for (int iter = 0; iter < itol; iter++)
{
for (int iter = 0; iter < itol; iter++) {
//compute robust weight
for (int i = 0; i < n; i++)
{
using (MatOfPoint tmpMat = new MatOfPoint (new Point (pts [i].x - peaks [i].x, pts [i].y - peaks [i].y)))
{
weight_float [i] = (float)Core.norm(tmpMat);
for (int i = 0; i < n; i++) {
using (MatOfPoint tmpMat = new MatOfPoint (new Point (pts [i].x - peaks [i].x, pts [i].y - peaks [i].y))) {
weight_float [i] = (float)Core.norm (tmpMat);
}
}
Utils.copyToMat(weight_float, weight);
Utils.copyToMat (weight_float, weight);
Core.sort(weight, weight_sort, Core.SORT_EVERY_COLUMN | Core.SORT_ASCENDING);
Core.sort (weight, weight_sort, Core.SORT_EVERY_COLUMN | Core.SORT_ASCENDING);
Utils.copyFromMat<float>(weight_sort, weight_sort_float);
Utils.copyFromMat<float> (weight_sort, weight_sort_float);
double var = 1.4826 * weight_sort_float [n / 2];
if (var < 0.1)
var = 0.1;
if (var < 0.1) var = 0.1;
Core.pow(weight, 2, weight);
Core.pow (weight, 2, weight);
Core.multiply(weight, new Scalar(-0.5 / (var * var)), weight);
Core.multiply (weight, new Scalar (-0.5 / (var * var)), weight);
Core.exp(weight, weight);
Core.exp (weight, weight);
//compute shape model parameters
smodel.calc_params(peaks, weight, 3.0f);
smodel.calc_params (peaks, weight, 3.0f);
//update shape
pts = smodel.calc_shape();
pts = smodel.calc_shape ();
//check for convergence
float v = 0;
for (int i = 0; i < n; i++)
{
using (MatOfPoint tmpMat = new MatOfPoint (new Point (pts [i].x - pts_old [i].x, pts [i].y - pts_old [i].y)))
{
v += (float)Core.norm(tmpMat);
for (int i = 0; i < n; i++) {
using (MatOfPoint tmpMat = new MatOfPoint (new Point (pts [i].x - pts_old [i].x, pts [i].y - pts_old [i].y))) {
v += (float)Core.norm (tmpMat);
}
}
if (v < ftol)
{
if (v < ftol) {
break;
} else
{
} else {
pts_old = pts;
}
}

Просмотреть файл

@ -1,38 +1,38 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity;
using OpenCVForUnity.CoreModule;
namespace OpenCVFaceTracker
{
/// <summary>
/// Face tracker parameters.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
/// <summary>
/// Face tracker parameters.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
public class FaceTrackerParams
{
public List<OpenCVForUnity.Size> ssize; //search region size/level
public bool robust; //use robust fitting?
public int itol; //maximum number of iterations to try
public double ftol; //convergence tolerance
// public float scaleFactor; //OpenCV Cascade detector parameters
// public int minNeighbours; //...
// public OpenCVForUnity.Size minSize;
public List<Size> ssize;
//search region size/level
public bool robust;
//use robust fitting?
public int itol;
//maximum number of iterations to try
public double ftol;
//convergence tolerance
// public float scaleFactor; //OpenCV Cascade detector parameters
// public int minNeighbours; //...
// public Size minSize;
public FaceTrackerParams(bool robust = false, int itol = 20, double ftol = 1e-3, List<OpenCVForUnity.Size> ssize = null)
public FaceTrackerParams (bool robust = false, int itol = 20, double ftol = 1e-3, List<Size> ssize = null)
{
if (ssize == null)
{
this.ssize = new List<Size>();
this.ssize.Add(new OpenCVForUnity.Size(21, 21));
this.ssize.Add(new OpenCVForUnity.Size(11, 11));
this.ssize.Add(new OpenCVForUnity.Size(5, 5));
} else
{
if (ssize == null) {
this.ssize = new List<Size> ();
this.ssize.Add (new Size (21, 21));
this.ssize.Add (new Size (11, 11));
this.ssize.Add (new Size (5, 5));
} else {
this.ssize = ssize;
}
@ -41,7 +41,7 @@ namespace OpenCVFaceTracker
this.ftol = ftol;
// scaleFactor = 1.1f;
// minNeighbours = 2;
// minSize = new OpenCVForUnity.Size (30, 30);
// minSize = new Size (30, 30);
}
}
}

Просмотреть файл

@ -1,103 +1,91 @@
using UnityEngine;
using System.Collections;
using OpenCVForUnity;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
namespace OpenCVFaceTracker
{
/// <summary>
/// Patch model.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
/// <summary>
/// Patch model.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
public class PatchModel
{
Mat P; //normalised patch
Mat P;
//normalised patch
public PatchModel()
public PatchModel ()
{
}
//size of patch model
public Size patch_size()
public Size patch_size ()
{
return P.size();
return P.size ();
}
Mat convert_image(Mat im)
Mat convert_image (Mat im)
{
Mat I = null;
if (im.channels() == 1)
{
if (im.type() != CvType.CV_32F)
{
I = new Mat();
im.convertTo(I, CvType.CV_32F);
} else
{
if (im.channels () == 1) {
if (im.type () != CvType.CV_32F) {
I = new Mat ();
im.convertTo (I, CvType.CV_32F);
} else {
I = im;
}
} else
{
if (im.channels() == 3)
{
Mat img = new Mat();
Imgproc.cvtColor(im, img, Imgproc.COLOR_RGBA2GRAY);
if (img.type() != CvType.CV_32F)
{
I = new Mat();
img.convertTo(I, CvType.CV_32F);
} else
{
} else {
if (im.channels () == 3) {
Mat img = new Mat ();
Imgproc.cvtColor (im, img, Imgproc.COLOR_RGBA2GRAY);
if (img.type () != CvType.CV_32F) {
I = new Mat ();
img.convertTo (I, CvType.CV_32F);
} else {
I = img;
}
} else
{
Debug.Log("Unsupported image type!");
} else {
Debug.Log ("Unsupported image type!");
}
}
Core.add(I, new Scalar(1.0), I);
Core.log(I, I);
Core.add (I, new Scalar (1.0), I);
Core.log (I, I);
return I;
}
public Mat calc_response(Mat im, bool sum2one)
public Mat calc_response (Mat im, bool sum2one)
{
Mat I = convert_image(im);
Mat res = new Mat();
Mat I = convert_image (im);
Mat res = new Mat ();
Imgproc.matchTemplate(I, P, res, Imgproc.TM_CCOEFF_NORMED);
if (sum2one)
{
Core.normalize(res, res, 0, 1, Core.NORM_MINMAX);
Imgproc.matchTemplate (I, P, res, Imgproc.TM_CCOEFF_NORMED);
if (sum2one) {
Core.normalize (res, res, 0, 1, Core.NORM_MINMAX);
Core.divide(res, new Scalar(Core.sumElems(res).val [0]), res);
Core.divide (res, new Scalar (Core.sumElems (res).val [0]), res);
}
return res;
}
public void read(object root_json)
public void read (object root_json)
{
IDictionary pmodel_json = (IDictionary)root_json;
IDictionary P_json = (IDictionary)pmodel_json ["P"];
P = new Mat((int)(long)P_json ["rows"], (int)(long)P_json ["cols"], CvType.CV_32F);
P = new Mat ((int)(long)P_json ["rows"], (int)(long)P_json ["cols"], CvType.CV_32F);
// Debug.Log ("P " + P.ToString ());
IList P_data_json = (IList)P_json ["data"];
float[] P_data = new float[P.rows() * P.cols()];
for (int i = 0; i < P_data_json.Count; i++)
{
float[] P_data = new float[P.rows () * P.cols ()];
for (int i = 0; i < P_data_json.Count; i++) {
P_data [i] = (float)(double)P_data_json [i];
}
Utils.copyToMat(P_data, P);
Utils.copyToMat (P_data, P);
// Debug.Log ("P dump " + P.dump ());
}
}
}

Просмотреть файл

@ -1,88 +1,79 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
using OpenCVForUnity;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.ImgprocModule;
namespace OpenCVFaceTracker
{
/// <summary>
/// Patch models.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
/// <summary>
/// Patch models.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
public class PatchModels
{
public Mat reference;
//reference shape
public IList<PatchModel> patches;
//patch models
public Mat reference; //reference shape
public IList<PatchModel> patches; //patch models
public PatchModels()
public PatchModels ()
{
}
//number of patches
public int n_patches()
public int n_patches ()
{
return patches.Count;
}
public Point[] calc_peaks(Mat im,
Point[] points,
OpenCVForUnity.Size ssize)
public Point[] calc_peaks (Mat im,
Point[] points,
Size ssize)
{
int n = points.Length;
// Debug.Log ("n == int(patches.size()) " + patches.Count);
using (Mat pt = (new MatOfPoint2f (points)).reshape (1, 2 * n))
using (Mat S = calc_simil (pt))
using (Mat Si = inv_simil (S))
{
using (Mat pt = (new MatOfPoint2f (points)).reshape (1, 2 * n)) using (Mat S = calc_simil (pt)) using (Mat Si = inv_simil (S)) {
float[] pt_float = new float[pt.total()];
Utils.copyFromMat<float>(pt, pt_float);
int pt_cols = pt.cols();
float[] pt_float = new float[pt.total ()];
Utils.copyFromMat<float> (pt, pt_float);
int pt_cols = pt.cols ();
float[] S_float = new float[S.total()];
Utils.copyFromMat<float>(S, S_float);
int S_cols = S.cols();
float[] S_float = new float[S.total ()];
Utils.copyFromMat<float> (S, S_float);
int S_cols = S.cols ();
float[] A_float = new float[2 * 3];
Point[] pts = apply_simil(Si, points);
Point[] pts = apply_simil (Si, points);
for (int i = 0; i < n; i++)
{
for (int i = 0; i < n; i++) {
OpenCVForUnity.Size wsize = new OpenCVForUnity.Size(ssize.width + patches [i].patch_size().width, ssize.height + patches [i].patch_size().height);
using (Mat A = new Mat (2, 3, CvType.CV_32F))
{
Size wsize = new Size (ssize.width + patches [i].patch_size ().width, ssize.height + patches [i].patch_size ().height);
using (Mat A = new Mat (2, 3, CvType.CV_32F)) {
Utils.copyFromMat<float>(A, A_float);
int A_cols = A.cols();
Utils.copyFromMat<float> (A, A_float);
int A_cols = A.cols ();
A_float [0] = S_float [0];
A_float [1] = S_float [1];
A_float [1 * A_cols] = S_float [1 * S_cols];
A_float [(1 * A_cols) + 1] = S_float [(1 * S_cols) + 1];
A_float [2] = (float)(pt_float [(2 * pt_cols) * i] -
(A_float [0] * (wsize.width - 1) / 2 + A_float [1] * (wsize.height - 1) / 2));
(A_float [0] * (wsize.width - 1) / 2 + A_float [1] * (wsize.height - 1) / 2));
A_float [(1 * A_cols) + 2] = (float)(pt_float [((2 * pt_cols) * i) + 1] -
(A_float [1 * A_cols] * (wsize.width - 1) / 2 + A_float [(1 * A_cols) + 1] * (wsize.height - 1) / 2));
(A_float [1 * A_cols] * (wsize.width - 1) / 2 + A_float [(1 * A_cols) + 1] * (wsize.height - 1) / 2));
Utils.copyToMat(A_float, A);
Utils.copyToMat (A_float, A);
using (Mat I = new Mat ())
{
Imgproc.warpAffine(im, I, A, wsize, Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP);
using (Mat R = patches [i].calc_response (I, false))
{
using (Mat I = new Mat ()) {
Imgproc.warpAffine (im, I, A, wsize, Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP);
using (Mat R = patches [i].calc_response (I, false)) {
Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(R);
Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc (R);
pts [i].x = pts [i].x + minMaxLocResult.maxLoc.x - 0.5 * ssize.width;
pts [i].y = pts [i].y + minMaxLocResult.maxLoc.y - 0.5 * ssize.height;
}
@ -90,152 +81,144 @@ namespace OpenCVFaceTracker
}
}
return apply_simil(S, pts);
return apply_simil (S, pts);
}
}
Point[] apply_simil(Mat S, Point[] points)
Point[] apply_simil (Mat S, Point[] points)
{
float[] S_float = new float[S.total()];
Utils.copyFromMat<float>(S, S_float);
int S_cols = S.cols();
float[] S_float = new float[S.total ()];
Utils.copyFromMat<float> (S, S_float);
int S_cols = S.cols ();
int n = points.Length;
Point[] p = new Point[n];
for (int i = 0; i < n; i++)
{
p [i] = new Point();
for (int i = 0; i < n; i++) {
p [i] = new Point ();
p [i].x = S_float [0] * points [i].x + S_float [1] * points [i].y + S_float [2];
p [i].y = S_float [1 * S_cols] * points [i].x + S_float [(1 * S_cols) + 1] * points [i].y + S_float [(1 * S_cols) + 2];
}
return p;
}
Mat inv_simil(Mat S)
Mat inv_simil (Mat S)
{
float[] S_float = new float[S.total()];
Utils.copyFromMat<float>(S, S_float);
int S_cols = S.cols();
float[] S_float = new float[S.total ()];
Utils.copyFromMat<float> (S, S_float);
int S_cols = S.cols ();
Mat Si = new Mat(2, 3, CvType.CV_32F);
Mat Si = new Mat (2, 3, CvType.CV_32F);
float d = S_float [0] * S_float [(1 * S_cols) + 1] - S_float [1 * S_cols] * S_float [1];
float[] Si_float = new float[Si.total()];
Utils.copyFromMat<float>(Si, Si_float);
int Si_cols = Si.cols();
float[] Si_float = new float[Si.total ()];
Utils.copyFromMat<float> (Si, Si_float);
int Si_cols = Si.cols ();
Si_float [0] = S_float [(1 * S_cols) + 1] / d;
Si_float [1] = -S_float [1] / d;
Si_float [(1 * Si_cols) + 1] = S_float [0] / d;
Si_float [1 * Si_cols] = -S_float [1 * S_cols] / d;
Utils.copyToMat(Si_float, Si);
Utils.copyToMat (Si_float, Si);
Mat Ri = new Mat(Si, new OpenCVForUnity.Rect(0, 0, 2, 2));
Mat Ri = new Mat (Si, new OpenCVForUnity.CoreModule.Rect (0, 0, 2, 2));
Mat negaRi = new Mat();
Core.multiply(Ri, new Scalar(-1), negaRi);
Mat t = new Mat();
Core.gemm(negaRi, S.col(2), 1, new Mat(negaRi.rows(), negaRi.cols(), negaRi.type()), 0, t);
Mat negaRi = new Mat ();
Core.multiply (Ri, new Scalar (-1), negaRi);
Mat t = new Mat ();
Core.gemm (negaRi, S.col (2), 1, new Mat (negaRi.rows (), negaRi.cols (), negaRi.type ()), 0, t);
Mat St = Si.col(2);
t.copyTo(St);
Mat St = Si.col (2);
t.copyTo (St);
return Si;
}
Mat calc_simil(Mat pts)
Mat calc_simil (Mat pts)
{
float[] pts_float = new float[pts.total()];
Utils.copyFromMat<float>(pts, pts_float);
int pts_cols = pts.cols();
float[] pts_float = new float[pts.total ()];
Utils.copyFromMat<float> (pts, pts_float);
int pts_cols = pts.cols ();
//compute translation
int n = pts.rows() / 2;
int n = pts.rows () / 2;
float mx = 0, my = 0;
for (int i = 0; i < n; i++)
{
for (int i = 0; i < n; i++) {
mx += pts_float [(2 * pts_cols) * i];
my += pts_float [((2 * pts_cols) * i) + 1];
}
using (Mat p = new Mat (2 * n, 1, CvType.CV_32F))
{
using (Mat p = new Mat (2 * n, 1, CvType.CV_32F)) {
float[] p_float = new float[p.total()];
Utils.copyFromMat<float>(p, p_float);
int p_cols = p.cols();
float[] p_float = new float[p.total ()];
Utils.copyFromMat<float> (p, p_float);
int p_cols = p.cols ();
mx /= n;
my /= n;
for (int i = 0; i < n; i++)
{
for (int i = 0; i < n; i++) {
p_float [(2 * p_cols) * i] = pts_float [(2 * pts_cols) * i] - mx;
p_float [((2 * p_cols) * i) + 1] = pts_float [((2 * pts_cols) * i) + 1] - my;
}
Utils.copyToMat(p_float, p);
Utils.copyToMat (p_float, p);
//compute rotation and scale
float[] reference_float = new float[reference.total()];
Utils.copyFromMat<float>(reference, reference_float);
int reference_cols = reference.cols();
float[] reference_float = new float[reference.total ()];
Utils.copyFromMat<float> (reference, reference_float);
int reference_cols = reference.cols ();
float a = 0, b = 0, c = 0;
for (int i = 0; i < n; i++)
{
for (int i = 0; i < n; i++) {
a += reference_float [(2 * reference_cols) * i] * reference_float [(2 * reference_cols) * i] +
reference_float [((2 * reference_cols) * i) + 1] * reference_float [((2 * reference_cols) * i) + 1];
reference_float [((2 * reference_cols) * i) + 1] * reference_float [((2 * reference_cols) * i) + 1];
b += reference_float [(2 * reference_cols) * i] * p_float [(2 * p_cols) * i] +
reference_float [((2 * reference_cols) * i) + 1] * p_float [((2 * p_cols) * i) + 1];
reference_float [((2 * reference_cols) * i) + 1] * p_float [((2 * p_cols) * i) + 1];
c += reference_float [(2 * reference_cols) * i] * p_float [((2 * p_cols) * i) + 1] -
reference_float [((2 * reference_cols) * i) + 1] * p_float [(2 * p_cols) * i];
reference_float [((2 * reference_cols) * i) + 1] * p_float [(2 * p_cols) * i];
}
b /= a;
c /= a;
float scale = (float)Math.Sqrt(b * b + c * c), theta = (float)Math.Atan2(c, b);
float sc = scale * (float)Math.Cos(theta), ss = scale * (float)Math.Sin(theta);
float scale = (float)Math.Sqrt (b * b + c * c), theta = (float)Math.Atan2 (c, b);
float sc = scale * (float)Math.Cos (theta), ss = scale * (float)Math.Sin (theta);
Mat returnMat = new Mat(2, 3, CvType.CV_32F);
returnMat.put(0, 0, sc, -ss, mx, ss, sc, my);
Mat returnMat = new Mat (2, 3, CvType.CV_32F);
returnMat.put (0, 0, sc, -ss, mx, ss, sc, my);
return returnMat;
}
}
public void read(object root_json)
public void read (object root_json)
{
IDictionary pmodels_json = (IDictionary)root_json;
IDictionary reference_json = (IDictionary)pmodels_json ["reference"];
reference = new Mat((int)(long)reference_json ["rows"], (int)(long)reference_json ["cols"], CvType.CV_32F);
reference = new Mat ((int)(long)reference_json ["rows"], (int)(long)reference_json ["cols"], CvType.CV_32F);
// Debug.Log ("reference " + reference.ToString ());
IList data_json = (IList)reference_json ["data"];
float[] data = new float[reference.rows() * reference.cols()];
float[] data = new float[reference.rows () * reference.cols ()];
int count = data_json.Count;
for (int i = 0; i < count; i++)
{
for (int i = 0; i < count; i++) {
data [i] = (float)(double)data_json [i];
}
Utils.copyToMat(data, reference);
Utils.copyToMat (data, reference);
// Debug.Log ("reference dump " + reference.dump ());
int n = (int)(long)pmodels_json ["n_patches"];
patches = new List<PatchModel>(n);
patches = new List<PatchModel> (n);
for (int i = 0; i < n; i++)
{
PatchModel patchModel = new PatchModel();
patchModel.read(pmodels_json ["patch " + i]);
for (int i = 0; i < n; i++) {
PatchModel patchModel = new PatchModel ();
patchModel.read (pmodels_json ["patch " + i]);
patches.Add(patchModel);
patches.Add (patchModel);
}
}
}
}

Просмотреть файл

@ -1,197 +1,174 @@
using UnityEngine;
using System.Collections;
using System;
using OpenCVForUnity;
using System.Collections;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
namespace OpenCVFaceTracker
{
/// <summary>
/// Shape model.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
/// <summary>
/// Shape model.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary>
public class ShapeModel
{
public Mat p;
//parameter vector (kx1) CV_32F
public Mat V;
//shape basis (2nxk) CV_32F
public Mat e;
//parameter variance (kx1) CV_32F
public Mat C;
//connectivity (cx2) CV_32S
public Mat p; //parameter vector (kx1) CV_32F
public Mat V; //shape basis (2nxk) CV_32F
public Mat e; //parameter variance (kx1) CV_32F
public Mat C; //connectivity (cx2) CV_32S
public ShapeModel()
public ShapeModel ()
{
}
public int npts()
public int npts ()
{
//number of points in shape model
return V.rows() / 2;
return V.rows () / 2;
}
public void calc_params(Point[] pts, Mat weight, float c_factor)
public void calc_params (Point[] pts, Mat weight, float c_factor)
{
int n = pts.Length;
// assert(V.rows == 2*n);
// Debug.Log ("V.rows == 2*n " + V.rows () + " " + 2 * n);
using (Mat s = (new MatOfPoint2f (pts)).reshape (1, 2 * n))
{ //point set to vector format
using (Mat s = (new MatOfPoint2f (pts)).reshape (1, 2 * n)) { //point set to vector format
if (weight.total() == 0)
{
Core.gemm(V.t(), s, 1, new Mat(), 0, p); //simple projection
if (weight.total () == 0) {
Core.gemm (V.t (), s, 1, new Mat (), 0, p); //simple projection
} else
{ //scaled projection
if (weight.rows() != n)
{
Debug.Log("Invalid weighting matrix");
} else { //scaled projection
if (weight.rows () != n) {
Debug.Log ("Invalid weighting matrix");
}
float[] weight_float = new float[weight.total()];
Utils.copyFromMat<float>(weight, weight_float);
int weight_cols = weight.cols();
float[] weight_float = new float[weight.total ()];
Utils.copyFromMat<float> (weight, weight_float);
int weight_cols = weight.cols ();
int K = V.cols();
using (Mat H = Mat.zeros (K, K, CvType.CV_32F))
using (Mat g = Mat.zeros (K, 1, CvType.CV_32F))
{
for (int i = 0; i < n; i++)
{
using (Mat v = new Mat (V, new OpenCVForUnity.Rect (0, 2 * i, K, 2)))
using (Mat tmpMat1 = new Mat ())
using (Mat tmpMat2 = new Mat ())
using (Mat tmpMat3 = new Mat ())
{
int K = V.cols ();
using (Mat H = Mat.zeros (K, K, CvType.CV_32F)) using (Mat g = Mat.zeros (K, 1, CvType.CV_32F)) {
for (int i = 0; i < n; i++) {
using (Mat v = new Mat (V, new OpenCVForUnity.CoreModule.Rect (0, 2 * i, K, 2))) using (Mat tmpMat1 = new Mat ()) using (Mat tmpMat2 = new Mat ()) using (Mat tmpMat3 = new Mat ()) {
float w = weight_float [i * weight_cols];
Core.multiply(v.t(), new Scalar(w), tmpMat1);
Core.multiply (v.t (), new Scalar (w), tmpMat1);
Core.gemm(tmpMat1, v, 1, new Mat(), 0, tmpMat2);
Core.add(H, tmpMat2, H);
Core.gemm (tmpMat1, v, 1, new Mat (), 0, tmpMat2);
Core.add (H, tmpMat2, H);
Core.gemm(tmpMat1, new MatOfPoint2f(pts [i]).reshape(1, 2), 1, new Mat(), 0, tmpMat3);
Core.add(g, tmpMat3, g);
Core.gemm (tmpMat1, new MatOfPoint2f (pts [i]).reshape (1, 2), 1, new Mat (), 0, tmpMat3);
Core.add (g, tmpMat3, g);
}
}
Core.solve(H, g, p, Core.DECOMP_SVD);
Core.solve (H, g, p, Core.DECOMP_SVD);
}
}
}
clamp(c_factor); //clamp resulting parameters
clamp (c_factor); //clamp resulting parameters
}
public Point[] calc_shape()
public Point[] calc_shape ()
{
using (Mat s = new Mat ())
{
Core.gemm(V, p, 1, new Mat(), 0, s);
using (Mat s = new Mat ()) {
Core.gemm (V, p, 1, new Mat (), 0, s);
float[] s_float = new float[s.total()];
Utils.copyFromMat<float>(s, s_float);
int s_cols = s.cols();
float[] s_float = new float[s.total ()];
Utils.copyFromMat<float> (s, s_float);
int s_cols = s.cols ();
int n = s.rows() / 2;
int n = s.rows () / 2;
Point[] pts = new Point[n];
for (int i = 0; i < n; i++)
{
pts [i] = new Point(s_float [(2 * s_cols) * i], s_float [((2 * s_cols) * i) + 1]);
for (int i = 0; i < n; i++) {
pts [i] = new Point (s_float [(2 * s_cols) * i], s_float [((2 * s_cols) * i) + 1]);
}
return pts;
}
}
void clamp(float c)
void clamp (float c)
{
float[] p_float = new float[p.total()];
Utils.copyFromMat<float>(p, p_float);
int p_cols = p.cols();
float[] p_float = new float[p.total ()];
Utils.copyFromMat<float> (p, p_float);
int p_cols = p.cols ();
float[] e_float = new float[e.total()];
Utils.copyFromMat<float>(e, e_float);
int e_cols = e.cols();
float[] e_float = new float[e.total ()];
Utils.copyFromMat<float> (e, e_float);
int e_cols = e.cols ();
double scale = p_float [0];
int rows = e.rows();
for (int i = 0; i < rows; i++)
{
if (e_float [i * e_cols] < 0)
continue;
float v = c * (float)Math.Sqrt(e_float [i * e_cols]);
if (Math.Abs(p_float [i * p_cols] / scale) > v)
{
if (p_float [i * p_cols] > 0)
{
int rows = e.rows ();
for (int i = 0; i < rows; i++) {
if (e_float [i * e_cols] < 0) continue;
float v = c * (float)Math.Sqrt (e_float [i * e_cols]);
if (Math.Abs (p_float [i * p_cols] / scale) > v) {
if (p_float [i * p_cols] > 0) {
p_float [i * p_cols] = (float)(v * scale);
} else
{
} else {
p_float [i * p_cols] = (float)(-v * scale);
}
}
}
Utils.copyToMat(p_float, p);
Utils.copyToMat (p_float, p);
}
public void read(object root_json)
public void read (object root_json)
{
IDictionary smodel_json = (IDictionary)root_json;
IDictionary V_json = (IDictionary)smodel_json ["V"];
V = new Mat((int)(long)V_json ["rows"], (int)(long)V_json ["cols"], CvType.CV_32F);
V = new Mat ((int)(long)V_json ["rows"], (int)(long)V_json ["cols"], CvType.CV_32F);
// Debug.Log ("V " + V.ToString ());
IList V_data_json = (IList)V_json ["data"];
float[] V_data = new float[V.rows() * V.cols()];
for (int i = 0; i < V_data_json.Count; i++)
{
float[] V_data = new float[V.rows () * V.cols ()];
for (int i = 0; i < V_data_json.Count; i++) {
V_data [i] = (float)(double)V_data_json [i];
}
Utils.copyToMat(V_data, V);
Utils.copyToMat (V_data, V);
// Debug.Log ("V dump " + V.dump ());
IDictionary e_json = (IDictionary)smodel_json ["e"];
e = new Mat((int)(long)e_json ["rows"], (int)(long)e_json ["cols"], CvType.CV_32F);
e = new Mat ((int)(long)e_json ["rows"], (int)(long)e_json ["cols"], CvType.CV_32F);
// Debug.Log ("e " + e.ToString ());
IList e_data_json = (IList)e_json ["data"];
float[] e_data = new float[e.rows() * e.cols()];
for (int i = 0; i < e_data_json.Count; i++)
{
float[] e_data = new float[e.rows () * e.cols ()];
for (int i = 0; i < e_data_json.Count; i++) {
e_data [i] = (float)(double)e_data_json [i];
}
Utils.copyToMat(e_data, e);
Utils.copyToMat (e_data, e);
// Debug.Log ("e dump " + e.dump ());
IDictionary C_json = (IDictionary)smodel_json ["C"];
C = new Mat((int)(long)C_json ["rows"], (int)(long)C_json ["cols"], CvType.CV_32S);
C = new Mat ((int)(long)C_json ["rows"], (int)(long)C_json ["cols"], CvType.CV_32S);
// Debug.Log ("C " + C.ToString ());
IList C_data_json = (IList)C_json ["data"];
int[] C_data = new int[C.rows() * C.cols()];
for (int i = 0; i < C_data_json.Count; i++)
{
int[] C_data = new int[C.rows () * C.cols ()];
for (int i = 0; i < C_data_json.Count; i++) {
C_data [i] = (int)(long)C_data_json [i];
}
Utils.copyToMat(C_data, C);
Utils.copyToMat (C_data, C);
// Debug.Log ("C dump " + C.dump ());
p = Mat.zeros(e.rows(), 1, CvType.CV_32F);
p = Mat.zeros (e.rows (), 1, CvType.CV_32F);
}
}
}

Просмотреть файл

@ -1,7 +1,7 @@
fileFormatVersion: 2
guid: 0a6e8ede0a97c7a469cdfd38f9fca45d
guid: 9ffdcb63dd6f01f40b19133c6e88fd9f
folderAsset: yes
timeCreated: 1486748112
timeCreated: 1542356413
licenseType: Free
DefaultImporter:
userData:

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 75fa1f1affb3bb94c818f6b9e5735924
folderAsset: yes
timeCreated: 1542356354
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

До

Ширина:  |  Высота:  |  Размер: 22 KiB

После

Ширина:  |  Высота:  |  Размер: 22 KiB

Просмотреть файл

До

Ширина:  |  Высота:  |  Размер: 34 KiB

После

Ширина:  |  Высота:  |  Размер: 34 KiB

Просмотреть файл

До

Ширина:  |  Высота:  |  Размер: 192 KiB

После

Ширина:  |  Высота:  |  Размер: 192 KiB

Просмотреть файл

До

Ширина:  |  Высота:  |  Размер: 30 KiB

После

Ширина:  |  Высота:  |  Размер: 30 KiB

Просмотреть файл

@ -0,0 +1,798 @@
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.Calib3dModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVFaceTracker;
namespace FaceTrackerExample
{
/// <summary>
/// Face Tracker AR Example
/// This example was referring to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
/// and use effect asset from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
/// </summary>
    [RequireComponent (typeof(WebCamTextureToMatHelper))]
    public class FaceTrackerARExample : MonoBehaviour
    {
        /// <summary>
        /// Whether the tracked face points are drawn onto the camera image.
        /// </summary>
        public bool isShowingFacePoints;

        /// <summary>
        /// The UI toggle bound to isShowingFacePoints.
        /// </summary>
        public Toggle isShowingFacePointsToggle;

        /// <summary>
        /// Whether the axes object is activated when a valid pose is found.
        /// </summary>
        public bool isShowingAxes;

        /// <summary>
        /// The UI toggle bound to isShowingAxes.
        /// </summary>
        public Toggle isShowingAxesToggle;

        /// <summary>
        /// Whether the head object is activated when a valid pose is found.
        /// </summary>
        public bool isShowingHead;

        /// <summary>
        /// The UI toggle bound to isShowingHead.
        /// </summary>
        public Toggle isShowingHeadToggle;

        /// <summary>
        /// Whether the eye/mouth effect objects are activated when a valid pose is found.
        /// </summary>
        public bool isShowingEffects;

        /// <summary>
        /// The UI toggle bound to isShowingEffects.
        /// </summary>
        public Toggle isShowingEffectsToggle;

        /// <summary>
        /// The auto reset mode. If true, face detection runs every frame and tracking is
        /// reset whenever no face is detected in a frame.
        /// </summary>
        public bool isAutoResetMode;

        /// <summary>
        /// The UI toggle bound to isAutoResetMode.
        /// </summary>
        public Toggle isAutoResetModeToggle;

        /// <summary>
        /// The axes overlay object placed at the estimated head pose.
        /// </summary>
        public GameObject axes;

        /// <summary>
        /// The head overlay object placed at the estimated head pose.
        /// </summary>
        public GameObject head;

        /// <summary>
        /// The right eye effect object.
        /// </summary>
        public GameObject rightEye;

        /// <summary>
        /// The left eye effect object.
        /// </summary>
        public GameObject leftEye;

        /// <summary>
        /// The mouth effect object.
        /// </summary>
        public GameObject mouth;

        /// <summary>
        /// The rvec noise filter range. A new rotation vector whose absolute difference
        /// from the previously accepted one exceeds this value in any component is
        /// rejected as noise (see the CMP_GT compare in Update ()).
        /// </summary>
        [Range (0, 50)]
        public float
            rvecNoiseFilterRange = 8;

        /// <summary>
        /// The tvec noise filter range. Same per-component rejection rule as
        /// rvecNoiseFilterRange, applied to the translation vector.
        /// </summary>
        [Range (0, 360)]
        public float
            tvecNoiseFilterRange = 90;

        /// <summary>
        /// The grayscale image used for detection and tracking.
        /// </summary>
        Mat grayMat;

        /// <summary>
        /// The texture the camera image is rendered into.
        /// </summary>
        Texture2D texture;

        /// <summary>
        /// The cascade classifier used for the initial face detection.
        /// </summary>
        CascadeClassifier cascade;

        /// <summary>
        /// The face tracker.
        /// </summary>
        FaceTracker faceTracker;

        /// <summary>
        /// The face tracker parameters.
        /// </summary>
        FaceTrackerParams faceTrackerParams;

        /// <summary>
        /// The AR camera whose field of view is matched to the webcam intrinsics.
        /// </summary>
        public Camera ARCamera;

        /// <summary>
        /// The camera intrinsic matrix (3x3, CV_64FC1).
        /// </summary>
        Mat camMatrix;

        /// <summary>
        /// The distortion coefficients (all zero in this example).
        /// </summary>
        MatOfDouble distCoeffs;

        /// <summary>
        /// Matrix that flips the Y axis (OpenCV right-handed to Unity left-handed).
        /// </summary>
        Matrix4x4 invertYM;

        /// <summary>
        /// The pose transformation matrix assembled from rotM and tvec.
        /// </summary>
        Matrix4x4 transformationM = new Matrix4x4 ();

        /// <summary>
        /// Matrix that flips the Z axis.
        /// </summary>
        Matrix4x4 invertZM;

        /// <summary>
        /// The final AR pose matrix applied to the camera or the AR game object.
        /// </summary>
        Matrix4x4 ARM;

        /// <summary>
        /// The AR game object placed at the estimated pose.
        /// </summary>
        public GameObject ARGameObject;

        /// <summary>
        /// If true, the AR camera is moved instead of the AR game object.
        /// </summary>
        public bool shouldMoveARCamera;

        /// <summary>
        /// The 3d face object points (model-space landmarks fed to solvePnP).
        /// </summary>
        MatOfPoint3f objectPoints;

        /// <summary>
        /// The image points (tracked 2D landmarks fed to solvePnP).
        /// </summary>
        MatOfPoint2f imagePoints;

        /// <summary>
        /// The rotation vector estimated by solvePnP.
        /// </summary>
        Mat rvec;

        /// <summary>
        /// The translation vector estimated by solvePnP.
        /// </summary>
        Mat tvec;

        /// <summary>
        /// The 3x3 rotation matrix converted from rvec via Rodrigues.
        /// </summary>
        Mat rotM;

        /// <summary>
        /// The rvec accepted in a previous frame; reference for the noise filter.
        /// </summary>
        Mat oldRvec;

        /// <summary>
        /// The tvec accepted in a previous frame; reference for the noise filter.
        /// </summary>
        Mat oldTvec;

        /// <summary>
        /// The web cam texture to mat helper.
        /// </summary>
        WebCamTextureToMatHelper webCamTextureToMatHelper;

        /// <summary>
        /// The tracker_model.json filepath.
        /// </summary>
        private string tracker_model_json_filepath;

        /// <summary>
        /// The haarcascade_frontalface_alt.xml filepath.
        /// </summary>
        private string haarcascade_frontalface_alt_xml_filepath;

#if UNITY_WEBGL && !UNITY_EDITOR
        // Kept as a field so OnDisable can stop and dispose the coroutine.
        IEnumerator getFilePath_Coroutine;
#endif

        // Use this for initialization
        void Start ()
        {
            webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();

            // Reflect the serialized flags in the UI toggles.
            isShowingFacePointsToggle.isOn = isShowingFacePoints;
            isShowingAxesToggle.isOn = isShowingAxes;
            isShowingHeadToggle.isOn = isShowingHead;
            isShowingEffectsToggle.isOn = isShowingEffects;
            isAutoResetModeToggle.isOn = isAutoResetMode;

#if UNITY_WEBGL && !UNITY_EDITOR
            // On WebGL, StreamingAssets must be fetched asynchronously.
            getFilePath_Coroutine = GetFilePath ();
            StartCoroutine (getFilePath_Coroutine);
#else
            tracker_model_json_filepath = Utils.getFilePath ("tracker_model.json");
            haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath ("haarcascade_frontalface_alt.xml");
            Run ();
#endif
        }

#if UNITY_WEBGL && !UNITY_EDITOR
        /// <summary>
        /// Resolves the model and cascade file paths asynchronously, then starts the example.
        /// </summary>
        private IEnumerator GetFilePath ()
        {
            var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync ("tracker_model.json", (result) => {
                tracker_model_json_filepath = result;
            });
            yield return getFilePathAsync_0_Coroutine;

            var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
                haarcascade_frontalface_alt_xml_filepath = result;
            });
            yield return getFilePathAsync_1_Coroutine;

            getFilePath_Coroutine = null;

            Run ();
        }
#endif

        /// <summary>
        /// Initializes the pose-estimation buffers, the face tracker, the cascade
        /// classifier, and the webcam helper.
        /// </summary>
        private void Run ()
        {
            //set 3d face object points.
            // NOTE(review): ordering must match imagePoints.fromArray () in Update ().
            objectPoints = new MatOfPoint3f (new Point3 (-31, 72, 86),//l eye
                new Point3 (31, 72, 86),//r eye
                new Point3 (0, 40, 114),//nose
                new Point3 (-20, 15, 90),//l mouth
                new Point3 (20, 15, 90)//r mouth
//                ,
//                new Point3 (-70, 60, -9),//l ear
//                new Point3 (70, 60, -9)//r ear
            );
            imagePoints = new MatOfPoint2f ();
            rvec = new Mat ();
            tvec = new Mat ();
            rotM = new Mat (3, 3, CvType.CV_64FC1);

            //initialize FaceTracker
            faceTracker = new FaceTracker (tracker_model_json_filepath);
            //initialize FaceTrackerParams
            faceTrackerParams = new FaceTrackerParams ();

            cascade = new CascadeClassifier ();
            cascade.load (haarcascade_frontalface_alt_xml_filepath);
//            if (cascade.empty())
//            {
//                Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//            }

#if UNITY_ANDROID && !UNITY_EDITOR
            // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
            webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
            webCamTextureToMatHelper.Initialize ();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper initialized event.
        /// Sets up the display texture, the camera intrinsics, and the AR camera FOV.
        /// </summary>
        public void OnWebCamTextureToMatHelperInitialized ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperInitialized");

            Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

            texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

            gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
            Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

            float width = webCamTextureMat.width ();
            float height = webCamTextureMat.height ();

            float imageSizeScale = 1.0f;
            float widthScale = (float)Screen.width / width;
            float heightScale = (float)Screen.height / height;
            // Fit the display quad to the screen; the narrower screen dimension wins.
            if (widthScale < heightScale) {
                Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
                imageSizeScale = (float)Screen.height / (float)Screen.width;
            } else {
                Camera.main.orthographicSize = height / 2;
            }

            //set cameraparam
            // Approximate pinhole intrinsics: focal length = max image dimension,
            // principal point = image center, zero skew.
            int max_d = (int)Mathf.Max (width, height);
            double fx = max_d;
            double fy = max_d;
            double cx = width / 2.0f;
            double cy = height / 2.0f;
            camMatrix = new Mat (3, 3, CvType.CV_64FC1);
            camMatrix.put (0, 0, fx);
            camMatrix.put (0, 1, 0);
            camMatrix.put (0, 2, cx);
            camMatrix.put (1, 0, 0);
            camMatrix.put (1, 1, fy);
            camMatrix.put (1, 2, cy);
            camMatrix.put (2, 0, 0);
            camMatrix.put (2, 1, 0);
            camMatrix.put (2, 2, 1.0f);
            Debug.Log ("camMatrix " + camMatrix.dump ());

            distCoeffs = new MatOfDouble (0, 0, 0, 0);
            Debug.Log ("distCoeffs " + distCoeffs.dump ());

            //calibration camera
            Size imageSize = new Size (width * imageSizeScale, height * imageSizeScale);
            double apertureWidth = 0;
            double apertureHeight = 0;
            double[] fovx = new double[1];
            double[] fovy = new double[1];
            double[] focalLength = new double[1];
            Point principalPoint = new Point (0, 0);
            double[] aspectratio = new double[1];

            Calib3d.calibrationMatrixValues (camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);

            Debug.Log ("imageSize " + imageSize.ToString ());
            Debug.Log ("apertureWidth " + apertureWidth);
            Debug.Log ("apertureHeight " + apertureHeight);
            Debug.Log ("fovx " + fovx [0]);
            Debug.Log ("fovy " + fovy [0]);
            Debug.Log ("focalLength " + focalLength [0]);
            Debug.Log ("principalPoint " + principalPoint.ToString ());
            Debug.Log ("aspectratio " + aspectratio [0]);

            //To convert the difference of the FOV value of the OpenCV and Unity.
            double fovXScale = (2.0 * Mathf.Atan ((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2 ((float)cx, (float)fx) + Mathf.Atan2 ((float)(imageSize.width - cx), (float)fx));
            double fovYScale = (2.0 * Mathf.Atan ((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2 ((float)cy, (float)fy) + Mathf.Atan2 ((float)(imageSize.height - cy), (float)fy));

            Debug.Log ("fovXScale " + fovXScale);
            Debug.Log ("fovYScale " + fovYScale);

            //Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
            if (widthScale < heightScale) {
                ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
            } else {
                ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
            }

            invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
            Debug.Log ("invertYM " + invertYM.ToString ());
            invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
            Debug.Log ("invertZM " + invertZM.ToString ());

            grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);

            // All overlays start hidden until a pose has been accepted.
            axes.SetActive (false);
            head.SetActive (false);
            rightEye.SetActive (false);
            leftEye.SetActive (false);
            mouth.SetActive (false);
        }

        /// <summary>
        /// Raises the webcam texture to mat helper disposed event.
        /// Resets tracking and releases the Mats owned by this component.
        /// </summary>
        public void OnWebCamTextureToMatHelperDisposed ()
        {
            Debug.Log ("OnWebCamTextureToMatHelperDisposed");

            faceTracker.reset ();

            grayMat.Dispose ();
            camMatrix.Dispose ();
            distCoeffs.Dispose ();
        }

        /// <summary>
        /// Raises the webcam texture to mat helper error occurred event.
        /// </summary>
        /// <param name="errorCode">Error code.</param>
        public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
        {
            Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
        }

        // Update is called once per frame.
        // Per frame: (1) optionally (re-)detect faces, (2) track landmarks,
        // (3) estimate and noise-filter the head pose, (4) position the AR overlays.
        void Update ()
        {
            if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

                Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

                //convert image to greyscale
                Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

                // Detect when tracking has no points yet, or every frame in auto reset mode.
                if (isAutoResetMode || faceTracker.getPoints ().Count <= 0) {
//                    Debug.Log ("detectFace");

                    //convert image to greyscale
                    using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) {

                        Imgproc.equalizeHist (grayMat, equalizeHistMat);

                        cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
                        | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                        | Objdetect.CASCADE_SCALE_IMAGE, new Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                        if (faces.rows () > 0) {
//                            Debug.Log ("faces " + faces.dump ());

                            List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList ();
                            List<Point[]> pointsList = faceTracker.getPoints ();

                            if (isAutoResetMode) {
                                //add initial face points from MatOfRect
                                if (pointsList.Count <= 0) {
                                    faceTracker.addPoints (faces);
//                                    Debug.Log ("reset faces ");
                                } else {
                                    // Drop tracked faces whose nose point left the central
                                    // region of the newly detected rectangle.
                                    // NOTE(review): RemoveAt inside a forward loop skips the
                                    // next element after a removal — confirm this is intended.
                                    for (int i = 0; i < rectsList.Count; i++) {

                                        OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect (rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                        //It determines whether nose point has been included in trackRect.
                                        if (i < pointsList.Count && !trackRect.contains (pointsList [i] [67])) {
                                            rectsList.RemoveAt (i);
                                            pointsList.RemoveAt (i);
//                                            Debug.Log ("remove " + i);
                                        }
                                        Imgproc.rectangle (rgbaMat, new Point (trackRect.x, trackRect.y), new Point (trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar (0, 0, 255, 255), 2);
                                    }
                                }
                            } else {
                                faceTracker.addPoints (faces);
                            }

                            //draw face rect
                            for (int i = 0; i < rectsList.Count; i++) {
                                Imgproc.rectangle (rgbaMat, new Point (rectsList [i].x, rectsList [i].y), new Point (rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar (255, 0, 0, 255), 2);
                            }

                        } else {
                            if (isAutoResetMode) {
                                // No face this frame: drop tracking state and hide every overlay.
                                faceTracker.reset ();

                                rightEye.SetActive (false);
                                leftEye.SetActive (false);
                                head.SetActive (false);
                                mouth.SetActive (false);
                                axes.SetActive (false);
                            }
                        }
                    }
                }

                //track face points.if face points <= 0, always return false.
                if (faceTracker.track (grayMat, faceTrackerParams)) {
                    if (isShowingFacePoints) faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

                    Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                    // Only the first tracked face is used for pose estimation.
                    Point[] points = faceTracker.getPoints () [0];

                    if (points.Length > 0) {
//                        for (int i = 0; i < points.Length; i++)
//                        {
//                            Imgproc.putText(rgbaMat, "" + i, new Point(points [i].x, points [i].y), Imgproc.FONT_HERSHEY_SIMPLEX, 0.3, new Scalar(0, 0, 255, 255), 2, Imgproc.LINE_AA, false);
//                        }

                        // Landmark indices must pair up with objectPoints set in Run ().
                        imagePoints.fromArray (
                            points [31],//l eye
                            points [36],//r eye
                            points [67],//nose
                            points [48],//l mouth
                            points [54] //r mouth
//                            ,
//                            points [0],//l ear
//                            points [14]//r ear
                        );

                        // Estimate the head pose from the 3D/2D correspondences.
                        Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);

                        bool isRefresh = false;

                        // Accept the pose only when the face lies in front of the camera and
                        // within a distance bound scaled to the actual capture width.
                        if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)rgbaMat.cols () / (float)webCamTextureToMatHelper.requestedWidth)) {

                            isRefresh = true;

                            if (oldRvec == null) {
                                oldRvec = new Mat ();
                                rvec.copyTo (oldRvec);
                            }
                            if (oldTvec == null) {
                                oldTvec = new Mat ();
                                tvec.copyTo (oldTvec);
                            }

                            //filter Rvec Noise: reject if any component jumped more than rvecNoiseFilterRange.
                            using (Mat absDiffRvec = new Mat ()) {
                                Core.absdiff (rvec, oldRvec, absDiffRvec);

//                                Debug.Log ("absDiffRvec " + absDiffRvec.dump());

                                using (Mat cmpRvec = new Mat ()) {
                                    Core.compare (absDiffRvec, new Scalar (rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);

                                    if (Core.countNonZero (cmpRvec) > 0) isRefresh = false;
                                }
                            }

                            //filter Tvec Noise: reject if any component jumped more than tvecNoiseFilterRange.
                            using (Mat absDiffTvec = new Mat ()) {
                                Core.absdiff (tvec, oldTvec, absDiffTvec);

//                                Debug.Log ("absDiffTvec " + absDiffTvec.dump());

                                using (Mat cmpTvec = new Mat ()) {
                                    Core.compare (absDiffTvec, new Scalar (tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);

                                    if (Core.countNonZero (cmpTvec) > 0) isRefresh = false;
                                }
                            }
                        }

                        if (isRefresh) {

                            if (isShowingEffects) rightEye.SetActive (true);
                            if (isShowingEffects) leftEye.SetActive (true);
                            if (isShowingHead) head.SetActive (true);
                            if (isShowingAxes) axes.SetActive (true);

                            // Heuristic toggle for the mouth effect based on ratios of mouth/eye
                            // landmark distances — presumably a mouth-open test; confirm against
                            // the tracker model's landmark layout.
                            if ((Mathf.Abs ((float)(points [48].x - points [56].x)) < Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.2
                                && Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.9)
                                || Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {

                                if (isShowingEffects) mouth.SetActive (true);
                            } else {
                                if (isShowingEffects) mouth.SetActive (false);
                            }

                            // Remember the accepted pose for the next frame's noise filter.
                            rvec.copyTo (oldRvec);
                            tvec.copyTo (oldTvec);

                            Calib3d.Rodrigues (rvec, rotM);

                            transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
                            transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
                            transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
                            transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));

                            // right-handed coordinates system (OpenCV) to left-handed one (Unity)
                            ARM = invertYM * transformationM;

                            // Apply Z-axis inverted matrix.
                            ARM = ARM * invertZM;

                            if (shouldMoveARCamera) {

                                if (ARGameObject != null) {
                                    // Move the camera so the AR object appears at the pose.
                                    ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
                                    ARUtils.SetTransformFromMatrix (ARCamera.transform, ref ARM);
                                    ARGameObject.SetActive (true);
                                }
                            } else {
                                ARM = ARCamera.transform.localToWorldMatrix * ARM;

                                if (ARGameObject != null) {
                                    ARUtils.SetTransformFromMatrix (ARGameObject.transform, ref ARM);
                                    ARGameObject.SetActive (true);
                                }
                            }
                        }
                    }
                }

//                Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

                Utils.fastMatToTexture2D (rgbaMat, texture);
            }

            // Manual reset: space key or any touch clears tracking state and hides the overlays.
            if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {

                faceTracker.reset ();
                if (oldRvec != null) {
                    oldRvec.Dispose ();
                    oldRvec = null;
                }
                if (oldTvec != null) {
                    oldTvec.Dispose ();
                    oldTvec = null;
                }

                rightEye.SetActive (false);
                leftEye.SetActive (false);
                head.SetActive (false);
                mouth.SetActive (false);
                axes.SetActive (false);
            }
        }

        /// <summary>
        /// Raises the disable event. Releases the webcam helper, the cascade, and
        /// (on WebGL) any still-running file path coroutine.
        /// </summary>
        void OnDisable ()
        {
            webCamTextureToMatHelper.Dispose ();

            if (cascade != null) cascade.Dispose ();

#if UNITY_WEBGL && !UNITY_EDITOR
            if (getFilePath_Coroutine != null) {
                StopCoroutine (getFilePath_Coroutine);
                ((IDisposable)getFilePath_Coroutine).Dispose ();
            }
#endif
        }

        /// <summary>
        /// Raises the back button event.
        /// </summary>
        public void OnBackButton ()
        {
            SceneManager.LoadScene ("FaceTrackerExample");
        }

        /// <summary>
        /// Raises the play button event.
        /// </summary>
        public void OnPlayButton ()
        {
            webCamTextureToMatHelper.Play ();
        }

        /// <summary>
        /// Raises the pause button event.
        /// </summary>
        public void OnPauseButton ()
        {
            webCamTextureToMatHelper.Pause ();
        }

        /// <summary>
        /// Raises the stop button event.
        /// </summary>
        public void OnStopButton ()
        {
            webCamTextureToMatHelper.Stop ();
        }

        /// <summary>
        /// Raises the change camera button event.
        /// </summary>
        public void OnChangeCameraButton ()
        {
            webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing ();
        }

        /// <summary>
        /// Raises the is showing face points toggle event.
        /// </summary>
        public void OnIsShowingFacePointsToggle ()
        {
            if (isShowingFacePointsToggle.isOn) {
                isShowingFacePoints = true;
            } else {
                isShowingFacePoints = false;
            }
        }

        /// <summary>
        /// Raises the is showing axes toggle event.
        /// </summary>
        public void OnIsShowingAxesToggle ()
        {
            if (isShowingAxesToggle.isOn) {
                isShowingAxes = true;
            } else {
                isShowingAxes = false;
                axes.SetActive (false);
            }
        }

        /// <summary>
        /// Raises the is showing head toggle event.
        /// </summary>
        public void OnIsShowingHeadToggle ()
        {
            if (isShowingHeadToggle.isOn) {
                isShowingHead = true;
            } else {
                isShowingHead = false;
                head.SetActive (false);
            }
        }

        /// <summary>
        /// Raises the is showing effects toggle event.
        /// </summary>
        public void OnIsShowingEffectsToggle ()
        {
            if (isShowingEffectsToggle.isOn) {
                isShowingEffects = true;
            } else {
                isShowingEffects = false;
                rightEye.SetActive (false);
                leftEye.SetActive (false);
                mouth.SetActive (false);
            }
        }

        /// <summary>
        /// Raises the change auto reset mode toggle event.
        /// </summary>
        public void OnIsAutoResetModeToggle ()
        {
            if (isAutoResetModeToggle.isOn) {
                isAutoResetMode = true;
            } else {
                isAutoResetMode = false;
            }
        }
    }
}

Просмотреть файл

@ -0,0 +1,88 @@
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
using OpenCVForUnity.CoreModule;
namespace FaceTrackerExample
{
/// <summary>
/// FaceTracker Example
/// </summary>
public class FaceTrackerExample : MonoBehaviour
{
public Text exampleTitle;
public Text versionInfo;
public ScrollRect scrollRect;
static float verticalNormalizedPosition = 1f;
// Use this for initialization
void Start ()
{
exampleTitle.text = "FaceTracker Example " + Application.version;
versionInfo.text = Core.NATIVE_LIBRARY_NAME + " " + OpenCVForUnity.UnityUtils.Utils.getVersion () + " (" + Core.VERSION + ")";
versionInfo.text += " / UnityEditor " + Application.unityVersion;
versionInfo.text += " / ";
#if UNITY_EDITOR
versionInfo.text += "Editor";
#elif UNITY_STANDALONE_WIN
versionInfo.text += "Windows";
#elif UNITY_STANDALONE_OSX
versionInfo.text += "Mac OSX";
#elif UNITY_STANDALONE_LINUX
versionInfo.text += "Linux";
#elif UNITY_ANDROID
versionInfo.text += "Android";
#elif UNITY_IOS
versionInfo.text += "iOS";
#elif UNITY_WSA
versionInfo.text += "WSA";
#elif UNITY_WEBGL
versionInfo.text += "WebGL";
#endif
versionInfo.text += " ";
#if ENABLE_MONO
versionInfo.text += "Mono";
#elif ENABLE_IL2CPP
versionInfo.text += "IL2CPP";
#elif ENABLE_DOTNET
versionInfo.text += ".NET";
#endif
scrollRect.verticalNormalizedPosition = verticalNormalizedPosition;
}
// Update is called once per frame
void Update ()
{
}
public void OnScrollRectValueChanged ()
{
verticalNormalizedPosition = scrollRect.verticalNormalizedPosition;
}
public void OnShowLicenseButton ()
{
SceneManager.LoadScene ("ShowLicense");
}
public void OnTexture2DFaceTrackerExample ()
{
SceneManager.LoadScene ("Texture2DFaceTrackerExample");
}
public void OnWebCamTextureFaceTrackerExample ()
{
SceneManager.LoadScene ("WebCamTextureFaceTrackerExample");
}
public void OnFaceTrackerARExample ()
{
SceneManager.LoadScene ("FaceTrackerARExample");
}
}
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Двоичные данные
Assets/FaceTrackerExample/ReadMe.pdf

Двоичный файл не отображается.

Просмотреть файл

@ -1,5 +0,0 @@
fileFormatVersion: 2
guid: c6d804cb2b659464eb528f25050a855c
folderAsset: yes
DefaultImporter:
userData:

Просмотреть файл

@ -1,5 +0,0 @@
fileFormatVersion: 2
guid: 6c9557d03374cf54cbd32ac281028699
folderAsset: yes
DefaultImporter:
userData:

Просмотреть файл

@ -1,871 +0,0 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UI;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using OpenCVFaceTracker;
namespace FaceTrackerExample
{
/// <summary>
/// Face tracker AR example.
/// This Example was referring to http://www.morethantechnical.com/2012/10/17/head-pose-estimation-with-opencv-opengl-revisited-w-code/
/// and use effect asset from http://ktk-kumamoto.hatenablog.com/entry/2014/09/14/092400
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class FaceTrackerARExample : MonoBehaviour
{
/// <summary>
/// The should draw face points.
/// </summary>
public bool isShowingFacePoints;
/// <summary>
/// The is showing face points toggle.
/// </summary>
public Toggle isShowingFacePointsToggle;
/// <summary>
/// Determines if the axes object is drawn.
/// </summary>
public bool isShowingAxes;
/// <summary>
/// The is showing axes toggle.
/// </summary>
public Toggle isShowingAxesToggle;
/// <summary>
/// Determines if the head object is drawn.
/// </summary>
public bool isShowingHead;
/// <summary>
/// The is showing head toggle.
/// </summary>
public Toggle isShowingHeadToggle;
/// <summary>
/// Determines if the effect objects (eyes and mouth) are drawn.
/// </summary>
public bool isShowingEffects;
/// <summary>
/// The is showing effects toggle.
/// </summary>
public Toggle isShowingEffectsToggle;
/// <summary>
/// The auto reset mode. If true, the face is tracked only while a face is detected in each frame.
/// </summary>
public bool isAutoResetMode;
/// <summary>
/// The auto reset mode toggle.
/// </summary>
public Toggle isAutoResetModeToggle;
/// <summary>
/// The axes game object.
/// </summary>
public GameObject axes;
/// <summary>
/// The head game object.
/// </summary>
public GameObject head;
/// <summary>
/// The right eye game object.
/// </summary>
public GameObject rightEye;
/// <summary>
/// The left eye game object.
/// </summary>
public GameObject leftEye;
/// <summary>
/// The mouth game object.
/// </summary>
public GameObject mouth;
/// <summary>
/// The rvec noise filter range. A new rvec whose per-component change from the
/// previous frame exceeds this value is rejected as noise.
/// </summary>
[Range(0, 50)]
public float
rvecNoiseFilterRange = 8;
/// <summary>
/// The tvec noise filter range. A new tvec whose per-component change from the
/// previous frame exceeds this value is rejected as noise.
/// </summary>
[Range(0, 360)]
public float
tvecNoiseFilterRange = 90;
/// <summary>
/// The gray mat (camera frame converted to greyscale for detection/tracking).
/// </summary>
Mat grayMat;
/// <summary>
/// The texture the annotated camera frame is rendered into.
/// </summary>
Texture2D texture;
/// <summary>
/// The haar cascade classifier used for the initial face detection.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The face tracker.
/// </summary>
FaceTracker faceTracker;
/// <summary>
/// The face tracker parameters.
/// </summary>
FaceTrackerParams faceTrackerParams;
/// <summary>
/// The AR camera.
/// </summary>
public Camera ARCamera;
/// <summary>
/// The camera intrinsic matrix (3x3).
/// </summary>
Mat camMatrix;
/// <summary>
/// The distortion coefficients (all zero; no lens distortion is modeled).
/// </summary>
MatOfDouble distCoeffs;
/// <summary>
/// Matrix that flips the Y axis; used to convert poses between the OpenCV and
/// Unity coordinate systems.
/// </summary>
Matrix4x4 invertYM;
/// <summary>
/// The transformation matrix built from the solvePnP pose (rvec/tvec).
/// </summary>
Matrix4x4 transformationM = new Matrix4x4();
/// <summary>
/// Matrix that flips the Z axis; used to convert poses between the OpenCV and
/// Unity coordinate systems.
/// </summary>
Matrix4x4 invertZM;
/// <summary>
/// The matrix applied to the AR camera or the AR game object each refresh.
/// </summary>
Matrix4x4 ARM;
/// <summary>
/// The AR game object.
/// </summary>
public GameObject ARGameObject;
/// <summary>
/// If true, the AR camera is moved instead of the AR game object.
/// </summary>
public bool shouldMoveARCamera;
/// <summary>
/// The 3d face object points (model-space landmarks fed to solvePnP).
/// </summary>
MatOfPoint3f objectPoints;
/// <summary>
/// The image points (2D landmarks matching objectPoints, fed to solvePnP).
/// </summary>
MatOfPoint2f imagePoints;
/// <summary>
/// The rotation vector estimated by solvePnP.
/// </summary>
Mat rvec;
/// <summary>
/// The translation vector estimated by solvePnP.
/// </summary>
Mat tvec;
/// <summary>
/// The rotation matrix (Rodrigues form of rvec).
/// </summary>
Mat rotM;
/// <summary>
/// The rvec accepted in the previous frame; reference for the noise filter.
/// </summary>
Mat oldRvec;
/// <summary>
/// The tvec accepted in the previous frame; reference for the noise filter.
/// </summary>
Mat oldTvec;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The tracker_model_json_filepath.
/// </summary>
private string tracker_model_json_filepath;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
/// <summary>
/// Initializes the UI toggles from the serialized flags and resolves the
/// model/cascade file paths (asynchronously on WebGL), then starts the
/// tracker via Run().
/// </summary>
void Start()
{
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
// Mirror the serialized bool flags into the UI toggle states.
isShowingFacePointsToggle.isOn = isShowingFacePoints;
isShowingAxesToggle.isOn = isShowingAxes;
isShowingHeadToggle.isOn = isShowingHead;
isShowingEffectsToggle.isOn = isShowingEffects;
isAutoResetModeToggle.isOn = isAutoResetMode;
#if UNITY_WEBGL && !UNITY_EDITOR
// WebGL cannot read StreamingAssets synchronously; fetch the files first.
StartCoroutine(getFilePathCoroutine());
#else
tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
Run();
#endif
}
#if UNITY_WEBGL && !UNITY_EDITOR
/// <summary>
/// Fetches the tracker model and cascade files asynchronously (required on
/// WebGL), then runs the tracker setup.
/// </summary>
private IEnumerator getFilePathCoroutine()
{
var getFilePathAsync_0_Coroutine = StartCoroutine(Utils.getFilePathAsync("tracker_model.json", (result) => {
tracker_model_json_filepath = result;
}));
var getFilePathAsync_1_Coroutine = StartCoroutine(Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) => {
haarcascade_frontalface_alt_xml_filepath = result;
}));
// Wait for both downloads to finish before initializing the tracker.
yield return getFilePathAsync_0_Coroutine;
yield return getFilePathAsync_1_Coroutine;
Run();
}
#endif
/// <summary>
/// Creates the face tracker, the cascade classifier and the pose-estimation
/// buffers, then kicks off the webcam helper initialization.
/// </summary>
private void Run()
{
// 3D reference points of the face model, in the order expected by the
// imagePoints fed to solvePnP: left eye, right eye, nose, left mouth
// corner, right mouth corner.
objectPoints = new MatOfPoint3f(
new Point3(-31, 72, 86),  // l eye
new Point3(31, 72, 86),   // r eye
new Point3(0, 40, 114),   // nose
new Point3(-20, 15, 90),  // l mouth
new Point3(20, 15, 90)    // r mouth
);
imagePoints = new MatOfPoint2f();
// Outputs of the per-frame pose estimation.
rvec = new Mat();
tvec = new Mat();
rotM = new Mat(3, 3, CvType.CV_64FC1);
// Landmark tracker and its parameters.
faceTracker = new FaceTracker(tracker_model_json_filepath);
faceTrackerParams = new FaceTrackerParams();
// Haar cascade used for the initial face detection.
cascade = new CascadeClassifier();
cascade.load(haarcascade_frontalface_alt_xml_filepath);
webCamTextureToMatHelper.Initialize();
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// Sets up the display quad, approximates the camera intrinsics from the
/// frame size, configures the AR camera FOV and allocates the working mats.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float imageSizeScale = 1.0f;
width = gameObject.transform.localScale.x;
height = gameObject.transform.localScale.y;
// Fit the orthographic background camera to the screen aspect ratio.
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
} else
{
Camera.main.orthographicSize = height / 2;
}
// Approximate pinhole intrinsics: focal length = max image dimension,
// principal point = image center, no lens distortion.
int max_d = (int)Mathf.Max(width, height);
double fx = max_d;
double fy = max_d;
double cx = width / 2.0f;
double cy = height / 2.0f;
camMatrix = new Mat(3, 3, CvType.CV_64FC1);
camMatrix.put(0, 0, fx);
camMatrix.put(0, 1, 0);
camMatrix.put(0, 2, cx);
camMatrix.put(1, 0, 0);
camMatrix.put(1, 1, fy);
camMatrix.put(1, 2, cy);
camMatrix.put(2, 0, 0);
camMatrix.put(2, 1, 0);
camMatrix.put(2, 2, 1.0f);
Debug.Log("camMatrix " + camMatrix.dump());
distCoeffs = new MatOfDouble(0, 0, 0, 0);
Debug.Log("distCoeffs " + distCoeffs.dump());
// Log the characteristics implied by the approximated camera matrix.
Size imageSize = new Size(width * imageSizeScale, height * imageSizeScale);
double apertureWidth = 0;
double apertureHeight = 0;
double[] fovx = new double[1];
double[] fovy = new double[1];
double[] focalLength = new double[1];
Point principalPoint = new Point(0, 0);
double[] aspectratio = new double[1];
Calib3d.calibrationMatrixValues(camMatrix, imageSize, apertureWidth, apertureHeight, fovx, fovy, focalLength, principalPoint, aspectratio);
Debug.Log("imageSize " + imageSize.ToString());
Debug.Log("apertureWidth " + apertureWidth);
Debug.Log("apertureHeight " + apertureHeight);
Debug.Log("fovx " + fovx [0]);
Debug.Log("fovy " + fovy [0]);
Debug.Log("focalLength " + focalLength [0]);
Debug.Log("principalPoint " + principalPoint.ToString());
Debug.Log("aspectratio " + aspectratio [0]);
//To convert the difference of the FOV value of the OpenCV and Unity.
double fovXScale = (2.0 * Mathf.Atan((float)(imageSize.width / (2.0 * fx)))) / (Mathf.Atan2((float)cx, (float)fx) + Mathf.Atan2((float)(imageSize.width - cx), (float)fx));
double fovYScale = (2.0 * Mathf.Atan((float)(imageSize.height / (2.0 * fy)))) / (Mathf.Atan2((float)cy, (float)fy) + Mathf.Atan2((float)(imageSize.height - cy), (float)fy));
Debug.Log("fovXScale " + fovXScale);
Debug.Log("fovYScale " + fovYScale);
//Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
if (widthScale < heightScale)
{
ARCamera.fieldOfView = (float)(fovx [0] * fovXScale);
} else
{
ARCamera.fieldOfView = (float)(fovy [0] * fovYScale);
}
// Axis-flip matrices used to convert poses between the OpenCV and Unity
// coordinate systems.
invertYM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, -1, 1));
Debug.Log("invertYM " + invertYM.ToString());
invertZM = Matrix4x4.TRS(Vector3.zero, Quaternion.identity, new Vector3(1, 1, -1));
Debug.Log("invertZM " + invertZM.ToString());
grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
// Hide all AR objects until a pose has been estimated.
axes.SetActive(false);
head.SetActive(false);
rightEye.SetActive(false);
leftEye.SetActive(false);
mouth.SetActive(false);
}
/// <summary>
/// Raises the webcam texture to mat helper disposed event.
/// Resets the tracker and releases the per-session native mats.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
// BUGFIX: guard every member before use — this event can arrive when
// initialization never completed (e.g. after a camera error), in which
// case these fields are still null and the original code threw
// NullReferenceException.
if (faceTracker != null)
faceTracker.reset();
if (grayMat != null)
grayMat.Dispose();
if (camMatrix != null)
camMatrix.Dispose();
if (distCoeffs != null)
distCoeffs.Dispose();
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
// Only logs; this example has no recovery path for camera errors.
string message = "OnWebCamTextureToMatHelperErrorOccurred " + errorCode;
Debug.Log(message);
}
/// <summary>
/// Per-frame loop: grabs the camera frame, (re)detects faces when needed,
/// tracks the face landmarks, estimates the head pose with solvePnP and
/// drives the AR objects / AR camera from the resulting transform.
/// A tap or the Space key resets the tracker.
/// </summary>
void Update()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
//convert image to greyscale
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
// Run the cascade detector when no face is tracked yet, or on every
// frame in auto reset mode (to validate the tracked points).
if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
{
using (Mat equalizeHistMat = new Mat())
using (MatOfRect faces = new MatOfRect())
{
Imgproc.equalizeHist(grayMat, equalizeHistMat);
cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
if (faces.rows() > 0)
{
List<OpenCVForUnity.Rect> rectsList = faces.toList();
List<Point[]> pointsList = faceTracker.getPoints();
if (isAutoResetMode)
{
//add initial face points from MatOfRect
if (pointsList.Count <= 0)
{
faceTracker.addPoints(faces);
} else
{
// Drop tracked faces whose nose landmark (index 67) has
// left the central region of the detected rect.
for (int i = 0; i < rectsList.Count; i++)
{
OpenCVForUnity.Rect trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
//It determines whether nose point has been included in trackRect.
if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
{
rectsList.RemoveAt(i);
pointsList.RemoveAt(i);
// BUGFIX: step the index back after RemoveAt so the
// element that shifted into slot i is not skipped by
// the loop's i++.
i--;
}
Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
}
}
} else
{
faceTracker.addPoints(faces);
}
//draw face rect
for (int i = 0; i < rectsList.Count; i++)
{
#if OPENCV_2
// BUGFIX: "rectsLIst" typo corrected to "rectsList"; the
// OPENCV_2 configuration previously failed to compile.
Core.rectangle (rgbaMat, new Point (rectsList [i].x, rectsList [i].y), new Point (rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar (255, 0, 0, 255), 2);
#else
Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
#endif
}
} else
{
// No face detected this frame: in auto reset mode drop the
// track and hide all AR objects.
if (isAutoResetMode)
{
faceTracker.reset();
rightEye.SetActive(false);
leftEye.SetActive(false);
head.SetActive(false);
mouth.SetActive(false);
axes.SetActive(false);
}
}
}
}
//track face points.if face points <= 0, always return false.
if (faceTracker.track(grayMat, faceTrackerParams))
{
if (isShowingFacePoints)
faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
#if OPENCV_2
Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif
Point[] points = faceTracker.getPoints() [0];
if (points.Length > 0)
{
// Landmark indices must match the objectPoints order set in Run():
// 31 = l eye, 36 = r eye, 67 = nose, 48/54 = l/r mouth corner.
imagePoints.fromArray(
points [31],//l eye
points [36],//r eye
points [67],//nose
points [48],//l mouth
points [54] //r mouth
);
// Estimate the head pose from the 2D-3D correspondences.
Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
bool isRefresh = false;
// Accept the pose only when the head is in front of the camera and
// within a plausible distance (scaled to the actual frame width).
if (tvec.get(2, 0) [0] > 0 && tvec.get(2, 0) [0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
{
isRefresh = true;
if (oldRvec == null)
{
oldRvec = new Mat();
rvec.copyTo(oldRvec);
}
if (oldTvec == null)
{
oldTvec = new Mat();
tvec.copyTo(oldTvec);
}
//filter Rvec Noise.
using (Mat absDiffRvec = new Mat())
{
Core.absdiff(rvec, oldRvec, absDiffRvec);
using (Mat cmpRvec = new Mat())
{
Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
if (Core.countNonZero(cmpRvec) > 0)
isRefresh = false;
}
}
//filter Tvec Noise.
using (Mat absDiffTvec = new Mat())
{
Core.absdiff(tvec, oldTvec, absDiffTvec);
using (Mat cmpTvec = new Mat())
{
Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
if (Core.countNonZero(cmpTvec) > 0)
isRefresh = false;
}
}
}
if (isRefresh)
{
if (isShowingEffects)
rightEye.SetActive(true);
if (isShowingEffects)
leftEye.SetActive(true);
if (isShowingHead)
head.SetActive(true);
if (isShowingAxes)
axes.SetActive(true);
// Landmark-distance heuristic driving the mouth effect —
// presumably open-mouth detection; confirm the landmark indexing
// against the tracker model before changing the constants.
if ((Mathf.Abs((float)(points [48].x - points [56].x)) < Mathf.Abs((float)(points [31].x - points [36].x)) / 2.2
&& Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.9)
|| Mathf.Abs((float)(points [51].y - points [57].y)) > Mathf.Abs((float)(points [31].x - points [36].x)) / 2.7)
{
if (isShowingEffects)
mouth.SetActive(true);
} else
{
if (isShowingEffects)
mouth.SetActive(false);
}
rvec.copyTo(oldRvec);
tvec.copyTo(oldTvec);
// Convert rvec/tvec into a 4x4 transform matrix.
Calib3d.Rodrigues(rvec, rotM);
transformationM.SetRow(0, new Vector4((float)rotM.get(0, 0) [0], (float)rotM.get(0, 1) [0], (float)rotM.get(0, 2) [0], (float)tvec.get(0, 0) [0]));
transformationM.SetRow(1, new Vector4((float)rotM.get(1, 0) [0], (float)rotM.get(1, 1) [0], (float)rotM.get(1, 2) [0], (float)tvec.get(1, 0) [0]));
transformationM.SetRow(2, new Vector4((float)rotM.get(2, 0) [0], (float)rotM.get(2, 1) [0], (float)rotM.get(2, 2) [0], (float)tvec.get(2, 0) [0]));
transformationM.SetRow(3, new Vector4(0, 0, 0, 1));
if (shouldMoveARCamera)
{
if (ARGameObject != null)
{
ARM = ARGameObject.transform.localToWorldMatrix * invertZM * transformationM.inverse * invertYM;
ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
ARGameObject.SetActive(true);
}
} else
{
ARM = ARCamera.transform.localToWorldMatrix * invertYM * transformationM * invertZM;
if (ARGameObject != null)
{
ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
ARGameObject.SetActive(true);
}
}
}
}
}
Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
}
// Tap / Space resets the tracker and hides all AR objects.
if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
{
faceTracker.reset();
if (oldRvec != null)
{
oldRvec.Dispose();
oldRvec = null;
}
if (oldTvec != null)
{
oldTvec.Dispose();
oldTvec = null;
}
rightEye.SetActive(false);
leftEye.SetActive(false);
head.SetActive(false);
mouth.SetActive(false);
axes.SetActive(false);
}
}
/// <summary>
/// Raises the disable event. Releases the webcam helper and the cascade.
/// </summary>
void OnDisable()
{
// BUGFIX: the helper is also guarded (like cascade already was) — it can
// be null when OnDisable runs before Start resolved the component.
if (webCamTextureToMatHelper != null)
webCamTextureToMatHelper.Dispose();
if (cascade != null)
cascade.Dispose();
}
/// <summary>
/// Raises the back button event. Returns to the example selection scene.
/// </summary>
public void OnBackButton()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene("FaceTrackerExample");
#else
// Pre-5.3 Unity has no SceneManager API.
Application.LoadLevel("FaceTrackerExample");
#endif
}
/// <summary>
/// Raises the play button event. Resumes the webcam stream.
/// </summary>
public void OnPlayButton()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button event. Pauses the webcam stream.
/// </summary>
public void OnPauseButton()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button event. Stops the webcam stream.
/// </summary>
public void OnStopButton()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button event. Re-initializes the webcam helper
/// with the opposite camera facing at the same requested resolution.
/// </summary>
public void OnChangeCameraButton()
{
int width = webCamTextureToMatHelper.requestedWidth;
int height = webCamTextureToMatHelper.requestedHeight;
bool flippedFacing = !webCamTextureToMatHelper.requestedIsFrontFacing;
webCamTextureToMatHelper.Initialize(null, width, height, flippedFacing);
}
/// <summary>
/// Raises the is showing face points toggle event.
/// </summary>
public void OnIsShowingFacePointsToggle()
{
// Copy the UI toggle state into the flag read by Update().
isShowingFacePoints = isShowingFacePointsToggle.isOn;
}
/// <summary>
/// Raises the is showing axes toggle event.
/// </summary>
public void OnIsShowingAxesToggle()
{
isShowingAxes = isShowingAxesToggle.isOn;
// Hide the axes immediately when the toggle is switched off.
if (!isShowingAxes)
axes.SetActive(false);
}
/// <summary>
/// Raises the is showing head toggle event.
/// </summary>
public void OnIsShowingHeadToggle()
{
isShowingHead = isShowingHeadToggle.isOn;
// Hide the head immediately when the toggle is switched off.
if (!isShowingHead)
head.SetActive(false);
}
/// <summary>
/// Raises the is showing effects toggle event.
/// </summary>
public void OnIsShowingEffectsToggle()
{
isShowingEffects = isShowingEffectsToggle.isOn;
// Hide all effect objects immediately when the toggle is switched off.
if (!isShowingEffects)
{
rightEye.SetActive(false);
leftEye.SetActive(false);
mouth.SetActive(false);
}
}
/// <summary>
/// Raises the change auto reset mode toggle event.
/// </summary>
public void OnIsAutoResetModeToggle()
{
// Copy the UI toggle state into the flag read by Update().
isAutoResetMode = isAutoResetModeToggle.isOn;
}
}
}

Просмотреть файл

@ -1,64 +0,0 @@
using UnityEngine;
using System.Collections;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
namespace FaceTrackerExample
{
/// <summary>
/// Face tracker example.
/// Top menu scene: each button handler loads one of the example scenes.
/// </summary>
public class FaceTrackerExample : MonoBehaviour
{
// Use this for initialization
void Start()
{
}
// Update is called once per frame
void Update()
{
}
/// <summary>
/// Raises the show license button event.
/// </summary>
public void OnShowLicenseButton()
{
LoadScene("ShowLicense");
}
/// <summary>
/// Raises the Texture2D face tracker example button event.
/// </summary>
public void OnTexture2DFaceTrackerExample()
{
LoadScene("Texture2DFaceTrackerExample");
}
/// <summary>
/// Raises the webcam texture face tracker example button event.
/// </summary>
public void OnWebCamTextureFaceTrackerExample()
{
LoadScene("WebCamTextureFaceTrackerExample");
}
/// <summary>
/// Raises the face tracker AR example button event.
/// </summary>
public void OnFaceTrackerARExample()
{
LoadScene("FaceTrackerARExample");
}
// Scene loading differs between Unity versions; funnel every handler
// through this single helper instead of repeating the #if in each one.
private static void LoadScene(string sceneName)
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene(sceneName);
#else
Application.LoadLevel(sceneName);
#endif
}
}
}

Просмотреть файл

@ -1,157 +0,0 @@
using UnityEngine;
using System.Collections;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using OpenCVFaceTracker;
namespace FaceTrackerExample
{
/// <summary>
/// Texture2D face tracker example.
/// Runs the face detector and landmark tracker once over a static texture
/// and displays the annotated result.
/// </summary>
public class Texture2DFaceTrackerExample : MonoBehaviour
{
/// <summary>
/// The image texture.
/// </summary>
public Texture2D imgTexture;
/// <summary>
/// The tracker_model_json_filepath.
/// </summary>
private string tracker_model_json_filepath;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
/// <summary>
/// Resolves the model/cascade file paths (asynchronously on WebGL) and runs the example.
/// </summary>
void Start()
{
#if UNITY_WEBGL && !UNITY_EDITOR
StartCoroutine(getFilePathCoroutine());
#else
tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
Run();
#endif
}
#if UNITY_WEBGL && !UNITY_EDITOR
/// <summary>
/// Fetches the StreamingAssets files asynchronously (required on WebGL), then runs the example.
/// </summary>
private IEnumerator getFilePathCoroutine()
{
var getFilePathAsync_0_Coroutine = StartCoroutine(Utils.getFilePathAsync("tracker_model.json", (result) => {
tracker_model_json_filepath = result;
}));
var getFilePathAsync_1_Coroutine = StartCoroutine(Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) => {
haarcascade_frontalface_alt_xml_filepath = result;
}));
yield return getFilePathAsync_0_Coroutine;
yield return getFilePathAsync_1_Coroutine;
Run();
}
#endif
/// <summary>
/// Detects a face on the static image, tracks the landmarks and shows the annotated texture.
/// </summary>
private void Run()
{
gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
// Fit the orthographic camera to the image aspect ratio.
float width = gameObject.transform.localScale.x;
float height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
} else
{
Camera.main.orthographicSize = height / 2;
}
//initialize FaceTracker
FaceTracker faceTracker = new FaceTracker(tracker_model_json_filepath);
//initialize FaceTrackerParams
FaceTrackerParams faceTrackerParams = new FaceTrackerParams();
Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
Utils.texture2DToMat(imgTexture, imgMat);
Debug.Log("imgMat dst ToString " + imgMat.ToString());
CascadeClassifier cascade = new CascadeClassifier();
cascade.load(haarcascade_frontalface_alt_xml_filepath);
// BUGFIX(leak): the Mats and the cascade wrap native memory and were
// never released; dispose them as soon as they are no longer needed.
using (Mat gray = new Mat())
using (MatOfRect faces = new MatOfRect())
{
//convert image to greyscale
Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY);
Imgproc.equalizeHist(gray, gray);
cascade.detectMultiScale(gray, faces, 1.1f, 2, 0
// | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
Debug.Log("faces " + faces.dump());
if (faces.rows() > 0)
{
//add initial face points from MatOfRect
faceTracker.addPoints(faces);
}
}
cascade.Dispose();
//track face points.if face points <= 0, always return false.
if (faceTracker.track(imgMat, faceTrackerParams))
faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(imgMat, texture);
imgMat.Dispose();
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
}
// Update is called once per frame
void Update()
{
}
/// <summary>
/// Raises the back button event. Returns to the example selection scene.
/// </summary>
public void OnBackButton()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene ("FaceTrackerExample");
#else
Application.LoadLevel("FaceTrackerExample");
#endif
}
}
}

Просмотреть файл

@ -1,97 +0,0 @@
using UnityEngine;
using System.Collections;
namespace FaceTrackerExample
{
/// <summary>
/// AR utils.
/// Helper routines for decomposing a TRS Matrix4x4 into position, rotation and scale.
/// </summary>
public class ARUtils
{
/// <summary>
/// Extract translation from transform matrix.
/// </summary>
/// <param name="matrix">Transform matrix. This parameter is passed by reference
/// to improve performance; no changes will be made to it.</param>
/// <returns>
/// Translation offset.
/// </returns>
public static Vector3 ExtractTranslationFromMatrix(ref Matrix4x4 matrix)
{
// The translation lives in the fourth column of a TRS matrix.
return new Vector3(matrix.m03, matrix.m13, matrix.m23);
}
/// <summary>
/// Extract rotation quaternion from transform matrix.
/// </summary>
/// <param name="matrix">Transform matrix. This parameter is passed by reference
/// to improve performance; no changes will be made to it.</param>
/// <returns>
/// Quaternion representation of rotation transform.
/// </returns>
public static Quaternion ExtractRotationFromMatrix(ref Matrix4x4 matrix)
{
// Rebuild the rotation from the matrix's forward (third) and
// up (second) basis columns.
Vector3 forwardAxis = new Vector3(matrix.m02, matrix.m12, matrix.m22);
Vector3 upAxis = new Vector3(matrix.m01, matrix.m11, matrix.m21);
return Quaternion.LookRotation(forwardAxis, upAxis);
}
/// <summary>
/// Extract scale from transform matrix.
/// </summary>
/// <param name="matrix">Transform matrix. This parameter is passed by reference
/// to improve performance; no changes will be made to it.</param>
/// <returns>
/// Scale vector.
/// </returns>
public static Vector3 ExtractScaleFromMatrix(ref Matrix4x4 matrix)
{
// Each scale component is the length of the corresponding basis column.
Vector3 scale;
scale.x = matrix.GetColumn(0).magnitude;
scale.y = matrix.GetColumn(1).magnitude;
scale.z = matrix.GetColumn(2).magnitude;
return scale;
}
/// <summary>
/// Extract position, rotation and scale from TRS matrix.
/// </summary>
/// <param name="matrix">Transform matrix. This parameter is passed by reference
/// to improve performance; no changes will be made to it.</param>
/// <param name="localPosition">Output position.</param>
/// <param name="localRotation">Output rotation.</param>
/// <param name="localScale">Output scale.</param>
public static void DecomposeMatrix(ref Matrix4x4 matrix, out Vector3 localPosition, out Quaternion localRotation, out Vector3 localScale)
{
localPosition = ExtractTranslationFromMatrix(ref matrix);
localRotation = ExtractRotationFromMatrix(ref matrix);
localScale = ExtractScaleFromMatrix(ref matrix);
}
/// <summary>
/// Set transform component from TRS matrix.
/// </summary>
/// <param name="transform">Transform component.</param>
/// <param name="matrix">Transform matrix. This parameter is passed by reference
/// to improve performance; no changes will be made to it.</param>
public static void SetTransformFromMatrix(Transform transform, ref Matrix4x4 matrix)
{
Vector3 position;
Quaternion rotation;
Vector3 scale;
DecomposeMatrix(ref matrix, out position, out rotation, out scale);
transform.localPosition = position;
transform.localRotation = rotation;
transform.localScale = scale;
}
}
}

Просмотреть файл

@ -1,12 +0,0 @@
fileFormatVersion: 2
guid: 055caa3bf99695e42a0f9f1ee487907d
timeCreated: 1458482509
licenseType: Pro
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -1,577 +0,0 @@
using OpenCVForUnity;
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.Events;
namespace FaceTrackerExample
{
/// <summary>
/// Webcam texture to mat helper.
/// v 1.0.4
/// </summary>
public class WebCamTextureToMatHelper : MonoBehaviour
{
/// <summary>
/// Set the name of the device to use.
/// </summary>
[SerializeField, TooltipAttribute("Set the name of the device to use.")]
public string requestedDeviceName = null;
/// <summary>
/// Set the width of WebCamTexture.
/// </summary>
[SerializeField, TooltipAttribute("Set the width of WebCamTexture.")]
public int requestedWidth = 640;
/// <summary>
/// Set the height of WebCamTexture.
/// </summary>
[SerializeField, TooltipAttribute("Set the height of WebCamTexture.")]
public int requestedHeight = 480;
/// <summary>
/// Set whether to use the front facing camera.
/// </summary>
[SerializeField, TooltipAttribute("Set whether to use the front facing camera.")]
public bool requestedIsFrontFacing = false;
/// <summary>
/// Set FPS of WebCamTexture.
/// </summary>
[SerializeField, TooltipAttribute("Set FPS of WebCamTexture.")]
public int requestedFPS = 30;
/// <summary>
/// Sets whether to rotate WebCamTexture 90 degrees.
/// </summary>
[SerializeField, TooltipAttribute("Sets whether to rotate WebCamTexture 90 degrees.")]
public bool requestedRotate90Degree = false;
/// <summary>
/// Determines if the frame is flipped vertically.
/// </summary>
[SerializeField, TooltipAttribute("Determines if flips vertically.")]
public bool flipVertical = false;
/// <summary>
/// Determines if the frame is flipped horizontally.
/// </summary>
[SerializeField, TooltipAttribute("Determines if flips horizontal.")]
public bool flipHorizontal = false;
/// <summary>
/// The timeout frame count. Initialization fails with ErrorCode.TIMEOUT when
/// the camera has not delivered a first frame after this many polled frames.
/// </summary>
public int timeoutFrameCount = 300;
/// <summary>
/// UnityEvent that is triggered when this instance is initialized.
/// </summary>
public UnityEvent onInitialized;
/// <summary>
/// UnityEvent that is triggered when this instance is disposed.
/// </summary>
public UnityEvent onDisposed;
/// <summary>
/// UnityEvent that is triggered when an error occurs in this instance.
/// </summary>
public ErrorUnityEvent onErrorOccurred;
/// <summary>
/// The webcam texture.
/// </summary>
protected WebCamTexture webCamTexture;
/// <summary>
/// The webcam device.
/// </summary>
protected WebCamDevice webCamDevice;
/// <summary>
/// The rgba mat holding the current camera frame.
/// </summary>
protected Mat rgbaMat;
/// <summary>
/// The rotated rgba mat. Allocated only when the frame must be rotated 90
/// degrees (portrait orientation on device, or requestedRotate90Degree).
/// </summary>
protected Mat rotatedRgbaMat;
/// <summary>
/// The buffer colors (pixel scratch buffer sized to the camera resolution).
/// </summary>
protected Color32[] colors;
/// <summary>
/// Indicates whether this instance is waiting for initialization to complete.
/// </summary>
protected bool isInitWaiting = false;
/// <summary>
/// Indicates whether this instance has been initialized.
/// </summary>
protected bool hasInitDone = false;
/// <summary>
/// Orientation of the screen captured at initialization time; a change
/// triggers re-initialization in Update().
/// </summary>
protected ScreenOrientation screenOrientation = ScreenOrientation.Unknown;
/// <summary>
/// Error codes reported through onErrorOccurred.
/// </summary>
[System.Serializable]
public enum ErrorCode :int
{
// No camera device is available on this machine.
CAMERA_DEVICE_NOT_EXIST = 0,
// The camera did not deliver a first frame within timeoutFrameCount frames.
TIMEOUT = 1,
}
/// <summary>
/// UnityEvent carrying an ErrorCode payload.
/// </summary>
[System.Serializable]
public class ErrorUnityEvent : UnityEngine.Events.UnityEvent<ErrorCode>
{
}
/// <summary>
/// Per-frame check: re-initializes the capture pipeline when the screen
/// orientation changes so the mat buffers match the new frame layout.
/// </summary>
protected virtual void Update()
{
if (hasInitDone && screenOrientation != Screen.orientation)
{
StartCoroutine(_Initialize());
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
protected virtual void OnDestroy()
{
// Release the camera and mat buffers when the component is destroyed.
Dispose();
}
/// <summary>
/// Initializes this instance with the currently configured settings.
/// No-op when an initialization is already in flight.
/// </summary>
public virtual void Initialize()
{
if (isInitWaiting)
return;
// Lazily create the UnityEvents so callers can subscribe even when
// nothing was assigned in the inspector.
onInitialized = onInitialized ?? new UnityEvent();
onDisposed = onDisposed ?? new UnityEvent();
onErrorOccurred = onErrorOccurred ?? new ErrorUnityEvent();
StartCoroutine(_Initialize());
}
/// <summary>
/// Initializes this instance with explicit capture settings.
/// No-op when an initialization is already in flight.
/// </summary>
/// <param name="deviceName">Device name.</param>
/// <param name="requestedWidth">Requested width.</param>
/// <param name="requestedHeight">Requested height.</param>
/// <param name="requestedIsFrontFacing">If set to <c>true</c> requested to using the front camera.</param>
/// <param name="requestedFPS">Requested FPS.</param>
public virtual void Initialize(string deviceName, int requestedWidth, int requestedHeight, bool requestedIsFrontFacing = false, int requestedFPS = 30)
{
if (isInitWaiting)
return;
// Store the requested settings before the coroutine reads them.
this.requestedDeviceName = deviceName;
this.requestedWidth = requestedWidth;
this.requestedHeight = requestedHeight;
this.requestedIsFrontFacing = requestedIsFrontFacing;
this.requestedFPS = requestedFPS;
// Lazily create the UnityEvents so callers can subscribe even when
// nothing was assigned in the inspector.
onInitialized = onInitialized ?? new UnityEvent();
onDisposed = onDisposed ?? new UnityEvent();
onErrorOccurred = onErrorOccurred ?? new ErrorUnityEvent();
StartCoroutine(_Initialize());
}
/// <summary>
/// Initializes this instance by coroutine.
/// Selects a camera device, starts it, waits for the first frame, then
/// allocates the pixel buffer and mats; invokes onInitialized on success or
/// onErrorOccurred on missing device / timeout.
/// </summary>
protected virtual IEnumerator _Initialize()
{
if (hasInitDone)
_Dispose();
isInitWaiting = true;
// Device selection: an explicitly named device wins; otherwise the first
// device matching the requested facing; otherwise the first device at all.
if (!String.IsNullOrEmpty(requestedDeviceName))
{
webCamTexture = new WebCamTexture(requestedDeviceName, requestedWidth, requestedHeight, requestedFPS);
} else
{
// Checks how many and which cameras are available on the device
for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++)
{
if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestedIsFrontFacing)
{
webCamDevice = WebCamTexture.devices [cameraIndex];
webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
break;
}
}
}
if (webCamTexture == null)
{
if (WebCamTexture.devices.Length > 0)
{
webCamDevice = WebCamTexture.devices [0];
webCamTexture = new WebCamTexture(webCamDevice.name, requestedWidth, requestedHeight, requestedFPS);
} else
{
// No camera at all: report and abort.
isInitWaiting = false;
if (onErrorOccurred != null)
onErrorOccurred.Invoke(ErrorCode.CAMERA_DEVICE_NOT_EXIST);
yield break;
}
}
// Starts the camera
webCamTexture.Play();
// Poll until the camera delivers a first frame or the timeout elapses.
int initFrameCount = 0;
bool isTimeout = false;
while (true)
{
if (initFrameCount > timeoutFrameCount)
{
isTimeout = true;
break;
}
// If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
else if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
else if (webCamTexture.didUpdateThisFrame)
{
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
while (webCamTexture.width <= 16) {
if (initFrameCount > timeoutFrameCount) {
isTimeout = true;
break;
}else {
initFrameCount++;
}
webCamTexture.GetPixels32 ();
yield return new WaitForEndOfFrame ();
}
if (isTimeout) break;
#endif
#endif
// NOTE(review): "isFrongFacing" below is a typo inside a log string
// literal; left unchanged in this documentation-only pass.
Debug.Log("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
Debug.Log("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
// Allocate the pixel buffer and the rgba mat to match the actual
// resolution reported by the camera (may differ from the request).
if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
colors = new Color32[webCamTexture.width * webCamTexture.height];
rgbaMat = new Mat(webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
screenOrientation = Screen.orientation;
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
}
#endif
if (requestedRotate90Degree)
{
if (rotatedRgbaMat == null)
rotatedRgbaMat = new Mat(webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
}
isInitWaiting = false;
hasInitDone = true;
if (onInitialized != null)
onInitialized.Invoke();
break;
} else
{
initFrameCount++;
yield return 0;
}
}
if (isTimeout)
{
// Camera never produced a frame: release it and report the timeout.
webCamTexture.Stop();
webCamTexture = null;
isInitWaiting = false;
if (onErrorOccurred != null)
onErrorOccurred.Invoke(ErrorCode.TIMEOUT);
}
}
/// <summary>
/// Indicates whether this instance has completed initialization.
/// </summary>
/// <returns><c>true</c> once initialization has finished; <c>false</c> otherwise.</returns>
public virtual bool IsInitialized()
{
    if (hasInitDone)
    {
        return true;
    }
    return false;
}
/// <summary>
/// Starts the webcam texture. Does nothing until initialization has completed.
/// </summary>
public virtual void Play()
{
    if (!hasInitDone)
        return;

    webCamTexture.Play();
}
/// <summary>
/// Pauses the webcam texture. Does nothing until initialization has completed.
/// </summary>
public virtual void Pause()
{
    if (!hasInitDone)
        return;

    webCamTexture.Pause();
}
/// <summary>
/// Stops the webcam texture. Does nothing until initialization has completed.
/// </summary>
public virtual void Stop()
{
    if (!hasInitDone)
        return;

    webCamTexture.Stop();
}
/// <summary>
/// Indicates whether the webcam texture is currently playing.
/// </summary>
/// <returns><c>true</c> if initialized and the webcam texture is playing; <c>false</c> otherwise.</returns>
public virtual bool IsPlaying()
{
    // Short-circuits before touching webCamTexture when not yet initialized.
    return hasInitDone && webCamTexture.isPlaying;
}
/// <summary>
/// Returns the active webcam texture.
/// </summary>
/// <returns>The webcam texture, or <c>null</c> before initialization has completed.</returns>
public virtual WebCamTexture GetWebCamTexture()
{
    if (!hasInitDone)
        return null;

    return webCamTexture;
}
/// <summary>
/// Returns the webcam device descriptor cached during initialization.
/// </summary>
/// <returns>The webcam device.</returns>
public virtual WebCamDevice GetWebCamDevice()
{
    WebCamDevice device = webCamDevice;
    return device;
}
/// <summary>
/// Indicates whether the video buffer of the frame has been updated.
/// Always <c>false</c> before initialization has completed.
/// </summary>
/// <returns><c>true</c>, if the video buffer has been updated <c>false</c> otherwise.</returns>
public virtual bool DidUpdateThisFrame()
{
    if (!hasInitDone)
        return false;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
    // On these iOS/Unity combinations the texture reports 16x16 until a real
    // frame arrives (see the forum link in _Initialize), so the size is used
    // as a proxy for "a frame has been delivered".
    if (webCamTexture.width > 16 && webCamTexture.height > 16) {
        return true;
    } else {
        return false;
    }
#else
    return webCamTexture.didUpdateThisFrame;
#endif
}
/// <summary>
/// Returns the current camera frame as an RGBA Mat, rotated/flipped according
/// to orientation and the flip settings. When not initialized or not playing,
/// the previously converted buffer is returned unchanged.
/// </summary>
/// <returns>The mat.</returns>
public virtual Mat GetMat()
{
    // Not capturing: hand back whichever buffer exists without a fresh convert.
    if (!hasInitDone || !webCamTexture.isPlaying)
    {
        return (rotatedRgbaMat != null) ? rotatedRgbaMat : rgbaMat;
    }

    // Pull the latest pixels out of the webcam texture into rgbaMat.
    Utils.webCamTextureToMat(webCamTexture, rgbaMat, colors);

    if (rotatedRgbaMat == null)
    {
        FlipMat(rgbaMat);
        return rgbaMat;
    }

    // Rotate into the pre-allocated (width/height-swapped) buffer, then flip in place.
    Core.rotate(rgbaMat, rotatedRgbaMat, Core.ROTATE_90_CLOCKWISE);
    FlipMat(rotatedRgbaMat);
    return rotatedRgbaMat;
}
/// <summary>
/// Flips the mat in place to compensate for the camera's video rotation angle,
/// the device facing, and the user-requested flipVertical/flipHorizontal settings.
/// </summary>
/// <param name="mat">Mat to flip in place.</param>
protected virtual void FlipMat(Mat mat)
{
    // Flip codes passed to Core.flip: 0 = around the x-axis, 1 = around the
    // y-axis, -1 = both. int.MinValue serves as the "no flip" sentinel.
    int flipCode = int.MinValue;
    int angle = webCamTexture.videoRotationAngle;

    if (webCamDevice.isFrontFacing)
    {
        if (angle == 0 || angle == 90)
        {
            flipCode = 1;
        }
        else if (angle == 180 || angle == 270)
        {
            flipCode = 0;
        }
    }
    else if (angle == 180 || angle == 270)
    {
        flipCode = -1;
    }

    // An extra vertical flip toggles the x-axis component of the code.
    if (flipVertical)
    {
        switch (flipCode)
        {
            case int.MinValue:
                flipCode = 0;
                break;
            case 0:
                flipCode = int.MinValue;
                break;
            case 1:
                flipCode = -1;
                break;
            case -1:
                flipCode = 1;
                break;
        }
    }

    // An extra horizontal flip toggles the y-axis component of the code.
    if (flipHorizontal)
    {
        switch (flipCode)
        {
            case int.MinValue:
                flipCode = 1;
                break;
            case 0:
                flipCode = -1;
                break;
            case 1:
                flipCode = int.MinValue;
                break;
            case -1:
                flipCode = 0;
                break;
        }
    }

    if (flipCode > int.MinValue)
    {
        Core.flip(mat, mat, flipCode);
    }
}
/// <summary>
/// Returns the internal Color32 pixel buffer used when converting the webcam
/// texture to a Mat (may be <c>null</c> before the first successful initialization).
/// </summary>
/// <returns>The buffer colors.</returns>
public virtual Color32[] GetBufferColors()
{
    return this.colors;
}
/// <summary>
/// Releases the resources acquired by the initialization coroutine: stops the
/// camera, disposes the Mat buffers, clears the state flags, and raises onDisposed.
/// </summary>
protected virtual void _Dispose()
{
    isInitWaiting = false;
    hasInitDone = false;

    // Clear each field first, then release the captured reference.
    WebCamTexture texture = webCamTexture;
    webCamTexture = null;
    if (texture != null)
        texture.Stop();

    Mat mat = rgbaMat;
    rgbaMat = null;
    if (mat != null)
        mat.Dispose();

    mat = rotatedRgbaMat;
    rotatedRgbaMat = null;
    if (mat != null)
        mat.Dispose();

    if (onDisposed != null)
        onDisposed.Invoke();
}
/// <summary>
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
public virtual void Dispose()
{
    if (hasInitDone)
        _Dispose();

    // Drop the pixel buffer reference so it can be collected.
    colors = null;
}
}
}

Просмотреть файл

@ -1,8 +0,0 @@
fileFormatVersion: 2
guid: a02a8f18206fd484995e61cc923550d2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:

Просмотреть файл

@ -1,369 +0,0 @@
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEngine.UI;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using OpenCVFaceTracker;
namespace FaceTrackerExample
{
/// <summary>
/// WebCamTexture face tracker example.
/// Detects faces with a Haar cascade and tracks facial landmarks on the live
/// webcam feed using the FaceTracker class.
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureFaceTrackerExample : MonoBehaviour
{
    /// <summary>
    /// The auto reset mode. if true, Only if face is detected in each frame, face is tracked.
    /// </summary>
    public bool isAutoResetMode;

    /// <summary>
    /// The auto reset mode toggle.
    /// </summary>
    public Toggle isAutoResetModeToggle;

    /// <summary>
    /// The gray mat.
    /// </summary>
    Mat grayMat;

    /// <summary>
    /// The texture.
    /// </summary>
    Texture2D texture;

    /// <summary>
    /// The cascade.
    /// </summary>
    CascadeClassifier cascade;

    /// <summary>
    /// The face tracker.
    /// </summary>
    FaceTracker faceTracker;

    /// <summary>
    /// The face tracker parameters.
    /// </summary>
    FaceTrackerParams faceTrackerParams;

    /// <summary>
    /// The web cam texture to mat helper.
    /// </summary>
    WebCamTextureToMatHelper webCamTextureToMatHelper;

    /// <summary>
    /// The tracker_model_json_filepath.
    /// </summary>
    private string tracker_model_json_filepath;

    /// <summary>
    /// The haarcascade_frontalface_alt_xml_filepath.
    /// </summary>
    private string haarcascade_frontalface_alt_xml_filepath;

    // Use this for initialization
    void Start()
    {
        webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
        isAutoResetModeToggle.isOn = isAutoResetMode;

#if UNITY_WEBGL && !UNITY_EDITOR
        // WebGL cannot read StreamingAssets synchronously; resolve paths async.
        StartCoroutine(getFilePathCoroutine());
#else
        tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
        haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
        Run();
#endif
    }

#if UNITY_WEBGL && !UNITY_EDITOR
    // Resolves the model/cascade file paths asynchronously, then runs the example.
    private IEnumerator getFilePathCoroutine()
    {
        var getFilePathAsync_0_Coroutine = StartCoroutine(Utils.getFilePathAsync("tracker_model.json", (result) => {
            tracker_model_json_filepath = result;
        }));
        var getFilePathAsync_1_Coroutine = StartCoroutine(Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) => {
            haarcascade_frontalface_alt_xml_filepath = result;
        }));

        yield return getFilePathAsync_0_Coroutine;
        yield return getFilePathAsync_1_Coroutine;

        Run();
    }
#endif

    // Creates the tracker and cascade, then starts webcam capture.
    private void Run()
    {
        //initialize FaceTracker
        faceTracker = new FaceTracker(tracker_model_json_filepath);
        //initialize FaceTrackerParams
        faceTrackerParams = new FaceTrackerParams();

        cascade = new CascadeClassifier();
        cascade.load(haarcascade_frontalface_alt_xml_filepath);
//      if (cascade.empty())
//      {
//          Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//      }

        webCamTextureToMatHelper.Initialize();
    }

    /// <summary>
    /// Raises the webcam texture to mat helper initialized event.
    /// Sizes the display quad and camera to the captured frame and allocates buffers.
    /// </summary>
    public void OnWebCamTextureToMatHelperInitialized()
    {
        Debug.Log("OnWebCamTextureToMatHelperInitialized");

        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();

        texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);

        gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
        Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        float width = 0;
        float height = 0;

        width = gameObject.transform.localScale.x;
        height = gameObject.transform.localScale.y;

        // Fit the orthographic camera to whichever dimension is more constrained.
        float widthScale = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;
        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        } else
        {
            Camera.main.orthographicSize = height / 2;
        }

        gameObject.GetComponent<Renderer>().material.mainTexture = texture;

        grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
    }

    /// <summary>
    /// Raises the webcam texture to mat helper disposed event.
    /// </summary>
    public void OnWebCamTextureToMatHelperDisposed()
    {
        Debug.Log("OnWebCamTextureToMatHelperDisposed");

        faceTracker.reset();

        grayMat.Dispose();
    }

    /// <summary>
    /// Raises the webcam texture to mat helper error occurred event.
    /// </summary>
    /// <param name="errorCode">Error code.</param>
    public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
    {
        Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
    }

    // Update is called once per frame
    void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {

            Mat rgbaMat = webCamTextureToMatHelper.GetMat();

            //convert image to greyscale
            Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect faces when auto-reset is on, or whenever no face is tracked yet.
            if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
            {
//              Debug.Log ("detectFace");

                //convert image to greyscale
                using (Mat equalizeHistMat = new Mat())
                using (MatOfRect faces = new MatOfRect())
                {

                    Imgproc.equalizeHist(grayMat, equalizeHistMat);

                    cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
//                  | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());

                    if (faces.rows() > 0)
                    {
//                      Debug.Log ("faces " + faces.dump ());

                        List<OpenCVForUnity.Rect> rectsList = faces.toList();
                        List<Point[]> pointsList = faceTracker.getPoints();

                        if (isAutoResetMode)
                        {
                            //add initial face points from MatOfRect
                            if (pointsList.Count <= 0)
                            {
                                faceTracker.addPoints(faces);
//                              Debug.Log ("reset faces ");
                            } else
                            {
                                // NOTE(review): RemoveAt inside a forward for-loop skips
                                // the element that follows a removal — confirm intended.
                                for (int i = 0; i < rectsList.Count; i++)
                                {
                                    OpenCVForUnity.Rect trackRect = new OpenCVForUnity.Rect(rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                    //It determines whether nose point has been included in trackRect.
                                    if (i < pointsList.Count && !trackRect.contains(pointsList [i] [67]))
                                    {
                                        rectsList.RemoveAt(i);
                                        pointsList.RemoveAt(i);
//                                      Debug.Log ("remove " + i);
                                    }
                                    Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
                                }
                            }
                        } else
                        {
                            faceTracker.addPoints(faces);
                        }

                        //draw face rect
                        for (int i = 0; i < rectsList.Count; i++)
                        {
                            // BUGFIX: the OPENCV_2 branch referenced a misspelled
                            // "rectsLIst" identifier, which failed to compile whenever
                            // OPENCV_2 was defined.
#if OPENCV_2
                            Core.rectangle (rgbaMat, new Point (rectsList [i].x, rectsList [i].y), new Point (rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar (255, 0, 0, 255), 2);
#else
                            Imgproc.rectangle(rgbaMat, new Point(rectsList [i].x, rectsList [i].y), new Point(rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar(255, 0, 0, 255), 2);
#endif
                        }

                    } else
                    {
                        if (isAutoResetMode)
                        {
                            faceTracker.reset();
                        }
                    }
                }
            }

            //track face points.if face points <= 0, always return false.
            if (faceTracker.track(grayMat, faceTrackerParams))
                faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));

#if OPENCV_2
            Core.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);
#else
            Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Core.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
#endif

//          Core.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Core.LINE_AA, false);

            Utils.matToTexture2D(rgbaMat, texture, webCamTextureToMatHelper.GetBufferColors());
        }

        if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
        {
            faceTracker.reset();
        }
    }

    /// <summary>
    /// Raises the disable event.
    /// </summary>
    void OnDisable()
    {
        webCamTextureToMatHelper.Dispose();

        if (cascade != null)
            cascade.Dispose();
    }

    /// <summary>
    /// Raises the back button event.
    /// </summary>
    public void OnBackButton()
    {
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
        SceneManager.LoadScene("FaceTrackerExample");
#else
        Application.LoadLevel("FaceTrackerExample");
#endif
    }

    /// <summary>
    /// Raises the play button event.
    /// </summary>
    public void OnPlayButton()
    {
        webCamTextureToMatHelper.Play();
    }

    /// <summary>
    /// Raises the pause button event.
    /// </summary>
    public void OnPauseButton()
    {
        webCamTextureToMatHelper.Pause();
    }

    /// <summary>
    /// Raises the stop button event.
    /// </summary>
    public void OnStopButton()
    {
        webCamTextureToMatHelper.Stop();
    }

    /// <summary>
    /// Raises the change camera button event.
    /// </summary>
    public void OnChangeCameraButton()
    {
        webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestedWidth, webCamTextureToMatHelper.requestedHeight, !webCamTextureToMatHelper.requestedIsFrontFacing);
    }

    /// <summary>
    /// Raises the change auto reset mode toggle event.
    /// </summary>
    public void OnIsAutoResetModeToggle()
    {
        if (isAutoResetModeToggle.isOn)
        {
            isAutoResetMode = true;
        } else
        {
            isAutoResetMode = false;
        }
    }
}
}

Просмотреть файл

@ -1,38 +1,30 @@
using UnityEngine;
using System.Collections;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using System.Collections;
namespace FaceTrackerExample
{
/// <summary>
/// Show license.
/// Show License
/// </summary>
public class ShowLicense : MonoBehaviour
{
// Use this for initialization
void Start()
void Start ()
{
}
// Update is called once per frame
void Update()
void Update ()
{
}
public void OnBackButton()
public void OnBackButton ()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene ("FaceTrackerExample");
#else
Application.LoadLevel("FaceTrackerExample");
#endif
}
}
}

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: fee46c03ca79c094cb33952673cf318c
folderAsset: yes
timeCreated: 1542356439
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,168 @@
using UnityEngine;
using UnityEngine.SceneManagement;
using System;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVFaceTracker;
namespace FaceTrackerExample
{
/// <summary>
/// Texture2D Face Tracker Example
/// Detects faces on a still Texture2D with a Haar cascade, fits landmarks with
/// the FaceTracker, and displays the annotated image on this GameObject.
/// </summary>
public class Texture2DFaceTrackerExample : MonoBehaviour
{
    /// <summary>
    /// The image texture.
    /// </summary>
    public Texture2D imgTexture;

    /// <summary>
    /// The tracker_model_json_filepath.
    /// </summary>
    private string tracker_model_json_filepath;

    /// <summary>
    /// The haarcascade_frontalface_alt_xml_filepath.
    /// </summary>
    private string haarcascade_frontalface_alt_xml_filepath;

#if UNITY_WEBGL && !UNITY_EDITOR
    // Handle to the running file-path coroutine so OnDestroy can stop/dispose it.
    IEnumerator getFilePath_Coroutine;
#endif

    // Use this for initialization
    void Start ()
    {
#if UNITY_WEBGL && !UNITY_EDITOR
        // WebGL cannot read StreamingAssets synchronously; resolve paths async.
        getFilePath_Coroutine = GetFilePath ();
        StartCoroutine (getFilePath_Coroutine);
#else
        tracker_model_json_filepath = Utils.getFilePath ("tracker_model.json");
        haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath ("haarcascade_frontalface_alt.xml");
        Run ();
#endif
    }

#if UNITY_WEBGL && !UNITY_EDITOR
    // Resolves the model/cascade file paths asynchronously, then runs the example.
    private IEnumerator GetFilePath ()
    {
        var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync ("tracker_model.json", (result) => {
            tracker_model_json_filepath = result;
        });
        yield return getFilePathAsync_0_Coroutine;

        var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
            haarcascade_frontalface_alt_xml_filepath = result;
        });
        yield return getFilePathAsync_1_Coroutine;

        getFilePath_Coroutine = null;

        Run();
    }
#endif

    // Runs detection + tracking once and replaces this object's texture with the result.
    private void Run ()
    {
        // Size the display quad to the image and fit the orthographic camera.
        gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
        Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        float width = 0;
        float height = 0;

        width = gameObject.transform.localScale.x;
        height = gameObject.transform.localScale.y;

        float widthScale = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;
        if (widthScale < heightScale) {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        } else {
            Camera.main.orthographicSize = height / 2;
        }

        //initialize FaceTracker
        FaceTracker faceTracker = new FaceTracker (tracker_model_json_filepath);
        //initialize FaceTrackerParams
        FaceTrackerParams faceTrackerParams = new FaceTrackerParams ();

        Mat imgMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);

        Utils.texture2DToMat (imgTexture, imgMat);
        Debug.Log ("imgMat dst ToString " + imgMat.ToString ());

        CascadeClassifier cascade = new CascadeClassifier ();
        cascade.load (haarcascade_frontalface_alt_xml_filepath);
//      if (cascade.empty())
//      {
//          Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//      }

        //convert image to greyscale
        Mat gray = new Mat ();
        Imgproc.cvtColor (imgMat, gray, Imgproc.COLOR_RGBA2GRAY);

        MatOfRect faces = new MatOfRect ();

        Imgproc.equalizeHist (gray, gray);

        // Minimum face size is 5% of the image width; no maximum.
        cascade.detectMultiScale (gray, faces, 1.1f, 2, 0
        // | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
        | Objdetect.CASCADE_SCALE_IMAGE, new Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());

        Debug.Log ("faces " + faces.dump ());

        if (faces.rows () > 0) {
            //add initial face points from MatOfRect
            faceTracker.addPoints (faces);
        }

        //track face points. If no face points were added, track() always returns false.
        if (faceTracker.track (imgMat, faceTrackerParams)) faceTracker.draw (imgMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

        Texture2D texture = new Texture2D (imgMat.cols (), imgMat.rows (), TextureFormat.RGBA32, false);

        Utils.matToTexture2D (imgMat, texture);

        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

        cascade.Dispose ();
    }

    // Update is called once per frame
    void Update ()
    {

    }

    /// <summary>
    /// Raises the destroy event.
    /// </summary>
    void OnDestroy ()
    {
#if UNITY_WEBGL && !UNITY_EDITOR
        if (getFilePath_Coroutine != null) {
            StopCoroutine (getFilePath_Coroutine);
            ((IDisposable)getFilePath_Coroutine).Dispose ();
        }
#endif
    }

    // Returns to the example selection scene.
    public void OnBackButton ()
    {
        SceneManager.LoadScene ("FaceTrackerExample");
    }
}
}

Просмотреть файл

@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: f522f537f4339854390705310cb1411a
folderAsset: yes
timeCreated: 1542356459
licenseType: Free
DefaultImporter:
userData:
assetBundleName:
assetBundleVariant:

Просмотреть файл

@ -0,0 +1,337 @@
using UnityEngine;
using UnityEngine.UI;
using UnityEngine.SceneManagement;
using System;
using System.Collections;
using System.Collections.Generic;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVFaceTracker;
namespace FaceTrackerExample
{
/// <summary>
/// WebCamTexture Face Tracker Example
/// Detects faces with a Haar cascade and tracks facial landmarks on the live
/// webcam feed using the FaceTracker class.
/// </summary>
[RequireComponent (typeof(WebCamTextureToMatHelper))]
public class WebCamTextureFaceTrackerExample : MonoBehaviour
{
    /// <summary>
    /// The auto reset mode. if true, Only if face is detected in each frame, face is tracked.
    /// </summary>
    public bool isAutoResetMode;

    /// <summary>
    /// The auto reset mode toggle.
    /// </summary>
    public Toggle isAutoResetModeToggle;

    /// <summary>
    /// The gray mat.
    /// </summary>
    Mat grayMat;

    /// <summary>
    /// The texture.
    /// </summary>
    Texture2D texture;

    /// <summary>
    /// The cascade.
    /// </summary>
    CascadeClassifier cascade;

    /// <summary>
    /// The face tracker.
    /// </summary>
    FaceTracker faceTracker;

    /// <summary>
    /// The face tracker parameters.
    /// </summary>
    FaceTrackerParams faceTrackerParams;

    /// <summary>
    /// The web cam texture to mat helper.
    /// </summary>
    WebCamTextureToMatHelper webCamTextureToMatHelper;

    /// <summary>
    /// The tracker_model_json_filepath.
    /// </summary>
    private string tracker_model_json_filepath;

    /// <summary>
    /// The haarcascade_frontalface_alt_xml_filepath.
    /// </summary>
    private string haarcascade_frontalface_alt_xml_filepath;

#if UNITY_WEBGL && !UNITY_EDITOR
    // Handle to the running file-path coroutine so OnDisable can stop/dispose it.
    IEnumerator getFilePath_Coroutine;
#endif

    // Use this for initialization
    void Start ()
    {
        webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();

        isAutoResetModeToggle.isOn = isAutoResetMode;

#if UNITY_WEBGL && !UNITY_EDITOR
        // WebGL cannot read StreamingAssets synchronously; resolve paths async.
        getFilePath_Coroutine = GetFilePath ();
        StartCoroutine (getFilePath_Coroutine);
#else
        tracker_model_json_filepath = Utils.getFilePath ("tracker_model.json");
        haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath ("haarcascade_frontalface_alt.xml");
        Run ();
#endif
    }

#if UNITY_WEBGL && !UNITY_EDITOR
    // Resolves the model/cascade file paths asynchronously, then runs the example.
    private IEnumerator GetFilePath ()
    {
        var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync ("tracker_model.json", (result) => {
            tracker_model_json_filepath = result;
        });
        yield return getFilePathAsync_0_Coroutine;

        var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
            haarcascade_frontalface_alt_xml_filepath = result;
        });
        yield return getFilePathAsync_1_Coroutine;

        getFilePath_Coroutine = null;

        Run();
    }
#endif

    // Creates the tracker and cascade, then starts webcam capture.
    private void Run ()
    {
        //initialize FaceTracker
        faceTracker = new FaceTracker (tracker_model_json_filepath);
        //initialize FaceTrackerParams
        faceTrackerParams = new FaceTrackerParams ();

        cascade = new CascadeClassifier ();
        cascade.load (haarcascade_frontalface_alt_xml_filepath);
//      if (cascade.empty())
//      {
//          Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//      }

#if UNITY_ANDROID && !UNITY_EDITOR
        // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
        webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
        webCamTextureToMatHelper.Initialize ();
    }

    /// <summary>
    /// Raises the webcam texture to mat helper initialized event.
    /// Sizes the display quad and camera to the captured frame and allocates buffers.
    /// </summary>
    public void OnWebCamTextureToMatHelperInitialized ()
    {
        Debug.Log ("OnWebCamTextureToMatHelperInitialized");

        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();

        texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);

        gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
        Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);

        float width = 0;
        float height = 0;

        width = gameObject.transform.localScale.x;
        height = gameObject.transform.localScale.y;

        // Fit the orthographic camera to whichever dimension is more constrained.
        float widthScale = (float)Screen.width / width;
        float heightScale = (float)Screen.height / height;
        if (widthScale < heightScale) {
            Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
        } else {
            Camera.main.orthographicSize = height / 2;
        }

        gameObject.GetComponent<Renderer> ().material.mainTexture = texture;

        grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
    }

    /// <summary>
    /// Raises the webcam texture to mat helper disposed event.
    /// </summary>
    public void OnWebCamTextureToMatHelperDisposed ()
    {
        Debug.Log ("OnWebCamTextureToMatHelperDisposed");

        faceTracker.reset ();

        grayMat.Dispose ();
    }

    /// <summary>
    /// Raises the webcam texture to mat helper error occurred event.
    /// </summary>
    /// <param name="errorCode">Error code.</param>
    public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
    {
        Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
    }

    // Update is called once per frame
    void Update ()
    {
        if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {

            Mat rgbaMat = webCamTextureToMatHelper.GetMat ();

            //convert image to greyscale
            Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);

            // Detect faces when auto-reset is on, or whenever no face is tracked yet.
            if (isAutoResetMode || faceTracker.getPoints ().Count <= 0) {
//              Debug.Log ("detectFace");

                //convert image to greyscale
                using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) {

                    Imgproc.equalizeHist (grayMat, equalizeHistMat);

                    // Minimum face size is 15% of the frame width; no maximum.
                    cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0
//                  | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE, new Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());

                    if (faces.rows () > 0) {
//                      Debug.Log ("faces " + faces.dump ());

                        List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList ();
                        List<Point[]> pointsList = faceTracker.getPoints ();

                        if (isAutoResetMode) {
                            //add initial face points from MatOfRect
                            if (pointsList.Count <= 0) {
                                faceTracker.addPoints (faces);
//                              Debug.Log ("reset faces ");
                            } else {

                                // NOTE(review): RemoveAt inside a forward for-loop skips
                                // the element that follows a removal — confirm intended.
                                for (int i = 0; i < rectsList.Count; i++) {
                                    // trackRect is a small region around the expected nose position.
                                    OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect (rectsList [i].x + rectsList [i].width / 3, rectsList [i].y + rectsList [i].height / 2, rectsList [i].width / 3, rectsList [i].height / 3);
                                    //It determines whether nose point has been included in trackRect.
                                    if (i < pointsList.Count && !trackRect.contains (pointsList [i] [67])) {
                                        rectsList.RemoveAt (i);
                                        pointsList.RemoveAt (i);
//                                      Debug.Log ("remove " + i);
                                    }
                                    Imgproc.rectangle (rgbaMat, new Point (trackRect.x, trackRect.y), new Point (trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar (0, 0, 255, 255), 2);
                                }
                            }
                        } else {
                            faceTracker.addPoints (faces);
                        }

                        //draw face rect
                        for (int i = 0; i < rectsList.Count; i++) {
                            Imgproc.rectangle (rgbaMat, new Point (rectsList [i].x, rectsList [i].y), new Point (rectsList [i].x + rectsList [i].width, rectsList [i].y + rectsList [i].height), new Scalar (255, 0, 0, 255), 2);
                        }

                    } else {
                        if (isAutoResetMode) {
                            faceTracker.reset ();
                        }
                    }
                }
            }

            //track face points. If no face points were added, track() always returns false.
            if (faceTracker.track (grayMat, faceTrackerParams)) faceTracker.draw (rgbaMat, new Scalar (255, 0, 0, 255), new Scalar (0, 255, 0, 255));

            Imgproc.putText (rgbaMat, "'Tap' or 'Space Key' to Reset", new Point (5, rgbaMat.rows () - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

//          Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);

            Utils.fastMatToTexture2D (rgbaMat, texture);
        }

        if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
            faceTracker.reset ();
        }
    }

    /// <summary>
    /// Raises the disable event.
    /// </summary>
    void OnDisable ()
    {
        webCamTextureToMatHelper.Dispose ();

        if (cascade != null) cascade.Dispose ();

#if UNITY_WEBGL && !UNITY_EDITOR
        if (getFilePath_Coroutine != null) {
            StopCoroutine (getFilePath_Coroutine);
            ((IDisposable)getFilePath_Coroutine).Dispose ();
        }
#endif
    }

    /// <summary>
    /// Raises the back button event.
    /// </summary>
    public void OnBackButton ()
    {
        SceneManager.LoadScene ("FaceTrackerExample");
    }

    /// <summary>
    /// Raises the play button event.
    /// </summary>
    public void OnPlayButton ()
    {
        webCamTextureToMatHelper.Play ();
    }

    /// <summary>
    /// Raises the pause button event.
    /// </summary>
    public void OnPauseButton ()
    {
        webCamTextureToMatHelper.Pause ();
    }

    /// <summary>
    /// Raises the stop button event.
    /// </summary>
    public void OnStopButton ()
    {
        webCamTextureToMatHelper.Stop ();
    }

    /// <summary>
    /// Raises the change camera button event.
    /// </summary>
    public void OnChangeCameraButton ()
    {
        webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing ();
    }

    /// <summary>
    /// Raises the change auto reset mode toggle event.
    /// </summary>
    public void OnIsAutoResetModeToggle ()
    {
        if (isAutoResetModeToggle.isOn) {
            isAutoResetMode = true;
        } else {
            isAutoResetMode = false;
        }
    }
}
}

Просмотреть файл

@ -1,19 +1,19 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!29 &1
SceneSettings:
OcclusionCullingSettings:
m_ObjectHideFlags: 0
m_PVSData:
m_PVSObjectsArray: []
m_PVSPortalsArray: []
serializedVersion: 2
m_OcclusionBakeSettings:
smallestOccluder: 5
smallestHole: 0.25
backfaceThreshold: 100
m_SceneGUID: 00000000000000000000000000000000
m_OcclusionCullingData: {fileID: 0}
--- !u!104 &2
RenderSettings:
m_ObjectHideFlags: 0
serializedVersion: 6
serializedVersion: 8
m_Fog: 0
m_FogColor: {r: 0.5, g: 0.5, b: 0.5, a: 1}
m_FogMode: 3
@ -25,6 +25,7 @@ RenderSettings:
m_AmbientGroundColor: {r: 0.2, g: 0.2, b: 0.2, a: 1}
m_AmbientIntensity: 1
m_AmbientMode: 3
m_SubtractiveShadowColor: {r: 0.42, g: 0.478, b: 0.627, a: 1}
m_SkyboxMaterial: {fileID: 0}
m_HaloStrength: 0.5
m_FlareStrength: 1
@ -37,12 +38,12 @@ RenderSettings:
m_ReflectionIntensity: 1
m_CustomReflection: {fileID: 0}
m_Sun: {fileID: 0}
m_IndirectSpecularColor: {r: 0, g: 0, b: 0, a: 1}
--- !u!157 &4
LightmapSettings:
m_ObjectHideFlags: 0
serializedVersion: 6
serializedVersion: 9
m_GIWorkflowMode: 1
m_LightmapsMode: 1
m_GISettings:
serializedVersion: 2
m_BounceScale: 1
@ -53,48 +54,72 @@ LightmapSettings:
m_EnableBakedLightmaps: 1
m_EnableRealtimeLightmaps: 0
m_LightmapEditorSettings:
serializedVersion: 3
serializedVersion: 8
m_Resolution: 1
m_BakeResolution: 50
m_TextureWidth: 1024
m_TextureHeight: 1024
m_AO: 0
m_AOMaxDistance: 1
m_Padding: 2
m_CompAOExponent: 0
m_CompAOExponentDirect: 0
m_Padding: 2
m_LightmapParameters: {fileID: 0}
m_LightmapsBakeMode: 1
m_TextureCompression: 0
m_FinalGather: 0
m_FinalGatherFiltering: 1
m_FinalGatherRayCount: 1024
m_ReflectionCompression: 2
m_MixedBakeMode: 1
m_BakeBackend: 0
m_PVRSampling: 1
m_PVRDirectSampleCount: 32
m_PVRSampleCount: 500
m_PVRBounces: 2
m_PVRFilterTypeDirect: 0
m_PVRFilterTypeIndirect: 0
m_PVRFilterTypeAO: 0
m_PVRFilteringMode: 0
m_PVRCulling: 1
m_PVRFilteringGaussRadiusDirect: 1
m_PVRFilteringGaussRadiusIndirect: 5
m_PVRFilteringGaussRadiusAO: 2
m_PVRFilteringAtrousPositionSigmaDirect: 0.5
m_PVRFilteringAtrousPositionSigmaIndirect: 2
m_PVRFilteringAtrousPositionSigmaAO: 1
m_LightingDataAsset: {fileID: 0}
m_RuntimeCPUUsage: 25
m_ShadowMaskMode: 2
--- !u!196 &5
NavMeshSettings:
serializedVersion: 2
m_ObjectHideFlags: 0
m_BuildSettings:
serializedVersion: 2
agentTypeID: 0
agentRadius: 0.5
agentHeight: 2
agentSlope: 45
agentClimb: 0.4
ledgeDropHeight: 0
maxJumpAcrossDistance: 0
accuratePlacement: 0
minRegionArea: 2
cellSize: 0.16666666
manualCellSize: 0
cellSize: 0.16666666
manualTileSize: 0
tileSize: 256
accuratePlacement: 0
m_NavMeshData: {fileID: 0}
--- !u!1 &186794849
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 186794850}
- 114: {fileID: 186794852}
- 114: {fileID: 186794851}
- component: {fileID: 186794850}
- component: {fileID: 186794852}
- component: {fileID: 186794851}
m_Layer: 5
m_Name: IsAutoResetModeToggle
m_TagString: Untagged
@ -111,12 +136,12 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 219549511}
- {fileID: 752718649}
m_Father: {fileID: 193188696}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
@ -201,10 +226,10 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100006, guid: e4ed316126173e54da8ebe67c2421b33, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 193188696}
- 114: {fileID: 193188697}
- component: {fileID: 193188696}
- component: {fileID: 193188697}
m_Layer: 5
m_Name: Menu
m_TagString: Untagged
@ -222,13 +247,13 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 1476431623}
- {fileID: 1587337940}
- {fileID: 186794850}
m_Father: {fileID: 347822784}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 0, y: 0}
@ -255,16 +280,18 @@ MonoBehaviour:
m_Spacing: 10
m_ChildForceExpandWidth: 0
m_ChildForceExpandHeight: 0
m_ChildControlWidth: 1
m_ChildControlHeight: 1
--- !u!1 &196446976
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100000, guid: e4ed316126173e54da8ebe67c2421b33, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 196446977}
- 222: {fileID: 196446979}
- 114: {fileID: 196446978}
- component: {fileID: 196446977}
- component: {fileID: 196446979}
- component: {fileID: 196446978}
m_Layer: 5
m_Name: Text
m_TagString: Untagged
@ -282,10 +309,10 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 1587337940}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 0, y: 0}
@ -337,11 +364,11 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 219549511}
- 222: {fileID: 219549513}
- 114: {fileID: 219549512}
- component: {fileID: 219549511}
- component: {fileID: 219549513}
- component: {fileID: 219549512}
m_Layer: 5
m_Name: Background
m_TagString: Untagged
@ -358,11 +385,11 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 1216475359}
m_Father: {fileID: 186794850}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 1}
m_AnchorMax: {x: 0, y: 1}
m_AnchoredPosition: {x: 20, y: -20}
@ -406,12 +433,12 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100002, guid: e4ed316126173e54da8ebe67c2421b33, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 347822784}
- 223: {fileID: 347822787}
- 114: {fileID: 347822786}
- 114: {fileID: 347822785}
- component: {fileID: 347822784}
- component: {fileID: 347822787}
- component: {fileID: 347822786}
- component: {fileID: 347822785}
m_Layer: 5
m_Name: Canvas
m_TagString: Untagged
@ -429,11 +456,11 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 0, y: 0, z: 0}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 193188696}
m_Father: {fileID: 0}
m_RootOrder: 2
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
@ -486,7 +513,7 @@ Canvas:
m_PrefabInternal: {fileID: 0}
m_GameObject: {fileID: 347822783}
m_Enabled: 1
serializedVersion: 2
serializedVersion: 3
m_RenderMode: 0
m_Camera: {fileID: 0}
m_PlaneDistance: 100
@ -495,6 +522,7 @@ Canvas:
m_OverrideSorting: 0
m_OverridePixelPerfect: 0
m_SortingBucketNormalizedSize: 0
m_AdditionalShaderChannelsFlag: 25
m_SortingLayerID: 0
m_SortingOrder: 0
m_TargetDisplay: 0
@ -503,11 +531,11 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100008, guid: e4ed316126173e54da8ebe67c2421b33, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 751411270}
- 222: {fileID: 751411272}
- 114: {fileID: 751411271}
- component: {fileID: 751411270}
- component: {fileID: 751411272}
- component: {fileID: 751411271}
m_Layer: 5
m_Name: Text
m_TagString: Untagged
@ -525,10 +553,10 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 1476431623}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 0, y: 0}
@ -580,11 +608,11 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 752718649}
- 222: {fileID: 752718651}
- 114: {fileID: 752718650}
- component: {fileID: 752718649}
- component: {fileID: 752718651}
- component: {fileID: 752718650}
m_Layer: 5
m_Name: Label
m_TagString: Untagged
@ -601,10 +629,10 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 186794850}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 20, y: -5.5}
@ -654,11 +682,11 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 1216475359}
- 222: {fileID: 1216475358}
- 114: {fileID: 1216475357}
- component: {fileID: 1216475359}
- component: {fileID: 1216475358}
- component: {fileID: 1216475357}
m_Layer: 5
m_Name: Checkmark
m_TagString: Untagged
@ -708,10 +736,10 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 219549511}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0.5, y: 0.5}
m_AnchorMax: {x: 0.5, y: 0.5}
m_AnchoredPosition: {x: 0, y: 0}
@ -722,14 +750,14 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 4: {fileID: 1362053827}
- 33: {fileID: 1362053826}
- 64: {fileID: 1362053825}
- 23: {fileID: 1362053824}
- 114: {fileID: 1362053828}
- 114: {fileID: 1362053829}
- component: {fileID: 1362053827}
- component: {fileID: 1362053826}
- component: {fileID: 1362053825}
- component: {fileID: 1362053824}
- component: {fileID: 1362053828}
- component: {fileID: 1362053829}
m_Layer: 0
m_Name: Quad
m_TagString: Untagged
@ -746,22 +774,28 @@ MeshRenderer:
m_Enabled: 1
m_CastShadows: 1
m_ReceiveShadows: 1
m_MotionVectors: 1
m_LightProbeUsage: 0
m_ReflectionProbeUsage: 1
m_Materials:
- {fileID: 2100000, guid: 2fe89ad658086d24f8fe283057df260d, type: 2}
m_SubsetIndices:
m_StaticBatchInfo:
firstSubMesh: 0
subMeshCount: 0
m_StaticBatchRoot: {fileID: 0}
m_UseLightProbes: 0
m_ReflectionProbeUsage: 1
m_ProbeAnchor: {fileID: 0}
m_LightProbeVolumeOverride: {fileID: 0}
m_ScaleInLightmap: 1
m_PreserveUVs: 0
m_IgnoreNormalsForChartDetection: 0
m_ImportantGI: 0
m_SelectedEditorRenderState: 3
m_MinimumChartSize: 4
m_AutoUVMaxDistance: 0.5
m_AutoUVMaxAngle: 89
m_LightmapParameters: {fileID: 0}
m_SortingLayerID: 0
m_SortingLayer: 0
m_SortingOrder: 0
--- !u!64 &1362053825
MeshCollider:
@ -774,6 +808,8 @@ MeshCollider:
m_Enabled: 1
serializedVersion: 2
m_Convex: 0
m_InflateMesh: 0
m_SkinWidth: 0.01
m_Mesh: {fileID: 10210, guid: 0000000000000000e000000000000000, type: 0}
--- !u!33 &1362053826
MeshFilter:
@ -791,10 +827,10 @@ Transform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 640, y: 480, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!114 &1362053828
MonoBehaviour:
m_ObjectHideFlags: 0
@ -816,18 +852,18 @@ MonoBehaviour:
m_GameObject: {fileID: 1362053823}
m_Enabled: 1
m_EditorHideFlags: 0
m_Script: {fileID: 11500000, guid: a02a8f18206fd484995e61cc923550d2, type: 3}
m_Script: {fileID: 11500000, guid: df35b0c19ca97734e87299a664cea35f, type: 3}
m_Name:
m_EditorClassIdentifier:
requestedDeviceName:
requestedWidth: 640
requestedHeight: 480
requestedIsFrontFacing: 0
requestedFPS: 30
requestedRotate90Degree: 0
flipVertical: 0
flipHorizontal: 0
timeoutFrameCount: 300
_requestedDeviceName:
_requestedWidth: 640
_requestedHeight: 480
_requestedIsFrontFacing: 0
_requestedFPS: 30
_rotate90Degree: 0
_flipVertical: 0
_flipHorizontal: 0
_timeoutFrameCount: 300
onInitialized:
m_PersistentCalls:
m_Calls:
@ -874,19 +910,19 @@ MonoBehaviour:
m_StringArgument:
m_BoolArgument: 0
m_CallState: 2
m_TypeName: FaceTrackerExample.WebCamTextureToMatHelper+ErrorUnityEvent, Assembly-CSharp,
Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
m_TypeName: OpenCVForUnity.UnityUtils.Helper.WebCamTextureToMatHelper+ErrorUnityEvent,
Assembly-CSharp, Version=0.0.0.0, Culture=neutral, PublicKeyToken=null
--- !u!1 &1462179230
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100000, guid: 1ccccd5701fc6ee48964a3c728b8ab62, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 4: {fileID: 1462179234}
- 114: {fileID: 1462179233}
- 114: {fileID: 1462179232}
- 114: {fileID: 1462179231}
- component: {fileID: 1462179234}
- component: {fileID: 1462179233}
- component: {fileID: 1462179232}
- component: {fileID: 1462179231}
m_Layer: 0
m_Name: EventSystem
m_TagString: Untagged
@ -950,22 +986,22 @@ Transform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 3
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
--- !u!1 &1476431622
GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100004, guid: e4ed316126173e54da8ebe67c2421b33, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 1476431623}
- 222: {fileID: 1476431627}
- 114: {fileID: 1476431626}
- 114: {fileID: 1476431625}
- 114: {fileID: 1476431624}
- component: {fileID: 1476431623}
- component: {fileID: 1476431627}
- component: {fileID: 1476431626}
- component: {fileID: 1476431625}
- component: {fileID: 1476431624}
m_Layer: 5
m_Name: BackButton
m_TagString: Untagged
@ -983,11 +1019,11 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 751411270}
m_Father: {fileID: 193188696}
m_RootOrder: 0
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
@ -1105,13 +1141,13 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 100010, guid: e4ed316126173e54da8ebe67c2421b33, type: 2}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 224: {fileID: 1587337940}
- 222: {fileID: 1587337939}
- 114: {fileID: 1587337938}
- 114: {fileID: 1587337937}
- 114: {fileID: 1587337936}
- component: {fileID: 1587337940}
- component: {fileID: 1587337939}
- component: {fileID: 1587337938}
- component: {fileID: 1587337937}
- component: {fileID: 1587337936}
m_Layer: 5
m_Name: ChangeCameraButton
m_TagString: Untagged
@ -1236,11 +1272,11 @@ RectTransform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 0, z: 0}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children:
- {fileID: 196446977}
m_Father: {fileID: 193188696}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 0, y: 0}
m_AnchoredPosition: {x: 0, y: 0}
@ -1251,13 +1287,13 @@ GameObject:
m_ObjectHideFlags: 0
m_PrefabParentObject: {fileID: 0}
m_PrefabInternal: {fileID: 0}
serializedVersion: 4
serializedVersion: 5
m_Component:
- 4: {fileID: 1871338440}
- 20: {fileID: 1871338439}
- 92: {fileID: 1871338438}
- 124: {fileID: 1871338437}
- 81: {fileID: 1871338436}
- component: {fileID: 1871338440}
- component: {fileID: 1871338439}
- component: {fileID: 1871338438}
- component: {fileID: 1871338437}
- component: {fileID: 1871338436}
m_Layer: 0
m_Name: Main Camera
m_TagString: MainCamera
@ -1316,6 +1352,8 @@ Camera:
m_TargetDisplay: 0
m_TargetEye: 3
m_HDR: 0
m_AllowMSAA: 1
m_ForceIntoRT: 0
m_OcclusionCulling: 1
m_StereoConvergence: 10
m_StereoSeparation: 0.022
@ -1329,7 +1367,7 @@ Transform:
m_LocalRotation: {x: 0, y: 0, z: 0, w: 1}
m_LocalPosition: {x: 0, y: 1, z: -10}
m_LocalScale: {x: 1, y: 1, z: 1}
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_Children: []
m_Father: {fileID: 0}
m_RootOrder: 1
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}