Change the line feed code [utf-8/lf]

This commit is contained in:
EnoxSoftware 2022-06-28 23:33:53 +09:00
Parent ba7af9a594
Commit 3907407b7a
13 changed files with 2729 additions and 2644 deletions

View file

@@ -1,116 +1,116 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UtilsModule;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace OpenCVFaceTracker
{
    /// <summary>
    /// Face detector.
    /// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
    /// </summary>
    public class FaceDetector
    {
        string detector_fname;
        //file containing cascade classifier
        Vector3 detector_offset;
        //offset from center of detection
        Mat reference;
        //reference shape
        CascadeClassifier detector;
        public FaceDetector()
        {
        }
        public List<Point[]> detect(Mat im, float scaleFactor, int minNeighbours, Size minSize)
        {
            //convert image to greyscale
            Mat gray = null;
            if (im.channels() == 1)
            {
                gray = im;
            }
            else
            {
                gray = new Mat();
                Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
            }
            using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect())
            {
                Imgproc.equalizeHist(gray, equalizeHistMat);
                detector.detectMultiScale(equalizeHistMat, faces, scaleFactor, minNeighbours, 0
                    | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
                    | Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size());
                if (faces.rows() < 1)
                {
                    return new List<Point[]>();
                }
                return convertMatOfRectToPoints(faces);
            }
        }
        public List<Point[]> convertMatOfRectToPoints(MatOfRect rects)
        {
            List<OpenCVForUnity.CoreModule.Rect> R = rects.toList();
            List<Point[]> points = new List<Point[]>(R.Count);
            int n = reference.rows() / 2;
            float[] reference_float = new float[reference.total()];
            MatUtils.copyFromMat<float>(reference, reference_float);
            foreach (var r in R)
            {
                Vector3 scale = detector_offset * r.width;
                Point[] p = new Point[n];
                for (int i = 0; i < n; i++)
                {
                    p[i] = new Point();
                    p[i].x = scale.z * reference_float[2 * i] + r.x + 0.5 * r.width + scale.x;
                    p[i].y = scale.z * reference_float[(2 * i) + 1] + r.y + 0.5 * r.height + scale.y;
                }
                points.Add(p);
            }
            return points;
        }
        public void read(object root_json)
        {
            IDictionary detector_json = (IDictionary)root_json;
            detector_fname = (string)detector_json["fname"];
            detector_offset = new Vector3((float)(double)detector_json["x offset"], (float)(double)detector_json["y offset"], (float)(double)detector_json["z offset"]);
            IDictionary reference_json = (IDictionary)detector_json["reference"];
            reference = new Mat((int)(long)reference_json["rows"], (int)(long)reference_json["cols"], CvType.CV_32F);
            IList data_json = (IList)reference_json["data"];
            float[] data = new float[reference.rows() * reference.cols()];
            for (int i = 0; i < data_json.Count; i++)
            {
                data[i] = (float)(double)data_json[i];
            }
            MatUtils.copyToMat(data, reference);
            detector = new CascadeClassifier(Utils.getFilePath(detector_fname));
        }
    }
}
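For orientation only, a minimal sketch of how this class is driven (not part of the commit; rgbaMat is an assumed CV_8UC4 camera frame and faceDetector is assumed to have been populated through read()):

// Illustrative call; the scaleFactor/minNeighbours/minSize values are typical cascade settings, not mandated here.
List<Point[]> initialShapes = faceDetector.detect(rgbaMat, 1.1f, 2, new Size(30, 30));
if (initialShapes.Count > 0)
    Debug.Log("Detected a face; initial shape has " + initialShapes[0].Length + " points.");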

View file

@@ -1,265 +1,265 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System.Collections;
using System.Collections.Generic;
using MiniJSON;
using UnityEngine;
#if UNITY_WSA
using UnityEngine.Windows;
using System.Text;
#else
using System.IO;
#endif
namespace OpenCVFaceTracker
{
    /// <summary>
    /// Face tracker.
    /// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
    /// </summary>
    public class FaceTracker
    {
        List<Point[]> points;
        //current tracked points
        FaceDetector detector;
        //detector for initialisation
        ShapeModel smodel;
        //shape model
        PatchModels pmodel;
        //feature detectors
        public FaceTracker(string filepath)
        {
            if (filepath == null)
            {
                Debug.LogError("tracker_model file is not loaded. Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
            }
            points = new List<Point[]>();
            string jsonText = null;
#if UNITY_WSA
            var data = File.ReadAllBytes(filepath);
            jsonText = Encoding.UTF8.GetString(data, 0, data.Length);
#else
            jsonText = File.ReadAllText(filepath);
#endif
            //TextAsset textAsset = Resources.Load (filename) as TextAsset;
            //string jsonText = textAsset.text;
            IDictionary json = (IDictionary)Json.Deserialize(jsonText);
            IDictionary ft = (IDictionary)json["ft object"];
            detector = new FaceDetector();
            detector.read(ft["detector"]);
            smodel = new ShapeModel();
            smodel.read(ft["smodel"]);
            pmodel = new PatchModels();
            pmodel.read(ft["pmodel"]);
        }
        public List<Point[]> getPoints()
        {
            return points;
        }
        public void addPoints(List<Point[]> points)
        {
            this.points.AddRange(points);
        }
        public void addPoints(MatOfRect rects)
        {
            points.AddRange(detector.convertMatOfRectToPoints(rects));
        }
        public Point[] getConnections()
        {
            Point[] c = new Point[smodel.C.rows()];
            int[] data = new int[c.Length * 2];
            MatUtils.copyFromMat<int>(smodel.C, data);
            int len = c.Length;
            for (int i = 0; i < len; i++)
            {
                c[i] = new Point(data[i * 2], data[(i * 2) + 1]);
            }
            return c;
        }
        public void reset()
        {
            //reset tracker
            points.Clear();
        }
        public bool track(Mat im, FaceTrackerParams p)
        {
            if (points.Count <= 0) return false;
            //convert image to greyscale
            Mat gray = null;
            if (im.channels() == 1)
            {
                gray = im;
            }
            else
            {
                gray = new Mat();
                Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
            }
            //initialise
            //if (!tracking)
            //points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);
            int count = points.Count;
            for (int i = 0; i < count; i++)
            {
                if (points[i].Length != smodel.npts()) return false;
                //fit
                int size_count = p.ssize.Count;
                for (int level = 0; level < size_count; level++)
                {
                    points[i] = fit(gray, points[i], p.ssize[level], p.robust, p.itol, p.ftol);
                }
            }
            return true;
        }
        public void draw(Mat im, Scalar pts_color, Scalar con_color)
        {
            int[] smodel_C_int = new int[smodel.C.total()];
            MatUtils.copyFromMat<int>(smodel.C, smodel_C_int);
            foreach (var point in points)
            {
                int n = point.Length;
                if (n == 0) return;
                int rows = smodel.C.rows();
                int cols = smodel.C.cols();
                for (int i = 0; i < rows; i++)
                {
                    int j = smodel_C_int[i * cols], k = smodel_C_int[(i * cols) + 1];
#if OPENCV_2
                    Core.line(im, point[j], point[k], con_color, 1);
#else
                    Imgproc.line(im, point[j], point[k], con_color, 1);
#endif
                }
                for (int i = 0; i < n; i++)
                {
#if OPENCV_2
                    Core.circle (im, point [i], 1, pts_color, 2, Core.LINE_AA, 0);
#else
                    Imgproc.circle(im, point[i], 1, pts_color, 2, Imgproc.LINE_AA, 0);
#endif
                }
            }
        }
        private Point[] fit(Mat image, Point[] init, Size ssize, bool robust, int itol, double ftol)
        {
            int n = smodel.npts();
            //assert((int(init.size())==n) && (pmodel.n_patches()==n));
            //Debug.Log ("init.size())==n " + init.Length + " " + n);
            //Debug.Log ("pmodel.n_patches()==n " + pmodel.n_patches () + " " + n);
            smodel.calc_params(init, new Mat(), 3.0f);
            Point[] pts = smodel.calc_shape();
            //find facial features in image around current estimates
            Point[] peaks = pmodel.calc_peaks(image, pts, ssize);
            //optimise
            if (!robust)
            {
                smodel.calc_params(peaks, new Mat(), 3.0f); //compute shape model parameters
                pts = smodel.calc_shape(); //update shape
            }
            else
            {
                using (Mat weight = new Mat(n, 1, CvType.CV_32F)) using (Mat weight_sort = new Mat(n, 1, CvType.CV_32F))
                {
                    float[] weight_float = new float[weight.total()];
                    MatUtils.copyFromMat<float>(weight, weight_float);
                    float[] weight_sort_float = new float[weight_sort.total()];
                    Point[] pts_old = pts;
                    for (int iter = 0; iter < itol; iter++)
                    {
                        //compute robust weight
                        for (int i = 0; i < n; i++)
                        {
                            using (MatOfPoint tmpMat = new MatOfPoint(new Point(pts[i].x - peaks[i].x, pts[i].y - peaks[i].y)))
                            {
                                weight_float[i] = (float)Core.norm(tmpMat);
                            }
                        }
                        MatUtils.copyToMat(weight_float, weight);
                        Core.sort(weight, weight_sort, Core.SORT_EVERY_COLUMN | Core.SORT_ASCENDING);
                        MatUtils.copyFromMat<float>(weight_sort, weight_sort_float);
                        double var = 1.4826 * weight_sort_float[n / 2];
                        if (var < 0.1) var = 0.1;
                        Core.pow(weight, 2, weight);
                        Core.multiply(weight, new Scalar(-0.5 / (var * var)), weight);
                        Core.exp(weight, weight);
                        //compute shape model parameters
                        smodel.calc_params(peaks, weight, 3.0f);
                        //update shape
                        pts = smodel.calc_shape();
                        //check for convergence
                        float v = 0;
                        for (int i = 0; i < n; i++)
                        {
                            using (MatOfPoint tmpMat = new MatOfPoint(new Point(pts[i].x - pts_old[i].x, pts[i].y - pts_old[i].y)))
                            {
                                v += (float)Core.norm(tmpMat);
                            }
                        }
                        if (v < ftol)
                        {
                            break;
                        }
                        else
                        {
                            pts_old = pts;
                        }
                    }
                }
            }
            return pts;
        }
    }
}
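For orientation only, a rough per-frame loop built on the methods above (not part of the commit; faceTracker, rgbaMat, and detectedFaceRects are placeholders, and the tracker_model path is whatever the example scene actually ships with):

// One-time setup (the path string is an assumption; the constructor expects the tracker_model JSON file).
FaceTracker faceTracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
FaceTrackerParams trackerParams = new FaceTrackerParams();
// Per frame: seed with cascade detections when the track is empty, then fit and draw.
if (faceTracker.getPoints().Count <= 0)
    faceTracker.addPoints(detectedFaceRects);        // detectedFaceRects: a MatOfRect from a CascadeClassifier (assumed)
if (faceTracker.track(rgbaMat, trackerParams))
    faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
else
    faceTracker.reset();                             // lose the track and wait for a new detection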

View file

@@ -1,47 +1,47 @@
using OpenCVForUnity.CoreModule;
using System.Collections.Generic;
namespace OpenCVFaceTracker
{
    /// <summary>
    /// Face tracker parameters.
    /// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
    /// </summary>
    public class FaceTrackerParams
    {
        public List<Size> ssize;
        //search region size/level
        public bool robust;
        //use robust fitting?
        public int itol;
        //maximum number of iterations to try
        public double ftol;
        //convergence tolerance
        // public float scaleFactor; //OpenCV Cascade detector parameters
        // public int minNeighbours; //...
        // public Size minSize;
        public FaceTrackerParams(bool robust = false, int itol = 20, double ftol = 1e-3, List<Size> ssize = null)
        {
            if (ssize == null)
            {
                this.ssize = new List<Size>();
                this.ssize.Add(new Size(21, 21));
                this.ssize.Add(new Size(11, 11));
                this.ssize.Add(new Size(5, 5));
            }
            else
            {
                this.ssize = ssize;
            }
            this.robust = robust;
            this.itol = itol;
            this.ftol = ftol;
            //scaleFactor = 1.1f;
            //minNeighbours = 2;
            //minSize = new Size (30, 30);
        }
    }
}
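As a small aside (not part of the diff), the defaults above give a three-level coarse-to-fine search pyramid; a caller could also supply its own region sizes, for example:

// Default: 21x21, 11x11, 5x5 search windows, 20 iterations, 1e-3 convergence tolerance.
FaceTrackerParams defaults = new FaceTrackerParams();
// Hypothetical custom setup: a coarser two-level search with robust fitting enabled.
FaceTrackerParams custom = new FaceTrackerParams(
    robust: true,
    itol: 30,
    ftol: 1e-4,
    ssize: new List<Size> { new Size(31, 31), new Size(15, 15) });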

View file

@@ -32,7 +32,8 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
namespace MiniJSON
{
    // Example usage:
    //
    //  using UnityEngine;
@@ -73,29 +74,35 @@ namespace MiniJSON {
    /// JSON uses Arrays and Objects. These correspond here to the datatypes IList and IDictionary.
    /// All numbers are parsed to doubles.
    /// </summary>
    public static class Json
    {
        /// <summary>
        /// Parses the string json into a value
        /// </summary>
        /// <param name="json">A JSON string.</param>
        /// <returns>An List&lt;object&gt;, a Dictionary&lt;string, object&gt;, a double, an integer,a string, null, true, or false</returns>
        public static object Deserialize(string json)
        {
            // save the string for debug information
            if (json == null)
            {
                return null;
            }
            return Parser.Parse(json);
        }
        sealed class Parser : IDisposable
        {
            const string WORD_BREAK = "{}[],:\"";
            public static bool IsWordBreak(char c)
            {
                return Char.IsWhiteSpace(c) || WORD_BREAK.IndexOf(c) != -1;
            }
            enum TOKEN
            {
                NONE,
                CURLY_OPEN,
                CURLY_CLOSE,
@@ -112,58 +119,68 @@ namespace MiniJSON {
            StringReader json;
            Parser(string jsonString)
            {
                json = new StringReader(jsonString);
            }
            public static object Parse(string jsonString)
            {
                using (var instance = new Parser(jsonString))
                {
                    return instance.ParseValue();
                }
            }
            public void Dispose()
            {
                json.Dispose();
                json = null;
            }
            Dictionary<string, object> ParseObject()
            {
                Dictionary<string, object> table = new Dictionary<string, object>();
                // ditch opening brace
                json.Read();
                // {
                while (true)
                {
                    switch (NextToken)
                    {
                        case TOKEN.NONE:
                            return null;
                        case TOKEN.COMMA:
                            continue;
                        case TOKEN.CURLY_CLOSE:
                            return table;
                        default:
                            // name
                            string name = ParseString();
                            if (name == null)
                            {
                                return null;
                            }
                            // :
                            if (NextToken != TOKEN.COLON)
                            {
                                return null;
                            }
                            // ditch the colon
                            json.Read();
                            // value
                            table[name] = ParseValue();
                            break;
                    }
                }
            }
            List<object> ParseArray()
            {
                List<object> array = new List<object>();
                // ditch opening bracket
@@ -171,55 +188,61 @@ namespace MiniJSON {
                // [
                var parsing = true;
                while (parsing)
                {
                    TOKEN nextToken = NextToken;
                    switch (nextToken)
                    {
                        case TOKEN.NONE:
                            return null;
                        case TOKEN.COMMA:
                            continue;
                        case TOKEN.SQUARED_CLOSE:
                            parsing = false;
                            break;
                        default:
                            object value = ParseByToken(nextToken);
                            array.Add(value);
                            break;
                    }
                }
                return array;
            }
            object ParseValue()
            {
                TOKEN nextToken = NextToken;
                return ParseByToken(nextToken);
            }
            object ParseByToken(TOKEN token)
            {
                switch (token)
                {
                    case TOKEN.STRING:
                        return ParseString();
                    case TOKEN.NUMBER:
                        return ParseNumber();
                    case TOKEN.CURLY_OPEN:
                        return ParseObject();
                    case TOKEN.SQUARED_OPEN:
                        return ParseArray();
                    case TOKEN.TRUE:
                        return true;
                    case TOKEN.FALSE:
                        return false;
                    case TOKEN.NULL:
                        return null;
                    default:
                        return null;
                }
            }
            string ParseString()
            {
                StringBuilder s = new StringBuilder();
                char c;
@@ -227,70 +250,78 @@ namespace MiniJSON {
                json.Read();
                bool parsing = true;
                while (parsing)
                {
                    if (json.Peek() == -1)
                    {
                        parsing = false;
                        break;
                    }
                    c = NextChar;
                    switch (c)
                    {
                        case '"':
                            parsing = false;
                            break;
                        case '\\':
                            if (json.Peek() == -1)
                            {
                                parsing = false;
                                break;
                            }
                            c = NextChar;
                            switch (c)
                            {
                                case '"':
                                case '\\':
                                case '/':
                                    s.Append(c);
                                    break;
                                case 'b':
                                    s.Append('\b');
                                    break;
                                case 'f':
                                    s.Append('\f');
                                    break;
                                case 'n':
                                    s.Append('\n');
                                    break;
                                case 'r':
                                    s.Append('\r');
                                    break;
                                case 't':
                                    s.Append('\t');
                                    break;
                                case 'u':
                                    var hex = new char[4];
                                    for (int i = 0; i < 4; i++)
                                    {
                                        hex[i] = NextChar;
                                    }
                                    s.Append((char)Convert.ToInt32(new string(hex), 16));
                                    break;
                            }
                            break;
                        default:
                            s.Append(c);
                            break;
                    }
                }
                return s.ToString();
            }
            object ParseNumber()
            {
                string number = NextWord;
                if (number.IndexOf('.') == -1)
                {
                    long parsedInt;
                    Int64.TryParse(number, out parsedInt);
                    return parsedInt;
@@ -301,36 +332,47 @@ namespace MiniJSON {
                return parsedDouble;
            }
            void EatWhitespace()
            {
                while (Char.IsWhiteSpace(PeekChar))
                {
                    json.Read();
                    if (json.Peek() == -1)
                    {
                        break;
                    }
                }
            }
            char PeekChar
            {
                get
                {
                    return Convert.ToChar(json.Peek());
                }
            }
            char NextChar
            {
                get
                {
                    return Convert.ToChar(json.Read());
                }
            }
            string NextWord
            {
                get
                {
                    StringBuilder word = new StringBuilder();
                    while (!IsWordBreak(PeekChar))
                    {
                        word.Append(NextChar);
                        if (json.Peek() == -1)
                        {
                            break;
                        }
                    }
@@ -339,53 +381,58 @@ namespace MiniJSON {
                }
            }
            TOKEN NextToken
            {
                get
                {
                    EatWhitespace();
                    if (json.Peek() == -1)
                    {
                        return TOKEN.NONE;
                    }
                    switch (PeekChar)
                    {
                        case '{':
                            return TOKEN.CURLY_OPEN;
                        case '}':
                            json.Read();
                            return TOKEN.CURLY_CLOSE;
                        case '[':
                            return TOKEN.SQUARED_OPEN;
                        case ']':
                            json.Read();
                            return TOKEN.SQUARED_CLOSE;
                        case ',':
                            json.Read();
                            return TOKEN.COMMA;
                        case '"':
                            return TOKEN.STRING;
                        case ':':
                            return TOKEN.COLON;
                        case '0':
                        case '1':
                        case '2':
                        case '3':
                        case '4':
                        case '5':
                        case '6':
                        case '7':
                        case '8':
                        case '9':
                        case '-':
                            return TOKEN.NUMBER;
                    }
                    switch (NextWord)
                    {
                        case "false":
                            return TOKEN.FALSE;
                        case "true":
                            return TOKEN.TRUE;
                        case "null":
                            return TOKEN.NULL;
                    }
                    return TOKEN.NONE;
@@ -398,18 +445,22 @@ namespace MiniJSON {
        /// </summary>
        /// <param name="json">A Dictionary&lt;string, object&gt; / List&lt;object&gt;</param>
        /// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns>
        public static string Serialize(object obj)
        {
            return Serializer.Serialize(obj);
        }
        sealed class Serializer
        {
            StringBuilder builder;
            Serializer()
            {
                builder = new StringBuilder();
            }
            public static string Serialize(object obj)
            {
                var instance = new Serializer();
                instance.SerializeValue(obj);
@@ -417,35 +468,52 @@ namespace MiniJSON {
                return instance.builder.ToString();
            }
            void SerializeValue(object value)
            {
                IList asList;
                IDictionary asDict;
                string asStr;
                if (value == null)
                {
                    builder.Append("null");
                }
                else if ((asStr = value as string) != null)
                {
                    SerializeString(asStr);
                }
                else if (value is bool)
                {
                    builder.Append((bool)value ? "true" : "false");
                }
                else if ((asList = value as IList) != null)
                {
                    SerializeArray(asList);
                }
                else if ((asDict = value as IDictionary) != null)
                {
                    SerializeObject(asDict);
                }
                else if (value is char)
                {
                    SerializeString(new string((char)value, 1));
                }
                else
                {
                    SerializeOther(value);
                }
            }
            void SerializeObject(IDictionary obj)
            {
                bool first = true;
                builder.Append('{');
                foreach (object e in obj.Keys)
                {
                    if (!first)
                    {
                        builder.Append(',');
                    }
@@ -460,13 +528,16 @@ namespace MiniJSON {
                builder.Append('}');
            }
            void SerializeArray(IList anArray)
            {
                builder.Append('[');
                bool first = true;
                foreach (object obj in anArray)
                {
                    if (!first)
                    {
                        builder.Append(',');
                    }
@@ -478,67 +549,81 @@ namespace MiniJSON {
                builder.Append(']');
            }
            void SerializeString(string str)
            {
                builder.Append('\"');
                char[] charArray = str.ToCharArray();
                foreach (var c in charArray)
                {
                    switch (c)
                    {
                        case '"':
                            builder.Append("\\\"");
                            break;
                        case '\\':
                            builder.Append("\\\\");
                            break;
                        case '\b':
                            builder.Append("\\b");
                            break;
                        case '\f':
                            builder.Append("\\f");
                            break;
                        case '\n':
                            builder.Append("\\n");
                            break;
                        case '\r':
                            builder.Append("\\r");
                            break;
                        case '\t':
                            builder.Append("\\t");
                            break;
                        default:
                            int codepoint = Convert.ToInt32(c);
                            if ((codepoint >= 32) && (codepoint <= 126))
                            {
                                builder.Append(c);
                            }
                            else
                            {
                                builder.Append("\\u");
                                builder.Append(codepoint.ToString("x4"));
                            }
                            break;
                    }
                }
                builder.Append('\"');
            }
            void SerializeOther(object value)
            {
                // NOTE: decimals lose precision during serialization.
                // They always have, I'm just letting you know.
                // Previously floats and doubles lost precision too.
                if (value is float)
                {
                    builder.Append(((float)value).ToString("R"));
                }
                else if (value is int
                    || value is uint
                    || value is long
                    || value is sbyte
                    || value is byte
                    || value is short
                    || value is ushort
                    || value is ulong)
                {
                    builder.Append(value);
                }
                else if (value is double
                    || value is decimal)
                {
                    builder.Append(Convert.ToDouble(value).ToString("R"));
                }
                else
                {
                    SerializeString(value.ToString());
                }
            }
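For orientation, this is the round trip the tracker relies on (a minimal sketch; the JSON literal and variable names are invented for illustration):

using System.Collections;
using System.Collections.Generic;
using MiniJSON;

// Hypothetical round trip through the MiniJSON API used by FaceTracker, ShapeModel, and PatchModels.
var dict = (Dictionary<string, object>)Json.Deserialize("{\"rows\": 2, \"data\": [1.0, 2.5]}");
long rows = (long)dict["rows"];                   // whole numbers come back as long
double first = (double)((IList)dict["data"])[0];  // numbers with a decimal point come back as double
string json = Json.Serialize(dict);               // serialize back to a JSON string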

View file

@@ -1,102 +1,102 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System.Collections;
using UnityEngine;
namespace OpenCVFaceTracker
{
    /// <summary>
    /// Patch model.
    /// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
    /// </summary>
    public class PatchModel
    {
        Mat P;
        //normalised patch
        public PatchModel()
        {
        }
        //size of patch model
        public Size patch_size()
        {
            return P.size();
        }
        Mat convert_image(Mat im)
        {
            Mat I = null;
            if (im.channels() == 1)
            {
                if (im.type() != CvType.CV_32F)
                {
                    I = new Mat();
                    im.convertTo(I, CvType.CV_32F);
                }
                else
                {
                    I = im;
                }
            }
            else
            {
                if (im.channels() == 3)
                {
                    Mat img = new Mat();
                    Imgproc.cvtColor(im, img, Imgproc.COLOR_RGBA2GRAY);
                    if (img.type() != CvType.CV_32F)
                    {
                        I = new Mat();
                        img.convertTo(I, CvType.CV_32F);
                    }
                    else
                    {
                        I = img;
                    }
                }
                else
                {
                    Debug.Log("Unsupported image type!");
                }
            }
            Core.add(I, new Scalar(1.0), I);
            Core.log(I, I);
            return I;
        }
        public Mat calc_response(Mat im, bool sum2one)
        {
            Mat I = convert_image(im);
            Mat res = new Mat();
            Imgproc.matchTemplate(I, P, res, Imgproc.TM_CCOEFF_NORMED);
            if (sum2one)
            {
                Core.normalize(res, res, 0, 1, Core.NORM_MINMAX);
                Core.divide(res, new Scalar(Core.sumElems(res).val[0]), res);
            }
            return res;
        }
        public void read(object root_json)
        {
            IDictionary pmodel_json = (IDictionary)root_json;
            IDictionary P_json = (IDictionary)pmodel_json["P"];
            P = new Mat((int)(long)P_json["rows"], (int)(long)P_json["cols"], CvType.CV_32F);
            //Debug.Log ("P " + P.ToString ());
            IList P_data_json = (IList)P_json["data"];
            float[] P_data = new float[P.rows() * P.cols()];
            for (int i = 0; i < P_data_json.Count; i++)
            {
                P_data[i] = (float)(double)P_data_json[i];
            }
            MatUtils.copyToMat(P_data, P);
        }
    }
}
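For orientation only, a sketch of how one patch model could be queried (patch is assumed to have been loaded via read(), and roi is an assumed grayscale region somewhat larger than patch_size()):

// Hypothetical call: the response map peaks where the normalised patch matches best.
Mat response = patch.calc_response(roi, true);   // sum2one: normalise so the map sums to one
Core.MinMaxLocResult mm = Core.minMaxLoc(response);
Point bestOffset = mm.maxLoc;                    // strongest feature location inside roi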

View file

@@ -1,223 +1,223 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Collections;
using System.Collections.Generic;
namespace OpenCVFaceTracker
{
    /// <summary>
    /// Patch models.
    /// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
    /// </summary>
    public class PatchModels
    {
        public Mat reference;
        //reference shape
        public IList<PatchModel> patches;
        //patch models
        public PatchModels()
        {
        }
        //number of patches
        public int n_patches()
        {
            return patches.Count;
        }
        public Point[] calc_peaks(Mat im, Point[] points, Size ssize)
        {
            int n = points.Length;
            using (Mat pt = (new MatOfPoint2f(points)).reshape(1, 2 * n)) using (Mat S = calc_simil(pt)) using (Mat Si = inv_simil(S))
            {
                float[] pt_float = new float[pt.total()];
                MatUtils.copyFromMat<float>(pt, pt_float);
                int pt_cols = pt.cols();
                float[] S_float = new float[S.total()];
                MatUtils.copyFromMat<float>(S, S_float);
                int S_cols = S.cols();
                float[] A_float = new float[2 * 3];
                Point[] pts = apply_simil(Si, points);
                for (int i = 0; i < n; i++)
                {
                    Size wsize = new Size(ssize.width + patches[i].patch_size().width, ssize.height + patches[i].patch_size().height);
                    using (Mat A = new Mat(2, 3, CvType.CV_32F))
                    {
                        MatUtils.copyFromMat<float>(A, A_float);
                        int A_cols = A.cols();
                        A_float[0] = S_float[0];
                        A_float[1] = S_float[1];
                        A_float[1 * A_cols] = S_float[1 * S_cols];
                        A_float[(1 * A_cols) + 1] = S_float[(1 * S_cols) + 1];
                        A_float[2] = (float)(pt_float[(2 * pt_cols) * i] -
                            (A_float[0] * (wsize.width - 1) / 2 + A_float[1] * (wsize.height - 1) / 2));
                        A_float[(1 * A_cols) + 2] = (float)(pt_float[((2 * pt_cols) * i) + 1] -
                            (A_float[1 * A_cols] * (wsize.width - 1) / 2 + A_float[(1 * A_cols) + 1] * (wsize.height - 1) / 2));
                        MatUtils.copyToMat(A_float, A);
                        using (Mat I = new Mat())
                        {
                            Imgproc.warpAffine(im, I, A, wsize, Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP);
                            using (Mat R = patches[i].calc_response(I, false))
                            {
                                Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(R);
                                pts[i].x = pts[i].x + minMaxLocResult.maxLoc.x - 0.5 * ssize.width;
                                pts[i].y = pts[i].y + minMaxLocResult.maxLoc.y - 0.5 * ssize.height;
                            }
                        }
                    }
                }
                return apply_simil(S, pts);
            }
        }
        Point[] apply_simil(Mat S, Point[] points)
        {
            float[] S_float = new float[S.total()];
            MatUtils.copyFromMat<float>(S, S_float);
            int S_cols = S.cols();
            int n = points.Length;
            Point[] p = new Point[n];
            for (int i = 0; i < n; i++)
            {
                p[i] = new Point();
                p[i].x = S_float[0] * points[i].x + S_float[1] * points[i].y + S_float[2];
                p[i].y = S_float[1 * S_cols] * points[i].x + S_float[(1 * S_cols) + 1] * points[i].y + S_float[(1 * S_cols) + 2];
            }
            return p;
        }
        Mat inv_simil(Mat S)
        {
            float[] S_float = new float[S.total()];
            MatUtils.copyFromMat<float>(S, S_float);
            int S_cols = S.cols();
            Mat Si = new Mat(2, 3, CvType.CV_32F);
            float d = S_float[0] * S_float[(1 * S_cols) + 1] - S_float[1 * S_cols] * S_float[1];
            float[] Si_float = new float[Si.total()];
            MatUtils.copyFromMat<float>(Si, Si_float);
            int Si_cols = Si.cols();
            Si_float[0] = S_float[(1 * S_cols) + 1] / d;
            Si_float[1] = -S_float[1] / d;
            Si_float[(1 * Si_cols) + 1] = S_float[0] / d;
            Si_float[1 * Si_cols] = -S_float[1 * S_cols] / d;
            MatUtils.copyToMat(Si_float, Si);
            Mat Ri = new Mat(Si, new OpenCVForUnity.CoreModule.Rect(0, 0, 2, 2));
            Mat negaRi = new Mat();
            Core.multiply(Ri, new Scalar(-1), negaRi);
            Mat t = new Mat();
            Core.gemm(negaRi, S.col(2), 1, new Mat(negaRi.rows(), negaRi.cols(), negaRi.type()), 0, t);
            Mat St = Si.col(2);
            t.copyTo(St);
            return Si;
        }
        Mat calc_simil(Mat pts)
        {
            float[] pts_float = new float[pts.total()];
            MatUtils.copyFromMat<float>(pts, pts_float);
            int pts_cols = pts.cols();
            //compute translation
            int n = pts.rows() / 2;
            float mx = 0, my = 0;
            for (int i = 0; i < n; i++)
            {
                mx += pts_float[(2 * pts_cols) * i];
                my += pts_float[((2 * pts_cols) * i) + 1];
            }
            using (Mat p = new Mat(2 * n, 1, CvType.CV_32F))
            {
                float[] p_float = new float[p.total()];
                MatUtils.copyFromMat<float>(p, p_float);
                int p_cols = p.cols();
                mx /= n;
                my /= n;
                for (int i = 0; i < n; i++)
                {
                    p_float[(2 * p_cols) * i] = pts_float[(2 * pts_cols) * i] - mx;
                    p_float[((2 * p_cols) * i) + 1] = pts_float[((2 * pts_cols) * i) + 1] - my;
                }
                MatUtils.copyToMat(p_float, p);
                //compute rotation and scale
                float[] reference_float = new float[reference.total()];
                MatUtils.copyFromMat<float>(reference, reference_float);
                int reference_cols = reference.cols();
                float a = 0, b = 0, c = 0;
                for (int i = 0; i < n; i++)
                {
                    a += reference_float[(2 * reference_cols) * i] * reference_float[(2 * reference_cols) * i] +
                        reference_float[((2 * reference_cols) * i) + 1] * reference_float[((2 * reference_cols) * i) + 1];
                    b += reference_float[(2 * reference_cols) * i] * p_float[(2 * p_cols) * i] +
                        reference_float[((2 * reference_cols) * i) + 1] * p_float[((2 * p_cols) * i) + 1];
                    c += reference_float[(2 * reference_cols) * i] * p_float[((2 * p_cols) * i) + 1] -
                        reference_float[((2 * reference_cols) * i) + 1] * p_float[(2 * p_cols) * i];
                }
                b /= a;
                c /= a;
                float scale = (float)Math.Sqrt(b * b + c * c), theta = (float)Math.Atan2(c, b);
                float sc = scale * (float)Math.Cos(theta), ss = scale * (float)Math.Sin(theta);
                Mat returnMat = new Mat(2, 3, CvType.CV_32F);
                returnMat.put(0, 0, sc, -ss, mx, ss, sc, my);
                return returnMat;
            }
        }
        public void read(object root_json)
        {
            IDictionary pmodels_json = (IDictionary)root_json;
            IDictionary reference_json = (IDictionary)pmodels_json["reference"];
            reference = new Mat((int)(long)reference_json["rows"], (int)(long)reference_json["cols"], CvType.CV_32F);
            IList data_json = (IList)reference_json["data"];
            float[] data = new float[reference.rows() * reference.cols()];
            int count = data_json.Count;
            for (int i = 0; i < count; i++)
            {
                data[i] = (float)(double)data_json[i];
            }
            MatUtils.copyToMat(data, reference);
            int n = (int)(long)pmodels_json["n_patches"];
            patches = new List<PatchModel>(n);
            for (int i = 0; i < n; i++)
            {
                PatchModel patchModel = new PatchModel();
                patchModel.read(pmodels_json["patch " + i]);
                patches.Add(patchModel);
            }
        }
    }
}
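For orientation only, a one-step sketch of how the tracker drives this class (pmodel is assumed to have been loaded via read(), grayMat is an assumed grayscale frame, and currentShape comes from ShapeModel.calc_shape()):

// Hypothetical fitting step: each landmark is moved to the response peak inside an 11x11 window around it.
Point[] refined = pmodel.calc_peaks(grayMat, currentShape, new Size(11, 11));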

View file

@ -1,185 +1,185 @@
using OpenCVForUnity.CoreModule; using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UtilsModule; using OpenCVForUnity.UtilsModule;
using System; using System;
using System.Collections; using System.Collections;
using UnityEngine; using UnityEngine;
namespace OpenCVFaceTracker namespace OpenCVFaceTracker
{ {
/// <summary> /// <summary>
/// Shape model. /// Shape model.
/// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”. /// Code is the rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using the “OpenCV for Unity”.
/// </summary> /// </summary>
public class ShapeModel public class ShapeModel
{ {
public Mat p; public Mat p;
//parameter vector (kx1) CV_32F //parameter vector (kx1) CV_32F
public Mat V; public Mat V;
//shape basis (2nxk) CV_32F //shape basis (2nxk) CV_32F
public Mat e; public Mat e;
//parameter variance (kx1) CV_32F //parameter variance (kx1) CV_32F
public Mat C; public Mat C;
//connectivity (cx2) CV_32S //connectivity (cx2) CV_32S
public ShapeModel() public ShapeModel()
{ {
} }
public int npts() public int npts()
{ {
//number of points in shape model //number of points in shape model
return V.rows() / 2; return V.rows() / 2;
} }
public void calc_params(Point[] pts, Mat weight, float c_factor) public void calc_params(Point[] pts, Mat weight, float c_factor)
{ {
int n = pts.Length; int n = pts.Length;
//assert(V.rows == 2*n); //assert(V.rows == 2*n);
//Debug.Log ("V.rows == 2*n " + V.rows () + " " + 2 * n); //Debug.Log ("V.rows == 2*n " + V.rows () + " " + 2 * n);
using (Mat s = (new MatOfPoint2f(pts)).reshape(1, 2 * n)) using (Mat s = (new MatOfPoint2f(pts)).reshape(1, 2 * n))
{ //point set to vector format { //point set to vector format
if (weight.total() == 0) if (weight.total() == 0)
{ {
Core.gemm(V.t(), s, 1, new Mat(), 0, p); //simple projection Core.gemm(V.t(), s, 1, new Mat(), 0, p); //simple projection
} }
else else
{ {
//scaled projection //scaled projection
if (weight.rows() != n) if (weight.rows() != n)
{ {
Debug.Log("Invalid weighting matrix"); Debug.Log("Invalid weighting matrix");
} }
float[] weight_float = new float[weight.total()]; float[] weight_float = new float[weight.total()];
MatUtils.copyFromMat<float>(weight, weight_float); MatUtils.copyFromMat<float>(weight, weight_float);
int weight_cols = weight.cols(); int weight_cols = weight.cols();
int K = V.cols(); int K = V.cols();
using (Mat H = Mat.zeros(K, K, CvType.CV_32F)) using (Mat g = Mat.zeros(K, 1, CvType.CV_32F)) using (Mat H = Mat.zeros(K, K, CvType.CV_32F)) using (Mat g = Mat.zeros(K, 1, CvType.CV_32F))
{ {
for (int i = 0; i < n; i++) for (int i = 0; i < n; i++)
{ {
using (Mat v = new Mat(V, new OpenCVForUnity.CoreModule.Rect(0, 2 * i, K, 2))) using (Mat tmpMat1 = new Mat()) using (Mat tmpMat2 = new Mat()) using (Mat tmpMat3 = new Mat()) using (Mat v = new Mat(V, new OpenCVForUnity.CoreModule.Rect(0, 2 * i, K, 2))) using (Mat tmpMat1 = new Mat()) using (Mat tmpMat2 = new Mat()) using (Mat tmpMat3 = new Mat())
{ {
float w = weight_float[i * weight_cols]; float w = weight_float[i * weight_cols];
Core.multiply(v.t(), new Scalar(w), tmpMat1); Core.multiply(v.t(), new Scalar(w), tmpMat1);
Core.gemm(tmpMat1, v, 1, new Mat(), 0, tmpMat2); Core.gemm(tmpMat1, v, 1, new Mat(), 0, tmpMat2);
Core.add(H, tmpMat2, H); Core.add(H, tmpMat2, H);
Core.gemm(tmpMat1, new MatOfPoint2f(pts[i]).reshape(1, 2), 1, new Mat(), 0, tmpMat3); Core.gemm(tmpMat1, new MatOfPoint2f(pts[i]).reshape(1, 2), 1, new Mat(), 0, tmpMat3);
Core.add(g, tmpMat3, g); Core.add(g, tmpMat3, g);
} }
} }
Core.solve(H, g, p, Core.DECOMP_SVD); Core.solve(H, g, p, Core.DECOMP_SVD);
} }
} }
} }
clamp(c_factor); //clamp resulting parameters clamp(c_factor); //clamp resulting parameters
} }
public Point[] calc_shape() public Point[] calc_shape()
{ {
using (Mat s = new Mat()) using (Mat s = new Mat())
{ {
Core.gemm(V, p, 1, new Mat(), 0, s); Core.gemm(V, p, 1, new Mat(), 0, s);
float[] s_float = new float[s.total()];
MatUtils.copyFromMat<float>(s, s_float);
int s_cols = s.cols();
int n = s.rows() / 2;
Point[] pts = new Point[n];
for (int i = 0; i < n; i++)
{
pts[i] = new Point(s_float[(2 * s_cols) * i], s_float[((2 * s_cols) * i) + 1]);
}
return pts;
}
}
void clamp(float c)
{
float[] p_float = new float[p.total()];
MatUtils.copyFromMat<float>(p, p_float);
int p_cols = p.cols();
float[] e_float = new float[e.total()];
MatUtils.copyFromMat<float>(e, e_float);
int e_cols = e.cols();
double scale = p_float[0];
int rows = e.rows();
for (int i = 0; i < rows; i++)
{
if (e_float[i * e_cols] < 0) continue;
float v = c * (float)Math.Sqrt(e_float[i * e_cols]);
if (Math.Abs(p_float[i * p_cols] / scale) > v)
{
if (p_float[i * p_cols] > 0)
{
p_float[i * p_cols] = (float)(v * scale);
}
else
{
p_float[i * p_cols] = (float)(-v * scale);
}
}
}
MatUtils.copyToMat(p_float, p);
}
public void read(object root_json)
{
IDictionary smodel_json = (IDictionary)root_json;
IDictionary V_json = (IDictionary)smodel_json["V"];
V = new Mat((int)(long)V_json["rows"], (int)(long)V_json["cols"], CvType.CV_32F);
IList V_data_json = (IList)V_json["data"];
float[] V_data = new float[V.rows() * V.cols()];
for (int i = 0; i < V_data_json.Count; i++)
{
V_data[i] = (float)(double)V_data_json[i];
}
MatUtils.copyToMat(V_data, V);
IDictionary e_json = (IDictionary)smodel_json["e"];
e = new Mat((int)(long)e_json["rows"], (int)(long)e_json["cols"], CvType.CV_32F);
IList e_data_json = (IList)e_json["data"];
float[] e_data = new float[e.rows() * e.cols()];
for (int i = 0; i < e_data_json.Count; i++)
{
e_data[i] = (float)(double)e_data_json[i];
}
MatUtils.copyToMat(e_data, e);
IDictionary C_json = (IDictionary)smodel_json["C"];
C = new Mat((int)(long)C_json["rows"], (int)(long)C_json["cols"], CvType.CV_32S);
//Debug.Log ("C " + C.ToString ());
IList C_data_json = (IList)C_json["data"];
int[] C_data = new int[C.rows() * C.cols()];
for (int i = 0; i < C_data_json.Count; i++)
{
C_data[i] = (int)(long)C_data_json[i];
}
MatUtils.copyToMat(C_data, C);
p = Mat.zeros(e.rows(), 1, CvType.CV_32F);
}
}
}
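A note on the clamp() method above: it bounds each non-rigid shape parameter to within c standard deviations of its mode, using the variances stored in e and the global scale held in p[0]; negative entries of e mark rigid parameters and are skipped. The following standalone sketch restates that bound with plain arrays; ShapeParamClamp is a hypothetical helper written only for illustration, not part of the package API.

using System;

static class ShapeParamClamp
{
    // Clamp each shape parameter p[i] to +/- c standard deviations of its mode.
    // e[i] holds the variance (eigenvalue) of mode i; negative entries mark
    // rigid parameters and are left untouched. p[0] is the global scale, so the
    // bound is checked against the scale-normalized parameter, as in clamp() above.
    public static void Clamp(float[] p, float[] e, float c)
    {
        float scale = p[0];
        for (int i = 0; i < e.Length; i++)
        {
            if (e[i] < 0) continue;
            float bound = c * (float)Math.Sqrt(e[i]);
            if (Math.Abs(p[i] / scale) > bound)
            {
                p[i] = (p[i] > 0 ? bound : -bound) * scale;
            }
        }
    }
}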

Diff not shown because the file is too large.

View file

@ -1,88 +1,88 @@
using OpenCVForUnity.CoreModule;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace FaceTrackerExample
{
/// <summary>
/// FaceTracker Example
/// </summary>
public class FaceTrackerExample : MonoBehaviour
{
public Text exampleTitle;
public Text versionInfo;
public ScrollRect scrollRect;
static float verticalNormalizedPosition = 1f;
// Use this for initialization
void Start()
{
exampleTitle.text = "FaceTracker Example " + Application.version;
versionInfo.text = Core.NATIVE_LIBRARY_NAME + " " + OpenCVForUnity.UnityUtils.Utils.getVersion() + " (" + Core.VERSION + ")";
versionInfo.text += " / UnityEditor " + Application.unityVersion;
versionInfo.text += " / ";
#if UNITY_EDITOR
versionInfo.text += "Editor";
#elif UNITY_STANDALONE_WIN
versionInfo.text += "Windows";
#elif UNITY_STANDALONE_OSX
versionInfo.text += "Mac OSX";
#elif UNITY_STANDALONE_LINUX
versionInfo.text += "Linux";
#elif UNITY_ANDROID
versionInfo.text += "Android";
#elif UNITY_IOS
versionInfo.text += "iOS";
#elif UNITY_WSA
versionInfo.text += "WSA";
#elif UNITY_WEBGL
versionInfo.text += "WebGL";
#endif
versionInfo.text += " ";
#if ENABLE_MONO
versionInfo.text += "Mono";
#elif ENABLE_IL2CPP
versionInfo.text += "IL2CPP";
#elif ENABLE_DOTNET
versionInfo.text += ".NET";
#endif
scrollRect.verticalNormalizedPosition = verticalNormalizedPosition;
}
// Update is called once per frame
void Update()
{
}
public void OnScrollRectValueChanged()
{
verticalNormalizedPosition = scrollRect.verticalNormalizedPosition;
}
public void OnShowLicenseButton()
{
SceneManager.LoadScene("ShowLicense");
}
public void OnTexture2DFaceTrackerExample()
{
SceneManager.LoadScene("Texture2DFaceTrackerExample");
}
public void OnWebCamTextureFaceTrackerExample()
{
SceneManager.LoadScene("WebCamTextureFaceTrackerExample");
}
public void OnFaceTrackerARExample()
{
SceneManager.LoadScene("FaceTrackerARExample");
}
}
}

View file

@@ -361,7 +361,7 @@ MonoBehaviour:
m_HandleRect: {fileID: 1039786615}
m_Direction: 2
m_Value: 0
- m_Size: 0.99999994
+ m_Size: 1
m_NumberOfSteps: 0
m_OnValueChanged:
m_PersistentCalls:
@@ -1736,7 +1736,7 @@ RectTransform:
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
- m_AnchoredPosition: {x: 0, y: 0.000035862842}
+ m_AnchoredPosition: {x: 0, y: 0.000030518197}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0.5, y: 1}
--- !u!114 &1339781663

View file

@@ -1,29 +1,29 @@
using UnityEngine;
using UnityEngine.SceneManagement;
namespace FaceTrackerExample
{
/// <summary>
/// Show License
/// </summary>
public class ShowLicense : MonoBehaviour
{
// Use this for initialization
void Start()
{
}
// Update is called once per frame
void Update()
{
}
public void OnBackButton()
{
SceneManager.LoadScene("FaceTrackerExample");
}
}
}

View file

@@ -1,174 +1,174 @@
using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace FaceTrackerExample
{
/// <summary>
/// Texture2D Face Tracker Example
/// </summary>
public class Texture2DFaceTrackerExample : MonoBehaviour
{
/// <summary>
/// The image texture.
/// </summary>
public Texture2D imgTexture;
/// <summary>
/// The tracker_model_json_filepath.
/// </summary>
private string tracker_model_json_filepath;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
#if UNITY_WEBGL
getFilePath_Coroutine = GetFilePath();
StartCoroutine(getFilePath_Coroutine);
#else
tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
Run();
#endif
}
#if UNITY_WEBGL
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("tracker_model.json", (result) =>
{
tracker_model_json_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) =>
{
haarcascade_frontalface_alt_xml_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
getFilePath_Coroutine = null;
Run();
}
#endif
private void Run()
{
gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = 0;
float height = 0;
width = gameObject.transform.localScale.x;
height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
//initialize FaceTracker
FaceTracker faceTracker = new FaceTracker(tracker_model_json_filepath);
//initialize FaceTrackerParams
FaceTrackerParams faceTrackerParams = new FaceTrackerParams();
Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
Utils.texture2DToMat(imgTexture, imgMat);
Debug.Log("imgMat dst ToString " + imgMat.ToString());
CascadeClassifier cascade = new CascadeClassifier();
cascade.load(haarcascade_frontalface_alt_xml_filepath);
//if (cascade.empty())
//{
// Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. "); // Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//}
//convert image to greyscale
Mat gray = new Mat();
Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY);
MatOfRect faces = new MatOfRect();
Imgproc.equalizeHist(gray, gray);
cascade.detectMultiScale(gray, faces, 1.1f, 2, 0
// | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
Debug.Log("faces " + faces.dump());
if (faces.rows() > 0)
{
//add initial face points from MatOfRect
faceTracker.addPoints(faces);
}
//track face points. If there are no face points, track() always returns false.
if (faceTracker.track(imgMat, faceTrackerParams)) faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(imgMat, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
cascade.Dispose();
}
// Update is called once per frame
void Update()
{
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
public void OnBackButton()
{
SceneManager.LoadScene("FaceTrackerExample");
}
}
}
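The orthographic camera sizing in Run() above simply fits the textured quad to the screen: when the quad is proportionally wider than the screen (widthScale < heightScale), the camera half-height is derived from the quad width and the screen aspect ratio; otherwise it is half the quad height. A minimal restatement of that choice, with FitOrthographicSize as a hypothetical helper name used only for illustration:

// Hypothetical helper illustrating the camera-fit logic used in Run() above.
// width/height are the quad's world-space size (equal to the texture size here),
// screenW/screenH are the screen dimensions in pixels.
static float FitOrthographicSize(float width, float height, float screenW, float screenH)
{
    float widthScale = screenW / width;
    float heightScale = screenH / height;
    if (widthScale < heightScale)
    {
        // Width is the limiting dimension: scale the half-height by the screen aspect ratio.
        return (width * screenH / screenW) / 2f;
    }
    // Otherwise the quad height fits as-is; half of it is the orthographic size.
    return height / 2f;
}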

View file

@@ -1,352 +1,352 @@
using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace FaceTrackerExample
{
/// <summary>
/// WebCamTexture Face Tracker Example
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureFaceTrackerExample : MonoBehaviour
{
/// <summary>
/// The auto reset mode. If true, the face is tracked only while a face is detected in each frame.
/// </summary>
public bool isAutoResetMode;
/// <summary>
/// The auto reset mode toggle.
/// </summary>
public Toggle isAutoResetModeToggle;
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The face tracker.
/// </summary>
FaceTracker faceTracker;
/// <summary>
/// The face tracker parameters.
/// </summary>
FaceTrackerParams faceTrackerParams;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The tracker_model_json_filepath.
/// </summary>
private string tracker_model_json_filepath;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
isAutoResetModeToggle.isOn = isAutoResetMode;
#if UNITY_WEBGL
getFilePath_Coroutine = GetFilePath();
StartCoroutine(getFilePath_Coroutine);
#else
tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
Run();
#endif
}
#if UNITY_WEBGL
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("tracker_model.json", (result) =>
{
tracker_model_json_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) =>
{
haarcascade_frontalface_alt_xml_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
getFilePath_Coroutine = null;
Run();
}
#endif
private void Run()
{
//initialize FaceTracker
faceTracker = new FaceTracker(tracker_model_json_filepath);
//initialize FaceTrackerParams
faceTrackerParams = new FaceTrackerParams();
cascade = new CascadeClassifier();
cascade.load(haarcascade_frontalface_alt_xml_filepath);
//if (cascade.empty())
//{
// Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. "); // Debug.LogError("cascade file is not loaded.Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
//}
webCamTextureToMatHelper.Initialize();
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = 0;
float height = 0;
width = gameObject.transform.localScale.x;
height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
}
/// <summary>
/// Raises the webcam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
faceTracker.reset();
grayMat.Dispose();
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
//convert image to greyscale
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
{
//Debug.Log ("detectFace");
//convert image to greyscale
using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect())
{
Imgproc.equalizeHist(grayMat, equalizeHistMat);
cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
// | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
if (faces.rows() > 0)
{
//Debug.Log ("faces " + faces.dump ());
List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
List<Point[]> pointsList = faceTracker.getPoints();
if (isAutoResetMode)
{
//add initial face points from MatOfRect
if (pointsList.Count <= 0)
{
faceTracker.addPoints(faces);
//Debug.Log ("reset faces ");
}
else
{
for (int i = 0; i < rectsList.Count; i++)
{
OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList[i].x + rectsList[i].width / 3, rectsList[i].y + rectsList[i].height / 2, rectsList[i].width / 3, rectsList[i].height / 3);
//Determine whether the nose point is still contained in trackRect.
if (i < pointsList.Count && !trackRect.contains(pointsList[i][67]))
{
rectsList.RemoveAt(i);
pointsList.RemoveAt(i);
//Debug.Log ("remove " + i);
}
Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
}
}
}
else
{
faceTracker.addPoints(faces);
}
//draw face rect
for (int i = 0; i < rectsList.Count; i++)
{
Imgproc.rectangle(rgbaMat, new Point(rectsList[i].x, rectsList[i].y), new Point(rectsList[i].x + rectsList[i].width, rectsList[i].y + rectsList[i].height), new Scalar(255, 0, 0, 255), 2);
}
}
else
{
if (isAutoResetMode)
{
faceTracker.reset();
}
}
}
}
//track face points. If there are no face points, track() always returns false.
if (faceTracker.track(grayMat, faceTrackerParams)) faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
//Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Utils.fastMatToTexture2D(rgbaMat, texture);
}
if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
{
faceTracker.reset();
}
}
/// <summary>
/// Raises the disable event.
/// </summary>
void OnDisable()
{
webCamTextureToMatHelper.Dispose();
if (cascade != null) cascade.Dispose();
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
/// <summary>
/// Raises the back button event.
/// </summary>
public void OnBackButton()
{
SceneManager.LoadScene("FaceTrackerExample");
}
/// <summary>
/// Raises the play button event.
/// </summary>
public void OnPlayButton()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button event.
/// </summary>
public void OnPauseButton()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button event.
/// </summary>
public void OnStopButton()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button event.
/// </summary>
public void OnChangeCameraButton()
{
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
}
/// <summary>
/// Raises the change auto reset mode toggle event.
/// </summary>
public void OnIsAutoResetModeToggle()
{
if (isAutoResetModeToggle.isOn)
{
isAutoResetMode = true;
}
else
{
isAutoResetMode = false;
}
}
}
}
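In auto-reset mode, the Update() loop above keeps an existing track only while its nose landmark (index 67 in this tracker model) stays inside a smaller rectangle carved out of each newly detected face rect (middle third horizontally, lower half vertically). A small sketch of that containment test follows; TrackValidation and its method names are hypothetical and used only for illustration.

using OpenCVForUnity.CoreModule;

static class TrackValidation
{
    // Rebuild the inner rectangle used for the nose-containment check,
    // mirroring the trackRect construction in Update() above.
    public static Rect NoseRegion(Rect face)
    {
        return new Rect(face.x + face.width / 3, face.y + face.height / 2,
                        face.width / 3, face.height / 3);
    }

    // True while the tracked nose point still lies inside the detection's
    // nose region, i.e. the existing track is assumed to match this detection.
    public static bool TrackStillValid(Rect face, Point nose)
    {
        return NoseRegion(face).contains(nose);
    }
}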