Change the line feed code [utf-8/lf]

EnoxSoftware 2022-06-28 23:33:53 +09:00
Parent ba7af9a594
Commit 3907407b7a
13 changed files with 2729 additions and 2644 deletions
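
For reference, the normalization a commit like this applies can be scripted. The following is a minimal, hypothetical C# sketch (the recursive *.cs scan and in-place rewrite are illustrative; they are not part of this commit):

using System.IO;
using System.Text;

class LineEndingNormalizer
{
    // Rewrites every .cs file under the given root with LF line endings
    // and BOM-less UTF-8, mirroring the [utf-8/lf] convention above.
    static void Main(string[] args)
    {
        string root = args.Length > 0 ? args[0] : ".";
        foreach (string path in Directory.EnumerateFiles(root, "*.cs", SearchOption.AllDirectories))
        {
            string text = File.ReadAllText(path);
            string lf = text.Replace("\r\n", "\n").Replace('\r', '\n');
            File.WriteAllText(path, lf, new UTF8Encoding(false));
        }
    }
}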

View file

@@ -1,116 +1,116 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UtilsModule;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
namespace OpenCVFaceTracker
{
/// <summary>
/// Face detector.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using “OpenCV for Unity”.
/// </summary>
public class FaceDetector
{
string detector_fname;
//file containing cascade classifier
Vector3 detector_offset;
//offset from center of detection
Mat reference;
//reference shape
CascadeClassifier detector;
public FaceDetector()
{
}
public List<Point[]> detect(Mat im, float scaleFactor, int minNeighbours, Size minSize)
{
//convert image to greyscale
Mat gray = null;
if (im.channels() == 1)
{
gray = im;
}
else
{
gray = new Mat();
Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
}
using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect())
{
Imgproc.equalizeHist(gray, equalizeHistMat);
detector.detectMultiScale(equalizeHistMat, faces, scaleFactor, minNeighbours, 0
| Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, minSize, new Size());
if (faces.rows() < 1)
{
return new List<Point[]>();
}
return convertMatOfRectToPoints(faces);
}
}
public List<Point[]> convertMatOfRectToPoints(MatOfRect rects)
{
List<OpenCVForUnity.CoreModule.Rect> R = rects.toList();
List<Point[]> points = new List<Point[]>(R.Count);
int n = reference.rows() / 2;
float[] reference_float = new float[reference.total()];
MatUtils.copyFromMat<float>(reference, reference_float);
foreach (var r in R)
{
Vector3 scale = detector_offset * r.width;
Point[] p = new Point[n];
for (int i = 0; i < n; i++)
{
p[i] = new Point();
p[i].x = scale.z * reference_float[2 * i] + r.x + 0.5 * r.width + scale.x;
p[i].y = scale.z * reference_float[(2 * i) + 1] + r.y + 0.5 * r.height + scale.y;
}
points.Add(p);
}
return points;
}
public void read(object root_json)
{
IDictionary detector_json = (IDictionary)root_json;
detector_fname = (string)detector_json["fname"];
detector_offset = new Vector3((float)(double)detector_json["x offset"], (float)(double)detector_json["y offset"], (float)(double)detector_json["z offset"]);
IDictionary reference_json = (IDictionary)detector_json["reference"];
reference = new Mat((int)(long)reference_json["rows"], (int)(long)reference_json["cols"], CvType.CV_32F);
IList data_json = (IList)reference_json["data"];
float[] data = new float[reference.rows() * reference.cols()];
for (int i = 0; i < data_json.Count; i++)
{
data[i] = (float)(double)data_json[i];
}
MatUtils.copyToMat(data, reference);
detector = new CascadeClassifier(Utils.getFilePath(detector_fname));
}
}
}
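
A hedged usage sketch for the class above; the parsed JSON node and the RGBA input frame are assumptions, and the detection parameters mirror the defaults commented out in FaceTrackerParams further down:

using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;
using System.Collections.Generic;

public static class FaceDetectorUsageSketch
{
    // rootJson: the "detector" entry of the deserialized tracker-model JSON.
    // rgbaMat:  an RGBA camera frame (detect() converts it to grayscale itself).
    public static List<Point[]> DetectOnce(object rootJson, Mat rgbaMat)
    {
        var detector = new FaceDetector();
        detector.read(rootJson); // loads the cascade file and the reference shape
        return detector.detect(rgbaMat, 1.1f, 2, new Size(30, 30));
    }
}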

View file

@@ -1,265 +1,265 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System.Collections;
using System.Collections.Generic;
using MiniJSON;
using UnityEngine;
#if UNITY_WSA
using UnityEngine.Windows;
using System.Text;
#else
using System.IO;
#endif
namespace OpenCVFaceTracker
{
/// <summary>
/// Face tracker.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using “OpenCV for Unity”.
/// </summary>
public class FaceTracker
{
List<Point[]> points;
//current tracked points
FaceDetector detector;
//detector for initialisation
ShapeModel smodel;
//shape model
PatchModels pmodel;
//feature detectors
public FaceTracker(string filepath)
{
if (filepath == null)
{
Debug.LogError("tracker_model file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
points = new List<Point[]>();
string jsonText = null;
#if UNITY_WSA
var data = File.ReadAllBytes(filepath);
jsonText = Encoding.UTF8.GetString(data, 0, data.Length);
#else
jsonText = File.ReadAllText(filepath);
#endif
//TextAsset textAsset = Resources.Load (filename) as TextAsset;
//string jsonText = textAsset.text;
IDictionary json = (IDictionary)Json.Deserialize(jsonText);
IDictionary ft = (IDictionary)json["ft object"];
detector = new FaceDetector();
detector.read(ft["detector"]);
smodel = new ShapeModel();
smodel.read(ft["smodel"]);
pmodel = new PatchModels();
pmodel.read(ft["pmodel"]);
}
public List<Point[]> getPoints()
{
return points;
}
public void addPoints(List<Point[]> points)
{
this.points.AddRange(points); // 'this.' is required: the parameter shadows the field
}
public void addPoints(MatOfRect rects)
{
points.AddRange(detector.convertMatOfRectToPoints(rects));
}
public Point[] getConnections()
{
Point[] c = new Point[smodel.C.rows()];
int[] data = new int[c.Length * 2];
MatUtils.copyFromMat<int>(smodel.C, data);
int len = c.Length;
for (int i = 0; i < len; i++)
{
c[i] = new Point(data[i * 2], data[(i * 2) + 1]);
}
return c;
}
public void reset()
{
//reset tracker
points.Clear();
}
public bool track(Mat im, FaceTrackerParams p)
{
if (points.Count <= 0) return false;
//convert image to greyscale
Mat gray = null;
if (im.channels() == 1)
{
gray = im;
}
else
{
gray = new Mat();
Imgproc.cvtColor(im, gray, Imgproc.COLOR_RGBA2GRAY);
}
//initialise
//if (!tracking)
//points = detector.detect (gray, p.scaleFactor, p.minNeighbours, p.minSize);
int count = points.Count;
for (int i = 0; i < count; i++)
{
if (points[i].Length != smodel.npts()) return false;
//fit
int size_count = p.ssize.Count;
for (int level = 0; level < size_count; level++)
{
points[i] = fit(gray, points[i], p.ssize[level], p.robust, p.itol, p.ftol);
}
}
return true;
}
public void draw(Mat im, Scalar pts_color, Scalar con_color)
{
int[] smodel_C_int = new int[smodel.C.total()];
MatUtils.copyFromMat<int>(smodel.C, smodel_C_int);
foreach (var point in points)
{
int n = point.Length;
if (n == 0) return;
int rows = smodel.C.rows();
int cols = smodel.C.cols();
for (int i = 0; i < rows; i++)
{
int j = smodel_C_int[i * cols], k = smodel_C_int[(i * cols) + 1];
#if OPENCV_2
Core.line(im, point[j], point[k], con_color, 1);
#else
Imgproc.line(im, point[j], point[k], con_color, 1);
#endif
}
for (int i = 0; i < n; i++)
{
#if OPENCV_2
Core.circle (im, point [i], 1, pts_color, 2, Core.LINE_AA, 0);
#else
Imgproc.circle(im, point[i], 1, pts_color, 2, Imgproc.LINE_AA, 0);
#endif
}
}
}
private Point[] fit(Mat image, Point[] init, Size ssize, bool robust, int itol, double ftol)
{
int n = smodel.npts();
//assert((int(init.size())==n) && (pmodel.n_patches()==n));
//Debug.Log ("init.size())==n " + init.Length + " " + n);
//Debug.Log ("pmodel.n_patches()==n " + pmodel.n_patches () + " " + n);
smodel.calc_params(init, new Mat(), 3.0f);
Point[] pts = smodel.calc_shape();
//find facial features in image around current estimates
Point[] peaks = pmodel.calc_peaks(image, pts, ssize);
//optimise
if (!robust)
{
smodel.calc_params(peaks, new Mat(), 3.0f); //compute shape model parameters
pts = smodel.calc_shape(); //update shape
}
else
{
using (Mat weight = new Mat(n, 1, CvType.CV_32F)) using (Mat weight_sort = new Mat(n, 1, CvType.CV_32F))
{
float[] weight_float = new float[weight.total()];
MatUtils.copyFromMat<float>(weight, weight_float);
float[] weight_sort_float = new float[weight_sort.total()];
Point[] pts_old = pts;
for (int iter = 0; iter < itol; iter++)
{
//compute robust weight
for (int i = 0; i < n; i++)
{
using (MatOfPoint tmpMat = new MatOfPoint(new Point(pts[i].x - peaks[i].x, pts[i].y - peaks[i].y)))
{
weight_float[i] = (float)Core.norm(tmpMat);
}
}
MatUtils.copyToMat(weight_float, weight);
Core.sort(weight, weight_sort, Core.SORT_EVERY_COLUMN | Core.SORT_ASCENDING);
MatUtils.copyFromMat<float>(weight_sort, weight_sort_float);
double var = 1.4826 * weight_sort_float[n / 2];
if (var < 0.1) var = 0.1;
Core.pow(weight, 2, weight);
Core.multiply(weight, new Scalar(-0.5 / (var * var)), weight);
Core.exp(weight, weight);
//compute shape model parameters
smodel.calc_params(peaks, weight, 3.0f);
//update shape
pts = smodel.calc_shape();
//check for convergence
float v = 0;
for (int i = 0; i < n; i++)
{
using (MatOfPoint tmpMat = new MatOfPoint(new Point(pts[i].x - pts_old[i].x, pts[i].y - pts_old[i].y)))
{
v += (float)Core.norm(tmpMat);
}
}
if (v < ftol)
{
break;
}
else
{
pts_old = pts;
}
}
}
}
return pts;
}
}
}
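
A minimal per-frame driver for FaceTracker, assuming a model file named "tracker_model.json" has been copied to StreamingAssets (the exact file name and the frame source are assumptions):

using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UnityUtils;

public class FaceTrackerLoopSketch
{
    FaceTracker tracker;
    FaceTrackerParams trackerParams;

    public void Init()
    {
        tracker = new FaceTracker(Utils.getFilePath("tracker_model.json"));
        trackerParams = new FaceTrackerParams();
    }

    // Call once per RGBA frame; points must first be seeded via addPoints().
    public void OnFrame(Mat rgbaMat)
    {
        if (tracker.track(rgbaMat, trackerParams))
            tracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
        else
            tracker.reset(); // tracking lost: clear points and wait for redetection
    }
}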

View file

@@ -1,47 +1,47 @@
using OpenCVForUnity.CoreModule;
using System.Collections.Generic;
namespace OpenCVFaceTracker
{
/// <summary>
/// Face tracker parameters.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using “OpenCV for Unity”.
/// </summary>
public class FaceTrackerParams
{
public List<Size> ssize;
//search region size/level
public bool robust;
//use robust fitting?
public int itol;
//maximum number of iterations to try
public double ftol;
//convergence tolerance
// public float scaleFactor; //OpenCV Cascade detector parameters
// public int minNeighbours; //...
// public Size minSize;
public FaceTrackerParams(bool robust = false, int itol = 20, double ftol = 1e-3, List<Size> ssize = null)
{
if (ssize == null)
{
this.ssize = new List<Size>();
this.ssize.Add(new Size(21, 21));
this.ssize.Add(new Size(11, 11));
this.ssize.Add(new Size(5, 5));
}
else
{
this.ssize = ssize;
}
this.robust = robust;
this.itol = itol;
this.ftol = ftol;
//scaleFactor = 1.1f;
//minNeighbours = 2;
//minSize = new Size (30, 30);
}
}
}
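
By default the constructor above builds the 21/11/5 coarse-to-fine search pyramid; a single-level, robust configuration could be set up like this (the values are arbitrary):

using OpenCVForUnity.CoreModule;
using System.Collections.Generic;

public static class FaceTrackerParamsSketch
{
    public static FaceTrackerParams Coarse()
    {
        // One coarse 31x31 search window, robust reweighting on, looser tolerance.
        return new FaceTrackerParams(
            robust: true,
            itol: 10,
            ftol: 1e-2,
            ssize: new List<Size> { new Size(31, 31) });
    }
}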

View file

@@ -32,7 +32,8 @@ using System.Collections.Generic;
using System.IO;
using System.Text;
namespace MiniJSON {
namespace MiniJSON
{
// Example usage:
//
// using UnityEngine;
@@ -73,29 +74,35 @@ namespace MiniJSON {
/// JSON uses Arrays and Objects. These correspond here to the datatypes IList and IDictionary.
/// All numbers are parsed to doubles.
/// </summary>
public static class Json {
public static class Json
{
/// <summary>
/// Parses the string json into a value
/// </summary>
/// <param name="json">A JSON string.</param>
/// <returns>An List&lt;object&gt;, a Dictionary&lt;string, object&gt;, a double, an integer,a string, null, true, or false</returns>
public static object Deserialize(string json) {
public static object Deserialize(string json)
{
// save the string for debug information
if (json == null) {
if (json == null)
{
return null;
}
return Parser.Parse(json);
}
sealed class Parser : IDisposable {
sealed class Parser : IDisposable
{
const string WORD_BREAK = "{}[],:\"";
public static bool IsWordBreak(char c) {
public static bool IsWordBreak(char c)
{
return Char.IsWhiteSpace(c) || WORD_BREAK.IndexOf(c) != -1;
}
enum TOKEN {
enum TOKEN
{
NONE,
CURLY_OPEN,
CURLY_CLOSE,
@@ -112,58 +119,68 @@ namespace MiniJSON {
StringReader json;
Parser(string jsonString) {
Parser(string jsonString)
{
json = new StringReader(jsonString);
}
public static object Parse(string jsonString) {
using (var instance = new Parser(jsonString)) {
public static object Parse(string jsonString)
{
using (var instance = new Parser(jsonString))
{
return instance.ParseValue();
}
}
public void Dispose() {
public void Dispose()
{
json.Dispose();
json = null;
}
Dictionary<string, object> ParseObject() {
Dictionary<string, object> ParseObject()
{
Dictionary<string, object> table = new Dictionary<string, object>();
// ditch opening brace
json.Read();
// {
while (true) {
switch (NextToken) {
case TOKEN.NONE:
return null;
case TOKEN.COMMA:
continue;
case TOKEN.CURLY_CLOSE:
return table;
default:
// name
string name = ParseString();
if (name == null) {
while (true)
{
switch (NextToken)
{
case TOKEN.NONE:
return null;
}
case TOKEN.COMMA:
continue;
case TOKEN.CURLY_CLOSE:
return table;
default:
// name
string name = ParseString();
if (name == null)
{
return null;
}
// :
if (NextToken != TOKEN.COLON) {
return null;
}
// ditch the colon
json.Read();
// :
if (NextToken != TOKEN.COLON)
{
return null;
}
// ditch the colon
json.Read();
// value
table[name] = ParseValue();
break;
// value
table[name] = ParseValue();
break;
}
}
}
List<object> ParseArray() {
List<object> ParseArray()
{
List<object> array = new List<object>();
// ditch opening bracket
@@ -171,55 +188,61 @@ namespace MiniJSON {
// [
var parsing = true;
while (parsing) {
while (parsing)
{
TOKEN nextToken = NextToken;
switch (nextToken) {
case TOKEN.NONE:
return null;
case TOKEN.COMMA:
continue;
case TOKEN.SQUARED_CLOSE:
parsing = false;
break;
default:
object value = ParseByToken(nextToken);
switch (nextToken)
{
case TOKEN.NONE:
return null;
case TOKEN.COMMA:
continue;
case TOKEN.SQUARED_CLOSE:
parsing = false;
break;
default:
object value = ParseByToken(nextToken);
array.Add(value);
break;
array.Add(value);
break;
}
}
return array;
}
object ParseValue() {
object ParseValue()
{
TOKEN nextToken = NextToken;
return ParseByToken(nextToken);
}
object ParseByToken(TOKEN token) {
switch (token) {
case TOKEN.STRING:
return ParseString();
case TOKEN.NUMBER:
return ParseNumber();
case TOKEN.CURLY_OPEN:
return ParseObject();
case TOKEN.SQUARED_OPEN:
return ParseArray();
case TOKEN.TRUE:
return true;
case TOKEN.FALSE:
return false;
case TOKEN.NULL:
return null;
default:
return null;
object ParseByToken(TOKEN token)
{
switch (token)
{
case TOKEN.STRING:
return ParseString();
case TOKEN.NUMBER:
return ParseNumber();
case TOKEN.CURLY_OPEN:
return ParseObject();
case TOKEN.SQUARED_OPEN:
return ParseArray();
case TOKEN.TRUE:
return true;
case TOKEN.FALSE:
return false;
case TOKEN.NULL:
return null;
default:
return null;
}
}
string ParseString() {
string ParseString()
{
StringBuilder s = new StringBuilder();
char c;
@@ -227,70 +250,78 @@ namespace MiniJSON {
json.Read();
bool parsing = true;
while (parsing) {
while (parsing)
{
if (json.Peek() == -1) {
if (json.Peek() == -1)
{
parsing = false;
break;
}
c = NextChar;
switch (c) {
case '"':
parsing = false;
break;
case '\\':
if (json.Peek() == -1) {
switch (c)
{
case '"':
parsing = false;
break;
}
c = NextChar;
switch (c) {
case '"':
case '\\':
case '/':
s.Append(c);
break;
case 'b':
s.Append('\b');
break;
case 'f':
s.Append('\f');
break;
case 'n':
s.Append('\n');
break;
case 'r':
s.Append('\r');
break;
case 't':
s.Append('\t');
break;
case 'u':
var hex = new char[4];
for (int i=0; i< 4; i++) {
hex[i] = NextChar;
if (json.Peek() == -1)
{
parsing = false;
break;
}
s.Append((char) Convert.ToInt32(new string(hex), 16));
c = NextChar;
switch (c)
{
case '"':
case '\\':
case '/':
s.Append(c);
break;
case 'b':
s.Append('\b');
break;
case 'f':
s.Append('\f');
break;
case 'n':
s.Append('\n');
break;
case 'r':
s.Append('\r');
break;
case 't':
s.Append('\t');
break;
case 'u':
var hex = new char[4];
for (int i = 0; i < 4; i++)
{
hex[i] = NextChar;
}
s.Append((char)Convert.ToInt32(new string(hex), 16));
break;
}
break;
default:
s.Append(c);
break;
}
break;
default:
s.Append(c);
break;
}
}
return s.ToString();
}
object ParseNumber() {
object ParseNumber()
{
string number = NextWord;
if (number.IndexOf('.') == -1) {
if (number.IndexOf('.') == -1)
{
long parsedInt;
Int64.TryParse(number, out parsedInt);
return parsedInt;
@@ -301,36 +332,47 @@ namespace MiniJSON {
return parsedDouble;
}
void EatWhitespace() {
while (Char.IsWhiteSpace(PeekChar)) {
void EatWhitespace()
{
while (Char.IsWhiteSpace(PeekChar))
{
json.Read();
if (json.Peek() == -1) {
if (json.Peek() == -1)
{
break;
}
}
}
char PeekChar {
get {
char PeekChar
{
get
{
return Convert.ToChar(json.Peek());
}
}
char NextChar {
get {
char NextChar
{
get
{
return Convert.ToChar(json.Read());
}
}
string NextWord {
get {
string NextWord
{
get
{
StringBuilder word = new StringBuilder();
while (!IsWordBreak(PeekChar)) {
while (!IsWordBreak(PeekChar))
{
word.Append(NextChar);
if (json.Peek() == -1) {
if (json.Peek() == -1)
{
break;
}
}
@@ -339,53 +381,58 @@ namespace MiniJSON {
}
}
TOKEN NextToken {
get {
TOKEN NextToken
{
get
{
EatWhitespace();
if (json.Peek() == -1) {
if (json.Peek() == -1)
{
return TOKEN.NONE;
}
switch (PeekChar) {
case '{':
return TOKEN.CURLY_OPEN;
case '}':
json.Read();
return TOKEN.CURLY_CLOSE;
case '[':
return TOKEN.SQUARED_OPEN;
case ']':
json.Read();
return TOKEN.SQUARED_CLOSE;
case ',':
json.Read();
return TOKEN.COMMA;
case '"':
return TOKEN.STRING;
case ':':
return TOKEN.COLON;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case '-':
return TOKEN.NUMBER;
switch (PeekChar)
{
case '{':
return TOKEN.CURLY_OPEN;
case '}':
json.Read();
return TOKEN.CURLY_CLOSE;
case '[':
return TOKEN.SQUARED_OPEN;
case ']':
json.Read();
return TOKEN.SQUARED_CLOSE;
case ',':
json.Read();
return TOKEN.COMMA;
case '"':
return TOKEN.STRING;
case ':':
return TOKEN.COLON;
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9':
case '-':
return TOKEN.NUMBER;
}
switch (NextWord) {
case "false":
return TOKEN.FALSE;
case "true":
return TOKEN.TRUE;
case "null":
return TOKEN.NULL;
switch (NextWord)
{
case "false":
return TOKEN.FALSE;
case "true":
return TOKEN.TRUE;
case "null":
return TOKEN.NULL;
}
return TOKEN.NONE;
@@ -398,18 +445,22 @@ namespace MiniJSON {
/// </summary>
/// <param name="json">A Dictionary&lt;string, object&gt; / List&lt;object&gt;</param>
/// <returns>A JSON encoded string, or null if object 'json' is not serializable</returns>
public static string Serialize(object obj) {
public static string Serialize(object obj)
{
return Serializer.Serialize(obj);
}
sealed class Serializer {
sealed class Serializer
{
StringBuilder builder;
Serializer() {
Serializer()
{
builder = new StringBuilder();
}
public static string Serialize(object obj) {
public static string Serialize(object obj)
{
var instance = new Serializer();
instance.SerializeValue(obj);
@@ -417,35 +468,52 @@ namespace MiniJSON {
return instance.builder.ToString();
}
void SerializeValue(object value) {
void SerializeValue(object value)
{
IList asList;
IDictionary asDict;
string asStr;
if (value == null) {
if (value == null)
{
builder.Append("null");
} else if ((asStr = value as string) != null) {
}
else if ((asStr = value as string) != null)
{
SerializeString(asStr);
} else if (value is bool) {
builder.Append((bool) value ? "true" : "false");
} else if ((asList = value as IList) != null) {
}
else if (value is bool)
{
builder.Append((bool)value ? "true" : "false");
}
else if ((asList = value as IList) != null)
{
SerializeArray(asList);
} else if ((asDict = value as IDictionary) != null) {
}
else if ((asDict = value as IDictionary) != null)
{
SerializeObject(asDict);
} else if (value is char) {
SerializeString(new string((char) value, 1));
} else {
}
else if (value is char)
{
SerializeString(new string((char)value, 1));
}
else
{
SerializeOther(value);
}
}
void SerializeObject(IDictionary obj) {
void SerializeObject(IDictionary obj)
{
bool first = true;
builder.Append('{');
foreach (object e in obj.Keys) {
if (!first) {
foreach (object e in obj.Keys)
{
if (!first)
{
builder.Append(',');
}
@@ -460,13 +528,16 @@ namespace MiniJSON {
builder.Append('}');
}
void SerializeArray(IList anArray) {
void SerializeArray(IList anArray)
{
builder.Append('[');
bool first = true;
foreach (object obj in anArray) {
if (!first) {
foreach (object obj in anArray)
{
if (!first)
{
builder.Append(',');
}
@@ -478,67 +549,81 @@ namespace MiniJSON {
builder.Append(']');
}
void SerializeString(string str) {
void SerializeString(string str)
{
builder.Append('\"');
char[] charArray = str.ToCharArray();
foreach (var c in charArray) {
switch (c) {
case '"':
builder.Append("\\\"");
break;
case '\\':
builder.Append("\\\\");
break;
case '\b':
builder.Append("\\b");
break;
case '\f':
builder.Append("\\f");
break;
case '\n':
builder.Append("\\n");
break;
case '\r':
builder.Append("\\r");
break;
case '\t':
builder.Append("\\t");
break;
default:
int codepoint = Convert.ToInt32(c);
if ((codepoint >= 32) && (codepoint <= 126)) {
builder.Append(c);
} else {
builder.Append("\\u");
builder.Append(codepoint.ToString("x4"));
}
break;
foreach (var c in charArray)
{
switch (c)
{
case '"':
builder.Append("\\\"");
break;
case '\\':
builder.Append("\\\\");
break;
case '\b':
builder.Append("\\b");
break;
case '\f':
builder.Append("\\f");
break;
case '\n':
builder.Append("\\n");
break;
case '\r':
builder.Append("\\r");
break;
case '\t':
builder.Append("\\t");
break;
default:
int codepoint = Convert.ToInt32(c);
if ((codepoint >= 32) && (codepoint <= 126))
{
builder.Append(c);
}
else
{
builder.Append("\\u");
builder.Append(codepoint.ToString("x4"));
}
break;
}
}
builder.Append('\"');
}
void SerializeOther(object value) {
void SerializeOther(object value)
{
// NOTE: decimals lose precision during serialization.
// They always have, I'm just letting you know.
// Previously floats and doubles lost precision too.
if (value is float) {
builder.Append(((float) value).ToString("R"));
} else if (value is int
|| value is uint
|| value is long
|| value is sbyte
|| value is byte
|| value is short
|| value is ushort
|| value is ulong) {
if (value is float)
{
builder.Append(((float)value).ToString("R"));
}
else if (value is int
|| value is uint
|| value is long
|| value is sbyte
|| value is byte
|| value is short
|| value is ushort
|| value is ulong)
{
builder.Append(value);
} else if (value is double
|| value is decimal) {
}
else if (value is double
|| value is decimal)
{
builder.Append(Convert.ToDouble(value).ToString("R"));
} else {
}
else
{
SerializeString(value.ToString());
}
}
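
The numeric convention documented above (integral JSON numbers come back as long, fractional ones as double) is exactly why the model loaders in this commit cast via (float)(double) and (int)(long). A hedged round-trip sketch:

using System.Collections.Generic;
using MiniJSON;

public static class MiniJsonSketch
{
    public static string RoundTrip()
    {
        var parsed = (Dictionary<string, object>)Json.Deserialize("{\"rows\": 2, \"scale\": 0.5}");
        long rows = (long)parsed["rows"];       // integral JSON number -> long
        double scale = (double)parsed["scale"]; // fractional JSON number -> double
        return Json.Serialize(parsed);          // back to {"rows":2,"scale":0.5}
    }
}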

View file

@@ -1,102 +1,102 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System.Collections;
using UnityEngine;
namespace OpenCVFaceTracker
{
/// <summary>
/// Patch model.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using “OpenCV for Unity”.
/// </summary>
public class PatchModel
{
Mat P;
//normalised patch
public PatchModel()
{
}
//size of patch model
public Size patch_size()
{
return P.size();
}
Mat convert_image(Mat im)
{
Mat I = null;
if (im.channels() == 1)
{
if (im.type() != CvType.CV_32F)
{
I = new Mat();
im.convertTo(I, CvType.CV_32F);
}
else
{
I = im;
}
}
else
{
if (im.channels() == 3)
{
Mat img = new Mat();
Imgproc.cvtColor(im, img, Imgproc.COLOR_RGB2GRAY); // 3-channel branch: RGB2GRAY (RGBA2GRAY expects 4 channels)
if (img.type() != CvType.CV_32F)
{
I = new Mat();
img.convertTo(I, CvType.CV_32F);
}
else
{
I = img;
}
}
else
{
Debug.Log("Unsupported image type!");
}
}
Core.add(I, new Scalar(1.0), I);
Core.log(I, I);
return I;
}
public Mat calc_response(Mat im, bool sum2one)
{
Mat I = convert_image(im);
Mat res = new Mat();
Imgproc.matchTemplate(I, P, res, Imgproc.TM_CCOEFF_NORMED);
if (sum2one)
{
Core.normalize(res, res, 0, 1, Core.NORM_MINMAX);
Core.divide(res, new Scalar(Core.sumElems(res).val[0]), res);
}
return res;
}
public void read(object root_json)
{
IDictionary pmodel_json = (IDictionary)root_json;
IDictionary P_json = (IDictionary)pmodel_json["P"];
P = new Mat((int)(long)P_json["rows"], (int)(long)P_json["cols"], CvType.CV_32F);
//Debug.Log ("P " + P.ToString ());
IList P_data_json = (IList)P_json["data"];
float[] P_data = new float[P.rows() * P.cols()];
for (int i = 0; i < P_data_json.Count; i++)
{
P_data[i] = (float)(double)P_data_json[i];
}
MatUtils.copyToMat(P_data, P);
}
}
}
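
A hedged sketch of probing a single patch: calc_response above produces a TM_CCOEFF_NORMED correlation map, so its maximum marks the most likely feature location inside the search window (the patch instance and search image are assumptions):

using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;

public static class PatchModelSketch
{
    public static Point ResponsePeak(PatchModel patch, Mat searchImage)
    {
        using (Mat response = patch.calc_response(searchImage, false))
        {
            return Core.minMaxLoc(response).maxLoc; // location of the strongest match
        }
    }
}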

View file

@@ -1,223 +1,223 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Collections;
using System.Collections.Generic;
namespace OpenCVFaceTracker
{
/// <summary>
/// Patch models.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using “OpenCV for Unity”.
/// </summary>
public class PatchModels
{
public Mat reference;
//reference shape
public IList<PatchModel> patches;
//patch models
public PatchModels()
{
}
//number of patches
public int n_patches()
{
return patches.Count;
}
public Point[] calc_peaks(Mat im, Point[] points, Size ssize)
{
int n = points.Length;
using (Mat pt = (new MatOfPoint2f(points)).reshape(1, 2 * n)) using (Mat S = calc_simil(pt)) using (Mat Si = inv_simil(S))
{
float[] pt_float = new float[pt.total()];
MatUtils.copyFromMat<float>(pt, pt_float);
int pt_cols = pt.cols();
float[] S_float = new float[S.total()];
MatUtils.copyFromMat<float>(S, S_float);
int S_cols = S.cols();
float[] A_float = new float[2 * 3];
Point[] pts = apply_simil(Si, points);
for (int i = 0; i < n; i++)
{
Size wsize = new Size(ssize.width + patches[i].patch_size().width, ssize.height + patches[i].patch_size().height);
using (Mat A = new Mat(2, 3, CvType.CV_32F))
{
MatUtils.copyFromMat<float>(A, A_float);
int A_cols = A.cols();
A_float[0] = S_float[0];
A_float[1] = S_float[1];
A_float[1 * A_cols] = S_float[1 * S_cols];
A_float[(1 * A_cols) + 1] = S_float[(1 * S_cols) + 1];
A_float[2] = (float)(pt_float[(2 * pt_cols) * i] -
(A_float[0] * (wsize.width - 1) / 2 + A_float[1] * (wsize.height - 1) / 2));
A_float[(1 * A_cols) + 2] = (float)(pt_float[((2 * pt_cols) * i) + 1] -
(A_float[1 * A_cols] * (wsize.width - 1) / 2 + A_float[(1 * A_cols) + 1] * (wsize.height - 1) / 2));
MatUtils.copyToMat(A_float, A);
using (Mat I = new Mat())
{
Imgproc.warpAffine(im, I, A, wsize, Imgproc.INTER_LINEAR + Imgproc.WARP_INVERSE_MAP);
using (Mat R = patches[i].calc_response(I, false))
{
Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(R);
pts[i].x = pts[i].x + minMaxLocResult.maxLoc.x - 0.5 * ssize.width;
pts[i].y = pts[i].y + minMaxLocResult.maxLoc.y - 0.5 * ssize.height;
}
}
}
}
return apply_simil(S, pts);
}
}
Point[] apply_simil(Mat S, Point[] points)
{
float[] S_float = new float[S.total()];
MatUtils.copyFromMat<float>(S, S_float);
int S_cols = S.cols();
int n = points.Length;
Point[] p = new Point[n];
for (int i = 0; i < n; i++)
{
p[i] = new Point();
p[i].x = S_float[0] * points[i].x + S_float[1] * points[i].y + S_float[2];
p[i].y = S_float[1 * S_cols] * points[i].x + S_float[(1 * S_cols) + 1] * points[i].y + S_float[(1 * S_cols) + 2];
}
return p;
}
Mat inv_simil(Mat S)
{
float[] S_float = new float[S.total()];
MatUtils.copyFromMat<float>(S, S_float);
int S_cols = S.cols();
Mat Si = new Mat(2, 3, CvType.CV_32F);
float d = S_float[0] * S_float[(1 * S_cols) + 1] - S_float[1 * S_cols] * S_float[1];
float[] Si_float = new float[Si.total()];
MatUtils.copyFromMat<float>(Si, Si_float);
int Si_cols = Si.cols();
Si_float[0] = S_float[(1 * S_cols) + 1] / d;
Si_float[1] = -S_float[1] / d;
Si_float[(1 * Si_cols) + 1] = S_float[0] / d;
Si_float[1 * Si_cols] = -S_float[1 * S_cols] / d;
MatUtils.copyToMat(Si_float, Si);
Mat Ri = new Mat(Si, new OpenCVForUnity.CoreModule.Rect(0, 0, 2, 2));
Mat negaRi = new Mat();
Core.multiply(Ri, new Scalar(-1), negaRi);
Mat t = new Mat();
Core.gemm(negaRi, S.col(2), 1, new Mat(negaRi.rows(), negaRi.cols(), negaRi.type()), 0, t);
Mat St = Si.col(2);
t.copyTo(St);
return Si;
}
Mat calc_simil(Mat pts)
{
float[] pts_float = new float[pts.total()];
MatUtils.copyFromMat<float>(pts, pts_float);
int pts_cols = pts.cols();
//compute translation
int n = pts.rows() / 2;
float mx = 0, my = 0;
for (int i = 0; i < n; i++)
{
mx += pts_float[(2 * pts_cols) * i];
my += pts_float[((2 * pts_cols) * i) + 1];
}
using (Mat p = new Mat(2 * n, 1, CvType.CV_32F))
{
float[] p_float = new float[p.total()];
MatUtils.copyFromMat<float>(p, p_float);
int p_cols = p.cols();
mx /= n;
my /= n;
for (int i = 0; i < n; i++)
{
p_float[(2 * p_cols) * i] = pts_float[(2 * pts_cols) * i] - mx;
p_float[((2 * p_cols) * i) + 1] = pts_float[((2 * pts_cols) * i) + 1] - my;
}
MatUtils.copyToMat(p_float, p);
//compute rotation and scale
float[] reference_float = new float[reference.total()];
MatUtils.copyFromMat<float>(reference, reference_float);
int reference_cols = reference.cols();
float a = 0, b = 0, c = 0;
for (int i = 0; i < n; i++)
{
a += reference_float[(2 * reference_cols) * i] * reference_float[(2 * reference_cols) * i] +
reference_float[((2 * reference_cols) * i) + 1] * reference_float[((2 * reference_cols) * i) + 1];
b += reference_float[(2 * reference_cols) * i] * p_float[(2 * p_cols) * i] +
reference_float[((2 * reference_cols) * i) + 1] * p_float[((2 * p_cols) * i) + 1];
c += reference_float[(2 * reference_cols) * i] * p_float[((2 * p_cols) * i) + 1] -
reference_float[((2 * reference_cols) * i) + 1] * p_float[(2 * p_cols) * i];
}
b /= a;
c /= a;
float scale = (float)Math.Sqrt(b * b + c * c), theta = (float)Math.Atan2(c, b);
float sc = scale * (float)Math.Cos(theta), ss = scale * (float)Math.Sin(theta);
Mat returnMat = new Mat(2, 3, CvType.CV_32F);
returnMat.put(0, 0, sc, -ss, mx, ss, sc, my);
return returnMat;
}
}
public void read(object root_json)
{
IDictionary pmodels_json = (IDictionary)root_json;
IDictionary reference_json = (IDictionary)pmodels_json["reference"];
reference = new Mat((int)(long)reference_json["rows"], (int)(long)reference_json["cols"], CvType.CV_32F);
IList data_json = (IList)reference_json["data"];
float[] data = new float[reference.rows() * reference.cols()];
int count = data_json.Count;
for (int i = 0; i < count; i++)
{
data[i] = (float)(double)data_json[i];
}
MatUtils.copyToMat(data, reference);
int n = (int)(long)pmodels_json["n_patches"];
patches = new List<PatchModel>(n);
for (int i = 0; i < n; i++)
{
PatchModel patchModel = new PatchModel();
patchModel.read(pmodels_json["patch " + i]);
patches.Add(patchModel);
}
}
}
}
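
calc_peaks is the inner search step that FaceTracker.fit calls at each pyramid level; it handles the similarity normalization internally, so a standalone call could look like this (the grayscale frame and current shape estimate are assumptions):

using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;

public static class PatchModelsSketch
{
    public static Point[] Refine(PatchModels pmodel, Mat grayFrame, Point[] currentShape)
    {
        // Searches an 11x11 window around each point in the similarity-normalized frame.
        return pmodel.calc_peaks(grayFrame, currentShape, new Size(11, 11));
    }
}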

View file

@@ -1,185 +1,185 @@
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.UtilsModule;
using System;
using System.Collections;
using UnityEngine;
namespace OpenCVFaceTracker
{
/// <summary>
/// Shape model.
/// This code is a rewrite of https://github.com/MasteringOpenCV/code/tree/master/Chapter6_NonRigidFaceTracking using “OpenCV for Unity”.
/// </summary>
public class ShapeModel
{
public Mat p;
//parameter vector (kx1) CV_32F
public Mat V;
//shape basis (2nxk) CV_32F
public Mat e;
//parameter variance (kx1) CV_32F
public Mat C;
//connectivity (cx2) CV_32S
public ShapeModel()
{
}
public int npts()
{
//number of points in shape model
return V.rows() / 2;
}
public void calc_params(Point[] pts, Mat weight, float c_factor)
{
int n = pts.Length;
//assert(V.rows == 2*n);
//Debug.Log ("V.rows == 2*n " + V.rows () + " " + 2 * n);
using (Mat s = (new MatOfPoint2f(pts)).reshape(1, 2 * n))
{ //point set to vector format
if (weight.total() == 0)
{
Core.gemm(V.t(), s, 1, new Mat(), 0, p); //simple projection
}
else
{
//scaled projection
if (weight.rows() != n)
{
Debug.Log("Invalid weighting matrix");
}
float[] weight_float = new float[weight.total()];
MatUtils.copyFromMat<float>(weight, weight_float);
int weight_cols = weight.cols();
int K = V.cols();
using (Mat H = Mat.zeros(K, K, CvType.CV_32F)) using (Mat g = Mat.zeros(K, 1, CvType.CV_32F))
{
for (int i = 0; i < n; i++)
{
using (Mat v = new Mat(V, new OpenCVForUnity.CoreModule.Rect(0, 2 * i, K, 2))) using (Mat tmpMat1 = new Mat()) using (Mat tmpMat2 = new Mat()) using (Mat tmpMat3 = new Mat())
{
float w = weight_float[i * weight_cols];
Core.multiply(v.t(), new Scalar(w), tmpMat1);
Core.gemm(tmpMat1, v, 1, new Mat(), 0, tmpMat2);
Core.add(H, tmpMat2, H);
Core.gemm(tmpMat1, new MatOfPoint2f(pts[i]).reshape(1, 2), 1, new Mat(), 0, tmpMat3);
Core.add(g, tmpMat3, g);
}
}
Core.solve(H, g, p, Core.DECOMP_SVD);
}
}
}
clamp(c_factor); //clamp resulting parameters
}
public Point[] calc_shape()
{
using (Mat s = new Mat())
{
Core.gemm(V, p, 1, new Mat(), 0, s);
float[] s_float = new float[s.total()];
MatUtils.copyFromMat<float>(s, s_float);
int s_cols = s.cols();
int n = s.rows() / 2;
Point[] pts = new Point[n];
for (int i = 0; i < n; i++)
{
pts[i] = new Point(s_float[(2 * s_cols) * i], s_float[((2 * s_cols) * i) + 1]);
}
return pts;
}
}
void clamp(float c)
{
float[] p_float = new float[p.total()];
MatUtils.copyFromMat<float>(p, p_float);
int p_cols = p.cols();
float[] e_float = new float[e.total()];
MatUtils.copyFromMat<float>(e, e_float);
int e_cols = e.cols();
double scale = p_float[0];
int rows = e.rows();
for (int i = 0; i < rows; i++)
{
if (e_float[i * e_cols] < 0) continue;
float v = c * (float)Math.Sqrt(e_float[i * e_cols]);
if (Math.Abs(p_float[i * p_cols] / scale) > v)
{
if (p_float[i * p_cols] > 0)
{
p_float[i * p_cols] = (float)(v * scale);
}
else
{
p_float[i * p_cols] = (float)(-v * scale);
}
}
}
MatUtils.copyToMat(p_float, p);
}
public void read(object root_json)
{
IDictionary smodel_json = (IDictionary)root_json;
IDictionary V_json = (IDictionary)smodel_json["V"];
V = new Mat((int)(long)V_json["rows"], (int)(long)V_json["cols"], CvType.CV_32F);
IList V_data_json = (IList)V_json["data"];
float[] V_data = new float[V.rows() * V.cols()];
for (int i = 0; i < V_data_json.Count; i++)
{
V_data[i] = (float)(double)V_data_json[i];
}
MatUtils.copyToMat(V_data, V);
IDictionary e_json = (IDictionary)smodel_json["e"];
e = new Mat((int)(long)e_json["rows"], (int)(long)e_json["cols"], CvType.CV_32F);
IList e_data_json = (IList)e_json["data"];
float[] e_data = new float[e.rows() * e.cols()];
for (int i = 0; i < e_data_json.Count; i++)
{
e_data[i] = (float)(double)e_data_json[i];
}
MatUtils.copyToMat(e_data, e);
IDictionary C_json = (IDictionary)smodel_json["C"];
C = new Mat((int)(long)C_json["rows"], (int)(long)C_json["cols"], CvType.CV_32S);
//Debug.Log ("C " + C.ToString ());
IList C_data_json = (IList)C_json["data"];
int[] C_data = new int[C.rows() * C.cols()];
for (int i = 0; i < C_data_json.Count; i++)
{
C_data[i] = (int)(long)C_data_json[i];
}
MatUtils.copyToMat(C_data, C);
p = Mat.zeros(e.rows(), 1, CvType.CV_32F);
}
}
}
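
The project-then-reconstruct round trip that fit() relies on, as a hedged standalone sketch (smodel and the raw point set are assumptions; the empty weight Mat selects the plain-projection branch above):

using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;

public static class ShapeModelSketch
{
    public static Point[] Regularize(ShapeModel smodel, Point[] rawPoints)
    {
        smodel.calc_params(rawPoints, new Mat(), 3.0f); // project onto the basis, clamp params to 3 sigma
        return smodel.calc_shape();                     // reconstruct the model-constrained shape
    }
}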

File diff not shown because it is too large.

View file

@@ -1,88 +1,88 @@
using OpenCVForUnity.CoreModule;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace FaceTrackerExample
{
/// <summary>
/// FaceTracker Example
/// </summary>
public class FaceTrackerExample : MonoBehaviour
{
public Text exampleTitle;
public Text versionInfo;
public ScrollRect scrollRect;
static float verticalNormalizedPosition = 1f;
// Use this for initialization
void Start()
{
exampleTitle.text = "FaceTracker Example " + Application.version;
versionInfo.text = Core.NATIVE_LIBRARY_NAME + " " + OpenCVForUnity.UnityUtils.Utils.getVersion() + " (" + Core.VERSION + ")";
versionInfo.text += " / UnityEditor " + Application.unityVersion;
versionInfo.text += " / ";
#if UNITY_EDITOR
versionInfo.text += "Editor";
#elif UNITY_STANDALONE_WIN
versionInfo.text += "Windows";
#elif UNITY_STANDALONE_OSX
versionInfo.text += "Mac OSX";
#elif UNITY_STANDALONE_LINUX
versionInfo.text += "Linux";
#elif UNITY_ANDROID
versionInfo.text += "Android";
#elif UNITY_IOS
versionInfo.text += "iOS";
#elif UNITY_WSA
versionInfo.text += "WSA";
#elif UNITY_WEBGL
versionInfo.text += "WebGL";
#endif
versionInfo.text += " ";
#if ENABLE_MONO
versionInfo.text += "Mono";
#elif ENABLE_IL2CPP
versionInfo.text += "IL2CPP";
#elif ENABLE_DOTNET
versionInfo.text += ".NET";
#endif
scrollRect.verticalNormalizedPosition = verticalNormalizedPosition;
}
// Update is called once per frame
void Update()
{
}
public void OnScrollRectValueChanged()
{
verticalNormalizedPosition = scrollRect.verticalNormalizedPosition;
}
public void OnShowLicenseButton()
{
SceneManager.LoadScene("ShowLicense");
}
public void OnTexture2DFaceTrackerExample()
{
SceneManager.LoadScene("Texture2DFaceTrackerExample");
}
public void OnWebCamTextureFaceTrackerExample()
{
SceneManager.LoadScene("WebCamTextureFaceTrackerExample");
}
public void OnFaceTrackerARExample()
{
SceneManager.LoadScene("FaceTrackerARExample");
}
}
}

View file

@@ -361,7 +361,7 @@ MonoBehaviour:
m_HandleRect: {fileID: 1039786615}
m_Direction: 2
m_Value: 0
m_Size: 0.99999994
m_Size: 1
m_NumberOfSteps: 0
m_OnValueChanged:
m_PersistentCalls:
@@ -1736,7 +1736,7 @@ RectTransform:
m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0}
m_AnchorMin: {x: 0, y: 0}
m_AnchorMax: {x: 1, y: 1}
m_AnchoredPosition: {x: 0, y: 0.000035862842}
m_AnchoredPosition: {x: 0, y: 0.000030518197}
m_SizeDelta: {x: 0, y: 0}
m_Pivot: {x: 0.5, y: 1}
--- !u!114 &1339781663

View file

@@ -1,29 +1,29 @@
using UnityEngine;
using UnityEngine.SceneManagement;
namespace FaceTrackerExample
{
/// <summary>
/// Show License
/// </summary>
public class ShowLicense : MonoBehaviour
{
// Use this for initialization
void Start()
{
}
// Update is called once per frame
void Update()
{
}
public void OnBackButton()
{
SceneManager.LoadScene("FaceTrackerExample");
}
}
}

View file

@@ -1,174 +1,174 @@
using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using System;
using System.Collections;
using UnityEngine;
using UnityEngine.SceneManagement;
namespace FaceTrackerExample
{
/// <summary>
/// Texture2D Face Tracker Example
/// </summary>
public class Texture2DFaceTrackerExample : MonoBehaviour
{
/// <summary>
/// The image texture.
/// </summary>
public Texture2D imgTexture;
/// <summary>
/// The tracker_model_json_filepath.
/// </summary>
private string tracker_model_json_filepath;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
#if UNITY_WEBGL
getFilePath_Coroutine = GetFilePath();
StartCoroutine(getFilePath_Coroutine);
#else
tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
Run();
#endif
}
#if UNITY_WEBGL
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("tracker_model.json", (result) =>
{
tracker_model_json_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) =>
{
haarcascade_frontalface_alt_xml_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
getFilePath_Coroutine = null;
Run();
}
#endif
private void Run()
{
gameObject.transform.localScale = new Vector3(imgTexture.width, imgTexture.height, 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = 0;
float height = 0;
width = gameObject.transform.localScale.x;
height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
//initialize FaceTracker
FaceTracker faceTracker = new FaceTracker(tracker_model_json_filepath);
//initialize FaceTrackerParams
FaceTrackerParams faceTrackerParams = new FaceTrackerParams();
Mat imgMat = new Mat(imgTexture.height, imgTexture.width, CvType.CV_8UC4);
Utils.texture2DToMat(imgTexture, imgMat);
Debug.Log("imgMat dst ToString " + imgMat.ToString());
CascadeClassifier cascade = new CascadeClassifier();
cascade.load(haarcascade_frontalface_alt_xml_filepath);
//if (cascade.empty())
//{
// Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
//}
//convert image to greyscale
Mat gray = new Mat();
Imgproc.cvtColor(imgMat, gray, Imgproc.COLOR_RGBA2GRAY);
MatOfRect faces = new MatOfRect();
Imgproc.equalizeHist(gray, gray);
cascade.detectMultiScale(gray, faces, 1.1f, 2, 0
// | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, new Size(gray.cols() * 0.05, gray.cols() * 0.05), new Size());
Debug.Log("faces " + faces.dump());
if (faces.rows() > 0)
{
//add initial face points from MatOfRect
faceTracker.addPoints(faces);
}
//track face points. If there are no face points, track() always returns false.
if (faceTracker.track(imgMat, faceTrackerParams)) faceTracker.draw(imgMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
Texture2D texture = new Texture2D(imgMat.cols(), imgMat.rows(), TextureFormat.RGBA32, false);
Utils.matToTexture2D(imgMat, texture);
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
cascade.Dispose();
}
// Update is called once per frame
void Update()
{
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
public void OnBackButton()
{
SceneManager.LoadScene("FaceTrackerExample");
}
}
}
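Run() above scales the quad to the texture size and then picks Camera.main.orthographicSize so the whole image fits on screen. A sketch of just that fit rule, assuming Unity's convention that orthographicSize is half the vertical extent of the view (FitOrthographicSize is a hypothetical helper for illustration):

using UnityEngine;

public static class CameraFitUtils
{
    // Mirror of the fit rule in Run(): when the screen is relatively
    // narrower than the quad, derive the half-height that makes the quad
    // span the full screen width; otherwise half the quad height fits.
    public static float FitOrthographicSize(float quadWidth, float quadHeight)
    {
        float widthScale = (float)Screen.width / quadWidth;
        float heightScale = (float)Screen.height / quadHeight;
        return (widthScale < heightScale)
            ? (quadWidth * (float)Screen.height / (float)Screen.width) / 2f
            : quadHeight / 2f;
    }
}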

View file

@@ -1,352 +1,352 @@
using OpenCVFaceTracker;
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.ObjdetectModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.SceneManagement;
using UnityEngine.UI;
namespace FaceTrackerExample
{
/// <summary>
/// WebCamTexture Face Tracker Example
/// </summary>
[RequireComponent(typeof(WebCamTextureToMatHelper))]
public class WebCamTextureFaceTrackerExample : MonoBehaviour
{
/// <summary>
/// The auto reset mode. If true, the face is tracked only when a face is detected in each frame.
/// </summary>
public bool isAutoResetMode;
/// <summary>
/// The auto reset mode toggle.
/// </summary>
public Toggle isAutoResetModeToggle;
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The face tracker.
/// </summary>
FaceTracker faceTracker;
/// <summary>
/// The face tracker parameters.
/// </summary>
FaceTrackerParams faceTrackerParams;
/// <summary>
/// The web cam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The tracker_model_json_filepath.
/// </summary>
private string tracker_model_json_filepath;
/// <summary>
/// The haarcascade_frontalface_alt_xml_filepath.
/// </summary>
private string haarcascade_frontalface_alt_xml_filepath;
#if UNITY_WEBGL
IEnumerator getFilePath_Coroutine;
#endif
// Use this for initialization
void Start()
{
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
isAutoResetModeToggle.isOn = isAutoResetMode;
#if UNITY_WEBGL
getFilePath_Coroutine = GetFilePath();
StartCoroutine(getFilePath_Coroutine);
#else
tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
Run();
#endif
}
#if UNITY_WEBGL
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("tracker_model.json", (result) =>
{
tracker_model_json_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) =>
{
haarcascade_frontalface_alt_xml_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
getFilePath_Coroutine = null;
Run();
}
#endif
private void Run()
{
//initialize FaceTracker
faceTracker = new FaceTracker(tracker_model_json_filepath);
//initialize FaceTrackerParams
faceTrackerParams = new FaceTrackerParams();
cascade = new CascadeClassifier();
cascade.load(haarcascade_frontalface_alt_xml_filepath);
//if (cascade.empty())
//{
// Debug.LogError("cascade file is not loaded. Please copy from “FaceTrackerExample/StreamingAssets/” to “Assets/StreamingAssets/” folder.");
//}
webCamTextureToMatHelper.Initialize();
}
/// <summary>
/// Raises the webcam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
float width = 0;
float height = 0;
width = gameObject.transform.localScale.x;
height = gameObject.transform.localScale.y;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}
gameObject.GetComponent<Renderer>().material.mainTexture = texture;
grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
}
/// <summary>
/// Raises the webcam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
faceTracker.reset();
grayMat.Dispose();
}
/// <summary>
/// Raises the webcam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
//convert image to greyscale
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
{
//Debug.Log ("detectFace");
//convert image to greyscale
using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect())
{
Imgproc.equalizeHist(grayMat, equalizeHistMat);
cascade.detectMultiScale(equalizeHistMat, faces, 1.1f, 2, 0
// | Objdetect.CASCADE_FIND_BIGGEST_OBJECT
| Objdetect.CASCADE_SCALE_IMAGE, new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15), new Size());
if (faces.rows() > 0)
{
//Debug.Log ("faces " + faces.dump ());
List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
List<Point[]> pointsList = faceTracker.getPoints();
if (isAutoResetMode)
{
//add initial face points from MatOfRect
if (pointsList.Count <= 0)
{
faceTracker.addPoints(faces);
//Debug.Log ("reset faces ");
}
else
{
for (int i = 0; i < rectsList.Count; i++)
{
OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList[i].x + rectsList[i].width / 3, rectsList[i].y + rectsList[i].height / 2, rectsList[i].width / 3, rectsList[i].height / 3);
//Determine whether the nose point is contained in trackRect.
if (i < pointsList.Count && !trackRect.contains(pointsList[i][67]))
{
rectsList.RemoveAt(i);
pointsList.RemoveAt(i);
//Debug.Log ("remove " + i);
i--; //re-examine the element that shifts into this index after RemoveAt
}
Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
}
}
}
else
{
faceTracker.addPoints(faces);
}
//draw face rect
for (int i = 0; i < rectsList.Count; i++)
{
Imgproc.rectangle(rgbaMat, new Point(rectsList[i].x, rectsList[i].y), new Point(rectsList[i].x + rectsList[i].width, rectsList[i].y + rectsList[i].height), new Scalar(255, 0, 0, 255), 2);
}
}
else
{
if (isAutoResetMode)
{
faceTracker.reset();
}
}
}
}
//track face points. If there are no face points, track() always returns false.
if (faceTracker.track(grayMat, faceTrackerParams)) faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
//Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Imgproc.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Utils.fastMatToTexture2D(rgbaMat, texture);
}
if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
{
faceTracker.reset();
}
}
/// <summary>
/// Raises the disable event.
/// </summary>
void OnDisable()
{
webCamTextureToMatHelper.Dispose();
if (cascade != null) cascade.Dispose();
#if UNITY_WEBGL
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
#endif
}
/// <summary>
/// Raises the back button event.
/// </summary>
public void OnBackButton()
{
SceneManager.LoadScene("FaceTrackerExample");
}
/// <summary>
/// Raises the play button event.
/// </summary>
public void OnPlayButton()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button event.
/// </summary>
public void OnPauseButton()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button event.
/// </summary>
public void OnStopButton()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button event.
/// </summary>
public void OnChangeCameraButton()
{
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing();
}
/// <summary>
/// Raises the change auto reset mode toggle event.
/// </summary>
public void OnIsAutoResetModeToggle()
{
if (isAutoResetModeToggle.isOn)
{
isAutoResetMode = true;
}
else
{
isAutoResetMode = false;
}
}
}
}
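In auto-reset mode, Update() above keeps an existing track only while one of its landmarks stays inside a sub-rectangle of the fresh detection, treating landmark index 67 as the nose point. A minimal sketch of that containment test, assuming the same landmark layout (NoseStillInside is a hypothetical helper; the rectangle mirrors the trackRect construction above):

using OpenCVForUnity.CoreModule;

public static class TrackValidationUtils
{
    // The inner rect covers the middle third of the detection
    // horizontally, starting at half its height; the track survives
    // only if the assumed nose landmark (index 67) falls inside it.
    public static bool NoseStillInside(Rect face, Point[] landmarks)
    {
        Rect inner = new Rect(face.x + face.width / 3,
                              face.y + face.height / 2,
                              face.width / 3,
                              face.height / 3);
        return landmarks.Length > 67 && inner.contains(landmarks[67]);
    }
}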