This commit is contained in:
EnoxSoftware 2020-02-11 18:03:13 +09:00
Parent 74263f69c5
Commit 78a205af2f
2 changed files with 439 additions and 380 deletions

View file

@@ -261,12 +261,14 @@ namespace FaceTrackerExample
#if UNITY_WEBGL && !UNITY_EDITOR
private IEnumerator GetFilePath()
{
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync ("tracker_model.json", (result) => {
var getFilePathAsync_0_Coroutine = Utils.getFilePathAsync("tracker_model.json", (result) =>
{
tracker_model_json_filepath = result;
});
yield return getFilePathAsync_0_Coroutine;
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
var getFilePathAsync_1_Coroutine = Utils.getFilePathAsync("haarcascade_frontalface_alt.xml", (result) =>
{
haarcascade_frontalface_alt_xml_filepath = result;
});
yield return getFilePathAsync_1_Coroutine;
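On WebGL builds the StreamingAssets folder cannot be read synchronously, so the example resolves each asset path with Utils.getFilePathAsync before the tracker model and cascade are loaded. A minimal sketch of how this coroutine is typically started and consumed, assuming the usual structure of the OpenCVForUnity examples (the synchronous branch and the follow-up initialization call are not part of this diff):

#if UNITY_WEBGL && !UNITY_EDITOR
    IEnumerator getFilePath_Coroutine;

    void Start()
    {
        // Resolve the file paths asynchronously, then continue initialization
        // once both callbacks have fired (e.g. in a Run() method, not shown here).
        getFilePath_Coroutine = GetFilePath();
        StartCoroutine(getFilePath_Coroutine);
    }
#else
    void Start()
    {
        // Outside WebGL the paths can be resolved synchronously.
        tracker_model_json_filepath = Utils.getFilePath("tracker_model.json");
        haarcascade_frontalface_alt_xml_filepath = Utils.getFilePath("haarcascade_frontalface_alt.xml");
    }
#endif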
@@ -284,10 +286,9 @@ namespace FaceTrackerExample
new Point3(31, 72, 86),//r eye
new Point3(0, 40, 114),//nose
new Point3(-20, 15, 90),//l mouse
new Point3 (20, 15, 90)//r mouse
// ,
// new Point3 (-70, 60, -9),//l ear
// new Point3 (70, 60, -9)//r ear
new Point3(20, 15, 90),//r mouse
new Point3(-70, 60, -9),//l ear
new Point3(70, 60, -9)//r ear
);
imagePoints = new MatOfPoint2f();
rvec = new Mat();
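The MatOfPoint3f built above is a rough 3D face model in millimetres (eyes, nose, mouth corners and, with this change, the ears); together with the tracked 2D landmarks it is the input for head-pose estimation. The pose itself is computed later in the class; a hedged sketch of that step, where the model-point variable is called objectPoints and camMatrix/distCoeffs stand for the camera intrinsics set up elsewhere (those names are assumptions, the API call is standard OpenCVForUnity):

// Estimate the head pose: rvec/tvec map the 3D model points onto the 2D landmarks.
Calib3d.solvePnP(objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);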
@@ -337,10 +338,13 @@ namespace FaceTrackerExample
float imageSizeScale = 1.0f;
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
imageSizeScale = (float)Screen.height / (float)Screen.width;
} else {
}
else
{
Camera.main.orthographicSize = height / 2;
}
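Camera.main.orthographicSize is half of the visible height in world units, so the branch above picks whichever value keeps the camera image covering the screen: the image height when the screen is relatively wide, or the height implied by the image width when the screen is relatively tall. The same choice written as a single expression (an equivalent rewrite, not code from the commit):

// Half of the visible height in world units, chosen so the image fills the screen.
float visibleHeight = (widthScale < heightScale)
    ? width * (float)Screen.height / (float)Screen.width
    : height;
Camera.main.orthographicSize = visibleHeight / 2f;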
@@ -398,9 +402,12 @@ namespace FaceTrackerExample
//Adjust Unity Camera FOV https://github.com/opencv/opencv/commit/8ed1945ccd52501f5ab22bdec6aa1f91f1e2cfd4
if (widthScale < heightScale) {
if (widthScale < heightScale)
{
ARCamera.fieldOfView = (float)(fovx[0] * fovXScale);
} else {
}
else
{
ARCamera.fieldOfView = (float)(fovy[0] * fovYScale);
}
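fovx and fovy are the horizontal and vertical fields of view derived from the camera matrix, and the branch keeps the axis that is not letter-boxed after the aspect-ratio fit above. A hedged sketch of how those values are typically obtained in this example family via Calib3d.calibrationMatrixValues (camMatrix and the zero aperture sizes are illustrative; the actual setup is outside this diff):

double[] fovx = new double[1];
double[] fovy = new double[1];
double[] focalLength = new double[1];
Point principalPoint = new Point(0, 0);
double[] aspectratio = new double[1];

// Derive the fields of view (in degrees) from the estimated camera matrix.
Calib3d.calibrationMatrixValues(camMatrix, new Size(width, height), 0, 0,
    fovx, fovy, focalLength, principalPoint, aspectratio);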
@@ -449,7 +456,8 @@ namespace FaceTrackerExample
void Update()
{
if (webCamTextureToMatHelper.IsPlaying () && webCamTextureToMatHelper.DidUpdateThisFrame ()) {
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
{
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
@@ -458,11 +466,13 @@ namespace FaceTrackerExample
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
if (isAutoResetMode || faceTracker.getPoints ().Count <= 0) {
if (isAutoResetMode || faceTracker.getPoints().Count <= 0)
{
// Debug.Log ("detectFace");
//convert image to greyscale
using (Mat equalizeHistMat = new Mat ()) using (MatOfRect faces = new MatOfRect ()) {
using (Mat equalizeHistMat = new Mat()) using (MatOfRect faces = new MatOfRect())
{
Imgproc.equalizeHist(grayMat, equalizeHistMat);
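The histogram-equalized image feeds the Haar-cascade detection that fills faces; the detectMultiScale call itself sits between these hunks and is not shown. A hypothetical sketch of what that step usually looks like (scale factor, flags and size thresholds are illustrative assumptions, not taken from the commit):

// Detect frontal faces on the equalized gray image.
cascade.detectMultiScale(equalizeHistMat, faces, 1.1, 2,
    Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
    new Size(equalizeHistMat.cols() * 0.15, equalizeHistMat.cols() * 0.15),
    new Size());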
@@ -472,24 +482,31 @@ namespace FaceTrackerExample
if (faces.rows () > 0) {
if (faces.rows() > 0)
{
// Debug.Log ("faces " + faces.dump ());
List<OpenCVForUnity.CoreModule.Rect> rectsList = faces.toList();
List<Point[]> pointsList = faceTracker.getPoints();
if (isAutoResetMode) {
if (isAutoResetMode)
{
//add initial face points from MatOfRect
if (pointsList.Count <= 0) {
if (pointsList.Count <= 0)
{
faceTracker.addPoints(faces);
// Debug.Log ("reset faces ");
} else {
}
else
{
for (int i = 0; i < rectsList.Count; i++) {
for (int i = 0; i < rectsList.Count; i++)
{
OpenCVForUnity.CoreModule.Rect trackRect = new OpenCVForUnity.CoreModule.Rect(rectsList[i].x + rectsList[i].width / 3, rectsList[i].y + rectsList[i].height / 2, rectsList[i].width / 3, rectsList[i].height / 3);
//It determines whether nose point has been included in trackRect.
if (i < pointsList.Count && !trackRect.contains (pointsList [i] [67])) {
if (i < pointsList.Count && !trackRect.contains(pointsList[i][67]))
{
rectsList.RemoveAt(i);
pointsList.RemoveAt(i);
// Debug.Log ("remove " + i);
@@ -497,17 +514,23 @@ namespace FaceTrackerExample
Imgproc.rectangle(rgbaMat, new Point(trackRect.x, trackRect.y), new Point(trackRect.x + trackRect.width, trackRect.y + trackRect.height), new Scalar(0, 0, 255, 255), 2);
}
}
} else {
}
else
{
faceTracker.addPoints(faces);
}
//draw face rect
for (int i = 0; i < rectsList.Count; i++) {
for (int i = 0; i < rectsList.Count; i++)
{
Imgproc.rectangle(rgbaMat, new Point(rectsList[i].x, rectsList[i].y), new Point(rectsList[i].x + rectsList[i].width, rectsList[i].y + rectsList[i].height), new Scalar(255, 0, 0, 255), 2);
}
} else {
if (isAutoResetMode) {
}
else
{
if (isAutoResetMode)
{
faceTracker.reset();
rightEye.SetActive(false);
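In auto-reset mode the tracker is re-seeded from the detector whenever it has no points, and an existing track is dropped when its nose landmark (index 67 in this model) falls outside a central sub-rectangle of the freshly detected face. The same test pulled out into a helper, as a refactoring sketch rather than code from the commit:

// True while the tracked nose point still lies inside the central part of the detected face rect.
static bool IsTrackStillValid(OpenCVForUnity.CoreModule.Rect faceRect, Point[] trackedPoints)
{
    var trackRect = new OpenCVForUnity.CoreModule.Rect(
        faceRect.x + faceRect.width / 3,
        faceRect.y + faceRect.height / 2,
        faceRect.width / 3,
        faceRect.height / 3);
    return trackRect.contains(trackedPoints[67]); // 67 = nose point in this landmark layout
}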
@@ -522,7 +545,8 @@ namespace FaceTrackerExample
//track face points.if face points <= 0, always return false.
if (faceTracker.track (grayMat, faceTrackerParams)) {
if (faceTracker.track(grayMat, faceTrackerParams))
{
if (isShowingFacePoints) faceTracker.draw(rgbaMat, new Scalar(255, 0, 0, 255), new Scalar(0, 255, 0, 255));
Imgproc.putText(rgbaMat, "'Tap' or 'Space Key' to Reset", new Point(5, rgbaMat.rows() - 5), Imgproc.FONT_HERSHEY_SIMPLEX, 0.8, new Scalar(255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
@@ -531,7 +555,8 @@ namespace FaceTrackerExample
Point[] points = faceTracker.getPoints()[0];
if (points.Length > 0) {
if (points.Length > 0)
{
//for (int i = 0; i < points.Length; i++)
//{
@@ -544,10 +569,9 @@ namespace FaceTrackerExample
points[36],//r eye
points[67],//nose
points[48],//l mouth
points [54] //r mouth
// ,
// points [0],//l ear
// points [14]//r ear
points[54], //r mouth
points[0],//l ear
points[14]//r ear
);
@@ -555,27 +579,32 @@ namespace FaceTrackerExample
bool isRefresh = false;
if (tvec.get (2, 0) [0] > 0 && tvec.get (2, 0) [0] < 1200 * ((float)rgbaMat.cols () / (float)webCamTextureToMatHelper.requestedWidth)) {
if (tvec.dims() != 0 && tvec.get(2, 0)[0] > 0 && tvec.get(2, 0)[0] < 1200 * ((float)rgbaMat.cols() / (float)webCamTextureToMatHelper.requestedWidth))
{
isRefresh = true;
if (oldRvec == null) {
if (oldRvec == null)
{
oldRvec = new Mat();
rvec.copyTo(oldRvec);
}
if (oldTvec == null) {
if (oldTvec == null)
{
oldTvec = new Mat();
tvec.copyTo(oldTvec);
}
//filter Rvec Noise.
using (Mat absDiffRvec = new Mat ()) {
using (Mat absDiffRvec = new Mat())
{
Core.absdiff(rvec, oldRvec, absDiffRvec);
//Debug.Log ("absDiffRvec " + absDiffRvec.dump());
using (Mat cmpRvec = new Mat ()) {
using (Mat cmpRvec = new Mat())
{
Core.compare(absDiffRvec, new Scalar(rvecNoiseFilterRange), cmpRvec, Core.CMP_GT);
if (Core.countNonZero(cmpRvec) > 0) isRefresh = false;
@@ -583,12 +612,14 @@ namespace FaceTrackerExample
}
//filter Tvec Noise.
using (Mat absDiffTvec = new Mat ()) {
using (Mat absDiffTvec = new Mat())
{
Core.absdiff(tvec, oldTvec, absDiffTvec);
//Debug.Log ("absDiffRvec " + absDiffRvec.dump());
using (Mat cmpTvec = new Mat ()) {
using (Mat cmpTvec = new Mat())
{
Core.compare(absDiffTvec, new Scalar(tvecNoiseFilterRange), cmpTvec, Core.CMP_GT);
if (Core.countNonZero(cmpTvec) > 0) isRefresh = false;
@@ -596,7 +627,8 @@ namespace FaceTrackerExample
}
}
if (isRefresh) {
if (isRefresh)
{
if (isShowingEffects) rightEye.SetActive(true);
if (isShowingEffects) leftEye.SetActive(true);
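The pose is only refreshed when tvec is non-empty (the new dims() guard), the estimated depth lies in a plausible range for the requested capture width, and neither rvec nor tvec jumped past the configured noise thresholds since the previous frame. The element-wise jump test condensed into a helper, using the same OpenCV calls as above (a refactoring sketch, not code from the commit):

// True when any element of |current - previous| exceeds the threshold.
static bool HasLargeJump(Mat current, Mat previous, double threshold)
{
    using (Mat absDiff = new Mat())
    using (Mat cmp = new Mat())
    {
        Core.absdiff(current, previous, absDiff);
        Core.compare(absDiff, new Scalar(threshold), cmp, Core.CMP_GT);
        return Core.countNonZero(cmp) > 0;
    }
}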
@@ -606,11 +638,14 @@ namespace FaceTrackerExample
if ((Mathf.Abs((float)(points[48].x - points[56].x)) < Mathf.Abs((float)(points[31].x - points[36].x)) / 2.2
&& Mathf.Abs((float)(points[51].y - points[57].y)) > Mathf.Abs((float)(points[31].x - points[36].x)) / 2.9)
|| Mathf.Abs ((float)(points [51].y - points [57].y)) > Mathf.Abs ((float)(points [31].x - points [36].x)) / 2.7) {
|| Mathf.Abs((float)(points[51].y - points[57].y)) > Mathf.Abs((float)(points[31].x - points[36].x)) / 2.7)
{
if (isShowingEffects) mouth.SetActive(true);
} else {
}
else
{
if (isShowingEffects) mouth.SetActive(false);
}
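The mouth effect is driven by a purely geometric heuristic: the mouth counts as open when the vertical gap between landmarks 51 and 57 is large relative to the horizontal distance between landmarks 31 and 36, or when the horizontal mouth extent (48 to 56) shrinks while a smaller gap is still present. The same condition as a small helper, kept only to make the thresholds easier to read (a refactoring sketch reusing the constants above):

static bool IsMouthOpen(Point[] p)
{
    float eyeDistX   = Mathf.Abs((float)(p[31].x - p[36].x)); // horizontal eye distance
    float mouthWidth = Mathf.Abs((float)(p[48].x - p[56].x)); // horizontal mouth extent
    float mouthGapY  = Mathf.Abs((float)(p[51].y - p[57].y)); // upper vs. lower lip gap

    return (mouthWidth < eyeDistX / 2.2f && mouthGapY > eyeDistX / 2.9f)
        || mouthGapY > eyeDistX / 2.7f;
}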
@@ -630,17 +665,22 @@ namespace FaceTrackerExample
// Apply Z-axis inverted matrix.
ARM = ARM * invertZM;
if (shouldMoveARCamera) {
if (shouldMoveARCamera)
{
if (ARGameObject != null) {
if (ARGameObject != null)
{
ARM = ARGameObject.transform.localToWorldMatrix * ARM.inverse;
ARUtils.SetTransformFromMatrix(ARCamera.transform, ref ARM);
ARGameObject.SetActive(true);
}
} else {
}
else
{
ARM = ARCamera.transform.localToWorldMatrix * ARM;
if (ARGameObject != null) {
if (ARGameObject != null)
{
ARUtils.SetTransformFromMatrix(ARGameObject.transform, ref ARM);
ARGameObject.SetActive(true);
}
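ARM is the face pose in camera space; depending on shouldMoveARCamera either the AR camera is moved relative to a fixed AR object or the AR object is placed relative to a fixed camera. ARUtils.SetTransformFromMatrix decomposes a 4x4 TRS matrix onto a Unity Transform; a hedged sketch of what such a helper does (the real implementation ships with the asset and may differ in detail):

// Apply the translation, rotation and scale encoded in a TRS matrix to a Transform.
public static void SetTransformFromMatrix(Transform transform, ref Matrix4x4 matrix)
{
    transform.localPosition = matrix.GetColumn(3);                 // translation
    transform.localRotation = Quaternion.LookRotation(
        matrix.GetColumn(2), matrix.GetColumn(1));                 // rotation from forward/up
    transform.localScale = new Vector3(
        matrix.GetColumn(0).magnitude,
        matrix.GetColumn(1).magnitude,
        matrix.GetColumn(2).magnitude);                            // scale from basis lengths
}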
@@ -654,13 +694,16 @@ namespace FaceTrackerExample
Utils.fastMatToTexture2D(rgbaMat, texture);
}
if (Input.GetKeyUp (KeyCode.Space) || Input.touchCount > 0) {
if (Input.GetKeyUp(KeyCode.Space) || Input.touchCount > 0)
{
faceTracker.reset();
if (oldRvec != null) {
if (oldRvec != null)
{
oldRvec.Dispose();
oldRvec = null;
}
if (oldTvec != null) {
if (oldTvec != null)
{
oldTvec.Dispose();
oldTvec = null;
}
@@ -683,7 +726,8 @@ namespace FaceTrackerExample
if (cascade != null) cascade.Dispose();
#if UNITY_WEBGL && !UNITY_EDITOR
if (getFilePath_Coroutine != null) {
if (getFilePath_Coroutine != null)
{
StopCoroutine(getFilePath_Coroutine);
((IDisposable)getFilePath_Coroutine).Dispose();
}
@@ -735,9 +779,12 @@ namespace FaceTrackerExample
/// </summary>
public void OnIsShowingFacePointsToggle()
{
if (isShowingFacePointsToggle.isOn) {
if (isShowingFacePointsToggle.isOn)
{
isShowingFacePoints = true;
} else {
}
else
{
isShowingFacePoints = false;
}
}
@@ -747,9 +794,12 @@ namespace FaceTrackerExample
/// </summary>
public void OnIsShowingAxesToggle()
{
if (isShowingAxesToggle.isOn) {
if (isShowingAxesToggle.isOn)
{
isShowingAxes = true;
} else {
}
else
{
isShowingAxes = false;
axes.SetActive(false);
}
@@ -760,9 +810,12 @@ namespace FaceTrackerExample
/// </summary>
public void OnIsShowingHeadToggle()
{
if (isShowingHeadToggle.isOn) {
if (isShowingHeadToggle.isOn)
{
isShowingHead = true;
} else {
}
else
{
isShowingHead = false;
head.SetActive(false);
}
@@ -773,9 +826,12 @@ namespace FaceTrackerExample
/// </summary>
public void OnIsShowingEffectsToggle()
{
if (isShowingEffectsToggle.isOn) {
if (isShowingEffectsToggle.isOn)
{
isShowingEffects = true;
} else {
}
else
{
isShowingEffects = false;
rightEye.SetActive(false);
leftEye.SetActive(false);
@@ -788,9 +844,12 @@ namespace FaceTrackerExample
/// </summary>
public void OnIsAutoResetModeToggle()
{
if (isAutoResetModeToggle.isOn) {
if (isAutoResetModeToggle.isOn)
{
isAutoResetMode = true;
} else {
}
else
{
isAutoResetMode = false;
}
}
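Each of these handlers just mirrors a UI Toggle's isOn state into a bool field, hiding the related game objects when a feature is switched off. Where no side effect is needed, the if/else can collapse to a single assignment; a style sketch, not part of the commit:

public void OnIsShowingFacePointsToggle()
{
    // Equivalent to the if/else above when nothing else has to be reset.
    isShowingFacePoints = isShowingFacePointsToggle.isOn;
}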

Binary data
Assets/FaceTrackerExample/ReadMe.pdf

Binary file not shown.