Initial Version
This commit is contained in:
Commit
987600ebdf
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 042de2a632ffe5640bc2ce85b2c331bf
|
||||
folderAsset: yes
|
||||
timeCreated: 1463603964
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
|
@ -0,0 +1,4 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 2fe89ad658086d24f8fe283057df260d
|
||||
NativeFormatImporter:
|
||||
userData:
|
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: b92092c4dce17b44994e61262be633b1
|
||||
folderAsset: yes
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 5dfd231f6ef964a49bdc678672d6eada
|
||||
folderAsset: yes
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,31 @@
|
|||
using System;
|
||||
|
||||
namespace OpenCVForUnity.FaceSwap
|
||||
{
|
||||
public class DlibFaceSwapper : FaceSwapper
|
||||
{
|
||||
// Finds facial landmarks on faces and extracts the useful points
|
||||
protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
|
||||
{
|
||||
if (landmark_points.Length != 68)
|
||||
throw new ArgumentException("Invalid landmark_points.");
|
||||
|
||||
//points(facial contour)
|
||||
points[0] = landmark_points[0];
|
||||
points[1] = landmark_points[3];
|
||||
points[2] = landmark_points[5];
|
||||
points[3] = landmark_points[8];
|
||||
points[4] = landmark_points[11];
|
||||
points[5] = landmark_points[13];
|
||||
points[6] = landmark_points[16];
|
||||
Point nose_length = new Point(landmark_points[27].x - landmark_points[30].x, landmark_points[27].y - landmark_points[30].y);
|
||||
points[7] = new Point(landmark_points[26].x + nose_length.x, landmark_points[26].y + nose_length.y);
|
||||
points[8] = new Point(landmark_points[17].x + nose_length.x, landmark_points[17].y + nose_length.y);
|
||||
|
||||
//affine_transform_keypoints(eyes and chin)
|
||||
affine_transform_keypoints[0] = points[3];
|
||||
affine_transform_keypoints[1] = landmark_points[36];
|
||||
affine_transform_keypoints[2] = landmark_points[45];
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: f03be5c9f0658d14cb6605b9603ac6dc
|
||||
timeCreated: 1464539199
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,740 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
namespace OpenCVForUnity.FaceSwap
|
||||
{
|
||||
/// <summary>
|
||||
/// FaceSwapper.
|
||||
/// A rewrite of https://github.com/mc-jesus/FaceSwap using "OpenCV for Unity" and "Dlib FaceLandmark Detector".
|
||||
/// </summary>
|
||||
public class FaceSwapper : IDisposable
|
||||
{
|
||||
|
||||
protected Rect rect_ann, rect_bob;
|
||||
protected float maskAlpha;
|
||||
|
||||
protected Point[] points_ann = new Point[9];
|
||||
protected Point[] points_bob = new Point[9];
|
||||
protected Point[] affine_transform_keypoints_ann = new Point[3];
|
||||
protected Point[] affine_transform_keypoints_bob = new Point[3];
|
||||
protected Size feather_amount = new Size();
|
||||
|
||||
protected Mat small_frame;
|
||||
protected Size small_frame_size;
|
||||
protected Rect small_frame_rect;
|
||||
protected Mat frame_c3;
|
||||
|
||||
//full size Mat
|
||||
protected Mat refined_ann_and_bob_warpped_full;
|
||||
protected Mat refined_bob_and_ann_warpped_full;
|
||||
protected Mat warpped_face_ann_full;
|
||||
protected Mat warpped_face_bob_full;
|
||||
protected Mat mask_ann_full;
|
||||
protected Mat mask_bob_full;
|
||||
protected Mat warpped_mask_ann_full;
|
||||
protected Mat warpped_mask_bob_full;
|
||||
protected Mat refined_masks_full;
|
||||
protected Mat face_ann_full;
|
||||
protected Mat face_bob_full;
|
||||
protected Mat warpped_faces_full;
|
||||
|
||||
//ROI Mat
|
||||
protected Mat refined_ann_and_bob_warpped;
|
||||
protected Mat refined_bob_and_ann_warpped;
|
||||
protected Mat warpped_face_ann;
|
||||
protected Mat warpped_face_bob;
|
||||
protected Mat mask_ann;
|
||||
protected Mat mask_bob;
|
||||
protected Mat warpped_mask_ann;
|
||||
protected Mat warpped_mask_bob;
|
||||
protected Mat refined_masks;
|
||||
protected Mat face_ann;
|
||||
protected Mat face_bob;
|
||||
protected Mat warpped_faces;
|
||||
|
||||
//affineTransforms
|
||||
protected Mat trans_ann_to_bob;
|
||||
protected Mat trans_bob_to_ann;
|
||||
|
||||
|
||||
protected byte[,] LUT = new byte[3, 256];
|
||||
protected int[,] source_hist_int = new int[3, 256];
|
||||
protected int[,] target_hist_int = new int[3, 256];
|
||||
protected float[,] source_histogram = new float[3, 256];
|
||||
protected float[,] target_histogram = new float[3, 256];
|
||||
|
||||
|
||||
public bool useSeamlessCloneForPasteFaces;
|
||||
public bool isShowingDebugFacePoints;
|
||||
|
||||
|
||||
// Initializes the face swapper
|
||||
public FaceSwapper()
|
||||
{
|
||||
trans_ann_to_bob = new Mat();
|
||||
trans_bob_to_ann = new Mat();
|
||||
}
|
||||
|
||||
//Swaps faces in points on frame
|
||||
public void SwapFaces(Mat frame, Point[] landmark_points_ann, Point[] landmark_points_bob, float alpha)
|
||||
{
|
||||
alpha = alpha > 0 ? alpha : 0;
|
||||
alpha = alpha < 1 ? alpha : 1;
|
||||
maskAlpha = alpha;
|
||||
|
||||
|
||||
getFacePoints(landmark_points_ann, points_ann, affine_transform_keypoints_ann);
|
||||
getFacePoints(landmark_points_bob, points_bob, affine_transform_keypoints_bob);
|
||||
getFeather_amount(points_ann, feather_amount);
|
||||
|
||||
rect_ann = Imgproc.boundingRect(new MatOfPoint(points_ann));
|
||||
rect_bob = Imgproc.boundingRect(new MatOfPoint(points_bob));
|
||||
|
||||
|
||||
Mat original_frame = frame;
|
||||
if (useSeamlessCloneForPasteFaces && (CvType.channels(original_frame.type()) == 4))
|
||||
{
|
||||
if (frame_c3 == null
|
||||
|| (frame_c3.width() != warpped_faces_full.width() || frame_c3.height() != warpped_faces_full.height())
|
||||
|| frame_c3.type() != warpped_faces_full.type())
|
||||
{
|
||||
if (frame_c3 != null)
|
||||
{
|
||||
frame_c3.Dispose();
|
||||
frame_c3 = null;
|
||||
}
|
||||
frame_c3 = new Mat();
|
||||
}
|
||||
Imgproc.cvtColor(frame, frame_c3, Imgproc.COLOR_RGBA2RGB);
|
||||
frame = frame_c3;
|
||||
}
|
||||
|
||||
if (small_frame != null)
|
||||
{
|
||||
small_frame.Dispose();
|
||||
}
|
||||
small_frame_rect = getMinFrameRect(frame, rect_ann, rect_bob);
|
||||
small_frame = new Mat(frame, small_frame_rect);
|
||||
small_frame_size = new Size(small_frame.cols(), small_frame.rows());
|
||||
|
||||
|
||||
if (warpped_faces_full == null
|
||||
|| (frame.width() != warpped_faces_full.width() || frame.height() != warpped_faces_full.height())
|
||||
|| frame.type() != warpped_faces_full.type())
|
||||
{
|
||||
createMat(frame);
|
||||
}
|
||||
createMatROI(small_frame_rect);
|
||||
|
||||
|
||||
rect_ann = getRectInFrame(small_frame, rect_ann);
|
||||
rect_bob = getRectInFrame(small_frame, rect_bob);
|
||||
points_ann = getPointsInFrame(small_frame, points_ann);
|
||||
points_bob = getPointsInFrame(small_frame, points_bob);
|
||||
affine_transform_keypoints_ann = getPointsInFrame(small_frame, affine_transform_keypoints_ann);
|
||||
affine_transform_keypoints_bob = getPointsInFrame(small_frame, affine_transform_keypoints_bob);
|
||||
|
||||
|
||||
getTransformationMatrices();
|
||||
getMasks();
|
||||
getWarppedMasks();
|
||||
refined_masks = getRefinedMasks();
|
||||
extractFaces();
|
||||
warpped_faces = getWarppedFaces();
|
||||
|
||||
|
||||
if (useSeamlessCloneForPasteFaces)
|
||||
{
|
||||
pasteFacesOnFrameUsingSeamlessClone(frame);
|
||||
|
||||
if (CvType.channels(original_frame.type()) == 4)
|
||||
{
|
||||
Imgproc.cvtColor(frame, original_frame, Imgproc.COLOR_RGB2RGBA);
|
||||
|
||||
frame = original_frame;
|
||||
if (small_frame != null)
|
||||
{
|
||||
small_frame.Dispose();
|
||||
small_frame = null;
|
||||
}
|
||||
small_frame = new Mat(frame, small_frame_rect);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
colorCorrectFaces();
|
||||
featherMasks();
|
||||
pasteFacesOnFrame();
|
||||
}
|
||||
|
||||
|
||||
if (isShowingDebugFacePoints)
|
||||
{
|
||||
drawDebugFacePoints();
|
||||
}
|
||||
}
|
||||
|
||||
public void SwapFaces(Mat frame, List<Point> landmark_points_ann, List<Point> landmark_points_bob, float alpha)
|
||||
{
|
||||
SwapFaces(frame, landmark_points_ann.ToArray(), landmark_points_bob.ToArray(), alpha);
|
||||
}
|
||||
|
||||
public void SwapFaces(Mat frame, Vector2[] landmark_points_ann, Vector2[] landmark_points_bob, float alpha)
|
||||
{
|
||||
Point[] landmark_points_ann_arr = new Point[landmark_points_ann.Length];
|
||||
for (int i = 0; i < landmark_points_ann_arr.Length; i++)
|
||||
{
|
||||
landmark_points_ann_arr[i] = new Point(landmark_points_ann[i].x, landmark_points_ann[i].y);
|
||||
}
|
||||
|
||||
Point[] landmark_points_bob_arr = new Point[landmark_points_bob.Length];
|
||||
for (int i = 0; i < landmark_points_bob_arr.Length; i++)
|
||||
{
|
||||
landmark_points_bob_arr[i] = new Point(landmark_points_bob[i].x, landmark_points_bob[i].y);
|
||||
}
|
||||
|
||||
SwapFaces(frame, landmark_points_ann_arr, landmark_points_bob_arr, alpha);
|
||||
}
|
||||
|
||||
public void SwapFaces(Mat frame, List<Vector2> landmark_points_ann, List<Vector2> landmark_points_bob, float alpha)
|
||||
{
|
||||
Point[] landmark_points_ann_arr = new Point[landmark_points_ann.Count];
|
||||
for (int i = 0; i < landmark_points_ann_arr.Length; i++)
|
||||
{
|
||||
landmark_points_ann_arr[i] = new Point(landmark_points_ann[i].x, landmark_points_ann[i].y);
|
||||
}
|
||||
|
||||
Point[] landmark_points_bob_arr = new Point[landmark_points_bob.Count];
|
||||
for (int i = 0; i < landmark_points_bob_arr.Length; i++)
|
||||
{
|
||||
landmark_points_bob_arr[i] = new Point(landmark_points_bob[i].x, landmark_points_bob[i].y);
|
||||
}
|
||||
|
||||
SwapFaces(frame, landmark_points_ann_arr, landmark_points_bob_arr, alpha);
|
||||
}
|
||||
|
||||
private void createMat(Mat frame)
|
||||
{
|
||||
disposeMat();
|
||||
|
||||
Size size = frame.size();
|
||||
int type = frame.type();
|
||||
|
||||
refined_ann_and_bob_warpped_full = new Mat(size, CvType.CV_8UC1);
|
||||
refined_bob_and_ann_warpped_full = new Mat(size, CvType.CV_8UC1);
|
||||
|
||||
warpped_face_ann_full = new Mat(size, type);
|
||||
warpped_face_bob_full = new Mat(size, type);
|
||||
|
||||
mask_ann_full = new Mat(size, CvType.CV_8UC1);
|
||||
mask_bob_full = new Mat(size, CvType.CV_8UC1);
|
||||
warpped_mask_ann_full = new Mat(size, CvType.CV_8UC1);
|
||||
warpped_mask_bob_full = new Mat(size, CvType.CV_8UC1);
|
||||
refined_masks_full = new Mat(size, CvType.CV_8UC1, new Scalar(0));
|
||||
|
||||
face_ann_full = new Mat(size, type);
|
||||
face_bob_full = new Mat(size, type);
|
||||
warpped_faces_full = new Mat(size, type, Scalar.all(0));
|
||||
}
|
||||
|
||||
private void disposeMat()
|
||||
{
|
||||
if (refined_ann_and_bob_warpped_full != null)
|
||||
{
|
||||
refined_ann_and_bob_warpped_full.Dispose();
|
||||
refined_ann_and_bob_warpped_full = null;
|
||||
}
|
||||
if (refined_bob_and_ann_warpped_full != null)
|
||||
{
|
||||
refined_bob_and_ann_warpped_full.Dispose();
|
||||
refined_bob_and_ann_warpped_full = null;
|
||||
}
|
||||
|
||||
if (warpped_face_ann_full != null)
|
||||
{
|
||||
warpped_face_ann_full.Dispose();
|
||||
warpped_face_ann_full = null;
|
||||
}
|
||||
if (warpped_face_bob_full != null)
|
||||
{
|
||||
warpped_face_bob_full.Dispose();
|
||||
warpped_face_bob_full = null;
|
||||
}
|
||||
|
||||
if (mask_ann_full != null)
|
||||
{
|
||||
mask_ann_full.Dispose();
|
||||
mask_ann_full = null;
|
||||
}
|
||||
if (mask_bob_full != null)
|
||||
{
|
||||
mask_bob_full.Dispose();
|
||||
mask_bob_full = null;
|
||||
}
|
||||
if (warpped_mask_ann_full != null)
|
||||
{
|
||||
warpped_mask_ann_full.Dispose();
|
||||
warpped_mask_ann_full = null;
|
||||
}
|
||||
if (warpped_mask_bob_full != null)
|
||||
{
|
||||
warpped_mask_bob_full.Dispose();
|
||||
warpped_mask_bob_full = null;
|
||||
}
|
||||
if (refined_masks_full != null)
|
||||
{
|
||||
refined_masks_full.Dispose();
|
||||
refined_masks_full = null;
|
||||
}
|
||||
|
||||
if (face_ann_full != null)
|
||||
{
|
||||
face_ann_full.Dispose();
|
||||
face_ann_full = null;
|
||||
}
|
||||
if (face_bob_full != null)
|
||||
{
|
||||
face_bob_full.Dispose();
|
||||
face_bob_full = null;
|
||||
}
|
||||
if (warpped_faces_full != null)
|
||||
{
|
||||
warpped_faces_full.Dispose();
|
||||
warpped_faces_full = null;
|
||||
}
|
||||
}
|
||||
|
||||
private void createMatROI(Rect roi)
|
||||
{
|
||||
disposeMatROI();
|
||||
|
||||
refined_ann_and_bob_warpped = new Mat(refined_ann_and_bob_warpped_full, roi);
|
||||
refined_bob_and_ann_warpped = new Mat(refined_bob_and_ann_warpped_full, roi);
|
||||
|
||||
warpped_face_ann = new Mat(warpped_face_ann_full, roi);
|
||||
warpped_face_bob = new Mat(warpped_face_bob_full, roi);
|
||||
|
||||
mask_ann = new Mat(mask_ann_full, roi);
|
||||
mask_bob = new Mat(mask_bob_full, roi);
|
||||
warpped_mask_ann = new Mat(warpped_mask_ann_full, roi);
|
||||
warpped_mask_bob = new Mat(warpped_mask_bob_full, roi);
|
||||
refined_masks = new Mat(refined_masks_full, roi);
|
||||
|
||||
face_ann = new Mat(face_ann_full, roi);
|
||||
face_bob = new Mat(face_bob_full, roi);
|
||||
warpped_faces = new Mat(warpped_faces_full, roi);
|
||||
}
|
||||
|
||||
private void disposeMatROI()
|
||||
{
|
||||
if (refined_ann_and_bob_warpped != null)
|
||||
{
|
||||
refined_ann_and_bob_warpped.Dispose();
|
||||
refined_ann_and_bob_warpped = null;
|
||||
}
|
||||
if (refined_bob_and_ann_warpped != null)
|
||||
{
|
||||
refined_bob_and_ann_warpped.Dispose();
|
||||
refined_bob_and_ann_warpped = null;
|
||||
}
|
||||
|
||||
if (warpped_face_ann != null)
|
||||
{
|
||||
warpped_face_ann.Dispose();
|
||||
warpped_face_ann = null;
|
||||
}
|
||||
if (warpped_face_bob != null)
|
||||
{
|
||||
warpped_face_bob.Dispose();
|
||||
warpped_face_bob = null;
|
||||
}
|
||||
|
||||
if (mask_ann != null)
|
||||
{
|
||||
mask_ann.Dispose();
|
||||
mask_ann = null;
|
||||
}
|
||||
if (mask_bob != null)
|
||||
{
|
||||
mask_bob.Dispose();
|
||||
mask_bob = null;
|
||||
}
|
||||
if (warpped_mask_ann != null)
|
||||
{
|
||||
warpped_mask_ann.Dispose();
|
||||
warpped_mask_ann = null;
|
||||
}
|
||||
if (warpped_mask_bob != null)
|
||||
{
|
||||
warpped_mask_bob.Dispose();
|
||||
warpped_mask_bob = null;
|
||||
}
|
||||
if (refined_masks != null)
|
||||
{
|
||||
refined_masks.Dispose();
|
||||
refined_masks = null;
|
||||
}
|
||||
|
||||
if (face_ann != null)
|
||||
{
|
||||
face_ann.Dispose();
|
||||
face_ann = null;
|
||||
}
|
||||
if (face_bob != null)
|
||||
{
|
||||
face_bob.Dispose();
|
||||
face_bob = null;
|
||||
}
|
||||
if (warpped_faces != null)
|
||||
{
|
||||
warpped_faces.Dispose();
|
||||
warpped_faces = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Returns minimal Mat containing both faces
|
||||
private Rect getMinFrameRect(Mat frame, Rect rect_ann, Rect rect_bob)
|
||||
{
|
||||
Rect bounding_rect = RectUtils.Union(rect_ann, rect_bob);
|
||||
bounding_rect = RectUtils.Intersect(bounding_rect, new Rect(0, 0, frame.cols(), frame.rows()));
|
||||
|
||||
return bounding_rect;
|
||||
}
|
||||
|
||||
private Rect getRectInFrame(Mat frame, Rect r)
|
||||
{
|
||||
Size wholesize = new Size();
|
||||
Point ofs = new Point();
|
||||
frame.locateROI(wholesize, ofs);
|
||||
|
||||
Rect rect = new Rect(r.x - (int)ofs.x, r.y - (int)ofs.y, r.width, r.height);
|
||||
rect = RectUtils.Intersect(rect, new Rect(0, 0, frame.cols(), frame.rows()));
|
||||
|
||||
return rect;
|
||||
}
|
||||
|
||||
// Finds facial landmarks on faces and extracts the useful points
|
||||
protected virtual void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
|
||||
{
|
||||
if (landmark_points.Length != 9)
|
||||
throw new ArgumentException("Invalid landmark_points.");
|
||||
|
||||
//points(facial contour)
|
||||
points[0] = landmark_points[0];
|
||||
points[1] = landmark_points[1];
|
||||
points[2] = landmark_points[2];
|
||||
points[3] = landmark_points[3];
|
||||
points[4] = landmark_points[4];
|
||||
points[5] = landmark_points[5];
|
||||
points[6] = landmark_points[6];
|
||||
points[7] = landmark_points[7];
|
||||
points[8] = landmark_points[8];
|
||||
|
||||
//affine_transform_keypoints(eyes and chin)
|
||||
affine_transform_keypoints[0] = points[3];
|
||||
affine_transform_keypoints[1] = new Point(points[8].x, points[0].y);
|
||||
affine_transform_keypoints[2] = new Point(points[7].x, points[6].y);
|
||||
}
|
||||
|
||||
private void getFeather_amount(Point[] points, Size feather_amount)
|
||||
{
|
||||
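// Feather size is 1/8 of the distance between the two ends of the facial contour (points[0] and points[6]).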
feather_amount.width = feather_amount.height = Core.norm(new MatOfPoint(new Point(points[0].x - points[6].x, points[0].y - points[6].y))) / 8;
|
||||
}
|
||||
|
||||
private Point[] getPointsInFrame(Mat frame, Point[] p)
|
||||
{
|
||||
Size wholesize = new Size();
|
||||
Point ofs = new Point();
|
||||
frame.locateROI(wholesize, ofs);
|
||||
|
||||
Point[] points = new Point[p.Length];
|
||||
|
||||
for (int i = 0; i < p.Length; i++)
|
||||
{
|
||||
points[i] = new Point(p[i].x - ofs.x, p[i].y - ofs.y);
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
// Calculates transformation matrices based on points extracted by getFacePoints
|
||||
private void getTransformationMatrices()
|
||||
{
|
||||
trans_ann_to_bob = Imgproc.getAffineTransform(new MatOfPoint2f(affine_transform_keypoints_ann), new MatOfPoint2f(affine_transform_keypoints_bob));
|
||||
Imgproc.invertAffineTransform(trans_ann_to_bob, trans_bob_to_ann);
|
||||
}
|
||||
|
||||
// Creates masks for faces based on the points extracted in getFacePoints
|
||||
private void getMasks()
|
||||
{
|
||||
mask_ann.setTo(Scalar.all(0));
|
||||
mask_bob.setTo(Scalar.all(0));
|
||||
Imgproc.fillConvexPoly(mask_ann, new MatOfPoint(points_ann), new Scalar(255 * maskAlpha));
|
||||
Imgproc.fillConvexPoly(mask_bob, new MatOfPoint(points_bob), new Scalar(255 * maskAlpha));
|
||||
}
|
||||
|
||||
// Creates warpped masks out of masks created in getMasks to switch places
|
||||
private void getWarppedMasks()
|
||||
{
|
||||
Imgproc.warpAffine(mask_ann, warpped_mask_ann, trans_ann_to_bob, small_frame_size, Imgproc.INTER_NEAREST, Core.BORDER_CONSTANT, new Scalar(0));
|
||||
Imgproc.warpAffine(mask_bob, warpped_mask_bob, trans_bob_to_ann, small_frame_size, Imgproc.INTER_NEAREST, Core.BORDER_CONSTANT, new Scalar(0));
|
||||
}
|
||||
|
||||
// Returns Mat of refined mask such that warpped mask isn't bigger than original mask
|
||||
private Mat getRefinedMasks()
|
||||
{
|
||||
Core.bitwise_and(mask_ann, warpped_mask_bob, refined_ann_and_bob_warpped);
|
||||
Core.bitwise_and(mask_bob, warpped_mask_ann, refined_bob_and_ann_warpped);
|
||||
|
||||
refined_masks.setTo(Scalar.all(0));
|
||||
refined_ann_and_bob_warpped.copyTo(refined_masks, refined_ann_and_bob_warpped);
|
||||
refined_bob_and_ann_warpped.copyTo(refined_masks, refined_bob_and_ann_warpped);
|
||||
|
||||
return refined_masks;
|
||||
}
|
||||
|
||||
// Extracts faces from images based on masks created in getMasks
|
||||
private void extractFaces()
|
||||
{
|
||||
small_frame.copyTo(face_ann, mask_ann);
|
||||
small_frame.copyTo(face_bob, mask_bob);
|
||||
}
|
||||
|
||||
// Creates warpped faces out of faces extracted in extractFaces
|
||||
private Mat getWarppedFaces()
|
||||
{
|
||||
Imgproc.warpAffine(face_ann, warpped_face_ann, trans_ann_to_bob, small_frame_size, Imgproc.INTER_NEAREST, Core.BORDER_CONSTANT, new Scalar(0, 0, 0));
|
||||
Imgproc.warpAffine(face_bob, warpped_face_bob, trans_bob_to_ann, small_frame_size, Imgproc.INTER_NEAREST, Core.BORDER_CONSTANT, new Scalar(0, 0, 0));
|
||||
|
||||
warpped_face_ann.copyTo(warpped_faces, warpped_mask_ann);
|
||||
warpped_face_bob.copyTo(warpped_faces, warpped_mask_bob);
|
||||
|
||||
return warpped_faces;
|
||||
}
|
||||
|
||||
// Matches Ann face color to Bob face color and vice versa
|
||||
private void colorCorrectFaces()
|
||||
{
|
||||
using (Mat rect_ann_small_frame = new Mat(small_frame, rect_ann))
|
||||
using (Mat rect_ann_warpped_faces = new Mat(warpped_faces, rect_ann))
|
||||
using (Mat rect_ann_warpped_mask_bob = new Mat(warpped_mask_bob, rect_ann))
|
||||
{
|
||||
specifiyHistogram(rect_ann_small_frame, rect_ann_warpped_faces, rect_ann_warpped_mask_bob);
|
||||
}
|
||||
using (Mat rect_bob_small_frame = new Mat(small_frame, rect_bob))
|
||||
using (Mat rect_bob_warpped_faces = new Mat(warpped_faces, rect_bob))
|
||||
using (Mat rect_bob_warpped_mask_ann = new Mat(warpped_mask_ann, rect_bob))
|
||||
{
|
||||
specifiyHistogram(rect_bob_small_frame, rect_bob_warpped_faces, rect_bob_warpped_mask_ann);
|
||||
}
|
||||
}
|
||||
|
||||
// Blurs edges of masks
|
||||
private void featherMasks()
|
||||
{
|
||||
using (Mat rect_ann_refined_masks = new Mat(refined_masks, rect_ann))
|
||||
{
|
||||
featherMask(rect_ann_refined_masks);
|
||||
}
|
||||
using (Mat rect_bob_refined_masks = new Mat(refined_masks, rect_bob))
|
||||
{
|
||||
featherMask(rect_bob_refined_masks);
|
||||
}
|
||||
}
|
||||
|
||||
// Blurs edges of mask
|
||||
private void featherMask(Mat refined_masks)
|
||||
{
|
||||
Imgproc.erode(refined_masks, refined_masks, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, feather_amount), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0));
|
||||
Imgproc.blur(refined_masks, refined_masks, feather_amount, new Point(-1, -1), Core.BORDER_CONSTANT);
|
||||
}
|
||||
|
||||
// Pastes faces on original frame
|
||||
private void pasteFacesOnFrame()
|
||||
{
|
||||
byte[] masks_byte = new byte[refined_masks.total() * refined_masks.elemSize()];
|
||||
Utils.copyFromMat<byte>(refined_masks, masks_byte);
|
||||
byte[] faces_byte = new byte[warpped_faces.total() * warpped_faces.elemSize()];
|
||||
Utils.copyFromMat<byte>(warpped_faces, faces_byte);
|
||||
byte[] frame_byte = new byte[small_frame.total() * small_frame.elemSize()];
|
||||
Utils.copyFromMat<byte>(small_frame, frame_byte);
|
||||
|
||||
int pixel_i = 0;
|
||||
int elemSize = (int)small_frame.elemSize();
|
||||
int total = (int)small_frame.total();
|
||||
|
||||
for (int i = 0; i < total; i++)
|
||||
{
|
||||
if (masks_byte[i] != 0)
|
||||
{
|
||||
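// Per-channel alpha blend: result = ((255 - mask) * frame + mask * face) >> 8 (">> 8" is a cheap approximation of dividing by 255).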
frame_byte[pixel_i] = (byte)(((255 - masks_byte[i]) * frame_byte[pixel_i] + masks_byte[i] * faces_byte[pixel_i]) >> 8);
|
||||
frame_byte[pixel_i + 1] = (byte)(((255 - masks_byte[i]) * frame_byte[pixel_i + 1] + masks_byte[i] * faces_byte[pixel_i + 1]) >> 8);
|
||||
frame_byte[pixel_i + 2] = (byte)(((255 - masks_byte[i]) * frame_byte[pixel_i + 2] + masks_byte[i] * faces_byte[pixel_i + 2]) >> 8);
|
||||
}
|
||||
pixel_i += elemSize;
|
||||
}
|
||||
|
||||
Utils.copyToMat(frame_byte, small_frame);
|
||||
}
|
||||
|
||||
// Calculates the source image histogram and remaps target_image so that its histogram matches the source's
|
||||
private void specifiyHistogram(Mat source_image, Mat target_image, Mat mask)
|
||||
{
|
||||
System.Array.Clear(source_hist_int, 0, source_hist_int.Length);
|
||||
System.Array.Clear(target_hist_int, 0, target_hist_int.Length);
|
||||
|
||||
byte[] mask_byte = new byte[mask.total() * mask.elemSize()];
|
||||
Utils.copyFromMat<byte>(mask, mask_byte);
|
||||
byte[] source_image_byte = new byte[source_image.total() * source_image.elemSize()];
|
||||
Utils.copyFromMat<byte>(source_image, source_image_byte);
|
||||
byte[] target_image_byte = new byte[target_image.total() * target_image.elemSize()];
|
||||
Utils.copyFromMat<byte>(target_image, target_image_byte);
|
||||
|
||||
int pixel_i = 0;
|
||||
int elemSize = (int)source_image.elemSize();
|
||||
int total = (int)mask.total();
|
||||
for (int i = 0; i < total; i++)
|
||||
{
|
||||
if (mask_byte[i] != 0)
|
||||
{
|
||||
source_hist_int[0, source_image_byte[pixel_i]]++;
|
||||
source_hist_int[1, source_image_byte[pixel_i + 1]]++;
|
||||
source_hist_int[2, source_image_byte[pixel_i + 2]]++;
|
||||
|
||||
target_hist_int[0, target_image_byte[pixel_i]]++;
|
||||
target_hist_int[1, target_image_byte[pixel_i + 1]]++;
|
||||
target_hist_int[2, target_image_byte[pixel_i + 2]]++;
|
||||
}
|
||||
// Advance to next pixel
|
||||
pixel_i += elemSize;
|
||||
}
|
||||
|
||||
// Calc CDF
|
||||
for (int i = 1; i < 256; i++)
|
||||
{
|
||||
source_hist_int[0, i] += source_hist_int[0, i - 1];
|
||||
source_hist_int[1, i] += source_hist_int[1, i - 1];
|
||||
source_hist_int[2, i] += source_hist_int[2, i - 1];
|
||||
|
||||
target_hist_int[0, i] += target_hist_int[0, i - 1];
|
||||
target_hist_int[1, i] += target_hist_int[1, i - 1];
|
||||
target_hist_int[2, i] += target_hist_int[2, i - 1];
|
||||
}
|
||||
|
||||
// Normalize CDF
|
||||
for (int i = 0; i < 256; i++)
|
||||
{
|
||||
source_histogram[0, i] = (source_hist_int[0, i] != 0) ? (float)source_hist_int[0, i] / source_hist_int[0, 255] : 0;
|
||||
source_histogram[1, i] = (source_hist_int[1, i] != 0) ? (float)source_hist_int[1, i] / source_hist_int[1, 255] : 0;
|
||||
source_histogram[2, i] = (source_hist_int[2, i] != 0) ? (float)source_hist_int[2, i] / source_hist_int[2, 255] : 0;
|
||||
|
||||
target_histogram[0, i] = (target_hist_int[0, i] != 0) ? (float)target_hist_int[0, i] / target_hist_int[0, 255] : 0;
|
||||
target_histogram[1, i] = (target_hist_int[1, i] != 0) ? (float)target_hist_int[1, i] / target_hist_int[1, 255] : 0;
|
||||
target_histogram[2, i] = (target_hist_int[2, i] != 0) ? (float)target_hist_int[2, i] / target_hist_int[2, 255] : 0;
|
||||
}
|
||||
|
||||
// Create lookup table
|
||||
for (int i = 0; i < 256; i++)
|
||||
{
|
||||
LUT[0, i] = binary_search(target_histogram[0, i], source_histogram, 0);
|
||||
LUT[1, i] = binary_search(target_histogram[1, i], source_histogram, 1);
|
||||
LUT[2, i] = binary_search(target_histogram[2, i], source_histogram, 2);
|
||||
}
|
||||
|
||||
// repaint pixels
|
||||
pixel_i = 0;
|
||||
for (int i = 0; i < total; i++)
|
||||
{
|
||||
if (mask_byte[i] != 0)
|
||||
{
|
||||
target_image_byte[pixel_i] = LUT[0, target_image_byte[pixel_i]];
|
||||
target_image_byte[pixel_i + 1] = LUT[1, target_image_byte[pixel_i + 1]];
|
||||
target_image_byte[pixel_i + 2] = LUT[2, target_image_byte[pixel_i + 2]];
|
||||
}
|
||||
// Advance to next pixel
|
||||
pixel_i += elemSize;
|
||||
}
|
||||
|
||||
Utils.copyToMat(target_image_byte, target_image);
|
||||
}
|
||||
|
||||
private byte binary_search(float needle, float[,] haystack, int haystack_index)
|
||||
{
|
||||
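// Binary search over the source CDF: returns the intensity whose cumulative value best matches 'needle' (a target CDF value), used to build the histogram-matching LUT.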
byte l = 0, r = 255, m = 0;
|
||||
while (l < r)
|
||||
{
|
||||
m = (byte)((l + r) / 2);
|
||||
if (needle > haystack[haystack_index, m])
|
||||
l = (byte)(m + 1);
|
||||
else
|
||||
r = (byte)(m - 1);
|
||||
}
|
||||
// TODO check closest value
|
||||
return m;
|
||||
}
|
||||
|
||||
|
||||
// Pastes faces on original frame using SeamlessClone
|
||||
private void pasteFacesOnFrameUsingSeamlessClone(Mat full_frame)
|
||||
{
|
||||
// Draw a thin border on the mask to work around the composited result being shifted by seamlessClone.
|
||||
Imgproc.rectangle(refined_masks, new Point(1, 1), new Point(small_frame_size.width - 1, small_frame_size.height - 1), new Scalar(255, 255, 255, 255), 1, Imgproc.LINE_8, 0);
|
||||
|
||||
using (Mat full_dst_frame = new Mat()){
|
||||
full_frame.copyTo(full_dst_frame);
|
||||
using (Mat small_dest_frame = new Mat(full_dst_frame, small_frame_rect))
|
||||
{
|
||||
//Utils.setDebugMode(true);
|
||||
Photo.seamlessClone(warpped_faces, small_frame, refined_masks, new Point(small_frame_size.width / 2, small_frame_size.height / 2), small_dest_frame, Photo.NORMAL_CLONE);
|
||||
//Utils.setDebugMode(false);
|
||||
}
|
||||
full_dst_frame.copyTo(full_frame);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void drawDebugFacePoints()
|
||||
{
|
||||
for (int i = 0; i < points_ann.Length; i++)
|
||||
{
|
||||
Imgproc.circle(small_frame, points_ann[i], 1, new Scalar(255, 0, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
for (int i = 0; i < affine_transform_keypoints_ann.Length; i++)
|
||||
{
|
||||
Imgproc.circle(small_frame, affine_transform_keypoints_ann[i], 1, new Scalar(0, 255, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
for (int i = 0; i < points_bob.Length; i++)
|
||||
{
|
||||
Imgproc.circle(small_frame, points_bob[i], 1, new Scalar(255, 0, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
for (int i = 0; i < affine_transform_keypoints_bob.Length; i++)
|
||||
{
|
||||
Imgproc.circle(small_frame, affine_transform_keypoints_bob[i], 1, new Scalar(0, 255, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
//
|
||||
Imgproc.rectangle(small_frame, new Point(1, 1), new Point(small_frame_size.width - 2, small_frame_size.height - 2), new Scalar(255, 0, 0, 255), 2, Imgproc.LINE_8, 0);
|
||||
Imgproc.rectangle(small_frame, this.rect_ann.tl(), this.rect_ann.br(), new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_8, 0);
|
||||
Imgproc.rectangle(small_frame, this.rect_bob.tl(), this.rect_bob.br(), new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_8, 0);
|
||||
//
|
||||
}
|
||||
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (small_frame != null)
|
||||
{
|
||||
small_frame.Dispose();
|
||||
small_frame = null;
|
||||
}
|
||||
disposeMatROI();
|
||||
disposeMat();
|
||||
|
||||
if (frame_c3 != null)
|
||||
{
|
||||
frame_c3.Dispose();
|
||||
frame_c3 = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: ba6438b58e5479b46990e81848dc5b03
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,33 @@
|
|||
using System;
|
||||
|
||||
namespace OpenCVForUnity.FaceSwap
|
||||
{
|
||||
|
||||
public class NonRigidObjectTrackerFaceSwapper : FaceSwapper
|
||||
{
|
||||
// Finds facial landmarks on faces and extracts the useful points
|
||||
protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
|
||||
{
|
||||
if (landmark_points.Length != 76)
|
||||
throw new ArgumentException("Invalid landmark_points.");
|
||||
|
||||
//points(facial contour)
|
||||
points[0] = landmark_points[0];
|
||||
points[1] = landmark_points[2];
|
||||
points[2] = landmark_points[5];
|
||||
points[3] = landmark_points[7];
|
||||
points[4] = landmark_points[9];
|
||||
points[5] = landmark_points[12];
|
||||
points[6] = landmark_points[14];
|
||||
Point nose_line_base = new Point((landmark_points[37].x + landmark_points[45].x) / 2, (landmark_points[37].y + landmark_points[45].y) / 2);
|
||||
Point nose_length = new Point(nose_line_base.x - landmark_points[67].x, nose_line_base.y - landmark_points[67].y);
|
||||
points[7] = new Point(landmark_points[15].x + nose_length.x, landmark_points[15].y + nose_length.y);
|
||||
points[8] = new Point(landmark_points[21].x + nose_length.x, landmark_points[21].y + nose_length.y);
|
||||
|
||||
//affine_transform_keypoints(eyes and chin)
|
||||
affine_transform_keypoints[0] = points[3];
|
||||
affine_transform_keypoints[1] = landmark_points[27];
|
||||
affine_transform_keypoints[2] = landmark_points[32];
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 4137cc5f5ba6b00498b76159d18889ce
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,40 @@
|
|||
using System;
|
||||
|
||||
namespace OpenCVForUnity.FaceSwap
|
||||
{
|
||||
public class PointUtils
|
||||
{
|
||||
/// <summary>
|
||||
/// Returns the distance between the specified two points
|
||||
/// </summary>
|
||||
/// <param name="p1"></param>
|
||||
/// <param name="p2"></param>
|
||||
/// <returns></returns>
|
||||
public static double Distance(Point p1, Point p2)
|
||||
{
|
||||
return Math.Sqrt(Math.Pow(p2.x - p1.x, 2) + Math.Pow(p2.y - p1.y, 2));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the dot product of two 2D vectors.
|
||||
/// </summary>
|
||||
/// <param name="p1"></param>
|
||||
/// <param name="p2"></param>
|
||||
/// <returns></returns>
|
||||
public static double DotProduct(Point p1, Point p2)
|
||||
{
|
||||
return p1.x * p2.x + p1.y * p2.y;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the cross product of two 2D vectors.
|
||||
/// </summary>
|
||||
/// <param name="p1"></param>
|
||||
/// <param name="p2"></param>
|
||||
/// <returns></returns>
|
||||
public static double CrossProduct(Point p1, Point p2)
|
||||
{
|
||||
return p1.x * p2.y - p2.x * p1.y;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 7d20b04cba21e3e4296c754903a3cb3e
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,58 @@
|
|||
using System;
|
||||
|
||||
namespace OpenCVForUnity.FaceSwap
|
||||
{
|
||||
public class RectUtils
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates and returns an inflated copy of the specified CvRect structure.
|
||||
/// </summary>
|
||||
/// <param name="rect">The Rectangle with which to start. This rectangle is not modified. </param>
|
||||
/// <param name="x">The amount to inflate this Rectangle horizontally. </param>
|
||||
/// <param name="y">The amount to inflate this Rectangle vertically. </param>
|
||||
/// <returns></returns>
|
||||
public static Rect Inflate(Rect rect, int x, int y)
|
||||
{
|
||||
rect.x -= x;
|
||||
rect.y -= y;
|
||||
rect.width += (2 * x);
|
||||
rect.height += (2 * y);
|
||||
return rect;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines the CvRect structure that represents the intersection of two rectangles.
|
||||
/// </summary>
|
||||
/// <param name="a">A rectangle to intersect. </param>
|
||||
/// <param name="b">A rectangle to intersect. </param>
|
||||
/// <returns></returns>
|
||||
public static Rect Intersect(Rect a, Rect b)
|
||||
{
|
||||
int x1 = Math.Max(a.x, b.x);
|
||||
int x2 = Math.Min(a.x + a.width, b.x + b.width);
|
||||
int y1 = Math.Max(a.y, b.y);
|
||||
int y2 = Math.Min(a.y + a.height, b.y + b.height);
|
||||
|
||||
if (x2 >= x1 && y2 >= y1)
|
||||
return new Rect(x1, y1, x2 - x1, y2 - y1);
|
||||
else
|
||||
return new Rect();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a CvRect structure that contains the union of two CvRect structures.
|
||||
/// </summary>
|
||||
/// <param name="a">A rectangle to union. </param>
|
||||
/// <param name="b">A rectangle to union. </param>
|
||||
/// <returns></returns>
|
||||
public static Rect Union(Rect a, Rect b)
|
||||
{
|
||||
int x1 = Math.Min(a.x, b.x);
|
||||
int x2 = Math.Max(a.x + a.width, b.x + b.width);
|
||||
int y1 = Math.Min(a.y, b.y);
|
||||
int y2 = Math.Max(a.y + a.height, b.y + b.height);
|
||||
|
||||
return new Rect(x1, y1, x2 - x1, y2 - y1);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 66c9ea5092809794caba465840d6d80f
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,21 @@
FaceSwapper Sample
====================

Overview
-----


Demo Video
-----
[![](http://img.youtube.com/vi/lQPL85LbSYo/0.jpg)](https://www.youtube.com/watch?v=lQPL85LbSYo)

Demo Application
-----
<https://play.google.com/store/apps/details?id=com.enoxsoftware.faceswappersample>

Manual
-----
[ReadMe.pdf](ReadMe.pdf)
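Usage Sketch
-----
A minimal, hypothetical sketch of how the API added in this commit fits together. The variables `rgbaMat`, `landmarkPointsAnn`, and `landmarkPointsBob` are placeholders; the 68-point landmark arrays are assumed to come from "Dlib FaceLandmark Detector", which is not part of this commit.

```csharp
using OpenCVForUnity;
using OpenCVForUnity.FaceSwap;

// rgbaMat: the RGBA camera frame as an OpenCV Mat (placeholder).
// landmarkPointsAnn / landmarkPointsBob: 68 facial landmark Points per face (placeholders).
using (FaceSwapper swapper = new DlibFaceSwapper())
{
    swapper.useSeamlessCloneForPasteFaces = false; // byte-wise alpha blend instead of Photo.seamlessClone
    swapper.isShowingDebugFacePoints = false;

    // Swaps the two faces in place on rgbaMat; alpha in [0, 1] controls the mask opacity.
    swapper.SwapFaces(rgbaMat, landmarkPointsAnn, landmarkPointsBob, 1.0f);
}
```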
Binary file not shown.
|
@ -0,0 +1,6 @@
|
|||
fileFormatVersion: 2
|
||||
guid: a6df57592dc8e1c4a8af3a44efd670fe
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: a519c66b56ec9a643845078f4cb3b9d5
|
||||
folderAsset: yes
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 92a3f251f6656834d9660e1ac364c8ae
|
||||
folderAsset: yes
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,501 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using UnityEngine;
|
||||
|
||||
namespace OpenCVForUnity.RectangleTrack
|
||||
{
|
||||
|
||||
public class RectangleTracker : IDisposable
|
||||
{
|
||||
public List<TrackedObject> TrackedObjects
|
||||
{
|
||||
get { return trackedObjects; }
|
||||
}
|
||||
private List<TrackedObject> trackedObjects;
|
||||
|
||||
|
||||
public TrackerParameters TrackerParameters
|
||||
{
|
||||
get { return trackerParameters; }
|
||||
set
|
||||
{
|
||||
if (value == null)
|
||||
{
|
||||
throw new ArgumentNullException("value");
|
||||
}
|
||||
trackerParameters = value;
|
||||
}
|
||||
}
|
||||
private TrackerParameters trackerParameters;
|
||||
|
||||
|
||||
public List<float> weightsPositionsSmoothing
|
||||
{
|
||||
get { return _weightsPositionsSmoothing; }
|
||||
set
|
||||
{
|
||||
if (value == null)
|
||||
{
|
||||
throw new ArgumentNullException("value");
|
||||
}
|
||||
_weightsPositionsSmoothing = value;
|
||||
}
|
||||
}
|
||||
private List<float> _weightsPositionsSmoothing = new List<float>();
|
||||
|
||||
public List<float> weightsSizesSmoothing
|
||||
{
|
||||
get { return _weightsSizesSmoothing; }
|
||||
set
|
||||
{
|
||||
if (value == null)
|
||||
{
|
||||
throw new ArgumentNullException("value");
|
||||
}
|
||||
_weightsSizesSmoothing = value;
|
||||
}
|
||||
}
|
||||
private List<float> _weightsSizesSmoothing = new List<float>();
|
||||
|
||||
public RectangleTracker(TrackerParameters trackerParameters = null)
|
||||
{
|
||||
trackedObjects = new List<TrackedObject>();
|
||||
|
||||
if (trackerParameters != null)
|
||||
{
|
||||
this.trackerParameters = trackerParameters;
|
||||
}
|
||||
else
|
||||
{
|
||||
this.trackerParameters = new TrackerParameters();
|
||||
}
|
||||
|
||||
_weightsPositionsSmoothing.Add(1);
|
||||
_weightsSizesSmoothing.Add(0.5f);
|
||||
_weightsSizesSmoothing.Add(0.3f);
|
||||
_weightsSizesSmoothing.Add(0.2f);
|
||||
}
|
||||
|
||||
public enum TrackedRectState : int
|
||||
{
|
||||
NEW_RECTANGLE = -1,
|
||||
INTERSECTED_RECTANGLE = -2
|
||||
}
|
||||
|
||||
|
||||
public void GetObjects(List<Rect> result, bool smoothing = true)
|
||||
{
|
||||
result.Clear();
|
||||
|
||||
int count = trackedObjects.Count;
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
Rect r;
|
||||
if (smoothing)
|
||||
{
|
||||
r = getSmoothingRect(i);
|
||||
}
|
||||
else
|
||||
{
|
||||
r = trackedObjects[i].position;
|
||||
}
|
||||
|
||||
if (trackedObjects[i].state > TrackedState.NEW_DISPLAYED && trackedObjects[i].state < TrackedState.NEW_HIDED)
|
||||
result.Add(r);
|
||||
|
||||
//LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
|
||||
//Debug.Log("GetObjects" + r.width + " " + r.height + " " + r.x + " " + r.y + " " + r.width + " " + r.height + " " + trackedObjects[i].state + " " + trackedObjects[i].numDetectedFrames + " " + trackedObjects[i].numFramesNotDetected);
|
||||
}
|
||||
}
|
||||
|
||||
public void GetObjects(List<TrackedRect> result, bool smoothing = true)
|
||||
{
|
||||
result.Clear();
|
||||
|
||||
int count = trackedObjects.Count;
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
Rect r;
|
||||
if (smoothing)
|
||||
{
|
||||
r = getSmoothingRect(i);
|
||||
}
|
||||
else
|
||||
{
|
||||
r = trackedObjects[i].position;
|
||||
}
|
||||
|
||||
result.Add(new TrackedRect(trackedObjects[i].id, r, trackedObjects[i].state, trackedObjects[i].numDetectedFrames, trackedObjects[i].numFramesNotDetected));
|
||||
|
||||
//LOGD("DetectionBasedTracker::process: found a object with SIZE %d x %d, rect={%d, %d, %d x %d}", r.width, r.height, r.x, r.y, r.width, r.height);
|
||||
//Debug.Log("GetObjects" + r.width + " " + r.height + " " + r.x + " " + r.y + " " + r.width + " " + r.height + " " + trackedObjects[i].state + " " + trackedObjects[i].numDetectedFrames + " " + trackedObjects[i].numFramesNotDetected);
|
||||
}
|
||||
}
|
||||
|
||||
public void UpdateTrackedObjects(List<Rect> detectedObjects)
|
||||
{
|
||||
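// Matches detected rectangles to existing tracked objects by overlap, updates their detection counters and states, adds new tracks, and prunes expired ones.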
if (detectedObjects == null)
|
||||
throw new ArgumentNullException("detectedObjects");
|
||||
|
||||
Rect[] correctionRects = CreateCorrectionBySpeedOfRects();
|
||||
|
||||
int N1 = (int)trackedObjects.Count;
|
||||
int N2 = (int)detectedObjects.Count;
|
||||
|
||||
for (int i = 0; i < N1; i++)
|
||||
{
|
||||
trackedObjects[i].numDetectedFrames++;
|
||||
}
|
||||
|
||||
int[] correspondence = Enumerable.Repeat<int>((int)TrackedRectState.NEW_RECTANGLE, N2).ToArray();
|
||||
|
||||
|
||||
for (int i = 0; i < N1; i++)
|
||||
{
|
||||
TrackedObject curObject = trackedObjects[i];
|
||||
|
||||
int bestIndex = -1;
|
||||
int bestArea = -1;
|
||||
|
||||
//int numpositions = (int)curObject.lastPositions.Count;
|
||||
|
||||
//if (numpositions > 0) UnityEngine.Debug.LogError("numpositions > 0 is false");
|
||||
|
||||
//Rect prevRect = curObject.lastPositions[numpositions - 1];
|
||||
Rect prevRect = correctionRects[i];
|
||||
|
||||
for (int j = 0; j < N2; j++)
|
||||
{
|
||||
if (correspondence[j] >= 0)
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it has correspondence=" + correspondence[j]);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (correspondence[j] != (int)TrackedRectState.NEW_RECTANGLE)
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: j=" + j + " is rejected, because it is intersected with another rectangle");
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (IsCollideByRectangle(prevRect, detectedObjects[j], trackerParameters.coeffRectangleOverlap))
|
||||
{
|
||||
Rect r = Intersect(prevRect, detectedObjects[j]);
|
||||
if ((r.width > 0) && (r.height > 0))
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: There is intersection between prevRect and detectedRect r={" + r.x + ", " + r.y + ", " + r.width + ", " + r.height + "]");
|
||||
|
||||
correspondence[j] = (int)TrackedRectState.INTERSECTED_RECTANGLE;
|
||||
|
||||
if (r.area() > bestArea)
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: The area of intersection is " + r.area() + " it is better than bestArea= " + bestArea);
|
||||
|
||||
bestIndex = j;
|
||||
bestArea = (int)r.area();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (bestIndex >= 0)
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: The best correspondence for i=" + i + " is j=" + bestIndex);
|
||||
|
||||
correspondence[bestIndex] = i;
|
||||
|
||||
Rect bestRect = detectedObjects[bestIndex];
|
||||
|
||||
for (int j = 0; j < N2; j++)
|
||||
{
|
||||
if (correspondence[j] >= 0)
|
||||
continue;
|
||||
|
||||
if (IsCollideByRectangle(detectedObjects[j], bestRect, trackerParameters.coeffRectangleOverlap))
|
||||
{
|
||||
Rect r = Intersect(detectedObjects[j], bestRect);
|
||||
|
||||
if ((r.width > 0) && (r.height > 0))
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: Found intersection between rectangles j= " + j + " and bestIndex= " + bestIndex + " rectangle j= " + j + " is marked as intersected");
|
||||
|
||||
correspondence[j] = (int)TrackedRectState.INTERSECTED_RECTANGLE;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: There is no correspondence for i= " + i);
|
||||
curObject.numFramesNotDetected++;
|
||||
}
|
||||
}
|
||||
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: start second cycle");
|
||||
for (int j = 0; j < N2; j++)
|
||||
{
|
||||
int i = correspondence[j];
|
||||
if (i >= 0)
|
||||
{//add position
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: add position");
|
||||
|
||||
trackedObjects[i].lastPositions.Add(detectedObjects[j]);
|
||||
while ((int)trackedObjects[i].lastPositions.Count > (int)trackerParameters.numLastPositionsToTrack)
|
||||
{
|
||||
trackedObjects[i].lastPositions.Remove(trackedObjects[i].lastPositions[0]);
|
||||
}
|
||||
trackedObjects[i].numFramesNotDetected = 0;
|
||||
if (trackedObjects[i].state != TrackedState.DELETED) trackedObjects[i].state = TrackedState.DISPLAYED;
|
||||
}
|
||||
else if (i == (int)TrackedRectState.NEW_RECTANGLE)
|
||||
{ //new object
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: new object");
|
||||
|
||||
trackedObjects.Add(new TrackedObject(detectedObjects[j]));
|
||||
}
|
||||
else
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::updateTrackedObjects: was auxiliary intersection");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
int t = 0;
|
||||
TrackedObject it;
|
||||
while (t < trackedObjects.Count)
|
||||
{
|
||||
it = trackedObjects[t];
|
||||
|
||||
if (it.state == TrackedState.DELETED)
|
||||
{
|
||||
trackedObjects.Remove(it);
|
||||
}
|
||||
else if ((it.numFramesNotDetected > trackerParameters.maxTrackLifetime)//ALL
|
||||
||
|
||||
((it.numDetectedFrames <= trackerParameters.numStepsToWaitBeforeFirstShow)
|
||||
&&
|
||||
(it.numFramesNotDetected > trackerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown))
|
||||
)
|
||||
{
|
||||
it.state = TrackedState.DELETED;
|
||||
t++;
|
||||
}
|
||||
else if (it.state >= TrackedState.DISPLAYED)//DISPLAYED, NEW_DISPLAYED, HIDED
|
||||
{
|
||||
|
||||
if (it.numDetectedFrames < trackerParameters.numStepsToWaitBeforeFirstShow)
|
||||
{
|
||||
it.state = TrackedState.PENDING;
|
||||
}
|
||||
else if (it.numDetectedFrames == trackerParameters.numStepsToWaitBeforeFirstShow)
|
||||
{
|
||||
//i, trackedObjects[i].numDetectedFrames, innerParameters.numStepsToWaitBeforeFirstShow);
|
||||
it.state = TrackedState.NEW_DISPLAYED;
|
||||
}
|
||||
else if (it.numFramesNotDetected == trackerParameters.numStepsToShowWithoutDetecting)
|
||||
{
|
||||
it.state = TrackedState.NEW_HIDED;
|
||||
}
|
||||
else if (it.numFramesNotDetected > trackerParameters.numStepsToShowWithoutDetecting)
|
||||
{
|
||||
it.state = TrackedState.HIDED;
|
||||
}
|
||||
|
||||
t++;
|
||||
}
|
||||
else//NEW
|
||||
{
|
||||
t++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Rect[] CreateCorrectionBySpeedOfRects()
|
||||
{
|
||||
//Debug.Log("DetectionBasedTracker::process: get _rectsWhereRegions from previous positions");
|
||||
Rect[] rectsWhereRegions = new Rect[trackedObjects.Count];
|
||||
|
||||
int count = trackedObjects.Count;
|
||||
for (int i = 0; i < count; i++)
|
||||
{
|
||||
int n = trackedObjects[i].lastPositions.Count;
|
||||
//if (n > 0) UnityEngine.Debug.LogError("n > 0 is false");
|
||||
|
||||
Rect r = trackedObjects[i].lastPositions[n - 1].clone();
|
||||
/*
|
||||
if (r.area() == 0)
|
||||
{
|
||||
Debug.Log("DetectionBasedTracker::process: ERROR: ATTENTION: strange algorithm's behavior: trackedObjects[i].rect() is empty");
|
||||
continue;
|
||||
}
|
||||
*/
|
||||
|
||||
//correction by speed of rectangle
|
||||
if (n > 1)
|
||||
{
|
||||
Point center = centerRect(r);
|
||||
Point center_prev = centerRect(trackedObjects[i].lastPositions[n - 2]);
|
||||
Point shift = new Point((center.x - center_prev.x) * trackerParameters.coeffObjectSpeedUsingInPrediction,
|
||||
(center.y - center_prev.y) * trackerParameters.coeffObjectSpeedUsingInPrediction);
|
||||
|
||||
r.x += (int)Math.Round(shift.x);
|
||||
r.y += (int)Math.Round(shift.y);
|
||||
}
|
||||
|
||||
rectsWhereRegions[i] = r;
|
||||
}
|
||||
|
||||
return rectsWhereRegions;
|
||||
}
|
||||
|
||||
private Point centerRect(Rect r)
|
||||
{
|
||||
return new Point(r.x + (r.width / 2), r.y + (r.height / 2));
|
||||
}
|
||||
|
||||
private Rect getSmoothingRect(int i)
|
||||
{
|
||||
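// Returns a rectangle smoothed over the last few positions, using weightsSizesSmoothing for width/height and weightsPositionsSmoothing for the center.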
//Debug.Log("trackedObjects[i].numFramesNotDetected: " + trackedObjects[i].numFramesNotDetected);
|
||||
|
||||
List<float> weightsSizesSmoothing = _weightsSizesSmoothing;
|
||||
List<float> weightsPositionsSmoothing = _weightsPositionsSmoothing;
|
||||
|
||||
List<Rect> lastPositions = trackedObjects[i].lastPositions;
|
||||
|
||||
int N = lastPositions.Count;
|
||||
if (N <= 0)
|
||||
{
|
||||
Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: ERROR: no positions for i=" + i);
|
||||
return new Rect();
|
||||
}
|
||||
|
||||
int Nsize = Math.Min(N, (int)weightsSizesSmoothing.Count);
|
||||
int Ncenter = Math.Min(N, (int)weightsPositionsSmoothing.Count);
|
||||
|
||||
Point center = new Point();
|
||||
double w = 0, h = 0;
|
||||
if (Nsize > 0)
|
||||
{
|
||||
double sum = 0;
|
||||
for (int j = 0; j < Nsize; j++)
|
||||
{
|
||||
int k = N - j - 1;
|
||||
w += lastPositions[k].width * weightsSizesSmoothing[j];
|
||||
h += lastPositions[k].height * weightsSizesSmoothing[j];
|
||||
sum += weightsSizesSmoothing[j];
|
||||
}
|
||||
w /= sum;
|
||||
h /= sum;
|
||||
}
|
||||
else
|
||||
{
|
||||
w = lastPositions[N - 1].width;
|
||||
h = lastPositions[N - 1].height;
|
||||
}
|
||||
|
||||
if (Ncenter > 0)
|
||||
{
|
||||
double sum = 0;
|
||||
for (int j = 0; j < Ncenter; j++)
|
||||
{
|
||||
int k = N - j - 1;
|
||||
Point tl = lastPositions[k].tl();
|
||||
Point br = lastPositions[k].br();
|
||||
Point c1;
|
||||
|
||||
c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
|
||||
Point c2;
|
||||
|
||||
c2 = new Point(br.x * 0.5f, br.y * 0.5f);
|
||||
c1 = new Point(c1.x + c2.x, c1.y + c2.y);
|
||||
|
||||
center = new Point(center.x + (c1.x * weightsPositionsSmoothing[j]), center.y + (c1.y * weightsPositionsSmoothing[j]));
|
||||
sum += weightsPositionsSmoothing[j];
|
||||
}
|
||||
center = new Point(center.x * (1 / sum), center.y * (1 / sum));
|
||||
}
|
||||
else
|
||||
{
|
||||
int k = N - 1;
|
||||
Point tl = lastPositions[k].tl();
|
||||
Point br = lastPositions[k].br();
|
||||
Point c1;
|
||||
|
||||
c1 = new Point(tl.x * 0.5f, tl.y * 0.5f);
|
||||
Point c2;
|
||||
|
||||
c2 = new Point(br.x * 0.5f, br.y * 0.5f);
|
||||
|
||||
center = new Point(c1.x + c2.x, c1.y + c2.y);
|
||||
}
|
||||
Point tl2 = new Point(center.x - (w * 0.5f), center.y - (h * 0.5f));
|
||||
Rect res = new Rect((int)Math.Round(tl2.x), (int)Math.Round(tl2.y), (int)Math.Round(w), (int)Math.Round(h));
|
||||
|
||||
//Debug.Log("DetectionBasedTracker::calcTrackedObjectPositionToShow: Result for i=" + i + ": {" + res.x + ", " + res.y + ", " + res.width + ", " + res.height + "}");
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
public void Reset()
|
||||
{
|
||||
trackedObjects.Clear();
|
||||
}
|
||||
|
||||
private Rect Intersect(Rect a, Rect b)
|
||||
{
|
||||
int x1 = Math.Max(a.x, b.x);
|
||||
int x2 = Math.Min(a.x + a.width, b.x + b.width);
|
||||
int y1 = Math.Max(a.y, b.y);
|
||||
int y2 = Math.Min(a.y + a.height, b.y + b.height);
|
||||
|
||||
if (x2 >= x1 && y2 >= y1)
|
||||
return new Rect(x1, y1, x2 - x1, y2 - y1);
|
||||
else
|
||||
return new Rect();
|
||||
}
|
||||
/*
|
||||
private bool IsCollideByCircle(Rect a, Rect b, float coeffRectangleOverlap)
|
||||
{
|
||||
int r1 = (int)(a.width / 2.0f);
|
||||
int r2 = (int)(b.width / 2.0f);
|
||||
int px1 = a.x + r1;
|
||||
int py1 = a.y + r1;
|
||||
int px2 = b.x + r2;
|
||||
int py2 = b.y + r2;
|
||||
|
||||
if ((px2 - px1) * (px2 - px1) + (py2 - py1) * (py2 - py1) <= (r1 + r2) * (r1 + r2) * coeffRectangleOverlap)
|
||||
return true;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
*/
|
||||
private bool IsCollideByRectangle(Rect a, Rect b, float coeffRectangleOverlap)
|
||||
{
|
||||
int mw = (int)(a.width * coeffRectangleOverlap);
|
||||
int mh = (int)(a.height * coeffRectangleOverlap);
|
||||
int mx1 = (int)(a.x + (a.width - mw) / 2.0f);
|
||||
int my1 = (int)(a.y + (a.height - mh) / 2.0f);
|
||||
int mx2 = (int)(mx1 + mw);
|
||||
int my2 = (int)(my1 + mh);
|
||||
|
||||
int ew = (int)(b.width * coeffRectangleOverlap);
|
||||
int eh = (int)(b.height * coeffRectangleOverlap);
|
||||
int ex1 = (int)(b.x + (b.width - ew) / 2.0f);
|
||||
int ey1 = (int)(b.y + (b.height - eh) / 2.0f);
|
||||
int ex2 = (int)(ex1 + ew);
|
||||
int ey2 = (int)(ey1 + eh);
|
||||
|
||||
if (mx1 <= ex2 && ex1 <= mx2 && my1 <= ey2 && ey1 <= my2)
|
||||
return true;
|
||||
else
|
||||
return false;
|
||||
}
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
Reset();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: bcc0418596b47c64088e64d3677066ed
|
||||
timeCreated: 1464369831
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,53 @@
|
|||
using PositionsVector = System.Collections.Generic.List<OpenCVForUnity.Rect>;
|
||||
|
||||
namespace OpenCVForUnity.RectangleTrack
|
||||
{
|
||||
|
||||
public enum TrackedState : int
|
||||
{
|
||||
|
||||
NEW = 0,
|
||||
PENDING = 1,
|
||||
NEW_DISPLAYED = 2,
|
||||
DISPLAYED = 3,
|
||||
NEW_HIDED = 4,
|
||||
HIDED = 5,
|
||||
DELETED = 6
|
||||
}
|
||||
|
||||
public class TrackedObject
|
||||
{
|
||||
public PositionsVector lastPositions;
|
||||
public int numDetectedFrames;
|
||||
public int numFramesNotDetected;
|
||||
public int id;
|
||||
public TrackedState state;
|
||||
public OpenCVForUnity.Rect position
|
||||
{
|
||||
get { return lastPositions[lastPositions.Count - 1].clone(); }
|
||||
}
|
||||
|
||||
|
||||
static private int _id = 0;
|
||||
|
||||
public TrackedObject(OpenCVForUnity.Rect rect)
|
||||
{
|
||||
lastPositions = new PositionsVector();
|
||||
|
||||
numDetectedFrames = 1;
|
||||
numFramesNotDetected = 0;
|
||||
state = TrackedState.NEW;
|
||||
|
||||
lastPositions.Add(rect.clone());
|
||||
|
||||
_id = getNextId();
|
||||
id = _id;
|
||||
}
|
||||
|
||||
static int getNextId()
|
||||
{
|
||||
_id++;
|
||||
return _id;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 8428419bd4335ca4a85e8a4a315afc13
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,19 @@
|
|||
namespace OpenCVForUnity.RectangleTrack
|
||||
{
|
||||
public class TrackedRect : Rect
|
||||
{
|
||||
public int numDetectedFrames;
|
||||
public int numFramesNotDetected;
|
||||
public int id;
|
||||
public TrackedState state;
|
||||
|
||||
public TrackedRect(int id, Rect rect, TrackedState state, int numDetectedFrames, int numFramesNotDetected)
|
||||
: base(rect.x, rect.y, rect.width, rect.height)
|
||||
{
|
||||
this.numDetectedFrames = numDetectedFrames;
|
||||
this.numFramesNotDetected = numFramesNotDetected;
|
||||
this.id = id;
|
||||
this.state = state;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 52e1c721cf4e59a4aab215bf5ab737b0
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,33 @@
|
|||
namespace OpenCVForUnity.RectangleTrack
|
||||
{
|
||||
|
||||
public class TrackerParameters
|
||||
{
|
||||
public int numLastPositionsToTrack = 4;
|
||||
public int numStepsToWaitBeforeFirstShow = 6;
|
||||
public int numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown = 3;
|
||||
public int numStepsToShowWithoutDetecting = 3;
|
||||
|
||||
public int maxTrackLifetime = 5;
|
||||
|
||||
public float coeffObjectSpeedUsingInPrediction = 0.8f;
|
||||
public float coeffRectangleOverlap = 0.7f;
|
||||
|
||||
public TrackerParameters() {
|
||||
}
|
||||
|
||||
public TrackerParameters Clone()
|
||||
{
|
||||
TrackerParameters trackerParameters = new TrackerParameters();
|
||||
trackerParameters.numLastPositionsToTrack = numLastPositionsToTrack;
|
||||
trackerParameters.numStepsToWaitBeforeFirstShow = numStepsToWaitBeforeFirstShow;
|
||||
trackerParameters.numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown = numStepsToTrackWithoutDetectingIfObjectHasNotBeenShown;
|
||||
trackerParameters.numStepsToShowWithoutDetecting = numStepsToShowWithoutDetecting;
|
||||
trackerParameters.maxTrackLifetime = maxTrackLifetime;
|
||||
trackerParameters.coeffObjectSpeedUsingInPrediction = coeffObjectSpeedUsingInPrediction;
|
||||
trackerParameters.coeffRectangleOverlap = coeffRectangleOverlap;
|
||||
|
||||
return trackerParameters;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: b0c3318827dc03f438fc0b04a3e9dec1
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 9ce089e1e22469940a797356e6fb7e6a
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
After Width: | Height: | Size: 1.4 MiB |
|
@ -0,0 +1,47 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 3abc257791f75f1459d7c9a6dff35cf1
|
||||
TextureImporter:
|
||||
fileIDToRecycleName: {}
|
||||
serializedVersion: 2
|
||||
mipmaps:
|
||||
mipMapMode: 0
|
||||
enableMipMap: 0
|
||||
linearTexture: 0
|
||||
correctGamma: 0
|
||||
fadeOut: 0
|
||||
borderMipMap: 0
|
||||
mipMapFadeDistanceStart: 1
|
||||
mipMapFadeDistanceEnd: 3
|
||||
bumpmap:
|
||||
convertToNormalMap: 0
|
||||
externalNormalMap: 0
|
||||
heightScale: .25
|
||||
normalMapFilter: 0
|
||||
isReadable: 1
|
||||
grayScaleToAlpha: 0
|
||||
generateCubemap: 0
|
||||
seamlessCubemap: 0
|
||||
textureFormat: 4
|
||||
maxTextureSize: 1024
|
||||
textureSettings:
|
||||
filterMode: -1
|
||||
aniso: -1
|
||||
mipBias: -1
|
||||
wrapMode: -1
|
||||
nPOTScale: 0
|
||||
lightmap: 0
|
||||
compressionQuality: 50
|
||||
spriteMode: 0
|
||||
spriteExtrude: 1
|
||||
spriteMeshType: 1
|
||||
alignment: 0
|
||||
spritePivot: {x: .5, y: .5}
|
||||
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
|
||||
spritePixelsToUnits: 100
|
||||
alphaIsTransparency: 0
|
||||
textureType: 5
|
||||
buildTargetSettings: []
|
||||
spriteSheet:
|
||||
sprites: []
|
||||
spritePackingTag:
|
||||
userData:
|
|
@ -0,0 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: c6d804cb2b659464eb528f25050a855c
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
|
@ -0,0 +1,6 @@
|
|||
fileFormatVersion: 2
|
||||
guid: e5d0c664715582e4ca34e57c9c810679
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
|
@ -0,0 +1,6 @@
|
|||
fileFormatVersion: 2
|
||||
guid: bffcb2baf6cb09f4c91f5a11a618b29b
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
|
@ -0,0 +1,6 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 52f3044f03b445d40b792ea9adc8d3d9
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
|
@ -0,0 +1,8 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 8c0f13bf77892b641a26f022f103dfc9
|
||||
timeCreated: 1463652241
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
|
@ -0,0 +1,6 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 2f907fe501981c742b2199b6b1866926
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 6c9557d03374cf54cbd32ac281028699
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,64 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Face swapper sample.
|
||||
/// </summary>
|
||||
public class FaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnShowLicenseButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("ShowLicense");
|
||||
#else
|
||||
Application.LoadLevel ("ShowLicense");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnTexture2DFaceSwapperSample ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("Texture2DFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("Texture2DFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnWebCamTextureFaceSwapperSample()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("WebCamTextureFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("WebCamTextureFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnVideoCaptureFaceSwapperSample()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("VideoCaptureFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("VideoCaptureFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 661a709561282a140b876853278c3522
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,243 @@
|
|||
using UnityEngine;
|
||||
using System;
|
||||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
|
||||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
public class FrontalFaceParam : IDisposable
|
||||
{
|
||||
|
||||
//Calib3d.solvePnP
|
||||
Matrix4x4 transformationM = new Matrix4x4 ();
|
||||
MatOfPoint3f objectPoints;
|
||||
MatOfPoint2f imagePoints;
|
||||
Mat rvec;
|
||||
Mat tvec;
|
||||
Mat rotM;
|
||||
Mat camMatrix;
|
||||
MatOfDouble distCoeffs;
|
||||
Matrix4x4 invertYM;
|
||||
Matrix4x4 invertZM;
|
||||
Matrix4x4 ARM;
|
||||
|
||||
//normalize
|
||||
float normWidth = 200;
|
||||
float normHeight = 200;
|
||||
List<Vector2> normPoints;
|
||||
|
||||
public FrontalFaceParam ()
|
||||
{
|
||||
invertYM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, -1, 1));
|
||||
//Debug.Log ("invertYM " + invertYM.ToString ());
|
||||
|
||||
invertZM = Matrix4x4.TRS (Vector3.zero, Quaternion.identity, new Vector3 (1, 1, -1));
|
||||
//Debug.Log ("invertZM " + invertZM.ToString ());
|
||||
|
||||
//set 3d face object points.
|
||||
objectPoints = new MatOfPoint3f (
|
||||
new Point3 (-31, 72, 86),//l eye
|
||||
new Point3 (31, 72, 86),//r eye
|
||||
new Point3 (0, 40, 114),//nose
|
||||
new Point3 (-20, 15, 90),//l mouth //new Point3(-22, 17, 90),//l mouth
|
||||
new Point3 (20, 15, 90),//r mouth //new Point3(22, 17, 90),//r mouth
|
||||
new Point3 (-69, 76, -2),//l ear
|
||||
new Point3 (69, 76, -2)//r ear
|
||||
);
|
||||
imagePoints = new MatOfPoint2f ();
|
||||
rvec = new Mat ();
|
||||
tvec = new Mat ();
|
||||
rotM = new Mat (3, 3, CvType.CV_64FC1);
|
||||
|
||||
|
||||
float max_d = Mathf.Max (normHeight, normWidth);
|
||||
camMatrix = new Mat (3, 3, CvType.CV_64FC1);
|
||||
camMatrix.put (0, 0, max_d);
|
||||
camMatrix.put (0, 1, 0);
|
||||
camMatrix.put (0, 2, normWidth / 2.0f);
|
||||
camMatrix.put (1, 0, 0);
|
||||
camMatrix.put (1, 1, max_d);
|
||||
camMatrix.put (1, 2, normHeight / 2.0f);
|
||||
camMatrix.put (2, 0, 0);
|
||||
camMatrix.put (2, 1, 0);
|
||||
camMatrix.put (2, 2, 1.0f);
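// approximate pinhole intrinsics for the normalized face image: focal length = max(normWidth, normHeight),
// principal point at the image centre, no skew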
|
||||
|
||||
distCoeffs = new MatOfDouble (0, 0, 0, 0);
|
||||
//Debug.Log("distCoeffs " + distCoeffs.dump());
|
||||
|
||||
|
||||
normPoints = new List<Vector2>(68);
|
||||
for (int i = 0; i < 68; i++) {
|
||||
normPoints.Add( new Vector2(0, 0));
|
||||
}
|
||||
}
|
||||
|
||||
public void Dispose ()
|
||||
{
|
||||
objectPoints.Dispose ();
|
||||
imagePoints.Dispose ();
|
||||
rvec.Dispose ();
|
||||
tvec.Dispose ();
|
||||
rotM.Dispose ();
|
||||
camMatrix.Dispose ();
|
||||
distCoeffs.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the frontal face angle.
|
||||
/// </summary>
|
||||
/// <returns>The frontal face angle.</returns>
|
||||
/// <param name="points">Points.</param>
|
||||
public Vector3 getFrontalFaceAngle (List<Vector2> points)
|
||||
{
|
||||
if (points.Count != 68)
|
||||
throw new ArgumentNullException ("Invalid landmark_points.");
|
||||
|
||||
if (camMatrix == null)
|
||||
throw new ArgumentNullException ("Invalid camMatrix.");
|
||||
|
||||
//normalize
|
||||
|
||||
float normScale = Math.Abs (points [30].y - points [8].y) / (normHeight / 2);
|
||||
Vector2 normDiff = points [30] * normScale - new Vector2 (normWidth / 2, normHeight / 2);
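// normScale is the nose-tip(30)-to-chin(8) distance expressed relative to half of normHeight; the points
// are rescaled by it and shifted so the nose tip lands at the centre of the normalized image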
|
||||
|
||||
for (int i = 0; i < points.Count; i++) {
|
||||
normPoints[i] = points[i] * normScale - normDiff;
|
||||
}
|
||||
// Debug.Log ("points[30] " + points[30]);
|
||||
// Debug.Log ("normPoints[30] " + normPoints[30]);
|
||||
// Debug.Log ("normScale " + normScale);
|
||||
// Debug.Log ("normDiff " + normDiff);
|
||||
|
||||
imagePoints.fromArray (
|
||||
new Point ((normPoints [38].x + normPoints [41].x) / 2, (normPoints [38].y + normPoints [41].y) / 2),//l eye
|
||||
new Point ((normPoints [43].x + normPoints [46].x) / 2, (normPoints [43].y + normPoints [46].y) / 2),//r eye
|
||||
new Point (normPoints [33].x, normPoints [33].y),//nose
|
||||
new Point (normPoints [48].x, normPoints [48].y),//l mouth
|
||||
new Point (normPoints [54].x, normPoints [54].y), //r mouth ,
|
||||
new Point (normPoints [0].x, normPoints [0].y),//l ear
|
||||
new Point (normPoints [16].x, normPoints [16].y)//r ear
|
||||
);
|
||||
|
||||
|
||||
Calib3d.solvePnP (objectPoints, imagePoints, camMatrix, distCoeffs, rvec, tvec);
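// solvePnP recovers the pose (rotation vector rvec, translation tvec) that projects the 3D model
// points onto the normalized 2D landmarks; Rodrigues then expands rvec into the 3x3 matrix rotM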
|
||||
|
||||
Calib3d.Rodrigues (rvec, rotM);
|
||||
|
||||
transformationM.SetRow (0, new Vector4 ((float)rotM.get (0, 0) [0], (float)rotM.get (0, 1) [0], (float)rotM.get (0, 2) [0], (float)tvec.get (0, 0) [0]));
|
||||
transformationM.SetRow (1, new Vector4 ((float)rotM.get (1, 0) [0], (float)rotM.get (1, 1) [0], (float)rotM.get (1, 2) [0], (float)tvec.get (1, 0) [0]));
|
||||
transformationM.SetRow (2, new Vector4 ((float)rotM.get (2, 0) [0], (float)rotM.get (2, 1) [0], (float)rotM.get (2, 2) [0], (float)tvec.get (2, 0) [0]));
|
||||
transformationM.SetRow (3, new Vector4 (0, 0, 0, 1));
|
||||
|
||||
ARM = invertYM * transformationM * invertZM;
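// flip the Y and Z axes to convert the pose from OpenCV's right-handed camera coordinates to Unity's
// left-handed convention before reading off Euler angles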
|
||||
|
||||
return ExtractRotationFromMatrix (ref ARM).eulerAngles;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the frontal face rate.
|
||||
/// </summary>
|
||||
/// <returns>The frontal face rate.</returns>
|
||||
/// <param name="points">Points.</param>
|
||||
public float getFrontalFaceRate (List<Vector2> points)
|
||||
{
|
||||
|
||||
Vector3 angles = getFrontalFaceAngle (points);
|
||||
float rotateX = (angles.x > 180) ? angles.x - 360 : angles.x;
|
||||
float rotateY = (angles.y > 180) ? angles.y - 360 : angles.y;
|
||||
|
||||
// Debug.Log ("angles " + angles);
|
||||
// Debug.Log ("ratio " + (1.0f - (Mathf.Max (Mathf.Abs (rotateX), Mathf.Abs (rotateY)) / 90)));
|
||||
|
||||
return 1.0f - (Mathf.Max (Mathf.Abs (rotateX), Mathf.Abs (rotateY)) / 90);
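// 1.0 for a perfectly frontal face, falling linearly towards 0.0 as pitch or yaw approaches 90 degrees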
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract translation from transform matrix.
|
||||
/// </summary>
|
||||
/// <param name="matrix">Transform matrix. This parameter is passed by reference
|
||||
/// to improve performance; no changes will be made to it.</param>
|
||||
/// <returns>
|
||||
/// Translation offset.
|
||||
/// </returns>
|
||||
public static Vector3 ExtractTranslationFromMatrix (ref Matrix4x4 matrix)
|
||||
{
|
||||
Vector3 translate;
|
||||
translate.x = matrix.m03;
|
||||
translate.y = matrix.m13;
|
||||
translate.z = matrix.m23;
|
||||
return translate;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract rotation quaternion from transform matrix.
|
||||
/// </summary>
|
||||
/// <param name="matrix">Transform matrix. This parameter is passed by reference
|
||||
/// to improve performance; no changes will be made to it.</param>
|
||||
/// <returns>
|
||||
/// Quaternion representation of rotation transform.
|
||||
/// </returns>
|
||||
public static Quaternion ExtractRotationFromMatrix (ref Matrix4x4 matrix)
|
||||
{
|
||||
Vector3 forward;
|
||||
forward.x = matrix.m02;
|
||||
forward.y = matrix.m12;
|
||||
forward.z = matrix.m22;
|
||||
|
||||
Vector3 upwards;
|
||||
upwards.x = matrix.m01;
|
||||
upwards.y = matrix.m11;
|
||||
upwards.z = matrix.m21;
|
||||
|
||||
return Quaternion.LookRotation (forward, upwards);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract scale from transform matrix.
|
||||
/// </summary>
|
||||
/// <param name="matrix">Transform matrix. This parameter is passed by reference
|
||||
/// to improve performance; no changes will be made to it.</param>
|
||||
/// <returns>
|
||||
/// Scale vector.
|
||||
/// </returns>
|
||||
public static Vector3 ExtractScaleFromMatrix (ref Matrix4x4 matrix)
|
||||
{
|
||||
Vector3 scale;
|
||||
scale.x = new Vector4 (matrix.m00, matrix.m10, matrix.m20, matrix.m30).magnitude;
|
||||
scale.y = new Vector4 (matrix.m01, matrix.m11, matrix.m21, matrix.m31).magnitude;
|
||||
scale.z = new Vector4 (matrix.m02, matrix.m12, matrix.m22, matrix.m32).magnitude;
|
||||
return scale;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Extract position, rotation and scale from TRS matrix.
|
||||
/// </summary>
|
||||
/// <param name="matrix">Transform matrix. This parameter is passed by reference
|
||||
/// to improve performance; no changes will be made to it.</param>
|
||||
/// <param name="localPosition">Output position.</param>
|
||||
/// <param name="localRotation">Output rotation.</param>
|
||||
/// <param name="localScale">Output scale.</param>
|
||||
public static void DecomposeMatrix (ref Matrix4x4 matrix, out Vector3 localPosition, out Quaternion localRotation, out Vector3 localScale)
|
||||
{
|
||||
localPosition = ExtractTranslationFromMatrix (ref matrix);
|
||||
localRotation = ExtractRotationFromMatrix (ref matrix);
|
||||
localScale = ExtractScaleFromMatrix (ref matrix);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Set transform component from TRS matrix.
|
||||
/// </summary>
|
||||
/// <param name="transform">Transform component.</param>
|
||||
/// <param name="matrix">Transform matrix. This parameter is passed by reference
|
||||
/// to improve performance; no changes will be made to it.</param>
|
||||
public static void SetTransformFromMatrix (Transform transform, ref Matrix4x4 matrix)
|
||||
{
|
||||
transform.localPosition = ExtractTranslationFromMatrix (ref matrix);
|
||||
transform.localRotation = ExtractRotationFromMatrix (ref matrix);
|
||||
transform.localScale = ExtractScaleFromMatrix (ref matrix);
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 4c57ef078c131834aa757a71ae3ec6ba
|
||||
timeCreated: 1467085640
|
||||
licenseType: Pro
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,80 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using System;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Utility class for the integration of DlibFaceLandmarkDetector and OpenCVForUnity.
|
||||
/// </summary>
|
||||
public static class OpenCVForUnityUtils
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Sets the image.
|
||||
/// </summary>
|
||||
/// <param name="faceLandmarkDetector">Face landmark detector.</param>
|
||||
/// <param name="imgMat">Image mat.</param>
|
||||
public static void SetImage (FaceLandmarkDetector faceLandmarkDetector, Mat imgMat)
|
||||
{
|
||||
faceLandmarkDetector.SetImage ((IntPtr)imgMat.dataAddr (), imgMat.width (), imgMat.height (), (int)imgMat.elemSize ());
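// hands the Mat's raw pixel buffer to dlib without copying, so the Mat must stay alive (and keep its
// size and format) for as long as the detector reads from it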
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Draws the face rect.
|
||||
/// </summary>
|
||||
/// <param name="imgMat">Image mat.</param>
|
||||
/// <param name="rect">Rect.</param>
|
||||
/// <param name="color">Color.</param>
|
||||
/// <param name="thickness">Thickness.</param>
|
||||
public static void DrawFaceRect (Mat imgMat, UnityEngine.Rect rect, Scalar color, int thickness)
|
||||
{
|
||||
Imgproc.rectangle (imgMat, new Point (rect.xMin, rect.yMin), new Point (rect.xMax, rect.yMax), color, thickness);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Draws the face landmark.
|
||||
/// </summary>
|
||||
/// <param name="imgMat">Image mat.</param>
|
||||
/// <param name="points">Points.</param>
|
||||
/// <param name="color">Color.</param>
|
||||
/// <param name="thickness">Thickness.</param>
|
||||
public static void DrawFaceLandmark (Mat imgMat, List<Vector2> points, Scalar color, int thickness)
|
||||
{
|
||||
if (points.Count == 68) {
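// index ranges follow the 68-point iBUG layout: 0-16 jaw, 17-26 eyebrows, 27-35 nose, 36-47 eyes, 48-67 mouth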
|
||||
|
||||
for (int i = 1; i <= 16; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
|
||||
for (int i = 28; i <= 30; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
|
||||
for (int i = 18; i <= 21; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
for (int i = 23; i <= 26; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
for (int i = 31; i <= 35; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
Imgproc.line (imgMat, new Point (points [30].x, points [30].y), new Point (points [35].x, points [35].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
|
||||
for (int i = 37; i <= 41; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
Imgproc.line (imgMat, new Point (points [36].x, points [36].y), new Point (points [41].x, points [41].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
|
||||
for (int i = 43; i <= 47; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
Imgproc.line (imgMat, new Point (points [42].x, points [42].y), new Point (points [47].x, points [47].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
|
||||
for (int i = 49; i <= 59; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
Imgproc.line (imgMat, new Point (points [48].x, points [48].y), new Point (points [59].x, points [59].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
|
||||
for (int i = 61; i <= 67; ++i)
|
||||
Imgproc.line (imgMat, new Point (points [i].x, points [i].y), new Point (points [i - 1].x, points [i - 1].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
Imgproc.line (imgMat, new Point (points [60].x, points [60].y), new Point (points [67].x, points [67].y), new Scalar (0, 255, 0, 255), thickness);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: e29736da80afb5d4aa50c3407eeea008
|
||||
timeCreated: 1463661767
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,38 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Show license.
|
||||
/// </summary>
|
||||
public class ShowLicense : MonoBehaviour
|
||||
{
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 13f2a73f7ca1dbf42b421bdd812acbef
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,312 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Texture2D face swapper sample.
|
||||
/// </summary>
|
||||
public class Texture2DFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range(0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
|
||||
|
||||
Texture2D imgTexture = Resources.Load ("family") as Texture2D;
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = 0;
|
||||
float height = 0;
|
||||
|
||||
width = gameObject.transform.localScale.x;
|
||||
height = gameObject.transform.localScale.y;
|
||||
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
Mat rgbaMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
|
||||
|
||||
OpenCVForUnity.Utils.texture2DToMat (imgTexture, rgbaMat);
|
||||
Debug.Log ("rgbaMat ToString " + rgbaMat.ToString ());
|
||||
|
||||
|
||||
FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
CascadeClassifier cascade = new CascadeClassifier (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
//convert image to greyscale
|
||||
Mat gray = new Mat ();
|
||||
Imgproc.cvtColor (rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//detect Faces
|
||||
MatOfRect faces = new MatOfRect ();
|
||||
Imgproc.equalizeHist (gray, gray);
|
||||
cascade.detectMultiScale (gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
|
||||
// Debug.Log ("faces " + faces.dump ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
|
||||
|
||||
//detect face landmark
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
Debug.Log ("face : " + rect);
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//swap faces
|
||||
//Debug.Log("face points count : " + points.Count);
|
||||
int[] face_nums = new int[landmarkPoints.Count];
|
||||
for (int i = 0; i < face_nums.Length; i++) {
|
||||
face_nums [i] = i;
|
||||
}
|
||||
face_nums = face_nums.OrderBy (i => System.Guid.NewGuid ()).ToArray ();
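// shuffle the face indices (ordering by fresh Guids) so each run pairs up faces at random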
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
DlibFaceSwapper faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
int ann = 0, bob = 0;
|
||||
for (int i = 0; i < face_nums.Length-1; i += 2) {
|
||||
ann = face_nums [i];
|
||||
bob = face_nums [i + 1];
|
||||
|
||||
faceSwapper.SwapFaces (rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
|
||||
}
|
||||
faceSwapper.Dispose ();
|
||||
}
|
||||
|
||||
//show face rects
|
||||
if (isShowingFaceRects) {
|
||||
int ann = 0, bob = 0;
|
||||
for (int i = 0; i < face_nums.Length-1; i += 2) {
|
||||
ann = face_nums [i];
|
||||
bob = face_nums [i + 1];
|
||||
|
||||
UnityEngine.Rect rect_ann = new UnityEngine.Rect (detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
|
||||
UnityEngine.Rect rect_bob = new UnityEngine.Rect (detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
|
||||
Scalar color = new Scalar (Random.Range (0, 256), Random.Range (0, 256), Random.Range (0, 256), 255);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_ann, color, 2);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_bob, color, 2);
|
||||
// Imgproc.putText (rgbaMat, "" + i % 2, new Point (rect_ann.xMin, rect_ann.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
|
||||
// Imgproc.putText (rgbaMat, "" + (i % 2 + 1), new Point (rect_bob.xMin, rect_bob.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
frontalFaceParam.Dispose ();
|
||||
|
||||
Texture2D texture = new Texture2D (rgbaMat.cols (), rgbaMat.rows (), TextureFormat.RGBA32, false);
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture);
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnShuffleButton ()
|
||||
{
|
||||
Start ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 2b5f56412d33c3e499e04f152433fd5b
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,411 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Face Swapper from VideoCapture Sample.
|
||||
/// </summary>
|
||||
public class VideoCaptureFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The width of the frame.
|
||||
/// </summary>
|
||||
private double frameWidth = 320;
|
||||
|
||||
/// <summary>
|
||||
/// The height of the frame.
|
||||
/// </summary>
|
||||
private double frameHeight = 240;
|
||||
|
||||
/// <summary>
|
||||
/// The capture.
|
||||
/// </summary>
|
||||
VideoCapture capture;
|
||||
|
||||
/// <summary>
|
||||
/// The rgb mat.
|
||||
/// </summary>
|
||||
Mat rgbMat;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
|
||||
/// <summary>
|
||||
/// The face Swapper.
|
||||
/// </summary>
|
||||
DlibFaceSwapper faceSwapper;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range(0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
|
||||
|
||||
faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
rgbMat = new Mat ();
|
||||
|
||||
capture = new VideoCapture ();
|
||||
capture.open (OpenCVForUnity.Utils.getFilePath ("couple.avi"));
|
||||
|
||||
if (capture.isOpened ()) {
|
||||
Debug.Log ("capture.isOpened() true");
|
||||
} else {
|
||||
Debug.Log ("capture.isOpened() false");
|
||||
}
|
||||
|
||||
|
||||
Debug.Log ("CAP_PROP_FORMAT: " + capture.get (Videoio.CAP_PROP_FORMAT));
|
||||
Debug.Log ("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get (Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
|
||||
Debug.Log ("CAP_PROP_POS_MSEC: " + capture.get (Videoio.CAP_PROP_POS_MSEC));
|
||||
Debug.Log ("CAP_PROP_POS_FRAMES: " + capture.get (Videoio.CAP_PROP_POS_FRAMES));
|
||||
Debug.Log ("CAP_PROP_POS_AVI_RATIO: " + capture.get (Videoio.CAP_PROP_POS_AVI_RATIO));
|
||||
Debug.Log ("CAP_PROP_FRAME_COUNT: " + capture.get (Videoio.CAP_PROP_FRAME_COUNT));
|
||||
Debug.Log ("CAP_PROP_FPS: " + capture.get (Videoio.CAP_PROP_FPS));
|
||||
Debug.Log ("CAP_PROP_FRAME_WIDTH: " + capture.get (Videoio.CAP_PROP_FRAME_WIDTH));
|
||||
Debug.Log ("CAP_PROP_FRAME_HEIGHT: " + capture.get (Videoio.CAP_PROP_FRAME_HEIGHT));
|
||||
|
||||
|
||||
texture = new Texture2D ((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
|
||||
gameObject.transform.localScale = new Vector3 ((float)frameWidth, (float)frameHeight, 1);
|
||||
float widthScale = (float)Screen.width / (float)frameWidth;
|
||||
float heightScale = (float)Screen.height / (float)frameHeight;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = (float)frameHeight / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
grayMat = new Mat ((int)frameHeight, (int)frameWidth, CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
//Loop play
|
||||
if (capture.get (Videoio.CAP_PROP_POS_FRAMES) >= capture.get (Videoio.CAP_PROP_FRAME_COUNT))
|
||||
capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);
|
||||
|
||||
//Note: a "PlayerLoop called recursively!" error can occur with VideoCapture on iOS; WebCamTexture is recommended there.
|
||||
if (capture.grab ()) {
|
||||
|
||||
capture.retrieve (rgbMat, 0);
|
||||
|
||||
Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
|
||||
//Debug.Log ("Mat toString " + rgbMat.ToString ());
|
||||
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
|
||||
|
||||
//equalize the histogram and run the cascade detector
|
||||
using (Mat equalizeHistMat = new Mat())
|
||||
using (MatOfRect faces = new MatOfRect()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//face swapping
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
int ann = 0, bob = 1;
|
||||
for (int i = 0; i < landmarkPoints.Count-1; i += 2) {
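// swap faces in adjacent pairs (0<->1, 2<->3, ...); an odd face left over at the end is not swapped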
|
||||
ann = i;
|
||||
bob = i + 1;
|
||||
|
||||
faceSwapper.SwapFaces (rgbMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
}
|
||||
}
|
||||
|
||||
//draw face rects
|
||||
if (isShowingFaceRects) {
|
||||
for (int i = 0; i < detectResult.Count; i++) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbMat, rect, new Scalar (255, 0, 0, 255), 2);
|
||||
// Imgproc.putText (rgbMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);
|
||||
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbMat, texture);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the disable event.
|
||||
/// </summary>
|
||||
void OnDestroy ()
|
||||
{
|
||||
capture.release ();
|
||||
|
||||
if (rgbMat != null)
|
||||
rgbMat.Dispose ();
|
||||
if (grayMat != null)
|
||||
grayMat.Dispose ();
|
||||
|
||||
rectangleTracker.Dispose ();
|
||||
faceLandmarkDetector.Dispose ();
|
||||
faceSwapper.Dispose ();
|
||||
frontalFaceParam.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the enable tracking toggle event.
|
||||
/// </summary>
|
||||
public void OnEnableTrackingToggle ()
|
||||
{
|
||||
if (enableTrackingToggle.isOn) {
|
||||
enableTracking = true;
|
||||
} else {
|
||||
enableTracking = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: d8d13016d5ab01348b307ab2d2a60701
|
||||
timeCreated: 1463652228
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,433 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// WebCamTexture face swapper sample.
|
||||
/// </summary>
|
||||
[RequireComponent(typeof(WebCamTextureToMatHelper))]
|
||||
public class WebCamTextureFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The colors.
|
||||
/// </summary>
|
||||
Color32[] colors;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam texture to mat helper.
|
||||
/// </summary>
|
||||
WebCamTextureToMatHelper webCamTextureToMatHelper;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
/// <summary>
|
||||
/// The face swapper.
|
||||
/// </summary>
|
||||
DlibFaceSwapper faceSwapper;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range(0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
|
||||
|
||||
faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
|
||||
webCamTextureToMatHelper.Init ();
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper inited event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperInited ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperInited");
|
||||
|
||||
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
|
||||
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
|
||||
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = gameObject.transform.localScale.x;
|
||||
float height = gameObject.transform.localScale.y;
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
|
||||
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper disposed event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperDisposed ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
|
||||
|
||||
grayMat.Dispose ();
|
||||
|
||||
rectangleTracker.Reset ();
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
|
||||
|
||||
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//equalize the histogram and run the cascade detector
|
||||
using (Mat equalizeHistMat = new Mat())
|
||||
using (MatOfRect faces = new MatOfRect()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//face swapping
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
int ann = 0, bob = 1;
|
||||
for (int i = 0; i < landmarkPoints.Count-1; i += 2) {
|
||||
ann = i;
|
||||
bob = i + 1;
|
||||
|
||||
faceSwapper.SwapFaces (rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
}
|
||||
}

//draw face rects
if (isShowingFaceRects) {
for (int i = 0; i < detectResult.Count; i++) {
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
// Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
// Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
}
}

Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);

OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);

}
}

/// <summary>
/// Raises the disable event.
/// </summary>
void OnDisable ()
{
webCamTextureToMatHelper.Dispose ();

rectangleTracker.Dispose ();
faceLandmarkDetector.Dispose ();
faceSwapper.Dispose ();
frontalFaceParam.Dispose ();
}

/// <summary>
/// Raises the back button event.
/// </summary>
public void OnBackButton ()
{
#if UNITY_5_3
SceneManager.LoadScene ("FaceSwapperSample");
#else
Application.LoadLevel ("FaceSwapperSample");
#endif
}
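// Note: the UNITY_5_3 branch assumes UnityEngine.SceneManagement is imported at the
// top of this file; on older Unity versions the deprecated Application.LoadLevel call
// is used instead.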

/// <summary>
/// Raises the play button event.
/// </summary>
public void OnPlayButton ()
{
webCamTextureToMatHelper.Play ();
}

/// <summary>
/// Raises the pause button event.
/// </summary>
public void OnPauseButton ()
{
webCamTextureToMatHelper.Pause ();
}

/// <summary>
/// Raises the stop button event.
/// </summary>
public void OnStopButton ()
{
webCamTextureToMatHelper.Stop ();
}

/// <summary>
/// Raises the change camera button event.
/// </summary>
public void OnChangeCameraButton ()
{
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
}

/// <summary>
/// Raises the is showing face rects toggle event.
/// </summary>
public void OnIsShowingFaceRectsToggle ()
{
if (isShowingFaceRectsToggle.isOn) {
isShowingFaceRects = true;
} else {
isShowingFaceRects = false;
}
}

/// <summary>
/// Raises the use Dlib face detector toggle event.
/// </summary>
public void OnUseDlibFaceDetecterToggle ()
{
if (useDlibFaceDetecterToggle.isOn) {
useDlibFaceDetecter = true;
} else {
useDlibFaceDetecter = false;
}
}

/// <summary>
/// Raises the is filtering non frontal faces toggle event.
/// </summary>
public void OnIsFilteringNonFrontalFacesToggle ()
{
if (isFilteringNonFrontalFacesToggle.isOn) {
isFilteringNonFrontalFaces = true;
} else {
isFilteringNonFrontalFaces = false;
}
}

/// <summary>
/// Raises the use seamless clone for paste faces toggle event.
/// </summary>
public void OnUseSeamlessCloneForPasteFacesToggle ()
{
if (useSeamlessCloneForPasteFacesToggle.isOn) {
useSeamlessCloneForPasteFaces = true;
} else {
useSeamlessCloneForPasteFaces = false;
}
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
}

/// <summary>
/// Raises the enable tracking toggle event.
/// </summary>
public void OnEnableTrackingToggle ()
{
if (enableTrackingToggle.isOn) {
enableTracking = true;
} else {
enableTracking = false;
}
}

/// <summary>
/// Raises the is showing debug face points toggle event.
/// </summary>
public void OnIsShowingDebugFacePointsToggle ()
{
if (isShowingDebugFacePointsToggle.isOn) {
isShowingDebugFacePoints = true;
} else {
isShowingDebugFacePoints = false;
}
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
}
}
}

@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: 29941611c584b7b4ea9f5a83eec9e1e9
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant:

@ -0,0 +1,428 @@
using UnityEngine;
using System.Collections;

using System;
using OpenCVForUnity;
using UnityEngine.Events;

namespace FaceSwapperSample
{

/// <summary>
/// Web cam texture to mat helper.
/// </summary>
public class WebCamTextureToMatHelper : MonoBehaviour
{
/// <summary>
/// The name of the device.
/// </summary>
public string requestDeviceName = null;

/// <summary>
/// The width.
/// </summary>
public int requestWidth = 640;

/// <summary>
/// The height.
/// </summary>
public int requestHeight = 480;

/// <summary>
/// Should use front facing.
/// </summary>
public bool requestIsFrontFacing = false;

/// <summary>
/// The flip vertical.
/// </summary>
public bool flipVertical = false;

/// <summary>
/// The flip horizontal.
/// </summary>
public bool flipHorizontal = false;

/// <summary>
/// The on inited event.
/// </summary>
public UnityEvent OnInitedEvent;

/// <summary>
/// The on disposed event.
/// </summary>
public UnityEvent OnDisposedEvent;

/// <summary>
/// The web cam texture.
/// </summary>
WebCamTexture webCamTexture;

/// <summary>
/// The web cam device.
/// </summary>
WebCamDevice webCamDevice;

/// <summary>
/// The rgba mat.
/// </summary>
Mat rgbaMat;

/// <summary>
/// The rotated rgba mat.
/// </summary>
Mat rotatedRgbaMat;

/// <summary>
/// The colors.
/// </summary>
Color32[] colors;

/// <summary>
/// The init done.
/// </summary>
bool initDone = false;

/// <summary>
/// The screenOrientation.
/// </summary>
ScreenOrientation screenOrientation = ScreenOrientation.Unknown;


// Use this for initialization
void Start ()
{

}

// Update is called once per frame
void Update ()
{
if (initDone) {
if (screenOrientation != Screen.orientation) {
StartCoroutine (init ());
}
}
}
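// Note: when the screen orientation changes, init () is started again; Dispose () is
// called at the top of init () when initDone is set, so the WebCamTexture and the
// mats are recreated to match the new orientation.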

public void Init ()
{
if (OnInitedEvent == null)
OnInitedEvent = new UnityEvent ();
if (OnDisposedEvent == null)
OnDisposedEvent = new UnityEvent ();

StartCoroutine (init ());
}

/// <summary>
/// Init the specified deviceName, requestWidth, requestHeight and requestIsFrontFacing.
/// </summary>
/// <param name="deviceName">Device name.</param>
/// <param name="requestWidth">Request width.</param>
/// <param name="requestHeight">Request height.</param>
/// <param name="requestIsFrontFacing">If set to <c>true</c> request is front facing.</param>
public void Init (string deviceName, int requestWidth, int requestHeight, bool requestIsFrontFacing)
{
this.requestDeviceName = deviceName;
this.requestWidth = requestWidth;
this.requestHeight = requestHeight;
this.requestIsFrontFacing = requestIsFrontFacing;
if (OnInitedEvent == null)
OnInitedEvent = new UnityEvent ();
if (OnDisposedEvent == null)
OnDisposedEvent = new UnityEvent ();

StartCoroutine (init ());
}
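// Note: passing null as deviceName makes init () fall back to selecting a camera by
// the requestIsFrontFacing flag, or to the first available device if no match is
// found; this is how OnChangeCameraButton switches between front and back cameras.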

/// <summary>
/// Init this instance.
/// </summary>
private IEnumerator init ()
{
if (initDone)
Dispose ();

if (!String.IsNullOrEmpty (requestDeviceName)) {
// Debug.Log ("deviceName is "+requestDeviceName);
webCamTexture = new WebCamTexture (requestDeviceName, requestWidth, requestHeight);
} else {
// Debug.Log ("deviceName is null");
// Checks how many and which cameras are available on the device
for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {

if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing) {

// Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);

webCamDevice = WebCamTexture.devices [cameraIndex];

webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);

break;
}

}
}

if (webCamTexture == null) {
// Debug.Log ("webCamTexture is null");
if (WebCamTexture.devices.Length > 0) {
webCamDevice = WebCamTexture.devices [0];
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
} else {
webCamTexture = new WebCamTexture (requestWidth, requestHeight);
}
}

// Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);


// Starts the camera
webCamTexture.Play ();

while (true) {
//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
#else
if (webCamTexture.didUpdateThisFrame) {
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
while (webCamTexture.width <= 16) {
webCamTexture.GetPixels32 ();
yield return new WaitForEndOfFrame ();
}
#endif
#endif

Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrontFacing " + webCamDevice.isFrontFacing);

colors = new Color32[webCamTexture.width * webCamTexture.height];
rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);

// Debug.Log ("Screen.orientation " + Screen.orientation);
screenOrientation = Screen.orientation;
// screenOrientation = ScreenOrientation.PortraitUpsideDown;

#if !UNITY_EDITOR && !UNITY_STANDALONE
if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
}
#endif

// webCamTexture.Stop ();

initDone = true;

if (OnInitedEvent != null)
OnInitedEvent.Invoke ();

break;
} else {
yield return 0;
}
}
}

/// <summary>
/// Indicates whether this instance has been initialized.
/// </summary>
/// <returns><c>true</c> if initialization is complete; otherwise, <c>false</c>.</returns>
public bool isInited ()
{

return initDone;
}

/// <summary>
/// Play this instance.
/// </summary>
public void Play ()
{
if (initDone)
webCamTexture.Play ();
}

/// <summary>
/// Pause this instance.
/// </summary>
public void Pause ()
{
if (initDone)
webCamTexture.Pause ();
}

/// <summary>
/// Stop this instance.
/// </summary>
public void Stop ()
{
if (initDone)
webCamTexture.Stop ();
}

/// <summary>
/// Indicates whether the web cam texture is playing.
/// </summary>
/// <returns><c>true</c> if the web cam texture is playing; otherwise, <c>false</c>.</returns>
public bool isPlaying ()
{
if (!initDone)
return false;
return webCamTexture.isPlaying;
}

/// <summary>
/// Gets the web cam texture.
/// </summary>
/// <returns>The web cam texture.</returns>
public WebCamTexture GetWebCamTexture ()
{
return webCamTexture;
}

/// <summary>
/// Gets the web cam device.
/// </summary>
/// <returns>The web cam device.</returns>
public WebCamDevice GetWebCamDevice ()
{
return webCamDevice;
}

/// <summary>
/// Indicates whether the video buffer was updated this frame.
/// </summary>
/// <returns><c>true</c> if the buffer was updated this frame; otherwise, <c>false</c>.</returns>
public bool didUpdateThisFrame ()
{
if (!initDone)
return false;

#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
return true;
} else {
return false;
}
#else
return webCamTexture.didUpdateThisFrame;
#endif

}

/// <summary>
/// Gets the mat.
/// </summary>
/// <returns>The mat.</returns>
public Mat GetMat ()
{
if (!initDone || !webCamTexture.isPlaying) {
if (rotatedRgbaMat != null) {
return rotatedRgbaMat;
} else {
return rgbaMat;
}
}

Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);

int flipCode = int.MinValue;

if (webCamDevice.isFrontFacing) {
if (webCamTexture.videoRotationAngle == 0) {
flipCode = 1;
} else if (webCamTexture.videoRotationAngle == 90) {
flipCode = 0;
} else if (webCamTexture.videoRotationAngle == 180) {
flipCode = 0;
} else if (webCamTexture.videoRotationAngle == 270) {
flipCode = 1;
}
} else {
if (webCamTexture.videoRotationAngle == 180) {
flipCode = -1;
} else if (webCamTexture.videoRotationAngle == 270) {
flipCode = -1;
}
}
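// Note: flipCode follows the OpenCV Core.flip convention: 0 flips around the x-axis
// (vertical flip), a positive value flips around the y-axis (horizontal mirror), and
// a negative value flips around both axes; int.MinValue is used here as a sentinel
// meaning "no flip".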

if (flipVertical) {
if (flipCode == int.MinValue) {
flipCode = 0;
} else if (flipCode == 0) {
flipCode = int.MinValue;
} else if (flipCode == 1) {
flipCode = -1;
} else if (flipCode == -1) {
flipCode = 1;
}
}

if (flipHorizontal) {
if (flipCode == int.MinValue) {
flipCode = 1;
} else if (flipCode == 0) {
flipCode = -1;
} else if (flipCode == 1) {
flipCode = int.MinValue;
} else if (flipCode == -1) {
flipCode = 0;
}
}

if (flipCode > int.MinValue) {
Core.flip (rgbaMat, rgbaMat, flipCode);
}
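// Note: in portrait orientations rotatedRgbaMat is non-null (see init ()); transposing
// the mat and then flipping it around the y-axis below rotates the image 90 degrees
// clockwise so that it matches the screen orientation.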

if (rotatedRgbaMat != null) {

using (Mat transposeRgbaMat = rgbaMat.t()) {
Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
}

return rotatedRgbaMat;
} else {
return rgbaMat;
}
}

/// <summary>
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
public void Dispose ()
{
initDone = false;

if (webCamTexture != null) {
webCamTexture.Stop ();
webCamTexture = null;
}
if (rgbaMat != null) {
rgbaMat.Dispose ();
rgbaMat = null;
}
if (rotatedRgbaMat != null) {
rotatedRgbaMat.Dispose ();
rotatedRgbaMat = null;
}
colors = null;

if (OnDisposedEvent != null)
OnDisposedEvent.Invoke ();
}
}
}

@ -0,0 +1,10 @@
fileFormatVersion: 2
guid: a02a8f18206fd484995e61cc923550d2
MonoImporter:
serializedVersion: 2
defaultReferences: []
executionOrder: 0
icon: {instanceID: 0}
userData:
assetBundleName:
assetBundleVariant: