update 1.0.1
This commit is contained in:
Parent
173ca73f3c
Commit
944acfc7d3
@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 213d24cf492691546ac4ae1b8b1073f6
folderAsset: yes
timeCreated: 1479443957
licenseType: Free
DefaultImporter:
  userData:
  assetBundleName:
  assetBundleVariant:
@ -1,7 +1,7 @@
fileFormatVersion: 2
guid: 042de2a632ffe5640bc2ce85b2c331bf
guid: 5903462d6f8e9c045aefcddde2d35304
folderAsset: yes
timeCreated: 1463603964
timeCreated: 1479443957
licenseType: Free
DefaultImporter:
  userData:
@ -0,0 +1,25 @@
%YAML 1.1
%TAG !u! tag:unity3d.com,2011:
--- !u!21 &2100000
Material:
  serializedVersion: 5
  m_ObjectHideFlags: 0
  m_PrefabParentObject: {fileID: 0}
  m_PrefabInternal: {fileID: 0}
  m_Name: quad_material
  m_Shader: {fileID: 10750, guid: 0000000000000000f000000000000000, type: 0}
  m_ShaderKeywords:
  m_LightmapFlags: 5
  m_CustomRenderQueue: -1
  m_SavedProperties:
    serializedVersion: 2
    m_TexEnvs:
      data:
        first:
          name: _MainTex
        second:
          m_Texture: {fileID: 2800000, guid: 60efc80de1039114fa7285c55dc1e203, type: 3}
          m_Scale: {x: 1, y: 1}
          m_Offset: {x: 0, y: 0}
    m_Floats: {}
    m_Colors: {}
@ -0,0 +1,9 @@
fileFormatVersion: 2
guid: 5f62433e1ec2fd04795589f4990b5fe9
folderAsset: yes
timeCreated: 1479458535
licenseType: Free
DefaultImporter:
  userData:
  assetBundleName:
  assetBundleVariant:
@ -1,7 +1,7 @@
fileFormatVersion: 2
guid: 92a3f251f6656834d9660e1ac364c8ae
guid: ebfb2dcb22f180d43973546ddb53ad08
folderAsset: yes
timeCreated: 1464369829
timeCreated: 1464554941
licenseType: Free
DefaultImporter:
  userData:
@ -0,0 +1,31 @@
using System;

namespace OpenCVForUnity.FaceChange
{
    public class DlibFaceChanger : FaceChanger
    {
        // Finds facial landmarks on faces and extracts the useful points
        protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
        {
            if (landmark_points.Length != 68)
                throw new ArgumentNullException("Invalid landmark_points.");

            //points(facial contour)
            points[0] = landmark_points[0].clone();
            points[1] = landmark_points[3].clone();
            points[2] = landmark_points[5].clone();
            points[3] = landmark_points[8].clone();
            points[4] = landmark_points[11].clone();
            points[5] = landmark_points[13].clone();
            points[6] = landmark_points[16].clone();
            Point nose_length = new Point(landmark_points[27].x - landmark_points[30].x, landmark_points[27].y - landmark_points[30].y);
            points[7] = new Point(landmark_points[26].x + nose_length.x, landmark_points[26].y + nose_length.y);
            points[8] = new Point(landmark_points[17].x + nose_length.x, landmark_points[17].y + nose_length.y);

            //affine_transform_keypoints(eyes and chin)
            affine_transform_keypoints[0] = points[3].clone();
            affine_transform_keypoints[1] = landmark_points[36].clone();
            affine_transform_keypoints[2] = landmark_points[45].clone();
        }
    }
}
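DlibFaceChanger maps the 68-point landmark layout produced by DlibFaceLandmarkDetector onto the FaceChanger pipeline added later in this commit. A minimal usage sketch, mirroring what Texture2DFaceChangerSample does further down; variable names such as rgbaMat and landmarkPoints are illustrative only and not part of the commit:

    // Illustrative only: swap the first detected face onto the second face in the
    // same RGBA Mat, using 68-point landmark lists (List<Vector2>) returned by
    // FaceLandmarkDetector.DetectLandmark().
    DlibFaceChanger faceChanger = new DlibFaceChanger ();
    faceChanger.isShowingDebugFacePoints = false;
    faceChanger.SetTargetImage (rgbaMat);                  // frame that is modified in place
    faceChanger.AddFaceChangeData (rgbaMat,
        landmarkPoints [0],                                // source face landmarks
        landmarkPoints [1],                                // target face landmarks
        1f);                                               // mask alpha, clamped to [0, 1]
    faceChanger.ChangeFace ();                             // composites faces and clears queued data
    faceChanger.Dispose ();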
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 8b672b9119e396140b993432c4097d27
|
||||
timeCreated: 1464647045
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,779 @@
|
|||
using System;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
|
||||
namespace OpenCVForUnity.FaceChange
|
||||
{
|
||||
/// <summary>
|
||||
/// FaceChanger.
|
||||
/// This code is a rewrite of https://github.com/mc-jesus/FaceSwap using “OpenCV for Unity”.
|
||||
/// </summary>
|
||||
public class FaceChanger : IDisposable
|
||||
{
|
||||
protected List<FaceChangeData> faceChangeData = new List<FaceChangeData>();
|
||||
|
||||
protected Rect target_rect, source_rect;
|
||||
protected float maskAlpha;
|
||||
|
||||
protected Point[] target_points = new Point[9];
|
||||
protected Point[] source_points = new Point[9];
|
||||
protected Point[] target_affine_transform_keypoints = new Point[3];
|
||||
protected Point[] source_affine_transform_keypoints = new Point[3];
|
||||
protected Size feather_amount = new Size();
|
||||
|
||||
protected Mat target_frame_full;
|
||||
protected Mat target_frame;
|
||||
protected Size target_frame_size;
|
||||
protected Rect target_frame_rect;
|
||||
|
||||
//full size Mat
|
||||
protected Mat target_mask_full;
|
||||
protected Mat source_mask_full;
|
||||
protected Mat warpped_source_mask_full;
|
||||
protected Mat refined_target_and_warpped_source_mask_full;
|
||||
protected Mat refined_mask_full;
|
||||
protected Mat source_face_full;
|
||||
protected Mat warpped_source_face_full;
|
||||
protected Mat warpped_face_full;
|
||||
|
||||
//ROI Mat
|
||||
protected Mat target_mask;
|
||||
protected Mat source_mask;
|
||||
protected Mat warpped_source_mask;
|
||||
protected Mat refined_target_and_warpped_source_mask;
|
||||
protected Mat refined_mask;
|
||||
protected Mat source_face;
|
||||
protected Mat warpped_source_face;
|
||||
protected Mat warpped_face;
|
||||
|
||||
//affineTransforms
|
||||
protected Mat trans_source_to_target = new Mat();
|
||||
|
||||
|
||||
protected byte[,] LUT = new byte[3, 256];
|
||||
protected int[,] source_hist_int = new int[3, 256];
|
||||
protected int[,] target_hist_int = new int[3, 256];
|
||||
protected float[,] source_histogram = new float[3, 256];
|
||||
protected float[,] target_histogram = new float[3, 256];
|
||||
|
||||
public bool enableColorCorrectFace = true;
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
|
||||
// Initialize the face changer with landmarks
|
||||
public FaceChanger()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void SetTargetImage(Mat frame){
|
||||
target_frame_full = frame;
|
||||
|
||||
if (warpped_face_full == null
|
||||
|| (target_frame_full.width() != warpped_face_full.width() || target_frame_full.height() != warpped_face_full.height())
|
||||
|| target_frame_full.type() != warpped_face_full.type())
|
||||
{
|
||||
createMat(target_frame_full);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//Change faces in points on frame
|
||||
public void AddFaceChangeData(Mat source_frame, Point[] source_landmark_points, Point[] target_landmark_points, float alpha)
|
||||
{
|
||||
|
||||
faceChangeData.Add(new FaceChangeData(source_frame, source_landmark_points, target_landmark_points, alpha));
|
||||
|
||||
}
|
||||
|
||||
public void AddFaceChangeData(Mat source_frame, List<Point> source_landmark_points, List<Point> target_landmark_points, float alpha)
|
||||
{
|
||||
|
||||
faceChangeData.Add(new FaceChangeData(source_frame, source_landmark_points.ToArray(), target_landmark_points.ToArray(), alpha));
|
||||
|
||||
}
|
||||
|
||||
public void AddFaceChangeData(Mat source_frame, Vector2[] source_landmark_points, Vector2[] target_landmark_points, float alpha)
|
||||
{
|
||||
Point[] landmark_points_source_arr = new Point[source_landmark_points.Length];
|
||||
for (int i = 0; i < landmark_points_source_arr.Length; i++)
|
||||
{
|
||||
landmark_points_source_arr[i] = new Point(source_landmark_points[i].x, source_landmark_points[i].y);
|
||||
}
|
||||
|
||||
Point[] landmark_points_target_arr = new Point[target_landmark_points.Length];
|
||||
for (int i = 0; i < landmark_points_target_arr.Length; i++)
|
||||
{
|
||||
landmark_points_target_arr[i] = new Point(target_landmark_points[i].x, target_landmark_points[i].y);
|
||||
}
|
||||
|
||||
AddFaceChangeData(source_frame, landmark_points_source_arr, landmark_points_target_arr, alpha);
|
||||
}
|
||||
|
||||
public void AddFaceChangeData(Mat source_frame, List<Vector2> source_landmark_points, List<Vector2> target_landmark_points, float alpha)
|
||||
{
|
||||
Point[] landmark_points_source_arr = new Point[source_landmark_points.Count];
|
||||
for (int i = 0; i < landmark_points_source_arr.Length; i++)
|
||||
{
|
||||
landmark_points_source_arr[i] = new Point(source_landmark_points[i].x, source_landmark_points[i].y);
|
||||
}
|
||||
|
||||
Point[] landmark_points_target_arr = new Point[target_landmark_points.Count];
|
||||
for (int i = 0; i < landmark_points_target_arr.Length; i++)
|
||||
{
|
||||
landmark_points_target_arr[i] = new Point(target_landmark_points[i].x, target_landmark_points[i].y);
|
||||
}
|
||||
|
||||
AddFaceChangeData(source_frame, landmark_points_source_arr, landmark_points_target_arr, alpha);
|
||||
}
|
||||
|
||||
public void ClearFaceChangeData(){
|
||||
faceChangeData.Clear();
|
||||
}
|
||||
|
||||
public void ChangeFace()
|
||||
{
|
||||
foreach (FaceChangeData data in faceChangeData){
|
||||
changeFace(data);
|
||||
}
|
||||
|
||||
pasteSourceFaceOnFrame();
|
||||
|
||||
|
||||
if (isShowingDebugFacePoints)
|
||||
{
|
||||
drawDebugFacePoints();
|
||||
}
|
||||
|
||||
faceChangeData.Clear();
|
||||
|
||||
//reset mask
|
||||
refined_mask_full.setTo(Scalar.all(0));
|
||||
}
|
||||
|
||||
private void changeFace(FaceChangeData data)
|
||||
{
|
||||
maskAlpha = data.alpha;
|
||||
|
||||
getFacePoints(data.target_landmark_points, target_points, target_affine_transform_keypoints);
|
||||
getFacePoints(data.source_landmark_points, source_points, source_affine_transform_keypoints);
|
||||
getFeather_amount(target_points, feather_amount);
|
||||
|
||||
target_rect = Imgproc.boundingRect(new MatOfPoint(target_points));
|
||||
source_rect = Imgproc.boundingRect(new MatOfPoint(source_points));
|
||||
|
||||
Rect rect_in_source_frame = getRectInFrame(data.source_frame, source_rect);
|
||||
|
||||
// Debug.Log(source_rect.x + " " + source_rect.y);
|
||||
|
||||
if (source_rect.width > target_frame_full.width() || source_rect.height > target_frame_full.height())
|
||||
throw new ArgumentNullException("The size of the face area in source image is too large.");
|
||||
|
||||
|
||||
int shift_x = 0;
|
||||
int shift_y = 0;
|
||||
//shift rect
|
||||
if (target_rect.x + source_rect.width <= target_frame_full.width())
|
||||
{
|
||||
shift_x = target_rect.x - source_rect.x;
|
||||
}
|
||||
else
|
||||
{
|
||||
shift_x = target_frame_full.width() - source_rect.width - source_rect.x;
|
||||
}
|
||||
if (target_rect.y + source_rect.height <= target_frame_full.height())
|
||||
{
|
||||
shift_y = target_rect.y - source_rect.y;
|
||||
}
|
||||
else
|
||||
{
|
||||
shift_y = target_frame_full.height() - source_rect.height - source_rect.y;
|
||||
}
|
||||
source_rect.x = source_rect.x + shift_x;
|
||||
source_rect.y = source_rect.y + shift_y;
|
||||
|
||||
//shift points
|
||||
for (int i = 0; i < source_points.Length; i++)
|
||||
{
|
||||
source_points[i].x = source_points[i].x + shift_x;
|
||||
source_points[i].y = source_points[i].y + shift_y;
|
||||
}
|
||||
|
||||
for (int i = 0; i < source_affine_transform_keypoints.Length; i++)
|
||||
{
|
||||
source_affine_transform_keypoints[i].x = source_affine_transform_keypoints[i].x + shift_x;
|
||||
source_affine_transform_keypoints[i].y = source_affine_transform_keypoints[i].y + shift_y;
|
||||
}
|
||||
|
||||
|
||||
if (target_frame != null)
|
||||
{
|
||||
target_frame.Dispose();
|
||||
}
|
||||
target_frame_rect = getMinFrameRect(target_frame_full, target_rect, source_rect);
|
||||
target_frame = new Mat(target_frame_full, target_frame_rect);
|
||||
target_frame_size = new Size(target_frame.cols(), target_frame.rows());
|
||||
|
||||
createMatROI(target_frame_rect);
|
||||
|
||||
|
||||
|
||||
target_rect = getRectInFrame(target_frame, target_rect);
|
||||
source_rect = getRectInFrame(target_frame, source_rect);
|
||||
target_points = getPointsInFrame(target_frame, target_points);
|
||||
source_points = getPointsInFrame(target_frame, source_points);
|
||||
target_affine_transform_keypoints = getPointsInFrame(target_frame, target_affine_transform_keypoints);
|
||||
source_affine_transform_keypoints = getPointsInFrame(target_frame, source_affine_transform_keypoints);
|
||||
|
||||
|
||||
getTransformationMatrice();
|
||||
getMasks();
|
||||
getWarppedMask();
|
||||
refined_mask = getRefinedMask();
|
||||
extractFace(data.source_frame, rect_in_source_frame);
|
||||
warpped_face = getWarppedFace();
|
||||
if (enableColorCorrectFace) colorCorrectFace();
|
||||
featherMask();
|
||||
}
|
||||
|
||||
private void createMat(Mat frame)
|
||||
{
|
||||
disposeMat();
|
||||
|
||||
Size size = frame.size();
|
||||
int type = frame.type();
|
||||
|
||||
target_mask_full = new Mat(size, CvType.CV_8UC1);
|
||||
source_mask_full = new Mat(size, CvType.CV_8UC1);
|
||||
warpped_source_mask_full = new Mat(size, CvType.CV_8UC1);
|
||||
refined_target_and_warpped_source_mask_full = new Mat(size, CvType.CV_8UC1);
|
||||
refined_mask_full = new Mat(size, CvType.CV_8UC1, new Scalar(0));
|
||||
|
||||
source_face_full = new Mat(size, type);
|
||||
warpped_source_face_full = new Mat(size, type);
|
||||
warpped_face_full = new Mat(size, type, Scalar.all(0));
|
||||
}
|
||||
|
||||
private void disposeMat()
|
||||
{
|
||||
|
||||
if (target_mask_full != null)
|
||||
{
|
||||
target_mask_full.Dispose();
|
||||
target_mask_full = null;
|
||||
}
|
||||
if (source_mask_full != null)
|
||||
{
|
||||
source_mask_full.Dispose();
|
||||
source_mask_full = null;
|
||||
}
|
||||
if (warpped_source_mask_full != null)
|
||||
{
|
||||
warpped_source_mask_full.Dispose();
|
||||
warpped_source_mask_full = null;
|
||||
}
|
||||
if (refined_target_and_warpped_source_mask_full != null)
|
||||
{
|
||||
refined_target_and_warpped_source_mask_full.Dispose();
|
||||
refined_target_and_warpped_source_mask_full = null;
|
||||
}
|
||||
|
||||
if (refined_mask_full != null)
|
||||
{
|
||||
refined_mask_full.Dispose();
|
||||
refined_mask_full = null;
|
||||
}
|
||||
|
||||
if (source_face_full != null)
|
||||
{
|
||||
source_face_full.Dispose();
|
||||
source_face_full = null;
|
||||
}
|
||||
if (warpped_source_face_full != null)
|
||||
{
|
||||
warpped_source_face_full.Dispose();
|
||||
warpped_source_face_full = null;
|
||||
}
|
||||
if (warpped_face_full != null)
|
||||
{
|
||||
warpped_face_full.Dispose();
|
||||
warpped_face_full = null;
|
||||
}
|
||||
}
|
||||
|
||||
private void createMatROI(Rect roi)
|
||||
{
|
||||
disposeMatROI();
|
||||
|
||||
target_mask = new Mat(target_mask_full, roi);
|
||||
source_mask = new Mat(source_mask_full, roi);
|
||||
warpped_source_mask = new Mat(warpped_source_mask_full, roi);
|
||||
refined_target_and_warpped_source_mask = new Mat(refined_target_and_warpped_source_mask_full, roi);
|
||||
refined_mask = new Mat(refined_mask_full, roi);
|
||||
|
||||
source_face = new Mat(source_face_full, roi);
|
||||
warpped_source_face = new Mat(warpped_source_face_full, roi);
|
||||
warpped_face = new Mat(warpped_face_full, roi);
|
||||
}
|
||||
|
||||
private void disposeMatROI()
|
||||
{
|
||||
|
||||
if (target_mask != null)
|
||||
{
|
||||
target_mask.Dispose();
|
||||
target_mask = null;
|
||||
}
|
||||
if (source_mask != null)
|
||||
{
|
||||
source_mask.Dispose();
|
||||
source_mask = null;
|
||||
}
|
||||
if (warpped_source_mask != null)
|
||||
{
|
||||
warpped_source_mask.Dispose();
|
||||
warpped_source_mask = null;
|
||||
}
|
||||
if (refined_target_and_warpped_source_mask != null)
|
||||
{
|
||||
refined_target_and_warpped_source_mask.Dispose();
|
||||
refined_target_and_warpped_source_mask = null;
|
||||
}
|
||||
if (refined_mask != null)
|
||||
{
|
||||
refined_mask.Dispose();
|
||||
refined_mask = null;
|
||||
}
|
||||
|
||||
if (source_face != null)
|
||||
{
|
||||
source_face.Dispose();
|
||||
source_face = null;
|
||||
}
|
||||
if (warpped_source_face != null)
|
||||
{
|
||||
warpped_source_face.Dispose();
|
||||
warpped_source_face = null;
|
||||
}
|
||||
if (warpped_face != null)
|
||||
{
|
||||
warpped_face.Dispose();
|
||||
warpped_face = null;
|
||||
}
|
||||
}
|
||||
|
||||
// Returns minimal Mat containing both faces
|
||||
private Rect getMinFrameRect(Mat frame, Rect target_rect, Rect source_rect)
|
||||
{
|
||||
Rect bounding_rect = RectUtils.Union(target_rect, source_rect);
|
||||
bounding_rect = RectUtils.Intersect(bounding_rect, new Rect(0, 0, frame.cols(), frame.rows()));
|
||||
|
||||
return bounding_rect;
|
||||
}
|
||||
|
||||
private Rect getRectInFrame(Mat frame, Rect r)
|
||||
{
|
||||
Size wholesize = new Size();
|
||||
Point ofs = new Point();
|
||||
frame.locateROI(wholesize, ofs);
|
||||
|
||||
Rect rect = new Rect(r.x - (int)ofs.x, r.y - (int)ofs.y, r.width, r.height);
|
||||
rect = RectUtils.Intersect(rect, new Rect(0, 0, frame.cols(), frame.rows()));
|
||||
|
||||
return rect;
|
||||
}
|
||||
|
||||
// Finds facial landmarks on face and extracts the useful points
|
||||
protected virtual void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
|
||||
{
|
||||
if (landmark_points.Length != 9)
|
||||
throw new ArgumentNullException("Invalid landmark_points.");
|
||||
|
||||
//points(facial contour)
|
||||
points[0] = landmark_points[0];
|
||||
points[1] = landmark_points[1];
|
||||
points[2] = landmark_points[2];
|
||||
points[3] = landmark_points[3];
|
||||
points[4] = landmark_points[4];
|
||||
points[5] = landmark_points[5];
|
||||
points[6] = landmark_points[6];
|
||||
points[7] = landmark_points[7];
|
||||
points[8] = landmark_points[8];
|
||||
|
||||
//affine_transform_keypoints(eyes and chin)
|
||||
affine_transform_keypoints[0] = points[3];
|
||||
affine_transform_keypoints[1] = new Point(points[8].x, points[0].y);
|
||||
affine_transform_keypoints[2] = new Point(points[7].x, points[6].y);
|
||||
}
|
||||
|
||||
private void getFeather_amount(Point[] points, Size feather_amount)
|
||||
{
|
||||
feather_amount.width = feather_amount.height = Core.norm(new MatOfPoint(new Point(points[0].x - points[6].x, points[0].y - points[6].y))) / 8;
|
||||
}
|
||||
|
||||
private Point[] getPointsInFrame(Mat frame, Point[] p)
|
||||
{
|
||||
Size wholesize = new Size();
|
||||
Point ofs = new Point();
|
||||
frame.locateROI(wholesize, ofs);
|
||||
|
||||
Point[] points = new Point[p.Length];
|
||||
|
||||
for (int i = 0; i < p.Length; i++)
|
||||
{
|
||||
points[i] = new Point(p[i].x - ofs.x, p[i].y - ofs.y);
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
// Calculates the transformation matrix based on points extracted by getFacePoints
|
||||
private void getTransformationMatrice()
|
||||
{
|
||||
trans_source_to_target = Imgproc.getAffineTransform(new MatOfPoint2f(source_affine_transform_keypoints), new MatOfPoint2f(target_affine_transform_keypoints));
|
||||
}
|
||||
|
||||
// Creates masks for faces based on the points extracted in getFacePoints
|
||||
private void getMasks()
|
||||
{
|
||||
target_mask.setTo(Scalar.all(0));
|
||||
source_mask.setTo(Scalar.all(0));
|
||||
Imgproc.fillConvexPoly(target_mask, new MatOfPoint(target_points), new Scalar(255 * maskAlpha));
|
||||
Imgproc.fillConvexPoly(source_mask, new MatOfPoint(source_points), new Scalar(255 * maskAlpha));
|
||||
}
|
||||
|
||||
// Creates warpped mask out of mask created in getMasks to switch places
|
||||
private void getWarppedMask()
|
||||
{
|
||||
Imgproc.warpAffine(source_mask, warpped_source_mask, trans_source_to_target, target_frame_size, Imgproc.INTER_NEAREST, Core.BORDER_CONSTANT, new Scalar(0));
|
||||
}
|
||||
|
||||
// Returns Mat of refined mask such that warpped mask isn't bigger than original mask
|
||||
private Mat getRefinedMask()
|
||||
{
|
||||
|
||||
Core.bitwise_and(target_mask, warpped_source_mask, refined_target_and_warpped_source_mask);
|
||||
refined_target_and_warpped_source_mask.copyTo(refined_mask, refined_target_and_warpped_source_mask);
|
||||
|
||||
return refined_mask;
|
||||
}
|
||||
|
||||
// Extracts face from source image based on mask created in getMask
|
||||
private void extractFace(Mat frame, Rect rect)
|
||||
{
|
||||
Rect new_source_rect = source_rect.clone();
|
||||
|
||||
if (rect.width != new_source_rect.width)
|
||||
{
|
||||
int shift_x = 0;
|
||||
if (rect.x == 0 || new_source_rect.x == 0)
|
||||
{
|
||||
if (rect.width < new_source_rect.width)
|
||||
{
|
||||
shift_x = new_source_rect.width - rect.width;
|
||||
new_source_rect.x += shift_x;
|
||||
new_source_rect.width -= shift_x;
|
||||
}
|
||||
else
|
||||
{
|
||||
shift_x = rect.width - new_source_rect.width;
|
||||
rect.x += shift_x;
|
||||
rect.width -= shift_x;
|
||||
}
|
||||
}
|
||||
|
||||
if (rect.x + rect.width == frame.width() || source_rect.x + new_source_rect.width == target_frame_full.width())
|
||||
{
|
||||
if (rect.width < new_source_rect.width)
|
||||
{
|
||||
shift_x = new_source_rect.width - rect.width;
|
||||
new_source_rect.width -= shift_x;
|
||||
}
|
||||
else
|
||||
{
|
||||
shift_x = rect.width - new_source_rect.width;
|
||||
rect.width -= shift_x;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (rect.height != new_source_rect.height)
|
||||
{
|
||||
int shift_y = 0;
|
||||
if (rect.y == 0 || new_source_rect.y == 0)
|
||||
{
|
||||
if (rect.height < new_source_rect.height)
|
||||
{
|
||||
shift_y = new_source_rect.height - rect.height;
|
||||
new_source_rect.y += shift_y;
|
||||
new_source_rect.height -= shift_y;
|
||||
}
|
||||
else
|
||||
{
|
||||
shift_y = rect.height - new_source_rect.height;
|
||||
rect.y += shift_y;
|
||||
rect.height -= shift_y;
|
||||
}
|
||||
}
|
||||
|
||||
if (rect.y + rect.height == frame.height() || source_rect.y + new_source_rect.height == target_frame_full.height())
|
||||
{
|
||||
if (rect.height < new_source_rect.height)
|
||||
{
|
||||
shift_y = new_source_rect.height - rect.height;
|
||||
new_source_rect.height -= shift_y;
|
||||
}
|
||||
else
|
||||
{
|
||||
shift_y = rect.height - new_source_rect.height;
|
||||
rect.height -= shift_y;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
using (Mat source_frame_source_rect = new Mat(frame, rect))
|
||||
using (Mat source_face_source_rect = new Mat(source_face, new_source_rect))
|
||||
using (Mat source_mask_source_rect = new Mat(source_mask, new_source_rect))
|
||||
{
|
||||
source_frame_source_rect.copyTo(source_face_source_rect, source_mask_source_rect);
|
||||
}
|
||||
}
|
||||
|
||||
// Creates warpped face out of face extracted in extractFace
|
||||
private Mat getWarppedFace()
|
||||
{
|
||||
Imgproc.warpAffine(source_face, warpped_source_face, trans_source_to_target, target_frame_size, Imgproc.INTER_NEAREST, Core.BORDER_CONSTANT, new Scalar(0, 0, 0));
|
||||
warpped_source_face.copyTo(warpped_face, warpped_source_mask);
|
||||
|
||||
return warpped_face;
|
||||
}
|
||||
|
||||
// Matches Target face color to Source face color and vice versa
|
||||
private void colorCorrectFace()
|
||||
{
|
||||
using (Mat target_frame_target_rect = new Mat(target_frame, target_rect))
|
||||
using (Mat warpped_face_target_rect = new Mat(warpped_face, target_rect))
|
||||
using (Mat warpped_source_mask_target_rect = new Mat(warpped_source_mask, target_rect))
|
||||
{
|
||||
specifiyHistogram(target_frame_target_rect, warpped_face_target_rect, warpped_source_mask_target_rect);
|
||||
}
|
||||
}
|
||||
|
||||
// Blurs edges of mask
|
||||
private void featherMask()
|
||||
{
|
||||
using (Mat refined_mask_target_rect = new Mat(refined_mask, target_rect))
|
||||
{
|
||||
Imgproc.erode(refined_mask, refined_mask, Imgproc.getStructuringElement(Imgproc.MORPH_RECT, feather_amount), new Point(-1, -1), 1, Core.BORDER_CONSTANT, new Scalar(0));
|
||||
Imgproc.blur(refined_mask, refined_mask, feather_amount, new Point(-1, -1), Core.BORDER_CONSTANT);
|
||||
}
|
||||
}
|
||||
|
||||
// Pastes source face on original frame
|
||||
private void pasteSourceFaceOnFrame()
|
||||
{
|
||||
byte[] mask_byte = new byte[refined_mask_full.total() * refined_mask_full.elemSize()];
|
||||
Utils.copyFromMat<byte>(refined_mask_full, mask_byte);
|
||||
byte[] face_byte = new byte[warpped_face_full.total() * warpped_face_full.elemSize()];
|
||||
Utils.copyFromMat<byte>(warpped_face_full, face_byte);
|
||||
byte[] frame_byte = new byte[target_frame_full.total() * target_frame_full.elemSize()];
|
||||
Utils.copyFromMat<byte>(target_frame_full, frame_byte);
|
||||
|
||||
int pixel_i = 0;
|
||||
int elemSize = (int)target_frame_full.elemSize();
|
||||
int total = (int)target_frame_full.total();
|
||||
|
||||
for (int i = 0; i < total; i++)
|
||||
{
|
||||
if (mask_byte[i] != 0)
|
||||
{
|
||||
frame_byte[pixel_i] = (byte)(((255 - mask_byte[i]) * frame_byte[pixel_i] + mask_byte[i] * face_byte[pixel_i]) >> 8);
|
||||
frame_byte[pixel_i + 1] = (byte)(((255 - mask_byte[i]) * frame_byte[pixel_i + 1] + mask_byte[i] * face_byte[pixel_i + 1]) >> 8);
|
||||
frame_byte[pixel_i + 2] = (byte)(((255 - mask_byte[i]) * frame_byte[pixel_i + 2] + mask_byte[i] * face_byte[pixel_i + 2]) >> 8);
|
||||
|
||||
/*
|
||||
frame_byte[pixel_i] = masks_byte[i];
|
||||
frame_byte[pixel_i + 1] = masks_byte[i];
|
||||
frame_byte[pixel_i + 2] = masks_byte[i];
|
||||
*/
|
||||
/*
|
||||
frame_byte[pixel_i] = faces_byte[pixel_i];
|
||||
frame_byte[pixel_i + 1] = faces_byte[pixel_i + 1];
|
||||
frame_byte[pixel_i + 2] = faces_byte[pixel_i + 2];
|
||||
*/
|
||||
}
|
||||
pixel_i += elemSize;
|
||||
}
|
||||
|
||||
Utils.copyToMat(frame_byte, target_frame_full);
|
||||
}
|
||||
|
||||
// Calculates source image histogram and changes target_image to match source hist
|
||||
private void specifiyHistogram(Mat source_image, Mat target_image, Mat mask)
|
||||
{
|
||||
System.Array.Clear(source_hist_int, 0, source_hist_int.Length);
|
||||
System.Array.Clear(target_hist_int, 0, target_hist_int.Length);
|
||||
|
||||
byte[] mask_byte = new byte[mask.total() * mask.elemSize()];
|
||||
Utils.copyFromMat<byte>(mask, mask_byte);
|
||||
byte[] source_image_byte = new byte[source_image.total() * source_image.elemSize()];
|
||||
Utils.copyFromMat<byte>(source_image, source_image_byte);
|
||||
byte[] target_image_byte = new byte[target_image.total() * target_image.elemSize()];
|
||||
Utils.copyFromMat<byte>(target_image, target_image_byte);
|
||||
|
||||
int pixel_i = 0;
|
||||
int elemSize = (int)target_frame.elemSize();
|
||||
int total = (int)mask.total();
|
||||
for (int i = 0; i < total; i++)
|
||||
{
|
||||
if (mask_byte[i] != 0)
|
||||
{
|
||||
source_hist_int[0, source_image_byte[pixel_i]]++;
|
||||
source_hist_int[1, source_image_byte[pixel_i + 1]]++;
|
||||
source_hist_int[2, source_image_byte[pixel_i + 2]]++;
|
||||
|
||||
target_hist_int[0, target_image_byte[pixel_i]]++;
|
||||
target_hist_int[1, target_image_byte[pixel_i + 1]]++;
|
||||
target_hist_int[2, target_image_byte[pixel_i + 2]]++;
|
||||
}
|
||||
// Advance to next pixel
|
||||
pixel_i += elemSize;
|
||||
}
|
||||
|
||||
// Calc CDF
|
||||
for (int i = 1; i < 256; i++)
|
||||
{
|
||||
source_hist_int[0, i] += source_hist_int[0, i - 1];
|
||||
source_hist_int[1, i] += source_hist_int[1, i - 1];
|
||||
source_hist_int[2, i] += source_hist_int[2, i - 1];
|
||||
|
||||
target_hist_int[0, i] += target_hist_int[0, i - 1];
|
||||
target_hist_int[1, i] += target_hist_int[1, i - 1];
|
||||
target_hist_int[2, i] += target_hist_int[2, i - 1];
|
||||
}
|
||||
|
||||
// Normalize CDF
|
||||
for (int i = 0; i < 256; i++)
|
||||
{
|
||||
source_histogram[0, i] = (source_hist_int[0, i] != 0) ? (float)source_hist_int[0, i] / source_hist_int[0, 255] : 0;
|
||||
source_histogram[1, i] = (source_hist_int[1, i] != 0) ? (float)source_hist_int[1, i] / source_hist_int[1, 255] : 0;
|
||||
source_histogram[2, i] = (source_hist_int[2, i] != 0) ? (float)source_hist_int[2, i] / source_hist_int[2, 255] : 0;
|
||||
|
||||
target_histogram[0, i] = (target_hist_int[0, i] != 0) ? (float)target_hist_int[0, i] / target_hist_int[0, 255] : 0;
|
||||
target_histogram[1, i] = (target_hist_int[1, i] != 0) ? (float)target_hist_int[1, i] / target_hist_int[1, 255] : 0;
|
||||
target_histogram[2, i] = (target_hist_int[2, i] != 0) ? (float)target_hist_int[2, i] / target_hist_int[2, 255] : 0;
|
||||
}
|
||||
|
||||
// Create lookup table
|
||||
for (int i = 0; i < 256; i++)
|
||||
{
|
||||
LUT[0, i] = binary_search(target_histogram[0, i], source_histogram, 0);
|
||||
LUT[1, i] = binary_search(target_histogram[1, i], source_histogram, 1);
|
||||
LUT[2, i] = binary_search(target_histogram[2, i], source_histogram, 2);
|
||||
}
|
||||
|
||||
// repaint pixels
|
||||
pixel_i = 0;
|
||||
for (int i = 0; i < total; i++)
|
||||
{
|
||||
if (mask_byte[i] != 0)
|
||||
{
|
||||
target_image_byte[pixel_i] = LUT[0, target_image_byte[pixel_i]];
|
||||
target_image_byte[pixel_i + 1] = LUT[1, target_image_byte[pixel_i + 1]];
|
||||
target_image_byte[pixel_i + 2] = LUT[2, target_image_byte[pixel_i + 2]];
|
||||
}
|
||||
// Advance to next pixel
|
||||
pixel_i += elemSize;
|
||||
}
|
||||
|
||||
Utils.copyToMat(target_image_byte, target_image);
|
||||
}
|
||||
|
||||
private byte binary_search(float needle, float[,] haystack, int haystack_index)
|
||||
{
|
||||
byte l = 0, r = 255, m = 0;
|
||||
while (l < r)
|
||||
{
|
||||
m = (byte)((l + r) / 2);
|
||||
if (needle > haystack[haystack_index, m])
|
||||
l = (byte)(m + 1);
|
||||
else
|
||||
r = (byte)(m - 1);
|
||||
}
|
||||
// TODO check closest value
|
||||
return m;
|
||||
}
|
||||
|
||||
|
||||
private void drawDebugFacePoints()
|
||||
{
|
||||
int index = faceChangeData.Count;
|
||||
foreach (FaceChangeData data in faceChangeData)
|
||||
{
|
||||
getFacePoints(data.target_landmark_points, target_points, target_affine_transform_keypoints);
|
||||
for (int i = 0; i < target_points.Length; i++)
|
||||
{
|
||||
Imgproc.circle(target_frame_full, target_points[i], 1, new Scalar(255, 0, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
for (int i = 0; i < target_affine_transform_keypoints.Length; i++)
|
||||
{
|
||||
Imgproc.circle(target_frame_full, target_affine_transform_keypoints[i], 1, new Scalar(0, 255, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
|
||||
getFacePoints(data.source_landmark_points, source_points, source_affine_transform_keypoints);
|
||||
for (int i = 0; i < source_points.Length; i++)
|
||||
{
|
||||
Imgproc.circle(data.source_frame, source_points[i], 1, new Scalar(255, 0, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
for (int i = 0; i < source_affine_transform_keypoints.Length; i++)
|
||||
{
|
||||
Imgproc.circle(data.source_frame, source_affine_transform_keypoints[i], 1, new Scalar(0, 255, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
|
||||
//
|
||||
target_rect = Imgproc.boundingRect(new MatOfPoint(target_points));
|
||||
source_rect = Imgproc.boundingRect(new MatOfPoint(source_points));
|
||||
Scalar color = new Scalar (127, 127, (int)(255/faceChangeData.Count) * index, 255);
|
||||
Imgproc.rectangle(target_frame_full, this.target_rect.tl(), this.target_rect.br(), color, 1, Imgproc.LINE_8, 0);
|
||||
Imgproc.rectangle(data.source_frame, this.source_rect.tl(), this.source_rect.br(), color, 1, Imgproc.LINE_8, 0);
|
||||
//
|
||||
|
||||
index--;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public void Dispose()
|
||||
{
|
||||
if (target_frame != null)
|
||||
{
|
||||
target_frame.Dispose();
|
||||
target_frame = null;
|
||||
}
|
||||
disposeMatROI();
|
||||
disposeMat();
|
||||
}
|
||||
|
||||
public class FaceChangeData
|
||||
{
|
||||
public Mat source_frame;
|
||||
public Point[] source_landmark_points;
|
||||
public Point[] target_landmark_points;
|
||||
public float alpha
|
||||
{
|
||||
set
|
||||
{ this._alpha = Mathf.Clamp01(value); }
|
||||
get { return this._alpha; }
|
||||
}
|
||||
private float _alpha = 1.0f;
|
||||
|
||||
public FaceChangeData(Mat source_frame, Point[] source_landmark_points, Point[] target_landmark_points, float alpha)
|
||||
{
|
||||
this.source_frame = source_frame;
|
||||
this.source_landmark_points = source_landmark_points;
|
||||
this.target_landmark_points = target_landmark_points;
|
||||
this.alpha = alpha;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
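The compositing step in pasteSourceFaceOnFrame() above is a fixed-point alpha blend: each mask byte m weights the warped source face against the original frame, and the product is renormalized with a right shift by 8. A stand-alone restatement of the per-channel formula (the helper name is hypothetical, not part of the commit):

    // Equivalent to the inline expression in pasteSourceFaceOnFrame():
    // out = ((255 - m) * frame + m * face) / 256
    static byte BlendChannel (byte frame, byte face, byte m)
    {
        return (byte)(((255 - m) * frame + m * face) >> 8);
    }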
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 124167cb3721fe945a710b178fef5b72
|
||||
timeCreated: 1464561302
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,33 @@
using System;

namespace OpenCVForUnity.FaceChange
{

    public class NonRigidObjectTrackerFaceChanger : FaceChanger
    {
        // Finds facial landmarks on faces and extracts the useful points
        protected override void getFacePoints(Point[] landmark_points, Point[] points, Point[] affine_transform_keypoints)
        {
            if (landmark_points.Length != 76)
                throw new ArgumentNullException("Invalid landmark_points.");

            //points(facial contour)
            points[0] = landmark_points[0];
            points[1] = landmark_points[2];
            points[2] = landmark_points[5];
            points[3] = landmark_points[7];
            points[4] = landmark_points[9];
            points[5] = landmark_points[12];
            points[6] = landmark_points[14];
            Point nose_line_base = new Point((landmark_points[37].x + landmark_points[45].x) / 2, (landmark_points[37].y + landmark_points[45].y) / 2);
            Point nose_length = new Point(nose_line_base.x - landmark_points[67].x, nose_line_base.y - landmark_points[67].y);
            points[7] = new Point(landmark_points[15].x + nose_length.x, landmark_points[15].y + nose_length.y);
            points[8] = new Point(landmark_points[21].x + nose_length.x, landmark_points[21].y + nose_length.y);

            //affine_transform_keypoints(eyes and chin)
            affine_transform_keypoints[0] = points[3];
            affine_transform_keypoints[1] = landmark_points[27];
            affine_transform_keypoints[2] = landmark_points[32];
        }
    }
}
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 87ce4ed7743474a42be1de9daddbc473
|
||||
timeCreated: 1464647045
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,40 @@
|
|||
using System;
|
||||
|
||||
namespace OpenCVForUnity.FaceChange
|
||||
{
|
||||
public class PointUtils
|
||||
{
|
||||
/// <summary>
|
||||
/// Returns the distance between the specified two points
|
||||
/// </summary>
|
||||
/// <param name="p1"></param>
|
||||
/// <param name="p2"></param>
|
||||
/// <returns></returns>
|
||||
public static double Distance(Point p1, Point p2)
|
||||
{
|
||||
return Math.Sqrt(Math.Pow(p2.x - p1.x, 2) + Math.Pow(p2.y - p1.y, 2));
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the dot product of two 2D vectors.
|
||||
/// </summary>
|
||||
/// <param name="p1"></param>
|
||||
/// <param name="p2"></param>
|
||||
/// <returns></returns>
|
||||
public static double DotProduct(Point p1, Point p2)
|
||||
{
|
||||
return p1.x * p2.x + p1.y * p2.y;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the cross product of two 2D vectors.
|
||||
/// </summary>
|
||||
/// <param name="p1"></param>
|
||||
/// <param name="p2"></param>
|
||||
/// <returns></returns>
|
||||
public static double CrossProduct(Point p1, Point p2)
|
||||
{
|
||||
return p1.x * p2.y - p2.x * p1.y;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: f24b4787f4fccc9468d3f1d3a5c4c2a9
|
||||
timeCreated: 1464561302
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,58 @@
|
|||
using System;
|
||||
|
||||
namespace OpenCVForUnity.FaceChange
|
||||
{
|
||||
public class RectUtils
|
||||
{
|
||||
/// <summary>
|
||||
/// Creates and returns an inflated copy of the specified CvRect structure.
|
||||
/// </summary>
|
||||
/// <param name="rect">The Rectangle with which to start. This rectangle is not modified. </param>
|
||||
/// <param name="x">The amount to inflate this Rectangle horizontally. </param>
|
||||
/// <param name="y">The amount to inflate this Rectangle vertically. </param>
|
||||
/// <returns></returns>
|
||||
public static Rect Inflate(Rect rect, int x, int y)
|
||||
{
|
||||
rect.x -= x;
|
||||
rect.y -= y;
|
||||
rect.width += (2 * x);
|
||||
rect.height += (2 * y);
|
||||
return rect;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Determines the CvRect structure that represents the intersection of two rectangles.
|
||||
/// </summary>
|
||||
/// <param name="a">A rectangle to intersect. </param>
|
||||
/// <param name="b">A rectangle to intersect. </param>
|
||||
/// <returns></returns>
|
||||
public static Rect Intersect(Rect a, Rect b)
|
||||
{
|
||||
int x1 = Math.Max(a.x, b.x);
|
||||
int x2 = Math.Min(a.x + a.width, b.x + b.width);
|
||||
int y1 = Math.Max(a.y, b.y);
|
||||
int y2 = Math.Min(a.y + a.height, b.y + b.height);
|
||||
|
||||
if (x2 >= x1 && y2 >= y1)
|
||||
return new Rect(x1, y1, x2 - x1, y2 - y1);
|
||||
else
|
||||
return new Rect();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets a CvRect structure that contains the union of two CvRect structures.
|
||||
/// </summary>
|
||||
/// <param name="a">A rectangle to union. </param>
|
||||
/// <param name="b">A rectangle to union. </param>
|
||||
/// <returns></returns>
|
||||
public static Rect Union(Rect a, Rect b)
|
||||
{
|
||||
int x1 = Math.Min(a.x, b.x);
|
||||
int x2 = Math.Max(a.x + a.width, b.x + b.width);
|
||||
int y1 = Math.Min(a.y, b.y);
|
||||
int y2 = Math.Max(a.y + a.height, b.y + b.height);
|
||||
|
||||
return new Rect(x1, y1, x2 - x1, y2 - y1);
|
||||
}
|
||||
}
|
||||
}
|
|
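Union() and Intersect() are the two helpers that getMinFrameRect() in FaceChanger chains together: it takes the union of the target and source face rects and then clips the result to the frame. A sketch of that pattern, assuming the Rect and Mat types from OpenCV for Unity and the field names used there:

    // Illustrative only: bound two face rects and keep the result inside the frame,
    // as FaceChanger.getMinFrameRect() does.
    Rect bounding_rect = RectUtils.Union (target_rect, source_rect);
    bounding_rect = RectUtils.Intersect (bounding_rect, new Rect (0, 0, frame.cols (), frame.rows ()));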
@ -0,0 +1,12 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 378b605fb967c3140aa1fe3b795669f6
|
||||
timeCreated: 1464561302
|
||||
licenseType: Free
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: b92092c4dce17b44994e61262be633b1
|
||||
guid: 626817b4d34f74141961f772cef98476
|
||||
folderAsset: yes
|
||||
timeCreated: 1464539197
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 5dfd231f6ef964a49bdc678672d6eada
|
||||
guid: d3c8a771006749c498468d4731ea7585
|
||||
folderAsset: yes
|
||||
timeCreated: 1464539197
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: f03be5c9f0658d14cb6605b9603ac6dc
|
||||
timeCreated: 1464539199
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -712,11 +712,11 @@ namespace OpenCVForUnity.FaceSwap
|
|||
{
|
||||
Imgproc.circle(small_frame, affine_transform_keypoints_bob[i], 1, new Scalar(0, 255, 0, 255), 2, Core.LINE_AA, 0);
|
||||
}
|
||||
//
|
||||
//
|
||||
Imgproc.rectangle(small_frame, new Point(1, 1), new Point(small_frame_size.width - 2, small_frame_size.height - 2), new Scalar(255, 0, 0, 255), 2, Imgproc.LINE_8, 0);
|
||||
Imgproc.rectangle(small_frame, this.rect_ann.tl(), this.rect_ann.br(), new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_8, 0);
|
||||
Imgproc.rectangle(small_frame, this.rect_bob.tl(), this.rect_bob.br(), new Scalar(255, 0, 0, 255), 1, Imgproc.LINE_8, 0);
|
||||
//
|
||||
//
|
||||
}
|
||||
|
||||
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: ba6438b58e5479b46990e81848dc5b03
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 4137cc5f5ba6b00498b76159d18889ce
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 7d20b04cba21e3e4296c754903a3cb3e
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 66c9ea5092809794caba465840d6d80f
|
||||
timeCreated: 1464539197
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
Binary data
ReadMe.pdf → FaceSwapperSample/ReadMe.pdf
Binary file not shown.
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: a519c66b56ec9a643845078f4cb3b9d5
|
||||
guid: 1c93efbfc058f854aaa16ed1a6eb97d4
|
||||
folderAsset: yes
|
||||
timeCreated: 1464369829
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: ffd5e5b5f55f5174dbbee9559aa57a20
|
||||
folderAsset: yes
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: bcc0418596b47c64088e64d3677066ed
|
||||
timeCreated: 1464369831
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 8428419bd4335ca4a85e8a4a315afc13
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 52e1c721cf4e59a4aab215bf5ab737b0
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: b0c3318827dc03f438fc0b04a3e9dec1
|
||||
timeCreated: 1464369829
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: e4d01064e730d614dbeb596e22b641f7
|
||||
folderAsset: yes
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
After Width: | Height: | Size: 3.8 KiB
|
@ -0,0 +1,55 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 60efc80de1039114fa7285c55dc1e203
|
||||
timeCreated: 1479410294
|
||||
licenseType: Pro
|
||||
TextureImporter:
|
||||
fileIDToRecycleName: {}
|
||||
serializedVersion: 2
|
||||
mipmaps:
|
||||
mipMapMode: 0
|
||||
enableMipMap: 1
|
||||
linearTexture: 0
|
||||
correctGamma: 0
|
||||
fadeOut: 0
|
||||
borderMipMap: 0
|
||||
mipMapFadeDistanceStart: 1
|
||||
mipMapFadeDistanceEnd: 3
|
||||
bumpmap:
|
||||
convertToNormalMap: 0
|
||||
externalNormalMap: 0
|
||||
heightScale: .25
|
||||
normalMapFilter: 0
|
||||
isReadable: 0
|
||||
grayScaleToAlpha: 0
|
||||
generateCubemap: 0
|
||||
cubemapConvolution: 0
|
||||
cubemapConvolutionSteps: 8
|
||||
cubemapConvolutionExponent: 1.5
|
||||
seamlessCubemap: 0
|
||||
textureFormat: -1
|
||||
maxTextureSize: 512
|
||||
textureSettings:
|
||||
filterMode: -1
|
||||
aniso: -1
|
||||
mipBias: -1
|
||||
wrapMode: 0
|
||||
nPOTScale: 1
|
||||
lightmap: 0
|
||||
rGBM: 0
|
||||
compressionQuality: 50
|
||||
spriteMode: 0
|
||||
spriteExtrude: 1
|
||||
spriteMeshType: 1
|
||||
alignment: 0
|
||||
spritePivot: {x: .5, y: .5}
|
||||
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
|
||||
spritePixelsToUnits: 100
|
||||
alphaIsTransparency: 0
|
||||
textureType: 0
|
||||
buildTargetSettings: []
|
||||
spriteSheet:
|
||||
sprites: []
|
||||
spritePackingTag:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Binary file not shown.
After Width: | Height: | Size: 146 KiB
|
@ -0,0 +1,59 @@
|
|||
fileFormatVersion: 2
|
||||
guid: e0a7f14f275499c46b9878f443c430ad
|
||||
timeCreated: 1479533578
|
||||
licenseType: Free
|
||||
TextureImporter:
|
||||
fileIDToRecycleName: {}
|
||||
serializedVersion: 2
|
||||
mipmaps:
|
||||
mipMapMode: 0
|
||||
enableMipMap: 0
|
||||
linearTexture: 0
|
||||
correctGamma: 0
|
||||
fadeOut: 0
|
||||
borderMipMap: 0
|
||||
mipMapFadeDistanceStart: 1
|
||||
mipMapFadeDistanceEnd: 3
|
||||
bumpmap:
|
||||
convertToNormalMap: 0
|
||||
externalNormalMap: 0
|
||||
heightScale: 0.25
|
||||
normalMapFilter: 0
|
||||
isReadable: 1
|
||||
grayScaleToAlpha: 0
|
||||
generateCubemap: 0
|
||||
cubemapConvolution: 0
|
||||
cubemapConvolutionSteps: 7
|
||||
cubemapConvolutionExponent: 1.5
|
||||
seamlessCubemap: 0
|
||||
textureFormat: 4
|
||||
maxTextureSize: 1024
|
||||
textureSettings:
|
||||
filterMode: -1
|
||||
aniso: -1
|
||||
mipBias: -1
|
||||
wrapMode: -1
|
||||
nPOTScale: 0
|
||||
lightmap: 0
|
||||
rGBM: 0
|
||||
compressionQuality: 50
|
||||
allowsAlphaSplitting: 0
|
||||
spriteMode: 0
|
||||
spriteExtrude: 1
|
||||
spriteMeshType: 1
|
||||
alignment: 0
|
||||
spritePivot: {x: 0.5, y: 0.5}
|
||||
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
|
||||
spritePixelsToUnits: 100
|
||||
alphaIsTransparency: 0
|
||||
spriteTessellationDetail: -1
|
||||
textureType: 5
|
||||
buildTargetSettings: []
|
||||
spriteSheet:
|
||||
serializedVersion: 2
|
||||
sprites: []
|
||||
outline: []
|
||||
spritePackingTag:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
Before Width: | Height: | Size: 1.4 MiB | After Width: | Height: | Size: 1.4 MiB
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 211684dbfcf203f4486c53bac05bdd37
|
||||
folderAsset: yes
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@ -1,6 +1,5 @@
|
|||
fileFormatVersion: 2
|
||||
guid: c6d804cb2b659464eb528f25050a855c
|
||||
folderAsset: yes
|
||||
guid: 66328ed0f7ca81e4c8b3c5c89e38d6a1
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 8c0f13bf77892b641a26f022f103dfc9
|
||||
timeCreated: 1463652241
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
File diff suppressed because it is too large
Load diff
|
@ -1,6 +1,5 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 6c9557d03374cf54cbd32ac281028699
|
||||
folderAsset: yes
|
||||
guid: 8930824bc8abb5d488da2aefe77b83f1
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
File diff suppressed because it is too large
Load diff
|
@ -0,0 +1,9 @@
|
|||
fileFormatVersion: 2
|
||||
guid: c49aeb1982c0d5546a3f669562312324
|
||||
folderAsset: yes
|
||||
timeCreated: 1479443957
|
||||
licenseType: Free
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,82 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Face swapper sample.
|
||||
/// </summary>
|
||||
public class FaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnShowLicenseButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("ShowLicense");
|
||||
#else
|
||||
Application.LoadLevel ("ShowLicense");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnTexture2DFaceSwapperSample ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("Texture2DFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("Texture2DFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnWebCamTextureFaceSwapperSample ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("WebCamTextureFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("WebCamTextureFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnVideoCaptureFaceSwapperSample ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("VideoCaptureFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("VideoCaptureFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnTexture2DFaceChangerSample ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("Texture2DFaceChangerSample");
|
||||
#else
|
||||
Application.LoadLevel ("Texture2DFaceChangerSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnWebCamTextureFaceChangerSample ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("WebCamTextureFaceChangerSample");
|
||||
#else
|
||||
Application.LoadLevel ("WebCamTextureFaceChangerSample");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 4c57ef078c131834aa757a71ae3ec6ba
|
||||
timeCreated: 1467085640
|
||||
licenseType: Pro
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: e29736da80afb5d4aa50c3407eeea008
|
||||
timeCreated: 1463661767
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -0,0 +1,38 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Show license.
|
||||
/// </summary>
|
||||
public class ShowLicense : MonoBehaviour
|
||||
{
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,411 @@
|
|||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.FaceChange;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
using WebGLFileUploader;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Texture2D face changer sample.
|
||||
/// </summary>
|
||||
public class Texture2DFaceChangerSample : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The image texture.
|
||||
/// </summary>
|
||||
Texture2D imgTexture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range (0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is upload image button.
|
||||
/// </summary>
|
||||
public Button uploadImageButton;
|
||||
|
||||
/// <summary>
|
||||
/// The haarcascade_frontalface_alt_xml_filepath.
|
||||
/// </summary>
|
||||
private string haarcascade_frontalface_alt_xml_filepath;
|
||||
|
||||
/// <summary>
|
||||
/// The shape_predictor_68_face_landmarks_dat_filepath.
|
||||
/// </summary>
|
||||
private string shape_predictor_68_face_landmarks_dat_filepath;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
WebGLFileUploadManager.SetImageEncodeSetting(true);
|
||||
WebGLFileUploadManager.SetAllowedFileName("\\.(png|jpe?g|gif)$");
|
||||
WebGLFileUploadManager.SetImageShrinkingSize(640, 480);
|
||||
WebGLFileUploadManager.FileUploadEventHandler += fileUploadHandler;
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
StartCoroutine(getFilePathCoroutine());
|
||||
#else
|
||||
haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml");
|
||||
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
|
||||
Run ();
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
private IEnumerator getFilePathCoroutine()
|
||||
{
|
||||
var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
|
||||
haarcascade_frontalface_alt_xml_filepath = result;
|
||||
}));
|
||||
var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
|
||||
shape_predictor_68_face_landmarks_dat_filepath = result;
|
||||
}));
|
||||
|
||||
yield return getFilePathAsync_0_Coroutine;
|
||||
yield return getFilePathAsync_1_Coroutine;
|
||||
|
||||
Run ();
|
||||
uploadImageButton.interactable = true;
|
||||
}
|
||||
#endif
|
||||
|
||||
private void Run ()
|
||||
{
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
|
||||
if(imgTexture == null)
|
||||
imgTexture = Resources.Load ("family") as Texture2D;
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = 0;
|
||||
float height = 0;
|
||||
|
||||
width = gameObject.transform.localScale.x;
|
||||
height = gameObject.transform.localScale.y;
|
||||
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
Mat rgbaMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
|
||||
|
||||
OpenCVForUnity.Utils.texture2DToMat (imgTexture, rgbaMat);
|
||||
Debug.Log ("rgbaMat ToString " + rgbaMat.ToString ());
|
||||
|
||||
if(faceLandmarkDetector == null)
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
|
||||
|
||||
FrontalFaceParam frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
if(cascade == null)
|
||||
cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
//convert image to greyscale
|
||||
Mat gray = new Mat ();
|
||||
Imgproc.cvtColor (rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//detect Faces
|
||||
MatOfRect faces = new MatOfRect ();
|
||||
Imgproc.equalizeHist (gray, gray);
|
||||
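//the minimum detectable face size is set to 5% of the image width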
cascade.detectMultiScale (gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
|
||||
//Debug.Log ("faces " + faces.dump ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
|
||||
gray.Dispose();
|
||||
}
|
||||
|
||||
//detect face landmark
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
Debug.Log ("face : " + rect);
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//filter nonfrontalface
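//remove faces whose frontal face rate is below the lower limit; the index is stepped back because both lists shrink in place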
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//change faces
|
||||
//Debug.Log("face points count : " + points.Count);
|
||||
int[] face_nums = new int[landmarkPoints.Count];
|
||||
for (int i = 0; i < face_nums.Length; i++) {
|
||||
face_nums [i] = i;
|
||||
}
|
||||
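//shuffle the face indices so that a different combination of faces is used on each run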
face_nums = face_nums.OrderBy (i => System.Guid.NewGuid ()).ToArray ();
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
DlibFaceChanger faceChanger = new DlibFaceChanger ();
|
||||
faceChanger.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
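//register change data between the first face in the shuffled order and each remaining face, then apply them all in a single ChangeFace () call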
faceChanger.SetTargetImage(rgbaMat);
|
||||
|
||||
for (int i = 1; i < face_nums.Length; i ++) {
|
||||
faceChanger.AddFaceChangeData (rgbaMat, landmarkPoints [face_nums[0]], landmarkPoints [face_nums[i]], 1);
|
||||
}
|
||||
|
||||
faceChanger.ChangeFace();
|
||||
faceChanger.Dispose ();
|
||||
}
|
||||
|
||||
//show face rects
|
||||
if (isShowingFaceRects) {
|
||||
int ann = 0;
|
||||
for (int i = 0; i < face_nums.Length; i ++) {
|
||||
ann = face_nums [i];
|
||||
UnityEngine.Rect rect_ann = new UnityEngine.Rect (detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_ann, new Scalar(255, 0, 0, 255), 2);
|
||||
}
|
||||
}
|
||||
|
||||
frontalFaceParam.Dispose ();
|
||||
|
||||
Texture2D texture = new Texture2D (rgbaMat.cols (), rgbaMat.rows (), TextureFormat.RGBA32, false);
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture);
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
rgbaMat.Dispose();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Handles the file upload event.
|
||||
/// </summary>
|
||||
/// <param name="result">Result.</param>
|
||||
private void fileUploadHandler(UploadedFileInfo[] result){
|
||||
|
||||
if(result.Length == 0) {
|
||||
Debug.Log("File upload Error!");
|
||||
return;
|
||||
}
|
||||
|
||||
foreach(UploadedFileInfo file in result){
|
||||
if(file.isSuccess){
|
||||
Debug.Log("file.filePath: " + file.filePath + " exists:" + File.Exists(file.filePath));
|
||||
|
||||
imgTexture = new Texture2D (2, 2);
|
||||
byte[] byteArray = File.ReadAllBytes (file.filePath);
|
||||
imgTexture.LoadImage (byteArray);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
Run();
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the destroy event.
|
||||
/// </summary>
|
||||
void OnDestroy ()
|
||||
{
|
||||
WebGLFileUploadManager.FileUploadEventHandler -= fileUploadHandler;
|
||||
WebGLFileUploadManager.Dispose();
|
||||
|
||||
if(faceLandmarkDetector != null)
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
if(cascade != null)
|
||||
cascade.Dispose();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the shuffle button event.
|
||||
/// </summary>
|
||||
public void OnShuffleButton ()
|
||||
{
|
||||
if(imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
|
||||
if(imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
|
||||
if(imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
|
||||
if(imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
|
||||
if(imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the upload image button event.
|
||||
/// </summary>
|
||||
public void OnUploadImageButton ()
|
||||
{
|
||||
WebGLFileUploadManager.PopupDialog(null, "Select image files (.png|.jpg|.gif)");
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,10 @@
|
|||
fileFormatVersion: 2
|
||||
guid: cd27f43d2d1bc4c479f77ae0bba6bb57
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@ -0,0 +1,445 @@
|
|||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using System.Linq;
|
||||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
using WebGLFileUploader;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Texture2D face swapper sample.
|
||||
/// </summary>
|
||||
public class Texture2DFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The image texture.
|
||||
/// </summary>
|
||||
Texture2D imgTexture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range (0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The upload image button.
|
||||
/// </summary>
|
||||
public Button uploadImageButton;
|
||||
|
||||
/// <summary>
|
||||
/// The haarcascade_frontalface_alt_xml_filepath.
|
||||
/// </summary>
|
||||
private string haarcascade_frontalface_alt_xml_filepath;
|
||||
|
||||
/// <summary>
|
||||
/// The shape_predictor_68_face_landmarks_dat_filepath.
|
||||
/// </summary>
|
||||
private string shape_predictor_68_face_landmarks_dat_filepath;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
WebGLFileUploadManager.SetImageEncodeSetting (true);
|
||||
WebGLFileUploadManager.SetAllowedFileName ("\\.(png|jpe?g|gif)$");
|
||||
WebGLFileUploadManager.SetImageShrinkingSize (640, 480);
|
||||
WebGLFileUploadManager.FileUploadEventHandler += fileUploadHandler;
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
StartCoroutine(getFilePathCoroutine());
|
||||
#else
|
||||
haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml");
|
||||
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
|
||||
Run ();
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
private IEnumerator getFilePathCoroutine ()
|
||||
{
|
||||
var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
|
||||
haarcascade_frontalface_alt_xml_filepath = result;
|
||||
}));
|
||||
var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
|
||||
shape_predictor_68_face_landmarks_dat_filepath = result;
|
||||
}));
|
||||
|
||||
yield return getFilePathAsync_0_Coroutine;
|
||||
yield return getFilePathAsync_1_Coroutine;
|
||||
|
||||
Run ();
|
||||
uploadImageButton.interactable = true;
|
||||
}
|
||||
#endif
|
||||
|
||||
private void Run ()
|
||||
{
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
|
||||
if (imgTexture == null)
|
||||
imgTexture = Resources.Load ("family") as Texture2D;
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = 0;
|
||||
float height = 0;
|
||||
|
||||
width = gameObject.transform.localScale.x;
|
||||
height = gameObject.transform.localScale.y;
|
||||
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
Mat rgbaMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
|
||||
|
||||
OpenCVForUnity.Utils.texture2DToMat (imgTexture, rgbaMat);
|
||||
Debug.Log ("rgbaMat ToString " + rgbaMat.ToString ());
|
||||
|
||||
if (faceLandmarkDetector == null)
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
|
||||
|
||||
FrontalFaceParam frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
if (cascade == null)
|
||||
cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
//convert image to greyscale
|
||||
Mat gray = new Mat ();
|
||||
Imgproc.cvtColor (rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//detect Faces
|
||||
MatOfRect faces = new MatOfRect ();
|
||||
Imgproc.equalizeHist (gray, gray);
|
||||
cascade.detectMultiScale (gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
|
||||
//Debug.Log ("faces " + faces.dump ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
|
||||
gray.Dispose ();
|
||||
}
|
||||
|
||||
//detect face landmark
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
Debug.Log ("face : " + rect);
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
//filter nonfrontalface
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//swap faces
|
||||
//Debug.Log("face points count : " + points.Count);
|
||||
int[] face_nums = new int[landmarkPoints.Count];
|
||||
for (int i = 0; i < face_nums.Length; i++) {
|
||||
face_nums [i] = i;
|
||||
}
|
||||
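//shuffle the face indices so that the swapped pairs differ on each run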
face_nums = face_nums.OrderBy (i => System.Guid.NewGuid ()).ToArray ();
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
DlibFaceSwapper faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
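//swap faces in consecutive pairs of the shuffled order; with an odd number of faces the last one is left unchanged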
int ann = 0, bob = 0;
|
||||
for (int i = 0; i < face_nums.Length - 1; i += 2) {
|
||||
ann = face_nums [i];
|
||||
bob = face_nums [i + 1];
|
||||
|
||||
faceSwapper.SwapFaces (rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
|
||||
}
|
||||
faceSwapper.Dispose ();
|
||||
}
|
||||
|
||||
//show face rects
|
||||
if (isShowingFaceRects) {
|
||||
int ann = 0, bob = 0;
|
||||
for (int i = 0; i < face_nums.Length - 1; i += 2) {
|
||||
ann = face_nums [i];
|
||||
bob = face_nums [i + 1];
|
||||
|
||||
UnityEngine.Rect rect_ann = new UnityEngine.Rect (detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
|
||||
UnityEngine.Rect rect_bob = new UnityEngine.Rect (detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
|
||||
Scalar color = new Scalar (Random.Range (0, 256), Random.Range (0, 256), Random.Range (0, 256), 255);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_ann, color, 2);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_bob, color, 2);
|
||||
//Imgproc.putText (rgbaMat, "" + i % 2, new Point (rect_ann.xMin, rect_ann.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
|
||||
//Imgproc.putText (rgbaMat, "" + (i % 2 + 1), new Point (rect_bob.xMin, rect_bob.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
frontalFaceParam.Dispose ();
|
||||
|
||||
Texture2D texture = new Texture2D (rgbaMat.cols (), rgbaMat.rows (), TextureFormat.RGBA32, false);
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture);
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
rgbaMat.Dispose ();
|
||||
}
|
||||
|
||||
private void fileUploadHandler (UploadedFileInfo[] result)
|
||||
{
|
||||
|
||||
if (result.Length == 0) {
|
||||
Debug.Log ("File upload Error!");
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (UploadedFileInfo file in result) {
|
||||
if (file.isSuccess) {
|
||||
Debug.Log ("file.filePath: " + file.filePath + " exists:" + File.Exists (file.filePath));
|
||||
|
||||
imgTexture = new Texture2D (2, 2);
|
||||
byte[] byteArray = File.ReadAllBytes (file.filePath);
|
||||
imgTexture.LoadImage (byteArray);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
Run ();
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the destroy event.
|
||||
/// </summary>
|
||||
void OnDestroy ()
|
||||
{
|
||||
WebGLFileUploadManager.FileUploadEventHandler -= fileUploadHandler;
|
||||
WebGLFileUploadManager.Dispose ();
|
||||
|
||||
if (faceLandmarkDetector != null)
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
if (cascade != null)
|
||||
cascade.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the shuffle button event.
|
||||
/// </summary>
|
||||
public void OnShuffleButton ()
|
||||
{
|
||||
if (imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
|
||||
if (imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
|
||||
if (imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
|
||||
if (imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
|
||||
if (imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
|
||||
if (imgTexture != null)
|
||||
Run ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is upload image button event.
|
||||
/// </summary>
|
||||
public void OnUploadImageButton ()
|
||||
{
|
||||
WebGLFileUploadManager.PopupDialog (null, "Select image files (.png|.jpg|.gif)");
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,466 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Face Swapper from VideoCapture Sample.
|
||||
/// </summary>
|
||||
public class VideoCaptureFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The width of the frame.
|
||||
/// </summary>
|
||||
private double frameWidth = 320;
|
||||
|
||||
/// <summary>
|
||||
/// The height of the frame.
|
||||
/// </summary>
|
||||
private double frameHeight = 240;
|
||||
|
||||
/// <summary>
|
||||
/// The capture.
|
||||
/// </summary>
|
||||
VideoCapture capture;
|
||||
|
||||
/// <summary>
|
||||
/// The rgb mat.
|
||||
/// </summary>
|
||||
Mat rgbMat;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
|
||||
/// <summary>
|
||||
/// The face Swapper.
|
||||
/// </summary>
|
||||
DlibFaceSwapper faceSwapper;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range (0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The haarcascade_frontalface_alt_xml_filepath.
|
||||
/// </summary>
|
||||
private string haarcascade_frontalface_alt_xml_filepath;
|
||||
|
||||
/// <summary>
|
||||
/// The shape_predictor_68_face_landmarks_dat_filepath.
|
||||
/// </summary>
|
||||
private string shape_predictor_68_face_landmarks_dat_filepath;
|
||||
|
||||
/// <summary>
|
||||
/// The couple_avi_filepath.
|
||||
/// </summary>
|
||||
private string couple_avi_filepath;
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
capture = new VideoCapture ();
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
StartCoroutine(getFilePathCoroutine());
|
||||
#else
|
||||
haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml");
|
||||
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
|
||||
couple_avi_filepath = OpenCVForUnity.Utils.getFilePath ("couple.avi");
|
||||
Run ();
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
private IEnumerator getFilePathCoroutine()
|
||||
{
|
||||
var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
|
||||
haarcascade_frontalface_alt_xml_filepath = result;
|
||||
}));
|
||||
var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
|
||||
shape_predictor_68_face_landmarks_dat_filepath = result;
|
||||
}));
|
||||
var getFilePathAsync_2_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("couple.avi", (result) => {
|
||||
couple_avi_filepath = result;
|
||||
}));
|
||||
|
||||
yield return getFilePathAsync_0_Coroutine;
|
||||
yield return getFilePathAsync_1_Coroutine;
|
||||
yield return getFilePathAsync_2_Coroutine;
|
||||
|
||||
Run ();
|
||||
}
|
||||
#endif
|
||||
|
||||
private void Run ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
|
||||
|
||||
faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
rgbMat = new Mat ();
|
||||
|
||||
capture.open (couple_avi_filepath);
|
||||
|
||||
if (capture.isOpened ()) {
|
||||
Debug.Log ("capture.isOpened() true");
|
||||
} else {
|
||||
Debug.Log ("capture.isOpened() false");
|
||||
}
|
||||
|
||||
|
||||
Debug.Log ("CAP_PROP_FORMAT: " + capture.get (Videoio.CAP_PROP_FORMAT));
|
||||
Debug.Log ("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get (Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
|
||||
Debug.Log ("CAP_PROP_POS_MSEC: " + capture.get (Videoio.CAP_PROP_POS_MSEC));
|
||||
Debug.Log ("CAP_PROP_POS_FRAMES: " + capture.get (Videoio.CAP_PROP_POS_FRAMES));
|
||||
Debug.Log ("CAP_PROP_POS_AVI_RATIO: " + capture.get (Videoio.CAP_PROP_POS_AVI_RATIO));
|
||||
Debug.Log ("CAP_PROP_FRAME_COUNT: " + capture.get (Videoio.CAP_PROP_FRAME_COUNT));
|
||||
Debug.Log ("CAP_PROP_FPS: " + capture.get (Videoio.CAP_PROP_FPS));
|
||||
Debug.Log ("CAP_PROP_FRAME_WIDTH: " + capture.get (Videoio.CAP_PROP_FRAME_WIDTH));
|
||||
Debug.Log ("CAP_PROP_FRAME_HEIGHT: " + capture.get (Videoio.CAP_PROP_FRAME_HEIGHT));
|
||||
|
||||
|
||||
texture = new Texture2D ((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
|
||||
gameObject.transform.localScale = new Vector3 ((float)frameWidth, (float)frameHeight, 1);
|
||||
float widthScale = (float)Screen.width / (float)frameWidth;
|
||||
float heightScale = (float)Screen.height / (float)frameHeight;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = (float)frameHeight / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
grayMat = new Mat ((int)frameHeight, (int)frameWidth, CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
//Loop play
|
||||
if (capture.get (Videoio.CAP_PROP_POS_FRAMES) >= capture.get (Videoio.CAP_PROP_FRAME_COUNT))
|
||||
capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);
|
||||
|
||||
//the "PlayerLoop called recursively!" error occurs on iOS; WebCamTexture is recommended there instead.
|
||||
if (capture.grab ()) {
|
||||
|
||||
capture.retrieve (rgbMat, 0);
|
||||
|
||||
Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
|
||||
//Debug.Log ("Mat toString " + rgbMat.ToString ());
|
||||
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
|
||||
|
||||
//detect faces
|
||||
using (Mat equalizeHistMat = new Mat ())
|
||||
using (MatOfRect faces = new MatOfRect ()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
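//update the tracker with this frame's detections and use its tracked rectangles instead of the raw results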
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter nonfrontalface
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//face swapping
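//faces are swapped in consecutive pairs; with an odd number of faces the last one is left unswapped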
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
int ann = 0, bob = 1;
|
||||
for (int i = 0; i < landmarkPoints.Count - 1; i += 2) {
|
||||
ann = i;
|
||||
bob = i + 1;
|
||||
|
||||
faceSwapper.SwapFaces (rgbMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
}
|
||||
}
|
||||
|
||||
//draw face rects
|
||||
if (isShowingFaceRects) {
|
||||
for (int i = 0; i < detectResult.Count; i++) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbMat, rect, new Scalar (255, 0, 0, 255), 2);
|
||||
//Imgproc.putText (rgbMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
}
|
||||
|
||||
Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);
|
||||
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbMat, texture);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the destroy event.
|
||||
/// </summary>
|
||||
void OnDestroy ()
|
||||
{
|
||||
capture.release ();
|
||||
|
||||
if (rgbMat != null)
|
||||
rgbMat.Dispose ();
|
||||
if (grayMat != null)
|
||||
grayMat.Dispose ();
|
||||
|
||||
if (rectangleTracker != null)
|
||||
rectangleTracker.Dispose ();
|
||||
|
||||
if (faceLandmarkDetector != null)
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
if (faceSwapper != null)
|
||||
faceSwapper.Dispose ();
|
||||
|
||||
if (frontalFaceParam != null)
|
||||
frontalFaceParam.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the enable tracking toggle event.
|
||||
/// </summary>
|
||||
public void OnEnableTrackingToggle ()
|
||||
{
|
||||
if (enableTrackingToggle.isOn) {
|
||||
enableTracking = true;
|
||||
} else {
|
||||
enableTracking = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: d8d13016d5ab01348b307ab2d2a60701
|
||||
timeCreated: 1463652228
|
||||
licenseType: Free
|
||||
licenseType: Store
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
|
@ -0,0 +1,609 @@
|
|||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using System.IO;
|
||||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.FaceChange;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
using WebGLFileUploader;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// WebCamTexture face changer sample.
|
||||
/// </summary>
|
||||
[RequireComponent (typeof(WebCamTextureToMatHelper))]
|
||||
public class WebCamTextureFaceChangerSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The colors.
|
||||
/// </summary>
|
||||
Color32[] colors;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam texture to mat helper.
|
||||
/// </summary>
|
||||
WebCamTextureToMatHelper webCamTextureToMatHelper;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
/// <summary>
|
||||
/// The face changer.
|
||||
/// </summary>
|
||||
DlibFaceChanger faceChanger;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range (0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The face mask texture.
|
||||
/// </summary>
|
||||
Texture2D faceMaskTexture;
|
||||
|
||||
/// <summary>
|
||||
/// The face mask mat.
|
||||
/// </summary>
|
||||
Mat faceMaskMat;
|
||||
|
||||
/// <summary>
|
||||
/// The detected face rect.
|
||||
/// </summary>
|
||||
UnityEngine.Rect detectedFaceRect;
|
||||
|
||||
/// <summary>
|
||||
/// The haarcascade_frontalface_alt_xml_filepath.
|
||||
/// </summary>
|
||||
private string haarcascade_frontalface_alt_xml_filepath;
|
||||
|
||||
/// <summary>
|
||||
/// The shape_predictor_68_face_landmarks_dat_filepath.
|
||||
/// </summary>
|
||||
private string shape_predictor_68_face_landmarks_dat_filepath;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
WebGLFileUploadManager.SetImageEncodeSetting (true);
|
||||
WebGLFileUploadManager.SetAllowedFileName ("\\.(png|jpe?g|gif)$");
|
||||
WebGLFileUploadManager.SetImageShrinkingSize (640, 480);
|
||||
WebGLFileUploadManager.FileUploadEventHandler += fileUploadHandler;
|
||||
|
||||
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
StartCoroutine(getFilePathCoroutine());
|
||||
#else
|
||||
haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml");
|
||||
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
|
||||
Run ();
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
private IEnumerator getFilePathCoroutine ()
|
||||
{
|
||||
var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
|
||||
haarcascade_frontalface_alt_xml_filepath = result;
|
||||
}));
|
||||
var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
|
||||
shape_predictor_68_face_landmarks_dat_filepath = result;
|
||||
}));
|
||||
|
||||
yield return getFilePathAsync_0_Coroutine;
|
||||
yield return getFilePathAsync_1_Coroutine;
|
||||
|
||||
Run ();
|
||||
}
|
||||
#endif
|
||||
|
||||
private void Run ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
|
||||
|
||||
faceChanger = new DlibFaceChanger ();
|
||||
faceChanger.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
webCamTextureToMatHelper.Init ();
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper inited event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperInited ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperInited");
|
||||
|
||||
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
|
||||
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
|
||||
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = gameObject.transform.localScale.x;
|
||||
float height = gameObject.transform.localScale.y;
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
|
||||
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper disposed event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperDisposed ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
|
||||
|
||||
grayMat.Dispose ();
|
||||
|
||||
rectangleTracker.Reset ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Handles the file upload event.
|
||||
/// </summary>
|
||||
/// <param name="result">Result.</param>
|
||||
private void fileUploadHandler (UploadedFileInfo[] result)
|
||||
{
|
||||
|
||||
if (result.Length == 0) {
|
||||
Debug.Log ("File upload Error!");
|
||||
return;
|
||||
}
|
||||
|
||||
foreach (UploadedFileInfo file in result) {
|
||||
if (file.isSuccess) {
|
||||
Debug.Log ("file.filePath: " + file.filePath + " exists:" + File.Exists (file.filePath));
|
||||
|
||||
faceMaskTexture = new Texture2D (2, 2);
|
||||
byte[] byteArray = File.ReadAllBytes (file.filePath);
|
||||
faceMaskTexture.LoadImage (byteArray);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (faceMaskTexture != null) {
|
||||
if (faceMaskMat != null)
|
||||
faceMaskMat.Dispose ();
|
||||
|
||||
faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
|
||||
OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
|
||||
Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
|
||||
detectedFaceRect = new UnityEngine.Rect ();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
|
||||
|
||||
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
using (Mat equalizeHistMat = new Mat ())
|
||||
using (MatOfRect faces = new MatOfRect ()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter nonfrontalface
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//face changing
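//when a face mask image is set, change data is built from the mask face and every detected face; otherwise it is built from the first detected face and each of the others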
|
||||
if (faceMaskTexture != null && landmarkPoints.Count >= 1) {
|
||||
|
||||
OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
|
||||
|
||||
if (detectedFaceRect.width == 0.0f || detectedFaceRect.height == 0.0f) {
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, faceMaskMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
if (result.Count >= 1)
|
||||
detectedFaceRect = result [0];
|
||||
} else {
|
||||
|
||||
using (Mat grayMat = new Mat ())
|
||||
using (Mat equalizeHistMat = new Mat ())
|
||||
using (MatOfRect faces = new MatOfRect ()) {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (faceMaskMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
//detectResult = faces.toList ();
|
||||
List<OpenCVForUnity.Rect> faceList = faces.toList ();
|
||||
if (faceList.Count >= 1) {
|
||||
detectedFaceRect = new UnityEngine.Rect (faceList [0].x, faceList [0].y, faceList [0].width, faceList [0].height);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (detectedFaceRect.width > 0 || detectedFaceRect.height > 0) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, faceMaskMat);
|
||||
List<Vector2> sourceLandmarkPoint = faceLandmarkDetector.DetectLandmark (detectedFaceRect);
|
||||
|
||||
if (sourceLandmarkPoint.Count > 0) {
|
||||
faceChanger.SetTargetImage (rgbaMat);
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
faceChanger.AddFaceChangeData (faceMaskMat, sourceLandmarkPoint, landmarkPoints [i], 1);
|
||||
}
|
||||
faceChanger.ChangeFace ();
|
||||
}
|
||||
|
||||
if (isShowingFaceRects) {
|
||||
OpenCVForUnityUtils.DrawFaceRect (faceMaskMat, detectedFaceRect, new Scalar (255, 0, 0, 255), 2);
|
||||
}
|
||||
}
|
||||
} else if (landmarkPoints.Count >= 2) {
|
||||
faceChanger.SetTargetImage (rgbaMat);
|
||||
for (int i = 1; i < landmarkPoints.Count; i ++) {
|
||||
faceChanger.AddFaceChangeData (rgbaMat, landmarkPoints [0], landmarkPoints [i], 1);
|
||||
}
|
||||
faceChanger.ChangeFace ();
|
||||
}
|
||||
|
||||
//draw face rects
|
||||
if (isShowingFaceRects) {
|
||||
for (int i = 0; i < detectResult.Count; i++) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
|
||||
//Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
//Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
}
|
||||
|
||||
//display face mask image.
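//the mask is scaled to a quarter of the frame width and warped into the upper-right corner; BORDER_TRANSPARENT leaves the camera image untouched outside the mask area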
|
||||
if (faceMaskMat != null) {
|
||||
|
||||
float scale = (rgbaMat.width () / 4f) / faceMaskMat.width ();
|
||||
float tx = rgbaMat.width () - faceMaskMat.width () * scale;
|
||||
float ty = 0.0f;
|
||||
Mat trans = new Mat (2, 3, CvType.CV_32F);//1.0, 0.0, tx, 0.0, 1.0, ty);
|
||||
trans.put (0, 0, scale);
|
||||
trans.put (0, 1, 0.0f);
|
||||
trans.put (0, 2, tx);
|
||||
trans.put (1, 0, 0.0f);
|
||||
trans.put (1, 1, scale);
|
||||
trans.put (1, 2, ty);
|
||||
|
||||
Imgproc.warpAffine (faceMaskMat, rgbaMat, trans, rgbaMat.size (), Imgproc.INTER_LINEAR, Core.BORDER_TRANSPARENT, new Scalar (0));
|
||||
}
|
||||
|
||||
|
||||
Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
|
||||
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Raises the disable event.
|
||||
/// </summary>
|
||||
void OnDisable ()
|
||||
{
|
||||
webCamTextureToMatHelper.Dispose ();
|
||||
|
||||
if (cascade != null)
|
||||
cascade.Dispose ();
|
||||
|
||||
if (rectangleTracker != null)
|
||||
rectangleTracker.Dispose ();
|
||||
|
||||
if (faceLandmarkDetector != null)
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
if (faceChanger != null)
|
||||
faceChanger.Dispose ();
|
||||
|
||||
if (frontalFaceParam != null)
|
||||
frontalFaceParam.Dispose ();
|
||||
|
||||
if (faceMaskMat != null)
|
||||
faceMaskMat.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the play button event.
|
||||
/// </summary>
|
||||
public void OnPlayButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Play ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the pause button event.
|
||||
/// </summary>
|
||||
public void OnPauseButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Pause ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the stop button event.
|
||||
/// </summary>
|
||||
public void OnStopButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Stop ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the change camera button event.
|
||||
/// </summary>
|
||||
public void OnChangeCameraButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the enable tracking toggle event.
|
||||
/// </summary>
|
||||
public void OnEnableTrackingToggle ()
|
||||
{
|
||||
if (enableTrackingToggle.isOn) {
|
||||
enableTracking = true;
|
||||
} else {
|
||||
enableTracking = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
if (faceChanger != null)
|
||||
faceChanger.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the add face mask button event.
|
||||
/// </summary>
|
||||
public void OnAddFaceMaskButton ()
|
||||
{
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
WebGLFileUploadManager.PopupDialog(null, "Select image files (.png|.jpg|.gif)");
|
||||
#else
|
||||
if (faceMaskTexture == null) {
|
||||
faceMaskTexture = Resources.Load ("face_mask") as Texture2D;
|
||||
faceMaskMat = new Mat (faceMaskTexture.height, faceMaskTexture.width, CvType.CV_8UC4);
|
||||
OpenCVForUnity.Utils.texture2DToMat (faceMaskTexture, faceMaskMat);
|
||||
Debug.Log ("faceMaskMat ToString " + faceMaskMat.ToString ());
|
||||
detectedFaceRect = new UnityEngine.Rect ();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the remove face mask button event.
|
||||
/// </summary>
|
||||
public void OnRemoveFaceMaskButton ()
|
||||
{
|
||||
if (faceMaskTexture != null) {
|
||||
faceMaskTexture = null;
|
||||
faceMaskMat.Dispose ();
|
||||
faceMaskMat = null;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
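For reference, the face-change flow in the Update() loop above reduces to a three-step sequence on the face changer: set the target frame, queue one face-change entry per target face, then composite. The sketch below only illustrates that sequence; SetTargetImage, AddFaceChangeData, and ChangeFace are the calls used above, while the DlibFaceChanger type name, the wrapper class and method, and their parameters are assumptions made for this example.

using System.Collections.Generic;
using OpenCVForUnity;
using UnityEngine;
// (the using directive for the face changer's namespace is omitted here and assumed)

public static class FaceChangeSketch
{
    // Illustrative helper: change every detected face into the first detected face on one RGBA frame.
    public static void ApplyFirstFaceToOthers (DlibFaceChanger faceChanger, Mat rgbaMat, List<List<Vector2>> landmarkPoints)
    {
        // need at least one source face and one target face
        if (landmarkPoints.Count < 2)
            return;

        // the frame that will receive the changed faces
        faceChanger.SetTargetImage (rgbaMat);

        // queue a change of every other detected face into the first one
        for (int i = 1; i < landmarkPoints.Count; i++) {
            faceChanger.AddFaceChangeData (rgbaMat, landmarkPoints [0], landmarkPoints [i], 1);
        }

        // composite all queued changes into the target image
        faceChanger.ChangeFace ();
    }
}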
|
|
@@ -0,0 +1,10 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 4ec56ffde0d7376439c55b1bce791050
|
||||
MonoImporter:
|
||||
serializedVersion: 2
|
||||
defaultReferences: []
|
||||
executionOrder: 0
|
||||
icon: {instanceID: 0}
|
||||
userData:
|
||||
assetBundleName:
|
||||
assetBundleVariant:
|
|
@@ -0,0 +1,482 @@
|
|||
using System.Collections;
|
||||
using System.Collections.Generic;
|
||||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// WebCamTexture face swapper sample.
|
||||
/// </summary>
|
||||
[RequireComponent (typeof(WebCamTextureToMatHelper))]
|
||||
public class WebCamTextureFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The colors.
|
||||
/// </summary>
|
||||
Color32[] colors;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam texture to mat helper.
|
||||
/// </summary>
|
||||
WebCamTextureToMatHelper webCamTextureToMatHelper;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
/// <summary>
|
||||
/// The face swapper.
|
||||
/// </summary>
|
||||
DlibFaceSwapper faceSwapper;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range (0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The haarcascade_frontalface_alt_xml_filepath.
|
||||
/// </summary>
|
||||
private string haarcascade_frontalface_alt_xml_filepath;
|
||||
|
||||
/// <summary>
|
||||
/// The shape_predictor_68_face_landmarks_dat_filepath.
|
||||
/// </summary>
|
||||
private string shape_predictor_68_face_landmarks_dat_filepath;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
StartCoroutine(getFilePathCoroutine());
|
||||
#else
|
||||
haarcascade_frontalface_alt_xml_filepath = OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml");
|
||||
shape_predictor_68_face_landmarks_dat_filepath = DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat");
|
||||
Run ();
|
||||
#endif
|
||||
}
|
||||
|
||||
#if UNITY_WEBGL && !UNITY_EDITOR
|
||||
private IEnumerator getFilePathCoroutine()
|
||||
{
|
||||
var getFilePathAsync_0_Coroutine = StartCoroutine (OpenCVForUnity.Utils.getFilePathAsync ("haarcascade_frontalface_alt.xml", (result) => {
|
||||
haarcascade_frontalface_alt_xml_filepath = result;
|
||||
}));
|
||||
var getFilePathAsync_1_Coroutine = StartCoroutine (DlibFaceLandmarkDetector.Utils.getFilePathAsync ("shape_predictor_68_face_landmarks.dat", (result) => {
|
||||
shape_predictor_68_face_landmarks_dat_filepath = result;
|
||||
}));
|
||||
|
||||
yield return getFilePathAsync_0_Coroutine;
|
||||
yield return getFilePathAsync_1_Coroutine;
|
||||
|
||||
Run ();
|
||||
}
|
||||
#endif
|
||||
|
||||
private void Run ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (shape_predictor_68_face_landmarks_dat_filepath);
|
||||
|
||||
faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
webCamTextureToMatHelper.Init ();
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper inited event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperInited ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperInited");
|
||||
|
||||
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
|
||||
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
|
||||
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = gameObject.transform.localScale.x;
|
||||
float height = gameObject.transform.localScale.y;
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
|
||||
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (haarcascade_frontalface_alt_xml_filepath);
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper disposed event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperDisposed ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
|
||||
|
||||
grayMat.Dispose ();
|
||||
|
||||
rectangleTracker.Reset ();
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
|
||||
|
||||
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//detect faces in the equalized grayscale image
|
||||
using (Mat equalizeHistMat = new Mat ())
|
||||
using (MatOfRect faces = new MatOfRect ()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
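// drop faces whose frontal-face rate is below the limit; the index is stepped back
// after RemoveAt so the element that slides into slot i is not skipped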
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//face swapping
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
int ann = 0, bob = 1;
|
||||
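// pair the detected faces as (0,1), (2,3), ... and swap within each pair;
// an unpaired face at the end of the list is left untouched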
for (int i = 0; i < landmarkPoints.Count - 1; i += 2) {
|
||||
ann = i;
|
||||
bob = i + 1;
|
||||
|
||||
faceSwapper.SwapFaces (rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
}
|
||||
}
|
||||
|
||||
//draw face rects
|
||||
if (isShowingFaceRects) {
|
||||
for (int i = 0; i < detectResult.Count; i++) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
|
||||
//Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
//Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
|
||||
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Raises the disable event.
|
||||
/// </summary>
|
||||
void OnDisable ()
|
||||
{
|
||||
webCamTextureToMatHelper.Dispose ();
|
||||
|
||||
if (cascade != null)
|
||||
cascade.Dispose ();
|
||||
|
||||
if (rectangleTracker != null)
|
||||
rectangleTracker.Dispose ();
|
||||
|
||||
if (faceLandmarkDetector != null)
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
if (faceSwapper != null)
|
||||
faceSwapper.Dispose ();
|
||||
|
||||
if (frontalFaceParam != null)
|
||||
frontalFaceParam.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the play button event.
|
||||
/// </summary>
|
||||
public void OnPlayButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Play ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the pause button event.
|
||||
/// </summary>
|
||||
public void OnPauseButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Pause ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the stop button event.
|
||||
/// </summary>
|
||||
public void OnStopButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Stop ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the change camera button event.
|
||||
/// </summary>
|
||||
public void OnChangeCameraButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the enable tracking toggle event.
|
||||
/// </summary>
|
||||
public void OnEnableTrackingToggle ()
|
||||
{
|
||||
if (enableTrackingToggle.isOn) {
|
||||
enableTracking = true;
|
||||
} else {
|
||||
enableTracking = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
if(faceSwapper != null) faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -0,0 +1,444 @@
|
|||
using OpenCVForUnity;
|
||||
using System;
|
||||
using System.Collections;
|
||||
using UnityEngine;
|
||||
using UnityEngine.Events;
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Web cam texture to mat helper.
|
||||
/// </summary>
|
||||
public class WebCamTextureToMatHelper : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The name of the device.
|
||||
/// </summary>
|
||||
public string requestDeviceName = null;
|
||||
|
||||
/// <summary>
|
||||
/// The width.
|
||||
/// </summary>
|
||||
public int requestWidth = 640;
|
||||
|
||||
/// <summary>
|
||||
/// The height.
|
||||
/// </summary>
|
||||
public int requestHeight = 480;
|
||||
|
||||
/// <summary>
|
||||
/// Should use front facing.
|
||||
/// </summary>
|
||||
public bool requestIsFrontFacing = false;
|
||||
|
||||
/// <summary>
|
||||
/// The flip vertical.
|
||||
/// </summary>
|
||||
public bool flipVertical = false;
|
||||
|
||||
/// <summary>
|
||||
/// The flip horizontal.
|
||||
/// </summary>
|
||||
public bool flipHorizontal = false;
|
||||
|
||||
/// <summary>
|
||||
/// The on inited event.
|
||||
/// </summary>
|
||||
public UnityEvent OnInitedEvent;
|
||||
|
||||
/// <summary>
|
||||
/// The on disposed event.
|
||||
/// </summary>
|
||||
public UnityEvent OnDisposedEvent;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam texture.
|
||||
/// </summary>
|
||||
WebCamTexture webCamTexture;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam device.
|
||||
/// </summary>
|
||||
WebCamDevice webCamDevice;
|
||||
|
||||
/// <summary>
|
||||
/// The rgba mat.
|
||||
/// </summary>
|
||||
Mat rgbaMat;
|
||||
|
||||
/// <summary>
|
||||
/// The rotated rgba mat.
|
||||
/// </summary>
|
||||
Mat rotatedRgbaMat;
|
||||
|
||||
/// <summary>
|
||||
/// The colors.
|
||||
/// </summary>
|
||||
Color32[] colors;
|
||||
|
||||
/// <summary>
|
||||
/// The init waiting.
|
||||
/// </summary>
|
||||
bool initWaiting = false;
|
||||
|
||||
/// <summary>
|
||||
/// The init done.
|
||||
/// </summary>
|
||||
bool initDone = false;
|
||||
|
||||
/// <summary>
|
||||
/// The screenOrientation.
|
||||
/// </summary>
|
||||
ScreenOrientation screenOrientation = ScreenOrientation.Unknown;
|
||||
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
if (initDone) {
|
||||
if (screenOrientation != Screen.orientation) {
|
||||
StartCoroutine (init ());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Init this instance.
|
||||
/// </summary>
|
||||
public void Init ()
|
||||
{
|
||||
if (initWaiting)
|
||||
return;
|
||||
|
||||
if (OnInitedEvent == null)
|
||||
OnInitedEvent = new UnityEvent ();
|
||||
if (OnDisposedEvent == null)
|
||||
OnDisposedEvent = new UnityEvent ();
|
||||
|
||||
StartCoroutine (init ());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Init this instance.
|
||||
/// </summary>
|
||||
/// <param name="deviceName">Device name.</param>
|
||||
/// <param name="requestWidth">Request width.</param>
|
||||
/// <param name="requestHeight">Request height.</param>
|
||||
/// <param name="requestIsFrontFacing">If set to <c>true</c> request is front facing.</param>
|
||||
/// <param name="OnInited">On inited.</param>
|
||||
public void Init (string deviceName, int requestWidth, int requestHeight, bool requestIsFrontFacing)
|
||||
{
|
||||
if (initWaiting)
|
||||
return;
|
||||
|
||||
this.requestDeviceName = deviceName;
|
||||
this.requestWidth = requestWidth;
|
||||
this.requestHeight = requestHeight;
|
||||
this.requestIsFrontFacing = requestIsFrontFacing;
|
||||
if (OnInitedEvent == null)
|
||||
OnInitedEvent = new UnityEvent ();
|
||||
if (OnDisposedEvent == null)
|
||||
OnDisposedEvent = new UnityEvent ();
|
||||
|
||||
StartCoroutine (init ());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Init this instance by coroutine.
|
||||
/// </summary>
|
||||
private IEnumerator init ()
|
||||
{
|
||||
if (initDone)
|
||||
dispose ();
|
||||
|
||||
initWaiting = true;
|
||||
|
||||
if (!String.IsNullOrEmpty (requestDeviceName)) {
|
||||
//Debug.Log ("deviceName is "+requestDeviceName);
|
||||
webCamTexture = new WebCamTexture (requestDeviceName, requestWidth, requestHeight);
|
||||
} else {
|
||||
//Debug.Log ("deviceName is null");
|
||||
// Checks how many and which cameras are available on the device
|
||||
for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
|
||||
if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing) {
|
||||
|
||||
//Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
|
||||
webCamDevice = WebCamTexture.devices [cameraIndex];
|
||||
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (webCamTexture == null) {
|
||||
if (WebCamTexture.devices.Length > 0) {
|
||||
webCamDevice = WebCamTexture.devices [0];
|
||||
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
|
||||
} else {
|
||||
webCamTexture = new WebCamTexture (requestWidth, requestHeight);
|
||||
}
|
||||
}
|
||||
|
||||
//Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
|
||||
|
||||
// Starts the camera
|
||||
webCamTexture.Play ();
|
||||
|
||||
while (true) {
|
||||
// If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
|
||||
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
|
||||
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
|
||||
#else
|
||||
if (webCamTexture.didUpdateThisFrame) {
|
||||
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
|
||||
while (webCamTexture.width <= 16) {
|
||||
webCamTexture.GetPixels32 ();
|
||||
yield return new WaitForEndOfFrame ();
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
|
||||
Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
|
||||
Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
|
||||
|
||||
if (colors == null || colors.Length != webCamTexture.width * webCamTexture.height)
|
||||
colors = new Color32[webCamTexture.width * webCamTexture.height];
|
||||
rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
|
||||
|
||||
//Debug.Log ("Screen.orientation " + Screen.orientation);
|
||||
screenOrientation = Screen.orientation;
|
||||
|
||||
#if !UNITY_EDITOR && !(UNITY_STANDALONE || UNITY_WEBGL)
|
||||
if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
|
||||
rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
|
||||
}
|
||||
#endif
|
||||
|
||||
initWaiting = false;
|
||||
initDone = true;
|
||||
|
||||
if (OnInitedEvent != null)
|
||||
OnInitedEvent.Invoke ();
|
||||
|
||||
break;
|
||||
} else {
|
||||
yield return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether this instance has finished initializing.
|
||||
/// </summary>
|
||||
/// <returns><c>true</c>, if inited was ised, <c>false</c> otherwise.</returns>
|
||||
public bool isInited ()
|
||||
{
|
||||
return initDone;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Play this instance.
|
||||
/// </summary>
|
||||
public void Play ()
|
||||
{
|
||||
if (initDone)
|
||||
webCamTexture.Play ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Pause this instance.
|
||||
/// </summary>
|
||||
public void Pause ()
|
||||
{
|
||||
if (initDone)
|
||||
webCamTexture.Pause ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Stop this instance.
|
||||
/// </summary>
|
||||
public void Stop ()
|
||||
{
|
||||
if (initDone)
|
||||
webCamTexture.Stop ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the web cam texture is currently playing.
|
||||
/// </summary>
|
||||
/// <returns><c>true</c>, if playing was ised, <c>false</c> otherwise.</returns>
|
||||
public bool isPlaying ()
|
||||
{
|
||||
if (!initDone)
|
||||
return false;
|
||||
return webCamTexture.isPlaying;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the web cam texture.
|
||||
/// </summary>
|
||||
/// <returns>The web cam texture.</returns>
|
||||
public WebCamTexture GetWebCamTexture ()
|
||||
{
|
||||
return webCamTexture;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the web cam device.
|
||||
/// </summary>
|
||||
/// <returns>The web cam device.</returns>
|
||||
public WebCamDevice GetWebCamDevice ()
|
||||
{
|
||||
return webCamDevice;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Indicates whether the web cam texture was updated this frame.
|
||||
/// </summary>
|
||||
/// <returns><c>true</c>, if update this frame was dided, <c>false</c> otherwise.</returns>
|
||||
public bool didUpdateThisFrame ()
|
||||
{
|
||||
if (!initDone)
|
||||
return false;
|
||||
|
||||
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
|
||||
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
return webCamTexture.didUpdateThisFrame;
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the mat.
|
||||
/// </summary>
|
||||
/// <returns>The mat.</returns>
|
||||
public Mat GetMat ()
|
||||
{
|
||||
if (!initDone || !webCamTexture.isPlaying) {
|
||||
if (rotatedRgbaMat != null) {
|
||||
return rotatedRgbaMat;
|
||||
} else {
|
||||
return rgbaMat;
|
||||
}
|
||||
}
|
||||
|
||||
Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);
|
||||
|
||||
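// Note: the flip code composed below follows OpenCV's Core.flip convention,
// combined here with the camera rotation angle and the user flip flags:
//    0 -> flip around the x-axis (vertical flip)
//    1 -> flip around the y-axis (horizontal mirror)
//   -1 -> flip around both axes (same as a 180-degree rotation)
// int.MinValue is used as a sentinel meaning "no flip".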
int flipCode = int.MinValue;
|
||||
|
||||
if (webCamDevice.isFrontFacing) {
|
||||
if (webCamTexture.videoRotationAngle == 0) {
|
||||
flipCode = 1;
|
||||
} else if (webCamTexture.videoRotationAngle == 90) {
|
||||
flipCode = 0;
|
||||
}
|
||||
if (webCamTexture.videoRotationAngle == 180) {
|
||||
flipCode = 0;
|
||||
} else if (webCamTexture.videoRotationAngle == 270) {
|
||||
flipCode = 1;
|
||||
}
|
||||
} else {
|
||||
if (webCamTexture.videoRotationAngle == 180) {
|
||||
flipCode = -1;
|
||||
} else if (webCamTexture.videoRotationAngle == 270) {
|
||||
flipCode = -1;
|
||||
}
|
||||
}
|
||||
|
||||
if (flipVertical) {
|
||||
if (flipCode == int.MinValue) {
|
||||
flipCode = 0;
|
||||
} else if (flipCode == 0) {
|
||||
flipCode = int.MinValue;
|
||||
} else if (flipCode == 1) {
|
||||
flipCode = -1;
|
||||
} else if (flipCode == -1) {
|
||||
flipCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (flipHorizontal) {
|
||||
if (flipCode == int.MinValue) {
|
||||
flipCode = 1;
|
||||
} else if (flipCode == 0) {
|
||||
flipCode = -1;
|
||||
} else if (flipCode == 1) {
|
||||
flipCode = int.MinValue;
|
||||
} else if (flipCode == -1) {
|
||||
flipCode = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (flipCode > int.MinValue) {
|
||||
Core.flip (rgbaMat, rgbaMat, flipCode);
|
||||
}
|
||||
|
||||
if (rotatedRgbaMat != null) {
|
||||
|
||||
using (Mat transposeRgbaMat = rgbaMat.t ()) {
|
||||
Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
|
||||
}
|
||||
|
||||
return rotatedRgbaMat;
|
||||
} else {
|
||||
return rgbaMat;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the buffer colors.
|
||||
/// </summary>
|
||||
/// <returns>The buffer colors.</returns>
|
||||
public Color32[] GetBufferColors ()
|
||||
{
|
||||
return colors;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// To release the resources for the init method.
|
||||
/// </summary>
|
||||
private void dispose ()
|
||||
{
|
||||
initWaiting = false;
|
||||
initDone = false;
|
||||
|
||||
if (webCamTexture != null) {
|
||||
webCamTexture.Stop ();
|
||||
webCamTexture = null;
|
||||
}
|
||||
if (rgbaMat != null) {
|
||||
rgbaMat.Dispose ();
|
||||
rgbaMat = null;
|
||||
}
|
||||
if (rotatedRgbaMat != null) {
|
||||
rotatedRgbaMat.Dispose ();
|
||||
rotatedRgbaMat = null;
|
||||
}
|
||||
|
||||
if (OnDisposedEvent != null)
|
||||
OnDisposedEvent.Invoke ();
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Releases all resources used by the <see cref="WebCamTextureToMatHelper"/> object.
|
||||
/// </summary>
|
||||
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
|
||||
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
|
||||
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
|
||||
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
|
||||
public void Dispose ()
|
||||
{
|
||||
if (initDone)
|
||||
dispose ();
|
||||
|
||||
colors = null;
|
||||
}
|
||||
}
|
||||
}
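A minimal consumer of the helper above follows the lifecycle described in the Dispose remarks: Init once, read frames only when a new one is available, and Dispose when the component is disabled. The sketch below is illustrative only; Init, isPlaying, didUpdateThisFrame, GetMat, Dispose, and OnInitedEvent come from the class above, while the HelperUsageExample component and the assumption that the handler method is registered on OnInitedEvent in the Inspector are not part of this project.

using OpenCVForUnity;
using UnityEngine;

namespace FaceSwapperSample
{
    [RequireComponent (typeof(WebCamTextureToMatHelper))]
    public class HelperUsageExample : MonoBehaviour
    {
        WebCamTextureToMatHelper helper;
        Texture2D texture;
        Color32[] colors;

        void Start ()
        {
            helper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
            helper.Init (); // asynchronous; OnInitedEvent fires once the first camera frame arrives
        }

        // assumed to be registered on the helper's OnInitedEvent in the Inspector
        public void OnWebCamTextureToMatHelperInited ()
        {
            Mat mat = helper.GetMat ();
            colors = new Color32[mat.cols () * mat.rows ()];
            texture = new Texture2D (mat.cols (), mat.rows (), TextureFormat.RGBA32, false);
            gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
        }

        void Update ()
        {
            // only read a frame when the camera actually produced a new one
            if (helper.isPlaying () && helper.didUpdateThisFrame ()) {
                OpenCVForUnity.Utils.matToTexture2D (helper.GetMat (), texture, colors);
            }
        }

        void OnDisable ()
        {
            // releases the WebCamTexture and the Mats allocated by the helper
            helper.Dispose ();
        }
    }
}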
|
Binary data
Materials/quad_material.mat
Binary file not shown.
|
@@ -9,8 +9,13 @@ Demo Video
|
|||
-----
|
||||
[![](http://img.youtube.com/vi/lQPL85LbSYo/0.jpg)](https://www.youtube.com/watch?v=lQPL85LbSYo)
|
||||
|
||||
Demo Application
|
||||
Demo
|
||||
-----
|
||||
- WebGL
|
||||
<https://enoxsoftware.github.io/FaceSwapperSample/webgl_sample/index.html>
|
||||
- WebGL (using shape_predictor_68_face_landmarks_for_mobile.dat)
|
||||
<https://enoxsoftware.github.io/FaceSwapperSample/webgl_sample_for_mobile/index.html>
|
||||
- Android
|
||||
<https://play.google.com/store/apps/details?id=com.enoxsoftware.faceswappersample>
|
||||
|
||||
Manual
|
||||
|
|
Binary data
Scenes/FaceSwapperSample.unity
Binary file not shown.
Binary data
Scenes/ShowLicense.unity
Binary file not shown.
Binary data
Scenes/Texture2DFaceSwapperSample.unity
Binary file not shown.
Binary data
Scenes/VideoCaptureFaceSwapperSample.unity
Binary file not shown.
Binary data
Scenes/WebCamTextureFaceSwapperSample.unity
Binary file not shown.
|
@@ -1,64 +0,0 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Face swapper sample.
|
||||
/// </summary>
|
||||
public class FaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnShowLicenseButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("ShowLicense");
|
||||
#else
|
||||
Application.LoadLevel ("ShowLicense");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnTexture2DFaceSwapperSample ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("Texture2DFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("Texture2DFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnWebCamTextureFaceSwapperSample()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("WebCamTextureFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("WebCamTextureFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnVideoCaptureFaceSwapperSample()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("VideoCaptureFaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("VideoCaptureFaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,38 +0,0 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Show license.
|
||||
/// </summary>
|
||||
public class ShowLicense : MonoBehaviour
|
||||
{
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,312 +0,0 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using System.Linq;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Texture2D face swapper sample.
|
||||
/// </summary>
|
||||
public class Texture2DFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range(0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
|
||||
|
||||
Texture2D imgTexture = Resources.Load ("family") as Texture2D;
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (imgTexture.width, imgTexture.height, 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = 0;
|
||||
float height = 0;
|
||||
|
||||
width = gameObject.transform.localScale.x;
|
||||
height = gameObject.transform.localScale.y;
|
||||
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
Mat rgbaMat = new Mat (imgTexture.height, imgTexture.width, CvType.CV_8UC4);
|
||||
|
||||
OpenCVForUnity.Utils.texture2DToMat (imgTexture, rgbaMat);
|
||||
Debug.Log ("rgbaMat ToString " + rgbaMat.ToString ());
|
||||
|
||||
|
||||
FaceLandmarkDetector faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
CascadeClassifier cascade = new CascadeClassifier (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
//convert image to greyscale
|
||||
Mat gray = new Mat ();
|
||||
Imgproc.cvtColor (rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//detect Faces
|
||||
MatOfRect faces = new MatOfRect ();
|
||||
Imgproc.equalizeHist (gray, gray);
|
||||
cascade.detectMultiScale (gray, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (gray.cols () * 0.05, gray.cols () * 0.05), new Size ());
|
||||
// Debug.Log ("faces " + faces.dump ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
|
||||
|
||||
//detect face landmark
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
Debug.Log ("face : " + rect);
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceRect(imgMat, rect, new Scalar(255, 0, 0, 255), 2);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(imgMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
faceLandmarkDetector.Dispose ();
|
||||
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
//swap faces
|
||||
//Debug.Log("face points count : " + points.Count);
|
||||
int[] face_nums = new int[landmarkPoints.Count];
|
||||
for (int i = 0; i < face_nums.Length; i++) {
|
||||
face_nums [i] = i;
|
||||
}
|
||||
face_nums = face_nums.OrderBy (i => System.Guid.NewGuid ()).ToArray ();
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
DlibFaceSwapper faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
int ann = 0, bob = 0;
|
||||
for (int i = 0; i < face_nums.Length-1; i += 2) {
|
||||
ann = face_nums [i];
|
||||
bob = face_nums [i + 1];
|
||||
|
||||
faceSwapper.SwapFaces (rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
|
||||
}
|
||||
faceSwapper.Dispose ();
|
||||
}
|
||||
|
||||
//show face rects
|
||||
if (isShowingFaceRects) {
|
||||
int ann = 0, bob = 0;
|
||||
for (int i = 0; i < face_nums.Length-1; i += 2) {
|
||||
ann = face_nums [i];
|
||||
bob = face_nums [i + 1];
|
||||
|
||||
UnityEngine.Rect rect_ann = new UnityEngine.Rect (detectResult [ann].x, detectResult [ann].y, detectResult [ann].width, detectResult [ann].height);
|
||||
UnityEngine.Rect rect_bob = new UnityEngine.Rect (detectResult [bob].x, detectResult [bob].y, detectResult [bob].width, detectResult [bob].height);
|
||||
Scalar color = new Scalar (Random.Range (0, 256), Random.Range (0, 256), Random.Range (0, 256), 255);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_ann, color, 2);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect_bob, color, 2);
|
||||
// Imgproc.putText (rgbaMat, "" + i % 2, new Point (rect_ann.xMin, rect_ann.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
|
||||
// Imgproc.putText (rgbaMat, "" + (i % 2 + 1), new Point (rect_bob.xMin, rect_bob.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, color, 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
frontalFaceParam.Dispose ();
|
||||
|
||||
Texture2D texture = new Texture2D (rgbaMat.cols (), rgbaMat.rows (), TextureFormat.RGBA32, false);
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture);
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
public void OnShuffleButton ()
|
||||
{
|
||||
Start ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,411 +0,0 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// Face Swapper from VideoCapture Sample.
|
||||
/// </summary>
|
||||
public class VideoCaptureFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The width of the frame.
|
||||
/// </summary>
|
||||
private double frameWidth = 320;
|
||||
|
||||
/// <summary>
|
||||
/// The height of the frame.
|
||||
/// </summary>
|
||||
private double frameHeight = 240;
|
||||
|
||||
/// <summary>
|
||||
/// The capture.
|
||||
/// </summary>
|
||||
VideoCapture capture;
|
||||
|
||||
/// <summary>
|
||||
/// The rgb mat.
|
||||
/// </summary>
|
||||
Mat rgbMat;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
|
||||
/// <summary>
|
||||
/// The face Swapper.
|
||||
/// </summary>
|
||||
DlibFaceSwapper faceSwapper;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = true;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range(0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
|
||||
|
||||
faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
rgbMat = new Mat ();
|
||||
|
||||
capture = new VideoCapture ();
|
||||
capture.open (OpenCVForUnity.Utils.getFilePath ("couple.avi"));
|
||||
|
||||
if (capture.isOpened ()) {
|
||||
Debug.Log ("capture.isOpened() true");
|
||||
} else {
|
||||
Debug.Log ("capture.isOpened() false");
|
||||
}
|
||||
|
||||
|
||||
Debug.Log ("CAP_PROP_FORMAT: " + capture.get (Videoio.CAP_PROP_FORMAT));
|
||||
Debug.Log ("CV_CAP_PROP_PREVIEW_FORMAT: " + capture.get (Videoio.CV_CAP_PROP_PREVIEW_FORMAT));
|
||||
Debug.Log ("CAP_PROP_POS_MSEC: " + capture.get (Videoio.CAP_PROP_POS_MSEC));
|
||||
Debug.Log ("CAP_PROP_POS_FRAMES: " + capture.get (Videoio.CAP_PROP_POS_FRAMES));
|
||||
Debug.Log ("CAP_PROP_POS_AVI_RATIO: " + capture.get (Videoio.CAP_PROP_POS_AVI_RATIO));
|
||||
Debug.Log ("CAP_PROP_FRAME_COUNT: " + capture.get (Videoio.CAP_PROP_FRAME_COUNT));
|
||||
Debug.Log ("CAP_PROP_FPS: " + capture.get (Videoio.CAP_PROP_FPS));
|
||||
Debug.Log ("CAP_PROP_FRAME_WIDTH: " + capture.get (Videoio.CAP_PROP_FRAME_WIDTH));
|
||||
Debug.Log ("CAP_PROP_FRAME_HEIGHT: " + capture.get (Videoio.CAP_PROP_FRAME_HEIGHT));
|
||||
|
||||
|
||||
texture = new Texture2D ((int)(frameWidth), (int)(frameHeight), TextureFormat.RGBA32, false);
|
||||
gameObject.transform.localScale = new Vector3 ((float)frameWidth, (float)frameHeight, 1);
|
||||
float widthScale = (float)Screen.width / (float)frameWidth;
|
||||
float heightScale = (float)Screen.height / (float)frameHeight;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = ((float)frameWidth * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = (float)frameHeight / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
grayMat = new Mat ((int)frameHeight, (int)frameWidth, CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
//Loop play
|
||||
if (capture.get (Videoio.CAP_PROP_POS_FRAMES) >= capture.get (Videoio.CAP_PROP_FRAME_COUNT))
|
||||
capture.set (Videoio.CAP_PROP_POS_FRAMES, 0);
|
||||
|
||||
//Note: a "PlayerLoop called recursively!" error can occur on iOS; using WebCamTexture is recommended instead.
|
||||
if (capture.grab ()) {
|
||||
|
||||
capture.retrieve (rgbMat, 0);
|
||||
|
||||
Imgproc.cvtColor (rgbMat, rgbMat, Imgproc.COLOR_BGR2RGB);
|
||||
//Debug.Log ("Mat toString " + rgbMat.ToString ());
|
||||
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbMat, grayMat, Imgproc.COLOR_RGB2GRAY);
|
||||
|
||||
//detect faces in the equalized grayscale image
|
||||
using (Mat equalizeHistMat = new Mat())
|
||||
using (MatOfRect faces = new MatOfRect()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
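// Note on the filtering loop above: it removes entries from both lists while iterating, so the
// index is decremented (i--) after each RemoveAt to avoid skipping the element that slides into
// the freed slot. detectResult and landmarkPoints are assumed to stay index-aligned, which only
// holds when DetectLandmark returned points for every detected rect.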
|
||||
|
||||
//face swapping
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
int ann = 0, bob = 1;
|
||||
for (int i = 0; i < landmarkPoints.Count-1; i += 2) {
|
||||
ann = i;
|
||||
bob = i + 1;
|
||||
|
||||
faceSwapper.SwapFaces (rgbMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
}
|
||||
}
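// Faces are swapped in consecutive pairs: (0,1), (2,3), ... With an odd number of detected
// faces the last one is left untouched, since the loop stops at landmarkPoints.Count - 1.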
|
||||
|
||||
//draw face rects
|
||||
if (isShowingFaceRects) {
|
||||
for (int i = 0; i < detectResult.Count; i++) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbMat, rect, new Scalar (255, 0, 0, 255), 2);
|
||||
// Imgproc.putText (rgbMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Imgproc.putText (rgbMat, "W:" + rgbMat.width () + " H:" + rgbMat.height () + " SO:" + Screen.orientation, new Point (5, rgbMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255), 1, Imgproc.LINE_AA, false);
|
||||
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbMat, texture);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the destroy event.
|
||||
/// </summary>
|
||||
void OnDestroy ()
|
||||
{
|
||||
capture.release ();
|
||||
|
||||
if (rgbMat != null)
|
||||
rgbMat.Dispose ();
|
||||
if (grayMat != null)
|
||||
grayMat.Dispose ();
|
||||
|
||||
rectangleTracker.Dispose ();
|
||||
faceLandmarkDetector.Dispose ();
|
||||
faceSwapper.Dispose ();
|
||||
frontalFaceParam.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
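// This handler (and the similar toggle handlers below) could equivalently assign the toggle
// state directly, e.g. (equivalent sketch):
//     isShowingFaceRects = isShowingFaceRectsToggle.isOn;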
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the enable tracking toggle event.
|
||||
/// </summary>
|
||||
public void OnEnableTrackingToggle ()
|
||||
{
|
||||
if (enableTrackingToggle.isOn) {
|
||||
enableTracking = true;
|
||||
} else {
|
||||
enableTracking = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,433 +0,0 @@
|
|||
using DlibFaceLandmarkDetector;
|
||||
using OpenCVForUnity;
|
||||
using OpenCVForUnity.RectangleTrack;
|
||||
using OpenCVForUnity.FaceSwap;
|
||||
using System.Collections.Generic;
|
||||
using UnityEngine;
|
||||
using UnityEngine.UI;
|
||||
|
||||
#if UNITY_5_3
|
||||
using UnityEngine.SceneManagement;
|
||||
#endif
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
/// <summary>
|
||||
/// WebCamTexture face swapper sample.
|
||||
/// </summary>
|
||||
[RequireComponent(typeof(WebCamTextureToMatHelper))]
|
||||
public class WebCamTextureFaceSwapperSample : MonoBehaviour
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// The colors.
|
||||
/// </summary>
|
||||
Color32[] colors;
|
||||
|
||||
/// <summary>
|
||||
/// The gray mat.
|
||||
/// </summary>
|
||||
Mat grayMat;
|
||||
|
||||
/// <summary>
|
||||
/// The texture.
|
||||
/// </summary>
|
||||
Texture2D texture;
|
||||
|
||||
/// <summary>
|
||||
/// The cascade.
|
||||
/// </summary>
|
||||
CascadeClassifier cascade;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam texture to mat helper.
|
||||
/// </summary>
|
||||
WebCamTextureToMatHelper webCamTextureToMatHelper;
|
||||
|
||||
/// <summary>
|
||||
/// The face landmark detector.
|
||||
/// </summary>
|
||||
FaceLandmarkDetector faceLandmarkDetector;
|
||||
/// <summary>
|
||||
/// The face swapper.
|
||||
/// </summary>
|
||||
DlibFaceSwapper faceSwapper;
|
||||
|
||||
/// <summary>
|
||||
/// The detection based tracker.
|
||||
/// </summary>
|
||||
RectangleTracker rectangleTracker;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face parameter.
|
||||
/// </summary>
|
||||
FrontalFaceParam frontalFaceParam;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects.
|
||||
/// </summary>
|
||||
public bool isShowingFaceRects = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing face rects toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingFaceRectsToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The use Dlib face detector flag.
|
||||
/// </summary>
|
||||
public bool useDlibFaceDetecter = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use dlib face detecter toggle.
|
||||
/// </summary>
|
||||
public Toggle useDlibFaceDetecterToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces.
|
||||
/// </summary>
|
||||
public bool isFilteringNonFrontalFaces;
|
||||
|
||||
/// <summary>
|
||||
/// The is filtering non frontal faces toggle.
|
||||
/// </summary>
|
||||
public Toggle isFilteringNonFrontalFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The frontal face rate lower limit.
|
||||
/// </summary>
|
||||
[Range(0.0f, 1.0f)]
|
||||
public float
|
||||
frontalFaceRateLowerLimit;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces.
|
||||
/// </summary>
|
||||
public bool useSeamlessCloneForPasteFaces = false;
|
||||
|
||||
/// <summary>
|
||||
/// The use seamless clone for paste faces toggle.
|
||||
/// </summary>
|
||||
public Toggle useSeamlessCloneForPasteFacesToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracker flag.
|
||||
/// </summary>
|
||||
public bool enableTracking = true;
|
||||
|
||||
/// <summary>
|
||||
/// The enable tracking toggle.
|
||||
/// </summary>
|
||||
public Toggle enableTrackingToggle;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points.
|
||||
/// </summary>
|
||||
public bool isShowingDebugFacePoints = false;
|
||||
|
||||
/// <summary>
|
||||
/// The is showing debug face points toggle.
|
||||
/// </summary>
|
||||
public Toggle isShowingDebugFacePointsToggle;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
rectangleTracker = new RectangleTracker ();
|
||||
|
||||
faceLandmarkDetector = new FaceLandmarkDetector (DlibFaceLandmarkDetector.Utils.getFilePath ("shape_predictor_68_face_landmarks.dat"));
|
||||
|
||||
faceSwapper = new DlibFaceSwapper ();
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
|
||||
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
|
||||
webCamTextureToMatHelper.Init ();
|
||||
|
||||
isShowingFaceRectsToggle.isOn = isShowingFaceRects;
|
||||
useDlibFaceDetecterToggle.isOn = useDlibFaceDetecter;
|
||||
isFilteringNonFrontalFacesToggle.isOn = isFilteringNonFrontalFaces;
|
||||
useSeamlessCloneForPasteFacesToggle.isOn = useSeamlessCloneForPasteFaces;
|
||||
enableTrackingToggle.isOn = enableTracking;
|
||||
isShowingDebugFacePointsToggle.isOn = isShowingDebugFacePoints;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper inited event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperInited ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperInited");
|
||||
|
||||
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
colors = new Color32[webCamTextureMat.cols () * webCamTextureMat.rows ()];
|
||||
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
|
||||
|
||||
|
||||
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
|
||||
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
|
||||
|
||||
float width = gameObject.transform.localScale.x;
|
||||
float height = gameObject.transform.localScale.y;
|
||||
|
||||
float widthScale = (float)Screen.width / width;
|
||||
float heightScale = (float)Screen.height / height;
|
||||
if (widthScale < heightScale) {
|
||||
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
|
||||
} else {
|
||||
Camera.main.orthographicSize = height / 2;
|
||||
}
|
||||
|
||||
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
|
||||
|
||||
|
||||
|
||||
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
|
||||
cascade = new CascadeClassifier (OpenCVForUnity.Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
|
||||
if (cascade.empty ()) {
|
||||
Debug.LogError ("cascade file is not loaded.Please copy from “FaceTrackerSample/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
|
||||
}
|
||||
|
||||
frontalFaceParam = new FrontalFaceParam ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the web cam texture to mat helper disposed event.
|
||||
/// </summary>
|
||||
public void OnWebCamTextureToMatHelperDisposed ()
|
||||
{
|
||||
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
|
||||
|
||||
grayMat.Dispose ();
|
||||
|
||||
rectangleTracker.Reset ();
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
|
||||
if (webCamTextureToMatHelper.isPlaying () && webCamTextureToMatHelper.didUpdateThisFrame ()) {
|
||||
|
||||
Mat rgbaMat = webCamTextureToMatHelper.GetMat ();
|
||||
|
||||
//face detection
|
||||
List<OpenCVForUnity.Rect> detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
if (useDlibFaceDetecter) {
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<UnityEngine.Rect> result = faceLandmarkDetector.Detect ();
|
||||
|
||||
foreach (var unityRect in result) {
|
||||
detectResult.Add (new OpenCVForUnity.Rect ((int)unityRect.x, (int)unityRect.y, (int)unityRect.width, (int)unityRect.height));
|
||||
}
|
||||
} else {
|
||||
//convert image to greyscale
|
||||
Imgproc.cvtColor (rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
|
||||
|
||||
//equalize the histogram to improve cascade detection
|
||||
using (Mat equalizeHistMat = new Mat())
|
||||
using (MatOfRect faces = new MatOfRect()) {
|
||||
Imgproc.equalizeHist (grayMat, equalizeHistMat);
|
||||
|
||||
cascade.detectMultiScale (equalizeHistMat, faces, 1.1f, 2, 0 | Objdetect.CASCADE_SCALE_IMAGE, new OpenCVForUnity.Size (equalizeHistMat.cols () * 0.15, equalizeHistMat.cols () * 0.15), new Size ());
|
||||
|
||||
detectResult = faces.toList ();
|
||||
}
|
||||
}
|
||||
|
||||
//face tracking
|
||||
if (enableTracking) {
|
||||
rectangleTracker.UpdateTrackedObjects (detectResult);
|
||||
detectResult = new List<OpenCVForUnity.Rect> ();
|
||||
rectangleTracker.GetObjects (detectResult, true);
|
||||
}
|
||||
|
||||
//face landmark detection
|
||||
OpenCVForUnityUtils.SetImage (faceLandmarkDetector, rgbaMat);
|
||||
List<List<Vector2>> landmarkPoints = new List<List<Vector2>> ();
|
||||
foreach (var openCVRect in detectResult) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (openCVRect.x, openCVRect.y, openCVRect.width, openCVRect.height);
|
||||
|
||||
List<Vector2> points = faceLandmarkDetector.DetectLandmark (rect);
|
||||
if (points.Count > 0) {
|
||||
//OpenCVForUnityUtils.DrawFaceLandmark(rgbaMat, points, new Scalar(0, 255, 0, 255), 2);
|
||||
landmarkPoints.Add (points);
|
||||
}
|
||||
}
|
||||
|
||||
//filter out non-frontal faces
|
||||
if (isFilteringNonFrontalFaces) {
|
||||
for (int i = 0; i < landmarkPoints.Count; i++) {
|
||||
if (frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]) < frontalFaceRateLowerLimit) {
|
||||
detectResult.RemoveAt (i);
|
||||
landmarkPoints.RemoveAt (i);
|
||||
i--;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//face swapping
|
||||
if (landmarkPoints.Count >= 2) {
|
||||
int ann = 0, bob = 1;
|
||||
for (int i = 0; i < landmarkPoints.Count-1; i += 2) {
|
||||
ann = i;
|
||||
bob = i + 1;
|
||||
|
||||
faceSwapper.SwapFaces (rgbaMat, landmarkPoints [ann], landmarkPoints [bob], 1);
|
||||
}
|
||||
}
|
||||
|
||||
//draw face rects
|
||||
if (isShowingFaceRects) {
|
||||
for (int i = 0; i < detectResult.Count; i++) {
|
||||
UnityEngine.Rect rect = new UnityEngine.Rect (detectResult [i].x, detectResult [i].y, detectResult [i].width, detectResult [i].height);
|
||||
OpenCVForUnityUtils.DrawFaceRect (rgbaMat, rect, new Scalar (255, 0, 0, 255), 2);
|
||||
// Imgproc.putText (rgbaMat, " " + frontalFaceParam.getAngleOfFrontalFace (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
// Imgproc.putText (rgbaMat, " " + frontalFaceParam.getFrontalFaceRate (landmarkPoints [i]), new Point (rect.xMin, rect.yMin - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 0.5, new Scalar (255, 255, 255, 255), 1, Imgproc.LINE_AA, false);
|
||||
|
||||
OpenCVForUnity.Utils.matToTexture2D (rgbaMat, texture, colors);
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/// <summary>
|
||||
/// Raises the disable event.
|
||||
/// </summary>
|
||||
void OnDisable ()
|
||||
{
|
||||
webCamTextureToMatHelper.Dispose ();
|
||||
|
||||
rectangleTracker.Dispose ();
|
||||
faceLandmarkDetector.Dispose ();
|
||||
faceSwapper.Dispose ();
|
||||
frontalFaceParam.Dispose ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the back button event.
|
||||
/// </summary>
|
||||
public void OnBackButton ()
|
||||
{
|
||||
#if UNITY_5_3
|
||||
SceneManager.LoadScene ("FaceSwapperSample");
|
||||
#else
|
||||
Application.LoadLevel ("FaceSwapperSample");
|
||||
#endif
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the play button event.
|
||||
/// </summary>
|
||||
public void OnPlayButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Play ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the pause button event.
|
||||
/// </summary>
|
||||
public void OnPauseButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Pause ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the stop button event.
|
||||
/// </summary>
|
||||
public void OnStopButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Stop ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the change camera button event.
|
||||
/// </summary>
|
||||
public void OnChangeCameraButton ()
|
||||
{
|
||||
webCamTextureToMatHelper.Init (null, webCamTextureToMatHelper.requestWidth, webCamTextureToMatHelper.requestHeight, !webCamTextureToMatHelper.requestIsFrontFacing);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing face rects toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingFaceRectsToggle ()
|
||||
{
|
||||
if (isShowingFaceRectsToggle.isOn) {
|
||||
isShowingFaceRects = true;
|
||||
} else {
|
||||
isShowingFaceRects = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use Dlib face detector toggle event.
|
||||
/// </summary>
|
||||
public void OnUseDlibFaceDetecterToggle ()
|
||||
{
|
||||
if (useDlibFaceDetecterToggle.isOn) {
|
||||
useDlibFaceDetecter = true;
|
||||
} else {
|
||||
useDlibFaceDetecter = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is filtering non frontal faces toggle event.
|
||||
/// </summary>
|
||||
public void OnIsFilteringNonFrontalFacesToggle ()
|
||||
{
|
||||
if (isFilteringNonFrontalFacesToggle.isOn) {
|
||||
isFilteringNonFrontalFaces = true;
|
||||
} else {
|
||||
isFilteringNonFrontalFaces = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the use seamless clone for paste faces toggle event.
|
||||
/// </summary>
|
||||
public void OnUseSeamlessCloneForPasteFacesToggle ()
|
||||
{
|
||||
if (useSeamlessCloneForPasteFacesToggle.isOn) {
|
||||
useSeamlessCloneForPasteFaces = true;
|
||||
} else {
|
||||
useSeamlessCloneForPasteFaces = false;
|
||||
}
|
||||
faceSwapper.useSeamlessCloneForPasteFaces = useSeamlessCloneForPasteFaces;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the enable tracking toggle event.
|
||||
/// </summary>
|
||||
public void OnEnableTrackingToggle ()
|
||||
{
|
||||
if (enableTrackingToggle.isOn) {
|
||||
enableTracking = true;
|
||||
} else {
|
||||
enableTracking = false;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Raises the is showing debug face points toggle event.
|
||||
/// </summary>
|
||||
public void OnIsShowingDebugFacePointsToggle ()
|
||||
{
|
||||
if (isShowingDebugFacePointsToggle.isOn) {
|
||||
isShowingDebugFacePoints = true;
|
||||
} else {
|
||||
isShowingDebugFacePoints = false;
|
||||
}
|
||||
faceSwapper.isShowingDebugFacePoints = isShowingDebugFacePoints;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,428 +0,0 @@
|
|||
using UnityEngine;
|
||||
using System.Collections;
|
||||
|
||||
using System;
|
||||
using OpenCVForUnity;
|
||||
using UnityEngine.Events;
|
||||
|
||||
namespace FaceSwapperSample
|
||||
{
|
||||
|
||||
/// <summary>
|
||||
/// Web cam texture to mat helper.
|
||||
/// </summary>
|
||||
public class WebCamTextureToMatHelper : MonoBehaviour
|
||||
{
|
||||
/// <summary>
|
||||
/// The name of the device.
|
||||
/// </summary>
|
||||
public string requestDeviceName = null;
|
||||
|
||||
/// <summary>
|
||||
/// The width.
|
||||
/// </summary>
|
||||
public int requestWidth = 640;
|
||||
|
||||
/// <summary>
|
||||
/// The height.
|
||||
/// </summary>
|
||||
public int requestHeight = 480;
|
||||
|
||||
/// <summary>
|
||||
/// Should use front facing.
|
||||
/// </summary>
|
||||
public bool requestIsFrontFacing = false;
|
||||
|
||||
/// <summary>
|
||||
/// The flip vertical.
|
||||
/// </summary>
|
||||
public bool flipVertical = false;
|
||||
|
||||
/// <summary>
|
||||
/// The flip horizontal.
|
||||
/// </summary>
|
||||
public bool flipHorizontal = false;
|
||||
|
||||
/// <summary>
|
||||
/// The on inited event.
|
||||
/// </summary>
|
||||
public UnityEvent OnInitedEvent;
|
||||
|
||||
/// <summary>
|
||||
/// The on disposed event.
|
||||
/// </summary>
|
||||
public UnityEvent OnDisposedEvent;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam texture.
|
||||
/// </summary>
|
||||
WebCamTexture webCamTexture;
|
||||
|
||||
/// <summary>
|
||||
/// The web cam device.
|
||||
/// </summary>
|
||||
WebCamDevice webCamDevice;
|
||||
|
||||
/// <summary>
|
||||
/// The rgba mat.
|
||||
/// </summary>
|
||||
Mat rgbaMat;
|
||||
|
||||
/// <summary>
|
||||
/// The rotated rgba mat.
|
||||
/// </summary>
|
||||
Mat rotatedRgbaMat;
|
||||
|
||||
/// <summary>
|
||||
/// The colors.
|
||||
/// </summary>
|
||||
Color32[] colors;
|
||||
|
||||
/// <summary>
|
||||
/// The init done.
|
||||
/// </summary>
|
||||
bool initDone = false;
|
||||
|
||||
/// <summary>
|
||||
/// The screenOrientation.
|
||||
/// </summary>
|
||||
ScreenOrientation screenOrientation = ScreenOrientation.Unknown;
|
||||
|
||||
|
||||
// Use this for initialization
|
||||
void Start ()
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
// Update is called once per frame
|
||||
void Update ()
|
||||
{
|
||||
if (initDone) {
|
||||
if (screenOrientation != Screen.orientation) {
|
||||
StartCoroutine (init ());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public void Init ()
|
||||
{
|
||||
if (OnInitedEvent == null)
|
||||
OnInitedEvent = new UnityEvent ();
|
||||
if (OnDisposedEvent == null)
|
||||
OnDisposedEvent = new UnityEvent ();
|
||||
|
||||
StartCoroutine (init ());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Init the specified deviceName, requestWidth, requestHeight and requestIsFrontFacing.
|
||||
/// </summary>
|
||||
/// <param name="deviceName">Device name.</param>
|
||||
/// <param name="requestWidth">Request width.</param>
|
||||
/// <param name="requestHeight">Request height.</param>
|
||||
/// <param name="requestIsFrontFacing">If set to <c>true</c> request is front facing.</param>
|
||||
/// <param name="OnInited">On inited.</param>
|
||||
public void Init (string deviceName, int requestWidth, int requestHeight, bool requestIsFrontFacing)
|
||||
{
|
||||
this.requestDeviceName = deviceName;
|
||||
this.requestWidth = requestWidth;
|
||||
this.requestHeight = requestHeight;
|
||||
this.requestIsFrontFacing = requestIsFrontFacing;
|
||||
if (OnInitedEvent == null)
|
||||
OnInitedEvent = new UnityEvent ();
|
||||
if (OnDisposedEvent == null)
|
||||
OnDisposedEvent = new UnityEvent ();
|
||||
|
||||
StartCoroutine (init ());
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Init this instance.
|
||||
/// </summary>
|
||||
private IEnumerator init ()
|
||||
{
|
||||
if (initDone)
|
||||
Dispose ();
|
||||
|
||||
if (!String.IsNullOrEmpty (requestDeviceName)) {
|
||||
// Debug.Log ("deviceName is "+requestDeviceName);
|
||||
webCamTexture = new WebCamTexture (requestDeviceName, requestWidth, requestHeight);
|
||||
} else {
|
||||
// Debug.Log ("deviceName is null");
|
||||
// Checks how many and which cameras are available on the device
|
||||
for (int cameraIndex = 0; cameraIndex < WebCamTexture.devices.Length; cameraIndex++) {
|
||||
|
||||
if (WebCamTexture.devices [cameraIndex].isFrontFacing == requestIsFrontFacing) {
|
||||
|
||||
// Debug.Log (cameraIndex + " name " + WebCamTexture.devices [cameraIndex].name + " isFrontFacing " + WebCamTexture.devices [cameraIndex].isFrontFacing);
|
||||
|
||||
webCamDevice = WebCamTexture.devices [cameraIndex];
|
||||
|
||||
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
if (webCamTexture == null) {
|
||||
// Debug.Log ("webCamTexture is null");
|
||||
if (WebCamTexture.devices.Length > 0) {
|
||||
webCamDevice = WebCamTexture.devices [0];
|
||||
webCamTexture = new WebCamTexture (webCamDevice.name, requestWidth, requestHeight);
|
||||
} else {
|
||||
webCamTexture = new WebCamTexture (requestWidth, requestHeight);
|
||||
}
|
||||
}
|
||||
|
||||
// Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
|
||||
|
||||
|
||||
|
||||
// Starts the camera
|
||||
webCamTexture.Play ();
|
||||
|
||||
|
||||
while (true) {
|
||||
//If you want to use webcamTexture.width and webcamTexture.height on iOS, you have to wait until webcamTexture.didUpdateThisFrame == 1, otherwise these two values will be equal to 16. (http://forum.unity3d.com/threads/webcamtexture-and-error-0x0502.123922/)
|
||||
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
|
||||
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
|
||||
#else
|
||||
if (webCamTexture.didUpdateThisFrame) {
|
||||
#if UNITY_IOS && !UNITY_EDITOR && UNITY_5_2
|
||||
while (webCamTexture.width <= 16) {
|
||||
webCamTexture.GetPixels32 ();
|
||||
yield return new WaitForEndOfFrame ();
|
||||
}
|
||||
#endif
|
||||
#endif
|
||||
|
||||
Debug.Log ("name " + webCamTexture.name + " width " + webCamTexture.width + " height " + webCamTexture.height + " fps " + webCamTexture.requestedFPS);
|
||||
Debug.Log ("videoRotationAngle " + webCamTexture.videoRotationAngle + " videoVerticallyMirrored " + webCamTexture.videoVerticallyMirrored + " isFrongFacing " + webCamDevice.isFrontFacing);
|
||||
|
||||
colors = new Color32[webCamTexture.width * webCamTexture.height];
|
||||
rgbaMat = new Mat (webCamTexture.height, webCamTexture.width, CvType.CV_8UC4);
|
||||
|
||||
// Debug.Log ("Screen.orientation " + Screen.orientation);
|
||||
screenOrientation = Screen.orientation;
|
||||
// screenOrientation = ScreenOrientation.PortraitUpsideDown;
|
||||
|
||||
#if !UNITY_EDITOR && !UNITY_STANDALONE
|
||||
if (screenOrientation == ScreenOrientation.Portrait || screenOrientation == ScreenOrientation.PortraitUpsideDown) {
|
||||
rotatedRgbaMat = new Mat (webCamTexture.width, webCamTexture.height, CvType.CV_8UC4);
|
||||
}
|
||||
#endif
|
||||
|
||||
// webCamTexture.Stop ();
|
||||
|
||||
initDone = true;
|
||||
|
||||
if (OnInitedEvent != null)
|
||||
OnInitedEvent.Invoke ();
|
||||
|
||||
|
||||
break;
|
||||
} else {
|
||||
yield return 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets whether the initialization is done.
|
||||
/// </summary>
|
||||
/// <returns><c>true</c>, if inited was ised, <c>false</c> otherwise.</returns>
|
||||
public bool isInited ()
|
||||
{
|
||||
|
||||
return initDone;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Play this instance.
|
||||
/// </summary>
|
||||
public void Play ()
|
||||
{
|
||||
if (initDone)
|
||||
webCamTexture.Play ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Pause this instance.
|
||||
/// </summary>
|
||||
public void Pause ()
|
||||
{
|
||||
if (initDone)
|
||||
webCamTexture.Pause ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Stop this instance.
|
||||
/// </summary>
|
||||
public void Stop ()
|
||||
{
|
||||
if (initDone)
|
||||
webCamTexture.Stop ();
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets whether the web cam texture is playing.
|
||||
/// </summary>
|
||||
/// <returns><c>true</c>, if playing was ised, <c>false</c> otherwise.</returns>
|
||||
public bool isPlaying ()
|
||||
{
|
||||
if (!initDone)
|
||||
return false;
|
||||
return webCamTexture.isPlaying;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the web cam texture.
|
||||
/// </summary>
|
||||
/// <returns>The web cam texture.</returns>
|
||||
public WebCamTexture GetWebCamTexture ()
|
||||
{
|
||||
return webCamTexture;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the web cam device.
|
||||
/// </summary>
|
||||
/// <returns>The web cam device.</returns>
|
||||
public WebCamDevice GetWebCamDevice ()
|
||||
{
|
||||
return webCamDevice;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets whether the web cam texture updated this frame.
|
||||
/// </summary>
|
||||
/// <returns><c>true</c>, if update this frame was dided, <c>false</c> otherwise.</returns>
|
||||
public bool didUpdateThisFrame ()
|
||||
{
|
||||
if (!initDone)
|
||||
return false;
|
||||
|
||||
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
|
||||
if (webCamTexture.width > 16 && webCamTexture.height > 16) {
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
#else
|
||||
return webCamTexture.didUpdateThisFrame;
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Gets the mat.
|
||||
/// </summary>
|
||||
/// <returns>The mat.</returns>
|
||||
public Mat GetMat ()
|
||||
{
|
||||
if (!initDone || !webCamTexture.isPlaying) {
|
||||
if (rotatedRgbaMat != null) {
|
||||
return rotatedRgbaMat;
|
||||
} else {
|
||||
return rgbaMat;
|
||||
}
|
||||
}
|
||||
|
||||
Utils.webCamTextureToMat (webCamTexture, rgbaMat, colors);
|
||||
|
||||
int flipCode = int.MinValue;
|
||||
|
||||
if (webCamDevice.isFrontFacing) {
|
||||
if (webCamTexture.videoRotationAngle == 0) {
|
||||
flipCode = 1;
|
||||
} else if (webCamTexture.videoRotationAngle == 90) {
|
||||
flipCode = 0;
|
||||
}
|
||||
if (webCamTexture.videoRotationAngle == 180) {
|
||||
flipCode = 0;
|
||||
} else if (webCamTexture.videoRotationAngle == 270) {
|
||||
flipCode = 1;
|
||||
}
|
||||
} else {
|
||||
if (webCamTexture.videoRotationAngle == 180) {
|
||||
flipCode = -1;
|
||||
} else if (webCamTexture.videoRotationAngle == 270) {
|
||||
flipCode = -1;
|
||||
}
|
||||
}
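// flipCode above follows OpenCV's Core.flip convention: 0 flips around the x-axis (vertical
// flip), 1 flips around the y-axis (horizontal mirror), -1 flips around both axes, and
// int.MinValue is used as a "no flip" sentinel that the later guard (flipCode > int.MinValue) skips.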
|
||||
|
||||
if (flipVertical) {
|
||||
if (flipCode == int.MinValue) {
|
||||
flipCode = 0;
|
||||
} else if (flipCode == 0) {
|
||||
flipCode = int.MinValue;
|
||||
} else if (flipCode == 1) {
|
||||
flipCode = -1;
|
||||
} else if (flipCode == -1) {
|
||||
flipCode = 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (flipHorizontal) {
|
||||
if (flipCode == int.MinValue) {
|
||||
flipCode = 1;
|
||||
} else if (flipCode == 0) {
|
||||
flipCode = -1;
|
||||
} else if (flipCode == 1) {
|
||||
flipCode = int.MinValue;
|
||||
} else if (flipCode == -1) {
|
||||
flipCode = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (flipCode > int.MinValue) {
|
||||
Core.flip (rgbaMat, rgbaMat, flipCode);
|
||||
}
|
||||
|
||||
|
||||
if (rotatedRgbaMat != null) {
|
||||
|
||||
using (Mat transposeRgbaMat = rgbaMat.t()) {
|
||||
Core.flip (transposeRgbaMat, rotatedRgbaMat, 1);
|
||||
}
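// Transposing and then flipping around the y-axis rotates the frame 90 degrees clockwise,
// converting the landscape camera image into the portrait-oriented rotatedRgbaMat that was
// allocated in init() for Portrait / PortraitUpsideDown screen orientations.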
|
||||
|
||||
return rotatedRgbaMat;
|
||||
} else {
|
||||
return rgbaMat;
|
||||
}
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Releases all resource used by the <see cref="WebCamTextureToMatHelper"/> object.
|
||||
/// </summary>
|
||||
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamTextureToMatHelper"/>. The
|
||||
/// <see cref="Dispose"/> method leaves the <see cref="WebCamTextureToMatHelper"/> in an unusable state. After
|
||||
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamTextureToMatHelper"/> so
|
||||
/// the garbage collector can reclaim the memory that the <see cref="WebCamTextureToMatHelper"/> was occupying.</remarks>
|
||||
public void Dispose ()
|
||||
{
|
||||
initDone = false;
|
||||
|
||||
if (webCamTexture != null) {
|
||||
webCamTexture.Stop ();
|
||||
webCamTexture = null;
|
||||
}
|
||||
if (rgbaMat != null) {
|
||||
rgbaMat.Dispose ();
|
||||
rgbaMat = null;
|
||||
}
|
||||
if (rotatedRgbaMat != null) {
|
||||
rotatedRgbaMat.Dispose ();
|
||||
rotatedRgbaMat = null;
|
||||
}
|
||||
colors = null;
|
||||
|
||||
if (OnDisposedEvent != null)
|
||||
OnDisposedEvent.Invoke ();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,5 +1,5 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 9ce089e1e22469940a797356e6fb7e6a
|
||||
guid: 058ff8aa85554aa4cbe51a95c5ed9ccb
|
||||
folderAsset: yes
|
||||
DefaultImporter:
|
||||
userData:
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 93630434261d96947bcf7585201da5d8
|
||||
timeCreated: 1467788781
|
||||
licenseType: Pro
|
||||
licenseType: Store
|
||||
MovieImporter:
|
||||
serializedVersion: 1
|
||||
quality: .5
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: e5e282d2065baf7408f080371d55511f
|
||||
timeCreated: 1468559965
|
||||
licenseType: Pro
|
||||
licenseType: Store
|
||||
TextScriptImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
fileFormatVersion: 2
|
||||
guid: 8104e65283de4d544acdad30db854ad2
|
||||
timeCreated: 1467795760
|
||||
licenseType: Pro
|
||||
licenseType: Store
|
||||
DefaultImporter:
|
||||
userData:
|
||||
assetBundleName:
|
||||
|
|