Skip to content

Instantly share code, notes, and snippets.

Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save toshinoritakata/59158fc672cf85ff35115c352eb2ee40 to your computer and use it in GitHub Desktop.
Save toshinoritakata/59158fc672cf85ff35115c352eb2ee40 to your computer and use it in GitHub Desktop.
cgworld3_2_Face
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
namespace OpenCVForUnityExample
{
/// <summary>
/// Face Detection WebCamTexture Example
/// An example of detecting human face in a image of WebCamTexture using the CascadeClassifier class.
/// http://docs.opencv.org/3.2.0/db/d28/tutorial_cascade_classifier.html
/// </summary>
[RequireComponent (typeof(WebCamTextureToMatHelper))]
public class FaceDetectionWebCamTextureExample : MonoBehaviour
{
[SerializeField] private UnityEngine.UI.Image[] _mask = null; // maskを配列にする
[SerializeField] private Vector2 _offset = new Vector2(320, 240);
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The faces.
/// </summary>
MatOfRect faces;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The FPS monitor.
/// </summary>
FpsMonitor fpsMonitor;
#if UNITY_ANDROID && !UNITY_EDITOR
float rearCameraRequestedFPS;
#endif
#if UNITY_WEBGL && !UNITY_EDITOR
Stack<IEnumerator> coroutines = new Stack<IEnumerator> ();
#endif
#region 顔トラッキング
[SerializeField] private float _searchDist = 100;
class DetectFace
{
public Vector2 Pos { get; set; }
public bool IsTracking { get; set; }
public float LifeTime { get; set; }
public DetectFace()
{
Pos = Vector2.positiveInfinity;
IsTracking = false;
LifeTime = 0;
}
public void Update()
{
}
}
private DetectFace[] _detectFaces = null;
private int _detectFacesMaxNum = 5;
private int _detectedFacesNum = 0;
#endregion
// Use this for initialization
void Start ()
{
_detectFacesMaxNum = _mask.Length;
_detectFaces = new DetectFace[_detectFacesMaxNum]; // 5人まで対応
for (int i = 0; i < _detectFacesMaxNum; i++)
{
_detectFaces[i] = new DetectFace();
}
fpsMonitor = GetComponent<FpsMonitor> ();
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
#if UNITY_WEBGL && !UNITY_EDITOR
var getFilePath_Coroutine = Utils.getFilePathAsync ("lbpcascade_frontalface.xml", (result) => {
coroutines.Clear ();
cascade = new CascadeClassifier ();
cascade.load (result);
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
webCamTextureToMatHelper.Initialize ();
});
coroutines.Push (getFilePath_Coroutine);
StartCoroutine (getFilePath_Coroutine);
#else
cascade = new CascadeClassifier ();
cascade.load (Utils.getFilePath ("lbpcascade_frontalface.xml"));
// cascade = new CascadeClassifier ();
// cascade.load (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
#if !UNITY_WSA_10_0
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
#endif
#if UNITY_ANDROID && !UNITY_EDITOR
// Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, pixel 2)
// https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
// https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
if (webCamTextureToMatHelper.requestedIsFrontFacing) {
webCamTextureToMatHelper.requestedFPS = 15;
webCamTextureToMatHelper.Initialize ();
} else {
webCamTextureToMatHelper.Initialize ();
}
#else
webCamTextureToMatHelper.Initialize ();
#endif
#endif
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized ()
{
Debug.Log ("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
if (fpsMonitor != null){
fpsMonitor.Add ("width", webCamTextureMat.width ().ToString());
fpsMonitor.Add ("height", webCamTextureMat.height ().ToString());
fpsMonitor.Add ("orientation", Screen.orientation.ToString());
}
float width = webCamTextureMat.width ();
float height = webCamTextureMat.height ();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
} else {
Camera.main.orthographicSize = height / 2;
}
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
faces = new MatOfRect ();
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
if (grayMat != null)
grayMat.Dispose ();
if (texture != null) {
Texture2D.Destroy(texture);
texture = null;
}
if (faces != null)
faces.Dispose ();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
Imgproc.equalizeHist(grayMat, grayMat);
// 顔検出を行う
if (cascade != null)
cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
// 検出した顔位置は何回も使うのでリストとして保持しておく
OpenCVForUnity.Rect[] rects = faces.toArray();
List<Vector2> detectedPos = new List<Vector2>();
foreach (var rect in rects)
{
var cx = rect.x + rect.width / 2f;
var cy = rect.y + rect.height / 2f;
detectedPos.Add(new Vector2(cx, -cy));
}
// すでに検出している顔位置を新しい検出位置で更新する
// トラッキングされている顔が無い場合ここでは何もしない
// 新しくトラッキングする顔を追加するためにトラッキングに使われた検出点を保持しておく
List<int> useIds = new List<int>();
for (int i = 0; i < _detectFaces.Length; i++)
{
if (_detectFaces[i].IsTracking == false) continue;
var closeId = -1;
var minDist = Mathf.Infinity;
for (int n = 0; n < detectedPos.Count; n++)
{
if (useIds.Contains(n)) continue;
var dist = (_detectFaces[i].Pos - detectedPos[n]).magnitude;
if (minDist > dist && dist < _searchDist)
{
minDist = dist;
closeId = n;
}
}
var df = _detectFaces[i];
if (closeId > -1)
{
df.Pos = Vector2.Lerp(df.Pos, detectedPos[closeId], 0.8f);
useIds.Add(closeId);
}
else
{
df.LifeTime -= Time.deltaTime;
df.IsTracking = (df.LifeTime > 0f);
}
}
// トラッキングに使用されていない検出点のリストを作成する
// このリストが新たにトラッキングを行うための座標になる
List<int> freeIds = new List<int>();
for (int n = 0; n < detectedPos.Count; n++)
{
if (useIds.Contains(n) == false)
{
freeIds.Add(n);
}
}
// add
foreach (var fi in freeIds)
{
foreach (var df in _detectFaces)
{
if (df.IsTracking == false)
{
df.Pos = detectedPos[fi];
df.IsTracking = true;
df.LifeTime = 1f;
break;
}
}
}
for (int i = 0; i < _detectFacesMaxNum; i++)
{
_mask[i].enabled = false;
var df = _detectFaces[i];
if (df.IsTracking == false) continue;
_mask[i].enabled = true;
_mask[i].transform.localPosition = df.Pos + new Vector2(-_offset.x, +_offset.y);
//Imgproc.rectangle (rgbaMat, new Point (rects [i].x, rects [i].y),
// new Point (rects [i].x + rects [i].width, rects [i].y + rects [i].height), new Scalar (255, 0, 0, 255), 2);
}
//Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Utils.fastMatToTexture2D (rgbaMat, texture);
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy ()
{
webCamTextureToMatHelper.Dispose ();
if (cascade != null)
cascade.Dispose ();
#if UNITY_WEBGL && !UNITY_EDITOR
foreach (var coroutine in coroutines) {
StopCoroutine (coroutine);
((IDisposable)coroutine).Dispose ();
}
#endif
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick ()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene ("OpenCVForUnityExample");
#else
Application.LoadLevel ("OpenCVForUnityExample");
#endif
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick ()
{
webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick ()
{
webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick ()
{
webCamTextureToMatHelper.Stop ();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick ()
{
#if UNITY_ANDROID && !UNITY_EDITOR
if (!webCamTextureToMatHelper.IsFrontFacing ()) {
rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
webCamTextureToMatHelper.Initialize (!webCamTextureToMatHelper.IsFrontFacing (), 15, webCamTextureToMatHelper.rotate90Degree);
} else {
webCamTextureToMatHelper.Initialize (!webCamTextureToMatHelper.IsFrontFacing (), rearCameraRequestedFPS, webCamTextureToMatHelper.rotate90Degree);
}
#else
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing ();
#endif
}
}
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment