Series Part 3: Face detection, multiple-face version
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
namespace OpenCVForUnityExample
{
/// <summary>
/// Face Detection WebCamTexture Example
/// An example of detecting human faces in an image from WebCamTexture using the CascadeClassifier class.
/// http://docs.opencv.org/3.2.0/db/d28/tutorial_cascade_classifier.html
/// </summary>
[RequireComponent (typeof(WebCamTextureToMatHelper))]
public class FaceDetectionWebCamTextureExample : MonoBehaviour
{
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The faces.
/// </summary>
MatOfRect faces;
/// <summary>
/// The webcam texture to mat helper.
/// </summary>
WebCamTextureToMatHelper webCamTextureToMatHelper;
/// <summary>
/// The FPS monitor.
/// </summary>
FpsMonitor fpsMonitor;
#if UNITY_ANDROID && !UNITY_EDITOR
float rearCameraRequestedFPS;
#endif
#if UNITY_WEBGL && !UNITY_EDITOR
Stack<IEnumerator> coroutines = new Stack<IEnumerator> ();
#endif
#region Face tracking
[SerializeField] private UnityEngine.UI.Image[] _mask = null; // mask is now an array, one UI Image per tracked face
[SerializeField] private Vector2 _offset = new Vector2(320, 240);
[SerializeField] private float _searchDist = 100;
[SerializeField] private float _smooth = 0.8f;
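// Per-face tracking record. Lifecycle, as implemented in the Update loop below:
// None -> Detecting (a fresh detection claims this slot)
// Detecting -> Tracking (the detection survives for DetectTime seconds)
// Detecting/Tracking -> None (no nearby detection for LostTime seconds)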
class DetectFace
{
public enum State { Detecting, Tracking, None }
public State TrackingState { get; set; }
public Vector2 Pos { get; set; }
public float LostTime { get; set; }
public float DetectTime { get; set; }
public DetectFace()
{
Pos = Vector2.positiveInfinity;
TrackingState = State.None;
LostTime = 0;
DetectTime = 0;
}
}
private DetectFace[] _detectFaces = null;
#endregion
// Use this for initialization
void Start ()
{
#region Face tracking
_detectFaces = new DetectFace[_mask.Length];
for (int i = 0; i < _detectFaces.Length; i++)
{
_detectFaces[i] = new DetectFace();
}
#endregion
fpsMonitor = GetComponent<FpsMonitor> ();
webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper> ();
#if UNITY_WEBGL && !UNITY_EDITOR
var getFilePath_Coroutine = Utils.getFilePathAsync ("lbpcascade_frontalface.xml", (result) => {
coroutines.Clear ();
cascade = new CascadeClassifier ();
cascade.load (result);
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
webCamTextureToMatHelper.Initialize ();
});
coroutines.Push (getFilePath_Coroutine);
StartCoroutine (getFilePath_Coroutine);
#else
cascade = new CascadeClassifier ();
cascade.load (Utils.getFilePath ("lbpcascade_frontalface.xml"));
// cascade = new CascadeClassifier ();
// cascade.load (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
#if !UNITY_WSA_10_0
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
#endif
#if UNITY_ANDROID && !UNITY_EDITOR
// Set the requestedFPS parameter to avoid the problem of the WebCamTexture image becoming low light on some Android devices. (Pixel, pixel 2)
// https://forum.unity.com/threads/android-webcamtexture-in-low-light-only-some-models.520656/
// https://forum.unity.com/threads/released-opencv-for-unity.277080/page-33#post-3445178
rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
if (webCamTextureToMatHelper.requestedIsFrontFacing) {
webCamTextureToMatHelper.requestedFPS = 15;
webCamTextureToMatHelper.Initialize ();
} else {
webCamTextureToMatHelper.Initialize ();
}
#else
webCamTextureToMatHelper.Initialize ();
#endif
#endif
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized ()
{
Debug.Log ("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamTextureToMatHelper.GetMat ();
texture = new Texture2D (webCamTextureMat.cols (), webCamTextureMat.rows (), TextureFormat.RGBA32, false);
gameObject.GetComponent<Renderer> ().material.mainTexture = texture;
gameObject.transform.localScale = new Vector3 (webCamTextureMat.cols (), webCamTextureMat.rows (), 1);
Debug.Log ("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
if (fpsMonitor != null){
fpsMonitor.Add ("width", webCamTextureMat.width ().ToString());
fpsMonitor.Add ("height", webCamTextureMat.height ().ToString());
fpsMonitor.Add ("orientation", Screen.orientation.ToString());
}
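// Fit the orthographic camera to the video quad (scaled above to its pixel
// dimensions): when width is the binding constraint, match the quad's width to
// the screen, otherwise match its height, so the frame fills the view without
// distortion.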
float width = webCamTextureMat.width ();
float height = webCamTextureMat.height ();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale) {
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
} else {
Camera.main.orthographicSize = height / 2;
}
grayMat = new Mat (webCamTextureMat.rows (), webCamTextureMat.cols (), CvType.CV_8UC1);
faces = new MatOfRect ();
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed ()
{
Debug.Log ("OnWebCamTextureToMatHelperDisposed");
if (grayMat != null)
grayMat.Dispose ();
if (texture != null) {
Texture2D.Destroy(texture);
texture = null;
}
if (faces != null)
faces.Dispose ();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred (WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log ("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update ()
{
if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame()) {
Mat rgbaMat = webCamTextureToMatHelper.GetMat();
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
Imgproc.equalizeHist(grayMat, grayMat);
// Run face detection
if (cascade != null)
{
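// detectMultiScale arguments: scaleFactor 1.1, minNeighbors 2, flags 2
// (the value of the legacy CV_HAAR_SCALE_IMAGE constant), and a minimum
// face size of 20% of the frame so small background faces are ignored;
// the empty Size() leaves the maximum size unbounded.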
cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
}
#region Face tracking
// The detected face positions are reused several times below, so keep them in a list
OpenCVForUnity.Rect[] rects = faces.toArray();
List<Vector2> detectedPos = new List<Vector2>();
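// Rect centers come from OpenCV's top-left-origin, y-down image coordinates;
// the y value is negated below to match Unity's y-up convention.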
foreach (var rect in rects)
{
var cx = rect.x + rect.width / 2f;
var cy = rect.y + rect.height / 2f;
detectedPos.Add(new Vector2(cx, -cy));
}
// Update already-tracked face positions with the new detections.
// If no face is currently being tracked, nothing happens here.
List<int> useIds = new List<int>();
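// useIds records detections already claimed by a tracker, so two trackers
// can never lock onto the same detection (greedy nearest-neighbour matching).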
for (int i = 0; i < _detectFaces.Length; i++)
{
// Skip faces that are not being tracked
if (_detectFaces[i].TrackingState == DetectFace.State.None)
continue;
// For this tracked face, find the closest new detection point.
// Detections farther away than _searchDist are ignored.
var closeId = -1;
var minDist = Mathf.Infinity;
for (int n = 0; n < detectedPos.Count; n++)
{
if (useIds.Contains(n)) continue;
var dist = (_detectFaces[i].Pos - detectedPos[n]).magnitude;
if (minDist > dist && dist < _searchDist)
{
minDist = dist;
closeId = n;
}
}
var df = _detectFaces[i];
if (closeId > -1)
{
useIds.Add(closeId);
df.LostTime = 1;
// Smooth out detection noise
df.Pos = Vector2.Lerp(df.Pos, detectedPos[closeId], _smooth);
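// With the default _smooth of 0.8, each matched frame moves the tracked
// position 80% of the way toward the new detection; smaller values smooth
// more but lag more.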
// Treat the face as confirmed once it has been detected continuously for long enough
if (df.TrackingState == DetectFace.State.Detecting)
{
df.DetectTime -= Time.deltaTime;
if (df.DetectTime < 0)
{
df.TrackingState = DetectFace.State.Tracking;
}
}
}
else
{
// No nearby detection was found, so decrease LostTime; once it reaches 0 the face is no longer tracked
df.LostTime -= Time.deltaTime;
if (df.LostTime < 0f)
{
df.TrackingState = DetectFace.State.None;
}
}
}
// Build a list of detection points not yet "used" by any tracker.
// These coordinates become the seeds for new tracks.
List<int> freeIds = new List<int>();
for (int n = 0; n < detectedPos.Count; n++)
{
if (!useIds.Contains(n))
{
freeIds.Add(n);
}
}
// Start tracking at the newly detected positions
foreach (var fi in freeIds)
{
foreach (var df in _detectFaces)
{
if (df.TrackingState == DetectFace.State.None)
{
df.TrackingState = DetectFace.State.Detecting;
df.DetectTime = 1;
df.Pos = detectedPos[fi];
break;
}
}
}
// Draw the detected rectangles
foreach (var rect in rects)
{
Imgproc.rectangle (rgbaMat,
new Point (rect.x, rect.y),
new Point (rect.x + rect.width, rect.y + rect.height), new Scalar (255, 0, 0, 255), 2);
}
// Display the face masks
for (int i = 0; i < _detectFaces.Length; i++)
{
_mask[i].enabled = false;
var df = _detectFaces[i];
if (df.TrackingState != DetectFace.State.Tracking) continue;
_mask[i].enabled = true;
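// Shift the top-left-origin pixel coordinates so they are centered on the
// frame; the default _offset of (320, 240) assumes a 640x480 webcam image.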
_mask[i].transform.localPosition = df.Pos + new Vector2(-_offset.x, +_offset.y);
}
#endregion
//Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Utils.fastMatToTexture2D (rgbaMat, texture);
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy ()
{
webCamTextureToMatHelper.Dispose ();
if (cascade != null)
cascade.Dispose ();
#if UNITY_WEBGL && !UNITY_EDITOR
foreach (var coroutine in coroutines) {
StopCoroutine (coroutine);
((IDisposable)coroutine).Dispose ();
}
#endif
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick ()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene ("OpenCVForUnityExample");
#else
Application.LoadLevel ("OpenCVForUnityExample");
#endif
}
/// <summary>
/// Raises the play button click event.
/// </summary>
public void OnPlayButtonClick ()
{
webCamTextureToMatHelper.Play ();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick ()
{
webCamTextureToMatHelper.Pause ();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick ()
{
webCamTextureToMatHelper.Stop ();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick ()
{
#if UNITY_ANDROID && !UNITY_EDITOR
if (!webCamTextureToMatHelper.IsFrontFacing ()) {
rearCameraRequestedFPS = webCamTextureToMatHelper.requestedFPS;
webCamTextureToMatHelper.Initialize (!webCamTextureToMatHelper.IsFrontFacing (), 15, webCamTextureToMatHelper.rotate90Degree);
} else {
webCamTextureToMatHelper.Initialize (!webCamTextureToMatHelper.IsFrontFacing (), rearCameraRequestedFPS, webCamTextureToMatHelper.rotate90Degree);
}
#else
webCamTextureToMatHelper.requestedIsFrontFacing = !webCamTextureToMatHelper.IsFrontFacing ();
#endif
}
}
}
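// Setup sketch (assumed usage, read from the code rather than stated in the gist):
// attach this component to a Quad that also carries WebCamTextureToMatHelper
// (and optionally FpsMonitor), wire the three OnWebCamTextureToMatHelper*
// methods to the helper's events in the Inspector, assign one UI Image per face
// you want to track to _mask, and place lbpcascade_frontalface.xml in the
// Assets/StreamingAssets/ folder.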