@yosun · Last active June 10, 2018
OpenCVForUnity + Arcolib: face detection on a shared webcam feed. Three scripts: FaceDetectionWebCamTextureArco (the cascade-based detector), InputSourceController (input selection and camera rig setup), and WebCamInputInjectsToArco (a WebCamTexture input that exposes frames as OpenCV Mats).
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System;
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
using UnityEngine.SceneManagement;
#endif
using OpenCVForUnity;
using Vexpot.Arcolib.Integration;
namespace OpenCVForUnityExample
{
/// <summary>
/// Face Detection WebCamTexture Example
/// An example of detecting a human face in a WebCamTexture image using the CascadeClassifier class.
/// http://docs.opencv.org/3.2.0/db/d28/tutorial_cascade_classifier.html
/// </summary>
public class FaceDetectionWebCamTextureArco : MonoBehaviour
{
/// <summary>
/// The gray mat.
/// </summary>
Mat grayMat;
public GameObject screen;
/// <summary>
/// The texture.
/// </summary>
Texture2D texture;
/// <summary>
/// The cascade.
/// </summary>
CascadeClassifier cascade;
/// <summary>
/// The faces.
/// </summary>
MatOfRect faces;
/// <summary>
/// The webcam input that injects frames into Arcolib and exposes them as OpenCV Mats.
/// </summary>
WebCamInputInjectsToArco webCamInjectsToArco;
public InputSourceController isc;
#if UNITY_WEBGL && !UNITY_EDITOR
Stack<IEnumerator> coroutines = new Stack<IEnumerator> ();
#endif
bool inited = false;
// Use this for initialization
public void Init()
{
webCamInjectsToArco = (WebCamInputInjectsToArco) isc._input;
#if UNITY_WEBGL && !UNITY_EDITOR
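// On WebGL, StreamingAssets cannot be read synchronously, so the cascade file
// is fetched with a coroutine; the stack lets OnDestroy stop and dispose it.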
var getFilePath_Coroutine = Utils.getFilePathAsync ("lbpcascade_frontalface.xml", (result) => {
coroutines.Clear ();
cascade = new CascadeClassifier ();
cascade.load (result);
if (cascade.empty ()) {
Debug.LogError ("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
webCamInjectsToArco.Open ();
});
coroutines.Push (getFilePath_Coroutine);
StartCoroutine (getFilePath_Coroutine);
#else
cascade = new CascadeClassifier();
cascade.load(Utils.getFilePath("lbpcascade_frontalface.xml"));
// cascade = new CascadeClassifier ();
// cascade.load (Utils.getFilePath ("haarcascade_frontalface_alt.xml"));
#if !UNITY_WSA_10_0
if (cascade.empty())
{
Debug.LogError("cascade file is not loaded.Please copy from “OpenCVForUnity/StreamingAssets/” to “Assets/StreamingAssets/” folder. ");
}
#endif
webCamInjectsToArco.Open();
#endif
inited = true;
// webCamInjectsToArco.facearco.OnWebCamTextureToMatHelperInitialized();
print("Inited");
OnWebCamTextureToMatHelperInitialized();
}
/// <summary>
/// Raises the web cam texture to mat helper initialized event.
/// </summary>
public void OnWebCamTextureToMatHelperInitialized()
{
Debug.Log("OnWebCamTextureToMatHelperInitialized");
Mat webCamTextureMat = webCamInjectsToArco.GetMat();
if (webCamTextureMat != null)
{
texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
screen.GetComponent<Renderer>().material.mainTexture = texture;
//gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
/* float width = webCamTextureMat.width();
float height = webCamTextureMat.height();
float widthScale = (float)Screen.width / width;
float heightScale = (float)Screen.height / height;
if (widthScale < heightScale)
{
Camera.main.orthographicSize = (width * (float)Screen.height / (float)Screen.width) / 2;
}
else
{
Camera.main.orthographicSize = height / 2;
}*/
grayMat = new Mat(webCamTextureMat.rows(), webCamTextureMat.cols(), CvType.CV_8UC1);
faces = new MatOfRect();
}
else print("webCamTextureMat is null");
}
/// <summary>
/// Raises the web cam texture to mat helper disposed event.
/// </summary>
public void OnWebCamTextureToMatHelperDisposed()
{
Debug.Log("OnWebCamTextureToMatHelperDisposed");
if (grayMat != null)
grayMat.Dispose();
if (faces != null)
faces.Dispose();
}
/// <summary>
/// Raises the web cam texture to mat helper error occurred event.
/// </summary>
/// <param name="errorCode">Error code.</param>
public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
{
Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
}
// Update is called once per frame
void Update()
{
if (!inited) return;
if (webCamInjectsToArco.IsPlaying() && webCamInjectsToArco.DidUpdateThisFrame())
{
Mat rgbaMat = webCamInjectsToArco.GetMat();
Imgproc.cvtColor(rgbaMat, grayMat, Imgproc.COLOR_RGBA2GRAY);
Imgproc.equalizeHist(grayMat, grayMat);
if (cascade != null)
cascade.detectMultiScale(grayMat, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
new Size(grayMat.cols() * 0.2, grayMat.rows() * 0.2), new Size());
OpenCVForUnity.Rect[] rects = faces.toArray();
for (int i = 0; i < rects.Length; i++)
{
// Debug.Log ("detect faces " + rects [i]);
Point point0 = new Point(rects[i].x, rects[i].y);
Imgproc.rectangle(rgbaMat, point0, new Point(rects[i].x + rects[i].width, rects[i].y + rects[i].height), new Scalar(255, 255, 255, 255), 2);
Imgproc.putText(rgbaMat, "Engineer", new Point( point0.x-10,point0.y-10), 2, 1, new Scalar(255, 0, 0, 255), 1);
}
// Imgproc.putText (rgbaMat, "W:" + rgbaMat.width () + " H:" + rgbaMat.height () + " SO:" + Screen.orientation, new Point (5, rgbaMat.rows () - 10), Core.FONT_HERSHEY_SIMPLEX, 1.0, new Scalar (255, 255, 255, 255), 2, Imgproc.LINE_AA, false);
Utils.matToTexture2D(rgbaMat, texture, webCamInjectsToArco.GetBufferColors());
}
}
/// <summary>
/// Raises the destroy event.
/// </summary>
void OnDestroy()
{
webCamInjectsToArco.Dispose();
if (cascade != null)
cascade.Dispose();
#if UNITY_WEBGL && !UNITY_EDITOR
foreach (var coroutine in coroutines) {
StopCoroutine (coroutine);
((IDisposable)coroutine).Dispose ();
}
#endif
}
/// <summary>
/// Raises the back button click event.
/// </summary>
public void OnBackButtonClick()
{
#if UNITY_5_3 || UNITY_5_3_OR_NEWER
SceneManager.LoadScene("OpenCVForUnityExample");
#else
Application.LoadLevel ("OpenCVForUnityExample");
#endif
}
/// <summary>
/// Raises the play button click event.
/// </summary>
/* public void OnPlayButtonClick()
{
webCamTextureToMatHelper.Play();
}
/// <summary>
/// Raises the pause button click event.
/// </summary>
public void OnPauseButtonClick()
{
webCamTextureToMatHelper.Pause();
}
/// <summary>
/// Raises the stop button click event.
/// </summary>
public void OnStopButtonClick()
{
webCamTextureToMatHelper.Stop();
}
/// <summary>
/// Raises the change camera button click event.
/// </summary>
public void OnChangeCameraButtonClick()
{
webCamTextureToMatHelper.Initialize(null, webCamTextureToMatHelper.requestedWidth, webCamTextureToMatHelper.requestedHeight, !webCamTextureToMatHelper.requestedIsFrontFacing);
}
*/
}
}
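For reference, a minimal sketch that isolates the cascade-detection core used in Update() above, using only the OpenCVForUnity calls that already appear in this gist; FaceDetectSketch and DetectFaces are hypothetical names, not part of the gist.

using OpenCVForUnity;
public static class FaceDetectSketch
{
    // Same grayscale + equalizeHist + detectMultiScale pipeline as Update().
    public static OpenCVForUnity.Rect[] DetectFaces(Mat rgbaMat, CascadeClassifier cascade)
    {
        using (Mat gray = new Mat())
        using (MatOfRect faces = new MatOfRect())
        {
            Imgproc.cvtColor(rgbaMat, gray, Imgproc.COLOR_RGBA2GRAY);
            Imgproc.equalizeHist(gray, gray);
            // scaleFactor 1.1, minNeighbors 2, minimum face size = 20% of the frame.
            cascade.detectMultiScale(gray, faces, 1.1, 2, 2,
                new Size(gray.cols() * 0.2, gray.rows() * 0.2), new Size());
            return faces.toArray();
        }
    }
}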
using UnityEngine;
using System.Collections;
using System;
using OpenCVForUnityExample;
namespace Vexpot.Arcolib.Integration
{
/// <summary>
/// All implemented input source types shown in the editor.
/// </summary>
/// <remarks>
/// Here you can define customized input sources to be listed in the
/// editor's input source list. This is only needed if you want to show
/// your input type in Editor mode.
/// </remarks>
public enum InputSourceType
{
/// <summary>
/// Camera capture implementation based on Unity <see cref="WebCamTexture"/>.
/// </summary>
WebcamTextureInput,
/// <summary>
/// Native camera capture for each supported platform.
/// </summary>
NativeCameraInput,
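/// <summary>
/// Custom capture that also exposes frames as OpenCV Mats (see <see cref="WebCamInputInjectsToArco"/>).
/// </summary>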
WebCamInputInjectsToArco
}
/// <summary>
/// Custom video and image presets. Only used in Editor mode.
/// </summary>
public enum VideoMode
{
/// <summary>
/// User defined resolution and frame rate.
/// </summary>
CustomResolution,
/// <summary>
/// 640x480@30fps
/// </summary>
Resolution_640x480_30fps,
/// <summary>
/// 960x720@30fps
/// </summary>
Resolution_960x720_30fps,
/// <summary>
/// 1280x720@30fps
/// </summary>
Resolution_1280x720_30fps,
/// <summary>
/// 1920x1080@30fps
/// </summary>
Resolution_1920x1080_30fps
}
/// <summary>
/// Delegate used to notify when the selected input is ready to be used.
/// </summary>
/// <param name="input"></param>
/// <param name="calibration"></param>
public delegate void OnInputReady(InputSource input, DeviceCalibration calibration);
/// <summary>
/// This class shares a single input source among multiple trackers
/// in the scene (for efficiency).
/// </summary>
[DisallowMultipleComponent]
[System.Serializable]
public class InputSourceController : MonoBehaviour
{
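/// <summary>
/// Shows the camera image on the rig's screen quad when enabled.
/// </summary>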
public bool includeScreen = true;
/// <summary>
/// The current device position.
/// </summary>
public CameraPosition devicePosition = CameraPosition.BackFace;
/// <summary>
/// The current device index.
/// </summary>
public int deviceIndex = 0;
/// <summary>
/// The <see cref="InputSource"/> width.
/// </summary>
public int inputWidth = 640;
/// <summary>
/// The <see cref="InputSource"/> height.
/// </summary>
public int inputHeight = 480;
/// <summary>
/// The capture frame rate.
/// </summary>
public int frameRate = 30;
/// <summary>
/// The video mode.
/// </summary>
public VideoMode videoMode = VideoMode.Resolution_640x480_30fps;
/// <summary>
/// The input type.
/// </summary>
public InputSourceType inputType = InputSourceType.WebcamTextureInput;
/// <summary>
/// Delegate called when selected input is ready.
/// </summary>
public event OnInputReady onInputReady;
/// <summary>
/// Opens the input automatically on Start when enabled.
/// </summary>
public bool autoStart = true;
/// <summary>
/// Rotates the rig cameras to match the input's reported rotation when enabled.
/// </summary>
public bool enableAutoOrientation = true;
/// <summary>
/// The active input source.
/// </summary>
public InputSource _input = null;
/// <summary>
/// The quad that displays the camera image.
/// </summary>
private GameObject _screen;
/// <summary>
/// The rig's perspective camera.
/// </summary>
private Camera _perspectiveCamera;
/// <summary>
/// The rig's orthographic camera.
/// </summary>
private Camera _orthographicCamera;
/// <summary>
/// The transform of the screen quad.
/// </summary>
private Transform _screenTransform;
/// <summary>
/// The calibration computed for the current input resolution.
/// </summary>
private DeviceCalibration _calibration;
public FaceDetectionWebCamTextureArco fdwta;
/// <summary>
/// Opens the selected input source and sets up the camera rig.
/// </summary>
public void Open()
{
if (_input == null)
{
#if UNITY_EDITOR || UNITY_STANDALONE
CreateInputSource((CameraPosition)deviceIndex, inputWidth, inputHeight, frameRate);
#else
CreateInputSource(devicePosition, inputWidth, inputHeight, frameRate);
#endif
StartCoroutine(SetupCameraRig());
}
}
/// <summary>
/// Closes and releases the current input source.
/// </summary>
public void Close()
{
if (_input != null)
{
_input.Close();
_input = null;
}
}
/// <summary>
/// The current input source.
/// </summary>
public InputSource currentInput
{
get { return _input; }
}
/// <summary>
/// The current device calibration.
/// </summary>
public DeviceCalibration currentCalibration
{
get { return _calibration; }
}
/// <summary>
/// Creates the input source selected by <see cref="inputType"/>.
/// </summary>
/// <param name="devicePosition">The camera position to open.</param>
/// <param name="requestedWidth">The requested capture width.</param>
/// <param name="requestedHeight">The requested capture height.</param>
/// <param name="requestedFps">The requested capture frame rate.</param>
void CreateInputSource(CameraPosition devicePosition, int requestedWidth, int requestedHeight, int requestedFps)
{
switch (inputType)
{
case InputSourceType.WebcamTextureInput:
_input = new WebcamTextureInput(devicePosition, requestedWidth, requestedHeight, requestedFps);
break;
case InputSourceType.NativeCameraInput:
_input = new NativeCameraInput(devicePosition, (uint)requestedWidth, (uint)requestedHeight);
break;
case InputSourceType.WebCamInputInjectsToArco:
_input = new WebCamInputInjectsToArco(devicePosition, requestedWidth, requestedHeight, requestedFps);
break;
default:
_input = new WebcamTextureInput(devicePosition, requestedWidth, requestedHeight, requestedFps);
inputType = InputSourceType.WebcamTextureInput;
break;
}
_input.Open();
}
/// <summary>
/// Grabs a new frame each update and keeps the rig oriented to the input.
/// </summary>
void Update()
{
if (_input != null)
{
_input.GrabFrame();
if (enableAutoOrientation)
{
int rot = _input.rotation;
if (_orthographicCamera)
_orthographicCamera.transform.localEulerAngles = new Vector3(0, 0, rot);
if (_perspectiveCamera)
_perspectiveCamera.transform.localEulerAngles = new Vector3(0, 0, rot);
}
if (_screenTransform)
{
int orientedHeight = !_input.verticallyMirrored ? _input.height : -(_input.height);
_screenTransform.localScale = new Vector3(_input.width, orientedHeight, 1);
}
}
}
/// <summary>
/// Prints the current input rotation on screen.
/// </summary>
void OnGUI()
{
// Prints the input rotation on screen.
int screenW = Screen.width;
int screenH = Screen.height;
if(_input != null)
GUI.Label(new Rect(20,20,screenW, screenH), _input.rotation.ToString());
}
void Awake()
{
fdwta = GetComponent<FaceDetectionWebCamTextureArco>();
}
/// <summary>
/// Opens the input on startup when <see cref="autoStart"/> is enabled.
/// </summary>
void Start()
{
if (autoStart)
Open();
}
/// <summary>
/// Closes the input when the controller is destroyed.
/// </summary>
void OnDestroy()
{
Close();
}
/// <summary>
/// Waits until the input is ready, then configures the cameras, screen,
/// and calibration, and notifies listeners.
/// </summary>
/// <returns>The coroutine enumerator.</returns>
private IEnumerator SetupCameraRig()
{
while (_input == null || _input.isReady == false)
{
yield return 0;
}
if (inputWidth != _input.width || inputHeight != _input.height)
{
inputWidth = _input.width;
inputHeight = _input.height;
videoMode = VideoMode.CustomResolution;
}
_calibration = new DeviceCalibration(inputWidth, inputHeight);
var perspectiveCameraTransform = transform.Find("PerspectiveCamera");
if (perspectiveCameraTransform)
{
ResetTransformToDefault(perspectiveCameraTransform);
_perspectiveCamera = perspectiveCameraTransform.gameObject.GetComponent<Camera>();
if (_perspectiveCamera)
_perspectiveCamera.fieldOfView = _calibration.verticalFieldOfView;
}
var orthographicCameraTransform = transform.Find("OrthographicCamera");
if (orthographicCameraTransform)
{
_orthographicCamera = orthographicCameraTransform.gameObject.GetComponent<Camera>();
if (_orthographicCamera)
_orthographicCamera.orthographicSize = _input.height / 2;
}
_screenTransform = transform.Find("Screen");
if (_screenTransform)
{
_screenTransform.localEulerAngles = new Vector3(0, 0, 0);
_screen = _screenTransform.gameObject;
if (_screen && includeScreen)
{
MeshRenderer meshRenderer = _screen.GetComponent<MeshRenderer>();
meshRenderer.material.mainTexture = _input.texture;
}
}
if (onInputReady != null)
onInputReady(_input, _calibration);
if(fdwta!=null)
fdwta.Init();
}
/// <summary>
/// Resets a transform to identity position, rotation, and unit scale.
/// </summary>
/// <param name="t">The transform to reset.</param>
private void ResetTransformToDefault(Transform t)
{
t.position = Vector3.zero;
t.rotation = Quaternion.identity;
t.localScale = new Vector3(1, 1, 1);
}
}
}
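A minimal consumer sketch for the onInputReady event, assuming the controller sits on the same GameObject; MyTracker is a hypothetical component name, not part of Arcolib.

using UnityEngine;
using Vexpot.Arcolib.Integration;

public class MyTracker : MonoBehaviour
{
    void Start()
    {
        var controller = GetComponent<InputSourceController>();
        // SetupCameraRig() raises this once the input reports isReady.
        controller.onInputReady += (input, calibration) =>
        {
            Debug.Log("Input ready: " + input.width + "x" + input.height);
        };
    }
}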
using System;
using UnityEngine;
using OpenCVForUnity;
namespace Vexpot.Arcolib.Integration
{
/// <summary>
/// Unity's <see cref="WebCamTexture"/>-based input.
/// </summary>
public class WebCamInputInjectsToArco : ImageInput
{
/// <summary>
/// The shared webcam texture.
/// </summary>
static protected WebCamTexture _webcamTexture;
/// <summary>
/// The device backing the webcam texture.
/// </summary>
static protected WebCamDevice _cameraDevice;
/// <summary>
/// Reusable Color32 buffer for webcam pixels.
/// </summary>
protected Color32[] _colorBuffer;
/// <summary>
/// Reusable byte buffer for RGBA pixel data.
/// </summary>
protected byte[] _pixelBuffer;
/// <summary>
/// The requested capture resolution and frame rate.
/// </summary>
protected int _requestedWidth, _requestedHeight, _requestedFps;
/// <summary>
/// Determines whether the image is flipped vertically.
/// </summary>
public bool flipVertical = false;
/// <summary>
/// Determines whether the image is flipped horizontally.
/// </summary>
public bool flipHorizontal = false;
/// <summary>
/// Determines whether the image is rotated 90 degrees.
/// </summary>
public bool rotate90Degree = false;
public OpenCVForUnityExample.FaceDetectionWebCamTextureArco facearco;
/// <summary>
/// The rgba mat.
/// </summary>
protected Mat rgbaMat;
/// <summary>
/// The rotated rgba mat
/// </summary>
protected Mat rotatedRgbaMat;
/// <summary>
/// Indicates whether this instance is waiting for initialization to complete.
/// </summary>
protected bool isInitWaiting = false;
/// <summary>
/// Indicates whether this instance has been initialized.
/// </summary>
protected bool hasInitDone = false;
/// <summary>
/// Creates the input and opens the requested camera device.
/// </summary>
/// <param name="devicePosition">The camera position, cast to a device index.</param>
/// <param name="requestedWidth">The requested capture width.</param>
/// <param name="requestedHeight">The requested capture height.</param>
/// <param name="requestedFps">The requested capture frame rate.</param>
public WebCamInputInjectsToArco(CameraPosition devicePosition, int requestedWidth, int requestedHeight, int requestedFps) : base()
{
_requestedWidth = requestedWidth;
_requestedHeight = requestedHeight;
_requestedFps = requestedFps;
InitCamera((int)devicePosition);
}
/// <summary>
/// The underlying Unity texture of the capture.
/// </summary>
public override Texture texture
{
get { return _webcamTexture; }
}
/// <summary>
/// For mobile platforms it will return the device rotation angle in degrees.
/// </summary>
public override int rotation
{
get { return _webcamTexture.videoRotationAngle; }
}
/// <summary>
/// Returns true if the image is vertically flipped.
/// </summary>
public override bool verticallyMirrored
{
get { return _webcamTexture.videoVerticallyMirrored; }
}
/// <inheritdoc/>
public override bool isReady
{
get { return base.isReady && _webcamTexture.width > 16 && _webcamTexture.height > 16; }
}
/// <inheritdoc/>
public override void Open()
{
Debug.Log("WebCamInput Open");
if (_webcamTexture && !_webcamTexture.isPlaying)
{
_webcamTexture.Play();
facearco = GameObject.Find("ArcoCameraRig").GetComponent<OpenCVForUnityExample.FaceDetectionWebCamTextureArco>();
// facearco.OnWebCamTextureToMatHelperInitialized();
if (_colorBuffer == null || _colorBuffer.Length != _webcamTexture.width * _webcamTexture.height)
_colorBuffer = new Color32[_webcamTexture.width * _webcamTexture.height];
rgbaMat = new Mat(_webcamTexture.height, _webcamTexture.width, CvType.CV_8UC4);
hasInitDone = true;
}
}
/// <inheritdoc/>
public override void Close()
{
if (_webcamTexture)
{
_webcamTexture.Stop();
_webcamTexture = null;
}
}
/// <inheritdoc/>
public override void GrabFrame()
{
if (_webcamTexture && _webcamTexture.didUpdateThisFrame)
{
ResizeBufferIfNeeded();
ImageFlipMode flipMode = ImageFlipMode.Both;
ImageUtils.WebCamTextureToData(_webcamTexture, _pixelBuffer, flipMode, _colorBuffer);
Apply(_pixelBuffer, _webcamTexture.width, _webcamTexture.height, 4);
}
}
/// <summary>
/// Creates the <see cref="WebCamTexture"/> for the given device index, if one does not already exist.
/// </summary>
/// <param name="deviceIndex">The index into <see cref="WebCamTexture.devices"/>.</param>
private void InitCamera(int deviceIndex)
{
if (WebCamTexture.devices.Length > 0)
{
if (_webcamTexture == null)
{
_cameraDevice = WebCamTexture.devices[deviceIndex];
_webcamTexture = new WebCamTexture(_cameraDevice.name, _requestedWidth, _requestedHeight, _requestedFps);
_webcamTexture.mipMapBias = -0.5f;
_webcamTexture.anisoLevel = 1;
}
}
}
/// <summary>
/// Reallocates the pixel buffers when the capture size changes.
/// </summary>
protected void ResizeBufferIfNeeded()
{
int w = _webcamTexture.width;
int h = _webcamTexture.height;
int pixelsLength = w * h;
if (_pixelBuffer == null || _colorBuffer == null || _colorBuffer.Length != pixelsLength)
{
_colorBuffer = new Color32[w * h];
_pixelBuffer = new byte[w * h * 4];
}
}
/// <summary>
/// Gets the mat of the current frame.
/// </summary>
/// <returns>The mat.</returns>
public virtual Mat GetMat()
{
//Debug.Log("getmat "+ hasInitDone+" "+ _webcamTexture.isPlaying);
if (!hasInitDone || !_webcamTexture.isPlaying)
{
if (rotatedRgbaMat != null)
{
return rotatedRgbaMat;
}
else
{
return rgbaMat;
}
}
Utils.webCamTextureToMat(_webcamTexture, rgbaMat, _colorBuffer);
if (rotatedRgbaMat != null)
{
using (Mat transposeRgbaMat = rgbaMat.t())
{
Core.flip(transposeRgbaMat, rotatedRgbaMat, 1);
}
FlipMat(rotatedRgbaMat);
return rotatedRgbaMat;
}
else
{
FlipMat(rgbaMat);
return rgbaMat;
}
}
/// <summary>
/// Flips the mat.
/// </summary>
/// <param name="mat">Mat.</param>
protected virtual void FlipMat(Mat mat)
{
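// int.MinValue acts as a "no flip" sentinel; for Core.flip, 0 flips
// vertically, 1 flips horizontally, and -1 flips both axes.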
int flipCode = int.MinValue;
if (_cameraDevice.isFrontFacing)
{
if (_webcamTexture.videoRotationAngle == 0)
{
flipCode = 1;
}
else if (_webcamTexture.videoRotationAngle == 90)
{
flipCode = 1;
}
else if (_webcamTexture.videoRotationAngle == 180)
{
flipCode = 0;
}
else if (_webcamTexture.videoRotationAngle == 270)
{
flipCode = 0;
}
}
else
{
if (_webcamTexture.videoRotationAngle == 180)
{
flipCode = -1;
}
else if (_webcamTexture.videoRotationAngle == 270)
{
flipCode = -1;
}
}
if (flipVertical)
{
if (flipCode == int.MinValue)
{
flipCode = 0;
}
else if (flipCode == 0)
{
flipCode = int.MinValue;
}
else if (flipCode == 1)
{
flipCode = -1;
}
else if (flipCode == -1)
{
flipCode = 1;
}
}
if (flipHorizontal)
{
if (flipCode == int.MinValue)
{
flipCode = 1;
}
else if (flipCode == 0)
{
flipCode = -1;
}
else if (flipCode == 1)
{
flipCode = int.MinValue;
}
else if (flipCode == -1)
{
flipCode = 0;
}
}
if (flipCode > int.MinValue)
{
Core.flip(mat, mat, flipCode);
}
}
public virtual Color32[] GetBufferColors()
{
return _colorBuffer;
}
/// <summary>
/// Indicates whether the webcam texture is currently playing.
/// </summary>
/// <returns><c>true</c> if the webcam texture is playing; <c>false</c> otherwise.</returns>
public virtual bool IsPlaying()
{
if (!hasInitDone)
return false;
return _webcamTexture.isPlaying;
}
/// <summary>
/// Releases the resources allocated by initialization.
/// </summary>
protected virtual void _Dispose()
{
isInitWaiting = false;
hasInitDone = false;
if (_webcamTexture != null)
{
_webcamTexture.Stop();
_webcamTexture = null;
}
if (rgbaMat != null)
{
rgbaMat.Dispose();
rgbaMat = null;
}
if (rotatedRgbaMat != null)
{
rotatedRgbaMat.Dispose();
rotatedRgbaMat = null;
}
/* if (onDisposed != null)
onDisposed.Invoke();*/
}
/// <summary>
/// Releases all resources used by the <see cref="WebCamInputInjectsToArco"/> object.
/// </summary>
/// <remarks>Call <see cref="Dispose"/> when you are finished using the <see cref="WebCamInputInjectsToArco"/>. The
/// <see cref="Dispose"/> method leaves the <see cref="WebCamInputInjectsToArco"/> in an unusable state. After
/// calling <see cref="Dispose"/>, you must release all references to the <see cref="WebCamInputInjectsToArco"/> so
/// the garbage collector can reclaim the memory that it was occupying.</remarks>
public virtual void Dispose()
{
if (hasInitDone)
_Dispose();
if (_colorBuffer != null)
_colorBuffer = null;
}
/// <summary>
/// Indicates whether the video buffer of the frame has been updated.
/// </summary>
/// <returns><c>true</c> if the video buffer has been updated; <c>false</c> otherwise.</returns>
public virtual bool DidUpdateThisFrame()
{
if (!hasInitDone)
return false;
#if UNITY_IOS && !UNITY_EDITOR && (UNITY_4_6_3 || UNITY_4_6_4 || UNITY_5_0_0 || UNITY_5_0_1)
if (_webcamTexture.width > 16 && _webcamTexture.height > 16) {
return true;
} else {
return false;
}
#else
return _webcamTexture.didUpdateThisFrame;
#endif
}
}
}
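Putting the three scripts together, a hypothetical bootstrap that shows the wiring they expect: the "ArcoCameraRig" name is hard-coded in WebCamInputInjectsToArco.Open(), and SetupCameraRig() looks for children named "PerspectiveCamera", "OrthographicCamera", and "Screen". ArcoRigBootstrap is an assumed helper, not part of the gist.

using UnityEngine;
using OpenCVForUnityExample;
using Vexpot.Arcolib.Integration;

public class ArcoRigBootstrap : MonoBehaviour
{
    void Awake()
    {
        var rig = GameObject.Find("ArcoCameraRig");
        var controller = rig.GetComponent<InputSourceController>();
        var detector = rig.GetComponent<FaceDetectionWebCamTextureArco>();
        controller.inputType = InputSourceType.WebCamInputInjectsToArco;
        detector.isc = controller;                                  // detector pulls Mats through this input
        detector.screen = rig.transform.Find("Screen").gameObject;  // quad that shows the processed frames
        // With controller.autoStart == true, Start() opens the input, and
        // SetupCameraRig() calls detector.Init() once the webcam is ready.
    }
}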