OpenCV For Unity HoughCircles
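A Unity MonoBehaviour sample for the OpenCV For Unity asset: each webcam frame is fetched through WebCamTextureToMatHelper, converted to grayscale, binarized with Otsu's threshold, cleaned up with morphological close/open, and searched for circles with Imgproc.HoughCircles; the detected circles are drawn back onto the texture shown by the object's Renderer. The dp, minDist, param1 and param2 sliders tune the Hough transform at runtime, and the public On... handlers are presumably wired to the helper's initialized/disposed/error events in the Inspector, as in the OpenCVForUnityExample scenes.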
#if !(PLATFORM_LUMIN && !UNITY_EDITOR)
using OpenCVForUnity.CoreModule;
using OpenCVForUnity.ImgprocModule;
using OpenCVForUnity.UnityUtils;
using OpenCVForUnity.UnityUtils.Helper;
using OpenCVForUnityExample;
using UnityEngine;
[RequireComponent(typeof(WebCamTextureToMatHelper), typeof(FpsMonitor))]
public class MyOpenCVSample : MonoBehaviour
{
    // HoughCircles parameters. dp must be at least 1 and minDist/param1/param2 must be
    // positive, so start from workable values and tune them in the Inspector.
    [Range(1, 200)]
    public double dp = 1;
    [Range(0, 200)]
    public double minDist = 20;
    [Range(0, 200)]
    public double param1 = 100;
    [Range(0, 200)]
    public double param2 = 30;
    private Mat dstMat = new Mat();
    private Texture2D texture;
    private WebCamTextureToMatHelper webCamTextureToMatHelper;
    private FpsMonitor fpsMonitor;
    // HSV bounds for a blue-ish hue range; unused by Filter() as written (see the sketch below).
    private static readonly Scalar LOWER = new Scalar(97, 50, 50);
    private static readonly Scalar UPPER = new Scalar(117, 255, 255);
    // Use this for initialization
    private void Start()
    {
        fpsMonitor = GetComponent<FpsMonitor>();
        webCamTextureToMatHelper = gameObject.GetComponent<WebCamTextureToMatHelper>();
#if UNITY_ANDROID && !UNITY_EDITOR
        // Avoids the front camera low light issue that occurs in only some Android devices (e.g. Google Pixel, Pixel2).
        webCamTextureToMatHelper.avoidAndroidFrontCameraLowLightIssue = true;
#endif
        webCamTextureToMatHelper.Initialize();
    }
    /// <summary>
    /// Raises the web cam texture to mat helper initialized event.
    /// </summary>
    public void OnWebCamTextureToMatHelperInitialized()
    {
        Debug.Log("OnWebCamTextureToMatHelperInitialized");
        Mat webCamTextureMat = webCamTextureToMatHelper.GetMat();
        texture = new Texture2D(webCamTextureMat.cols(), webCamTextureMat.rows(), TextureFormat.RGBA32, false);
        gameObject.GetComponent<Renderer>().material.mainTexture = texture;
        gameObject.transform.localScale = new Vector3(webCamTextureMat.cols(), webCamTextureMat.rows(), 1);
        Debug.Log("Screen.width " + Screen.width + " Screen.height " + Screen.height + " Screen.orientation " + Screen.orientation);
        if (fpsMonitor != null)
        {
            fpsMonitor.Add("width", webCamTextureMat.width().ToString());
            fpsMonitor.Add("height", webCamTextureMat.height().ToString());
            fpsMonitor.Add("orientation", Screen.orientation.ToString());
        }
        // Size the orthographic camera so the camera image fits the screen without distortion.
        float width = webCamTextureMat.width();
        float height = webCamTextureMat.height();
        float widthScale = Screen.width / width;
        float heightScale = Screen.height / height;
        if (widthScale < heightScale)
        {
            Camera.main.orthographicSize = (width * Screen.height / Screen.width) / 2;
        }
        else
        {
            Camera.main.orthographicSize = height / 2;
        }
    }
    /// <summary>
    /// Raises the web cam texture to mat helper disposed event.
    /// </summary>
    public void OnWebCamTextureToMatHelperDisposed()
    {
        Debug.Log("OnWebCamTextureToMatHelperDisposed");
        dstMat.Dispose();
        if (texture != null)
        {
            Texture2D.Destroy(texture);
            texture = null;
        }
    }
    /// <summary>
    /// Raises the web cam texture to mat helper error occurred event.
    /// </summary>
    /// <param name="errorCode">Error code.</param>
    public void OnWebCamTextureToMatHelperErrorOccurred(WebCamTextureToMatHelper.ErrorCode errorCode)
    {
        Debug.Log("OnWebCamTextureToMatHelperErrorOccurred " + errorCode);
    }
    // Update is called once per frame
    private void Update()
    {
        if (webCamTextureToMatHelper.IsPlaying() && webCamTextureToMatHelper.DidUpdateThisFrame())
        {
            Mat rgbaMat = webCamTextureToMatHelper.GetMat();
            Filter(rgbaMat);
            Utils.fastMatToTexture2D(rgbaMat, texture);
        }
    }
    /// <summary>
    /// Raises the destroy event.
    /// </summary>
    private void OnDestroy()
    {
        webCamTextureToMatHelper.Dispose();
    }
    private void Filter(Mat rgbaMat)
    {
        // Convert to grayscale
        Imgproc.cvtColor(rgbaMat, rgbaMat, Imgproc.COLOR_RGBA2GRAY);
        // Binarize with Otsu's threshold
        Imgproc.threshold(rgbaMat, rgbaMat, 0, 255.0, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        // Remove noise with morphological closing and opening
        Mat kernel = Mat.ones(3, 3, CvType.CV_8UC1);
        Point anchor = new Point(-1, -1);
        Imgproc.morphologyEx(rgbaMat, rgbaMat, Imgproc.MORPH_CLOSE, kernel, anchor, 1);
        Imgproc.morphologyEx(rgbaMat, rgbaMat, Imgproc.MORPH_OPEN, kernel, anchor, 1);
        // Use a median blur to suppress the remaining noise before circle detection
        Imgproc.medianBlur(rgbaMat, dstMat, 5);
        //Imgproc.GaussianBlur(rgbaMat, dstMat, new Size(3, 3), 0);
        dstMat.copyTo(rgbaMat);
        // Detect circles with the Hough transform
        Mat circles = new Mat();
        Imgproc.HoughCircles(rgbaMat, circles, Imgproc.HOUGH_GRADIENT, dp, minDist, param1, param2);
        // Convert back to RGBA once, then draw every detected circle
        Imgproc.cvtColor(rgbaMat, rgbaMat, Imgproc.COLOR_GRAY2RGBA);
        Point pt = new Point();
        for (int i = 0; i < circles.cols(); i++)
        {
            // circles is a 1 x N Mat; each column holds (center x, center y, radius)
            double[] data = circles.get(0, i);
            pt.x = data[0];
            pt.y = data[1];
            double radius = data[2];
            Imgproc.circle(rgbaMat, pt, (int)radius, new Scalar(255, 0, 0, 255), 2);
        }
        circles.Dispose();
        kernel.Dispose();
    }
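    // LOWER and UPPER above are declared but never used by Filter(). As a minimal,
    // hypothetical sketch, they could restrict detection to that HSV colour range by
    // building a mask before HoughCircles (e.g. call MaskColorRange(rgbaMat, maskMat)
    // and run HoughCircles on maskMat). The method name is illustrative; cvtColor and
    // Core.inRange are the standard OpenCV calls used here.
    private void MaskColorRange(Mat rgbaMat, Mat maskMat)
    {
        using (Mat hsvMat = new Mat())
        {
            // inRange expects an HSV image, so drop the alpha channel and convert first.
            Imgproc.cvtColor(rgbaMat, hsvMat, Imgproc.COLOR_RGBA2RGB);
            Imgproc.cvtColor(hsvMat, hsvMat, Imgproc.COLOR_RGB2HSV);
            // Pixels whose HSV values fall inside [LOWER, UPPER] become 255, all others 0.
            Core.inRange(hsvMat, LOWER, UPPER, maskMat);
        }
    }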
}
#endif