Skip to content

Instantly share code, notes, and snippets.

Embed
What would you like to do?
Port of opencv/samples/cpp/squares.cpp to Unity (tested on an iPhone 5) using a native plugin.
using UnityEngine;
using System.Collections;
using System.Runtime.InteropServices;
/// Streams the device camera through a native OpenCV plugin (square
/// detection) and displays the processed frames on this object's material.
public class imageProcessing : MonoBehaviour {

// Native plugin entry points. "__Internal" because iOS plugins are
// statically linked into the player binary.
// NOTE(review): the original called Init() without declaring it, which
// does not compile — the DllImport below was missing.
[DllImport ("__Internal")]
private static extern void Init();
[DllImport ("__Internal")]
private static extern void UpdateTexture(System.IntPtr colors, int width, int height);

WebCamTexture webcamTexture;   // live camera feed (null if no camera found)
Texture2D texture = null;      // destination texture for processed pixels

// Start the default camera (if any) and initialise the native plugin.
void Start () {
WebCamDevice[] devices = WebCamTexture.devices;
if (devices.Length > 0) {
webcamTexture = new WebCamTexture(devices[0].name, 512, 384, 30);
webcamTexture.Play();
}
Init();
}

// NOTE: OnGUI runs once per GUI event, i.e. possibly several times per
// frame — the original "once per frame" comment was wrong.
void OnGUI() {
// Guard: without this the original dereferenced a null webcamTexture
// on devices with no camera.
if (webcamTexture == null) return;

Color32[] pixels = webcamTexture.GetPixels32();
// Pin the managed array so the native side can read/write it in place.
GCHandle pixelsHandle = GCHandle.Alloc(pixels, GCHandleType.Pinned);
try {
UpdateTexture(pixelsHandle.AddrOfPinnedObject(), webcamTexture.width, webcamTexture.height);
} finally {
// Always unpin, even if the native call faults.
pixelsHandle.Free();
}

// Reallocate the destination texture only when the camera size changes;
// the original destroyed and recreated it on every OnGUI call.
if (texture == null || texture.width != webcamTexture.width || texture.height != webcamTexture.height) {
if (texture != null) Destroy(texture);
texture = new Texture2D(webcamTexture.width, webcamTexture.height);
}
texture.SetPixels32(pixels);
texture.Apply();
renderer.material.mainTexture = texture;
}
}
#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif
#include <sys/stat.h>
#include <iostream>
#include <math.h>
#include <string.h>
// NOTE(review): cascade and storage are never initialised or referenced in
// this file — presumably leftovers from a Haar-detection sample; candidates
// for removal.
CvHaarClassifierCascade *cascade = 0;
CvMemStorage *storage = 0;
using namespace cv;
using namespace std;
// thresh: upper Canny threshold; N: number of threshold levels tried per plane.
int thresh = 30, N = 11;
// C-linkage declarations: extern "C" disables C++ name mangling so the
// Unity side can resolve Init/UpdateTexture by symbol name via DllImport.
// NOTE(review): Init() is declared here but its definition is not visible
// in this file — confirm it exists elsewhere in the plugin.
extern "C" {
void Init();
// data: pinned RGBA32 pixel buffer owned by the C# side; processed in place.
void UpdateTexture(char* data, int width, int height);
double angle( cv::Point pt1, cv::Point pt2, cv::Point pt0 );
void findSquares( const IplImage& image, vector<vector<cv::Point> >& squares );
void drawSquares( IplImage& image, const vector<vector<cv::Point> >& squares );
}
// returns sequence of squares detected on the image.
// the sequence is stored in the specified memory storage
void findSquares( const IplImage& image, vector<vector<cv::Point> >& squares )
{
squares.clear();
Mat pyr, timg, gray0(cvSize(image.width, image.height), CV_8U), gray;
// karlphillip: dilate the image so this technique can detect the white square,
Mat out(&image);
dilate(out, out, Mat(), cv::Point(-1,-1));
// then blur it so that the ocean/sea become one big segment to avoid detecting them as 2 big squares.
medianBlur(out, out, 7);
// down-scale and upscale the image to filter out the noise
pyrDown(out, pyr, cv::Size(out.cols/2, out.rows/2));
pyrUp(pyr, timg, out.size());
vector<vector<cv::Point> > contours;
// find squares in every color plane of the image
for( int c = 0; c < 3; c++ )
{
int ch[] = {c, 0};
//mixChannels(&timg, 1, &gray0, 1, ch, 1);
gray0=timg;
// try several threshold levels
for( int l = 0; l < N; l++ )
{
// hack: use Canny instead of zero threshold level.
// Canny helps to catch squares with gradient shading
if( l == 0 )
{
// apply Canny. Take the upper threshold from slider
// and set the lower to 0 (which forces edges merging)
Canny(gray0, gray, 0, thresh, 5);
// dilate canny output to remove potential
// holes between edge segments
dilate(gray, gray, Mat(), cv::Point(-1,-1));
}
else
{
// apply threshold if l!=0:
// tgray(x,y) = gray(x,y) < (l+1)*255/N ? 255 : 0
gray = gray0 >= (l+1)*255/N;
}
// find contours and store them all as a list
findContours(gray, contours, CV_RETR_LIST, CV_CHAIN_APPROX_SIMPLE);
vector<cv::Point> approx;
// test each contour
for( size_t i = 0; i < contours.size(); i++ )
{
// approximate contour with accuracy proportional
// to the contour perimeter
approxPolyDP(Mat(contours[i]), approx, arcLength(Mat(contours[i]), true)*0.02, true);
// square contours should have 4 vertices after approximation
// relatively large area (to filter out noisy contours)
// and be convex.
// Note: absolute value of an area is used because
// area may be positive or negative - in accordance with the
// contour orientation
if( approx.size() == 4 &&
fabs(contourArea(Mat(approx))) > 1000 &&
isContourConvex(Mat(approx)) )
{
double maxCosine = 0;
for( int j = 2; j < 5; j++ )
{
// find the maximum cosine of the angle between joint edges
double cosine = fabs(angle(approx[j%4], approx[j-2], approx[j-1]));
maxCosine = MAX(maxCosine, cosine);
}
// if cosines of all angles are small
// (all angles are ~90 degree) then write quandrange
// vertices to resultant sequence
if( maxCosine < 0.3 )
squares.push_back(approx);
}
}
}
}
}
// the function draws all the squares in the image
// Draws every detected quadrilateral onto `image`, using a distinct colour
// per edge (red, green, blue, white) so the vertex order is visible.
void drawSquares( IplImage& image, const vector<vector<cv::Point> >& squares )
{
static const CvScalar edgeColors[4] = {
CV_RGB(255, 0, 0),
CV_RGB(0, 255, 0),
CV_RGB(0, 0, 255),
CV_RGB(255, 255, 255)
};
for( size_t s = 0; s < squares.size(); s++ )
{
const vector<cv::Point>& quad = squares[s];
// Only complete quadrilaterals are drawn.
if( (int)quad.size() != 4 )
continue;
for( int e = 0; e < 4; e++ )
{
const cv::Point& a = quad[e];
const cv::Point& b = quad[(e + 1) % 4];   // wrap last edge back to first vertex
cvLine(&image, cvPoint(a.x, a.y), cvPoint(b.x, b.y),
edgeColors[e], 3, CV_AA, 0);
}
}
}
// helper function:
// finds a cosine of angle between vectors
// from pt0->pt1 and from pt0->pt2
// Returns the cosine of the angle between vectors pt0->pt1 and pt0->pt2.
// The 1e-10 term keeps the denominator non-zero for degenerate
// (zero-length) edges.
double angle( cv::Point pt1, cv::Point pt2, cv::Point pt0 )
{
const double ax = pt1.x - pt0.x;
const double ay = pt1.y - pt0.y;
const double bx = pt2.x - pt0.x;
const double by = pt2.y - pt0.y;
const double dot = ax*bx + ay*by;
const double normProduct = (ax*ax + ay*ay)*(bx*bx + by*by);
return dot/sqrt(normProduct + 1e-10);
}
// Plugin entry point called from Unity: wraps the pinned RGBA32 pixel
// buffer in an IplImage header (no copy), runs square detection on an
// equalised grayscale copy, and draws the results back into the caller's
// buffer in place.
void UpdateTexture(char* data, int width, int height)
{
// Guard against a null buffer or degenerate dimensions — the original
// passed them straight into OpenCV and would have crashed.
if (data == 0 || width <= 0 || height <= 0)
return;

// Header only: the pixel memory stays owned by the C# side.
IplImage* src_img = cvCreateImageHeader(cvSize(width, height), IPL_DEPTH_8U, 4);
cvSetData(src_img, data, src_img->widthStep);

// Grayscale + histogram equalisation boosts edge contrast for Canny.
IplImage* src_gray = cvCreateImage (cvGetSize (src_img), IPL_DEPTH_8U, 1);
cvCvtColor (src_img, src_gray, CV_RGBA2GRAY);
cvEqualizeHist (src_gray, src_gray);

vector<vector<cv::Point> > squares;
findSquares(*src_gray, squares);
drawSquares(*src_img, squares);   // writes into Unity's pinned buffer

cvReleaseImage(&src_gray);
cvReleaseImageHeader(&src_img);   // frees the header only, not `data`
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
You can’t perform that action at this time.