Skip to content

Instantly share code, notes, and snippets.

@companje
Last active November 26, 2019 23:26
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save companje/8ddd5efab16a3057854dc8fc84bd752b to your computer and use it in GitHub Desktop.
Save companje/8ddd5efab16a3057854dc8fc84bd752b to your computer and use it in GitHub Desktop.
Finger Tracking based on Edge Finding
import processing.video.*;
import gab.opencv.*;
OpenCV opencv; // OpenCV-for-Processing wrapper (gab.opencv)
PImage src, canny, scharr, sobel; // camera frame + edge-detector snapshots (scharr/sobel only used by commented-out code)
Capture cam; // webcam stream
/**
 * Initializes the sketch: 640x480 canvas, the webcam stream, and OpenCV
 * sized to match the camera.
 */
void setup() {
  //src = loadImage("1021.png"); // alternative: run on a still image
  size(640, 480);
  String camSpec = "name=HD Pro Webcam C920,size=640x480,fps=30";
  cam = new Capture(this, camSpec);
  cam.start();
  opencv = new OpenCV(this, 640, 480);
}
// Grabs a camera frame, runs Canny edge detection plus morphology
// (dilate x2 + erode) to close gaps, finds contours, then draws the frame,
// the edge overlay, and each contour's outline and center point.
void draw() {
if (cam.available()) cam.read();
if (cam.width==0) return; // camera not initialized yet
src = cam.get();
opencv.loadImage(src);
opencv.findCannyEdges(24, 44); //lowThreshold, int highThreshold
//println(mouseX, mouseY);
opencv.dilate(); // thicken edges twice, then erode once, to join broken edge fragments
opencv.dilate();
opencv.erode();
ArrayList<Contour> contours = opencv.findContours();
canny = opencv.getSnapshot();
// Alternative edge detectors, kept for experimentation:
//opencv.loadImage(src);
//opencv.findScharrEdges(OpenCV.HORIZONTAL);
//scharr = opencv.getSnapshot();
//opencv.loadImage(src);
//opencv.findSobelEdges(1, 0);
//sobel = opencv.getSnapshot();
pushMatrix();
//scale(0.5);
image(src, 0, 0);
blendMode(SCREEN); // overlay the edge image on top of the camera frame
image(canny, 0, 0);
blendMode(BLEND); // back to normal compositing for the vector drawing below
for (Contour c : contours) {
//if (c.area()>100) continue;
noFill();
stroke(0, 255, 0); // contour outline in green
c.draw();
PVector v = getCenter(c.getPoints());
fill(255, 0, 0); // contour centroid in red
noStroke();
ellipse(v.x, v.y, 6, 6);
}
//image(scharr, 0, src.height);
//image(sobel, src.width, src.height);
popMatrix();
//text("Source", 10, 25);
//text("Canny", src.width/2 + 10, 25);
//text("Scharr", 10, src.height/2 + 25);
//text("Sobel", src.width/2 + 10, src.height/2 + 25);
}
/**
 * Returns the centroid (arithmetic mean) of the given points.
 * Returns a zero vector for an empty list; previously this divided by
 * zero, yielding a NaN vector that poisoned downstream distance math.
 */
PVector getCenter(ArrayList<PVector> vectors) {
  PVector center = new PVector();
  if (vectors.isEmpty()) return center; // avoid division by zero below
  for (PVector v : vectors) {
    center.add(v);
  }
  center.div(vectors.size());
  return center;
}
import signal.library.*;
/**
 * A remembered touch point for debug display. Extends PVector with an age
 * counter (frames since creation; aged out by Tracker) and a display color.
 */
class Tap extends PVector {
  int age; // frames since this tap was recorded
  int clr; // display color

  /** Creates a white tap at the given position. */
  Tap(PVector position) {
    set(position);
    clr = color(255);
  }

  /** Creates a tap at the given position with an explicit color. */
  Tap(PVector position, color tapColor) {
    set(position);
    clr = tapColor;
  }
}
/**
 * Tracks a single fingertip across frames.
 *
 * Each frame, {@code update()} picks the center of the small contour closest
 * to {@code center}, rejects outliers, smooths the position with a pair of
 * 1D Kalman filters (x and y), and fires the sketch-level callbacks
 * touchDown()/touchMove()/touchUp()/touchTap().
 *
 * Fixes vs. the previous revision:
 *  - debugPoints were pruned with a forward index loop that removed while
 *    iterating, skipping the element after every removal; now iterates
 *    backwards.
 *  - removed a duplicated maxFingerDist check and raw ArrayList types.
 *  - dead commented-out code (old draw()/helpers) removed for readability.
 */
class Tracker {
  KalmanFilter kfX, kfY; // 1D Kalman filters smoothing x and y independently
  ArrayList<PVector> trail = new ArrayList<PVector>();       // raw (unfiltered) detections, camera coords
  ArrayList<PVector> smoothtrail = new ArrayList<PVector>(); // Kalman-filtered detections, camera coords
  ArrayList<Tap> debugPoints = new ArrayList<Tap>();         // debug markers, aged out after 100 frames
  float q = 1.0;   // Kalman process variance
  float r = 100.0; // estimate of measurement variance, change to see effect (was 40)
  int releaseTimer = 0; // consecutive frames with no finger detected
  PVector pos = new PVector(); // current position (NOTE(review): comments said screen coords, but it is set from camera-space points — confirm)
  PVector center = new PVector(320, 240); // expected touch area center, camera coords
  float maxFingerDist = 218; // fingers further away than this from center are ignored
  int minContourLength = 20; // unused here; kept for compatibility with older code
  String txt = "";           // debug text; kept for compatibility
  int lastTouchInput;        // millis() timestamp of the last accepted detection
  boolean isMoveGesture = false; // true once the trail has traveled far enough to count as a drag

  /** Initializes (or re-initializes) the Kalman filters. */
  void setup() {
    kfX = new KalmanFilter(q, r);
    kfY = new KalmanFilter(q, r);
  }

  /**
   * Returns the centroid of the small contour (area <= 100) closest to
   * {@code to}, or null when no such contour exists. Large contours are
   * assumed not to be fingertips.
   */
  PVector getFingerClosestTo(ArrayList<Contour> contours, PVector to) {
    float closestDist = 9999;
    PVector closestPoint = null;
    for (Contour c : contours) {
      if (c.area()>100) continue; // too big to be a fingertip
      PVector p = getCenter(c.getPoints());
      if (closestPoint==null || p.dist(to) < closestPoint.dist(to)) {
        closestPoint = p;
        closestDist = p.dist(to);
      }
    }
    return closestPoint;
  }

  /**
   * Consumes this frame's contours and advances the touch state machine:
   * detection -> outlier rejection -> Kalman smoothing -> gesture callbacks.
   */
  void update(ArrayList<Contour> contours) {
    // Age the debug markers and drop expired ones. Iterate backwards so a
    // removal does not skip the following element (the old forward-index
    // loop had exactly that bug).
    for (Tap t : debugPoints) {
      t.age++;
    }
    for (int i=debugPoints.size()-1; i>=0; i--) {
      if (debugPoints.get(i).age>100) debugPoints.remove(i);
    }

    PVector p = getFingerClosestTo(contours, center);
    if (p!=null && p.dist(center)>maxFingerDist) {
      p = null; // ignore input too far away from center; probably belly noise
    }
    // TODO: geometric correction of p was started here but never finished —
    // unclear whether it belongs here or in the application (a 3D mapping is
    // also done downstream).

    if (p!=null && smoothtrail.size()>6) {
      PVector first = smoothtrail.get(0);                   // camera coords
      PVector prev = smoothtrail.get(smoothtrail.size()-1); // camera coords
      if (first.dist(prev)>40) isMoveGesture = true;
      // Reject sudden jumps unless we are clearly mid-drag. This prevents
      // glitches, but it also prevents some legitimate fast dragging.
      if (!isMoveGesture && p.dist(prev)>100) {
        p = null;
      }
    }

    if (p!=null) {
      lastTouchInput = millis();
      releaseTimer = 0; // finger visible: reset the release countdown
      trail.add(p.copy()); // keep the unfiltered point
      if (smoothtrail.size()==0) {
        // First contact: seed the filters at the measured position so the
        // smoothed trail does not lag in from (0,0).
        kfX.xhat = p.x;
        kfY.xhat = p.y;
        smoothtrail.add(p);
        pos.set(p);
        println("touchDown",p);
        touchDown();
      } else {
        p.x = kfX.predict_and_correct(p.x);
        p.y = kfY.predict_and_correct(p.y);
        smoothtrail.add(p);
        pos.set(p);
        touchMove();
      }
    }

    // After 10 consecutive empty frames, treat the touch as released.
    if (p==null && smoothtrail.size()>0 && ++releaseTimer>10) {
      println("touchUp size", smoothtrail.size(), smoothtrail.get(0));
      touchUp();
      if (smoothtrail.size()>3) {
        float d = smoothtrail.get(0).dist(smoothtrail.get(smoothtrail.size()-1));
        println("tap",d);
        if (d<5) touchTap(); // barely moved between down and up: a tap
      }
      isMoveGesture = false;
      trail.clear();
      smoothtrail.clear();
      releaseTimer = 0;
      // Fresh filters for the next touch.
      kfX = new KalmanFilter(q, r);
      kfY = new KalmanFilter(q, r);
    }
  }
}
import processing.video.*;
import gab.opencv.*;
import netP5.*;
import oscP5.*;
OpenCV opencv; // OpenCV-for-Processing wrapper (gab.opencv)
PImage src, canny, scharr, sobel; // camera frame + edge snapshots (scharr/sobel unused here)
Capture cam; // webcam stream
PImage imgCalib; // calibration image: black pixels are masked out; RGB encodes a 3D position (see get3D)
Tracker tracker = new Tracker(); // single-finger tracker
OscP5 osc; // OSC receiver (port 12001)
NetAddress oscTarget; // OSC send target (127.0.0.1:12000)
/**
 * Initializes canvas, OpenCV, the webcam, the calibration image, the finger
 * tracker, and OSC input/output.
 */
void setup() {
  //src = loadImage("1021.png"); // alternative: run on a still image
  size(640, 480);
  opencv = new OpenCV(this, 640, 480);
  String camSpec = "name=HD Pro Webcam C920,size=640x480,fps=30";
  cam = new Capture(this, camSpec);
  cam.start();
  imgCalib = loadImage("camera-calibration.png");
  tracker.setup();
  osc = new OscP5(this, 12001);                   // receive
  oscTarget = new NetAddress("127.0.0.1", 12000); // send
}
/**
 * Per frame: reads and 180-degree-rotates the camera image, masks it with
 * the calibration image, runs Canny + morphology + contour detection, feeds
 * the contours to the tracker, and draws the result.
 */
void draw() {
  if (cam.available()) cam.read();
  if (cam.width==0) return; // camera not initialized yet
  if (src==null) src = cam.get();
  else rotate180(cam.get(), src);

  // Black pixels in the calibration image mask out the corresponding camera
  // pixels. TODO: make this masking more efficient...
  src.loadPixels();
  imgCalib.loadPixels();
  // BUGFIX: the loop bound was cam.width*height — the *sketch* height, not
  // the camera height. Use the real buffer lengths and clamp to the smaller
  // one so differently-sized calibration images cannot index out of bounds.
  int n = min(src.pixels.length, imgCalib.pixels.length);
  for (int i=0; i<n; i++) {
    if (brightness(imgCalib.pixels[i])==0) src.pixels[i] = 0;
  }
  src.updatePixels();

  opencv.loadImage(src);
  opencv.findCannyEdges(15, 50); // lowThreshold, highThreshold
  opencv.dilate(); // thicken edges twice, erode once, to close small gaps
  opencv.dilate();
  opencv.erode();
  ArrayList<Contour> contours = opencv.findContours();
  canny = opencv.getSnapshot();

  pushMatrix();
  image(src, 0, 0);
  tracker.update(contours);
  // Candidate fingertips: small contours outlined in green, centers in red.
  for (Contour c : contours) {
    if (c.area()>100) continue; // too big to be a fingertip
    noFill();
    stroke(0, 255, 0);
    c.draw();
    PVector v = getCenter(c.getPoints());
    fill(255, 0, 0);
    noStroke();
    ellipse(v.x, v.y, 6, 6);
  }
  popMatrix();
}
/**
 * Returns the centroid (arithmetic mean) of the given points.
 * Returns a zero vector for an empty list; previously this divided by
 * zero, yielding a NaN vector that poisoned downstream distance math.
 */
PVector getCenter(ArrayList<PVector> vectors) {
  PVector center = new PVector();
  if (vectors.isEmpty()) return center; // avoid division by zero below
  for (PVector v : vectors) {
    center.add(v);
  }
  center.div(vectors.size());
  return center;
}
/**
 * Decodes the calibration color at (x, y) into a 3D position: each RGB
 * channel maps [0..255] to [-600..600] (normalize, center, scale by 1200).
 */
PVector get3D(int x, int y) {
  color c = getColor(x, y);
  // Same per-channel computation as before (divide, subtract, multiply),
  // just written out component-wise instead of via PVector mutation.
  float wx = (red(c)   / 255f - .5f) * 1200f;
  float wy = (green(c) / 255f - .5f) * 1200f;
  float wz = (blue(c)  / 255f - .5f) * 1200f;
  return new PVector(wx, wy, wz);
}
/** Samples the calibration image at (x, y). */
color getColor(int x, int y) {
  return imgCalib.get(x, y); // parameters are already ints; no cast needed
}
/**
 * Copies src into dst rotated by 180 degrees (pixel order reversed).
 * Assumes dst has the same dimensions as src — TODO confirm at call sites.
 */
void rotate180(PImage src, PImage dst) {
  src.loadPixels();
  dst.loadPixels(); // BUGFIX: pixels[] must be loaded before writing to it
  for (int wh=src.width*src.height, i=0; i<wh; i++) {
    dst.pixels[i] = src.pixels[wh-i-1];
  }
  dst.updatePixels();
}
// Gesture callbacks fired by Tracker.update(); each one forwards the
// tracker's current position over OSC, using the event name as the
// OSC message address.
void touchDown() {
sendOsc("touchDown");
}
void touchMove() {
sendOsc("touchMove");
}
void touchUp() {
sendOsc("touchUp");
}
void touchTap() {
sendOsc("touchTap");
}
/**
 * Converts the tracker's current 2D position to a 3D point via the
 * calibration image and broadcasts it as an OSC message (address = name,
 * arguments = x, y, z floats) to oscTarget.
 */
void sendOsc(String name) {
  PVector world = get3D((int)tracker.pos.x, (int)tracker.pos.y);
  OscMessage m = new OscMessage(name);
  m.add(world.x);
  m.add(world.y);
  m.add(world.z);
  osc.send(m, oscTarget);
}
@companje
Copy link
Author

finger

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment