Skip to content

Instantly share code, notes, and snippets.

@micuat
Last active March 18, 2022 06:30
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save micuat/6194e281e64798611a6ba0c8ed6c8b47 to your computer and use it in GitHub Desktop.
// Detects interest points between two images and matches them together.
// These matches are shown by drawing a line between the matching features in each images.
import boofcv.processing.*;
import boofcv.struct.image.*;
import boofcv.struct.feature.*;
import georegression.struct.point.*;
import java.util.*;
import boofcv.factory.feature.associate.ConfigAssociateGreedy;
import toxi.geom.*;
import toxi.geom.mesh2d.*;
import toxi.util.*;
import toxi.util.datatypes.*;
import toxi.processing.*;
// ranges for x/y positions of points (BiasedFloatRange instances created in setup();
// NOTE(review): never read after initialization in this chunk — verify before removing)
FloatRange xpos, ypos;
// helper class for rendering — NOTE(review): never initialized or used in this chunk
ToxiclibsSupport gfx;
// Voronoi triangulation of the interpolated feature points; rebuilt every frame in draw()
Voronoi voronoi = new Voronoi();
// Per-site texture metadata indexed [x][y]: {x in input0, y in input0, x in input1, y in input1}.
// The 480x640 dimensions assume the input images are 480 wide by 640 tall — TODO confirm.
float[][][] meta = new float[480][640][4];
// the two photographs being matched and cross-faded
PImage input0;
PImage input1;
// feature locations detected in each image (filled in setup())
List<Point2D_F64> locations0,locations1;
// which features are matched together (i.src indexes locations0, i.dst indexes locations1)
List<AssociatedIndex> matches;
// pixel margin — NOTE(review): only feeds an unused computation in draw(); effectively inert
int border = 20;
// Loads the two photographs, detects and matches SURF features between them,
// and prepares the biased random ranges. Runs once at sketch start.
void setup() {
  size(960, 1280, P3D);
  input0 = loadImage("IMG_9503.JPG");
  input1 = loadImage("IMG_9502.JPG");
  //input0 = loadImage("cave_01.jpg");
  //input1 = loadImage("cave_02.jpg");
  // SURF detector/descriptor through BoofCV's simple interface; SIFT is the
  // commented-out alternative below.
  SimpleDetectDescribePoint detector = Boof.detectSurf(true, ImageDataType.F32);
  //SimpleDetectDescribePoint detector = Boof.detectSift(ImageDataType.F32);
  // Greedy association with forwards-backwards consistency and a ratio test.
  ConfigAssociateGreedy greedyConfig = new ConfigAssociateGreedy();
  greedyConfig.forwardsBackwards = true;
  greedyConfig.scoreRatioThreshold = 0.75;
  SimpleAssociateDescription matcher = Boof.associateGreedy(greedyConfig, detector);
  // Run detection on each image in turn, keeping locations and descriptors.
  detector.process(input0);
  locations0 = detector.getLocations();
  List<TupleDesc> descriptions0 = detector.getDescriptions();
  detector.process(input1);
  locations1 = detector.getLocations();
  List<TupleDesc> descriptions1 = detector.getDescriptions();
  // Pair up descriptors between the two images.
  matcher.associate(descriptions0, descriptions1);
  matches = matcher.getMatches();
  //surface.setSize(input0.width*2, input0.height*2);
  // Bias x positions toward the horizontal center (33% std-dev) and
  // y positions toward the bottom (50% std-dev).
  xpos = new BiasedFloatRange(0, width, width/2, 0.333f);
  ypos = new BiasedFloatRange(0, height, height, 0.5f);
}
// Each frame: cross-fades the two photos, rebuilds a Voronoi triangulation
// seeded by the interpolated positions of matched features, and renders both
// images through that triangulation so the features morph between photos.
//
// Fixes vs. the previous revision: removed an unused local (`w`) and a dead
// assignment to X that was immediately overwritten; meta[][] write indices are
// now clamped via toIndexX/toIndexY (reads already were — unclamped writes
// could throw ArrayIndexOutOfBoundsException for lerped points outside the
// image); the per-vertex bounds checks now consistently use inRange().
void draw() {
  clear();
  scale(2);
  // Fixed feature-image dimensions; must match the meta[][][] allocation.
  final int imgW = 480;
  final int imgH = 640;
  // Cross-fade factor oscillating in [0, 1], one full cycle every 2 seconds.
  float X = sin(millis()*0.001*PI)*0.5 + 0.5;
  // Base image fully opaque, second image blended on top by X.
  tint(255, 255);
  image(input0, 0, 0);
  tint(255, X*255);
  image(input1, 0, 0);
  tint(255, 255);
  strokeWeight(2);
  // Rebuild the triangulation from scratch, pinning the four image corners so
  // the mesh always covers the whole image.
  voronoi = new Voronoi();
  pinCorner(0, 0);
  pinCorner(imgW - 1, 0);
  pinCorner(0, imgH - 1);
  pinCorner(imgW - 1, imgH - 1);
  int count = 0;
  for (AssociatedIndex i : matches) {
    // Thin out the sites: keep only every 7th match to reduce clutter.
    if (count++ % 7 != 0) continue;
    Point2D_F64 p0 = locations0.get(i.src);
    Point2D_F64 p1 = locations1.get(i.dst);
    // Reject implausible matches: too far apart overall, or vertically.
    if (dist((float)p0.x, (float)p0.y, (float)p1.x, (float)p1.y) > 200) continue;
    if (abs((float)p0.y - (float)p1.y) > 40) continue;
    // Site position interpolated between the two matched feature locations.
    float x = lerp((float)p0.x, (float)p1.x, X);
    float y = lerp((float)p0.y, (float)p1.y, X);
    voronoi.addPoint(new Vec2D(x, y));
    // Remember each image's original feature position for texture lookup;
    // clamp indices the same way the getX/getY readers do.
    int col = toIndexX(x);
    int row = toIndexY(y);
    meta[col][row][0] = (float)p0.x;
    meta[col][row][1] = (float)p0.y;
    meta[col][row][2] = (float)p1.x;
    meta[col][row][3] = (float)p1.y;
  }
  stroke(0);
  noFill();
  // Pass 0 renders input0 opaque; pass 1 blends input1 on top by X. Each
  // triangle vertex samples the texture at that image's original feature spot.
  for (int layer = 0; layer < 2; layer++) {
    tint(255, layer == 0 ? 255 : X*255);
    noStroke();
    beginShape(TRIANGLES);
    texture(layer == 0 ? input0 : input1);
    for (Triangle2D t : voronoi.getTriangles()) {
      // Skip triangles with any vertex outside the image bounds.
      if (!inRange(t.a.x, 0, imgW) || !inRange(t.a.y, 0, imgH)) continue;
      if (!inRange(t.b.x, 0, imgW) || !inRange(t.b.y, 0, imgH)) continue;
      if (!inRange(t.c.x, 0, imgW) || !inRange(t.c.y, 0, imgH)) continue;
      vertex(t.a.x, t.a.y, getX(t.a.x, t.a.y, layer), getY(t.a.x, t.a.y, layer));
      vertex(t.b.x, t.b.y, getX(t.b.x, t.b.y, layer), getY(t.b.x, t.b.y, layer));
      vertex(t.c.x, t.c.y, getX(t.c.x, t.c.y, layer), getY(t.c.x, t.c.y, layer));
    }
    endShape();
  }
}
// Anchors a Voronoi site at (x, y) whose stored texture coordinates in both
// images are the point itself, so the image corners stay fixed during the morph.
void pinCorner(int x, int y) {
  voronoi.addPoint(new Vec2D(x, y));
  meta[x][y][0] = x;
  meta[x][y][1] = y;
  meta[x][y][2] = x;
  meta[x][y][3] = y;
}
// True when v lies in the half-open interval [lo, hi).
boolean inRange(float lo_v, float lo, float hi) {
  return lo <= lo_v && lo_v < hi;
}
// Stored texture x-coordinate for the site cell containing (x, y);
// index selects image 0 or image 1 (slots 0 and 2 of meta).
float getX(float x, float y, int index) {
  int col = toIndexX(x);
  int row = toIndexY(y);
  return meta[col][row][index * 2];
}
// Stored texture y-coordinate for the site cell containing (x, y);
// index selects image 0 or image 1 (slots 1 and 3 of meta).
float getY(float x, float y, int index) {
  int col = toIndexX(x);
  int row = toIndexY(y);
  return meta[col][row][index * 2 + 1];
}
// Clamps an x position into [0, 479] and truncates to a meta[][] column index.
int toIndexX(float x) {
  return (int)constrain(x, 0, 479);
}
// Clamps a y position into [0, 639] and truncates to a meta[][] row index.
int toIndexY(float y) {
  return (int)constrain(y, 0, 639);
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment