Skip to content

Instantly share code, notes, and snippets.

@beligum
Last active February 5, 2018 10:26
Show Gist options
  • Save beligum/cd21b2657a91369ac78dd716b685a7cb to your computer and use it in GitHub Desktop.
Save beligum/cd21b2657a91369ac78dd716b685a7cb to your computer and use it in GitHub Desktop.
javacpp preset for the Allied Vision Vimba API
#!/bin/bash
# This file is meant to be included by the parent cppbuild.sh script
if [[ -z "$PLATFORM" ]]; then
    pushd ..
    bash cppbuild.sh "$@" vimba
    popd
    exit
fi

VIMBA_VERSION=2.1.3

# TODO: make the 'Linux' below dynamic
download https://www.alliedvision.com/fileadmin/content/software/software/Vimba/Vimba_v${VIMBA_VERSION}_Linux.tgz vimba-$VIMBA_VERSION.tar.gz

mkdir -p "$PLATFORM$EXTENSION"
cd "$PLATFORM$EXTENSION"
INSTALL_PATH=$(pwd)
INCLUDE_PATH="$INSTALL_PATH/include"
LIB_PATH="$INSTALL_PATH/lib"

echo "Decompressing archives..."
tar --totals -xzf ../vimba-$VIMBA_VERSION.tar.gz

case $PLATFORM in
    # Note: only 64-bit Linux is supported — the libraries copied below come from
    # the x86_64bit directory, so plain linux-x86 must NOT match this branch.
    linux-x86_64)
        cd Vimba_*/VimbaCPP/Build/Make
        make -j $MAKEJ
        # Keep the paths quoted: an install path containing whitespace would
        # otherwise make these 'rm -rf' calls delete unintended files.
        rm -rf "$INCLUDE_PATH"
        mkdir -p "$INCLUDE_PATH/VimbaC"
        cp -r ../../../VimbaC/Include "$INCLUDE_PATH/VimbaC/"
        mkdir -p "$INCLUDE_PATH/VimbaCPP"
        cp -r ../../../VimbaCPP/Include "$INCLUDE_PATH/VimbaCPP/"
        rm -rf "$LIB_PATH"
        mkdir -p "$LIB_PATH"
        cp -r dynamic/x86_64bit/* "$LIB_PATH/"
        ;;
    *)
        echo "Error: Platform \"$PLATFORM\" is not supported"
        # Fail the build instead of silently producing an empty install dir
        exit 1
        ;;
esac
package org.bytedeco.javacpp.presets;
import org.bytedeco.javacpp.annotation.Platform;
import org.bytedeco.javacpp.annotation.Properties;
import org.bytedeco.javacpp.tools.Info;
import org.bytedeco.javacpp.tools.InfoMap;
import org.bytedeco.javacpp.tools.InfoMapper;
import static org.bytedeco.javacpp.presets.Vimba.USE_C_API;
/**
 * Wrapper for Allied Vision Vimba library (the C++ API).
 *
 * Depending on the {@link #USE_C_API} switch below, the generated bindings link
 * against either the plain C library (VimbaC) or the C++ library (VimbaCPP).
 *
 * @author Bram Biesbrouck
 */
@Properties(target = "org.bytedeco.javacpp.Vimba", value = {
    @Platform(value = { "linux-x86_64" },
              // The C++ API needs C++11 (std::shared_ptr etc.); the plain C API doesn't
              compiler = { USE_C_API ? "" : "cpp11" },
              // NOTE(review): these defines presumably make VimbaCPP use std::shared_ptr via
              // UserSharedPointerDefines.h instead of its bundled shared-pointer class — confirm
              // against the Vimba headers before changing them
              define = { "USER_SHARED_POINTER", "SHARED_PTR_NAMESPACE std" },
              // Headers to parse; the commented-out ones are kept as a record of what was
              // tried/deferred while getting the C++ API to map cleanly
              include = {
                  "<VimbaC/Include/VmbCommonTypes.h>",
                  "<VimbaC/Include/VimbaC.h>",
                  "<VimbaCPP/Include/VimbaCPPCommon.h>",
                  //"<VimbaCPP/Include/SharedPointer.h>",
                  //"<VimbaCPP/Include/SharedPointer_impl.h>",
                  //"<VimbaCPP/Include/SharedPointerDefines.h>",
                  "<VimbaCPP/Include/UserSharedPointerDefines.h>",
                  "<VimbaCPP/Include/Mutex.h>",
                  "<VimbaCPP/Include/BasicLockable.h>",
                  // "<VimbaCPP/Include/AncillaryData.h>",
                  // "<VimbaCPP/Include/ICameraFactory.h>",
                  // "<VimbaCPP/Include/IFrameObserver.h>",
                  // "<VimbaCPP/Include/EnumEntry.h>",
                  // "<VimbaCPP/Include/ICameraListObserver.h>",
                  // "<VimbaCPP/Include/IInterfaceListObserver.h>",
                  // "<VimbaCPP/Include/IFeatureObserver.h>",
                  // "<VimbaCPP/Include/Interface.h>",
                  //
                  // //"<VimbaCPP/Include/FileLogger.h>",
                  // //"<VimbaCPP/Include/LoggerDefines.h>",
                  // //"<VimbaCPP/Include/UserLoggerDefines.h>",
                  // "<VimbaCPP/Include/Frame.h>",
                  // "<VimbaCPP/Include/IRegisterDevice.h>",
                  // "<VimbaCPP/Include/Camera.h>",
                  // "<VimbaCPP/Include/Feature.h>",
                  // "<VimbaCPP/Include/FeatureContainer.h>",
                  //
                  // "<VimbaCPP/Include/VimbaSystem.h>",
                  // //"<VimbaCPP/Include/VimbaCPP.h>",
              },
              link = { USE_C_API ? "VimbaC" : "VimbaCPP" }
    )
})
public class Vimba implements InfoMapper
{
    // Switch between wrapping only the C API (true) and the C++ API (false);
    // referenced from the @Platform annotation above via static import
    public static final boolean USE_C_API = false;

    /**
     * Tweaks how the javacpp parser maps the headers declared above.
     */
    public void map(InfoMap infoMap)
    {
        // Strip the DLL export/import macros so the parser sees plain declarations
        infoMap.put(new Info("IMEXPORT").cppTypes().annotations());
        infoMap.put(new Info("IMEXPORTC").cppTypes().annotations());
        // Parse as a non-Windows, non-32-bit, C++ build
        infoMap.put(new Info("defined (_WIN32)").define(false));
        infoMap.put(new Info("__cplusplus").define());
        infoMap.put(new Info("__i386__").define(false));
        // Logging support is not wrapped
        infoMap.put(new Info("Logger").skip());

        // Shared-pointer mappings kept for reference in case the full C++ object API
        // (see the commented-out includes above) gets enabled again
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::Interface>").annotations("@SharedPtr").pointerTypes("Interface"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::Camera>").annotations("@SharedPtr").pointerTypes("Camera"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::Feature>").annotations("@SharedPtr").pointerTypes("Feature"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::FeatureContainer>").annotations("@SharedPtr").pointerTypes("FeatureContainer"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::IFeatureObserver>").annotations("@SharedPtr").pointerTypes("IFeatureObserver"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::Frame>").annotations("@SharedPtr").pointerTypes("Frame"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::FrameHandler>").annotations("@SharedPtr").pointerTypes("FrameHandler"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::IFrameObserver>").annotations("@SharedPtr").pointerTypes("IFrameObserver"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::AncillaryData>").annotations("@SharedPtr").pointerTypes("AncillaryData"));
        // infoMap.put(new Info("std::shared_ptr<const AVT::VmbAPI::AncillaryData>").annotations("@SharedPtr").pointerTypes("ConstAncillaryData"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::ICameraFactory>").annotations("@SharedPtr").pointerTypes("ICameraFactory"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::ICameraListObserver>").annotations("@SharedPtr").pointerTypes("ICameraListObserver"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::IInterfaceListObserver>").annotations("@SharedPtr").pointerTypes("IInterfaceListObserver"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::Mutex>").annotations("@SharedPtr").pointerTypes("Mutex"));
        // infoMap.put(new Info("std::shared_ptr<AVT::VmbAPI::BasicLockable>").annotations("@SharedPtr").pointerTypes("BasicLockable"));
    }
}
package com.beligum.nevelland.vision.vimba;
import com.beligum.base.utils.Logger;
import com.beligum.nevelland.vision.CameraListener;
import com.beligum.nevelland.vision.CameraThread;
import com.beligum.nevelland.vision.exceptions.CameraException;
import org.bytedeco.javacpp.*;
import java.io.IOException;
import static org.bytedeco.javacpp.Vimba.*;
import static org.bytedeco.javacpp.opencv_core.CV_8UC1;
import static org.bytedeco.javacpp.opencv_core.CV_8UC3;
/**
 * Capture thread for a single Allied Vision camera driven through the Vimba C API bindings.
 *
 * Lifecycle: the constructor boots Vimba and resolves the camera id; {@link #run()} opens and
 * configures the camera, starts capturing (frames are delivered asynchronously through
 * {@link FrameCallback}) and then parks until {@link #shutdownAndWait()} is called, after which
 * everything is torn down in reverse order and Vimba is shut down again.
 */
public class VimbaCameraThread extends Thread implements CameraThread
{
    //-----CONSTANTS-----
    // When true, frames are delivered asynchronously via FrameCallback; the synchronous
    // VmbCaptureFrameWait() variant that used to live (commented out) in run() was removed.
    private static final boolean ASYNC_MODE = true;

    //-----VARIABLES-----
    // Parks the capture thread in run() until shutdownAndWait() wakes it up
    private Object shutdownMonitor = new Object();
    // Ensures a running frame callback is never interleaved with VmbFrameRevokeAll() during shutdown
    private Object frameProcessorLock = new Object();
    private boolean started;
    private CameraListener listener;
    private String cameraId;
    private Vimba.VmbHandle_t vimbaHandle;
    private Vimba.VmbHandle_t cameraHandlePtr;
    private CameraException lastError;
    private boolean acquiring;
    private boolean capturing;

    //-----CONSTRUCTORS-----
    /**
     * Boots the Vimba system and looks up the camera to use.
     *
     * @param cameraSerialNumber serial number of the camera to select, or null to pick the first camera found
     * @param listener notified of camera lifecycle events, frames and errors (may be null)
     * @throws IOException when the Vimba system can't be booted or no matching camera is found
     */
    public VimbaCameraThread(String cameraSerialNumber, CameraListener listener) throws IOException
    {
        super();

        this.vimbaHandle = gVimbaHandle();
        this.listener = listener;
        this.cameraId = this.findCamera(cameraSerialNumber);
        this.started = false;
        this.lastError = null;
        this.acquiring = false;
        this.capturing = false;
    }

    //-----PUBLIC METHODS-----
    /**
     * Opens and configures the camera, starts the capture engine, then parks this thread until
     * shutdown is requested; all teardown happens in the finally block so it also runs on errors.
     *
     * Note: deliberately NOT 'synchronized' (the original was): Thread.join() synchronizes on this
     * Thread instance, so holding its monitor for the whole run would block any timed join().
     */
    @Override
    public void run()
    {
        int err = -1;
        Vimba.VmbFrame_t frame = null;

        try {
            this.started = true;
            if (this.listener != null) {
                this.listener.onCameraStarted();
            }

            // Open camera (note: we need to do this in the same thread as the rest of the IO or things get weird)
            this.cameraHandlePtr = new Vimba.VmbHandle_t();
            if ((err = Vimba.VmbCameraOpen(this.cameraId, VmbAccessModeFull, cameraHandlePtr)) != VmbErrorSuccess) {
                throw new CameraException("Error while opening Vimba camera " + this.cameraId + "", err);
            }

            // Set the GeV packet size to the highest possible value
            // (In this example we do not test whether this cam actually is a GigE cam)
            boolean[] bIsCommandDone = new boolean[1];
            if (VmbErrorSuccess == Vimba.VmbFeatureCommandRun(cameraHandlePtr, "GVSPAdjustPacketSize")) {
                do {
                    if (VmbErrorSuccess != Vimba.VmbFeatureCommandIsDone(cameraHandlePtr, "GVSPAdjustPacketSize", bIsCommandDone)) {
                        break;
                    }
                } while (!bIsCommandDone[0]);
            }

            // Activate continuous auto exposure
            //See https://www.alliedvision.com/fileadmin/content/documents/products/cameras/various/features/GigE_Features_Reference.pdf
            if ((err = VmbFeatureEnumSet(cameraHandlePtr, "ExposureAuto", "Continuous")) != VmbErrorSuccess) {
                throw new CameraException("Error while setting auto exposure", err);
            }

            // Set pixel format. For the sake of simplicity we only support Mono and RGB in this example.
            if ((err = VmbFeatureEnumSet(cameraHandlePtr, "PixelFormat", "RGB8Packed")) != VmbErrorSuccess) {
                // Fall back to Mono
                err = VmbFeatureEnumSet(cameraHandlePtr, "PixelFormat", "Mono8");
            }
            if (err != VmbErrorSuccess) {
                throw new CameraException("Error while setting the pixel format", err);
            }

            // Read back pixel format; informational only, so a failure is simply not logged
            // (this also avoids dereferencing an unset pointer when the call fails)
            BytePointer pPixelFormat = new BytePointer();
            if (VmbErrorSuccess == VmbFeatureEnumGet(cameraHandlePtr, "PixelFormat", pPixelFormat)) {
                Logger.info("Pixel format: " + pPixelFormat.getString());
            }

            // Evaluate frame size
            // The size of one frame
            LongPointer payloadSizePtr = new LongPointer(1);
            if ((err = VmbFeatureIntGet(cameraHandlePtr, "PayloadSize", payloadSizePtr)) != VmbErrorSuccess) {
                throw new CameraException("Error while getting the payload size", err);
            }
            long payloadSize = payloadSizePtr.get();
            Logger.info("Frame size: " + payloadSize + " bytes");

            // Allocate memory for the frame buffer(s)
            frame = new Vimba.VmbFrame_t();
            frame.buffer(new BytePointer(payloadSize));
            frame.bufferSize((int) payloadSize);

            // Announce frames to the API that may be queued for frame capturing later.
            // Allows some preparation for frames like DMA preparation depending on the
            // transport layer. The order in which the frames are announced is not taken into
            // consideration by the API.
            if ((err = VmbFrameAnnounce(cameraHandlePtr, frame, frame.sizeof())) != VmbErrorSuccess) {
                throw new CameraException("Error while announcing frame", err);
            }

            // Prepare the API for incoming frames.
            if ((err = VmbCaptureStart(cameraHandlePtr)) != VmbErrorSuccess) {
                throw new CameraException("Error while starting capture engine", err);
            }
            this.capturing = true;

            // Queue frames that may be filled during frame capturing.
            // The given frame is put into a queue that will be filled sequentially. The order in
            // which the frames are filled is determined by the order in which they are
            // queued. If the frame was announced with VmbFrameAnnounce() before, the
            // application has to ensure that the frame is also revoked by calling
            // VmbFrameRevoke() or VmbFrameRevokeAll() when cleaning up.
            VmbFrameCallback callback = ASYNC_MODE ? new FrameCallback(cameraHandlePtr, this.listener) : null;
            if ((err = VmbCaptureFrameQueue(cameraHandlePtr, frame, callback)) != VmbErrorSuccess) {
                throw new CameraException("Error while queueing frame", err);
            }

            // Start acquiring images.
            // Note: Triggering via the AcquisitionStart command (see chapter Image Acquisition) is supported by all
            // cameras. However, it is less precise than triggering with an external device connected to the camera’s
            // I/O port.
            if ((err = VmbFeatureCommandRun(cameraHandlePtr, "AcquisitionStart")) != VmbErrorSuccess) {
                throw new CameraException("Error while starting acquisition", err);
            }
            this.acquiring = true;

            // Park this thread while FrameCallback does the work; shutdownAndWait() flips
            // 'started' and notifies the monitor to wake us up
            while (this.started) {
                synchronized (this.shutdownMonitor) {
                    try {
                        this.shutdownMonitor.wait();
                    }
                    catch (InterruptedException e) {
                        // Don't swallow the interrupt (and don't busy-spin on it either):
                        // restore the flag and treat it as a shutdown request so the
                        // cleanup in the finally block below still runs
                        Thread.currentThread().interrupt();
                        this.started = false;
                    }
                }
            }
        }
        catch (CameraException e) {
            // Remembered here and reported to the listener in the finally block
            this.lastError = e;
        }
        finally {
            if (this.lastError != null && this.listener != null) {
                this.listener.onCameraError(this.lastError);
            }

            // Tear down in the reverse order of the setup above; failures are logged
            // but never abort the rest of the cleanup

            if (this.acquiring) {
                // Stop acquiring images.
                if ((err = VmbFeatureCommandRun(cameraHandlePtr, "AcquisitionStop")) != VmbErrorSuccess) {
                    //let's try to continue shutting down, not
                    Logger.error("Error while stopping acquisition" + "; " + CameraException.vimbaDecodeError(err));
                }
                this.acquiring = false;
            }

            if (this.capturing) {
                // Stop the API from being able to receive frames.
                // Consequences of VmbCaptureEnd(): - The frame callback will not be called anymore
                if ((err = VmbCaptureEnd(cameraHandlePtr)) != VmbErrorSuccess) {
                    Logger.error("Error while ending capture" + "; " + CameraException.vimbaDecodeError(err));
                }
                this.capturing = false;
            }

            // Flush the capture queue.
            // Control of all the currently queued frames will be returned to the user, leaving
            // no frames in the capture queue. After this call, no frame notification will occur
            // until frames are queued again.
            if ((err = VmbCaptureQueueFlush(cameraHandlePtr)) != VmbErrorSuccess) {
                Logger.error("Error while flushing queue" + "; " + CameraException.vimbaDecodeError(err));
            }

            // Ensure that revoking is not interrupted by a dangling frame callback
            synchronized (this.frameProcessorLock) {
                // Revoke all frames assigned to a certain camera.
                // The referenced frame (or all) is removed from the pool of frames for capturing images.
                if ((err = VmbFrameRevokeAll(cameraHandlePtr)) != VmbErrorSuccess) {
                    Logger.error("Error while revoking frame" + "; " + CameraException.vimbaDecodeError(err));
                }
            }

            // Close the specified camera.
            // Depending on the access mode this camera was opened with, events are killed,
            // callbacks are unregistered, and camera control is released.
            if ((err = Vimba.VmbCameraClose(cameraHandlePtr)) != VmbErrorSuccess) {
                Logger.error("Error while closing Vimba camera " + this.cameraId + "; " + CameraException.vimbaDecodeError(err));
            }

            // Release the native frame struct (its buffer was allocated above)
            if (frame != null) {
                frame.deallocate();
                frame = null;
            }

            // Counterpart of the VmbStartup() done in findCamera()
            Vimba.VmbShutdown();

            if (this.listener != null) {
                this.listener.onCameraStopped();
            }

            Logger.info("All cleaned up");
        }
    }

    /**
     * Requests the capture thread to stop and blocks until it has fully cleaned up.
     */
    @Override
    public void shutdownAndWait()
    {
        Logger.info("Capture thread shutdown requested");
        synchronized (this.shutdownMonitor) {
            this.started = false;
            this.shutdownMonitor.notifyAll();
        }

        try {
            Logger.info("Waiting for capture thread to end...");
            this.join();
        }
        catch (InterruptedException e) {
            // Restore the interrupt flag so callers higher up the stack can still detect it
            Thread.currentThread().interrupt();
        }
        finally {
            Logger.info("Capture thread ended");
        }
    }

    //-----PROTECTED METHODS-----

    //-----PRIVATE METHODS-----
    /**
     * Boots the Vimba system, runs GigE discovery when a GigE transport layer is present, and
     * returns the id of the camera matching the supplied serial number (or the first discovered
     * camera when the serial number is null).
     *
     * On success the Vimba system stays booted (run() shuts it down); on failure it is shut
     * down here so the native session doesn't leak.
     *
     * @throws CameraException when booting, discovery or listing fails, or no camera matches
     */
    private String findCamera(String cameraSerialNumber) throws CameraException
    {
        int err = -1;

        if ((err = Vimba.VmbStartup()) != VmbErrorSuccess) {
            throw new CameraException("Error while booting Vimba camera system", err);
        }

        // From here on Vimba is booted: shut it down again on any failure below
        try {
            Vimba.VmbVersionInfo_t versionInfo = new Vimba.VmbVersionInfo_t();
            if ((err = Vimba.VmbVersionQuery(versionInfo, versionInfo.sizeof())) != VmbErrorSuccess) {
                throw new CameraException("Error while getting Vimba version info", err);
            }
            Logger.info("Vimba version: " + versionInfo.major() + "." + versionInfo.minor() + "." + versionInfo.patch());

            // Is Vimba connected to a GigE transport layer?
            boolean[] isGigE = new boolean[1];
            if ((err = Vimba.VmbFeatureBoolGet(vimbaHandle, "GeVTLIsPresent", isGigE)) != VmbErrorSuccess) {
                throw new CameraException("Error while checking GeVTLIsPresent", err);
            }
            if (isGigE[0]) {
                Logger.info("Found GigE transport layer; issuing network discovery");

                // Set the waiting duration for discovery packets to return. If not set the default of 150 ms is used.
                if ((err = Vimba.VmbFeatureIntSet(vimbaHandle, "GeVDiscoveryAllDuration", 250)) != VmbErrorSuccess) {
                    throw new CameraException("Error while setting GeVDiscoveryAllDuration", err);
                }
                // Send discovery packets to GigE cameras and wait 250 ms until they are answered
                if ((err = Vimba.VmbFeatureCommandRun(vimbaHandle, "GeVDiscoveryAllOnce")) != VmbErrorSuccess) {
                    throw new CameraException("Error while running GeVDiscoveryAllOnce", err);
                }
            }

            // Get the amount of known cameras
            IntPointer numFoundPtr = new IntPointer(1);
            if ((err = Vimba.VmbCamerasList(null, 0, numFoundPtr, 0)) != VmbErrorSuccess) {
                throw new CameraException("Error while getting Vimba cameras", err);
            }
            int numFound = numFoundPtr.get();
            Logger.info("Found " + numFound + " cameras");

            // Query all static details of all known cameras
            // Without having to open the cameras
            Vimba.VmbCameraInfo_t cameraInfoPtr = new Vimba.VmbCameraInfo_t(numFound);
            IntPointer foundCountPtr = new IntPointer(1);
            if ((err = Vimba.VmbCamerasList(cameraInfoPtr, numFound, foundCountPtr, cameraInfoPtr.sizeof())) != VmbErrorSuccess) {
                throw new CameraException("Error while getting Vimba cameras", err);
            }
            int foundCount = foundCountPtr.get();
            // If a new camera was connected since we queried
            // the amount of cameras, we can ignore that one
            if (foundCount < numFound) {
                numFound = foundCount;
            }

            String cameraId = null;
            for (int i = 0; i < numFound; i++) {
                if (cameraSerialNumber == null) {
                    // No serial requested: take the first camera in the list
                    cameraId = cameraInfoPtr.position(i).cameraIdString().getString();
                }
                else if (cameraSerialNumber.equals(cameraInfoPtr.position(i).serialString().getString())) {
                    cameraId = cameraInfoPtr.position(i).cameraIdString().getString();
                }

                if (cameraId != null) {
                    Logger.info(String.format("Found selected camera:\n/// Camera Name: %s\n/// Model Name: %s\n/// Camera ID: %s\n/// Serial Number: %s\n/// @ Interface ID: %s\n\n\n",
                                              cameraInfoPtr.position(i).cameraName().getString(),
                                              cameraInfoPtr.position(i).modelName().getString(),
                                              cameraInfoPtr.position(i).cameraIdString().getString(),
                                              cameraInfoPtr.position(i).serialString().getString(),
                                              cameraInfoPtr.position(i).interfaceIdString().getString()));
                    break;
                }
            }

            if (cameraId == null) {
                throw new CameraException("Couldn't find the selected (or available) camera, can't continue");
            }

            return cameraId;
        }
        catch (CameraException e) {
            // Undo the successful VmbStartup() before propagating, otherwise the
            // native Vimba session leaks (run() never starts on a failed constructor)
            Vimba.VmbShutdown();
            throw e;
        }
    }

    /**
     * Asynchronous per-frame callback invoked by the Vimba capture engine: converts a completed
     * frame into an OpenCV Mat, hands it to the listener and re-queues the frame for reuse.
     */
    public class FrameCallback extends VmbFrameCallback
    {
        private VmbHandle_t cameraHandlePtr;
        private CameraListener listener;
        // Initialized at -1 so the first delivered frame gets number 0
        private long frameCounter;

        public FrameCallback(VmbHandle_t cameraHandlePtr, CameraListener listener)
        {
            super();

            this.cameraHandlePtr = cameraHandlePtr;
            this.listener = listener;
            this.frameCounter = -1;
        }

        public void call(VmbHandle_t cameraHandle, VmbFrame_t pFrame)
        {
            // ensure that a frame callback is not interrupted by a VmbFrameRevoke during shutdown
            synchronized (frameProcessorLock) {
                if (started) {
                    //initialized at -1, this means we'll start at 0
                    this.frameCounter++;

                    if (this.listener != null) {
                        try {
                            if (pFrame.receiveStatus() == VmbFrameStatusComplete) {
                                // We only support Mono and RGB for now
                                int cvColor = -1;
                                if (pFrame.pixelFormat() == VmbPixelFormatMono8) {
                                    cvColor = CV_8UC1;
                                }
                                else if (pFrame.pixelFormat() == VmbPixelFormatRgb8) {
                                    cvColor = CV_8UC3;
                                }
                                else {
                                    throw new CameraException("Encountered unsupported color; " + pFrame.pixelFormat());
                                }

                                //Note: we need to clone the Mat to make the callback have stable info
                                this.listener.onCameraFrame(this.frameCounter, new opencv_core.Mat(pFrame.height(), pFrame.width(), cvColor, pFrame.buffer(), opencv_core.Mat.AUTO_STEP).clone());
                            }
                            else {
                                throw new CameraException("Error receiving frame; status " + pFrame.receiveStatus());
                            }
                        }
                        catch (CameraException e) {
                            this.listener.onCameraError(e);
                        }
                    }

                    // requeue the frame so it can be filled again; a failure here means no more
                    // frames will ever arrive, so at least leave a trace in the logs
                    int err;
                    if ((err = VmbCaptureFrameQueue(this.cameraHandlePtr, pFrame, this)) != VmbErrorSuccess) {
                        Logger.error("Error while re-queueing frame" + "; " + CameraException.vimbaDecodeError(err));
                    }
                }
            }
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment