Created
March 28, 2019 16:19
-
-
Save kkirby/1e350fe573c943ffba942ba03dc9b604 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package cc.openframeworks; | |
import java.lang.reflect.Method; | |
import java.util.HashMap; | |
import java.util.Map; | |
import java.util.List; | |
import android.app.Activity; | |
import android.view.Surface; | |
import android.content.Context; | |
import android.graphics.ImageFormat; | |
import android.graphics.SurfaceTexture; | |
import android.hardware.Camera; | |
import android.hardware.Camera.Size; | |
import android.os.Build; | |
import android.util.Log; | |
import android.view.WindowManager; | |
import android.view.Display; | |
import android.view.OrientationEventListener; | |
import java.util.concurrent.Semaphore; | |
/**
 * Runs a {@link Runnable} while holding a {@link Semaphore} permit.
 * Any RuntimeException thrown by the runnable propagates to the caller
 * after the permit has been released.
 */
class MutexBlock {
	/**
	 * Acquires a permit from {@code s}, runs {@code r}, then releases the permit.
	 * If the acquiring thread is interrupted the runnable is NOT executed and the
	 * thread's interrupt status is restored (fix: the original swallowed the
	 * interrupt, only printing the stack trace).
	 */
	public static void run(Semaphore s, Runnable r){
		try {
			s.acquire();
		}
		catch(InterruptedException interruptedException){
			// Restore the interrupt flag so callers/executors can observe it.
			Thread.currentThread().interrupt();
			interruptedException.printStackTrace();
			return;
		}
		try {
			// RuntimeExceptions propagate; finally guarantees the permit is
			// released first, matching the original store-and-rethrow behavior.
			r.run();
		}
		finally {
			s.release();
		}
	}
}
/**
 * Java-side camera grabber for openFrameworks on Android (Camera v1 API).
 *
 * Owns the android.hardware.Camera, optionally renders it into a GL
 * SurfaceTexture, and hands NV21 pixel buffers and texture frames back to
 * native code through the newPixelFrame/newTextureFrame JNI callbacks,
 * keyed by {@code instanceID}.
 */
public class OFAndroidVideoGrabber extends OFAndroidObject implements Runnable {
	/** Camera configuration pushed from native code via setConfig(). */
	public static class Config {
		boolean usePixelData; // deliver NV21 preview buffers to native code
		int width;            // requested preview width
		int height;           // requested preview height
		int textureID;        // GL texture id for SurfaceTexture rendering (-1 = none)
		int deviceID = -1;    // camera device id (-1 = resolve a default in setConfig)
	}
	// NOTE(review): shared by ALL grabber instances and mutated in
	// setCameraDisplayOrientation — racy if more than one grabber runs; confirm.
	static android.hardware.Camera.CameraInfo info = null;
	// Key native code uses to route JNI callbacks back to the C++ instance.
	private int instanceID;
	// Guards camera/state across the GL thread, the preview thread and camera callbacks.
	Semaphore mutex = new Semaphore(1);
	public Camera camera;
	Config config = new Config();
	// Runs run(): installs the preview callbacks and starts the preview.
	private Thread thread;
	private boolean initialized = false;
	private boolean previewStarted = false;
	SurfaceTexture surfaceTexture;
	// Flipped to true once camera parameters were applied successfully.
	public boolean ready = false;
	// Optional hook fired right after ready becomes true.
	public Runnable onReadyCb;
	// Double-buffered preview storage: the camera fills one buffer while
	// native code reads the other; the flip-flop swaps roles each frame.
	byte callbackBufferA[];
	byte callbackBufferB[];
	boolean callbackBufferFlipFlop = true;
	// SurfaceTexture transform matrix; native code holds a reference to this array.
	float[] textureMatrix = new float[16];
	boolean isTextureFrameNew;
	boolean isPixelFrameNew;
	public OFAndroidVideoGrabber(int instanceID){
		this.instanceID = instanceID;
	}
	// - Get
	/** Returns the native-side instance id this grabber reports frames under. */
	public int getId(){
		return instanceID;
	}
	public Camera getCamera(){
		return camera;
	}
	/** Returns the live 4x4 texture transform array (same instance every call). */
	public float[] getTextureMatrix(){
		return textureMatrix;
	}
	// - Set
	/**
	 * Stores the requested configuration; if the grabber is already running and
	 * anything changed, restarts it with the new settings.
	 */
	public void setConfig(int deviceID, int width, int height, int textureID, boolean usePixelData){
		if(deviceID == -1){
			// Default to the camera facing "0" (CAMERA_FACING_BACK on Android —
			// presumably the back camera; confirm against Camera.CameraInfo docs).
			deviceID = getCameraFacing(0);
		}
		boolean reinit = (
			config.deviceID != deviceID || config.width != width || config.height != height ||
			config.textureID != textureID || config.usePixelData != usePixelData
		);
		config.deviceID = deviceID;
		config.width = width;
		config.height = height;
		config.textureID = textureID;
		config.usePixelData = usePixelData;
		if(reinit && initialized){
			stopGrabber();
			initGrabber();
		}
	}
	// - Lifecycle
	/** Refreshes textureMatrix from the SurfaceTexture, if texture rendering is available. */
	public void updateTextureMatrix(){
		if(!supportsTextureRendering()){
			return;
		}
		if(surfaceTexture != null){
			try {
				surfaceTexture.getTransformMatrix(textureMatrix);
			}
			catch(Exception e){
				e.printStackTrace();
			}
		}
	}
	/**
	 * Opens the camera, applies focus/size/format parameters, allocates the
	 * double preview buffers and starts the preview thread. Runs under the mutex.
	 */
	public void initGrabber(){
		MutexBlock.run(mutex,() -> {
			if(camera != null){
				// NOTE(review): releases a possibly-previewing camera without
				// stopPreview()/thread join — confirm this path is only hit
				// after stopGrabber().
				initialized = false;
				camera.release();
			}
			try {
				if(supportsMultipleCameras()){
					camera = Camera.open(config.deviceID);
				}
				else {
					camera = Camera.open();
				}
			}
			catch(Exception e){
				Log.e("OF","Error trying to open specific camera, trying default",e);
				camera = Camera.open();
			}
			// Texture path: only when a GL texture id was configured AND the OS supports it.
			if(config.textureID != -1 && supportsTextureRendering()){
				try {
					surfaceTexture = new SurfaceTexture(config.textureID);
					camera.setPreviewTexture(surfaceTexture);
				}
				catch(Exception e1){
					Log.e("OF","Error initializing gl surface",e1);
				}
			}
			Camera.Parameters cameraParameters = camera.getParameters();
			printList("Grabber supported sizes",cameraParameters.getSupportedPreviewSizes());
			printList("Grabber supported formats",cameraParameters.getSupportedPreviewFormats());
			// Prefer continuous focus modes, falling back to one-shot auto focus.
			java.util.List<String> focusModes = cameraParameters.getSupportedFocusModes();
			if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)){
				cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
			}
			else if(focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)){
				cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
			}
			else if(focusModes.contains(Camera.Parameters.FOCUS_MODE_AUTO)){
				cameraParameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
			}
			Log.i("OF", "Grabber default format: " + cameraParameters.getPreviewFormat());
			Log.i("OF", "Grabber default preview size: " + cameraParameters.getPreviewSize().width + "," + cameraParameters.getPreviewSize().height);
			cameraParameters.setPreviewSize(config.width, config.height);
			cameraParameters.setPreviewFormat(ImageFormat.NV21);
			Camera.Size cameraPreviewSize = cameraParameters.getPreviewSize();
			if(
				cameraPreviewSize.width != config.width ||
				cameraPreviewSize.height != config.height
			){
				Log.w("OF","camera size different than asked for, resizing (this can slow the app)");
			}
			try {
				camera.setParameters(cameraParameters);
				ready = true;
				if(onReadyCb != null){
					onReadyCb.run();
				}
			}
			catch(Exception e){
				// NOTE(review): message has a typo ("couldn") — left as-is since
				// this is a runtime string.
				Log.e("OF","couldn init camera", e);
			}
			if(config.usePixelData){
				// NV21 uses 12 bits per pixel, hence width*height*12/8 bytes.
				int byteSize = cameraPreviewSize.width * cameraPreviewSize.height * 12 / 8;
				callbackBufferA = new byte[byteSize];
				callbackBufferB = new byte[byteSize];
				camera.addCallbackBuffer(callbackBufferA);
			}
			thread = new Thread(this);
			thread.start();
			initialized = true;
		});
	}
	/** Stops the preview, joins the preview thread and releases camera resources. */
	public void stopGrabber(){
		MutexBlock.run(mutex,() -> {
			if(initialized){
				Log.i("OF", "stopping camera");
				camera.stopPreview();
				previewStarted = false;
				try {
					thread.join();
				}
				catch(InterruptedException e){
					Log.e("OF", "problem trying to close camera thread", e);
				}
				if(config.usePixelData){
					camera.setPreviewCallback(null);
				}
				if(supportsTextureRendering()){
					try {
						camera.setPreviewTexture(null);
					}
					catch(Exception e){}
					// NOTE(review): surfaceTexture can be null here when
					// config.textureID was -1 in initGrabber — potential NPE; confirm.
					surfaceTexture.setOnFrameAvailableListener(null);
					surfaceTexture.release();
				}
				initialized = false;
				camera.release();
			}
		});
	}
	public void close(){
		stopGrabber();
	}
	/**
	 * Called from the native render loop: fixes display orientation, consumes a
	 * pending texture frame (updateTexImage + matrix) and/or a pending pixel
	 * frame (swapping the double buffers), notifying native code of each.
	 */
	public void update(){
		MutexBlock.run(mutex,() -> {
			if(!initialized)return;
			setCameraDisplayOrientation((Activity)OFAndroid.getContext(), config.deviceID, camera);
			if(isTextureFrameNew && surfaceTexture != null){
				try {
					surfaceTexture.updateTexImage();
					surfaceTexture.getTransformMatrix(textureMatrix);
				}
				catch(Exception e){
					e.printStackTrace();
				}
				newTextureFrame(instanceID);
				isTextureFrameNew = false;
			}
			if(config.usePixelData && isPixelFrameNew){
				// Hand the filled buffer to native code and give the camera the other one.
				if(callbackBufferFlipFlop){
					newPixelFrame(instanceID,callbackBufferA);
					camera.addCallbackBuffer(callbackBufferB);
				}
				else {
					newPixelFrame(instanceID,callbackBufferB);
					camera.addCallbackBuffer(callbackBufferA);
				}
				callbackBufferFlipFlop = !callbackBufferFlipFlop;
				isPixelFrameNew = false;
			}
		});
	}
	/**
	 * Preview thread body: registers the buffer/texture frame listeners (which
	 * only raise "frame is new" flags consumed by update()) and starts the preview.
	 */
	public void run(){
		thread.setPriority(Thread.MAX_PRIORITY);
		if(config.usePixelData){
			try {
				camera.setPreviewCallbackWithBuffer(
					new Camera.PreviewCallback(){
						public void onPreviewFrame(final byte[] data, Camera camera){
							MutexBlock.run(mutex,() -> {
								isPixelFrameNew = true;
							});
						}
					}
				);
			}
			catch(SecurityException e){
				Log.e("OF","security exception, check permissions in your AndroidManifest to access to the camera",e);
			}
			catch(Exception e){
				Log.e("OF","error adding callback",e);
			}
		}
		if(supportsTextureRendering()){
			// NOTE(review): assumes surfaceTexture was created in initGrabber
			// (i.e. config.textureID != -1); NPE otherwise — confirm.
			surfaceTexture.setOnFrameAvailableListener(
				new SurfaceTexture.OnFrameAvailableListener(){
					public void onFrameAvailable(SurfaceTexture tex){
						MutexBlock.run(mutex,() -> {
							isTextureFrameNew = true;
						});
					}
				}
			);
		}
		setCameraDisplayOrientation((Activity)OFAndroid.getContext(), config.deviceID, camera);
		try {
			Log.d("OFAndroidVideoGrabber","thread - start preview");
			camera.startPreview();
			previewStarted = true;
		}
		catch(Exception e){
			Log.e("OF","error starting preview",e);
		}
		Log.d("OFAndroidVideoGrabber","thread - done");
	}
	// App lifecycle hooks: intentionally no-ops here — the native side drives
	// shutdown/restart through its own unloadGL/reloadGL listeners instead.
	@Override
	public void appStop(){
		if(initialized){
			//stopGrabber();
		}
	}
	@Override
	public void appPause(){
		//appStop();
	}
	@Override
	public void appResume(){
		//appStop();
	}
	// - Helper
	/** Number of cameras; pre-Gingerbread APIs can only report a single camera. */
	public int getNumCameras(){
		if(!supportsMultipleCameras()){
			return 1;
		}
		return Camera.getNumberOfCameras();
	}
	/**
	 * Returns the device id of the first camera with the given facing constant,
	 * or -1 when none matches (0 on legacy single-camera devices).
	 */
	public int getCameraFacing(int facing){
		if(!supportsMultipleCameras()){
			return 0;
		}
		int numCameras = Camera.getNumberOfCameras();
		Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
		for(int i = 0; i < numCameras; i++){
			Camera.getCameraInfo(i, cameraInfo);
			if(cameraInfo.facing == facing){
				return i;
			}
		}
		return -1;
	}
	/**
	 * Physical mounting orientation of the camera in degrees (0 pre-API 9).
	 * Pass -1 to query the currently configured device.
	 */
	public int getCameraOrientation(int _deviceID){
		if(android.os.Build.VERSION.SDK_INT < 9){
			return 0;
		}
		if(_deviceID == -1){
			_deviceID = config.deviceID;
		}
		Camera.CameraInfo info = null;
		info = new Camera.CameraInfo();
		Camera.getCameraInfo(_deviceID, info);
		return info.orientation;
	}
	/**
	 * Facing constant of the given camera (0 pre-API 9).
	 * Pass -1 to query the currently configured device.
	 */
	public int getFacingOfCamera(int _deviceID){
		if(android.os.Build.VERSION.SDK_INT < 9){
			return 0;
		}
		if(_deviceID == -1){
			_deviceID = config.deviceID;
		}
		Camera.CameraInfo info = new Camera.CameraInfo();
		Camera.getCameraInfo(_deviceID, info);
		return info.facing;
	}
	/** SurfaceTexture requires Honeycomb (API 11). */
	public static boolean supportsTextureRendering(){
		return Build.VERSION.SDK_INT >= 11;
	}
	/** Multi-camera APIs arrived with Gingerbread (API 9). */
	public static boolean supportsMultipleCameras(){
		return Build.VERSION.SDK_INT >= 9;
	}
	/**
	 * Applies the display-rotation compensation formula from the Android
	 * Camera.setDisplayOrientation documentation (front cameras are mirrored).
	 */
	public static void setCameraDisplayOrientation(Activity activity,int cameraId, android.hardware.Camera camera){
		if(info == null){
			info = new android.hardware.Camera.CameraInfo();
		}
		android.hardware.Camera.getCameraInfo(cameraId, info);
		int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
		int degrees = 0;
		if(rotation == Surface.ROTATION_0){
			degrees = 0;
		}
		else if(rotation == Surface.ROTATION_90){
			degrees = 90;
		}
		else if(rotation == Surface.ROTATION_180){
			degrees = 180;
		}
		else if(rotation == Surface.ROTATION_270){
			degrees = 270;
		}
		int result;
		if(info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT){
			// Front camera: compensate for the mirror, then invert.
			result = (360 - (info.orientation + degrees) % 360) % 360;
		}
		else {
			result = (info.orientation - degrees + 360) % 360;
		}
		camera.setDisplayOrientation(result);
	}
	/** Logs each list item, formatting Camera.Size entries as WxH. */
	public static <T> void printList(String label,List<T> list){
		Log.i("OF",label);
		for(T item : list){
			if(item instanceof Camera.Size){
				Camera.Size cameraSize = (Camera.Size)item;
				Log.i("OF",cameraSize.width + "x" + cameraSize.height);
			}
			else {
				Log.i("OF",item.toString());
			}
		}
	}
	// - Native
	// Implemented in ofxAndroidVideoGrabber.cpp; cameraId is the instanceID key.
	public static native int newTextureFrame(int cameraId);
	public static native int newPixelFrame(int cameraId,byte[] buffer);
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* ofxAndroidVideoGrabber.cpp | |
* | |
* Created on: 09/07/2010 | |
* Author: arturo, kkirbatski | |
*/ | |
#include "ofxAndroidVideoGrabber.h" | |
#include "ofxAndroidUtils.h" | |
#include <map> | |
#include <memory> | |
#include "ofAppRunner.h" | |
#include "ofUtils.h" | |
#include "ofVideoGrabber.h" | |
#include "ofMatrix4x4.h" | |
#include "ofVec3f.h" | |
#include "ofFbo.h" | |
#include <algorithm> | |
#include "ofNode.h" | |
#include "ofGraphics.h" | |
#include <exception> | |
// squareSize: not referenced anywhere in this file — candidate for removal (TODO confirm).
#define squareSize 1.0
// Stringizes its argument so inline GLSL source can be written without quotes.
#define GLSL_STRING(shader) \
#shader
void checkGlError(std::string op) { | |
int error; | |
while ((error = glGetError()) != GL_NO_ERROR) { | |
ofLogError("SurfaceTest") << op << ": glError " << error; | |
} | |
} | |
namespace InstanceContainer { | |
std::map<int,ofxAndroidVideoGrabber*> instances; | |
int instanceCounter = 0; | |
int putInstance(ofxAndroidVideoGrabber* instance){ | |
int instanceID = instanceCounter; | |
instances[instanceID] = instance; | |
instanceCounter += 1; | |
return instanceID; | |
} | |
ofxAndroidVideoGrabber* getInstance(int instanceID){ | |
auto search = instances.find(instanceID); | |
if(search != instances.end()){ | |
return search->second; | |
} | |
return nullptr; | |
} | |
void removeInstance(int instanceID){ | |
auto search = instances.find(instanceID); | |
if(search != instances.end()){ | |
instances.erase(search); | |
} | |
} | |
}; | |
// Resolves the OFAndroidVideoGrabber Java class through JNI.
// Returns NULL (after logging) when the class cannot be found; callers that
// dereference the result must check for that.
static jclass getJavaClass(){
	JNIEnv *env = ofGetJNIEnv();
	jclass javaClass = env->FindClass("cc/openframeworks/OFAndroidVideoGrabber");
	if(javaClass == NULL){
		ofLogError("ofxAndroidVideoGrabber") << "couldn't find OFAndroidVideoGrabber java class";
	}
	return javaClass;
}
// - ofxAndroidVideoGrabber | |
// Constructs the paired Java OFAndroidVideoGrabber, caches global JNI refs to
// it and to its texture-matrix float[16], and subscribes to the GL
// unload/reload events so the camera is torn down and re-created across pause.
ofxAndroidVideoGrabber::ofxAndroidVideoGrabber():
	javaVideoGrabber(nullptr), usePixelData(true), width(0), height(0),
	deviceID(-1), textureID(-1), javaCallbackBuffer(nullptr),
	callbackBufferPointer(nullptr), hasInitialized(false), appPaused(false),
	hasNewPixelFrame(false), javaTextureMatrix(nullptr),
	internalPixelFormat(OF_PIXELS_NV21){
	glEnable(GL_TEXTURE_EXTERNAL_OES);
	// Register so the JNI callbacks (newPixelFrame/newTextureFrame) can find us.
	javaInstanceID = InstanceContainer::putInstance(this);
	JNIEnv *env = ofGetJNIEnv();
	jclass javaClass = getJavaClass();
	if(!javaClass){
		throw std::runtime_error("Unable to get ofxAndroidVideoGrabber java class.");
	}
	jmethodID videoGrabberConstructor = env->GetMethodID(javaClass, "<init>", "(I)V");
	if(!videoGrabberConstructor){
		ofLogError("ofxAndroidVideoGrabber") << "initGrabber(): couldn't find OFAndroidVideoGrabber constructor";
		return;
	}
	// Promote to a global ref so the Java object outlives this JNI frame.
	// NOTE(review): the local ref returned by NewObject is never deleted —
	// harmless if this runs on a thread that returns to Java, but confirm.
	javaVideoGrabber = (jobject)env->NewGlobalRef(
		env->NewObject(javaClass, videoGrabberConstructor, javaInstanceID)
	);
	if(!javaVideoGrabber){
		ofLogError("ofxAndroidVideoGrabber") << "Unable to construct OFAndroidVideoGrabber";
		return;
	}
	// Keep a global ref to the Java float[16]; newTextureFrame() re-reads it every frame.
	javaTextureMatrix = (jfloatArray)env->NewGlobalRef(
		env->CallObjectMethod(
			javaVideoGrabber,
			env->GetMethodID(javaClass, "getTextureMatrix","()[F")
		)
	);
	ofLogVerbose("ofxAndroidVideoGrabber") << "Java Texture Matrix:" << (void*)javaTextureMatrix;
	ofAddListener(ofxAndroidEvents().unloadGL, this, &ofxAndroidVideoGrabber::onAppPause);
	ofAddListener(ofxAndroidEvents().reloadGL, this, &ofxAndroidVideoGrabber::onAppResume);
}
// Unregisters the instance (stopping JNI callback routing first), shuts the
// camera down, and releases the Java-side references taken in the constructor.
ofxAndroidVideoGrabber::~ofxAndroidVideoGrabber(){
	InstanceContainer::removeInstance(javaInstanceID);
	close();
	JNIEnv *env = ofGetJNIEnv();
	jclass javaClass = getJavaClass();
	// NOTE(review): the Java class shown in this file declares no release()
	// method; GetMethodID would then fail and leave a pending NoSuchMethodError
	// — confirm, and clear it with env->ExceptionClear() if so.
	jmethodID javaOFDestructor = env->GetMethodID(javaClass,"release","()V");
	if(javaVideoGrabber && javaOFDestructor){
		env->CallVoidMethod(javaVideoGrabber,javaOFDestructor);
		env->DeleteGlobalRef(javaVideoGrabber);
	}
	// NOTE(review): the javaTextureMatrix global ref is never deleted — confirm leak.
	ofRemoveListener(ofxAndroidEvents().unloadGL,this,&ofxAndroidVideoGrabber::onAppPause);
	ofRemoveListener(ofxAndroidEvents().reloadGL,this,&ofxAndroidVideoGrabber::onAppResume);
}
// Opens the camera at the requested preview size. Returns false when the
// grabber is already running or Java-side init fails. Replayed from
// onAppResume() after a GL reload.
bool ofxAndroidVideoGrabber::setup(int width, int height){
	if(hasInitialized){
		ofLogError("ofxAndroidVideoGrabber") << "Camera already initialized. Try calling close first.";
		return false;
	}
	this->width = width;
	this->height = height;
	loadTexture();   // must run before updateConfig() so textureID is valid
	updateConfig();  // push size/device/texture/pixel-flag to the Java grabber
	if(initCamera()){
		ofLogVerbose("ofxAndroidVideoGrabber") << "initGrabber(): camera initialized correctly";
		return true;
	}
	return false;
}
// (Re)creates the external-OES GL texture the camera SurfaceTexture renders
// into, points the wrapping ofTexture at it, and sizes the two FBOs used to
// un-flip the camera image.
void ofxAndroidVideoGrabber::loadTexture(){
	// Drop any previous texture first (textureID is a GLuint; -1 marks "none").
	if(textureID != -1){
		glDeleteTextures(1,&textureID);
		textureID = -1;
	}
	GLuint textureIDArray[1];
	glGenTextures(1, textureIDArray);
	textureID = textureIDArray[0];
	texture.setUseExternalTextureID(textureID);
	ofTextureData& textureData = texture.getTextureData();
	// SurfaceTexture requires the external-OES target, not GL_TEXTURE_2D.
	textureData.textureTarget = GL_TEXTURE_EXTERNAL_OES;
	textureData.tex_t = textureData.tex_u = 1;
	textureData.width = textureData.tex_w = width;
	textureData.height = textureData.tex_h = height;
	oesRemovalFbo.allocate(width,height,GL_RGB);
	flippedFbo.allocate(width,height,GL_RGB);
}
void ofxAndroidVideoGrabber::updateConfig(){ | |
JNIEnv *env = ofGetJNIEnv(); | |
if(javaVideoGrabber){ | |
env->CallVoidMethod( | |
javaVideoGrabber, | |
env->GetMethodID(getJavaClass(),"setConfig","(IIIIZ)V"), | |
deviceID, | |
width, | |
height, | |
textureID, | |
usePixelData | |
); | |
} | |
} | |
bool ofxAndroidVideoGrabber::initCamera(){ | |
if(hasInitialized){ | |
ofLogError("ofxAndroidVideoGrabber") << "Camera already initialized. Try close first."; | |
return false; | |
} | |
JNIEnv *env = ofGetJNIEnv(); | |
jclass javaClass = getJavaClass(); | |
if(javaVideoGrabber){ | |
env->CallVoidMethod( | |
javaVideoGrabber, | |
env->GetMethodID(getJavaClass(),"initGrabber","()V") | |
); | |
hasInitialized = true; | |
return true; | |
} | |
return false; | |
} | |
void ofxAndroidVideoGrabber::update(){ | |
if(appPaused || !hasInitialized){ | |
return; | |
} | |
hasNewPixelFrame = false; | |
ofGetJNIEnv()->CallVoidMethod( | |
javaVideoGrabber, | |
ofGetJNIEnv()->GetMethodID(getJavaClass(), "update", "()V") | |
); | |
} | |
// JNI callback: a fresh NV21 preview buffer arrived from Java. Pins the Java
// byte[] and exposes it zero-copy through callbackBufferPixels.
void ofxAndroidVideoGrabber::newPixelFrame(jbyteArray callbackBuffer){
	JNIEnv *env = ofGetJNIEnv();
	ofLogVerbose("ofxAndroidVideoGrabber") << "newPixelFrame";
	// Unpin/release the previous frame's array before taking the new one.
	releaseJavaCallbackBuffer();
	javaCallbackBuffer = (jbyteArray)env->NewGlobalRef((jobject)callbackBuffer);
	jboolean isCopy;
	callbackBufferPointer = (unsigned char*)env->GetByteArrayElements(
		javaCallbackBuffer,
		&isCopy
	);
	// The zero-copy design relies on the VM pinning the array rather than
	// copying it; bail out loudly if that ever changes.
	if(isCopy){
		throw std::runtime_error("Byte array for pixel data was copied. This is unexpected and there is no resolve. Likely Android changed their implementations of Get/Release PrimitiveArrayCritical");
	}
	// Wrap the pinned memory without copying; format matches the Java-side NV21 preview.
	callbackBufferPixels.setFromExternalPixels(
		callbackBufferPointer,
		width,
		height,
		OF_PIXELS_NV21
	);
	hasNewPixelFrame = true;
}
// JNI callback: the SurfaceTexture produced a new frame. Reads the texture
// transform matrix from Java, derives the rotated frame size from it, and
// renders the external-OES texture through two FBOs to undo the flip.
void ofxAndroidVideoGrabber::newTextureFrame(){
	JNIEnv *env = ofGetJNIEnv();
	jfloat* cfloats = env->GetFloatArrayElements(javaTextureMatrix, 0);
	transformMatrix.set(cfloats);
	env->ReleaseFloatArrayElements(javaTextureMatrix,cfloats,0);
	// Push (width,height) through the transform to find the post-rotation size.
	ofVec3f whVec2 = (ofMatrix4x4::newTranslationMatrix(width,height,0.0) * transformMatrix).getTranslation();
	int actualWidth = abs(whVec2.x);
	int actualHeight = abs(whVec2.y);
	// Round up to even dimensions.
	actualWidth += actualWidth % 2;
	actualHeight += actualHeight % 2;
	texture.getTextureData().width = actualWidth;
	texture.getTextureData().height = actualHeight;
	// Reallocate the FBOs only when the effective frame size changed.
	if(
		!oesRemovalFbo.isAllocated() ||
		oesRemovalFbo.getWidth() != actualWidth ||
		oesRemovalFbo.getHeight() != actualHeight
	){
		ofLogVerbose("ofxAndroidVideoGrabber") << "Actual Width: " << actualWidth << " Actual Height: " << actualHeight;
		oesRemovalFbo.allocate(actualWidth,actualHeight,GL_RGB);
		flippedFbo.allocate(actualWidth,actualHeight,GL_RGB);
	}
	/**
	 * This is some insane stuff.
	 * Android gives us a transform matrix for the camera preview.
	 * First off, the matrix isn't a proper affine transform matrix.
	 * The matrix will rotate the camera based on the device rotation
	 * that we setup inside ofxAndroidVideoGrabber.java
	 * However, the image is consistently flipped vertically.
	 * You're probably thinking why not just scale the texture on
	 * the Y -1 and translate by 1. It doesn't work. Y isn't always
	 * Y. It's sometimes X. And I can't find a way to easily determine
	 * which way is actually up. So we have this hack. We allocate
	 * two framebuffers. We draw the first image (which is flipped)
	 * and then we take that texture that was filled, flip it, and
	 * draw it to another fbo.
	 *
	 * Yeah. it's fab. If you find a better way, let me know.
	 *
	 * Future Kyle popping in to say hello. ofNode has a
	 * ofVec3f getUpDir() const method. Perchance this may be of use.
	 **/
	texture.setTextureMatrix(transformMatrix);
	oesRemovalFbo.getTexture().getTextureData().bFlipTexture = true;
	oesRemovalFbo.begin();
	texture.draw(0,0);
	oesRemovalFbo.end();
	flippedFbo.begin();
	oesRemovalFbo.draw(0,0);
	flippedFbo.end();
}
// unloadGL handler: the GL context is going away, so remember the paused state
// and tear everything down; onAppResume() re-runs setup() afterwards.
void ofxAndroidVideoGrabber::onAppPause(){
	appPaused = true;
	close();
}
// reloadGL handler: GL is back; re-create the texture and re-open the camera
// with the previously configured dimensions.
void ofxAndroidVideoGrabber::onAppResume(){
	appPaused = false;
	setup(width,height);
}
// - ofxAndroidVideoGrabber - Set | |
void ofxAndroidVideoGrabber::setDeviceID(int deviceID){ | |
this->deviceID = deviceID; | |
if(hasInitialized){ | |
updateConfig(); | |
} | |
} | |
// Intentionally a no-op: this backend does not expose preview frame-rate control.
void ofxAndroidVideoGrabber::setDesiredFrameRate(int framerate){}
void ofxAndroidVideoGrabber::setUsePixels(bool usePixelData){ | |
if(hasInitialized){ | |
ofLogError("ofxAndroidVideoGrabber") << "Camere already initialized. Try calling close first."; | |
return; | |
} | |
this->usePixelData = usePixelData; | |
} | |
// Only the fixed internal format (NV21) is supported; reports whether the
// requested format matches it without changing anything.
bool ofxAndroidVideoGrabber::setPixelFormat(ofPixelFormat pixelFormat){
	return pixelFormat == internalPixelFormat;
}
// Intentionally a no-op: this backend has no extra verbosity switch.
void ofxAndroidVideoGrabber::setVerbose(bool bTalkToMe){}
// - ofxAndroidVideoGrabber - Get | |
// Global JNI reference to the paired Java OFAndroidVideoGrabber
// (null when construction failed).
jobject ofxAndroidVideoGrabber::getJavaVideoGrabber(){
	return javaVideoGrabber;
}
// Returns the de-flipped camera texture, or nullptr when the device cannot
// render the camera into a GL texture.
ofTexture* ofxAndroidVideoGrabber::getTexturePtr(){
	if(!supportsTextureRendering()){
		return nullptr;
	}
	return &flippedFbo.getTexture();
}
// Pixel format of buffers returned by getPixels(); fixed to NV21 on this backend.
ofPixelFormat ofxAndroidVideoGrabber::getPixelFormat() const {
	return internalPixelFormat;
}
// Const overload forwarding to the non-const getter (the conventional pattern
// is the reverse, but the observable behavior is identical here).
const ofPixels& ofxAndroidVideoGrabber::getPixels() const {
	return const_cast<ofxAndroidVideoGrabber*>(this)->getPixels();
}
// Returns the pixels wrapping the most recent pinned NV21 preview buffer.
// When setUsePixels(false) was chosen, the buffer is never filled — warn.
ofPixels& ofxAndroidVideoGrabber::getPixels(){
	if(!usePixelData){
		ofLogNotice("ofxAndroidVideoGrabber") << "Calling getPixels will not return frame data when setUsePixels(false) has been set";
	}
	return callbackBufferPixels;
}
// Display width: taken from the flipped FBO, which tracks the rotated frame
// size and may differ from the requested preview width.
float ofxAndroidVideoGrabber::getWidth() const {
	return flippedFbo.getWidth();
}
// Display height; see getWidth() for why this comes from the FBO.
float ofxAndroidVideoGrabber::getHeight() const {
	return flippedFbo.getHeight();
}
// Raw camera preview height as configured in setup().
float ofxAndroidVideoGrabber::getPreviewHeight() const {
	return height;
}
// Raw camera preview width as configured in setup().
float ofxAndroidVideoGrabber::getPreviewWidth() const {
	return width;
}
// - ofxAndroidVideoGrabber - Lifecycle | |
// True when the last update() delivered a fresh pixel buffer. Only the pixel
// path sets this flag; texture-only frames do not (see newPixelFrame()).
bool ofxAndroidVideoGrabber::isFrameNew() const {
	return hasNewPixelFrame;
}
// True between a successful setup()/initCamera() and the matching close().
bool ofxAndroidVideoGrabber::isInitialized() const {
	return hasInitialized;
}
// Unpins the currently held Java preview byte[] (mode 0: copy back if the VM
// copied, then free) and drops its global ref. Safe to call when nothing is held.
void ofxAndroidVideoGrabber::releaseJavaCallbackBuffer(){
	ofLogVerbose("ofxAndroidVideoGrabber") << "releaseJavaCallbackBuffer";
	if(javaCallbackBuffer != nullptr && callbackBufferPointer != nullptr){
		JNIEnv *env = ofGetJNIEnv();
		env->ReleaseByteArrayElements(
			javaCallbackBuffer,
			(jbyte*)callbackBufferPointer,
			0
		);
		env->DeleteGlobalRef((jobject)javaCallbackBuffer);
		javaCallbackBuffer = nullptr;
		callbackBufferPointer = nullptr;
	}
}
// Stops the camera and releases GL and JNI resources. Safe to call repeatedly.
void ofxAndroidVideoGrabber::close(){
	// Release texture
	flippedFbo.clear();
	oesRemovalFbo.clear();
	// NOTE(review): runs even when textureID is already -1 (wraps to a large
	// GLuint). GL is documented to ignore unknown texture names, so presumably
	// harmless — confirm.
	glDeleteTextures(1, &textureID);
	textureID = -1;
	JNIEnv* env = ofGetJNIEnv();
	// Unpin the Java preview buffer before asking Java to stop the camera.
	releaseJavaCallbackBuffer();
	callbackBufferPixels.clear();
	if(javaVideoGrabber){
		env->CallVoidMethod(
			javaVideoGrabber,
			env->GetMethodID(getJavaClass(),"close","()V")
		);
	}
	else {
		ofLogError("ofxAndroidVideoGrabber") << "close(): couldn't get OFAndroidVideoGrabber close grabber method";
	}
	hasInitialized = false;
}
// - ofxAndroidVideoGrabber - Helper | |
// Intentionally a no-op: Android has no system video-settings dialog to show.
void ofxAndroidVideoGrabber::videoSettings(){}
bool ofxAndroidVideoGrabber::supportsTextureRendering(){ | |
static bool supportsTexture = false; | |
static bool supportChecked = false; | |
if(!supportChecked){ | |
JNIEnv *env = ofGetJNIEnv(); | |
supportsTexture = env->CallStaticBooleanMethod( | |
getJavaClass(), | |
env->GetStaticMethodID( | |
getJavaClass(), | |
"supportsTextureRendering", | |
"()Z" | |
) | |
); | |
supportChecked = true; | |
} | |
return supportsTexture; | |
} | |
std::vector<ofVideoDevice> ofxAndroidVideoGrabber::listDevices() const { | |
std::vector<ofVideoDevice> devices; | |
int numDevices = getNumCameras(); | |
for(int i = 0; i < numDevices; i++){ | |
int facing = getFacingOfCamera(i); | |
ofVideoDevice vd; | |
vd.deviceName = facing == 0 ? "Back" : "Front"; | |
vd.id = i; | |
vd.bAvailable = true; | |
devices.push_back(vd); | |
} | |
return devices; | |
} | |
int ofxAndroidVideoGrabber::getCameraFacing(int facing) const { | |
static std::map<int,int> cameraFacingCache; | |
auto search = cameraFacingCache.find(facing); | |
if(search == cameraFacingCache.end()){ | |
JNIEnv *env = ofGetJNIEnv(); | |
if(javaVideoGrabber){ | |
int res = env->CallIntMethod( | |
javaVideoGrabber, | |
env->GetMethodID(getJavaClass(), "getCameraFacing", "(I)I"), | |
facing | |
); | |
cameraFacingCache[facing] = res; | |
} | |
return -1; | |
} | |
return cameraFacingCache[facing]; | |
} | |
// Device id of the back-facing camera (facing constant 0), -1 when not found.
int ofxAndroidVideoGrabber::getBackCamera() const {
	return getCameraFacing(0);
}
// Device id of the front-facing camera (facing constant 1), -1 when not found.
int ofxAndroidVideoGrabber::getFrontCamera() const {
	return getCameraFacing(1);
}
// Number of cameras reported by OFAndroidVideoGrabber.getNumCameras();
// 0 when the Java grabber was never constructed.
int ofxAndroidVideoGrabber::getNumCameras() const {
	JNIEnv *env = ofGetJNIEnv();
	if(javaVideoGrabber){
		return env->CallIntMethod(
			javaVideoGrabber,
			env->GetMethodID(getJavaClass(),"getNumCameras","()I")
		);
	}
	else {
		ofLogError("ofxAndroidVideoGrabber") << "getNumCameras(): couldn't get OFAndroidVideoGrabber getNumCameras method";
		return 0;
	}
}
// Physical mounting orientation (degrees) of the given camera, fetched from
// Java and cached per device id across all instances. Returns 0 (uncached)
// when the Java grabber does not exist yet, so a later call can still succeed.
int ofxAndroidVideoGrabber::getCameraOrientation(int device) const {
	static std::map<int,int> cameraOrientationCache;
	auto search = cameraOrientationCache.find(device);
	if(search == cameraOrientationCache.end()){
		JNIEnv *env = ofGetJNIEnv();
		if(javaVideoGrabber){
			int res = env->CallIntMethod(
				javaVideoGrabber,
				env->GetMethodID(getJavaClass(),"getCameraOrientation","(I)I"),
				device
			);
			cameraOrientationCache[device] = res;
		}
		else {
			return 0;
		}
	}
	return cameraOrientationCache[device];
}
// Facing constant (0 = back, 1 = front) of the given camera, fetched from Java
// and cached per device id across all instances. Returns 0 (uncached) when the
// Java grabber does not exist yet.
int ofxAndroidVideoGrabber::getFacingOfCamera(int device) const {
	static std::map<int,int> facingOfCameraCache;
	auto search = facingOfCameraCache.find(device);
	if(search == facingOfCameraCache.end()){
		JNIEnv *env = ofGetJNIEnv();
		if(javaVideoGrabber){
			int res = env->CallIntMethod(
				javaVideoGrabber,
				env->GetMethodID(getJavaClass(),"getFacingOfCamera","(I)I"),
				device
			);
			facingOfCameraCache[device] = res;
		}
		else {
			return 0;
		}
	}
	return facingOfCameraCache[device];
}
// JNI | |
extern "C" { | |
jint Java_newPixelFrame(JNIEnv* env, jobject thiz, jint javaInstanceID, jbyteArray callbackBuffer){ | |
InstanceContainer::getInstance(javaInstanceID)->newPixelFrame(callbackBuffer); | |
return 0; | |
} | |
jint Java_newTextureFrame(JNIEnv* env, jobject thiz, jint javaInstanceID){ | |
InstanceContainer::getInstance(javaInstanceID)->newTextureFrame(); | |
return 0; | |
} | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* ofxAndroidVideoGrabber.h | |
* | |
* Created on: 17/01/2011 | |
* Author: arturo | |
*/ | |
#pragma once | |
#include "ofFbo.h" | |
#include "ofBaseTypes.h" | |
#include "ofPixels.h" | |
#include "ofEvents.h" | |
#include "ofTypes.h" | |
#include "ofTexture.h" | |
#include <jni.h> | |
// Map the short wrapper names used in the .cpp onto the fully-qualified
// symbols the JVM resolves (Java_<package>_<class>_<method>).
#define Java_newPixelFrame Java_cc_openframeworks_OFAndroidVideoGrabber_newPixelFrame
#define Java_newTextureFrame Java_cc_openframeworks_OFAndroidVideoGrabber_newTextureFrame
// C-linkage prototypes for the callbacks invoked from OFAndroidVideoGrabber.java.
extern "C" {
jint Java_newPixelFrame(JNIEnv* env, jobject thiz, jint javaInstanceID, jbyteArray callbackBuffer);
jint Java_newTextureFrame(JNIEnv* env, jobject thiz, jint javaInstanceID);
}
// Android camera grabber: drives the Java OFAndroidVideoGrabber over JNI and
// exposes the camera as an ofTexture and/or NV21 ofPixels.
class ofxAndroidVideoGrabber: public ofBaseVideoGrabber{
public:
	ofxAndroidVideoGrabber();
	~ofxAndroidVideoGrabber();
	// ofBaseVideo
	bool isFrameNew() const;
	void close();
	bool isInitialized() const;
	// Only the internal NV21 format is accepted.
	bool setPixelFormat(ofPixelFormat pixelFormat);
	ofPixelFormat getPixelFormat() const;
	// ofBaseHasPixels
	ofPixels& getPixels();
	const ofPixels& getPixels() const;
	// ofBaseUpdates
	void update();
	// ofBaseVideoGrabber
	std::vector<ofVideoDevice> listDevices() const;
	bool setup(int w, int h);
	float getHeight() const;
	float getWidth() const;
	ofTexture* getTexturePtr();
	void setVerbose(bool talkToMe);
	void setDeviceID(int deviceID);
	void setDesiredFrameRate(int framerate);
	void videoSettings();
	// Android Specific
	// Must be called before setup(); controls NV21 pixel-buffer delivery.
	void setUsePixels(bool usePixels);
	float getPreviewHeight() const;
	float getPreviewWidth() const;
	// Get device id of back facing camera, -1 if no match is found
	int getBackCamera() const;
	// Get device id of front facing camera, -1 if no match is found
	int getFrontCamera() const;
	// Get the physical orientation of the camera. Typically on a phone the
	// camera mounted in landscape mode, this returns 90
	int getCameraOrientation(int device=-1) const;
	// Get facing of camera.
	// Leave device = -1 to get selected cameras facing
	//
	// Returns 0 on backfacing camera, and 1 on frontal facing camera.
	int getFacingOfCamera(int device = -1) const;
	bool supportsTextureRendering();
	jobject getJavaVideoGrabber();
	// JNI callbacks need access to newPixelFrame()/newTextureFrame().
	friend jint Java_newPixelFrame(JNIEnv* env, jobject thiz, jint javaInstanceID, jbyteArray callbackBuffer);
	friend jint Java_newTextureFrame(JNIEnv* env, jobject thiz, jint javaInstanceID);
private:
	// Handle used by InstanceContainer / JNI routing.
	int javaInstanceID;
	// Global ref to the paired Java OFAndroidVideoGrabber object.
	jobject javaVideoGrabber;
	// - Config
	bool usePixelData;
	int width;
	int height;
	int deviceID;
	// External-OES GL texture the SurfaceTexture renders into.
	GLuint textureID;
	// Storage
	// Wraps the pinned Java preview byte[] zero-copy.
	ofPixels callbackBufferPixels;
	jbyteArray javaCallbackBuffer;
	unsigned char* callbackBufferPointer;
	// State
	bool hasInitialized;
	bool appPaused;
	bool hasNewPixelFrame;
	// Graphics
	// Global ref to the Java float[16] texture transform.
	jfloatArray javaTextureMatrix;
	ofMatrix4x4 transformMatrix;
	// Two-FBO pipeline that undoes the SurfaceTexture flip (see .cpp).
	ofFbo oesRemovalFbo;
	ofFbo flippedFbo;
	ofTexture texture;
	ofPixelFormat internalPixelFormat;
	// Methods
	void updateConfig();
	bool initCamera();
	void onAppPause();
	void onAppResume();
	void loadTexture();
	void newTextureFrame();
	void newPixelFrame(jbyteArray callbackBuffer);
	int getCameraFacing(int facing) const;
	int getNumCameras() const;
	void releaseJavaCallbackBuffer();
};
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment