@salihyalcin
Created October 5, 2021 11:34
Flashlight WebRTC
// These classes build on WebRTC's Camera1 capture stack and rely on package-private
// helpers (e.g. CameraSession, Histogram, Camera1Enumerator.getCameraIndex), so they
// are expected to live in the org.webrtc package. Typical imports: android.content.Context,
// android.graphics.ImageFormat, android.hardware.Camera, android.os.Handler,
// android.os.SystemClock, java.io.IOException, java.nio.ByteBuffer, java.util.List,
// java.util.concurrent.TimeUnit.

// A Camera1Capturer that keeps a reference to its FlashlightCameraSession so the
// torch can be toggled while capturing.
public class FlashlightCameraCapturer extends Camera1Capturer {
    private FlashlightCameraSession cameraSession;
    private final boolean captureToTexture;

    public FlashlightCameraCapturer(String cameraName, CameraEventsHandler eventsHandler, boolean captureToTexture) {
        super(cameraName, eventsHandler, captureToTexture);
        this.captureToTexture = captureToTexture;
    }

    @Override
    protected void createCameraSession(CameraSession.CreateSessionCallback createSessionCallback, CameraSession.Events events,
            Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, String cameraName,
            int width, int height, int framerate) {
        // Wrap the callback so the created session can be captured before it is forwarded.
        CameraSession.CreateSessionCallback myCallback = new CameraSession.CreateSessionCallback() {
            @Override
            public void onDone(CameraSession cameraSession) {
                FlashlightCameraCapturer.this.cameraSession = (FlashlightCameraSession) cameraSession;
                createSessionCallback.onDone(cameraSession);
            }

            @Override
            public void onFailure(CameraSession.FailureType failureType, String error) {
                createSessionCallback.onFailure(failureType, error);
            }
        };
        FlashlightCameraSession.create(myCallback, events, captureToTexture, applicationContext, surfaceTextureHelper,
                Camera1Enumerator.getCameraIndex(cameraName), width, height, framerate);
    }

    // cameraSession is null until the session has been created, so these should only
    // be called after capture has started.
    public void turnOnFlashlight() {
        cameraSession.setFlashlightActive(true);
    }

    public void turnOffFlashlight() {
        cameraSession.setFlashlightActive(false);
    }
}
// A Camera1Enumerator that hands out FlashlightCameraCapturer instances
// (capturing to texture) instead of plain Camera1Capturers.
public class FlashlightCameraNumerator extends Camera1Enumerator {
    @Override
    public CameraVideoCapturer createCapturer(String deviceName, CameraVideoCapturer.CameraEventsHandler eventsHandler) {
        return new FlashlightCameraCapturer(deviceName, eventsHandler, true);
    }
}
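For context, here is a minimal usage sketch showing how the enumerator and capturer might be wired into a standard WebRTC Android pipeline. The factory, eglBase, and applicationContext names are assumptions standing in for an existing PeerConnectionFactory, EglBase, and application Context; the device selection is simplified.

// Minimal usage sketch (assumed setup: an existing PeerConnectionFactory `factory`,
// an EglBase `eglBase`, and an application Context `applicationContext`).
FlashlightCameraNumerator enumerator = new FlashlightCameraNumerator();
String deviceName = enumerator.getDeviceNames()[0]; // pick a back-facing camera in real code

FlashlightCameraCapturer capturer =
        (FlashlightCameraCapturer) enumerator.createCapturer(deviceName, null /* eventsHandler */);

SurfaceTextureHelper helper = SurfaceTextureHelper.create("CaptureThread", eglBase.getEglBaseContext());
VideoSource videoSource = factory.createVideoSource(false /* isScreencast */);
capturer.initialize(helper, applicationContext, videoSource.getCapturerObserver());
capturer.startCapture(1280, 720, 30);

// Toggle the torch while the preview is running.
capturer.turnOnFlashlight();
// ...
capturer.turnOffFlashlight();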
// Largely mirrors WebRTC's internal Camera1Session, with a torch toggle added.
public class FlashlightCameraSession implements CameraSession {
    private static final String TAG = "FlashlightCameraSession";
    private static final int NUMBER_OF_CAPTURE_BUFFERS = 3;
    private static final Histogram camera1StartTimeMsHistogram =
            Histogram.createCounts("WebRTC.Android.Camera1.StartTimeMs", 1, 10000, 50);
    private static final Histogram camera1StopTimeMsHistogram =
            Histogram.createCounts("WebRTC.Android.Camera1.StopTimeMs", 1, 10000, 50);
    private static final Histogram camera1ResolutionHistogram;

    private final Handler cameraThreadHandler;
    private final Events events;
    private final boolean captureToTexture;
    private final Context applicationContext;
    private final SurfaceTextureHelper surfaceTextureHelper;
    private final int cameraId;
    private final Camera camera;
    private final Camera.CameraInfo info;
    private final CameraEnumerationAndroid.CaptureFormat captureFormat;
    private final long constructionTimeNs;
    private SessionState state;
    private boolean firstFrameReported;
    public static void create(CreateSessionCallback callback, Events events, boolean captureToTexture,
            Context applicationContext, SurfaceTextureHelper surfaceTextureHelper, int cameraId,
            int width, int height, int framerate) {
        long constructionTimeNs = System.nanoTime();
        Logging.d(TAG, "Open camera " + cameraId);
        events.onCameraOpening();

        Camera camera;
        try {
            camera = Camera.open(cameraId);
        } catch (RuntimeException e) {
            callback.onFailure(FailureType.ERROR, e.getMessage());
            return;
        }
        if (camera == null) {
            callback.onFailure(FailureType.ERROR, "android.hardware.Camera.open returned null for camera id = " + cameraId);
            return;
        }

        try {
            camera.setPreviewTexture(surfaceTextureHelper.getSurfaceTexture());
        } catch (RuntimeException | IOException e) {
            camera.release();
            callback.onFailure(FailureType.ERROR, e.getMessage());
            return;
        }

        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(cameraId, info);

        CameraEnumerationAndroid.CaptureFormat captureFormat;
        try {
            Camera.Parameters parameters = camera.getParameters();
            captureFormat = findClosestCaptureFormat(parameters, width, height, framerate);
            Size pictureSize = findClosestPictureSize(parameters, width, height);
            updateCameraParameters(camera, parameters, captureFormat, pictureSize, captureToTexture);
        } catch (RuntimeException e) {
            camera.release();
            callback.onFailure(FailureType.ERROR, e.getMessage());
            return;
        }

        if (!captureToTexture) {
            // Pre-allocate direct byte buffers for the preview callback path.
            int frameSize = captureFormat.frameSize();
            for (int i = 0; i < NUMBER_OF_CAPTURE_BUFFERS; ++i) {
                ByteBuffer buffer = ByteBuffer.allocateDirect(frameSize);
                camera.addCallbackBuffer(buffer.array());
            }
        }

        // Frame rotation is handled in getFrameOrientation(), not by the display orientation.
        camera.setDisplayOrientation(0);
        callback.onDone(new FlashlightCameraSession(events, captureToTexture, applicationContext, surfaceTextureHelper,
                cameraId, camera, info, captureFormat, constructionTimeNs));
    }
    private static void updateCameraParameters(Camera camera, Camera.Parameters parameters,
            CameraEnumerationAndroid.CaptureFormat captureFormat, Size pictureSize, boolean captureToTexture) {
        List<String> focusModes = parameters.getSupportedFocusModes();
        parameters.setPreviewFpsRange(captureFormat.framerate.min, captureFormat.framerate.max);
        parameters.setPreviewSize(captureFormat.width, captureFormat.height);
        parameters.setPictureSize(pictureSize.width, pictureSize.height);
        if (!captureToTexture) {
            // NV21 (constant value 17) is the preview format the NV21Buffer path below expects.
            parameters.setPreviewFormat(ImageFormat.NV21);
        }
        if (parameters.isVideoStabilizationSupported()) {
            parameters.setVideoStabilization(true);
        }
        if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
            parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
        }
        camera.setParameters(parameters);
    }

    private static CameraEnumerationAndroid.CaptureFormat findClosestCaptureFormat(Camera.Parameters parameters,
            int width, int height, int framerate) {
        List<CameraEnumerationAndroid.CaptureFormat.FramerateRange> supportedFramerates =
                Camera1Enumerator.convertFramerates(parameters.getSupportedPreviewFpsRange());
        Logging.d(TAG, "Available fps ranges: " + supportedFramerates);
        CameraEnumerationAndroid.CaptureFormat.FramerateRange fpsRange =
                CameraEnumerationAndroid.getClosestSupportedFramerateRange(supportedFramerates, framerate);
        Size previewSize = CameraEnumerationAndroid.getClosestSupportedSize(
                Camera1Enumerator.convertSizes(parameters.getSupportedPreviewSizes()), width, height);
        CameraEnumerationAndroid.reportCameraResolution(camera1ResolutionHistogram, previewSize);
        return new CameraEnumerationAndroid.CaptureFormat(previewSize.width, previewSize.height, fpsRange);
    }

    private static Size findClosestPictureSize(Camera.Parameters parameters, int width, int height) {
        return CameraEnumerationAndroid.getClosestSupportedSize(
                Camera1Enumerator.convertSizes(parameters.getSupportedPictureSizes()), width, height);
    }
    private FlashlightCameraSession(Events events, boolean captureToTexture, Context applicationContext,
            SurfaceTextureHelper surfaceTextureHelper, int cameraId, Camera camera, Camera.CameraInfo info,
            CameraEnumerationAndroid.CaptureFormat captureFormat, long constructionTimeNs) {
        Logging.d(TAG, "Create new camera1 session on camera " + cameraId);
        this.cameraThreadHandler = new Handler();
        this.events = events;
        this.captureToTexture = captureToTexture;
        this.applicationContext = applicationContext;
        this.surfaceTextureHelper = surfaceTextureHelper;
        this.cameraId = cameraId;
        this.camera = camera;
        this.info = info;
        this.captureFormat = captureFormat;
        this.constructionTimeNs = constructionTimeNs;
        surfaceTextureHelper.setTextureSize(captureFormat.width, captureFormat.height);
        this.startCapturing();
    }
    @Override
    public void stop() {
        Logging.d(TAG, "Stop camera1 session on camera " + this.cameraId);
        this.checkIsOnCameraThread();
        if (this.state != SessionState.STOPPED) {
            long stopStartTime = System.nanoTime();
            this.stopInternal();
            int stopTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - stopStartTime);
            camera1StopTimeMsHistogram.addSample(stopTimeMs);
        }
    }
    private void startCapturing() {
        Logging.d(TAG, "Start capturing");
        this.checkIsOnCameraThread();
        this.state = SessionState.RUNNING;
        this.camera.setErrorCallback(new Camera.ErrorCallback() {
            @Override
            public void onError(int error, Camera camera) {
                String errorMessage;
                if (error == Camera.CAMERA_ERROR_SERVER_DIED) {
                    errorMessage = "Camera server died!";
                } else {
                    errorMessage = "Camera error: " + error;
                }
                Logging.e(TAG, errorMessage);
                stopInternal();
                if (error == Camera.CAMERA_ERROR_EVICTED) {
                    events.onCameraDisconnected(FlashlightCameraSession.this);
                } else {
                    events.onCameraError(FlashlightCameraSession.this, errorMessage);
                }
            }
        });
        if (this.captureToTexture) {
            this.listenForTextureFrames();
        } else {
            this.listenForBytebufferFrames();
        }
        try {
            this.camera.startPreview();
        } catch (RuntimeException e) {
            this.stopInternal();
            this.events.onCameraError(this, e.getMessage());
        }
    }
    private void stopInternal() {
        Logging.d(TAG, "Stop internal");
        this.checkIsOnCameraThread();
        if (this.state == SessionState.STOPPED) {
            Logging.d(TAG, "Camera is already stopped");
        } else {
            this.state = SessionState.STOPPED;
            this.surfaceTextureHelper.stopListening();
            this.camera.stopPreview();
            this.camera.release();
            this.events.onCameraClosed(this);
            Logging.d(TAG, "Stop done");
        }
    }
    private void listenForTextureFrames() {
        this.surfaceTextureHelper.startListening((frame) -> {
            this.checkIsOnCameraThread();
            if (this.state != SessionState.RUNNING) {
                Logging.d(TAG, "Texture frame captured but camera is no longer running.");
            } else {
                if (!this.firstFrameReported) {
                    int startTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - this.constructionTimeNs);
                    camera1StartTimeMsHistogram.addSample(startTimeMs);
                    this.firstFrameReported = true;
                }
                // Mirror front-facing frames and attach the frame orientation.
                VideoFrame modifiedFrame = new VideoFrame(
                        CameraSession.createTextureBufferWithModifiedTransformMatrix(
                                (TextureBufferImpl) frame.getBuffer(),
                                this.info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT, 0),
                        this.getFrameOrientation(), frame.getTimestampNs());
                this.events.onFrameCaptured(this, modifiedFrame);
                modifiedFrame.release();
            }
        });
    }
    private void listenForBytebufferFrames() {
        this.camera.setPreviewCallbackWithBuffer((data, callbackCamera) -> {
            checkIsOnCameraThread();
            if (callbackCamera != this.camera) {
                Logging.e(TAG, "Callback from a different camera. This should never happen.");
            } else if (this.state != SessionState.RUNNING) {
                Logging.d(TAG, "Bytebuffer frame captured but camera is no longer running.");
            } else {
                long captureTimeNs = TimeUnit.MILLISECONDS.toNanos(SystemClock.elapsedRealtime());
                if (!this.firstFrameReported) {
                    int startTimeMs = (int) TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - this.constructionTimeNs);
                    camera1StartTimeMsHistogram.addSample(startTimeMs);
                    this.firstFrameReported = true;
                }
                // Wrap the NV21 preview buffer; hand it back to the camera once the frame is released.
                VideoFrame.Buffer frameBuffer = new NV21Buffer(data, this.captureFormat.width, this.captureFormat.height, () ->
                        this.cameraThreadHandler.post(() -> {
                            if (this.state == SessionState.RUNNING) {
                                this.camera.addCallbackBuffer(data);
                            }
                        }));
                VideoFrame frame = new VideoFrame(frameBuffer, this.getFrameOrientation(), captureTimeNs);
                this.events.onFrameCaptured(this, frame);
                frame.release();
            }
        });
    }
    private int getFrameOrientation() {
        int rotation = CameraSession.getDeviceOrientation(this.applicationContext);
        if (this.info.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
            rotation = 360 - rotation;
        }
        return (this.info.orientation + rotation) % 360;
    }

    private void checkIsOnCameraThread() {
        if (Thread.currentThread() != this.cameraThreadHandler.getLooper().getThread()) {
            throw new IllegalStateException("Wrong thread");
        }
    }
    // Toggles the torch by updating the parameters of the running camera. The camera
    // is expected to support FLASH_MODE_TORCH (see the guarded sketch below the class).
    void setFlashlightActive(boolean isActive) {
        Camera.Parameters params = camera.getParameters();
        if (isActive) {
            params.setFlashMode(Camera.Parameters.FLASH_MODE_TORCH);
        } else {
            params.setFlashMode(Camera.Parameters.FLASH_MODE_OFF);
        }
        camera.setParameters(params);
    }
    static {
        camera1ResolutionHistogram =
                Histogram.createEnumeration("WebRTC.Android.Camera1.Resolution", CameraEnumerationAndroid.COMMON_RESOLUTIONS.size());
    }

    private enum SessionState {
        RUNNING,
        STOPPED
    }
}
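One caveat: not every camera exposes a torch (front cameras usually do not), and Camera.setParameters can throw if an unsupported flash mode is requested. A guarded variant of the toggle, based only on the standard android.hardware.Camera API, could look like the following sketch; the method name is hypothetical and would live inside FlashlightCameraSession.

    // Hypothetical guarded variant (not part of the original gist): only toggles the
    // torch when the current camera actually reports FLASH_MODE_TORCH support.
    void setFlashlightActiveIfSupported(boolean isActive) {
        Camera.Parameters params = camera.getParameters();
        List<String> flashModes = params.getSupportedFlashModes();
        if (flashModes == null || !flashModes.contains(Camera.Parameters.FLASH_MODE_TORCH)) {
            Logging.w(TAG, "Torch is not supported on camera " + cameraId);
            return;
        }
        params.setFlashMode(isActive ? Camera.Parameters.FLASH_MODE_TORCH : Camera.Parameters.FLASH_MODE_OFF);
        camera.setParameters(params);
    }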