Skip to content

Instantly share code, notes, and snippets.

@wernerd
Created March 20, 2016 09:43
Show Gist options
  • Save wernerd/b933deb3187f582fec2c to your computer and use it in GitHub Desktop.
Save wernerd/b933deb3187f582fec2c to your computer and use it in GitHub Desktop.
The modified Camera2VideoFragment showing RenderScript YUV-to-RGB conversion
/*
* Copyright 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.camera2video;
import android.Manifest;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.app.DialogFragment;
import android.app.Fragment;
import android.content.Context;
import android.content.DialogInterface;
import android.content.pm.PackageManager;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.renderscript.Allocation;
import android.renderscript.Element;
import android.renderscript.RenderScript;
import android.renderscript.Type;
import android.support.annotation.NonNull;
import android.support.v13.app.FragmentCompat;
import android.support.v4.app.ActivityCompat;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Toast;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
public class Camera2VideoFragment extends Fragment
implements FragmentCompat.OnRequestPermissionsResultCallback, Progress.GetFrameCallback
{
// Maps a display rotation (Surface.ROTATION_*) to the clockwise rotation in
// degrees that must be applied to the camera output for upright display.
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
private static final String TAG = "Camera2VideoFragment";
// Request code used when asking for the runtime permissions below.
private static final int REQUEST_VIDEO_PERMISSIONS = 1;
// Tag under which the permission/error dialogs are shown.
private static final String FRAGMENT_DIALOG = "dialog";
// Runtime permissions required before the camera can be opened.
private static final String[] VIDEO_PERMISSIONS = {
Manifest.permission.CAMERA,
Manifest.permission.RECORD_AUDIO,
};
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
// Requested size of the RenderScript video stream (CIF, 352x288). The
// commented-out values are alternative resolutions tried during development.
private static final int WIDTH = 352; // 720; // 640;
private static final int HEIGHT = 288; // 480;
/**
 * An {@link AutoFitTextureView} for camera preview.
 */
private AutoFitTextureView mTextureView;
/**
 * A reference to the opened {@link android.hardware.camera2.CameraDevice}.
 */
private CameraDevice mCameraDevice;
/**
 * A reference to the current {@link android.hardware.camera2.CameraCaptureSession} for
 * preview.
 */
private CameraCaptureSession mPreviewSession;
// RenderScript context; created in onCreateView() and used by RgbConversion.
private RenderScript mRS;
// YUV-to-RGB conversion pipeline; created in openCamera().
private RgbConversion mConversion;
/**
 * {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a
 * {@link TextureView}.
 */
// Source rectangle (full WIDTH x HEIGHT frame) used when drawing the converted
// bitmap in onFrameArrayInt(); assigned in openCamera().
private Rect mRectSrc;
// Defers opening the camera until the preview TextureView's SurfaceTexture
// exists (see onResume()), and keeps the preview transform in sync with
// size changes.
private TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
// Surface is ready - open the camera now.
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
// Returning true lets the TextureView release the SurfaceTexture itself.
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
// Destination rectangle for the converted frame; assigned in onViewCreated().
private Rect mRectDest;
// True while the video TextureView's SurfaceTexture is available;
// startPreview() refuses to run without it.
boolean mVideoTexture;
// Tracks availability of the TextureView showing the converted RGB frames and
// keeps its bitmap transform (mBitMatrix) up to date.
private TextureView.SurfaceTextureListener mVideoTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransformBitmap(width, height);
mVideoTexture = true;
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surfaceTexture,
int width, int height) {
configureTransformBitmap(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surfaceTexture) {
mVideoTexture = false;
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surfaceTexture) {
}
};
/**
 * The {@link android.util.Size} of camera preview.
 */
private Size mPreviewSize;
/**
 * The {@link android.util.Size} of video recording.
 */
private Size mVideoSize;
/**
 * Camera preview.
 */
private CaptureRequest.Builder mPreviewBuilder;
/**
 * An additional thread for running tasks that shouldn't block the UI.
 */
private HandlerThread mBackgroundThread;
/**
 * A {@link Handler} for running tasks in the background.
 */
private Handler mBackgroundHandler;
/**
 * A {@link Semaphore} to prevent the app from exiting before closing the camera.
 */
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
// Surface target for the conversion output allocation; created in openCamera().
// NOTE(review): no OnImageAvailableListener is ever attached, so its images are
// never consumed here - displayed frames arrive via onFrameArrayInt() instead.
private ImageReader mImageReader;
// TextureView onto which converted RGB frames are drawn via lockCanvas().
private TextureView mVideoView;
/**
 * {@link CameraDevice.StateCallback} is called when {@link CameraDevice} changes its status.
 */
private CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
startPreview();
// Release the lock acquired in openCamera().
mCameraOpenCloseLock.release();
if (null != mTextureView) {
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
// Unrecoverable camera error: release everything and finish the activity.
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
Activity activity = getActivity();
if (null != activity) {
activity.finish();
}
}
};
/**
 * Factory method for this fragment.
 */
public static Camera2VideoFragment newInstance() {
return new Camera2VideoFragment();
}
/**
 * Given {@code choices} of {@code Size}s supported by a camera, chooses the smallest one whose
 * width and height are at least as large as the respective requested values.
 * NOTE(review): unlike the stock Camera2 sample, this variant does NOT check the
 * aspect ratio, so the returned size may have a different aspect ratio than requested.
 *
 * @param choices The list of sizes that the camera supports for the intended output class
 * @param width The minimum desired width
 * @param height The minimum desired height
 * @return The optimal {@code Size}, or an arbitrary one if none were big enough
 */
private static Size chooseOptimalSize(Size[] choices, int width, int height) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
for (Size option : choices) {
Log.d(TAG, String.format("Available sizes: W: %d, h: %d", option.getWidth(), option.getHeight()));
if (option.getWidth() >= width && option.getHeight() >= height) {
bigEnough.add(option);
}
}
// Pick the smallest of those, assuming we found any
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else {
// Fall back to whatever the camera lists first; may be too small.
Log.e(TAG, "Couldn't find any suitable preview size, requested width: " + width + ", height: " + height);
return choices[0];
}
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
// Create the RenderScript context early; openCamera() needs it to build the
// YUV-to-RGB conversion pipeline.
mRS = RenderScript.create(getActivity());
return inflater.inflate(R.layout.fragment_camera2_video, container, false);
}
@Override
public void onViewCreated(final View view, Bundle savedInstanceState) {
mTextureView = (AutoFitTextureView) view.findViewById(R.id.texture);
// Second TextureView that displays the RGB frames converted by RenderScript.
mVideoView = (TextureView) view.findViewById(R.id.VideoSurfacePreview);
mVideoView.setSurfaceTextureListener(mVideoTextureListener);
// Converted frames are always drawn into a fixed WIDTH x HEIGHT area.
mRectDest = new Rect(0, 0,WIDTH, HEIGHT);
}
@Override
public void onResume() {
    super.onResume();
    startBackgroundThread();
    // A SurfaceTexture only exists after the TextureView has been laid out.
    // If it is not ready yet, defer opening the camera to the listener.
    if (!mTextureView.isAvailable()) {
        mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
        return;
    }
    openCamera(mTextureView.getWidth(), mTextureView.getHeight());
}
@Override
public void onPause() {
// Release the camera before tearing down the thread its callbacks run on.
closeCamera();
stopBackgroundThread();
super.onPause();
}
// Paint used for the bitmap draw in onFrameArrayInt().
private static Paint paL = new Paint(Paint.ANTI_ALIAS_FLAG);
/**
 * Starts a background thread and its {@link Handler}.
 */
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
 * Stops the background thread and its {@link Handler}, and detaches the
 * conversion pipeline's output surface.
 */
private void stopBackgroundThread() {
    // mConversion only exists once openCamera() has succeeded; guard against
    // an NPE when onPause() runs before that (e.g. permissions were denied).
    if (mConversion != null) {
        mConversion.setOutputSurface(null);
    }
    if (mBackgroundThread == null) {
        return;
    }
    mBackgroundThread.quitSafely();
    try {
        mBackgroundThread.join();
        mBackgroundThread = null;
        mBackgroundHandler = null;
    } catch (InterruptedException e) {
        // Restore the interrupt status so callers further up the stack can react.
        Thread.currentThread().interrupt();
        e.printStackTrace();
    }
}
/**
 * Gets whether you should show UI with rationale for requesting permissions.
 *
 * @param permissions The permissions your app wants to request.
 * @return Whether you can show permission rationale UI.
 */
private boolean shouldShowRequestPermissionRationale(String[] permissions) {
    boolean showRationale = false;
    // A rationale is needed as soon as any single permission requires one.
    for (String perm : permissions) {
        if (FragmentCompat.shouldShowRequestPermissionRationale(this, perm)) {
            showRationale = true;
            break;
        }
    }
    return showRationale;
}
/**
 * Requests permissions needed for recording video.
 */
private void requestVideoPermissions() {
    // Without a rationale to show, request the permissions straight away.
    if (!shouldShowRequestPermissionRationale(VIDEO_PERMISSIONS)) {
        FragmentCompat.requestPermissions(this, VIDEO_PERMISSIONS, REQUEST_VIDEO_PERMISSIONS);
        return;
    }
    new ConfirmationDialog().show(getChildFragmentManager(), FRAGMENT_DIALOG);
}
/**
 * Shows an error dialog when any of the video permissions was denied;
 * unrelated request codes are delegated to the superclass.
 */
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
    if (requestCode != REQUEST_VIDEO_PERMISSIONS) {
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
        return;
    }
    // A truncated result array counts as a denial, as does any single
    // non-granted entry.
    boolean denied = grantResults.length != VIDEO_PERMISSIONS.length;
    if (!denied) {
        for (int result : grantResults) {
            if (result != PackageManager.PERMISSION_GRANTED) {
                denied = true;
                break;
            }
        }
    }
    if (denied) {
        ErrorDialog.newInstance(getString(R.string.permission_request))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    }
}
/**
 * Returns true only when every permission in {@code permissions} is granted.
 */
private boolean hasPermissionsGranted(String[] permissions) {
    Activity activity = getActivity();
    for (String perm : permissions) {
        int state = ActivityCompat.checkSelfPermission(activity, perm);
        if (state != PackageManager.PERMISSION_GRANTED) {
            return false;
        }
    }
    return true;
}
/**
 * Tries to open a {@link CameraDevice}. The result is listened by `mStateCallback`.
 */
String mFrontCameraId;
String mBackCameraId;
private void openCamera(int width, int height) {
    if (!hasPermissionsGranted(VIDEO_PERMISSIONS)) {
        requestVideoPermissions();
        return;
    }
    final Activity activity = getActivity();
    if (null == activity || activity.isFinishing()) {
        return;
    }
    CameraManager manager = (CameraManager) activity.getSystemService(Context.CAMERA_SERVICE);
    try {
        Log.d(TAG, "tryAcquire");
        if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
            throw new RuntimeException("Time out waiting to lock camera opening.");
        }
        // Get camera ids to check/test front and back facing camera
        String[] cameraIds = manager.getCameraIdList();
        for (String id : cameraIds) {
            CameraCharacteristics characteristics = manager.getCameraCharacteristics(id);
            // LENS_FACING may be null (e.g. for some external cameras); the
            // original unconditional unboxing to int threw an NPE in that case.
            Integer facing = characteristics.get(CameraCharacteristics.LENS_FACING);
            if (facing != null && facing == CameraCharacteristics.LENS_FACING_FRONT) {
                mFrontCameraId = id;
            } else if (facing != null && facing == CameraCharacteristics.LENS_FACING_BACK) {
                mBackCameraId = id;
            } else {
                Log.d(TAG, "No appropriate Camera found.");
            }
        }
        // Prefer the back camera, but fall back to the front one on devices with
        // only a front-facing camera; the original code passed a null id to
        // getCameraCharacteristics() there and crashed.
        String cameraId = (mBackCameraId != null) ? mBackCameraId : mFrontCameraId;
        if (cameraId == null) {
            throw new RuntimeException("No usable camera found on this device.");
        }
        // Choose the sizes for camera preview and video recording
        CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
        StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
        // Video frames are produced into a RenderScript Allocation, so pick
        // from the sizes supported for that output class.
        mVideoSize = chooseOptimalSize(map.getOutputSizes(Allocation.class), WIDTH, HEIGHT);
        mImageReader = ImageReader.newInstance(mVideoSize.getWidth(), mVideoSize.getHeight(), PixelFormat.RGBA_8888, 1);
        // Build the YUV-to-RGB pipeline; deliver at most one frame every 120 ms.
        mConversion = new RgbConversion(mRS, mVideoSize, this, 120);
        mPreviewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
        mRectSrc = new Rect(0, 0, WIDTH, HEIGHT);
        int orientation = getResources().getConfiguration().orientation;
        if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
            mTextureView.setAspectRatio(mPreviewSize.getWidth(), mPreviewSize.getHeight());
        } else {
            mTextureView.setAspectRatio(mPreviewSize.getHeight(), mPreviewSize.getWidth());
        }
        configureTransform(width, height);
        manager.openCamera(cameraId, mStateCallback, null);
    } catch (CameraAccessException e) {
        Toast.makeText(activity, "Cannot access the camera.", Toast.LENGTH_SHORT).show();
        activity.finish();
    } catch (NullPointerException e) {
        // Currently an NPE is thrown if the Camera2API is used but not supported on the device.
        ErrorDialog.newInstance(getString(R.string.camera_error))
                .show(getChildFragmentManager(), FRAGMENT_DIALOG);
    } catch (InterruptedException e) {
        // Restore the interrupt flag before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new RuntimeException("Interrupted while trying to lock camera opening.");
    } catch (SecurityException e) {
        throw new SecurityException("No permission to access camera.");
    }
}
/**
 * Closes the camera device and the ImageReader while holding the open/close
 * lock so it cannot race with an in-flight openCamera().
 */
private void closeCamera() {
    try {
        mCameraOpenCloseLock.acquire();
        if (null != mCameraDevice) {
            mCameraDevice.close();
            mCameraDevice = null;
        }
        // Also release the ImageReader created in openCamera(); the original
        // code leaked its buffers across close/open cycles.
        if (null != mImageReader) {
            mImageReader.close();
            mImageReader = null;
        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag before surfacing the failure.
        Thread.currentThread().interrupt();
        throw new RuntimeException("Interrupted while trying to lock camera closing.");
    } finally {
        mCameraOpenCloseLock.release();
    }
}
/**
 * Start the camera preview. Streams the camera into two targets: the visible
 * preview TextureView and the RenderScript conversion input surface.
 */
private void startPreview() {
// The camera, both texture views and the chosen sizes must all be ready.
if (null == mCameraDevice || !mTextureView.isAvailable() || null == mPreviewSize || !mVideoTexture) {
return;
}
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
mPreviewBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
List<Surface> surfaces = new ArrayList<>();
// Target 1: the on-screen preview.
Surface previewSurface = new Surface(texture);
surfaces.add(previewSurface);
mPreviewBuilder.addTarget(previewSurface);
// Route the conversion output allocation to the ImageReader's surface.
Surface readerSurface = mImageReader.getSurface();
assert readerSurface != null;
mConversion.setOutputSurface(readerSurface);
// Target 2: the RenderScript input allocation receiving YUV frames.
Surface inputSurface = mConversion.getInputSurface();
surfaces.add(inputSurface);
mPreviewBuilder.addTarget(inputSurface);
mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
mPreviewSession = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Activity activity = getActivity();
if (null != activity) {
Toast.makeText(activity, "Failed", Toast.LENGTH_SHORT).show();
}
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
 * Update the camera preview. {@link #startPreview()} needs to be called in advance.
 */
private void updatePreview() {
    if (mCameraDevice == null) {
        return;
    }
    try {
        setUpCaptureRequestBuilder(mPreviewBuilder);
        // Repeating request keeps the preview streaming until the session ends.
        CaptureRequest request = mPreviewBuilder.build();
        mPreviewSession.setRepeatingRequest(request, null, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
// Common request tuning: let the framework handle 3A (auto-exposure,
// auto-focus, auto-white-balance) automatically.
private void setUpCaptureRequestBuilder(CaptureRequest.Builder builder) {
builder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
}
// Rotation/scale matrix applied to each converted frame in onFrameArrayInt().
private Matrix mBitMatrix;
/**
 * Computes the {@link android.graphics.Matrix} used by {@link #onFrameArrayInt(int[])}
 * to rotate and scale the converted RGB bitmap for display in `mVideoView`.
 * Called whenever the video TextureView becomes available or changes size.
 *
 * @param viewWidth The width of `mVideoView`
 * @param viewHeight The height of `mVideoView`
 */
private void configureTransformBitmap(int viewWidth, int viewHeight) {
    Activity activity = getActivity();
    // mVideoSize is only assigned once openCamera() has run; the video surface
    // can become available before that, and the original code then crashed
    // with an NPE on mVideoSize.getHeight() below.
    if (null == activity || null == mVideoSize) {
        return;
    }
    // Translate the display rotation into the degrees the frame must be turned.
    int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
    rotation = ORIENTATIONS.get(rotation);
    Matrix matrix = new Matrix();
    RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
    RectF bufferRect = new RectF(0, 0, mVideoSize.getHeight(), mVideoSize.getWidth());
    float centerX = viewRect.centerX();
    float centerY = viewRect.centerY();
    if (rotation != 0) {
        // Center the buffer rect on the view, map view -> buffer, then scale so
        // the frame fills the view, and finally rotate about the view center.
        bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
        matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
        float scale = Math.max(
                (float) viewHeight / mVideoSize.getHeight(),
                (float) viewWidth / mVideoSize.getWidth());
        matrix.postScale(scale, scale, centerX, centerY);
        matrix.postRotate(rotation, centerX, centerY);
    }
    mBitMatrix = matrix;
}
/**
 * Configures the necessary {@link android.graphics.Matrix} transformation to
 * `mTextureView` so the camera preview appears upright at the current display
 * rotation. Must not be called before the preview size is determined in
 * openCamera, or before the size of `mTextureView` is fixed.
 *
 * @param viewWidth The width of `mTextureView`
 * @param viewHeight The height of `mTextureView`
 */
private void configureTransform(int viewWidth, int viewHeight) {
Activity activity = getActivity();
if (null == mTextureView || null == mPreviewSize || null == activity) {
return;
}
int rotation = activity.getWindowManager().getDefaultDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
// Only landscape rotations need the swap/scale/rotate treatment.
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
// ROTATION_90 -> -90 degrees, ROTATION_270 -> +90 degrees.
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
}
mTextureView.setTransform(matrix);
}
/**
 * Orders {@code Size}s by ascending pixel area.
 */
static class CompareSizesByArea implements Comparator<Size> {
    @Override
    public int compare(Size lhs, Size rhs) {
        // Widen to long before multiplying so large sensor sizes cannot overflow.
        long leftArea = (long) lhs.getWidth() * lhs.getHeight();
        long rightArea = (long) rhs.getWidth() * rhs.getHeight();
        return Long.compare(leftArea, rightArea);
    }
}
/**
 * Dialog that shows an error message and finishes the activity on OK.
 */
public static class ErrorDialog extends DialogFragment {
    private static final String ARG_MESSAGE = "message";
    /** Creates an instance carrying {@code message} in its arguments bundle. */
    public static ErrorDialog newInstance(String message) {
        Bundle args = new Bundle();
        args.putString(ARG_MESSAGE, message);
        ErrorDialog dialog = new ErrorDialog();
        dialog.setArguments(args);
        return dialog;
    }
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        final Activity activity = getActivity();
        AlertDialog.Builder builder = new AlertDialog.Builder(activity);
        builder.setMessage(getArguments().getString(ARG_MESSAGE));
        builder.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialogInterface, int i) {
                // The message signals an unrecoverable state; close the app.
                activity.finish();
            }
        });
        return builder.create();
    }
}
/**
 * Dialog explaining why the camera/audio permissions are needed. OK re-requests
 * the permissions on behalf of the parent fragment; Cancel finishes the activity.
 */
public static class ConfirmationDialog extends DialogFragment {
@Override
public Dialog onCreateDialog(Bundle savedInstanceState) {
// The permission request must be issued by the parent fragment so that
// its onRequestPermissionsResult() receives the result.
final Fragment parent = getParentFragment();
return new AlertDialog.Builder(getActivity())
.setMessage(R.string.permission_request)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
FragmentCompat.requestPermissions(parent, VIDEO_PERMISSIONS,
REQUEST_VIDEO_PERMISSIONS);
}
})
.setNegativeButton(android.R.string.cancel,
new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
// Permissions refused: the app cannot work, so close it.
parent.getActivity().finish();
}
})
.create();
}
}
/**
 * Example how to use an integer ARGB array with a bitmap. Called by
 * {@link RgbConversion} for every rate-limited converted frame; draws the
 * frame, transformed by {@code mBitMatrix}, onto the video TextureView.
 *
 * @param frameDataRgb the integer ARGB array from renderscript.
 */
@Override
public void onFrameArrayInt(int[] frameDataRgb) {
    Bitmap bitmap = Bitmap.createBitmap(frameDataRgb, mVideoSize.getWidth(), mVideoSize.getHeight(), Bitmap.Config.ARGB_8888);
    // A null matrix is treated as identity by createBitmap(), so a frame that
    // arrives before configureTransformBitmap() ran is drawn untransformed
    // rather than crashing.
    Bitmap transformed = Bitmap.createBitmap(bitmap, 0, 0, bitmap.getWidth(), bitmap.getHeight(), mBitMatrix, true);
    Canvas c = mVideoView.lockCanvas();
    if (c != null) {
        c.drawColor(Color.BLACK);
        c.drawBitmap(transformed, mRectSrc, mRectDest, paL);
        mVideoView.unlockCanvasAndPost(c);
    }
    // Free the per-frame bitmaps eagerly; this path runs for every processed
    // frame and the original code put heavy allocation pressure on the GC.
    if (transformed != bitmap) {
        transformed.recycle();
    }
    bitmap.recycle();
}
/**
 * Converts camera YUV_420_888 frames to RGB via the ScriptC_yuv2rgb
 * RenderScript kernel. The camera renders into an IO_INPUT allocation
 * (exposed through getInputSurface()); processed frames are rate-limited and
 * delivered to the fragment as an int[] ARGB buffer via onFrameArrayInt().
 */
private static class RgbConversion implements Allocation.OnBufferAvailableListener {
// YUV allocation the camera renders into (USAGE_IO_INPUT).
private Allocation mInputAllocation;
// RGBA output allocation; its surface can be routed to an ImageReader.
private Allocation mOutputAllocation;
// U32 allocation the kernel fills with packed ARGB ints for the Java callback.
private Allocation mOutputAllocationInt;
// NOTE(review): passed only as the kernel's input allocation below; whether it
// is actually read is determined by the .rs kernel - confirm against that file.
private Allocation mScriptAllocation;
// Frame dimensions of the conversion pipeline.
private Size mSizeVideoCall;
private ScriptC_yuv2rgb mScriptC;
// Reused Java-side copy of mOutputAllocationInt's contents.
private int[] mOutBufferInt;
// Timestamp (ms) of the last processed frame, for rate limiting.
private long mLastProcessed;
private Camera2VideoFragment mFrameCallback;
// Minimum interval between processed frames, in milliseconds.
private final int mFrameEveryMs;
RgbConversion(RenderScript rs, Size dimensions, Camera2VideoFragment frameCallback, int frameMs) {
mSizeVideoCall = dimensions;
mFrameCallback = frameCallback;
mFrameEveryMs = frameMs;
createAllocations(rs);
// Process frames as the camera delivers them into the input allocation.
mInputAllocation.setOnBufferAvailableListener(this);
mScriptC = new ScriptC_yuv2rgb(rs);
mScriptC.set_gCurrentFrame(mInputAllocation);
mScriptC.set_gIntFrame(mOutputAllocationInt);
}
// Builds the input (YUV), output (RGBA) and int-output (U32) allocations.
private void createAllocations(RenderScript rs) {
mOutBufferInt =
new int[mSizeVideoCall.getWidth() * mSizeVideoCall.getHeight()];
final int width = mSizeVideoCall.getWidth();
final int height = mSizeVideoCall.getHeight();
Type.Builder yuvTypeBuilder = new Type.Builder(rs, Element.YUV(rs));
yuvTypeBuilder.setX(width);
yuvTypeBuilder.setY(height);
yuvTypeBuilder.setYuvFormat(ImageFormat.YUV_420_888);
mInputAllocation = Allocation.createTyped(rs, yuvTypeBuilder.create(),
Allocation.USAGE_IO_INPUT | Allocation.USAGE_SCRIPT);
Type rgbType = Type.createXY(rs, Element.RGBA_8888(rs), width, height);
Type intType = Type.createXY(rs, Element.U32(rs), width, height);
mScriptAllocation = Allocation.createTyped(rs, rgbType, Allocation.USAGE_SCRIPT);
mOutputAllocation = Allocation.createTyped(rs, rgbType, Allocation.USAGE_IO_OUTPUT | Allocation.USAGE_SCRIPT);
mOutputAllocationInt = Allocation.createTyped(rs, intType, Allocation.USAGE_SCRIPT);
}
// Surface the camera should render into (backed by the YUV input allocation).
Surface getInputSurface() {
return mInputAllocation.getSurface();
}
// Attaches/detaches the destination surface of the RGBA output allocation.
// NOTE(review): ioSend() is never called on mOutputAllocation in this class,
// so frames do not appear to be pushed to this surface; output reaches the
// app only through the int[] callback path below - confirm intent.
void setOutputSurface(Surface output) {
mOutputAllocation.setSurface(output);
}
@Override
public void onBufferAvailable(Allocation a) {
// Get the new frame into the input allocation
mInputAllocation.ioReceive();
// Run processing pass if it's time to process a frame - just to limit the output frequency
final long current = System.currentTimeMillis();
if ((current - mLastProcessed) >= mFrameEveryMs) {
mScriptC.forEach_yuv2rgbFrames(mScriptAllocation, mOutputAllocation);
if (mFrameCallback != null) {
// Pull the kernel's int output into the reused Java buffer and
// hand it to the fragment for drawing.
mOutputAllocationInt.copyTo(mOutBufferInt);
mFrameCallback.onFrameArrayInt(mOutBufferInt);
}
mLastProcessed = current;
}
}
}
}
@alanprodam
Copy link

Progress.GetFrameCallback... doesn't work

Copy link

ghost commented May 21, 2019

What is Progress.GetFrameCallback? It is referenced by the fragment's `implements` clause but its definition is not included in this gist.

@MDude007
Copy link

Has anyone successfully used this code to convert camera frame from yuv to rgb?? Pls share.

@noncom
Copy link

noncom commented Jul 28, 2020

Has anyone successfully used this code to convert camera frame from yuv to rgb?? Pls share.

To whoever has problems with this: I don't know what exactly your problems are, but I've found a way to implement all this functionality. Take a look at https://github.com/noncom/CameraCaptureNative, it's intended to be used with Unity3D and C++, but if you strip all that away and simply leave the Java/Kotlin part, then that's what you can use in your Java/Kotlin app.

Notice that the ScriptC_yuv2rgb class is crucial for this and it's auto-generated by Android Studio from a render script (in the "rs" folder of the CameraCaptureNative project). You need to run the build once for it to be auto-generated, even though it's shown as a missing class at the beginning.

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment